diff --git a/dist/cjs/change-event-buffer.js b/dist/cjs/change-event-buffer.js deleted file mode 100644 index b304ea4e6ad..00000000000 --- a/dist/cjs/change-event-buffer.js +++ /dev/null @@ -1,103 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.ChangeEventBuffer = void 0; -exports.createChangeEventBuffer = createChangeEventBuffer; -var _operators = require("rxjs/operators"); -/** - * a buffer-cache which holds the last X changeEvents of the collection - */ -var ChangeEventBuffer = exports.ChangeEventBuffer = /*#__PURE__*/function () { - /** - * array with changeEvents - * starts with oldest known event, ends with newest - */ - - function ChangeEventBuffer(collection) { - this.subs = []; - this.limit = 100; - this.counter = 0; - this.eventCounterMap = new WeakMap(); - this.buffer = []; - this.collection = collection; - this.subs.push(this.collection.$.pipe((0, _operators.filter)(cE => !cE.isLocal)).subscribe(cE => this._handleChangeEvent(cE))); - } - var _proto = ChangeEventBuffer.prototype; - _proto._handleChangeEvent = function _handleChangeEvent(changeEvent) { - this.counter++; - this.buffer.push(changeEvent); - this.eventCounterMap.set(changeEvent, this.counter); - while (this.buffer.length > this.limit) { - this.buffer.shift(); - } - } - - /** - * gets the array-index for the given pointer - * @return arrayIndex which can be used to iterate from there. If null, pointer is out of lower bound - */; - _proto.getArrayIndexByPointer = function getArrayIndexByPointer(pointer) { - var oldestEvent = this.buffer[0]; - var oldestCounter = this.eventCounterMap.get(oldestEvent); - if (pointer < oldestCounter) return null; // out of bounds - - var rest = pointer - oldestCounter; - return rest; - } - - /** - * get all changeEvents which came in later than the pointer-event - * @return array with change-events. 
If null, pointer out of bounds - */; - _proto.getFrom = function getFrom(pointer) { - var ret = []; - var currentIndex = this.getArrayIndexByPointer(pointer); - if (currentIndex === null) - // out of bounds - return null; - while (true) { - var nextEvent = this.buffer[currentIndex]; - currentIndex++; - if (!nextEvent) { - return ret; - } else { - ret.push(nextEvent); - } - } - }; - _proto.runFrom = function runFrom(pointer, fn) { - var ret = this.getFrom(pointer); - if (ret === null) { - throw new Error('out of bounds'); - } else { - ret.forEach(cE => fn(cE)); - } - } - - /** - * no matter how many operations are done on one document, - * only the last operation has to be checked to calculate the new state - * this function reduces the events to the last ChangeEvent of each doc - */; - _proto.reduceByLastOfDoc = function reduceByLastOfDoc(changeEvents) { - return changeEvents.slice(0); - // TODO the old implementation was wrong - // because it did not correctly reassigned the previousData of the changeevents - // this should be added to the event-reduce library and not be done in RxDB - var docEventMap = {}; - changeEvents.forEach(changeEvent => { - docEventMap[changeEvent.documentId] = changeEvent; - }); - return Object.values(docEventMap); - }; - _proto.destroy = function destroy() { - this.subs.forEach(sub => sub.unsubscribe()); - }; - return ChangeEventBuffer; -}(); -function createChangeEventBuffer(collection) { - return new ChangeEventBuffer(collection); -} -//# sourceMappingURL=change-event-buffer.js.map \ No newline at end of file diff --git a/dist/cjs/change-event-buffer.js.map b/dist/cjs/change-event-buffer.js.map deleted file mode 100644 index 9bf8da5b148..00000000000 --- a/dist/cjs/change-event-buffer.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"change-event-buffer.js","names":["_operators","require","ChangeEventBuffer","exports","collection","subs","limit","counter","eventCounterMap","WeakMap","buffer","push","$","pipe","filter","cE","isLocal","subscribe","_handleChangeEvent","_proto","prototype","changeEvent","set","length","shift","getArrayIndexByPointer","pointer","oldestEvent","oldestCounter","get","rest","getFrom","ret","currentIndex","nextEvent","runFrom","fn","Error","forEach","reduceByLastOfDoc","changeEvents","slice","docEventMap","documentId","Object","values","destroy","sub","unsubscribe","createChangeEventBuffer"],"sources":["../../src/change-event-buffer.ts"],"sourcesContent":["/**\n * a buffer-cache which holds the last X changeEvents of the collection\n */\nimport {\n Subscription\n} from 'rxjs';\nimport { filter } from 'rxjs/operators';\nimport type {\n RxChangeEvent,\n RxCollection\n} from './types/index.d.ts';\n\nexport class ChangeEventBuffer {\n private subs: Subscription[] = [];\n public limit: number = 100;\n public counter: number = 0;\n private eventCounterMap: WeakMap<\n RxChangeEvent, number\n > = new WeakMap();\n\n /**\n * array with changeEvents\n * starts with oldest known event, ends with newest\n */\n public buffer: RxChangeEvent[] = [];\n\n constructor(\n public collection: RxCollection\n ) {\n this.subs.push(\n this.collection.$.pipe(\n filter(cE => !cE.isLocal)\n ).subscribe((cE: any) => this._handleChangeEvent(cE))\n );\n }\n\n _handleChangeEvent(changeEvent: RxChangeEvent) {\n this.counter++;\n this.buffer.push(changeEvent);\n this.eventCounterMap.set(changeEvent, this.counter);\n while (this.buffer.length > this.limit) {\n this.buffer.shift();\n }\n }\n\n /**\n * gets the array-index for the given pointer\n * @return arrayIndex which can be used to iterate from there. 
If null, pointer is out of lower bound\n */\n getArrayIndexByPointer(pointer: number): number | null {\n const oldestEvent = this.buffer[0];\n const oldestCounter = this.eventCounterMap.get(\n oldestEvent\n ) as number;\n\n if (pointer < oldestCounter)\n return null; // out of bounds\n\n const rest = pointer - oldestCounter;\n return rest;\n }\n\n /**\n * get all changeEvents which came in later than the pointer-event\n * @return array with change-events. If null, pointer out of bounds\n */\n getFrom(pointer: number): RxChangeEvent[] | null {\n const ret = [];\n let currentIndex = this.getArrayIndexByPointer(pointer);\n if (currentIndex === null) // out of bounds\n return null;\n\n while (true) {\n const nextEvent = this.buffer[currentIndex];\n currentIndex++;\n if (!nextEvent) {\n return ret;\n } else {\n ret.push(nextEvent);\n }\n }\n }\n\n runFrom(pointer: number, fn: Function) {\n const ret = this.getFrom(pointer);\n if (ret === null) {\n throw new Error('out of bounds');\n } else {\n ret.forEach(cE => fn(cE));\n }\n }\n\n /**\n * no matter how many operations are done on one document,\n * only the last operation has to be checked to calculate the new state\n * this function reduces the events to the last ChangeEvent of each doc\n */\n reduceByLastOfDoc(changeEvents: RxChangeEvent[]): RxChangeEvent[] {\n return changeEvents.slice(0);\n // TODO the old implementation was wrong\n // because it did not correctly reassigned the previousData of the changeevents\n // this should be added to the event-reduce library and not be done in RxDB\n const docEventMap: any = {};\n changeEvents.forEach(changeEvent => {\n docEventMap[changeEvent.documentId] = changeEvent;\n });\n return Object.values(docEventMap);\n }\n\n destroy() {\n this.subs.forEach(sub => sub.unsubscribe());\n }\n}\n\nexport function createChangeEventBuffer(\n collection: RxCollection\n) {\n return new 
ChangeEventBuffer(collection);\n}\n"],"mappings":";;;;;;;AAMA,IAAAA,UAAA,GAAAC,OAAA;AANA;AACA;AACA;AAFA,IAYaC,iBAAiB,GAAAC,OAAA,CAAAD,iBAAA;EAQ1B;AACJ;AACA;AACA;;EAGI,SAAAA,kBACWE,UAAwB,EACjC;IAAA,KAfMC,IAAI,GAAmB,EAAE;IAAA,KAC1BC,KAAK,GAAW,GAAG;IAAA,KACnBC,OAAO,GAAW,CAAC;IAAA,KAClBC,eAAe,GAEnB,IAAIC,OAAO,CAAC,CAAC;IAAA,KAMVC,MAAM,GAA+B,EAAE;IAAA,KAGnCN,UAAwB,GAAxBA,UAAwB;IAE/B,IAAI,CAACC,IAAI,CAACM,IAAI,CACV,IAAI,CAACP,UAAU,CAACQ,CAAC,CAACC,IAAI,CAClB,IAAAC,iBAAM,EAACC,EAAE,IAAI,CAACA,EAAE,CAACC,OAAO,CAC5B,CAAC,CAACC,SAAS,CAAEF,EAAO,IAAK,IAAI,CAACG,kBAAkB,CAACH,EAAE,CAAC,CACxD,CAAC;EACL;EAAC,IAAAI,MAAA,GAAAjB,iBAAA,CAAAkB,SAAA;EAAAD,MAAA,CAEDD,kBAAkB,GAAlB,SAAAA,mBAAmBG,WAAqC,EAAE;IACtD,IAAI,CAACd,OAAO,EAAE;IACd,IAAI,CAACG,MAAM,CAACC,IAAI,CAACU,WAAW,CAAC;IAC7B,IAAI,CAACb,eAAe,CAACc,GAAG,CAACD,WAAW,EAAE,IAAI,CAACd,OAAO,CAAC;IACnD,OAAO,IAAI,CAACG,MAAM,CAACa,MAAM,GAAG,IAAI,CAACjB,KAAK,EAAE;MACpC,IAAI,CAACI,MAAM,CAACc,KAAK,CAAC,CAAC;IACvB;EACJ;;EAEA;AACJ;AACA;AACA,KAHI;EAAAL,MAAA,CAIAM,sBAAsB,GAAtB,SAAAA,uBAAuBC,OAAe,EAAiB;IACnD,IAAMC,WAAW,GAAG,IAAI,CAACjB,MAAM,CAAC,CAAC,CAAC;IAClC,IAAMkB,aAAa,GAAG,IAAI,CAACpB,eAAe,CAACqB,GAAG,CAC1CF,WACJ,CAAW;IAEX,IAAID,OAAO,GAAGE,aAAa,EACvB,OAAO,IAAI,CAAC,CAAC;;IAEjB,IAAME,IAAI,GAAGJ,OAAO,GAAGE,aAAa;IACpC,OAAOE,IAAI;EACf;;EAEA;AACJ;AACA;AACA,KAHI;EAAAX,MAAA,CAIAY,OAAO,GAAP,SAAAA,QAAQL,OAAe,EAAqC;IACxD,IAAMM,GAAG,GAAG,EAAE;IACd,IAAIC,YAAY,GAAG,IAAI,CAACR,sBAAsB,CAACC,OAAO,CAAC;IACvD,IAAIO,YAAY,KAAK,IAAI;MAAE;MACvB,OAAO,IAAI;IAEf,OAAO,IAAI,EAAE;MACT,IAAMC,SAAS,GAAG,IAAI,CAACxB,MAAM,CAACuB,YAAY,CAAC;MAC3CA,YAAY,EAAE;MACd,IAAI,CAACC,SAAS,EAAE;QACZ,OAAOF,GAAG;MACd,CAAC,MAAM;QACHA,GAAG,CAACrB,IAAI,CAACuB,SAAS,CAAC;MACvB;IACJ;EACJ,CAAC;EAAAf,MAAA,CAEDgB,OAAO,GAAP,SAAAA,QAAQT,OAAe,EAAEU,EAAY,EAAE;IACnC,IAAMJ,GAAG,GAAG,IAAI,CAACD,OAAO,CAACL,OAAO,CAAC;IACjC,IAAIM,GAAG,KAAK,IAAI,EAAE;MACd,MAAM,IAAIK,KAAK,CAAC,eAAe,CAAC;IACpC,CAAC,MAAM;MACHL,GAAG,CAACM,OAAO,CAACvB,EAAE,IAAIqB,EAAE,CAACrB,EAAE,CAAC,CAAC;IAC7B;EACJ;;EAEA;AACJ;AACA;AACA;AACA,KAJI;E
AAAI,MAAA,CAKAoB,iBAAiB,GAAjB,SAAAA,kBAAkBC,YAAwC,EAA8B;IACpF,OAAOA,YAAY,CAACC,KAAK,CAAC,CAAC,CAAC;IAC5B;IACA;IACA;IACA,IAAMC,WAAgB,GAAG,CAAC,CAAC;IAC3BF,YAAY,CAACF,OAAO,CAACjB,WAAW,IAAI;MAChCqB,WAAW,CAACrB,WAAW,CAACsB,UAAU,CAAC,GAAGtB,WAAW;IACrD,CAAC,CAAC;IACF,OAAOuB,MAAM,CAACC,MAAM,CAACH,WAAW,CAAC;EACrC,CAAC;EAAAvB,MAAA,CAED2B,OAAO,GAAP,SAAAA,QAAA,EAAU;IACN,IAAI,CAACzC,IAAI,CAACiC,OAAO,CAACS,GAAG,IAAIA,GAAG,CAACC,WAAW,CAAC,CAAC,CAAC;EAC/C,CAAC;EAAA,OAAA9C,iBAAA;AAAA;AAGE,SAAS+C,uBAAuBA,CACnC7C,UAAwC,EAC1C;EACE,OAAO,IAAIF,iBAAiB,CAAYE,UAAU,CAAC;AACvD","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/custom-index.js b/dist/cjs/custom-index.js deleted file mode 100644 index c20fd19c381..00000000000 --- a/dist/cjs/custom-index.js +++ /dev/null @@ -1,284 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.changeIndexableStringByOneQuantum = changeIndexableStringByOneQuantum; -exports.getIndexMeta = getIndexMeta; -exports.getIndexStringLength = getIndexStringLength; -exports.getIndexableStringMonad = getIndexableStringMonad; -exports.getNumberIndexString = getNumberIndexString; -exports.getPrimaryKeyFromIndexableString = getPrimaryKeyFromIndexableString; -exports.getStartIndexStringFromLowerBound = getStartIndexStringFromLowerBound; -exports.getStartIndexStringFromUpperBound = getStartIndexStringFromUpperBound; -exports.getStringLengthOfIndexNumber = getStringLengthOfIndexNumber; -var _rxSchemaHelper = require("./rx-schema-helper.js"); -var _index = require("./plugins/utils/index.js"); -var _queryPlanner = require("./query-planner.js"); -/** - * For some RxStorage implementations, - * we need to use our custom crafted indexes - * so we can easily iterate over them. And sort plain arrays of document data. - * - * We really often have to craft an index string for a given document. - * Performance of everything in this file is very important - * which is why the code sometimes looks strange. 
- * Run performance tests before and after you touch anything here! - */ - -/** - * Prepare all relevant information - * outside of the returned function - * from getIndexableStringMonad() - * to save performance when the returned - * function is called many times. - */ - -function getIndexMeta(schema, index) { - var fieldNameProperties = index.map(fieldName => { - var schemaPart = (0, _rxSchemaHelper.getSchemaByObjectPath)(schema, fieldName); - if (!schemaPart) { - throw new Error('not in schema: ' + fieldName); - } - var type = schemaPart.type; - var parsedLengths; - if (type === 'number' || type === 'integer') { - parsedLengths = getStringLengthOfIndexNumber(schemaPart); - } - var getValue = (0, _index.objectPathMonad)(fieldName); - var maxLength = schemaPart.maxLength ? schemaPart.maxLength : 0; - var getIndexStringPart; - if (type === 'string') { - getIndexStringPart = docData => { - var fieldValue = getValue(docData); - if (!fieldValue) { - fieldValue = ''; - } - return fieldValue.padEnd(maxLength, ' '); - }; - } else if (type === 'boolean') { - getIndexStringPart = docData => { - var fieldValue = getValue(docData); - return fieldValue ? '1' : '0'; - }; - } else { - // number - getIndexStringPart = docData => { - var fieldValue = getValue(docData); - return getNumberIndexString(parsedLengths, fieldValue); - }; - } - var ret = { - fieldName, - schemaPart, - parsedLengths, - getValue, - getIndexStringPart - }; - return ret; - }); - return fieldNameProperties; -} - -/** - * Crafts an indexable string that can be used - * to check if a document would be sorted below or above - * another documents, dependent on the index values. - * @monad for better performance - * - * IMPORTANT: Performance is really important here - * which is why we code so 'strange'. - * Always run performance tests when you want to - * change something in this method. 
- */ -function getIndexableStringMonad(schema, index) { - var fieldNameProperties = getIndexMeta(schema, index); - var fieldNamePropertiesAmount = fieldNameProperties.length; - var indexPartsFunctions = fieldNameProperties.map(r => r.getIndexStringPart); - - /** - * @hotPath Performance of this function is very critical! - */ - var ret = function (docData) { - var str = ''; - for (var i = 0; i < fieldNamePropertiesAmount; ++i) { - str += indexPartsFunctions[i](docData); - } - return str; - }; - return ret; -} -function getStringLengthOfIndexNumber(schemaPart) { - var minimum = Math.floor(schemaPart.minimum); - var maximum = Math.ceil(schemaPart.maximum); - var multipleOf = schemaPart.multipleOf; - var valueSpan = maximum - minimum; - var nonDecimals = valueSpan.toString().length; - var multipleOfParts = multipleOf.toString().split('.'); - var decimals = 0; - if (multipleOfParts.length > 1) { - decimals = multipleOfParts[1].length; - } - return { - minimum, - maximum, - nonDecimals, - decimals, - roundedMinimum: minimum - }; -} -function getIndexStringLength(schema, index) { - var fieldNameProperties = getIndexMeta(schema, index); - var length = 0; - fieldNameProperties.forEach(props => { - var schemaPart = props.schemaPart; - var type = schemaPart.type; - if (type === 'string') { - length += schemaPart.maxLength; - } else if (type === 'boolean') { - length += 1; - } else { - var parsedLengths = props.parsedLengths; - length = length + parsedLengths.nonDecimals + parsedLengths.decimals; - } - }); - return length; -} -function getPrimaryKeyFromIndexableString(indexableString, primaryKeyLength) { - var paddedPrimaryKey = indexableString.slice(primaryKeyLength * -1); - // we can safely trim here because the primary key is not allowed to start or end with a space char. 
- var primaryKey = paddedPrimaryKey.trim(); - return primaryKey; -} -function getNumberIndexString(parsedLengths, fieldValue) { - /** - * Ensure that the given value is in the boundaries - * of the schema, otherwise it would create a broken index string. - * This can happen for example if you have a minimum of 0 - * and run a query like - * selector { - * numField: { $gt: -1000 } - * } - */ - if (typeof fieldValue === 'undefined') { - fieldValue = 0; - } - if (fieldValue < parsedLengths.minimum) { - fieldValue = parsedLengths.minimum; - } - if (fieldValue > parsedLengths.maximum) { - fieldValue = parsedLengths.maximum; - } - var nonDecimalsValueAsString = (Math.floor(fieldValue) - parsedLengths.roundedMinimum).toString(); - var str = nonDecimalsValueAsString.padStart(parsedLengths.nonDecimals, '0'); - if (parsedLengths.decimals > 0) { - var splitByDecimalPoint = fieldValue.toString().split('.'); - var decimalValueAsString = splitByDecimalPoint.length > 1 ? splitByDecimalPoint[1] : '0'; - str += decimalValueAsString.padEnd(parsedLengths.decimals, '0'); - } - return str; -} -function getStartIndexStringFromLowerBound(schema, index, lowerBound) { - var str = ''; - index.forEach((fieldName, idx) => { - var schemaPart = (0, _rxSchemaHelper.getSchemaByObjectPath)(schema, fieldName); - var bound = lowerBound[idx]; - var type = schemaPart.type; - switch (type) { - case 'string': - var maxLength = (0, _index.ensureNotFalsy)(schemaPart.maxLength, 'maxLength not set'); - if (typeof bound === 'string') { - str += bound.padEnd(maxLength, ' '); - } else { - // str += ''.padStart(maxLength, inclusiveStart ? ' ' : INDEX_MAX); - str += ''.padEnd(maxLength, ' '); - } - break; - case 'boolean': - if (bound === null) { - str += '0'; - } else if (bound === _queryPlanner.INDEX_MIN) { - str += '0'; - } else if (bound === _queryPlanner.INDEX_MAX) { - str += '1'; - } else { - var boolToStr = bound ? 
'1' : '0'; - str += boolToStr; - } - break; - case 'number': - case 'integer': - var parsedLengths = getStringLengthOfIndexNumber(schemaPart); - if (bound === null || bound === _queryPlanner.INDEX_MIN) { - var fillChar = '0'; - str += fillChar.repeat(parsedLengths.nonDecimals + parsedLengths.decimals); - } else if (bound === _queryPlanner.INDEX_MAX) { - str += getNumberIndexString(parsedLengths, parsedLengths.maximum); - } else { - var add = getNumberIndexString(parsedLengths, bound); - str += add; - } - break; - default: - throw new Error('unknown index type ' + type); - } - }); - return str; -} -function getStartIndexStringFromUpperBound(schema, index, upperBound) { - var str = ''; - index.forEach((fieldName, idx) => { - var schemaPart = (0, _rxSchemaHelper.getSchemaByObjectPath)(schema, fieldName); - var bound = upperBound[idx]; - var type = schemaPart.type; - switch (type) { - case 'string': - var maxLength = (0, _index.ensureNotFalsy)(schemaPart.maxLength, 'maxLength not set'); - if (typeof bound === 'string' && bound !== _queryPlanner.INDEX_MAX) { - str += bound.padEnd(maxLength, ' '); - } else if (bound === _queryPlanner.INDEX_MIN) { - str += ''.padEnd(maxLength, ' '); - } else { - str += ''.padEnd(maxLength, _queryPlanner.INDEX_MAX); - } - break; - case 'boolean': - if (bound === null) { - str += '1'; - } else { - var boolToStr = bound ? 
'1' : '0'; - str += boolToStr; - } - break; - case 'number': - case 'integer': - var parsedLengths = getStringLengthOfIndexNumber(schemaPart); - if (bound === null || bound === _queryPlanner.INDEX_MAX) { - var fillChar = '9'; - str += fillChar.repeat(parsedLengths.nonDecimals + parsedLengths.decimals); - } else if (bound === _queryPlanner.INDEX_MIN) { - var _fillChar = '0'; - str += _fillChar.repeat(parsedLengths.nonDecimals + parsedLengths.decimals); - } else { - str += getNumberIndexString(parsedLengths, bound); - } - break; - default: - throw new Error('unknown index type ' + type); - } - }); - return str; -} - -/** - * Used in storages where it is not possible - * to define inclusiveEnd/inclusiveStart - */ -function changeIndexableStringByOneQuantum(str, direction) { - var lastChar = str.slice(-1); - var charCode = lastChar.charCodeAt(0); - charCode = charCode + direction; - var withoutLastChar = str.slice(0, -1); - return withoutLastChar + String.fromCharCode(charCode); -} -//# sourceMappingURL=custom-index.js.map \ No newline at end of file diff --git a/dist/cjs/custom-index.js.map b/dist/cjs/custom-index.js.map deleted file mode 100644 index bd2200b3654..00000000000 --- a/dist/cjs/custom-index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"custom-index.js","names":["_rxSchemaHelper","require","_index","_queryPlanner","getIndexMeta","schema","index","fieldNameProperties","map","fieldName","schemaPart","getSchemaByObjectPath","Error","type","parsedLengths","getStringLengthOfIndexNumber","getValue","objectPathMonad","maxLength","getIndexStringPart","docData","fieldValue","padEnd","getNumberIndexString","ret","getIndexableStringMonad","fieldNamePropertiesAmount","length","indexPartsFunctions","r","str","i","minimum","Math","floor","maximum","ceil","multipleOf","valueSpan","nonDecimals","toString","multipleOfParts","split","decimals","roundedMinimum","getIndexStringLength","forEach","props","getPrimaryKeyFromIndexableString","indexableString","primaryKeyLength","paddedPrimaryKey","slice","primaryKey","trim","nonDecimalsValueAsString","padStart","splitByDecimalPoint","decimalValueAsString","getStartIndexStringFromLowerBound","lowerBound","idx","bound","ensureNotFalsy","INDEX_MIN","INDEX_MAX","boolToStr","fillChar","repeat","add","getStartIndexStringFromUpperBound","upperBound","changeIndexableStringByOneQuantum","direction","lastChar","charCode","charCodeAt","withoutLastChar","String","fromCharCode"],"sources":["../../src/custom-index.ts"],"sourcesContent":["/**\n * For some RxStorage implementations,\n * we need to use our custom crafted indexes\n * so we can easily iterate over them. 
And sort plain arrays of document data.\n *\n * We really often have to craft an index string for a given document.\n * Performance of everything in this file is very important\n * which is why the code sometimes looks strange.\n * Run performance tests before and after you touch anything here!\n */\n\nimport {\n getSchemaByObjectPath\n} from './rx-schema-helper.ts';\nimport type {\n JsonSchema,\n RxDocumentData,\n RxJsonSchema\n} from './types/index.ts';\nimport {\n ensureNotFalsy,\n objectPathMonad,\n ObjectPathMonadFunction\n} from './plugins/utils/index.ts';\nimport {\n INDEX_MAX,\n INDEX_MIN\n} from './query-planner.ts';\n\n\n/**\n * Prepare all relevant information\n * outside of the returned function\n * from getIndexableStringMonad()\n * to save performance when the returned\n * function is called many times.\n */\ntype IndexMetaField = {\n fieldName: string;\n schemaPart: JsonSchema;\n /*\n * Only in number fields.\n */\n parsedLengths?: ParsedLengths;\n getValue: ObjectPathMonadFunction;\n getIndexStringPart: (docData: RxDocumentData) => string;\n};\n\nexport function getIndexMeta(\n schema: RxJsonSchema>,\n index: string[]\n): IndexMetaField[] {\n const fieldNameProperties: IndexMetaField[] = index.map(fieldName => {\n const schemaPart = getSchemaByObjectPath(\n schema,\n fieldName\n );\n if (!schemaPart) {\n throw new Error('not in schema: ' + fieldName);\n }\n const type = schemaPart.type;\n let parsedLengths: ParsedLengths | undefined;\n if (type === 'number' || type === 'integer') {\n parsedLengths = getStringLengthOfIndexNumber(\n schemaPart\n );\n }\n\n const getValue = objectPathMonad(fieldName);\n const maxLength = schemaPart.maxLength ? 
schemaPart.maxLength : 0;\n\n let getIndexStringPart: (docData: RxDocumentData) => string;\n if (type === 'string') {\n getIndexStringPart = docData => {\n let fieldValue = getValue(docData);\n if (!fieldValue) {\n fieldValue = '';\n }\n return fieldValue.padEnd(maxLength, ' ');\n };\n } else if (type === 'boolean') {\n getIndexStringPart = docData => {\n const fieldValue = getValue(docData);\n return fieldValue ? '1' : '0';\n };\n } else { // number\n getIndexStringPart = docData => {\n const fieldValue = getValue(docData);\n return getNumberIndexString(\n parsedLengths as any,\n fieldValue\n );\n };\n }\n\n const ret: IndexMetaField = {\n fieldName,\n schemaPart,\n parsedLengths,\n getValue,\n getIndexStringPart\n };\n return ret;\n });\n return fieldNameProperties;\n}\n\n\n/**\n * Crafts an indexable string that can be used\n * to check if a document would be sorted below or above\n * another documents, dependent on the index values.\n * @monad for better performance\n *\n * IMPORTANT: Performance is really important here\n * which is why we code so 'strange'.\n * Always run performance tests when you want to\n * change something in this method.\n */\nexport function getIndexableStringMonad(\n schema: RxJsonSchema>,\n index: string[]\n): (docData: RxDocumentData) => string {\n const fieldNameProperties = getIndexMeta(schema, index);\n const fieldNamePropertiesAmount = fieldNameProperties.length;\n const indexPartsFunctions = fieldNameProperties.map(r => r.getIndexStringPart);\n\n\n /**\n * @hotPath Performance of this function is very critical!\n */\n const ret = function (docData: RxDocumentData): string {\n let str = '';\n for (let i = 0; i < fieldNamePropertiesAmount; ++i) {\n str += indexPartsFunctions[i](docData);\n }\n return str;\n };\n return ret;\n}\n\n\ndeclare type ParsedLengths = {\n minimum: number;\n maximum: number;\n nonDecimals: number;\n decimals: number;\n roundedMinimum: number;\n};\nexport function getStringLengthOfIndexNumber(\n schemaPart: 
JsonSchema\n): ParsedLengths {\n const minimum = Math.floor(schemaPart.minimum as number);\n const maximum = Math.ceil(schemaPart.maximum as number);\n const multipleOf: number = schemaPart.multipleOf as number;\n\n const valueSpan = maximum - minimum;\n const nonDecimals = valueSpan.toString().length;\n\n const multipleOfParts = multipleOf.toString().split('.');\n let decimals = 0;\n if (multipleOfParts.length > 1) {\n decimals = multipleOfParts[1].length;\n }\n return {\n minimum,\n maximum,\n nonDecimals,\n decimals,\n roundedMinimum: minimum\n };\n}\n\nexport function getIndexStringLength(\n schema: RxJsonSchema>,\n index: string[]\n): number {\n const fieldNameProperties = getIndexMeta(schema, index);\n let length = 0;\n fieldNameProperties.forEach(props => {\n const schemaPart = props.schemaPart;\n const type = schemaPart.type;\n\n if (type === 'string') {\n length += schemaPart.maxLength as number;\n } else if (type === 'boolean') {\n length += 1;\n } else {\n const parsedLengths = props.parsedLengths as ParsedLengths;\n length = length + parsedLengths.nonDecimals + parsedLengths.decimals;\n }\n\n });\n return length;\n}\n\n\nexport function getPrimaryKeyFromIndexableString(\n indexableString: string,\n primaryKeyLength: number\n): string {\n const paddedPrimaryKey = indexableString.slice(primaryKeyLength * -1);\n // we can safely trim here because the primary key is not allowed to start or end with a space char.\n const primaryKey = paddedPrimaryKey.trim();\n return primaryKey;\n}\n\n\nexport function getNumberIndexString(\n parsedLengths: ParsedLengths,\n fieldValue: number\n): string {\n /**\n * Ensure that the given value is in the boundaries\n * of the schema, otherwise it would create a broken index string.\n * This can happen for example if you have a minimum of 0\n * and run a query like\n * selector {\n * numField: { $gt: -1000 }\n * }\n */\n if (typeof fieldValue === 'undefined') {\n fieldValue = 0;\n }\n if (fieldValue < parsedLengths.minimum) {\n 
fieldValue = parsedLengths.minimum;\n }\n if (fieldValue > parsedLengths.maximum) {\n fieldValue = parsedLengths.maximum;\n }\n\n const nonDecimalsValueAsString = (Math.floor(fieldValue) - parsedLengths.roundedMinimum).toString();\n let str = nonDecimalsValueAsString.padStart(parsedLengths.nonDecimals, '0');\n\n if (parsedLengths.decimals > 0) {\n const splitByDecimalPoint = fieldValue.toString().split('.');\n const decimalValueAsString = splitByDecimalPoint.length > 1 ? splitByDecimalPoint[1] : '0';\n str += decimalValueAsString.padEnd(parsedLengths.decimals, '0');\n }\n return str;\n}\n\nexport function getStartIndexStringFromLowerBound(\n schema: RxJsonSchema,\n index: string[],\n lowerBound: (string | boolean | number | null | undefined)[]\n): string {\n let str = '';\n index.forEach((fieldName, idx) => {\n const schemaPart = getSchemaByObjectPath(\n schema,\n fieldName\n );\n const bound = lowerBound[idx];\n const type = schemaPart.type;\n\n switch (type) {\n case 'string':\n const maxLength = ensureNotFalsy(schemaPart.maxLength, 'maxLength not set');\n if (typeof bound === 'string') {\n str += (bound as string).padEnd(maxLength, ' ');\n } else {\n // str += ''.padStart(maxLength, inclusiveStart ? ' ' : INDEX_MAX);\n str += ''.padEnd(maxLength, ' ');\n }\n break;\n case 'boolean':\n if (bound === null) {\n str += '0';\n } else if (bound === INDEX_MIN) {\n str += '0';\n } else if (bound === INDEX_MAX) {\n str += '1';\n } else {\n const boolToStr = bound ? 
'1' : '0';\n str += boolToStr;\n }\n break;\n case 'number':\n case 'integer':\n const parsedLengths = getStringLengthOfIndexNumber(\n schemaPart\n );\n if (bound === null || bound === INDEX_MIN) {\n const fillChar = '0';\n str += fillChar.repeat(parsedLengths.nonDecimals + parsedLengths.decimals);\n } else if (bound === INDEX_MAX) {\n str += getNumberIndexString(\n parsedLengths,\n parsedLengths.maximum\n );\n } else {\n const add = getNumberIndexString(\n parsedLengths,\n bound as number\n );\n str += add;\n }\n break;\n default:\n throw new Error('unknown index type ' + type);\n }\n });\n return str;\n}\n\n\nexport function getStartIndexStringFromUpperBound(\n schema: RxJsonSchema,\n index: string[],\n upperBound: (string | boolean | number | null | undefined)[]\n): string {\n let str = '';\n index.forEach((fieldName, idx) => {\n const schemaPart = getSchemaByObjectPath(\n schema,\n fieldName\n );\n const bound = upperBound[idx];\n const type = schemaPart.type;\n\n switch (type) {\n case 'string':\n const maxLength = ensureNotFalsy(schemaPart.maxLength, 'maxLength not set');\n if (typeof bound === 'string' && bound !== INDEX_MAX) {\n str += (bound as string).padEnd(maxLength, ' ');\n } else if (bound === INDEX_MIN) {\n str += ''.padEnd(maxLength, ' ');\n } else {\n str += ''.padEnd(maxLength, INDEX_MAX);\n }\n break;\n case 'boolean':\n if (bound === null) {\n str += '1';\n } else {\n const boolToStr = bound ? 
'1' : '0';\n str += boolToStr;\n }\n break;\n case 'number':\n case 'integer':\n const parsedLengths = getStringLengthOfIndexNumber(\n schemaPart\n );\n if (bound === null || bound === INDEX_MAX) {\n const fillChar = '9';\n str += fillChar.repeat(parsedLengths.nonDecimals + parsedLengths.decimals);\n } else if (bound === INDEX_MIN) {\n const fillChar = '0';\n str += fillChar.repeat(parsedLengths.nonDecimals + parsedLengths.decimals);\n } else {\n str += getNumberIndexString(\n parsedLengths,\n bound as number\n );\n }\n break;\n default:\n throw new Error('unknown index type ' + type);\n }\n });\n return str;\n}\n\n/**\n * Used in storages where it is not possible\n * to define inclusiveEnd/inclusiveStart\n */\nexport function changeIndexableStringByOneQuantum(str: string, direction: 1 | -1): string {\n const lastChar = str.slice(-1);\n let charCode = lastChar.charCodeAt(0);\n charCode = charCode + direction;\n const withoutLastChar = str.slice(0, -1);\n return withoutLastChar + String.fromCharCode(charCode);\n}\n"],"mappings":";;;;;;;;;;;;;;AAWA,IAAAA,eAAA,GAAAC,OAAA;AAQA,IAAAC,MAAA,GAAAD,OAAA;AAKA,IAAAE,aAAA,GAAAF,OAAA;AAxBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAqBA;AACA;AACA;AACA;AACA;AACA;AACA;;AAYO,SAASG,YAAYA,CACxBC,MAA+C,EAC/CC,KAAe,EACY;EAC3B,IAAMC,mBAAgD,GAAGD,KAAK,CAACE,GAAG,CAACC,SAAS,IAAI;IAC5E,IAAMC,UAAU,GAAG,IAAAC,qCAAqB,EACpCN,MAAM,EACNI,SACJ,CAAC;IACD,IAAI,CAACC,UAAU,EAAE;MACb,MAAM,IAAIE,KAAK,CAAC,iBAAiB,GAAGH,SAAS,CAAC;IAClD;IACA,IAAMI,IAAI,GAAGH,UAAU,CAACG,IAAI;IAC5B,IAAIC,aAAwC;IAC5C,IAAID,IAAI,KAAK,QAAQ,IAAIA,IAAI,KAAK,SAAS,EAAE;MACzCC,aAAa,GAAGC,4BAA4B,CACxCL,UACJ,CAAC;IACL;IAEA,IAAMM,QAAQ,GAAG,IAAAC,sBAAe,EAACR,SAAS,CAAC;IAC3C,IAAMS,SAAS,GAAGR,UAAU,CAACQ,SAAS,GAAGR,UAAU,CAACQ,SAAS,GAAG,CAAC;IAEjE,IAAIC,kBAAkE;IACtE,IAAIN,IAAI,KAAK,QAAQ,EAAE;MACnBM,kBAAkB,GAAGC,OAAO,IAAI;QAC5B,IAAIC,UAAU,GAAGL,QAAQ,CAACI,OAAO,CAAC;QAClC,IAAI,CAACC,UAAU,EAAE;UACbA,UAAU,GAAG,EAAE;QACnB;QACA,OAAOA,UAAU,CAACC,MAAM,CAACJ,SAAS,EAAE,GAAG,CAAC;MAC5C,CAAC;IACL,
CAAC,MAAM,IAAIL,IAAI,KAAK,SAAS,EAAE;MAC3BM,kBAAkB,GAAGC,OAAO,IAAI;QAC5B,IAAMC,UAAU,GAAGL,QAAQ,CAACI,OAAO,CAAC;QACpC,OAAOC,UAAU,GAAG,GAAG,GAAG,GAAG;MACjC,CAAC;IACL,CAAC,MAAM;MAAE;MACLF,kBAAkB,GAAGC,OAAO,IAAI;QAC5B,IAAMC,UAAU,GAAGL,QAAQ,CAACI,OAAO,CAAC;QACpC,OAAOG,oBAAoB,CACvBT,aAAa,EACbO,UACJ,CAAC;MACL,CAAC;IACL;IAEA,IAAMG,GAA8B,GAAG;MACnCf,SAAS;MACTC,UAAU;MACVI,aAAa;MACbE,QAAQ;MACRG;IACJ,CAAC;IACD,OAAOK,GAAG;EACd,CAAC,CAAC;EACF,OAAOjB,mBAAmB;AAC9B;;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACO,SAASkB,uBAAuBA,CACnCpB,MAA+C,EAC/CC,KAAe,EAC+B;EAC9C,IAAMC,mBAAmB,GAAGH,YAAY,CAACC,MAAM,EAAEC,KAAK,CAAC;EACvD,IAAMoB,yBAAyB,GAAGnB,mBAAmB,CAACoB,MAAM;EAC5D,IAAMC,mBAAmB,GAAGrB,mBAAmB,CAACC,GAAG,CAACqB,CAAC,IAAIA,CAAC,CAACV,kBAAkB,CAAC;;EAG9E;AACJ;AACA;EACI,IAAMK,GAAG,GAAG,SAAAA,CAAUJ,OAAkC,EAAU;IAC9D,IAAIU,GAAG,GAAG,EAAE;IACZ,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGL,yBAAyB,EAAE,EAAEK,CAAC,EAAE;MAChDD,GAAG,IAAIF,mBAAmB,CAACG,CAAC,CAAC,CAACX,OAAO,CAAC;IAC1C;IACA,OAAOU,GAAG;EACd,CAAC;EACD,OAAON,GAAG;AACd;AAUO,SAAST,4BAA4BA,CACxCL,UAAsB,EACT;EACb,IAAMsB,OAAO,GAAGC,IAAI,CAACC,KAAK,CAACxB,UAAU,CAACsB,OAAiB,CAAC;EACxD,IAAMG,OAAO,GAAGF,IAAI,CAACG,IAAI,CAAC1B,UAAU,CAACyB,OAAiB,CAAC;EACvD,IAAME,UAAkB,GAAG3B,UAAU,CAAC2B,UAAoB;EAE1D,IAAMC,SAAS,GAAGH,OAAO,GAAGH,OAAO;EACnC,IAAMO,WAAW,GAAGD,SAAS,CAACE,QAAQ,CAAC,CAAC,CAACb,MAAM;EAE/C,IAAMc,eAAe,GAAGJ,UAAU,CAACG,QAAQ,CAAC,CAAC,CAACE,KAAK,CAAC,GAAG,CAAC;EACxD,IAAIC,QAAQ,GAAG,CAAC;EAChB,IAAIF,eAAe,CAACd,MAAM,GAAG,CAAC,EAAE;IAC5BgB,QAAQ,GAAGF,eAAe,CAAC,CAAC,CAAC,CAACd,MAAM;EACxC;EACA,OAAO;IACHK,OAAO;IACPG,OAAO;IACPI,WAAW;IACXI,QAAQ;IACRC,cAAc,EAAEZ;EACpB,CAAC;AACL;AAEO,SAASa,oBAAoBA,CAChCxC,MAA+C,EAC/CC,KAAe,EACT;EACN,IAAMC,mBAAmB,GAAGH,YAAY,CAACC,MAAM,EAAEC,KAAK,CAAC;EACvD,IAAIqB,MAAM,GAAG,CAAC;EACdpB,mBAAmB,CAACuC,OAAO,CAACC,KAAK,IAAI;IACjC,IAAMrC,UAAU,GAAGqC,KAAK,CAACrC,UAAU;IACnC,IAAMG,IAAI,GAAGH,UAAU,CAACG,IAAI;IAE5B,IAAIA,IAAI,KAAK,QAAQ,EAAE;MACnBc,MAAM,IAAIjB,UAAU,CAACQ,SAAmB;IAC5C,CAAC,MAAM,IAAIL,IAAI,KAAK,SAAS,EAAE;MAC3Bc
,MAAM,IAAI,CAAC;IACf,CAAC,MAAM;MACH,IAAMb,aAAa,GAAGiC,KAAK,CAACjC,aAA8B;MAC1Da,MAAM,GAAGA,MAAM,GAAGb,aAAa,CAACyB,WAAW,GAAGzB,aAAa,CAAC6B,QAAQ;IACxE;EAEJ,CAAC,CAAC;EACF,OAAOhB,MAAM;AACjB;AAGO,SAASqB,gCAAgCA,CAC5CC,eAAuB,EACvBC,gBAAwB,EAClB;EACN,IAAMC,gBAAgB,GAAGF,eAAe,CAACG,KAAK,CAACF,gBAAgB,GAAG,CAAC,CAAC,CAAC;EACrE;EACA,IAAMG,UAAU,GAAGF,gBAAgB,CAACG,IAAI,CAAC,CAAC;EAC1C,OAAOD,UAAU;AACrB;AAGO,SAAS9B,oBAAoBA,CAChCT,aAA4B,EAC5BO,UAAkB,EACZ;EACN;AACJ;AACA;AACA;AACA;AACA;AACA;AACA;AACA;EACI,IAAI,OAAOA,UAAU,KAAK,WAAW,EAAE;IACnCA,UAAU,GAAG,CAAC;EAClB;EACA,IAAIA,UAAU,GAAGP,aAAa,CAACkB,OAAO,EAAE;IACpCX,UAAU,GAAGP,aAAa,CAACkB,OAAO;EACtC;EACA,IAAIX,UAAU,GAAGP,aAAa,CAACqB,OAAO,EAAE;IACpCd,UAAU,GAAGP,aAAa,CAACqB,OAAO;EACtC;EAEA,IAAMoB,wBAAwB,GAAG,CAACtB,IAAI,CAACC,KAAK,CAACb,UAAU,CAAC,GAAGP,aAAa,CAAC8B,cAAc,EAAEJ,QAAQ,CAAC,CAAC;EACnG,IAAIV,GAAG,GAAGyB,wBAAwB,CAACC,QAAQ,CAAC1C,aAAa,CAACyB,WAAW,EAAE,GAAG,CAAC;EAE3E,IAAIzB,aAAa,CAAC6B,QAAQ,GAAG,CAAC,EAAE;IAC5B,IAAMc,mBAAmB,GAAGpC,UAAU,CAACmB,QAAQ,CAAC,CAAC,CAACE,KAAK,CAAC,GAAG,CAAC;IAC5D,IAAMgB,oBAAoB,GAAGD,mBAAmB,CAAC9B,MAAM,GAAG,CAAC,GAAG8B,mBAAmB,CAAC,CAAC,CAAC,GAAG,GAAG;IAC1F3B,GAAG,IAAI4B,oBAAoB,CAACpC,MAAM,CAACR,aAAa,CAAC6B,QAAQ,EAAE,GAAG,CAAC;EACnE;EACA,OAAOb,GAAG;AACd;AAEO,SAAS6B,iCAAiCA,CAC7CtD,MAAyB,EACzBC,KAAe,EACfsD,UAA4D,EACtD;EACN,IAAI9B,GAAG,GAAG,EAAE;EACZxB,KAAK,CAACwC,OAAO,CAAC,CAACrC,SAAS,EAAEoD,GAAG,KAAK;IAC9B,IAAMnD,UAAU,GAAG,IAAAC,qCAAqB,EACpCN,MAAM,EACNI,SACJ,CAAC;IACD,IAAMqD,KAAK,GAAGF,UAAU,CAACC,GAAG,CAAC;IAC7B,IAAMhD,IAAI,GAAGH,UAAU,CAACG,IAAI;IAE5B,QAAQA,IAAI;MACR,KAAK,QAAQ;QACT,IAAMK,SAAS,GAAG,IAAA6C,qBAAc,EAACrD,UAAU,CAACQ,SAAS,EAAE,mBAAmB,CAAC;QAC3E,IAAI,OAAO4C,KAAK,KAAK,QAAQ,EAAE;UAC3BhC,GAAG,IAAKgC,KAAK,CAAYxC,MAAM,CAACJ,SAAS,EAAE,GAAG,CAAC;QACnD,CAAC,MAAM;UACH;UACAY,GAAG,IAAI,EAAE,CAACR,MAAM,CAACJ,SAAS,EAAE,GAAG,CAAC;QACpC;QACA;MACJ,KAAK,SAAS;QACV,IAAI4C,KAAK,KAAK,IAAI,EAAE;UAChBhC,GAAG,IAAI,GAAG;QACd,CAAC,MAAM,IAAIgC,KAAK,KAAKE,uBAAS,EAAE;UAC5BlC,GAAG,IAAI,GAAG;QACd,CAAC,MAAM,IAAIgC,KAAK,KAAKG,uBAA
S,EAAE;UAC5BnC,GAAG,IAAI,GAAG;QACd,CAAC,MAAM;UACH,IAAMoC,SAAS,GAAGJ,KAAK,GAAG,GAAG,GAAG,GAAG;UACnChC,GAAG,IAAIoC,SAAS;QACpB;QACA;MACJ,KAAK,QAAQ;MACb,KAAK,SAAS;QACV,IAAMpD,aAAa,GAAGC,4BAA4B,CAC9CL,UACJ,CAAC;QACD,IAAIoD,KAAK,KAAK,IAAI,IAAIA,KAAK,KAAKE,uBAAS,EAAE;UACvC,IAAMG,QAAQ,GAAG,GAAG;UACpBrC,GAAG,IAAIqC,QAAQ,CAACC,MAAM,CAACtD,aAAa,CAACyB,WAAW,GAAGzB,aAAa,CAAC6B,QAAQ,CAAC;QAC9E,CAAC,MAAM,IAAImB,KAAK,KAAKG,uBAAS,EAAE;UAC5BnC,GAAG,IAAIP,oBAAoB,CACvBT,aAAa,EACbA,aAAa,CAACqB,OAClB,CAAC;QACL,CAAC,MAAM;UACH,IAAMkC,GAAG,GAAG9C,oBAAoB,CAC5BT,aAAa,EACbgD,KACJ,CAAC;UACDhC,GAAG,IAAIuC,GAAG;QACd;QACA;MACJ;QACI,MAAM,IAAIzD,KAAK,CAAC,qBAAqB,GAAGC,IAAI,CAAC;IACrD;EACJ,CAAC,CAAC;EACF,OAAOiB,GAAG;AACd;AAGO,SAASwC,iCAAiCA,CAC7CjE,MAAyB,EACzBC,KAAe,EACfiE,UAA4D,EACtD;EACN,IAAIzC,GAAG,GAAG,EAAE;EACZxB,KAAK,CAACwC,OAAO,CAAC,CAACrC,SAAS,EAAEoD,GAAG,KAAK;IAC9B,IAAMnD,UAAU,GAAG,IAAAC,qCAAqB,EACpCN,MAAM,EACNI,SACJ,CAAC;IACD,IAAMqD,KAAK,GAAGS,UAAU,CAACV,GAAG,CAAC;IAC7B,IAAMhD,IAAI,GAAGH,UAAU,CAACG,IAAI;IAE5B,QAAQA,IAAI;MACR,KAAK,QAAQ;QACT,IAAMK,SAAS,GAAG,IAAA6C,qBAAc,EAACrD,UAAU,CAACQ,SAAS,EAAE,mBAAmB,CAAC;QAC3E,IAAI,OAAO4C,KAAK,KAAK,QAAQ,IAAIA,KAAK,KAAKG,uBAAS,EAAE;UAClDnC,GAAG,IAAKgC,KAAK,CAAYxC,MAAM,CAACJ,SAAS,EAAE,GAAG,CAAC;QACnD,CAAC,MAAM,IAAI4C,KAAK,KAAKE,uBAAS,EAAE;UAC5BlC,GAAG,IAAI,EAAE,CAACR,MAAM,CAACJ,SAAS,EAAE,GAAG,CAAC;QACpC,CAAC,MAAM;UACHY,GAAG,IAAI,EAAE,CAACR,MAAM,CAACJ,SAAS,EAAE+C,uBAAS,CAAC;QAC1C;QACA;MACJ,KAAK,SAAS;QACV,IAAIH,KAAK,KAAK,IAAI,EAAE;UAChBhC,GAAG,IAAI,GAAG;QACd,CAAC,MAAM;UACH,IAAMoC,SAAS,GAAGJ,KAAK,GAAG,GAAG,GAAG,GAAG;UACnChC,GAAG,IAAIoC,SAAS;QACpB;QACA;MACJ,KAAK,QAAQ;MACb,KAAK,SAAS;QACV,IAAMpD,aAAa,GAAGC,4BAA4B,CAC9CL,UACJ,CAAC;QACD,IAAIoD,KAAK,KAAK,IAAI,IAAIA,KAAK,KAAKG,uBAAS,EAAE;UACvC,IAAME,QAAQ,GAAG,GAAG;UACpBrC,GAAG,IAAIqC,QAAQ,CAACC,MAAM,CAACtD,aAAa,CAACyB,WAAW,GAAGzB,aAAa,CAAC6B,QAAQ,CAAC;QAC9E,CAAC,MAAM,IAAImB,KAAK,KAAKE,uBAAS,EAAE;UAC5B,IAAMG,SAAQ,GAAG,GAAG;UACpBrC,GAAG,IAAIqC,SAAQ,CAACC,MAAM,CAACtD,aAAa,CAACyB,WAAW,GAAGzB,aAAa,CAAC6B,QAAQ,CAAC;QAC9E,
CAAC,MAAM;UACHb,GAAG,IAAIP,oBAAoB,CACvBT,aAAa,EACbgD,KACJ,CAAC;QACL;QACA;MACJ;QACI,MAAM,IAAIlD,KAAK,CAAC,qBAAqB,GAAGC,IAAI,CAAC;IACrD;EACJ,CAAC,CAAC;EACF,OAAOiB,GAAG;AACd;;AAEA;AACA;AACA;AACA;AACO,SAAS0C,iCAAiCA,CAAC1C,GAAW,EAAE2C,SAAiB,EAAU;EACtF,IAAMC,QAAQ,GAAG5C,GAAG,CAACsB,KAAK,CAAC,CAAC,CAAC,CAAC;EAC9B,IAAIuB,QAAQ,GAAGD,QAAQ,CAACE,UAAU,CAAC,CAAC,CAAC;EACrCD,QAAQ,GAAGA,QAAQ,GAAGF,SAAS;EAC/B,IAAMI,eAAe,GAAG/C,GAAG,CAACsB,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;EACxC,OAAOyB,eAAe,GAAGC,MAAM,CAACC,YAAY,CAACJ,QAAQ,CAAC;AAC1D","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/doc-cache.js b/dist/cjs/doc-cache.js deleted file mode 100644 index b1888f5cd49..00000000000 --- a/dist/cjs/doc-cache.js +++ /dev/null @@ -1,191 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.DocumentCache = void 0; -exports.mapDocumentsDataToCacheDocs = mapDocumentsDataToCacheDocs; -var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass")); -var _index2 = require("./plugins/utils/index.js"); -var _overwritable = require("./overwritable.js"); -var _rxChangeEvent = require("./rx-change-event.js"); -/** - * Because we have to create many cache items, - * we use an array instead of an object with properties - * for better performance and less memory usage. - * @link https://stackoverflow.com/questions/17295056/array-vs-object-efficiency-in-javascript - */ -/** - * @link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/FinalizationRegistry - */ -/** - * The DocumentCache stores RxDocument objects - * by their primary key and revision. - * This is useful on client side applications where - * it is not known how much memory can be used, so - * we de-duplicate RxDocument states to save memory. 
- * To not fill up the memory with old document states, the DocumentCache - * only contains weak references to the RxDocuments themself. - * @link https://caniuse.com/?search=weakref - */ -var DocumentCache = exports.DocumentCache = /*#__PURE__*/function () { - /** - * Some JavaScript runtimes like QuickJS, - * so not have a FinalizationRegistry or WeakRef. - * Therefore we need a workaround which might waste a lot of memory, - * but at least works. - */ - - function DocumentCache(primaryPath, changes$, - /** - * A method that can create a RxDocument by the given document data. - */ - documentCreator) { - this.cacheItemByDocId = new Map(); - this.registry = typeof FinalizationRegistry === 'function' ? new FinalizationRegistry(docMeta => { - var docId = docMeta.docId; - var cacheItem = this.cacheItemByDocId.get(docId); - if (cacheItem) { - cacheItem[0].delete(docMeta.revisionHeight); - if (cacheItem[0].size === 0) { - /** - * No state of the document is cached anymore, - * so we can clean up. - */ - this.cacheItemByDocId.delete(docId); - } - } - }) : undefined; - this.primaryPath = primaryPath; - this.changes$ = changes$; - this.documentCreator = documentCreator; - changes$.subscribe(changeEvent => { - var docId = changeEvent.documentId; - var cacheItem = this.cacheItemByDocId.get(docId); - if (cacheItem) { - var documentData = (0, _rxChangeEvent.getDocumentDataOfRxChangeEvent)(changeEvent); - cacheItem[1] = documentData; - } - }); - } - - /** - * Get the RxDocument from the cache - * and create a new one if not exits before. - * @overwrites itself with the actual function - * because this is @performance relevant. - * It is called on each document row for each write and read. 
- */ - var _proto = DocumentCache.prototype; - /** - * Throws if not exists - */ - _proto.getLatestDocumentData = function getLatestDocumentData(docId) { - var cacheItem = (0, _index2.getFromMapOrThrow)(this.cacheItemByDocId, docId); - return cacheItem[1]; - }; - _proto.getLatestDocumentDataIfExists = function getLatestDocumentDataIfExists(docId) { - var cacheItem = this.cacheItemByDocId.get(docId); - if (cacheItem) { - return cacheItem[1]; - } - }; - return (0, _createClass2.default)(DocumentCache, [{ - key: "getCachedRxDocuments", - get: function () { - var fn = getCachedRxDocumentMonad(this); - return (0, _index2.overwriteGetterForCaching)(this, 'getCachedRxDocuments', fn); - } - }, { - key: "getCachedRxDocument", - get: function () { - var fn = getCachedRxDocumentMonad(this); - return (0, _index2.overwriteGetterForCaching)(this, 'getCachedRxDocument', doc => fn([doc])[0]); - } - }]); -}(); -/** - * This function is called very very often. - * This is likely the most important function for RxDB overall performance - * @hotPath This is one of the most important methods for performance. - * It is used in many places to transform the raw document data into RxDocuments. 
- */ -function getCachedRxDocumentMonad(docCache) { - var primaryPath = docCache.primaryPath; - var cacheItemByDocId = docCache.cacheItemByDocId; - var registry = docCache.registry; - var deepFreezeWhenDevMode = _overwritable.overwritable.deepFreezeWhenDevMode; - var documentCreator = docCache.documentCreator; - var fn = docsData => { - var ret = new Array(docsData.length); - var registryTasks = []; - for (var index = 0; index < docsData.length; index++) { - var docData = docsData[index]; - var docId = docData[primaryPath]; - var revisionHeight = (0, _index2.getHeightOfRevision)(docData._rev); - var byRev = void 0; - var cachedRxDocumentWeakRef = void 0; - var cacheItem = cacheItemByDocId.get(docId); - if (!cacheItem) { - byRev = new Map(); - cacheItem = [byRev, docData]; - cacheItemByDocId.set(docId, cacheItem); - } else { - byRev = cacheItem[0]; - cachedRxDocumentWeakRef = byRev.get(revisionHeight); - } - var cachedRxDocument = cachedRxDocumentWeakRef ? cachedRxDocumentWeakRef.deref() : undefined; - if (!cachedRxDocument) { - docData = deepFreezeWhenDevMode(docData); - cachedRxDocument = documentCreator(docData); - byRev.set(revisionHeight, createWeakRefWithFallback(cachedRxDocument)); - if (registry) { - registryTasks.push(cachedRxDocument); - } - } - ret[index] = cachedRxDocument; - } - if (registryTasks.length > 0 && registry) { - /** - * Calling registry.register() has shown to have - * really bad performance. So we add the cached documents - * lazily. 
- */ - (0, _index2.requestIdlePromiseNoQueue)().then(() => { - for (var _index = 0; _index < registryTasks.length; _index++) { - var doc = registryTasks[_index]; - registry.register(doc, { - docId: doc.primary, - revisionHeight: (0, _index2.getHeightOfRevision)(doc.revision) - }); - } - }); - } - return ret; - }; - return fn; -} -function mapDocumentsDataToCacheDocs(docCache, docsData) { - var getCachedRxDocuments = docCache.getCachedRxDocuments; - return getCachedRxDocuments(docsData); -} - -/** - * Fallback for JavaScript runtimes that do not support WeakRef. - * The fallback will keep the items in cache forever, - * but at least works. - */ -var HAS_WEAK_REF = typeof WeakRef === 'function'; -var createWeakRefWithFallback = HAS_WEAK_REF ? createWeakRef : createWeakRefFallback; -function createWeakRef(obj) { - return new WeakRef(obj); -} -function createWeakRefFallback(obj) { - return { - deref() { - return obj; - } - }; -} -//# sourceMappingURL=doc-cache.js.map \ No newline at end of file diff --git a/dist/cjs/doc-cache.js.map b/dist/cjs/doc-cache.js.map deleted file mode 100644 index 796afd81abf..00000000000 --- a/dist/cjs/doc-cache.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"doc-cache.js","names":["_index2","require","_overwritable","_rxChangeEvent","DocumentCache","exports","primaryPath","changes$","documentCreator","cacheItemByDocId","Map","registry","FinalizationRegistry","docMeta","docId","cacheItem","get","delete","revisionHeight","size","undefined","subscribe","changeEvent","documentId","documentData","getDocumentDataOfRxChangeEvent","_proto","prototype","getLatestDocumentData","getFromMapOrThrow","getLatestDocumentDataIfExists","_createClass2","default","key","fn","getCachedRxDocumentMonad","overwriteGetterForCaching","doc","docCache","deepFreezeWhenDevMode","overwritable","docsData","ret","Array","length","registryTasks","index","docData","getHeightOfRevision","_rev","byRev","cachedRxDocumentWeakRef","set","cachedRxDocument","deref","createWeakRefWithFallback","push","requestIdlePromiseNoQueue","then","register","primary","revision","mapDocumentsDataToCacheDocs","getCachedRxDocuments","HAS_WEAK_REF","WeakRef","createWeakRef","createWeakRefFallback","obj"],"sources":["../../src/doc-cache.ts"],"sourcesContent":["import type {\n RxChangeEvent,\n RxDocument,\n RxDocumentData\n} from './types/index.d.ts';\nimport {\n getFromMapOrThrow,\n getHeightOfRevision,\n overwriteGetterForCaching,\n requestIdlePromiseNoQueue\n} from './plugins/utils/index.ts';\nimport {\n overwritable\n} from './overwritable.ts';\nimport { getDocumentDataOfRxChangeEvent } from './rx-change-event.ts';\nimport { Observable } from 'rxjs';\n\n/**\n * Because we have to create many cache items,\n * we use an array instead of an object with properties\n * for better performance and less memory usage.\n * @link https://stackoverflow.com/questions/17295056/array-vs-object-efficiency-in-javascript\n */\ndeclare type CacheItem = [\n /**\n * Store the different document states of time\n * based on their revision height.\n * We store WeakRefs so that we can later clean up\n * document states that are no longer needed.\n */\n Map>>,\n\n /**\n * Store 
the latest known document state.\n * As long as any state of the document is in the cache,\n * we observe the changestream and update the latestDoc accordingly.\n * This makes it easier to optimize performance on other parts\n * because for each known document we can always get the current state\n * in the storage.\n * Also it makes it possible to call RxDocument.latest() in a non-async way\n * to retrieve the latest document state or to observe$ some property.\n *\n * To not prevent the whole cacheItem from being garbage collected,\n * we store only the document data here, but not the RxDocument.\n */\n RxDocumentData\n];\n\n\n/**\n * @link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/FinalizationRegistry\n */\ndeclare type FinalizationRegistryValue = {\n docId: string;\n revisionHeight: number;\n};\n\n/**\n * The DocumentCache stores RxDocument objects\n * by their primary key and revision.\n * This is useful on client side applications where\n * it is not known how much memory can be used, so\n * we de-duplicate RxDocument states to save memory.\n * To not fill up the memory with old document states, the DocumentCache\n * only contains weak references to the RxDocuments themself.\n * @link https://caniuse.com/?search=weakref\n */\nexport class DocumentCache {\n public cacheItemByDocId = new Map>();\n\n /**\n * Some JavaScript runtimes like QuickJS,\n * so not have a FinalizationRegistry or WeakRef.\n * Therefore we need a workaround which might waste a lot of memory,\n * but at least works.\n */\n public readonly registry?: FinalizationRegistry = typeof FinalizationRegistry === 'function' ?\n new FinalizationRegistry(docMeta => {\n const docId = docMeta.docId;\n const cacheItem = this.cacheItemByDocId.get(docId);\n if (cacheItem) {\n cacheItem[0].delete(docMeta.revisionHeight);\n if (cacheItem[0].size === 0) {\n /**\n * No state of the document is cached anymore,\n * so we can clean up.\n */\n 
this.cacheItemByDocId.delete(docId);\n }\n }\n }) :\n undefined;\n\n constructor(\n public readonly primaryPath: string,\n public readonly changes$: Observable>,\n /**\n * A method that can create a RxDocument by the given document data.\n */\n public documentCreator: (docData: RxDocumentData) => RxDocument\n ) {\n changes$.subscribe(changeEvent => {\n const docId = changeEvent.documentId;\n const cacheItem = this.cacheItemByDocId.get(docId);\n if (cacheItem) {\n const documentData = getDocumentDataOfRxChangeEvent(changeEvent);\n cacheItem[1] = documentData;\n }\n });\n }\n\n /**\n * Get the RxDocument from the cache\n * and create a new one if not exits before.\n * @overwrites itself with the actual function\n * because this is @performance relevant.\n * It is called on each document row for each write and read.\n */\n get getCachedRxDocuments(): (docsData: RxDocumentData[]) => RxDocument[] {\n const fn = getCachedRxDocumentMonad(this);\n return overwriteGetterForCaching(\n this,\n 'getCachedRxDocuments',\n fn\n );\n }\n\n get getCachedRxDocument(): (docData: RxDocumentData) => RxDocument {\n const fn = getCachedRxDocumentMonad(this);\n return overwriteGetterForCaching(\n this,\n 'getCachedRxDocument',\n doc => fn([doc])[0]\n );\n }\n\n /**\n * Throws if not exists\n */\n public getLatestDocumentData(docId: string): RxDocumentData {\n const cacheItem = getFromMapOrThrow(this.cacheItemByDocId, docId);\n return cacheItem[1];\n }\n\n public getLatestDocumentDataIfExists(docId: string): RxDocumentData | undefined {\n const cacheItem = this.cacheItemByDocId.get(docId);\n if (cacheItem) {\n return cacheItem[1];\n }\n }\n}\n\n/**\n * This function is called very very often.\n * This is likely the most important function for RxDB overall performance\n * @hotPath This is one of the most important methods for performance.\n * It is used in many places to transform the raw document data into RxDocuments.\n */\nfunction getCachedRxDocumentMonad(\n docCache: DocumentCache\n): 
(docsData: RxDocumentData[]) => RxDocument[] {\n const primaryPath = docCache.primaryPath;\n const cacheItemByDocId = docCache.cacheItemByDocId;\n const registry = docCache.registry;\n const deepFreezeWhenDevMode = overwritable.deepFreezeWhenDevMode;\n const documentCreator = docCache.documentCreator;\n const fn: (docsData: RxDocumentData[]) => RxDocument[] = (docsData: RxDocumentData[]) => {\n const ret: RxDocument[] = new Array(docsData.length);\n const registryTasks: RxDocument[] = [];\n for (let index = 0; index < docsData.length; index++) {\n let docData = docsData[index];\n const docId: string = (docData as any)[primaryPath];\n const revisionHeight = getHeightOfRevision(docData._rev);\n\n let byRev: Map>>;\n let cachedRxDocumentWeakRef: WeakRef> | undefined;\n let cacheItem = cacheItemByDocId.get(docId);\n if (!cacheItem) {\n byRev = new Map();\n cacheItem = [\n byRev,\n docData\n ];\n cacheItemByDocId.set(docId, cacheItem);\n } else {\n byRev = cacheItem[0];\n cachedRxDocumentWeakRef = byRev.get(revisionHeight);\n }\n let cachedRxDocument = cachedRxDocumentWeakRef ? cachedRxDocumentWeakRef.deref() : undefined;\n if (!cachedRxDocument) {\n docData = deepFreezeWhenDevMode(docData) as any;\n cachedRxDocument = documentCreator(docData) as RxDocument;\n byRev.set(revisionHeight, createWeakRefWithFallback(cachedRxDocument));\n if (registry) {\n registryTasks.push(cachedRxDocument);\n }\n }\n ret[index] = cachedRxDocument;\n }\n if (registryTasks.length > 0 && registry) {\n /**\n * Calling registry.register() has shown to have\n * really bad performance. 
So we add the cached documents\n * lazily.\n */\n requestIdlePromiseNoQueue().then(() => {\n for (let index = 0; index < registryTasks.length; index++) {\n const doc = registryTasks[index];\n registry.register(doc, {\n docId: doc.primary,\n revisionHeight: getHeightOfRevision(doc.revision)\n });\n }\n });\n }\n return ret;\n };\n return fn;\n}\n\nexport function mapDocumentsDataToCacheDocs(\n docCache: DocumentCache,\n docsData: RxDocumentData[]\n) {\n const getCachedRxDocuments = docCache.getCachedRxDocuments;\n return getCachedRxDocuments(docsData);\n}\n\n/**\n * Fallback for JavaScript runtimes that do not support WeakRef.\n * The fallback will keep the items in cache forever,\n * but at least works.\n */\nconst HAS_WEAK_REF = typeof WeakRef === 'function';\nconst createWeakRefWithFallback = HAS_WEAK_REF ? createWeakRef : createWeakRefFallback;\nfunction createWeakRef(obj: T): WeakRef {\n return new WeakRef(obj) as any;\n}\nfunction createWeakRefFallback(obj: T): WeakRef {\n return {\n deref() {\n return obj;\n }\n } as 
any;\n}\n"],"mappings":";;;;;;;;;AAKA,IAAAA,OAAA,GAAAC,OAAA;AAMA,IAAAC,aAAA,GAAAD,OAAA;AAGA,IAAAE,cAAA,GAAAF,OAAA;AAGA;AACA;AACA;AACA;AACA;AACA;AA2BA;AACA;AACA;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AATA,IAUaG,aAAa,GAAAC,OAAA,CAAAD,aAAA;EAGtB;AACJ;AACA;AACA;AACA;AACA;;EAkBI,SAAAA,cACoBE,WAAmB,EACnBC,QAA8C;EAC9D;AACR;AACA;EACeC,eAA0F,EACnG;IAAA,KAhCKC,gBAAgB,GAAG,IAAIC,GAAG,CAA2C,CAAC;IAAA,KAQ7DC,QAAQ,GAAqD,OAAOC,oBAAoB,KAAK,UAAU,GACnH,IAAIA,oBAAoB,CAA4BC,OAAO,IAAI;MAC3D,IAAMC,KAAK,GAAGD,OAAO,CAACC,KAAK;MAC3B,IAAMC,SAAS,GAAG,IAAI,CAACN,gBAAgB,CAACO,GAAG,CAACF,KAAK,CAAC;MAClD,IAAIC,SAAS,EAAE;QACXA,SAAS,CAAC,CAAC,CAAC,CAACE,MAAM,CAACJ,OAAO,CAACK,cAAc,CAAC;QAC3C,IAAIH,SAAS,CAAC,CAAC,CAAC,CAACI,IAAI,KAAK,CAAC,EAAE;UACzB;AACpB;AACA;AACA;UACoB,IAAI,CAACV,gBAAgB,CAACQ,MAAM,CAACH,KAAK,CAAC;QACvC;MACJ;IACJ,CAAC,CAAC,GACFM,SAAS;IAAA,KAGOd,WAAmB,GAAnBA,WAAmB;IAAA,KACnBC,QAA8C,GAA9CA,QAA8C;IAAA,KAIvDC,eAA0F,GAA1FA,eAA0F;IAEjGD,QAAQ,CAACc,SAAS,CAACC,WAAW,IAAI;MAC9B,IAAMR,KAAK,GAAGQ,WAAW,CAACC,UAAU;MACpC,IAAMR,SAAS,GAAG,IAAI,CAACN,gBAAgB,CAACO,GAAG,CAACF,KAAK,CAAC;MAClD,IAAIC,SAAS,EAAE;QACX,IAAMS,YAAY,GAAG,IAAAC,6CAA8B,EAACH,WAAW,CAAC;QAChEP,SAAS,CAAC,CAAC,CAAC,GAAGS,YAAY;MAC/B;IACJ,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA;AACA;AACA;AACA;EANI,IAAAE,MAAA,GAAAtB,aAAA,CAAAuB,SAAA;EAyBA;AACJ;AACA;EAFID,MAAA,CAGOE,qBAAqB,GAA5B,SAAAA,sBAA6Bd,KAAa,EAA6B;IACnE,IAAMC,SAAS,GAAG,IAAAc,yBAAiB,EAAC,IAAI,CAACpB,gBAAgB,EAAEK,KAAK,CAAC;IACjE,OAAOC,SAAS,CAAC,CAAC,CAAC;EACvB,CAAC;EAAAW,MAAA,CAEMI,6BAA6B,GAApC,SAAAA,8BAAqChB,KAAa,EAAyC;IACvF,IAAMC,SAAS,GAAG,IAAI,CAACN,gBAAgB,CAACO,GAAG,CAACF,KAAK,CAAC;IAClD,IAAIC,SAAS,EAAE;MACX,OAAOA,SAAS,CAAC,CAAC,CAAC;IACvB;EACJ,CAAC;EAAA,WAAAgB,aAAA,CAAAC,OAAA,EAAA5B,aAAA;IAAA6B,GAAA;IAAAjB,GAAA,EA/BD,SAAAA,CAAA,EAA2G;MACvG,IAAMkB,EAAE,GAAGC,wBAAwB,CAAC,IAAI,CAAC;MACzC,OAAO,IAAAC,iCAAyB,EAC5B,IAAI,EACJ,sBAAsB,EACtBF,EACJ,CAAC;IACL;EAAC;IAAAD,GAAA;IAAAjB,GAAA,EAED,SAAAA,CAAA,EAAqG;MACjG,IAAMkB,EAAE,GAAGC,wBAAwB,CAAC,IAAI,CAAC;MACzC,OAAO,IAAAC,iCAAyB,EAC5B,IA
AI,EACJ,qBAAqB,EACrBC,GAAG,IAAIH,EAAE,CAAC,CAACG,GAAG,CAAC,CAAC,CAAC,CAAC,CACtB,CAAC;IACL;EAAC;AAAA;AAkBL;AACA;AACA;AACA;AACA;AACA;AACA,SAASF,wBAAwBA,CAC7BG,QAA8C,EACgC;EAC9E,IAAMhC,WAAW,GAAGgC,QAAQ,CAAChC,WAAW;EACxC,IAAMG,gBAAgB,GAAG6B,QAAQ,CAAC7B,gBAAgB;EAClD,IAAME,QAAQ,GAAG2B,QAAQ,CAAC3B,QAAQ;EAClC,IAAM4B,qBAAqB,GAAGC,0BAAY,CAACD,qBAAqB;EAChE,IAAM/B,eAAe,GAAG8B,QAAQ,CAAC9B,eAAe;EAChD,IAAM0B,EAAkF,GAAIO,QAAqC,IAAK;IAClI,IAAMC,GAAwC,GAAG,IAAIC,KAAK,CAACF,QAAQ,CAACG,MAAM,CAAC;IAC3E,IAAMC,aAAkD,GAAG,EAAE;IAC7D,KAAK,IAAIC,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGL,QAAQ,CAACG,MAAM,EAAEE,KAAK,EAAE,EAAE;MAClD,IAAIC,OAAO,GAAGN,QAAQ,CAACK,KAAK,CAAC;MAC7B,IAAMhC,KAAa,GAAIiC,OAAO,CAASzC,WAAW,CAAC;MACnD,IAAMY,cAAc,GAAG,IAAA8B,2BAAmB,EAACD,OAAO,CAACE,IAAI,CAAC;MAExD,IAAIC,KAA8D;MAClE,IAAIC,uBAA+E;MACnF,IAAIpC,SAAS,GAAGN,gBAAgB,CAACO,GAAG,CAACF,KAAK,CAAC;MAC3C,IAAI,CAACC,SAAS,EAAE;QACZmC,KAAK,GAAG,IAAIxC,GAAG,CAAC,CAAC;QACjBK,SAAS,GAAG,CACRmC,KAAK,EACLH,OAAO,CACV;QACDtC,gBAAgB,CAAC2C,GAAG,CAACtC,KAAK,EAAEC,SAAS,CAAC;MAC1C,CAAC,MAAM;QACHmC,KAAK,GAAGnC,SAAS,CAAC,CAAC,CAAC;QACpBoC,uBAAuB,GAAGD,KAAK,CAAClC,GAAG,CAACE,cAAc,CAAC;MACvD;MACA,IAAImC,gBAAgB,GAAGF,uBAAuB,GAAGA,uBAAuB,CAACG,KAAK,CAAC,CAAC,GAAGlC,SAAS;MAC5F,IAAI,CAACiC,gBAAgB,EAAE;QACnBN,OAAO,GAAGR,qBAAqB,CAACQ,OAAO,CAAQ;QAC/CM,gBAAgB,GAAG7C,eAAe,CAACuC,OAAO,CAAsC;QAChFG,KAAK,CAACE,GAAG,CAAClC,cAAc,EAAEqC,yBAAyB,CAACF,gBAAgB,CAAC,CAAC;QACtE,IAAI1C,QAAQ,EAAE;UACVkC,aAAa,CAACW,IAAI,CAACH,gBAAgB,CAAC;QACxC;MACJ;MACAX,GAAG,CAACI,KAAK,CAAC,GAAGO,gBAAgB;IACjC;IACA,IAAIR,aAAa,CAACD,MAAM,GAAG,CAAC,IAAIjC,QAAQ,EAAE;MACtC;AACZ;AACA;AACA;AACA;MACY,IAAA8C,iCAAyB,EAAC,CAAC,CAACC,IAAI,CAAC,MAAM;QACnC,KAAK,IAAIZ,MAAK,GAAG,CAAC,EAAEA,MAAK,GAAGD,aAAa,CAACD,MAAM,EAAEE,MAAK,EAAE,EAAE;UACvD,IAAMT,GAAG,GAAGQ,aAAa,CAACC,MAAK,CAAC;UAChCnC,QAAQ,CAACgD,QAAQ,CAACtB,GAAG,EAAE;YACnBvB,KAAK,EAAEuB,GAAG,CAACuB,OAAO;YAClB1C,cAAc,EAAE,IAAA8B,2BAAmB,EAACX,GAAG,CAACwB,QAAQ;UACpD,CAAC,CAAC;QACN;MACJ,CAAC,CAAC;IACN;IACA,OAAOnB,GAAG;EACd,CAAC;EACD,OAAOR,EAAE;AACb;AAEO,SA
AS4B,2BAA2BA,CACvCxB,QAA8C,EAC9CG,QAAqC,EACvC;EACE,IAAMsB,oBAAoB,GAAGzB,QAAQ,CAACyB,oBAAoB;EAC1D,OAAOA,oBAAoB,CAACtB,QAAQ,CAAC;AACzC;;AAEA;AACA;AACA;AACA;AACA;AACA,IAAMuB,YAAY,GAAG,OAAOC,OAAO,KAAK,UAAU;AAClD,IAAMV,yBAAyB,GAAGS,YAAY,GAAGE,aAAa,GAAGC,qBAAqB;AACtF,SAASD,aAAaA,CAAmBE,GAAM,EAAc;EACzD,OAAO,IAAIH,OAAO,CAACG,GAAG,CAAC;AAC3B;AACA,SAASD,qBAAqBA,CAAmBC,GAAM,EAAc;EACjE,OAAO;IACHd,KAAKA,CAAA,EAAG;MACJ,OAAOc,GAAG;IACd;EACJ,CAAC;AACL","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/event-reduce.js b/dist/cjs/event-reduce.js deleted file mode 100644 index 94260a6c526..00000000000 --- a/dist/cjs/event-reduce.js +++ /dev/null @@ -1,106 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RXQUERY_QUERY_PARAMS_CACHE = void 0; -exports.calculateNewResults = calculateNewResults; -exports.getQueryParams = getQueryParams; -exports.getSortFieldsOfQuery = getSortFieldsOfQuery; -var _eventReduceJs = require("event-reduce-js"); -var _rxChangeEvent = require("./rx-change-event.js"); -var _index = require("./plugins/utils/index.js"); -var _rxQueryHelper = require("./rx-query-helper.js"); -function getSortFieldsOfQuery(primaryKey, query) { - if (!query.sort || query.sort.length === 0) { - return [primaryKey]; - } else { - return query.sort.map(part => Object.keys(part)[0]); - } -} -var RXQUERY_QUERY_PARAMS_CACHE = exports.RXQUERY_QUERY_PARAMS_CACHE = new WeakMap(); -function getQueryParams(rxQuery) { - return (0, _index.getFromMapOrCreate)(RXQUERY_QUERY_PARAMS_CACHE, rxQuery, () => { - var collection = rxQuery.collection; - var normalizedMangoQuery = (0, _rxQueryHelper.normalizeMangoQuery)(collection.storageInstance.schema, (0, _index.clone)(rxQuery.mangoQuery)); - var primaryKey = collection.schema.primaryPath; - - /** - * Create a custom sort comparator - * that uses the hooks to ensure - * we send for example compressed documents to be sorted by compressed queries. 
- */ - var sortComparator = (0, _rxQueryHelper.getSortComparator)(collection.schema.jsonSchema, normalizedMangoQuery); - var useSortComparator = (docA, docB) => { - var sortComparatorData = { - docA, - docB, - rxQuery - }; - return sortComparator(sortComparatorData.docA, sortComparatorData.docB); - }; - - /** - * Create a custom query matcher - * that uses the hooks to ensure - * we send for example compressed documents to match compressed queries. - */ - var queryMatcher = (0, _rxQueryHelper.getQueryMatcher)(collection.schema.jsonSchema, normalizedMangoQuery); - var useQueryMatcher = doc => { - var queryMatcherData = { - doc, - rxQuery - }; - return queryMatcher(queryMatcherData.doc); - }; - var ret = { - primaryKey: rxQuery.collection.schema.primaryPath, - skip: normalizedMangoQuery.skip, - limit: normalizedMangoQuery.limit, - sortFields: getSortFieldsOfQuery(primaryKey, normalizedMangoQuery), - sortComparator: useSortComparator, - queryMatcher: useQueryMatcher - }; - return ret; - }); -} -function calculateNewResults(rxQuery, rxChangeEvents) { - if (!rxQuery.collection.database.eventReduce) { - return { - runFullQueryAgain: true - }; - } - var queryParams = getQueryParams(rxQuery); - var previousResults = (0, _index.ensureNotFalsy)(rxQuery._result).docsData.slice(0); - var previousResultsMap = (0, _index.ensureNotFalsy)(rxQuery._result).docsDataMap; - var changed = false; - var eventReduceEvents = rxChangeEvents.map(cE => (0, _rxChangeEvent.rxChangeEventToEventReduceChangeEvent)(cE)).filter(_index.arrayFilterNotEmpty); - var foundNonOptimizeable = eventReduceEvents.find(eventReduceEvent => { - var stateResolveFunctionInput = { - queryParams, - changeEvent: eventReduceEvent, - previousResults, - keyDocumentMap: previousResultsMap - }; - var actionName = (0, _eventReduceJs.calculateActionName)(stateResolveFunctionInput); - if (actionName === 'runFullQueryAgain') { - return true; - } else if (actionName !== 'doNothing') { - changed = true; - (0, 
_eventReduceJs.runAction)(actionName, queryParams, eventReduceEvent, previousResults, previousResultsMap); - return false; - } - }); - if (foundNonOptimizeable) { - return { - runFullQueryAgain: true - }; - } else { - return { - runFullQueryAgain: false, - changed, - newResults: previousResults - }; - } -} -//# sourceMappingURL=event-reduce.js.map \ No newline at end of file diff --git a/dist/cjs/event-reduce.js.map b/dist/cjs/event-reduce.js.map deleted file mode 100644 index 03ba3c05ad5..00000000000 --- a/dist/cjs/event-reduce.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"event-reduce.js","names":["_eventReduceJs","require","_rxChangeEvent","_index","_rxQueryHelper","getSortFieldsOfQuery","primaryKey","query","sort","length","map","part","Object","keys","RXQUERY_QUERY_PARAMS_CACHE","exports","WeakMap","getQueryParams","rxQuery","getFromMapOrCreate","collection","normalizedMangoQuery","normalizeMangoQuery","storageInstance","schema","clone","mangoQuery","primaryPath","sortComparator","getSortComparator","jsonSchema","useSortComparator","docA","docB","sortComparatorData","queryMatcher","getQueryMatcher","useQueryMatcher","doc","queryMatcherData","ret","skip","limit","sortFields","calculateNewResults","rxChangeEvents","database","eventReduce","runFullQueryAgain","queryParams","previousResults","ensureNotFalsy","_result","docsData","slice","previousResultsMap","docsDataMap","changed","eventReduceEvents","cE","rxChangeEventToEventReduceChangeEvent","filter","arrayFilterNotEmpty","foundNonOptimizeable","find","eventReduceEvent","stateResolveFunctionInput","changeEvent","keyDocumentMap","actionName","calculateActionName","runAction","newResults"],"sources":["../../src/event-reduce.ts"],"sourcesContent":["import {\n ActionName,\n calculateActionName,\n runAction,\n QueryParams,\n QueryMatcher,\n DeterministicSortComparator,\n StateResolveFunctionInput,\n ChangeEvent\n} from 'event-reduce-js';\nimport type {\n RxQuery,\n MangoQuery,\n RxChangeEvent,\n 
StringKeys,\n RxDocumentData\n} from './types/index.d.ts';\nimport { rxChangeEventToEventReduceChangeEvent } from './rx-change-event.ts';\nimport {\n arrayFilterNotEmpty,\n clone,\n ensureNotFalsy,\n getFromMapOrCreate\n} from './plugins/utils/index.ts';\nimport { getQueryMatcher, getSortComparator, normalizeMangoQuery } from './rx-query-helper.ts';\n\nexport type EventReduceResultNeg = {\n runFullQueryAgain: true;\n};\nexport type EventReduceResultPos = {\n runFullQueryAgain: false;\n changed: boolean;\n newResults: RxDocumentType[];\n};\nexport type EventReduceResult = EventReduceResultNeg | EventReduceResultPos;\n\n\nexport function getSortFieldsOfQuery(\n primaryKey: StringKeys>,\n query: MangoQuery\n): (string | StringKeys)[] {\n if (!query.sort || query.sort.length === 0) {\n return [primaryKey];\n } else {\n return query.sort.map(part => Object.keys(part)[0]);\n }\n}\n\n\n\nexport const RXQUERY_QUERY_PARAMS_CACHE: WeakMap> = new WeakMap();\nexport function getQueryParams(\n rxQuery: RxQuery\n): QueryParams {\n return getFromMapOrCreate(\n RXQUERY_QUERY_PARAMS_CACHE,\n rxQuery,\n () => {\n const collection = rxQuery.collection;\n const normalizedMangoQuery = normalizeMangoQuery(\n collection.storageInstance.schema,\n clone(rxQuery.mangoQuery)\n );\n const primaryKey = collection.schema.primaryPath;\n\n /**\n * Create a custom sort comparator\n * that uses the hooks to ensure\n * we send for example compressed documents to be sorted by compressed queries.\n */\n const sortComparator = getSortComparator(\n collection.schema.jsonSchema,\n normalizedMangoQuery\n );\n\n const useSortComparator: DeterministicSortComparator = (docA: RxDocType, docB: RxDocType) => {\n const sortComparatorData = {\n docA,\n docB,\n rxQuery\n };\n return sortComparator(sortComparatorData.docA, sortComparatorData.docB);\n };\n\n /**\n * Create a custom query matcher\n * that uses the hooks to ensure\n * we send for example compressed documents to match compressed queries.\n */\n const 
queryMatcher = getQueryMatcher(\n collection.schema.jsonSchema,\n normalizedMangoQuery\n );\n const useQueryMatcher: QueryMatcher> = (doc: RxDocumentData) => {\n const queryMatcherData = {\n doc,\n rxQuery\n };\n return queryMatcher(queryMatcherData.doc);\n };\n\n const ret: QueryParams = {\n primaryKey: rxQuery.collection.schema.primaryPath as any,\n skip: normalizedMangoQuery.skip,\n limit: normalizedMangoQuery.limit,\n sortFields: getSortFieldsOfQuery(primaryKey, normalizedMangoQuery) as string[],\n sortComparator: useSortComparator,\n queryMatcher: useQueryMatcher\n };\n return ret;\n }\n );\n}\n\n\nexport function calculateNewResults(\n rxQuery: RxQuery,\n rxChangeEvents: RxChangeEvent[]\n): EventReduceResult {\n if (!rxQuery.collection.database.eventReduce) {\n return {\n runFullQueryAgain: true\n };\n }\n const queryParams = getQueryParams(rxQuery);\n const previousResults: RxDocumentType[] = ensureNotFalsy(rxQuery._result).docsData.slice(0);\n const previousResultsMap: Map = ensureNotFalsy(rxQuery._result).docsDataMap;\n let changed: boolean = false;\n\n const eventReduceEvents: ChangeEvent[] = rxChangeEvents\n .map(cE => rxChangeEventToEventReduceChangeEvent(cE))\n .filter(arrayFilterNotEmpty);\n\n const foundNonOptimizeable = eventReduceEvents.find(eventReduceEvent => {\n const stateResolveFunctionInput: StateResolveFunctionInput = {\n queryParams,\n changeEvent: eventReduceEvent,\n previousResults,\n keyDocumentMap: previousResultsMap\n };\n\n const actionName: ActionName = calculateActionName(stateResolveFunctionInput);\n if (actionName === 'runFullQueryAgain') {\n return true;\n } else if (actionName !== 'doNothing') {\n changed = true;\n runAction(\n actionName,\n queryParams,\n eventReduceEvent,\n previousResults,\n previousResultsMap\n );\n return false;\n }\n });\n if (foundNonOptimizeable) {\n return {\n runFullQueryAgain: true,\n };\n } else {\n return {\n runFullQueryAgain: false,\n changed,\n newResults: previousResults\n };\n 
}\n}\n"],"mappings":";;;;;;;;;AAAA,IAAAA,cAAA,GAAAC,OAAA;AAiBA,IAAAC,cAAA,GAAAD,OAAA;AACA,IAAAE,MAAA,GAAAF,OAAA;AAMA,IAAAG,cAAA,GAAAH,OAAA;AAaO,SAASI,oBAAoBA,CAChCC,UAAiD,EACjDC,KAA4B,EACM;EAClC,IAAI,CAACA,KAAK,CAACC,IAAI,IAAID,KAAK,CAACC,IAAI,CAACC,MAAM,KAAK,CAAC,EAAE;IACxC,OAAO,CAACH,UAAU,CAAC;EACvB,CAAC,MAAM;IACH,OAAOC,KAAK,CAACC,IAAI,CAACE,GAAG,CAACC,IAAI,IAAIC,MAAM,CAACC,IAAI,CAACF,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;EACvD;AACJ;AAIO,IAAMG,0BAA8D,GAAAC,OAAA,CAAAD,0BAAA,GAAG,IAAIE,OAAO,CAAC,CAAC;AACpF,SAASC,cAAcA,CAC1BC,OAA2B,EACL;EACtB,OAAO,IAAAC,yBAAkB,EACrBL,0BAA0B,EAC1BI,OAAO,EACP,MAAM;IACF,IAAME,UAAU,GAAGF,OAAO,CAACE,UAAU;IACrC,IAAMC,oBAAoB,GAAG,IAAAC,kCAAmB,EAC5CF,UAAU,CAACG,eAAe,CAACC,MAAM,EACjC,IAAAC,YAAK,EAACP,OAAO,CAACQ,UAAU,CAC5B,CAAC;IACD,IAAMpB,UAAU,GAAGc,UAAU,CAACI,MAAM,CAACG,WAAW;;IAEhD;AACZ;AACA;AACA;AACA;IACY,IAAMC,cAAc,GAAG,IAAAC,gCAAiB,EACpCT,UAAU,CAACI,MAAM,CAACM,UAAU,EAC5BT,oBACJ,CAAC;IAED,IAAMU,iBAAyD,GAAGA,CAACC,IAAe,EAAEC,IAAe,KAAK;MACpG,IAAMC,kBAAkB,GAAG;QACvBF,IAAI;QACJC,IAAI;QACJf;MACJ,CAAC;MACD,OAAOU,cAAc,CAACM,kBAAkB,CAACF,IAAI,EAAEE,kBAAkB,CAACD,IAAI,CAAC;IAC3E,CAAC;;IAED;AACZ;AACA;AACA;AACA;IACY,IAAME,YAAY,GAAG,IAAAC,8BAAe,EAChChB,UAAU,CAACI,MAAM,CAACM,UAAU,EAC5BT,oBACJ,CAAC;IACD,IAAMgB,eAAwD,GAAIC,GAA8B,IAAK;MACjG,IAAMC,gBAAgB,GAAG;QACrBD,GAAG;QACHpB;MACJ,CAAC;MACD,OAAOiB,YAAY,CAACI,gBAAgB,CAACD,GAAG,CAAC;IAC7C,CAAC;IAED,IAAME,GAAqB,GAAG;MAC1BlC,UAAU,EAAEY,OAAO,CAACE,UAAU,CAACI,MAAM,CAACG,WAAkB;MACxDc,IAAI,EAAEpB,oBAAoB,CAACoB,IAAI;MAC/BC,KAAK,EAAErB,oBAAoB,CAACqB,KAAK;MACjCC,UAAU,EAAEtC,oBAAoB,CAACC,UAAU,EAAEe,oBAAoB,CAAa;MAC9EO,cAAc,EAAEG,iBAAiB;MACjCI,YAAY,EAAEE;IAClB,CAAC;IACD,OAAOG,GAAG;EACd,CACJ,CAAC;AACL;AAGO,SAASI,mBAAmBA,CAC/B1B,OAAgC,EAChC2B,cAA+C,EACd;EACjC,IAAI,CAAC3B,OAAO,CAACE,UAAU,CAAC0B,QAAQ,CAACC,WAAW,EAAE;IAC1C,OAAO;MACHC,iBAAiB,EAAE;IACvB,CAAC;EACL;EACA,IAAMC,WAAW,GAAGhC,cAAc,CAACC,OAAO,CAAC;EAC3C,IAAMgC,eAAiC,GAAG,IAAAC,qBAAc,EAACjC,OAAO,CAACkC,OAAO,CAAC,CAACC,QAAQ,CAACC,KAAK,CAAC,CAAC,CAAC;EAC3F,IAAMC,kBAA+C,GAAG,IAA
AJ,qBAAc,EAACjC,OAAO,CAACkC,OAAO,CAAC,CAACI,WAAW;EACnG,IAAIC,OAAgB,GAAG,KAAK;EAE5B,IAAMC,iBAAgD,GAAGb,cAAc,CAClEnC,GAAG,CAACiD,EAAE,IAAI,IAAAC,oDAAqC,EAACD,EAAE,CAAC,CAAC,CACpDE,MAAM,CAACC,0BAAmB,CAAC;EAEhC,IAAMC,oBAAoB,GAAGL,iBAAiB,CAACM,IAAI,CAACC,gBAAgB,IAAI;IACpE,IAAMC,yBAAoE,GAAG;MACzEjB,WAAW;MACXkB,WAAW,EAAEF,gBAAgB;MAC7Bf,eAAe;MACfkB,cAAc,EAAEb;IACpB,CAAC;IAED,IAAMc,UAAsB,GAAG,IAAAC,kCAAmB,EAACJ,yBAAyB,CAAC;IAC7E,IAAIG,UAAU,KAAK,mBAAmB,EAAE;MACpC,OAAO,IAAI;IACf,CAAC,MAAM,IAAIA,UAAU,KAAK,WAAW,EAAE;MACnCZ,OAAO,GAAG,IAAI;MACd,IAAAc,wBAAS,EACLF,UAAU,EACVpB,WAAW,EACXgB,gBAAgB,EAChBf,eAAe,EACfK,kBACJ,CAAC;MACD,OAAO,KAAK;IAChB;EACJ,CAAC,CAAC;EACF,IAAIQ,oBAAoB,EAAE;IACtB,OAAO;MACHf,iBAAiB,EAAE;IACvB,CAAC;EACL,CAAC,MAAM;IACH,OAAO;MACHA,iBAAiB,EAAE,KAAK;MACxBS,OAAO;MACPe,UAAU,EAAEtB;IAChB,CAAC;EACL;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/hooks.js b/dist/cjs/hooks.js deleted file mode 100644 index aac429d3ae0..00000000000 --- a/dist/cjs/hooks.js +++ /dev/null @@ -1,127 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.HOOKS = void 0; -exports._clearHook = _clearHook; -exports.runAsyncPluginHooks = runAsyncPluginHooks; -exports.runPluginHooks = runPluginHooks; -/** - * hook-functions that can be extended by the plugin - */ -var HOOKS = exports.HOOKS = { - /** - * Runs before a plugin is added. - * Use this to block the usage of non-compatible plugins. - */ - preAddRxPlugin: [], - /** - * functions that run before the database is created - */ - preCreateRxDatabase: [], - /** - * runs after the database is created and prepared - * but before the instance is returned to the user - * @async - */ - createRxDatabase: [], - preCreateRxCollection: [], - createRxCollection: [], - createRxState: [], - /** - * runs at the end of the destroy-process of a collection - * @async - */ - postDestroyRxCollection: [], - /** - * Runs after a collection is removed. 
- * @async - */ - postRemoveRxCollection: [], - /** - * functions that get the json-schema as input - * to do additionally checks/manipulation - */ - preCreateRxSchema: [], - /** - * functions that run after the RxSchema is created - * gets RxSchema as attribute - */ - createRxSchema: [], - preCreateRxQuery: [], - /** - * Runs before a query is send to the - * prepareQuery function of the storage engine. - */ - prePrepareQuery: [], - createRxDocument: [], - /** - * runs after a RxDocument is created, - * cannot be async - */ - postCreateRxDocument: [], - /** - * Runs before a RxStorageInstance is created - * gets the params of createStorageInstance() - * as attribute so you can manipulate them. - * Notice that you have to clone stuff before mutating the inputs. - */ - preCreateRxStorageInstance: [], - preStorageWrite: [], - /** - * runs on the document-data before the document is migrated - * { - * doc: Object, // original doc-data - * migrated: // migrated doc-data after run through migration-strategies - * } - */ - preMigrateDocument: [], - /** - * runs after the migration of a document has been done - */ - postMigrateDocument: [], - /** - * runs at the beginning of the destroy-process of a database - */ - preDestroyRxDatabase: [], - /** - * runs after a database has been removed - * @async - */ - postRemoveRxDatabase: [], - /** - * runs before the replication writes the rows to master - * but before the rows have been modified - * @async - */ - preReplicationMasterWrite: [], - /** - * runs after the replication has been sent to the server - * but before the new documents have been handled - * @async - */ - preReplicationMasterWriteDocumentsHandle: [] -}; -function runPluginHooks(hookKey, obj) { - if (HOOKS[hookKey].length > 0) { - HOOKS[hookKey].forEach(fun => fun(obj)); - } -} - -/** - * TODO - * we should not run the hooks in parallel - * this makes stuff unpredictable. 
- */ -function runAsyncPluginHooks(hookKey, obj) { - return Promise.all(HOOKS[hookKey].map(fun => fun(obj))); -} - -/** - * used in tests to remove hooks - */ -function _clearHook(type, fun) { - HOOKS[type] = HOOKS[type].filter(h => h !== fun); -} -//# sourceMappingURL=hooks.js.map \ No newline at end of file diff --git a/dist/cjs/hooks.js.map b/dist/cjs/hooks.js.map deleted file mode 100644 index 28c3e53131d..00000000000 --- a/dist/cjs/hooks.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"hooks.js","names":["HOOKS","exports","preAddRxPlugin","preCreateRxDatabase","createRxDatabase","preCreateRxCollection","createRxCollection","createRxState","postDestroyRxCollection","postRemoveRxCollection","preCreateRxSchema","createRxSchema","preCreateRxQuery","prePrepareQuery","createRxDocument","postCreateRxDocument","preCreateRxStorageInstance","preStorageWrite","preMigrateDocument","postMigrateDocument","preDestroyRxDatabase","postRemoveRxDatabase","preReplicationMasterWrite","preReplicationMasterWriteDocumentsHandle","runPluginHooks","hookKey","obj","length","forEach","fun","runAsyncPluginHooks","Promise","all","map","_clearHook","type","filter","h"],"sources":["../../src/hooks.ts"],"sourcesContent":["\n/**\n * hook-functions that can be extended by the plugin\n */\nexport const HOOKS: { [k: string]: any[]; } = {\n /**\n * Runs before a plugin is added.\n * Use this to block the usage of non-compatible plugins.\n */\n preAddRxPlugin: [],\n /**\n * functions that run before the database is created\n */\n preCreateRxDatabase: [],\n /**\n * runs after the database is created and prepared\n * but before the instance is returned to the user\n * @async\n */\n createRxDatabase: [],\n preCreateRxCollection: [],\n createRxCollection: [],\n createRxState: [],\n /**\n * runs at the end of the destroy-process of a collection\n * @async\n */\n postDestroyRxCollection: [],\n /**\n * Runs after a collection is removed.\n * @async\n */\n postRemoveRxCollection: [],\n /**\n * 
functions that get the json-schema as input\n * to do additionally checks/manipulation\n */\n preCreateRxSchema: [],\n /**\n * functions that run after the RxSchema is created\n * gets RxSchema as attribute\n */\n createRxSchema: [],\n preCreateRxQuery: [],\n /**\n * Runs before a query is send to the\n * prepareQuery function of the storage engine.\n */\n prePrepareQuery: [],\n createRxDocument: [],\n /**\n * runs after a RxDocument is created,\n * cannot be async\n */\n postCreateRxDocument: [],\n /**\n * Runs before a RxStorageInstance is created\n * gets the params of createStorageInstance()\n * as attribute so you can manipulate them.\n * Notice that you have to clone stuff before mutating the inputs.\n */\n preCreateRxStorageInstance: [],\n preStorageWrite: [],\n /**\n * runs on the document-data before the document is migrated\n * {\n * doc: Object, // original doc-data\n * migrated: // migrated doc-data after run through migration-strategies\n * }\n */\n preMigrateDocument: [],\n /**\n * runs after the migration of a document has been done\n */\n postMigrateDocument: [],\n /**\n * runs at the beginning of the destroy-process of a database\n */\n preDestroyRxDatabase: [],\n /**\n * runs after a database has been removed\n * @async\n */\n postRemoveRxDatabase: [],\n\n /**\n * runs before the replication writes the rows to master\n * but before the rows have been modified\n * @async\n */\n preReplicationMasterWrite: [],\n\n /**\n * runs after the replication has been sent to the server\n * but before the new documents have been handled\n * @async\n */\n preReplicationMasterWriteDocumentsHandle: [],\n};\n\nexport function runPluginHooks(hookKey: string, obj: any) {\n if (HOOKS[hookKey].length > 0) {\n HOOKS[hookKey].forEach(fun => fun(obj));\n }\n}\n\n\n/**\n * TODO\n * we should not run the hooks in parallel\n * this makes stuff unpredictable.\n */\nexport function runAsyncPluginHooks(hookKey: string, obj: any): Promise {\n return Promise.all(\n 
HOOKS[hookKey].map(fun => fun(obj))\n );\n}\n\n/**\n * used in tests to remove hooks\n */\nexport function _clearHook(type: string, fun: Function) {\n HOOKS[type] = HOOKS[type].filter(h => h !== fun);\n}\n"],"mappings":";;;;;;;;;AACA;AACA;AACA;AACO,IAAMA,KAA8B,GAAAC,OAAA,CAAAD,KAAA,GAAG;EAC1C;AACJ;AACA;AACA;EACIE,cAAc,EAAE,EAAE;EAClB;AACJ;AACA;EACIC,mBAAmB,EAAE,EAAE;EACvB;AACJ;AACA;AACA;AACA;EACIC,gBAAgB,EAAE,EAAE;EACpBC,qBAAqB,EAAE,EAAE;EACzBC,kBAAkB,EAAE,EAAE;EACtBC,aAAa,EAAE,EAAE;EACjB;AACJ;AACA;AACA;EACIC,uBAAuB,EAAE,EAAE;EAC3B;AACJ;AACA;AACA;EACIC,sBAAsB,EAAE,EAAE;EAC1B;AACJ;AACA;AACA;EACIC,iBAAiB,EAAE,EAAE;EACrB;AACJ;AACA;AACA;EACIC,cAAc,EAAE,EAAE;EAClBC,gBAAgB,EAAE,EAAE;EACpB;AACJ;AACA;AACA;EACIC,eAAe,EAAE,EAAE;EACnBC,gBAAgB,EAAE,EAAE;EACpB;AACJ;AACA;AACA;EACIC,oBAAoB,EAAE,EAAE;EACxB;AACJ;AACA;AACA;AACA;AACA;EACIC,0BAA0B,EAAE,EAAE;EAC9BC,eAAe,EAAE,EAAE;EACnB;AACJ;AACA;AACA;AACA;AACA;AACA;EACIC,kBAAkB,EAAE,EAAE;EACtB;AACJ;AACA;EACIC,mBAAmB,EAAE,EAAE;EACvB;AACJ;AACA;EACIC,oBAAoB,EAAE,EAAE;EACxB;AACJ;AACA;AACA;EACIC,oBAAoB,EAAE,EAAE;EAExB;AACJ;AACA;AACA;AACA;EACIC,yBAAyB,EAAE,EAAE;EAE7B;AACJ;AACA;AACA;AACA;EACIC,wCAAwC,EAAE;AAC9C,CAAC;AAEM,SAASC,cAAcA,CAACC,OAAe,EAAEC,GAAQ,EAAE;EACtD,IAAI1B,KAAK,CAACyB,OAAO,CAAC,CAACE,MAAM,GAAG,CAAC,EAAE;IAC3B3B,KAAK,CAACyB,OAAO,CAAC,CAACG,OAAO,CAACC,GAAG,IAAIA,GAAG,CAACH,GAAG,CAAC,CAAC;EAC3C;AACJ;;AAGA;AACA;AACA;AACA;AACA;AACO,SAASI,mBAAmBA,CAACL,OAAe,EAAEC,GAAQ,EAAgB;EACzE,OAAOK,OAAO,CAACC,GAAG,CACdhC,KAAK,CAACyB,OAAO,CAAC,CAACQ,GAAG,CAACJ,GAAG,IAAIA,GAAG,CAACH,GAAG,CAAC,CACtC,CAAC;AACL;;AAEA;AACA;AACA;AACO,SAASQ,UAAUA,CAACC,IAAY,EAAEN,GAAa,EAAE;EACpD7B,KAAK,CAACmC,IAAI,CAAC,GAAGnC,KAAK,CAACmC,IAAI,CAAC,CAACC,MAAM,CAACC,CAAC,IAAIA,CAAC,KAAKR,GAAG,CAAC;AACpD","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/incremental-write.js b/dist/cjs/incremental-write.js deleted file mode 100644 index 8c47a5dc501..00000000000 --- a/dist/cjs/incremental-write.js +++ /dev/null @@ -1,169 +0,0 @@ -"use strict"; - 
-Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.IncrementalWriteQueue = void 0; -exports.findNewestOfDocumentStates = findNewestOfDocumentStates; -exports.modifierFromPublicToInternal = modifierFromPublicToInternal; -var _rxError = require("./rx-error.js"); -var _index = require("./plugins/utils/index.js"); -/** - * The incremental write queue - * batches up all incremental writes to a collection - * so that performance can be improved by: - * - Running only one write even when there are multiple modifications to the same document. - * - Run all writes ins a single bulkWrite() call even when there are writes to many documents. - */ -var IncrementalWriteQueue = exports.IncrementalWriteQueue = /*#__PURE__*/function () { - function IncrementalWriteQueue(storageInstance, primaryPath, - // can be used to run hooks etc. - preWrite, postWrite) { - this.queueByDocId = new Map(); - this.isRunning = false; - this.storageInstance = storageInstance; - this.primaryPath = primaryPath; - this.preWrite = preWrite; - this.postWrite = postWrite; - } - var _proto = IncrementalWriteQueue.prototype; - _proto.addWrite = function addWrite(lastKnownDocumentState, modifier) { - var docId = lastKnownDocumentState[this.primaryPath]; - var ar = (0, _index.getFromMapOrCreate)(this.queueByDocId, docId, () => []); - var ret = new Promise((resolve, reject) => { - var item = { - lastKnownDocumentState, - modifier, - resolve, - reject - }; - (0, _index.ensureNotFalsy)(ar).push(item); - this.triggerRun(); - }); - return ret; - }; - _proto.triggerRun = async function triggerRun() { - if (this.isRunning === true || this.queueByDocId.size === 0) { - // already running - return; - } - this.isRunning = true; - var writeRows = []; - - /** - * 'take over' so that while the async functions runs, - * new incremental updates could be added from the outside. 
- */ - var itemsById = this.queueByDocId; - this.queueByDocId = new Map(); - await Promise.all(Array.from(itemsById.entries()).map(async ([_docId, items]) => { - var oldData = findNewestOfDocumentStates(items.map(i => i.lastKnownDocumentState)); - var newData = oldData; - for (var item of items) { - try { - newData = await item.modifier( - /** - * We have to clone() each time because the modifier - * might throw while it already changed some properties - * of the document. - */ - (0, _index.clone)(newData)); - } catch (err) { - item.reject(err); - item.reject = () => {}; - item.resolve = () => {}; - } - } - try { - await this.preWrite(newData, oldData); - } catch (err) { - /** - * If the before-hooks fail, - * we reject all of the writes because it is - * not possible to determine which one is to blame. - */ - items.forEach(item => item.reject(err)); - return; - } - writeRows.push({ - previous: oldData, - document: newData - }); - })); - var writeResult = writeRows.length > 0 ? await this.storageInstance.bulkWrite(writeRows, 'incremental-write') : { - error: [], - success: [] - }; - - // process success - await Promise.all(writeResult.success.map(result => { - var docId = result[this.primaryPath]; - this.postWrite(result); - var items = (0, _index.getFromMapOrThrow)(itemsById, docId); - items.forEach(item => item.resolve(result)); - })); - - // process errors - writeResult.error.forEach(error => { - var docId = error.documentId; - var items = (0, _index.getFromMapOrThrow)(itemsById, docId); - var isConflict = (0, _rxError.isBulkWriteConflictError)(error); - if (isConflict) { - // had conflict -> retry afterwards - var ar = (0, _index.getFromMapOrCreate)(this.queueByDocId, docId, () => []); - /** - * Add the items back to this.queueByDocId - * by maintaining the original order. 
- */ - items.reverse().forEach(item => { - item.lastKnownDocumentState = (0, _index.ensureNotFalsy)(isConflict.documentInDb); - (0, _index.ensureNotFalsy)(ar).unshift(item); - }); - } else { - // other error -> must be thrown - var rxError = (0, _rxError.rxStorageWriteErrorToRxError)(error); - items.forEach(item => item.reject(rxError)); - } - }); - this.isRunning = false; - - /** - * Always trigger another run - * because in between there might be new items - * been added to the queue. - */ - return this.triggerRun(); - }; - return IncrementalWriteQueue; -}(); -function modifierFromPublicToInternal(publicModifier) { - var ret = async docData => { - var withoutMeta = (0, _index.stripMetaDataFromDocument)(docData); - withoutMeta._deleted = docData._deleted; - var modified = await publicModifier(withoutMeta); - var reattachedMeta = Object.assign({}, modified, { - _meta: docData._meta, - _attachments: docData._attachments, - _rev: docData._rev, - _deleted: typeof modified._deleted !== 'undefined' ? 
modified._deleted : docData._deleted - }); - if (typeof reattachedMeta._deleted === 'undefined') { - reattachedMeta._deleted = false; - } - return reattachedMeta; - }; - return ret; -} -function findNewestOfDocumentStates(docs) { - var newest = docs[0]; - var newestRevisionHeight = (0, _index.getHeightOfRevision)(newest._rev); - docs.forEach(doc => { - var height = (0, _index.getHeightOfRevision)(doc._rev); - if (height > newestRevisionHeight) { - newest = doc; - newestRevisionHeight = height; - } - }); - return newest; -} -//# sourceMappingURL=incremental-write.js.map \ No newline at end of file diff --git a/dist/cjs/incremental-write.js.map b/dist/cjs/incremental-write.js.map deleted file mode 100644 index e7f46c3841b..00000000000 --- a/dist/cjs/incremental-write.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"incremental-write.js","names":["_rxError","require","_index","IncrementalWriteQueue","exports","storageInstance","primaryPath","preWrite","postWrite","queueByDocId","Map","isRunning","_proto","prototype","addWrite","lastKnownDocumentState","modifier","docId","ar","getFromMapOrCreate","ret","Promise","resolve","reject","item","ensureNotFalsy","push","triggerRun","size","writeRows","itemsById","all","Array","from","entries","map","_docId","items","oldData","findNewestOfDocumentStates","i","newData","clone","err","forEach","previous","document","writeResult","length","bulkWrite","error","success","result","getFromMapOrThrow","documentId","isConflict","isBulkWriteConflictError","reverse","documentInDb","unshift","rxError","rxStorageWriteErrorToRxError","modifierFromPublicToInternal","publicModifier","docData","withoutMeta","stripMetaDataFromDocument","_deleted","modified","reattachedMeta","Object","assign","_meta","_attachments","_rev","docs","newest","newestRevisionHeight","getHeightOfRevision","doc","height"],"sources":["../../src/incremental-write.ts"],"sourcesContent":["import {\n isBulkWriteConflictError,\n rxStorageWriteErrorToRxError\n} from 
'./rx-error.ts';\nimport type {\n ModifyFunction,\n BulkWriteRow,\n MaybePromise,\n RxDocumentData,\n RxDocumentWriteData,\n RxError,\n RxStorageBulkWriteResponse,\n RxStorageInstance,\n StringKeys,\n WithDeleted\n} from './types/index.d.ts';\nimport {\n clone,\n ensureNotFalsy,\n getFromMapOrCreate,\n getFromMapOrThrow,\n getHeightOfRevision,\n stripMetaDataFromDocument\n} from './plugins/utils/index.ts';\n\n\n\nexport type IncrementalWriteModifier = (\n doc: RxDocumentData\n) => MaybePromise> | MaybePromise>;\n\n\ntype IncrementalWriteQueueItem = {\n lastKnownDocumentState: RxDocumentData;\n modifier: IncrementalWriteModifier;\n resolve: (d: RxDocumentData) => void;\n reject: (error: RxError) => void;\n};\n\n/**\n * The incremental write queue\n * batches up all incremental writes to a collection\n * so that performance can be improved by:\n * - Running only one write even when there are multiple modifications to the same document.\n * - Run all writes ins a single bulkWrite() call even when there are writes to many documents.\n */\nexport class IncrementalWriteQueue {\n public queueByDocId = new Map[]>();\n public isRunning: boolean = false;\n\n constructor(\n public readonly storageInstance: RxStorageInstance,\n public readonly primaryPath: StringKeys>,\n // can be used to run hooks etc.\n public readonly preWrite: (newData: RxDocumentData, oldData: RxDocumentData) => MaybePromise,\n public readonly postWrite: (docData: RxDocumentData) => void\n\n ) { }\n\n addWrite(\n lastKnownDocumentState: RxDocumentData,\n modifier: IncrementalWriteModifier\n ): Promise> {\n const docId: string = lastKnownDocumentState[this.primaryPath] as any;\n const ar = getFromMapOrCreate(this.queueByDocId, docId, () => []);\n const ret = new Promise>((resolve, reject) => {\n const item: IncrementalWriteQueueItem = {\n lastKnownDocumentState,\n modifier,\n resolve,\n reject\n };\n ensureNotFalsy(ar).push(item);\n this.triggerRun();\n });\n return ret;\n }\n\n async triggerRun(): Promise 
{\n if (\n this.isRunning === true ||\n this.queueByDocId.size === 0\n ) {\n // already running\n return;\n }\n this.isRunning = true;\n const writeRows: BulkWriteRow[] = [];\n\n /**\n * 'take over' so that while the async functions runs,\n * new incremental updates could be added from the outside.\n */\n const itemsById = this.queueByDocId;\n this.queueByDocId = new Map();\n await Promise.all(\n Array.from(itemsById.entries())\n .map(async ([_docId, items]) => {\n const oldData = findNewestOfDocumentStates(\n items.map(i => i.lastKnownDocumentState)\n );\n let newData = oldData;\n for (const item of items) {\n try {\n newData = await item.modifier(\n /**\n * We have to clone() each time because the modifier\n * might throw while it already changed some properties\n * of the document.\n */\n clone(newData)\n ) as any;\n } catch (err: any) {\n item.reject(err);\n item.reject = () => { };\n item.resolve = () => { };\n }\n }\n\n try {\n await this.preWrite(newData, oldData);\n } catch (err: any) {\n /**\n * If the before-hooks fail,\n * we reject all of the writes because it is\n * not possible to determine which one is to blame.\n */\n items.forEach(item => item.reject(err));\n return;\n }\n writeRows.push({\n previous: oldData,\n document: newData\n });\n })\n );\n const writeResult: RxStorageBulkWriteResponse = writeRows.length > 0 ?\n await this.storageInstance.bulkWrite(writeRows, 'incremental-write') :\n { error: [], success: [] };\n\n // process success\n await Promise.all(\n writeResult.success.map(result => {\n const docId = result[this.primaryPath] as string;\n this.postWrite(result);\n const items = getFromMapOrThrow(itemsById, docId);\n items.forEach(item => item.resolve(result));\n })\n );\n\n // process errors\n writeResult.error\n .forEach(error => {\n const docId = error.documentId;\n const items = getFromMapOrThrow(itemsById, docId);\n const isConflict = isBulkWriteConflictError(error);\n if (isConflict) {\n // had conflict -> retry afterwards\n const 
ar = getFromMapOrCreate(this.queueByDocId, docId, () => []);\n /**\n * Add the items back to this.queueByDocId\n * by maintaining the original order.\n */\n items\n .reverse()\n .forEach(item => {\n item.lastKnownDocumentState = ensureNotFalsy(isConflict.documentInDb);\n ensureNotFalsy(ar).unshift(item);\n });\n } else {\n // other error -> must be thrown\n const rxError = rxStorageWriteErrorToRxError(error);\n items.forEach(item => item.reject(rxError));\n }\n });\n this.isRunning = false;\n\n /**\n * Always trigger another run\n * because in between there might be new items\n * been added to the queue.\n */\n return this.triggerRun();\n }\n}\n\n\nexport function modifierFromPublicToInternal(\n publicModifier: ModifyFunction\n): IncrementalWriteModifier {\n const ret = async (docData: RxDocumentData) => {\n const withoutMeta: WithDeleted = stripMetaDataFromDocument(docData) as any;\n withoutMeta._deleted = docData._deleted;\n const modified = await publicModifier(withoutMeta);\n const reattachedMeta: RxDocumentData = Object.assign({}, modified, {\n _meta: docData._meta,\n _attachments: docData._attachments,\n _rev: docData._rev,\n _deleted: typeof (modified as WithDeleted)._deleted !== 'undefined' ?\n (modified as WithDeleted)._deleted :\n docData._deleted\n });\n if (typeof reattachedMeta._deleted === 'undefined') {\n reattachedMeta._deleted = false;\n }\n return reattachedMeta;\n };\n return ret;\n}\n\n\nexport function findNewestOfDocumentStates(\n docs: RxDocumentData[]\n): RxDocumentData {\n\n let newest = docs[0];\n let newestRevisionHeight = getHeightOfRevision(newest._rev);\n docs.forEach(doc => {\n const height = getHeightOfRevision(doc._rev);\n if (height > newestRevisionHeight) {\n newest = doc;\n newestRevisionHeight = height;\n }\n });\n return 
newest;\n}\n"],"mappings":";;;;;;;;AAAA,IAAAA,QAAA,GAAAC,OAAA;AAgBA,IAAAC,MAAA,GAAAD,OAAA;AAuBA;AACA;AACA;AACA;AACA;AACA;AACA;AANA,IAOaE,qBAAqB,GAAAC,OAAA,CAAAD,qBAAA;EAI9B,SAAAA,sBACoBE,eAAuD,EACvDC,WAAkD;EAClE;EACgBC,QAAwG,EACxGC,SAAuD,EAEzE;IAAA,KAVKC,YAAY,GAAG,IAAIC,GAAG,CAAiD,CAAC;IAAA,KACxEC,SAAS,GAAY,KAAK;IAAA,KAGbN,eAAuD,GAAvDA,eAAuD;IAAA,KACvDC,WAAkD,GAAlDA,WAAkD;IAAA,KAElDC,QAAwG,GAAxGA,QAAwG;IAAA,KACxGC,SAAuD,GAAvDA,SAAuD;EAEvE;EAAC,IAAAI,MAAA,GAAAT,qBAAA,CAAAU,SAAA;EAAAD,MAAA,CAELE,QAAQ,GAAR,SAAAA,SACIC,sBAAiD,EACjDC,QAA6C,EACX;IAClC,IAAMC,KAAa,GAAGF,sBAAsB,CAAC,IAAI,CAACT,WAAW,CAAQ;IACrE,IAAMY,EAAE,GAAG,IAAAC,yBAAkB,EAAC,IAAI,CAACV,YAAY,EAAEQ,KAAK,EAAE,MAAM,EAAE,CAAC;IACjE,IAAMG,GAAG,GAAG,IAAIC,OAAO,CAA4B,CAACC,OAAO,EAAEC,MAAM,KAAK;MACpE,IAAMC,IAA0C,GAAG;QAC/CT,sBAAsB;QACtBC,QAAQ;QACRM,OAAO;QACPC;MACJ,CAAC;MACD,IAAAE,qBAAc,EAACP,EAAE,CAAC,CAACQ,IAAI,CAACF,IAAI,CAAC;MAC7B,IAAI,CAACG,UAAU,CAAC,CAAC;IACrB,CAAC,CAAC;IACF,OAAOP,GAAG;EACd,CAAC;EAAAR,MAAA,CAEKe,UAAU,GAAhB,eAAAA,WAAA,EAAkC;IAC9B,IACI,IAAI,CAAChB,SAAS,KAAK,IAAI,IACvB,IAAI,CAACF,YAAY,CAACmB,IAAI,KAAK,CAAC,EAC9B;MACE;MACA;IACJ;IACA,IAAI,CAACjB,SAAS,GAAG,IAAI;IACrB,IAAMkB,SAAoC,GAAG,EAAE;;IAE/C;AACR;AACA;AACA;IACQ,IAAMC,SAAS,GAAG,IAAI,CAACrB,YAAY;IACnC,IAAI,CAACA,YAAY,GAAG,IAAIC,GAAG,CAAC,CAAC;IAC7B,MAAMW,OAAO,CAACU,GAAG,CACbC,KAAK,CAACC,IAAI,CAACH,SAAS,CAACI,OAAO,CAAC,CAAC,CAAC,CAC1BC,GAAG,CAAC,OAAO,CAACC,MAAM,EAAEC,KAAK,CAAC,KAAK;MAC5B,IAAMC,OAAO,GAAGC,0BAA0B,CACtCF,KAAK,CAACF,GAAG,CAACK,CAAC,IAAIA,CAAC,CAACzB,sBAAsB,CAC3C,CAAC;MACD,IAAI0B,OAAO,GAAGH,OAAO;MACrB,KAAK,IAAMd,IAAI,IAAIa,KAAK,EAAE;QACtB,IAAI;UACAI,OAAO,GAAG,MAAMjB,IAAI,CAACR,QAAQ;UACzB;AAChC;AACA;AACA;AACA;UACgC,IAAA0B,YAAK,EAACD,OAAO,CACjB,CAAQ;QACZ,CAAC,CAAC,OAAOE,GAAQ,EAAE;UACfnB,IAAI,CAACD,MAAM,CAACoB,GAAG,CAAC;UAChBnB,IAAI,CAACD,MAAM,GAAG,MAAM,CAAE,CAAC;UACvBC,IAAI,CAACF,OAAO,GAAG,MAAM,CAAE,CAAC;QAC5B;MACJ;MAEA,IAAI;QACA,MAAM,IAAI,CAACf,QAAQ,CAACkC,OAAO,EAAEH,OAAO,CAAC;MACzC,CAAC,CAAC,OAAOK,GAAQ,EAAE;QACf;AACxB;AACA;AACA;AACA;QAC
wBN,KAAK,CAACO,OAAO,CAACpB,IAAI,IAAIA,IAAI,CAACD,MAAM,CAACoB,GAAG,CAAC,CAAC;QACvC;MACJ;MACAd,SAAS,CAACH,IAAI,CAAC;QACXmB,QAAQ,EAAEP,OAAO;QACjBQ,QAAQ,EAAEL;MACd,CAAC,CAAC;IACN,CAAC,CACT,CAAC;IACD,IAAMM,WAAkD,GAAGlB,SAAS,CAACmB,MAAM,GAAG,CAAC,GAC3E,MAAM,IAAI,CAAC3C,eAAe,CAAC4C,SAAS,CAACpB,SAAS,EAAE,mBAAmB,CAAC,GACpE;MAAEqB,KAAK,EAAE,EAAE;MAAEC,OAAO,EAAE;IAAG,CAAC;;IAE9B;IACA,MAAM9B,OAAO,CAACU,GAAG,CACbgB,WAAW,CAACI,OAAO,CAAChB,GAAG,CAACiB,MAAM,IAAI;MAC9B,IAAMnC,KAAK,GAAGmC,MAAM,CAAC,IAAI,CAAC9C,WAAW,CAAW;MAChD,IAAI,CAACE,SAAS,CAAC4C,MAAM,CAAC;MACtB,IAAMf,KAAK,GAAG,IAAAgB,wBAAiB,EAACvB,SAAS,EAAEb,KAAK,CAAC;MACjDoB,KAAK,CAACO,OAAO,CAACpB,IAAI,IAAIA,IAAI,CAACF,OAAO,CAAC8B,MAAM,CAAC,CAAC;IAC/C,CAAC,CACL,CAAC;;IAED;IACAL,WAAW,CAACG,KAAK,CACZN,OAAO,CAACM,KAAK,IAAI;MACd,IAAMjC,KAAK,GAAGiC,KAAK,CAACI,UAAU;MAC9B,IAAMjB,KAAK,GAAG,IAAAgB,wBAAiB,EAACvB,SAAS,EAAEb,KAAK,CAAC;MACjD,IAAMsC,UAAU,GAAG,IAAAC,iCAAwB,EAAYN,KAAK,CAAC;MAC7D,IAAIK,UAAU,EAAE;QACZ;QACA,IAAMrC,EAAE,GAAG,IAAAC,yBAAkB,EAAC,IAAI,CAACV,YAAY,EAAEQ,KAAK,EAAE,MAAM,EAAE,CAAC;QACjE;AACpB;AACA;AACA;QACoBoB,KAAK,CACAoB,OAAO,CAAC,CAAC,CACTb,OAAO,CAACpB,IAAI,IAAI;UACbA,IAAI,CAACT,sBAAsB,GAAG,IAAAU,qBAAc,EAAC8B,UAAU,CAACG,YAAY,CAAC;UACrE,IAAAjC,qBAAc,EAACP,EAAE,CAAC,CAACyC,OAAO,CAACnC,IAAI,CAAC;QACpC,CAAC,CAAC;MACV,CAAC,MAAM;QACH;QACA,IAAMoC,OAAO,GAAG,IAAAC,qCAA4B,EAACX,KAAK,CAAC;QACnDb,KAAK,CAACO,OAAO,CAACpB,IAAI,IAAIA,IAAI,CAACD,MAAM,CAACqC,OAAO,CAAC,CAAC;MAC/C;IACJ,CAAC,CAAC;IACN,IAAI,CAACjD,SAAS,GAAG,KAAK;;IAEtB;AACR;AACA;AACA;AACA;IACQ,OAAO,IAAI,CAACgB,UAAU,CAAC,CAAC;EAC5B,CAAC;EAAA,OAAAxB,qBAAA;AAAA;AAIE,SAAS2D,4BAA4BA,CACxCC,cAAyC,EACN;EACnC,IAAM3C,GAAG,GAAG,MAAO4C,OAAkC,IAAK;IACtD,IAAMC,WAAmC,GAAG,IAAAC,gCAAyB,EAACF,OAAO,CAAQ;IACrFC,WAAW,CAACE,QAAQ,GAAGH,OAAO,CAACG,QAAQ;IACvC,IAAMC,QAAQ,GAAG,MAAML,cAAc,CAACE,WAAW,CAAC;IAClD,IAAMI,cAAyC,GAAGC,MAAM,CAACC,MAAM,CAAC,CAAC,CAAC,EAAEH,QAAQ,EAAE;MAC1EI,KAAK,EAAER,OAAO,CAACQ,KAAK;MACpBC,YAAY,EAAET,OAAO,CAACS,YAAY;MAClCC,IAAI,EAAEV,OAAO,CAACU,IAAI;MAClBP,QAAQ,EAAE,OAAQC,QAAQ,C
AA4BD,QAAQ,KAAK,WAAW,GACzEC,QAAQ,CAA4BD,QAAQ,GAC7CH,OAAO,CAACG;IAChB,CAAC,CAAC;IACF,IAAI,OAAOE,cAAc,CAACF,QAAQ,KAAK,WAAW,EAAE;MAChDE,cAAc,CAACF,QAAQ,GAAG,KAAK;IACnC;IACA,OAAOE,cAAc;EACzB,CAAC;EACD,OAAOjD,GAAG;AACd;AAGO,SAASmB,0BAA0BA,CACtCoC,IAAiC,EACR;EAEzB,IAAIC,MAAM,GAAGD,IAAI,CAAC,CAAC,CAAC;EACpB,IAAIE,oBAAoB,GAAG,IAAAC,0BAAmB,EAACF,MAAM,CAACF,IAAI,CAAC;EAC3DC,IAAI,CAAC/B,OAAO,CAACmC,GAAG,IAAI;IAChB,IAAMC,MAAM,GAAG,IAAAF,0BAAmB,EAACC,GAAG,CAACL,IAAI,CAAC;IAC5C,IAAIM,MAAM,GAAGH,oBAAoB,EAAE;MAC/BD,MAAM,GAAGG,GAAG;MACZF,oBAAoB,GAAGG,MAAM;IACjC;EACJ,CAAC,CAAC;EACF,OAAOJ,MAAM;AACjB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/index.js b/dist/cjs/index.js deleted file mode 100644 index 219e4e999e5..00000000000 --- a/dist/cjs/index.js +++ /dev/null @@ -1,270 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _plugin = require("./plugin.js"); -Object.keys(_plugin).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _plugin[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _plugin[key]; - } - }); -}); -var _rxDatabase = require("./rx-database.js"); -Object.keys(_rxDatabase).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _rxDatabase[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxDatabase[key]; - } - }); -}); -var _rxError = require("./rx-error.js"); -Object.keys(_rxError).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _rxError[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxError[key]; - } - }); -}); -var _rxDatabaseInternalStore = require("./rx-database-internal-store.js"); 
-Object.keys(_rxDatabaseInternalStore).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _rxDatabaseInternalStore[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxDatabaseInternalStore[key]; - } - }); -}); -var _overwritable = require("./overwritable.js"); -Object.keys(_overwritable).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _overwritable[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _overwritable[key]; - } - }); -}); -var _rxCollection = require("./rx-collection.js"); -Object.keys(_rxCollection).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _rxCollection[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxCollection[key]; - } - }); -}); -var _rxCollectionHelper = require("./rx-collection-helper.js"); -Object.keys(_rxCollectionHelper).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _rxCollectionHelper[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxCollectionHelper[key]; - } - }); -}); -var _rxDocument = require("./rx-document.js"); -Object.keys(_rxDocument).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _rxDocument[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxDocument[key]; - } - }); -}); -var _rxChangeEvent = require("./rx-change-event.js"); -Object.keys(_rxChangeEvent).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === 
_rxChangeEvent[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxChangeEvent[key]; - } - }); -}); -var _rxDocumentPrototypeMerge = require("./rx-document-prototype-merge.js"); -Object.keys(_rxDocumentPrototypeMerge).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _rxDocumentPrototypeMerge[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxDocumentPrototypeMerge[key]; - } - }); -}); -var _rxQuery = require("./rx-query.js"); -Object.keys(_rxQuery).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _rxQuery[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxQuery[key]; - } - }); -}); -var _rxQuerySingleResult = require("./rx-query-single-result.js"); -Object.keys(_rxQuerySingleResult).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _rxQuerySingleResult[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxQuerySingleResult[key]; - } - }); -}); -var _rxQueryHelper = require("./rx-query-helper.js"); -Object.keys(_rxQueryHelper).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _rxQueryHelper[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxQueryHelper[key]; - } - }); -}); -var _rxSchema = require("./rx-schema.js"); -Object.keys(_rxSchema).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _rxSchema[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxSchema[key]; - } - }); -}); -var 
_rxSchemaHelper = require("./rx-schema-helper.js"); -Object.keys(_rxSchemaHelper).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _rxSchemaHelper[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxSchemaHelper[key]; - } - }); -}); -var _rxStorageHelper = require("./rx-storage-helper.js"); -Object.keys(_rxStorageHelper).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _rxStorageHelper[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxStorageHelper[key]; - } - }); -}); -var _index = require("./replication-protocol/index.js"); -Object.keys(_index).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _index[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _index[key]; - } - }); -}); -var _rxStorageMultiinstance = require("./rx-storage-multiinstance.js"); -Object.keys(_rxStorageMultiinstance).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _rxStorageMultiinstance[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxStorageMultiinstance[key]; - } - }); -}); -var _customIndex = require("./custom-index.js"); -Object.keys(_customIndex).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _customIndex[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _customIndex[key]; - } - }); -}); -var _queryPlanner = require("./query-planner.js"); -Object.keys(_queryPlanner).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if 
(key in exports && exports[key] === _queryPlanner[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _queryPlanner[key]; - } - }); -}); -var _pluginHelpers = require("./plugin-helpers.js"); -Object.keys(_pluginHelpers).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _pluginHelpers[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _pluginHelpers[key]; - } - }); -}); -var _index2 = require("./plugins/utils/index.js"); -Object.keys(_index2).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _index2[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _index2[key]; - } - }); -}); -var _hooks = require("./hooks.js"); -Object.keys(_hooks).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _hooks[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _hooks[key]; - } - }); -}); -var _queryCache = require("./query-cache.js"); -Object.keys(_queryCache).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _queryCache[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _queryCache[key]; - } - }); -}); -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/index.js.map b/dist/cjs/index.js.map deleted file mode 100644 index 769f18278e0..00000000000 --- a/dist/cjs/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","names":["_plugin","require","Object","keys","forEach","key","exports","defineProperty","enumerable","get","_rxDatabase","_rxError","_rxDatabaseInternalStore","_overwritable","_rxCollection","_rxCollectionHelper","_rxDocument","_rxChangeEvent","_rxDocumentPrototypeMerge","_rxQuery","_rxQuerySingleResult","_rxQueryHelper","_rxSchema","_rxSchemaHelper","_rxStorageHelper","_index","_rxStorageMultiinstance","_customIndex","_queryPlanner","_pluginHelpers","_index2","_hooks","_queryCache"],"sources":["../../src/index.ts"],"sourcesContent":["/**\n * this is the main entry-point\n * for when the you call \"import from 'rxdb'\".\n */\n\nexport * from './plugin.ts';\nexport * from './rx-database.ts';\nexport * from './rx-error.ts';\nexport * from './rx-database-internal-store.ts';\nexport * from './overwritable.ts';\nexport * from './rx-collection.ts';\nexport * from './rx-collection-helper.ts';\nexport * from './rx-document.ts';\nexport * from './rx-change-event.ts';\nexport * from './rx-document-prototype-merge.ts';\nexport * from './rx-query.ts';\nexport * from './rx-query-single-result.ts';\nexport * from './rx-query-helper.ts';\nexport * from './rx-schema.ts';\nexport * from './rx-schema-helper.ts';\nexport * from './rx-storage-helper.ts';\nexport * from './replication-protocol/index.ts';\nexport * from './rx-storage-multiinstance.ts';\nexport * from './custom-index.ts';\nexport * from './query-planner.ts';\nexport * from './plugin-helpers.ts';\nexport * from './plugins/utils/index.ts';\nexport * from './hooks.ts';\nexport * from './query-cache.ts';\nexport type * from 
'./types/index.ts';\n"],"mappings":";;;;;AAKA,IAAAA,OAAA,GAAAC,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAH,OAAA,EAAAI,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAL,OAAA,CAAAK,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAT,OAAA,CAAAK,GAAA;IAAA;EAAA;AAAA;AACA,IAAAK,WAAA,GAAAT,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAO,WAAA,EAAAN,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAK,WAAA,CAAAL,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAC,WAAA,CAAAL,GAAA;IAAA;EAAA;AAAA;AACA,IAAAM,QAAA,GAAAV,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAQ,QAAA,EAAAP,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAM,QAAA,CAAAN,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAE,QAAA,CAAAN,GAAA;IAAA;EAAA;AAAA;AACA,IAAAO,wBAAA,GAAAX,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAS,wBAAA,EAAAR,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAO,wBAAA,CAAAP,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAG,wBAAA,CAAAP,GAAA;IAAA;EAAA;AAAA;AACA,IAAAQ,aAAA,GAAAZ,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAU,aAAA,EAAAT,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAQ,aAAA,CAAAR,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAI,aAAA,CAAAR,GAAA;IAAA;EAAA;AAAA;AACA,IAAAS,aAAA,GAAAb,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAW,aAAA,EAAAV,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAS,aAAA,CAAAT,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAK,aAAA,CAAAT,GAAA;IAAA;EAAA;AAAA;AACA,IAAAU,mBAAA,GAAAd,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAY,mBAAA,EAAAX,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD
,GAAA,MAAAU,mBAAA,CAAAV,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAM,mBAAA,CAAAV,GAAA;IAAA;EAAA;AAAA;AACA,IAAAW,WAAA,GAAAf,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAa,WAAA,EAAAZ,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAW,WAAA,CAAAX,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAO,WAAA,CAAAX,GAAA;IAAA;EAAA;AAAA;AACA,IAAAY,cAAA,GAAAhB,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAc,cAAA,EAAAb,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAY,cAAA,CAAAZ,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAQ,cAAA,CAAAZ,GAAA;IAAA;EAAA;AAAA;AACA,IAAAa,yBAAA,GAAAjB,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAe,yBAAA,EAAAd,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAa,yBAAA,CAAAb,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAS,yBAAA,CAAAb,GAAA;IAAA;EAAA;AAAA;AACA,IAAAc,QAAA,GAAAlB,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAgB,QAAA,EAAAf,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAc,QAAA,CAAAd,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAU,QAAA,CAAAd,GAAA;IAAA;EAAA;AAAA;AACA,IAAAe,oBAAA,GAAAnB,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAiB,oBAAA,EAAAhB,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAe,oBAAA,CAAAf,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAW,oBAAA,CAAAf,GAAA;IAAA;EAAA;AAAA;AACA,IAAAgB,cAAA,GAAApB,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAkB,cAAA,EAAAjB,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAgB,cAAA,CAAAhB,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAY,cAAA,CAAAhB,GAAA;IAAA;EAAA;AAAA;AACA,IAAAiB,SAAA,GAAArB,OAAA;
AAAAC,MAAA,CAAAC,IAAA,CAAAmB,SAAA,EAAAlB,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAiB,SAAA,CAAAjB,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAa,SAAA,CAAAjB,GAAA;IAAA;EAAA;AAAA;AACA,IAAAkB,eAAA,GAAAtB,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAoB,eAAA,EAAAnB,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAkB,eAAA,CAAAlB,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAc,eAAA,CAAAlB,GAAA;IAAA;EAAA;AAAA;AACA,IAAAmB,gBAAA,GAAAvB,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAqB,gBAAA,EAAApB,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAmB,gBAAA,CAAAnB,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAe,gBAAA,CAAAnB,GAAA;IAAA;EAAA;AAAA;AACA,IAAAoB,MAAA,GAAAxB,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAsB,MAAA,EAAArB,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAoB,MAAA,CAAApB,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAgB,MAAA,CAAApB,GAAA;IAAA;EAAA;AAAA;AACA,IAAAqB,uBAAA,GAAAzB,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAuB,uBAAA,EAAAtB,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAqB,uBAAA,CAAArB,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAiB,uBAAA,CAAArB,GAAA;IAAA;EAAA;AAAA;AACA,IAAAsB,YAAA,GAAA1B,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAwB,YAAA,EAAAvB,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAsB,YAAA,CAAAtB,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAkB,YAAA,CAAAtB,GAAA;IAAA;EAAA;AAAA;AACA,IAAAuB,aAAA,GAAA3B,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAyB,aAAA,EAAAxB,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAuB,aAAA,C
AAAvB,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAmB,aAAA,CAAAvB,GAAA;IAAA;EAAA;AAAA;AACA,IAAAwB,cAAA,GAAA5B,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAA0B,cAAA,EAAAzB,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAwB,cAAA,CAAAxB,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAoB,cAAA,CAAAxB,GAAA;IAAA;EAAA;AAAA;AACA,IAAAyB,OAAA,GAAA7B,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAA2B,OAAA,EAAA1B,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAyB,OAAA,CAAAzB,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAqB,OAAA,CAAAzB,GAAA;IAAA;EAAA;AAAA;AACA,IAAA0B,MAAA,GAAA9B,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAA4B,MAAA,EAAA3B,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAA0B,MAAA,CAAA1B,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAsB,MAAA,CAAA1B,GAAA;IAAA;EAAA;AAAA;AACA,IAAA2B,WAAA,GAAA/B,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAA6B,WAAA,EAAA5B,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAA2B,WAAA,CAAA3B,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAuB,WAAA,CAAA3B,GAAA;IAAA;EAAA;AAAA","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/overwritable.js b/dist/cjs/overwritable.js deleted file mode 100644 index 1dc2182f33c..00000000000 --- a/dist/cjs/overwritable.js +++ /dev/null @@ -1,39 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.overwritable = void 0; -/** - * functions that can or should be overwritten by plugins - * IMPORTANT: Do not import any big stuff from RxDB here! - * An 'overwritable' can be used inside WebWorkers for RxStorage only, - * and we do not want to have the full RxDB lib bundled in them. 
- */ - -var overwritable = exports.overwritable = { - /** - * if this method is overwritten with one - * that returns true, we do additional checks - * which help the developer but have bad performance - */ - isDevMode() { - return false; - }, - /** - * Deep freezes and object when in dev-mode. - * Deep-Freezing has the same performance as deep-cloning, so we only do that in dev-mode. - * Also, we can ensure the readonly state via typescript - * @link https://developer.mozilla.org/de/docs/Web/JavaScript/Reference/Global_Objects/Object/freeze - */ - deepFreezeWhenDevMode(obj) { - return obj; - }, - /** - * overwritten to map error-codes to text-messages - */ - tunnelErrorMessage(message) { - return "RxDB Error-Code " + message + ".\n Error messages are not included in RxDB core to reduce build size.\n "; - } -}; -//# sourceMappingURL=overwritable.js.map \ No newline at end of file diff --git a/dist/cjs/overwritable.js.map b/dist/cjs/overwritable.js.map deleted file mode 100644 index b76bc9b1e03..00000000000 --- a/dist/cjs/overwritable.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"overwritable.js","names":["overwritable","exports","isDevMode","deepFreezeWhenDevMode","obj","tunnelErrorMessage","message"],"sources":["../../src/overwritable.ts"],"sourcesContent":["/**\n * functions that can or should be overwritten by plugins\n * IMPORTANT: Do not import any big stuff from RxDB here!\n * An 'overwritable' can be used inside WebWorkers for RxStorage only,\n * and we do not want to have the full RxDB lib bundled in them.\n */\n\nimport type { DeepReadonly } from './types/util.d.ts';\n\nexport const overwritable = {\n /**\n * if this method is overwritten with one\n * that returns true, we do additional checks\n * which help the developer but have bad performance\n */\n isDevMode(): boolean {\n return false;\n },\n\n /**\n * Deep freezes and object when in dev-mode.\n * Deep-Freezing has the same performance as deep-cloning, so we only do that in dev-mode.\n * 
Also, we can ensure the readonly state via typescript\n * @link https://developer.mozilla.org/de/docs/Web/JavaScript/Reference/Global_Objects/Object/freeze\n */\n deepFreezeWhenDevMode(obj: T): DeepReadonly {\n return obj as any;\n },\n\n /**\n * overwritten to map error-codes to text-messages\n */\n tunnelErrorMessage(message: string): string {\n return `RxDB Error-Code ${message}.\n Error messages are not included in RxDB core to reduce build size.\n `;\n }\n};\n"],"mappings":";;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;;AAIO,IAAMA,YAAY,GAAAC,OAAA,CAAAD,YAAA,GAAG;EACxB;AACJ;AACA;AACA;AACA;EACIE,SAASA,CAAA,EAAY;IACjB,OAAO,KAAK;EAChB,CAAC;EAED;AACJ;AACA;AACA;AACA;AACA;EACIC,qBAAqBA,CAAIC,GAAM,EAAmB;IAC9C,OAAOA,GAAG;EACd,CAAC;EAED;AACJ;AACA;EACIC,kBAAkBA,CAACC,OAAe,EAAU;IACxC,4BAA0BA,OAAO;EAGrC;AACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugin-helpers.js b/dist/cjs/plugin-helpers.js deleted file mode 100644 index fe440605b8f..00000000000 --- a/dist/cjs/plugin-helpers.js +++ /dev/null @@ -1,255 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.wrapRxStorageInstance = wrapRxStorageInstance; -exports.wrappedValidateStorageFactory = wrappedValidateStorageFactory; -var _operators = require("rxjs/operators"); -var _rxSchemaHelper = require("./rx-schema-helper.js"); -var _index = require("./plugins/utils/index.js"); -var _rxjs = require("rxjs"); -/** - * Returns the validation errors. - * If document is fully valid, returns an empty array. - */ - -/** - * cache the validators by the schema string - * so we can reuse them when multiple collections have the same schema - * - * Notice: to make it easier and not dependent on a hash function, - * we use the plain json string. - */ -var VALIDATOR_CACHE_BY_VALIDATOR_KEY = new Map(); - -/** - * This factory is used in the validation plugins - * so that we can reuse the basic storage wrapping code. 
- */ -function wrappedValidateStorageFactory( -/** - * Returns a method that can be used to validate - * documents and throws when the document is not valid. - */ -getValidator, -/** - * A string to identify the validation library. - */ -validatorKey) { - var VALIDATOR_CACHE = (0, _index.getFromMapOrCreate)(VALIDATOR_CACHE_BY_VALIDATOR_KEY, validatorKey, () => new Map()); - function initValidator(schema) { - return (0, _index.getFromMapOrCreate)(VALIDATOR_CACHE, JSON.stringify(schema), () => getValidator(schema)); - } - return args => { - return Object.assign({}, args.storage, { - async createStorageInstance(params) { - var instance = await args.storage.createStorageInstance(params); - var primaryPath = (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(params.schema.primaryKey); - - /** - * Lazy initialize the validator - * to save initial page load performance. - * Some libraries take really long to initialize the validator - * from the schema. - */ - var validatorCached; - (0, _index.requestIdleCallbackIfAvailable)(() => validatorCached = initValidator(params.schema)); - var oldBulkWrite = instance.bulkWrite.bind(instance); - instance.bulkWrite = (documentWrites, context) => { - if (!validatorCached) { - validatorCached = initValidator(params.schema); - } - var errors = []; - var continueWrites = []; - documentWrites.forEach(row => { - var documentId = row.document[primaryPath]; - var validationErrors = validatorCached(row.document); - if (validationErrors.length > 0) { - errors.push({ - status: 422, - isError: true, - documentId, - writeRow: row, - validationErrors - }); - } else { - continueWrites.push(row); - } - }); - var writePromise = continueWrites.length > 0 ? 
oldBulkWrite(continueWrites, context) : Promise.resolve({ - error: [], - success: [] - }); - return writePromise.then(writeResult => { - errors.forEach(validationError => { - writeResult.error.push(validationError); - }); - return writeResult; - }); - }; - return instance; - } - }); - }; -} - -/** - * Used in plugins to easily modify all in- and outgoing - * data of that storage instance. - */ -function wrapRxStorageInstance(originalSchema, instance, modifyToStorage, modifyFromStorage, modifyAttachmentFromStorage = v => v) { - async function toStorage(docData) { - if (!docData) { - return docData; - } - return await modifyToStorage(docData); - } - async function fromStorage(docData) { - if (!docData) { - return docData; - } - return await modifyFromStorage(docData); - } - async function errorFromStorage(error) { - var ret = (0, _index.flatClone)(error); - ret.writeRow = (0, _index.flatClone)(ret.writeRow); - if (ret.documentInDb) { - ret.documentInDb = await fromStorage(ret.documentInDb); - } - if (ret.writeRow.previous) { - ret.writeRow.previous = await fromStorage(ret.writeRow.previous); - } - ret.writeRow.document = await fromStorage(ret.writeRow.document); - return ret; - } - var processingChangesCount$ = new _rxjs.BehaviorSubject(0); - var wrappedInstance = { - databaseName: instance.databaseName, - internals: instance.internals, - cleanup: instance.cleanup.bind(instance), - options: instance.options, - close: instance.close.bind(instance), - schema: originalSchema, - collectionName: instance.collectionName, - count: instance.count.bind(instance), - remove: instance.remove.bind(instance), - originalStorageInstance: instance, - bulkWrite: async (documentWrites, context) => { - var useRows = []; - await Promise.all(documentWrites.map(async row => { - var [previous, document] = await Promise.all([row.previous ? 
toStorage(row.previous) : undefined, toStorage(row.document)]); - useRows.push({ - previous, - document - }); - })); - var writeResult = await instance.bulkWrite(useRows, context); - var ret = { - success: [], - error: [] - }; - var promises = []; - writeResult.success.forEach(v => { - promises.push(fromStorage(v).then(v2 => ret.success.push(v2))); - }); - writeResult.error.forEach(error => { - promises.push(errorFromStorage(error).then(err => ret.error.push(err))); - }); - await Promise.all(promises); - - /** - * By definition, all change events must be emitted - * BEFORE the write call resolves. - * To ensure that even when the modifiers are async, - * we wait here until the processing queue is empty. - */ - await (0, _rxjs.firstValueFrom)(processingChangesCount$.pipe((0, _operators.filter)(v => v === 0))); - return ret; - }, - query: preparedQuery => { - return instance.query(preparedQuery).then(queryResult => { - return Promise.all(queryResult.documents.map(doc => fromStorage(doc))); - }).then(documents => ({ - documents: documents - })); - }, - getAttachmentData: async (documentId, attachmentId, digest) => { - var data = await instance.getAttachmentData(documentId, attachmentId, digest); - data = await modifyAttachmentFromStorage(data); - return data; - }, - findDocumentsById: (ids, deleted) => { - return instance.findDocumentsById(ids, deleted).then(async findResult => { - var ret = []; - await Promise.all(findResult.map(async doc => { - ret.push(await fromStorage(doc)); - })); - return ret; - }); - }, - getChangedDocumentsSince: !instance.getChangedDocumentsSince ? 
undefined : (limit, checkpoint) => { - return instance.getChangedDocumentsSince(limit, checkpoint).then(async result => { - return { - checkpoint: result.checkpoint, - documents: await Promise.all(result.documents.map(d => fromStorage(d))) - }; - }); - }, - changeStream: () => { - return instance.changeStream().pipe((0, _operators.tap)(() => processingChangesCount$.next(processingChangesCount$.getValue() + 1)), (0, _operators.mergeMap)(async eventBulk => { - var useEvents = await Promise.all(eventBulk.events.map(async event => { - var [documentData, previousDocumentData] = await Promise.all([fromStorage(event.documentData), fromStorage(event.previousDocumentData)]); - var ev = { - operation: event.operation, - documentId: event.documentId, - documentData: documentData, - previousDocumentData: previousDocumentData, - isLocal: false - }; - return ev; - })); - var ret = { - id: eventBulk.id, - events: useEvents, - checkpoint: eventBulk.checkpoint, - context: eventBulk.context, - startTime: eventBulk.startTime, - endTime: eventBulk.endTime - }; - return ret; - }), (0, _operators.tap)(() => processingChangesCount$.next(processingChangesCount$.getValue() - 1))); - }, - conflictResultionTasks: () => { - return instance.conflictResultionTasks().pipe((0, _operators.mergeMap)(async task => { - var assumedMasterState = await fromStorage(task.input.assumedMasterState); - var newDocumentState = await fromStorage(task.input.newDocumentState); - var realMasterState = await fromStorage(task.input.realMasterState); - return { - id: task.id, - context: task.context, - input: { - assumedMasterState, - realMasterState, - newDocumentState - } - }; - })); - }, - resolveConflictResultionTask: taskSolution => { - if (taskSolution.output.isEqual) { - return instance.resolveConflictResultionTask(taskSolution); - } - var useSolution = { - id: taskSolution.id, - output: { - isEqual: false, - documentData: taskSolution.output.documentData - } - }; - return 
instance.resolveConflictResultionTask(useSolution); - } - }; - return wrappedInstance; -} -//# sourceMappingURL=plugin-helpers.js.map \ No newline at end of file diff --git a/dist/cjs/plugin-helpers.js.map b/dist/cjs/plugin-helpers.js.map deleted file mode 100644 index 9c0a703962d..00000000000 --- a/dist/cjs/plugin-helpers.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"plugin-helpers.js","names":["_operators","require","_rxSchemaHelper","_index","_rxjs","VALIDATOR_CACHE_BY_VALIDATOR_KEY","Map","wrappedValidateStorageFactory","getValidator","validatorKey","VALIDATOR_CACHE","getFromMapOrCreate","initValidator","schema","JSON","stringify","args","Object","assign","storage","createStorageInstance","params","instance","primaryPath","getPrimaryFieldOfPrimaryKey","primaryKey","validatorCached","requestIdleCallbackIfAvailable","oldBulkWrite","bulkWrite","bind","documentWrites","context","errors","continueWrites","forEach","row","documentId","document","validationErrors","length","push","status","isError","writeRow","writePromise","Promise","resolve","error","success","then","writeResult","validationError","wrapRxStorageInstance","originalSchema","modifyToStorage","modifyFromStorage","modifyAttachmentFromStorage","v","toStorage","docData","fromStorage","errorFromStorage","ret","flatClone","documentInDb","previous","processingChangesCount$","BehaviorSubject","wrappedInstance","databaseName","internals","cleanup","options","close","collectionName","count","remove","originalStorageInstance","useRows","all","map","undefined","promises","v2","err","firstValueFrom","pipe","filter","query","preparedQuery","queryResult","documents","doc","getAttachmentData","attachmentId","digest","data","findDocumentsById","ids","deleted","findResult","getChangedDocumentsSince","limit","checkpoint","result","d","changeStream","tap","next","getValue","mergeMap","eventBulk","useEvents","events","event","documentData","previousDocumentData","ev","operation","isLocal","id","startTime","endTim
e","conflictResultionTasks","task","assumedMasterState","input","newDocumentState","realMasterState","resolveConflictResultionTask","taskSolution","output","isEqual","useSolution"],"sources":["../../src/plugin-helpers.ts"],"sourcesContent":["import { filter, mergeMap, tap } from 'rxjs/operators';\nimport { getPrimaryFieldOfPrimaryKey } from './rx-schema-helper.ts';\nimport { WrappedRxStorageInstance } from './rx-storage-helper.ts';\nimport type {\n BulkWriteRow,\n EventBulk,\n RxChangeEvent,\n RxDocumentData,\n RxDocumentWriteData,\n RxJsonSchema,\n RxStorage,\n RxStorageWriteError,\n RxStorageBulkWriteResponse,\n RxStorageChangeEvent,\n RxStorageInstance,\n RxStorageInstanceCreationParams,\n RxValidationError,\n RxStorageWriteErrorConflict,\n MaybePromise\n} from './types/index.d.ts';\nimport {\n flatClone,\n getFromMapOrCreate,\n requestIdleCallbackIfAvailable\n} from './plugins/utils/index.ts';\nimport { BehaviorSubject, firstValueFrom } from 'rxjs';\n\n\ntype WrappedStorageFunction = (\n args: {\n storage: RxStorage;\n }\n) => RxStorage;\n\n/**\n * Returns the validation errors.\n * If document is fully valid, returns an empty array.\n */\ntype ValidatorFunction = (docData: RxDocumentData) => RxValidationError[];\n\n/**\n * cache the validators by the schema string\n * so we can reuse them when multiple collections have the same schema\n *\n * Notice: to make it easier and not dependent on a hash function,\n * we use the plain json string.\n */\nconst VALIDATOR_CACHE_BY_VALIDATOR_KEY: Map> = new Map();\n\n/**\n * This factory is used in the validation plugins\n * so that we can reuse the basic storage wrapping code.\n */\nexport function wrappedValidateStorageFactory(\n /**\n * Returns a method that can be used to validate\n * documents and throws when the document is not valid.\n */\n getValidator: (schema: RxJsonSchema) => ValidatorFunction,\n /**\n * A string to identify the validation library.\n */\n validatorKey: string\n): WrappedStorageFunction {\n const 
VALIDATOR_CACHE = getFromMapOrCreate(\n VALIDATOR_CACHE_BY_VALIDATOR_KEY,\n validatorKey,\n () => new Map()\n );\n\n function initValidator(\n schema: RxJsonSchema\n ): ValidatorFunction {\n return getFromMapOrCreate(\n VALIDATOR_CACHE,\n JSON.stringify(schema),\n () => getValidator(schema)\n );\n }\n\n return (args) => {\n return Object.assign(\n {},\n args.storage,\n {\n async createStorageInstance(\n params: RxStorageInstanceCreationParams\n ) {\n const instance = await args.storage.createStorageInstance(params);\n const primaryPath = getPrimaryFieldOfPrimaryKey(params.schema.primaryKey);\n\n /**\n * Lazy initialize the validator\n * to save initial page load performance.\n * Some libraries take really long to initialize the validator\n * from the schema.\n */\n let validatorCached: ValidatorFunction;\n requestIdleCallbackIfAvailable(() => validatorCached = initValidator(params.schema));\n\n const oldBulkWrite = instance.bulkWrite.bind(instance);\n instance.bulkWrite = (\n documentWrites: BulkWriteRow[],\n context: string\n ) => {\n if (!validatorCached) {\n validatorCached = initValidator(params.schema);\n }\n const errors: RxStorageWriteError[] = [];\n const continueWrites: typeof documentWrites = [];\n documentWrites.forEach(row => {\n const documentId: string = row.document[primaryPath] as any;\n const validationErrors = validatorCached(row.document);\n if (validationErrors.length > 0) {\n errors.push({\n status: 422,\n isError: true,\n documentId,\n writeRow: row,\n validationErrors\n });\n } else {\n continueWrites.push(row);\n }\n });\n const writePromise: Promise> = continueWrites.length > 0 ?\n oldBulkWrite(continueWrites, context) :\n Promise.resolve({ error: [], success: [] });\n return writePromise.then(writeResult => {\n errors.forEach(validationError => {\n writeResult.error.push(validationError);\n });\n return writeResult;\n });\n };\n\n return instance;\n }\n }\n );\n };\n\n}\n\n\n\n/**\n * Used in plugins to easily modify all in- and outgoing\n 
* data of that storage instance.\n */\nexport function wrapRxStorageInstance(\n originalSchema: RxJsonSchema>,\n instance: RxStorageInstance,\n modifyToStorage: (docData: RxDocumentWriteData) => MaybePromise>,\n modifyFromStorage: (docData: RxDocumentData) => MaybePromise>,\n modifyAttachmentFromStorage: (attachmentData: string) => MaybePromise = (v) => v\n): WrappedRxStorageInstance {\n async function toStorage(docData: RxDocumentWriteData): Promise> {\n if (!docData) {\n return docData;\n }\n return await modifyToStorage(docData);\n }\n async function fromStorage(docData: RxDocumentData | null): Promise> {\n if (!docData) {\n return docData;\n }\n return await modifyFromStorage(docData);\n }\n async function errorFromStorage(\n error: RxStorageWriteError\n ): Promise> {\n const ret = flatClone(error);\n ret.writeRow = flatClone(ret.writeRow);\n if ((ret as RxStorageWriteErrorConflict).documentInDb) {\n (ret as RxStorageWriteErrorConflict).documentInDb = await fromStorage((ret as RxStorageWriteErrorConflict).documentInDb);\n }\n if (ret.writeRow.previous) {\n ret.writeRow.previous = await fromStorage(ret.writeRow.previous);\n }\n ret.writeRow.document = await fromStorage(ret.writeRow.document);\n return ret;\n }\n\n\n const processingChangesCount$ = new BehaviorSubject(0);\n\n const wrappedInstance: WrappedRxStorageInstance = {\n databaseName: instance.databaseName,\n internals: instance.internals,\n cleanup: instance.cleanup.bind(instance),\n options: instance.options,\n close: instance.close.bind(instance),\n schema: originalSchema,\n collectionName: instance.collectionName,\n count: instance.count.bind(instance),\n remove: instance.remove.bind(instance),\n originalStorageInstance: instance,\n bulkWrite: async (\n documentWrites: BulkWriteRow[],\n context: string\n ) => {\n const useRows: BulkWriteRow[] = [];\n await Promise.all(\n documentWrites.map(async (row) => {\n const [previous, document] = await Promise.all([\n row.previous ? 
toStorage(row.previous) : undefined,\n toStorage(row.document)\n ]);\n useRows.push({ previous, document });\n })\n );\n\n const writeResult = await instance.bulkWrite(useRows, context);\n const ret: RxStorageBulkWriteResponse = {\n success: [],\n error: []\n };\n const promises: Promise[] = [];\n writeResult.success.forEach(v => {\n promises.push(\n fromStorage(v).then(v2 => ret.success.push(v2))\n );\n });\n writeResult.error.forEach(error => {\n promises.push(\n errorFromStorage(error).then(err => ret.error.push(err))\n );\n });\n await Promise.all(promises);\n\n /**\n * By definition, all change events must be emitted\n * BEFORE the write call resolves.\n * To ensure that even when the modifiers are async,\n * we wait here until the processing queue is empty.\n */\n await firstValueFrom(\n processingChangesCount$.pipe(\n filter(v => v === 0)\n )\n );\n return ret;\n },\n query: (preparedQuery) => {\n return instance.query(preparedQuery)\n .then(queryResult => {\n return Promise.all(queryResult.documents.map(doc => fromStorage(doc)));\n })\n .then(documents => ({ documents: documents as any }));\n },\n getAttachmentData: async (\n documentId: string,\n attachmentId: string,\n digest: string\n ) => {\n let data = await instance.getAttachmentData(documentId, attachmentId, digest);\n data = await modifyAttachmentFromStorage(data);\n return data;\n },\n findDocumentsById: (ids, deleted) => {\n return instance.findDocumentsById(ids, deleted)\n .then(async (findResult) => {\n const ret: RxDocumentData[] = [];\n await Promise.all(\n findResult\n .map(async (doc) => {\n ret.push(await fromStorage(doc));\n })\n );\n return ret;\n });\n },\n getChangedDocumentsSince: !instance.getChangedDocumentsSince ? 
undefined : (limit, checkpoint) => {\n return ((instance as any).getChangedDocumentsSince)(limit, checkpoint)\n .then(async (result: any) => {\n return {\n checkpoint: result.checkpoint,\n documents: await Promise.all(\n result.documents.map((d: any) => fromStorage(d))\n )\n };\n });\n },\n changeStream: () => {\n return instance.changeStream().pipe(\n tap(() => processingChangesCount$.next(processingChangesCount$.getValue() + 1)),\n mergeMap(async (eventBulk) => {\n const useEvents = await Promise.all(\n eventBulk.events.map(async (event) => {\n const [\n documentData,\n previousDocumentData\n ] = await Promise.all([\n fromStorage(event.documentData),\n fromStorage(event.previousDocumentData)\n ]);\n const ev: RxChangeEvent = {\n operation: event.operation,\n documentId: event.documentId,\n documentData: documentData as any,\n previousDocumentData: previousDocumentData as any,\n isLocal: false\n };\n return ev;\n })\n );\n const ret: EventBulk>, any> = {\n id: eventBulk.id,\n events: useEvents,\n checkpoint: eventBulk.checkpoint,\n context: eventBulk.context,\n startTime: eventBulk.startTime,\n endTime: eventBulk.endTime\n };\n return ret;\n }),\n tap(() => processingChangesCount$.next(processingChangesCount$.getValue() - 1))\n );\n },\n conflictResultionTasks: () => {\n return instance.conflictResultionTasks().pipe(\n mergeMap(async (task) => {\n const assumedMasterState = await fromStorage(task.input.assumedMasterState);\n const newDocumentState = await fromStorage(task.input.newDocumentState);\n const realMasterState = await fromStorage(task.input.realMasterState);\n return {\n id: task.id,\n context: task.context,\n input: {\n assumedMasterState,\n realMasterState,\n newDocumentState\n }\n };\n })\n );\n },\n resolveConflictResultionTask: (taskSolution) => {\n if (taskSolution.output.isEqual) {\n return instance.resolveConflictResultionTask(taskSolution);\n }\n const useSolution = {\n id: taskSolution.id,\n output: {\n isEqual: false,\n documentData: 
taskSolution.output.documentData\n }\n };\n return instance.resolveConflictResultionTask(useSolution);\n }\n };\n\n return wrappedInstance;\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,UAAA,GAAAC,OAAA;AACA,IAAAC,eAAA,GAAAD,OAAA;AAmBA,IAAAE,MAAA,GAAAF,OAAA;AAKA,IAAAG,KAAA,GAAAH,OAAA;AASA;AACA;AACA;AACA;;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAMI,gCAA6E,GAAG,IAAIC,GAAG,CAAC,CAAC;;AAE/F;AACA;AACA;AACA;AACO,SAASC,6BAA6BA;AACzC;AACJ;AACA;AACA;AACIC,YAA8D;AAC9D;AACJ;AACA;AACIC,YAAoB,EACE;EACtB,IAAMC,eAAe,GAAG,IAAAC,yBAAkB,EACtCN,gCAAgC,EAChCI,YAAY,EACZ,MAAM,IAAIH,GAAG,CAAC,CAClB,CAAC;EAED,SAASM,aAAaA,CAClBC,MAAyB,EACR;IACjB,OAAO,IAAAF,yBAAkB,EACrBD,eAAe,EACfI,IAAI,CAACC,SAAS,CAACF,MAAM,CAAC,EACtB,MAAML,YAAY,CAACK,MAAM,CAC7B,CAAC;EACL;EAEA,OAAQG,IAAI,IAAK;IACb,OAAOC,MAAM,CAACC,MAAM,CAChB,CAAC,CAAC,EACFF,IAAI,CAACG,OAAO,EACZ;MACI,MAAMC,qBAAqBA,CACvBC,MAAuD,EACzD;QACE,IAAMC,QAAQ,GAAG,MAAMN,IAAI,CAACG,OAAO,CAACC,qBAAqB,CAACC,MAAM,CAAC;QACjE,IAAME,WAAW,GAAG,IAAAC,2CAA2B,EAACH,MAAM,CAACR,MAAM,CAACY,UAAU,CAAC;;QAEzE;AACpB;AACA;AACA;AACA;AACA;QACoB,IAAIC,eAAkC;QACtC,IAAAC,qCAA8B,EAAC,MAAMD,eAAe,GAAGd,aAAa,CAACS,MAAM,CAACR,MAAM,CAAC,CAAC;QAEpF,IAAMe,YAAY,GAAGN,QAAQ,CAACO,SAAS,CAACC,IAAI,CAACR,QAAQ,CAAC;QACtDA,QAAQ,CAACO,SAAS,GAAG,CACjBE,cAAyC,EACzCC,OAAe,KACd;UACD,IAAI,CAACN,eAAe,EAAE;YAClBA,eAAe,GAAGd,aAAa,CAACS,MAAM,CAACR,MAAM,CAAC;UAClD;UACA,IAAMoB,MAAwC,GAAG,EAAE;UACnD,IAAMC,cAAqC,GAAG,EAAE;UAChDH,cAAc,CAACI,OAAO,CAACC,GAAG,IAAI;YAC1B,IAAMC,UAAkB,GAAGD,GAAG,CAACE,QAAQ,CAACf,WAAW,CAAQ;YAC3D,IAAMgB,gBAAgB,GAAGb,eAAe,CAACU,GAAG,CAACE,QAAQ,CAAC;YACtD,IAAIC,gBAAgB,CAACC,MAAM,GAAG,CAAC,EAAE;cAC7BP,MAAM,CAACQ,IAAI,CAAC;gBACRC,MAAM,EAAE,GAAG;gBACXC,OAAO,EAAE,IAAI;gBACbN,UAAU;gBACVO,QAAQ,EAAER,GAAG;gBACbG;cACJ,CAAC,CAAC;YACN,CAAC,MAAM;cACHL,cAAc,CAACO,IAAI,CAACL,GAAG,CAAC;YAC5B;UACJ,CAAC,CAAC;UACF,IAAMS,YAA4D,GAAGX,cAAc,CAACM,MAAM,GAAG,CAAC,GAC1FZ,YAAY,CAACM,cAAc,EAAEF,OAAO,CAAC,GACrCc,OAAO,CAACC,OAAO,CAAC;YAAEC,KAAK,EAAE,EAAE;YAAEC,OAAO,EAAE;UAAG,CAAC,CAAC;UAC/C,OAAOJ,YAAY,CAACK,IAAI,CAACC
,WAAW,IAAI;YACpClB,MAAM,CAACE,OAAO,CAACiB,eAAe,IAAI;cAC9BD,WAAW,CAACH,KAAK,CAACP,IAAI,CAACW,eAAe,CAAC;YAC3C,CAAC,CAAC;YACF,OAAOD,WAAW;UACtB,CAAC,CAAC;QACN,CAAC;QAED,OAAO7B,QAAQ;MACnB;IACJ,CACJ,CAAC;EACL,CAAC;AAEL;;AAIA;AACA;AACA;AACA;AACO,SAAS+B,qBAAqBA,CACjCC,cAAuD,EACvDhC,QAAgD,EAChDiC,eAA+F,EAC/FC,iBAA4F,EAC5FC,2BAA6E,GAAIC,CAAC,IAAKA,CAAC,EAC3C;EAC7C,eAAeC,SAASA,CAACC,OAAuC,EAAgC;IAC5F,IAAI,CAACA,OAAO,EAAE;MACV,OAAOA,OAAO;IAClB;IACA,OAAO,MAAML,eAAe,CAACK,OAAO,CAAC;EACzC;EACA,eAAeC,WAAWA,CAACD,OAAmC,EAAsC;IAChG,IAAI,CAACA,OAAO,EAAE;MACV,OAAOA,OAAO;IAClB;IACA,OAAO,MAAMJ,iBAAiB,CAACI,OAAO,CAAC;EAC3C;EACA,eAAeE,gBAAgBA,CAC3Bd,KAA+B,EACQ;IACvC,IAAMe,GAAG,GAAG,IAAAC,gBAAS,EAAChB,KAAK,CAAC;IAC5Be,GAAG,CAACnB,QAAQ,GAAG,IAAAoB,gBAAS,EAACD,GAAG,CAACnB,QAAQ,CAAC;IACtC,IAAKmB,GAAG,CAAsCE,YAAY,EAAE;MACvDF,GAAG,CAAsCE,YAAY,GAAG,MAAMJ,WAAW,CAAEE,GAAG,CAAsCE,YAAY,CAAC;IACtI;IACA,IAAIF,GAAG,CAACnB,QAAQ,CAACsB,QAAQ,EAAE;MACvBH,GAAG,CAACnB,QAAQ,CAACsB,QAAQ,GAAG,MAAML,WAAW,CAACE,GAAG,CAACnB,QAAQ,CAACsB,QAAQ,CAAC;IACpE;IACAH,GAAG,CAACnB,QAAQ,CAACN,QAAQ,GAAG,MAAMuB,WAAW,CAACE,GAAG,CAACnB,QAAQ,CAACN,QAAQ,CAAC;IAChE,OAAOyB,GAAG;EACd;EAGA,IAAMI,uBAAuB,GAAG,IAAIC,qBAAe,CAAC,CAAC,CAAC;EAEtD,IAAMC,eAA8D,GAAG;IACnEC,YAAY,EAAEhD,QAAQ,CAACgD,YAAY;IACnCC,SAAS,EAAEjD,QAAQ,CAACiD,SAAS;IAC7BC,OAAO,EAAElD,QAAQ,CAACkD,OAAO,CAAC1C,IAAI,CAACR,QAAQ,CAAC;IACxCmD,OAAO,EAAEnD,QAAQ,CAACmD,OAAO;IACzBC,KAAK,EAAEpD,QAAQ,CAACoD,KAAK,CAAC5C,IAAI,CAACR,QAAQ,CAAC;IACpCT,MAAM,EAAEyC,cAAc;IACtBqB,cAAc,EAAErD,QAAQ,CAACqD,cAAc;IACvCC,KAAK,EAAEtD,QAAQ,CAACsD,KAAK,CAAC9C,IAAI,CAACR,QAAQ,CAAC;IACpCuD,MAAM,EAAEvD,QAAQ,CAACuD,MAAM,CAAC/C,IAAI,CAACR,QAAQ,CAAC;IACtCwD,uBAAuB,EAAExD,QAAQ;IACjCO,SAAS,EAAE,MAAAA,CACPE,cAAyC,EACzCC,OAAe,KACd;MACD,IAAM+C,OAA4B,GAAG,EAAE;MACvC,MAAMjC,OAAO,CAACkC,GAAG,CACbjD,cAAc,CAACkD,GAAG,CAAC,MAAO7C,GAAG,IAAK;QAC9B,IAAM,CAAC8B,QAAQ,EAAE5B,QAAQ,CAAC,GAAG,MAAMQ,OAAO,CAACkC,GAAG,CAAC,CAC3C5C,GAAG,CAAC8B,QAAQ,GAAGP,SAAS,CAACvB,GAAG,CAAC8B,QAAQ,CAAC,GAAGgB,SAAS,EAClDvB,SAAS,CAACvB,GAAG,CAACE,QAAQ,
CAAC,CAC1B,CAAC;QACFyC,OAAO,CAACtC,IAAI,CAAC;UAAEyB,QAAQ;UAAE5B;QAAS,CAAC,CAAC;MACxC,CAAC,CACL,CAAC;MAED,IAAMa,WAAW,GAAG,MAAM7B,QAAQ,CAACO,SAAS,CAACkD,OAAO,EAAE/C,OAAO,CAAC;MAC9D,IAAM+B,GAA0C,GAAG;QAC/Cd,OAAO,EAAE,EAAE;QACXD,KAAK,EAAE;MACX,CAAC;MACD,IAAMmC,QAAwB,GAAG,EAAE;MACnChC,WAAW,CAACF,OAAO,CAACd,OAAO,CAACuB,CAAC,IAAI;QAC7ByB,QAAQ,CAAC1C,IAAI,CACToB,WAAW,CAACH,CAAC,CAAC,CAACR,IAAI,CAACkC,EAAE,IAAIrB,GAAG,CAACd,OAAO,CAACR,IAAI,CAAC2C,EAAE,CAAC,CAClD,CAAC;MACL,CAAC,CAAC;MACFjC,WAAW,CAACH,KAAK,CAACb,OAAO,CAACa,KAAK,IAAI;QAC/BmC,QAAQ,CAAC1C,IAAI,CACTqB,gBAAgB,CAACd,KAAK,CAAC,CAACE,IAAI,CAACmC,GAAG,IAAItB,GAAG,CAACf,KAAK,CAACP,IAAI,CAAC4C,GAAG,CAAC,CAC3D,CAAC;MACL,CAAC,CAAC;MACF,MAAMvC,OAAO,CAACkC,GAAG,CAACG,QAAQ,CAAC;;MAE3B;AACZ;AACA;AACA;AACA;AACA;MACY,MAAM,IAAAG,oBAAc,EAChBnB,uBAAuB,CAACoB,IAAI,CACxB,IAAAC,iBAAM,EAAC9B,CAAC,IAAIA,CAAC,KAAK,CAAC,CACvB,CACJ,CAAC;MACD,OAAOK,GAAG;IACd,CAAC;IACD0B,KAAK,EAAGC,aAAa,IAAK;MACtB,OAAOpE,QAAQ,CAACmE,KAAK,CAACC,aAAa,CAAC,CAC/BxC,IAAI,CAACyC,WAAW,IAAI;QACjB,OAAO7C,OAAO,CAACkC,GAAG,CAACW,WAAW,CAACC,SAAS,CAACX,GAAG,CAACY,GAAG,IAAIhC,WAAW,CAACgC,GAAG,CAAC,CAAC,CAAC;MAC1E,CAAC,CAAC,CACD3C,IAAI,CAAC0C,SAAS,KAAK;QAAEA,SAAS,EAAEA;MAAiB,CAAC,CAAC,CAAC;IAC7D,CAAC;IACDE,iBAAiB,EAAE,MAAAA,CACfzD,UAAkB,EAClB0D,YAAoB,EACpBC,MAAc,KACb;MACD,IAAIC,IAAI,GAAG,MAAM3E,QAAQ,CAACwE,iBAAiB,CAACzD,UAAU,EAAE0D,YAAY,EAAEC,MAAM,CAAC;MAC7EC,IAAI,GAAG,MAAMxC,2BAA2B,CAACwC,IAAI,CAAC;MAC9C,OAAOA,IAAI;IACf,CAAC;IACDC,iBAAiB,EAAEA,CAACC,GAAG,EAAEC,OAAO,KAAK;MACjC,OAAO9E,QAAQ,CAAC4E,iBAAiB,CAACC,GAAG,EAAEC,OAAO,CAAC,CAC1ClD,IAAI,CAAC,MAAOmD,UAAU,IAAK;QACxB,IAAMtC,GAAgC,GAAG,EAAE;QAC3C,MAAMjB,OAAO,CAACkC,GAAG,CACbqB,UAAU,CACLpB,GAAG,CAAC,MAAOY,GAAG,IAAK;UAChB9B,GAAG,CAACtB,IAAI,CAAC,MAAMoB,WAAW,CAACgC,GAAG,CAAC,CAAC;QACpC,CAAC,CACT,CAAC;QACD,OAAO9B,GAAG;MACd,CAAC,CAAC;IACV,CAAC;IACDuC,wBAAwB,EAAE,CAAChF,QAAQ,CAACgF,wBAAwB,GAAGpB,SAAS,GAAG,CAACqB,KAAK,EAAEC,UAAU,KAAK;MAC9F,OAASlF,QAAQ,CAASgF,wBAAwB,CAAEC,KAAK,EAAEC,UAAU,CAAC,CACjEtD,IAAI,CAAC,MAAOuD,MAAW,IAAK;QA
CzB,OAAO;UACHD,UAAU,EAAEC,MAAM,CAACD,UAAU;UAC7BZ,SAAS,EAAE,MAAM9C,OAAO,CAACkC,GAAG,CACxByB,MAAM,CAACb,SAAS,CAACX,GAAG,CAAEyB,CAAM,IAAK7C,WAAW,CAAC6C,CAAC,CAAC,CACnD;QACJ,CAAC;MACL,CAAC,CAAC;IACV,CAAC;IACDC,YAAY,EAAEA,CAAA,KAAM;MAChB,OAAOrF,QAAQ,CAACqF,YAAY,CAAC,CAAC,CAACpB,IAAI,CAC/B,IAAAqB,cAAG,EAAC,MAAMzC,uBAAuB,CAAC0C,IAAI,CAAC1C,uBAAuB,CAAC2C,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAC/E,IAAAC,mBAAQ,EAAC,MAAOC,SAAS,IAAK;QAC1B,IAAMC,SAAS,GAAG,MAAMnE,OAAO,CAACkC,GAAG,CAC/BgC,SAAS,CAACE,MAAM,CAACjC,GAAG,CAAC,MAAOkC,KAAK,IAAK;UAClC,IAAM,CACFC,YAAY,EACZC,oBAAoB,CACvB,GAAG,MAAMvE,OAAO,CAACkC,GAAG,CAAC,CAClBnB,WAAW,CAACsD,KAAK,CAACC,YAAY,CAAC,EAC/BvD,WAAW,CAACsD,KAAK,CAACE,oBAAoB,CAAC,CAC1C,CAAC;UACF,IAAMC,EAA4B,GAAG;YACjCC,SAAS,EAAEJ,KAAK,CAACI,SAAS;YAC1BlF,UAAU,EAAE8E,KAAK,CAAC9E,UAAU;YAC5B+E,YAAY,EAAEA,YAAmB;YACjCC,oBAAoB,EAAEA,oBAA2B;YACjDG,OAAO,EAAE;UACb,CAAC;UACD,OAAOF,EAAE;QACb,CAAC,CACL,CAAC;QACD,IAAMvD,GAAoE,GAAG;UACzE0D,EAAE,EAAET,SAAS,CAACS,EAAE;UAChBP,MAAM,EAAED,SAAS;UACjBT,UAAU,EAAEQ,SAAS,CAACR,UAAU;UAChCxE,OAAO,EAAEgF,SAAS,CAAChF,OAAO;UAC1B0F,SAAS,EAAEV,SAAS,CAACU,SAAS;UAC9BC,OAAO,EAAEX,SAAS,CAACW;QACvB,CAAC;QACD,OAAO5D,GAAG;MACd,CAAC,CAAC,EACF,IAAA6C,cAAG,EAAC,MAAMzC,uBAAuB,CAAC0C,IAAI,CAAC1C,uBAAuB,CAAC2C,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC,CAClF,CAAC;IACL,CAAC;IACDc,sBAAsB,EAAEA,CAAA,KAAM;MAC1B,OAAOtG,QAAQ,CAACsG,sBAAsB,CAAC,CAAC,CAACrC,IAAI,CACzC,IAAAwB,mBAAQ,EAAC,MAAOc,IAAI,IAAK;QACrB,IAAMC,kBAAkB,GAAG,MAAMjE,WAAW,CAACgE,IAAI,CAACE,KAAK,CAACD,kBAAkB,CAAC;QAC3E,IAAME,gBAAgB,GAAG,MAAMnE,WAAW,CAACgE,IAAI,CAACE,KAAK,CAACC,gBAAgB,CAAC;QACvE,IAAMC,eAAe,GAAG,MAAMpE,WAAW,CAACgE,IAAI,CAACE,KAAK,CAACE,eAAe,CAAC;QACrE,OAAO;UACHR,EAAE,EAAEI,IAAI,CAACJ,EAAE;UACXzF,OAAO,EAAE6F,IAAI,CAAC7F,OAAO;UACrB+F,KAAK,EAAE;YACHD,kBAAkB;YAClBG,eAAe;YACfD;UACJ;QACJ,CAAC;MACL,CAAC,CACL,CAAC;IACL,CAAC;IACDE,4BAA4B,EAAGC,YAAY,IAAK;MAC5C,IAAIA,YAAY,CAACC,MAAM,CAACC,OAAO,EAAE;QAC7B,OAAO/G,QAAQ,CAAC4G,4BAA4B,CAACC,YAAY,CAAC;MAC9D;MACA,IAAMG,WAAW,GAAG;QAChBb,EAAE,EAAEU,YAAY,CAACV,EAAE;QACnBW,MAA
M,EAAE;UACJC,OAAO,EAAE,KAAK;UACdjB,YAAY,EAAEe,YAAY,CAACC,MAAM,CAAChB;QACtC;MACJ,CAAC;MACD,OAAO9F,QAAQ,CAAC4G,4BAA4B,CAACI,WAAW,CAAC;IAC7D;EACJ,CAAC;EAED,OAAOjE,eAAe;AAC1B","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugin.js b/dist/cjs/plugin.js deleted file mode 100644 index 4cda7de0603..00000000000 --- a/dist/cjs/plugin.js +++ /dev/null @@ -1,94 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.addRxPlugin = addRxPlugin; -var _rxSchema = require("./rx-schema.js"); -var _rxDocument = require("./rx-document.js"); -var _rxQuery = require("./rx-query.js"); -var _rxCollection = require("./rx-collection.js"); -var _rxDatabase = require("./rx-database.js"); -var _overwritable = require("./overwritable.js"); -var _hooks = require("./hooks.js"); -var _rxError = require("./rx-error.js"); -/** - * this handles how plugins are added to rxdb - * basically it changes the internal prototypes - * by passing them to the plugins-functions - */ - -/** - * prototypes that can be manipulated with a plugin - */ -var PROTOTYPES = { - RxSchema: _rxSchema.RxSchema.prototype, - RxDocument: _rxDocument.basePrototype, - RxQuery: _rxQuery.RxQueryBase.prototype, - RxCollection: _rxCollection.RxCollectionBase.prototype, - RxDatabase: _rxDatabase.RxDatabaseBase.prototype -}; -var ADDED_PLUGINS = new Set(); -var ADDED_PLUGIN_NAMES = new Set(); - -/** - * Add a plugin to the RxDB library. - * Plugins are added globally and cannot be removed. 
- */ -function addRxPlugin(plugin) { - (0, _hooks.runPluginHooks)('preAddRxPlugin', { - plugin, - plugins: ADDED_PLUGINS - }); - - // do nothing if added before - if (ADDED_PLUGINS.has(plugin)) { - return; - } else { - // ensure no other plugin with the same name was already added - if (ADDED_PLUGIN_NAMES.has(plugin.name)) { - throw (0, _rxError.newRxError)('PL3', { - name: plugin.name, - plugin - }); - } - ADDED_PLUGINS.add(plugin); - ADDED_PLUGIN_NAMES.add(plugin.name); - } - - /** - * To identify broken configurations, - * we only allow RxDB plugins to be passed into addRxPlugin(). - */ - if (!plugin.rxdb) { - throw (0, _rxError.newRxTypeError)('PL1', { - plugin - }); - } - if (plugin.init) { - plugin.init(); - } - - // prototype-overwrites - if (plugin.prototypes) { - Object.entries(plugin.prototypes).forEach(([name, fun]) => { - return fun(PROTOTYPES[name]); - }); - } - // overwritable-overwrites - if (plugin.overwritable) { - Object.assign(_overwritable.overwritable, plugin.overwritable); - } - // extend-hooks - if (plugin.hooks) { - Object.entries(plugin.hooks).forEach(([name, hooksObj]) => { - if (hooksObj.after) { - _hooks.HOOKS[name].push(hooksObj.after); - } - if (hooksObj.before) { - _hooks.HOOKS[name].unshift(hooksObj.before); - } - }); - } -} -//# sourceMappingURL=plugin.js.map \ No newline at end of file diff --git a/dist/cjs/plugin.js.map b/dist/cjs/plugin.js.map deleted file mode 100644 index 3e602ae1088..00000000000 --- a/dist/cjs/plugin.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"plugin.js","names":["_rxSchema","require","_rxDocument","_rxQuery","_rxCollection","_rxDatabase","_overwritable","_hooks","_rxError","PROTOTYPES","RxSchema","prototype","RxDocument","RxDocumentPrototype","RxQuery","RxQueryBase","RxCollection","RxCollectionBase","RxDatabase","RxDatabaseBase","ADDED_PLUGINS","Set","ADDED_PLUGIN_NAMES","addRxPlugin","plugin","runPluginHooks","plugins","has","name","newRxError","add","rxdb","newRxTypeError","init","prototypes","Object","entries","forEach","fun","overwritable","assign","hooks","hooksObj","after","HOOKS","push","before","unshift"],"sources":["../../src/plugin.ts"],"sourcesContent":["/**\n * this handles how plugins are added to rxdb\n * basically it changes the internal prototypes\n * by passing them to the plugins-functions\n */\nimport {\n RxSchema\n} from './rx-schema.ts';\nimport {\n basePrototype as RxDocumentPrototype\n} from './rx-document.ts';\nimport {\n RxQueryBase\n} from './rx-query.ts';\nimport {\n RxCollectionBase\n} from './rx-collection.ts';\nimport {\n RxDatabaseBase\n} from './rx-database.ts';\nimport type {\n RxPlugin\n} from './types/index.d.ts';\n\nimport { overwritable } from './overwritable.ts';\nimport {\n HOOKS,\n runPluginHooks\n} from './hooks.ts';\nimport { newRxError, newRxTypeError } from './rx-error.ts';\n\n/**\n * prototypes that can be manipulated with a plugin\n */\nconst PROTOTYPES: { [k: string]: any; } = {\n RxSchema: RxSchema.prototype,\n RxDocument: RxDocumentPrototype,\n RxQuery: RxQueryBase.prototype,\n RxCollection: RxCollectionBase.prototype,\n RxDatabase: RxDatabaseBase.prototype\n};\n\nconst ADDED_PLUGINS: Set = new Set();\nconst ADDED_PLUGIN_NAMES: Set = new Set();\n\n/**\n * Add a plugin to the RxDB library.\n * Plugins are added globally and cannot be removed.\n */\nexport function addRxPlugin(plugin: RxPlugin) {\n runPluginHooks('preAddRxPlugin', { plugin, plugins: ADDED_PLUGINS });\n\n // do nothing if added before\n if (ADDED_PLUGINS.has(plugin)) 
{\n return;\n } else {\n\n // ensure no other plugin with the same name was already added\n if (ADDED_PLUGIN_NAMES.has(plugin.name)) {\n throw newRxError('PL3', {\n name: plugin.name,\n plugin,\n });\n }\n\n ADDED_PLUGINS.add(plugin);\n ADDED_PLUGIN_NAMES.add(plugin.name);\n }\n\n /**\n * To identify broken configurations,\n * we only allow RxDB plugins to be passed into addRxPlugin().\n */\n if (!plugin.rxdb) {\n throw newRxTypeError('PL1', {\n plugin\n });\n }\n\n if (plugin.init) {\n plugin.init();\n }\n\n // prototype-overwrites\n if (plugin.prototypes) {\n Object\n .entries(plugin.prototypes)\n .forEach(([name, fun]) => {\n return (fun as any)(PROTOTYPES[name]);\n });\n }\n // overwritable-overwrites\n if (plugin.overwritable) {\n Object.assign(\n overwritable,\n plugin.overwritable\n );\n }\n // extend-hooks\n if (plugin.hooks) {\n Object\n .entries(plugin.hooks)\n .forEach(([name, hooksObj]) => {\n if (hooksObj.after) {\n HOOKS[name].push(hooksObj.after);\n }\n if (hooksObj.before) {\n HOOKS[name].unshift(hooksObj.before);\n }\n });\n 
}\n}\n\n"],"mappings":";;;;;;AAKA,IAAAA,SAAA,GAAAC,OAAA;AAGA,IAAAC,WAAA,GAAAD,OAAA;AAGA,IAAAE,QAAA,GAAAF,OAAA;AAGA,IAAAG,aAAA,GAAAH,OAAA;AAGA,IAAAI,WAAA,GAAAJ,OAAA;AAOA,IAAAK,aAAA,GAAAL,OAAA;AACA,IAAAM,MAAA,GAAAN,OAAA;AAIA,IAAAO,QAAA,GAAAP,OAAA;AA7BA;AACA;AACA;AACA;AACA;;AA2BA;AACA;AACA;AACA,IAAMQ,UAAiC,GAAG;EACtCC,QAAQ,EAAEA,kBAAQ,CAACC,SAAS;EAC5BC,UAAU,EAAEC,yBAAmB;EAC/BC,OAAO,EAAEC,oBAAW,CAACJ,SAAS;EAC9BK,YAAY,EAAEC,8BAAgB,CAACN,SAAS;EACxCO,UAAU,EAAEC,0BAAc,CAACR;AAC/B,CAAC;AAED,IAAMS,aAAkC,GAAG,IAAIC,GAAG,CAAC,CAAC;AACpD,IAAMC,kBAA+B,GAAG,IAAID,GAAG,CAAC,CAAC;;AAEjD;AACA;AACA;AACA;AACO,SAASE,WAAWA,CAACC,MAAgB,EAAE;EAC1C,IAAAC,qBAAc,EAAC,gBAAgB,EAAE;IAAED,MAAM;IAAEE,OAAO,EAAEN;EAAc,CAAC,CAAC;;EAEpE;EACA,IAAIA,aAAa,CAACO,GAAG,CAACH,MAAM,CAAC,EAAE;IAC3B;EACJ,CAAC,MAAM;IAEH;IACA,IAAIF,kBAAkB,CAACK,GAAG,CAACH,MAAM,CAACI,IAAI,CAAC,EAAE;MACrC,MAAM,IAAAC,mBAAU,EAAC,KAAK,EAAE;QACpBD,IAAI,EAAEJ,MAAM,CAACI,IAAI;QACjBJ;MACJ,CAAC,CAAC;IACN;IAEAJ,aAAa,CAACU,GAAG,CAACN,MAAM,CAAC;IACzBF,kBAAkB,CAACQ,GAAG,CAACN,MAAM,CAACI,IAAI,CAAC;EACvC;;EAEA;AACJ;AACA;AACA;EACI,IAAI,CAACJ,MAAM,CAACO,IAAI,EAAE;IACd,MAAM,IAAAC,uBAAc,EAAC,KAAK,EAAE;MACxBR;IACJ,CAAC,CAAC;EACN;EAEA,IAAIA,MAAM,CAACS,IAAI,EAAE;IACbT,MAAM,CAACS,IAAI,CAAC,CAAC;EACjB;;EAEA;EACA,IAAIT,MAAM,CAACU,UAAU,EAAE;IACnBC,MAAM,CACDC,OAAO,CAACZ,MAAM,CAACU,UAAU,CAAC,CAC1BG,OAAO,CAAC,CAAC,CAACT,IAAI,EAAEU,GAAG,CAAC,KAAK;MACtB,OAAQA,GAAG,CAAS7B,UAAU,CAACmB,IAAI,CAAC,CAAC;IACzC,CAAC,CAAC;EACV;EACA;EACA,IAAIJ,MAAM,CAACe,YAAY,EAAE;IACrBJ,MAAM,CAACK,MAAM,CACTD,0BAAY,EACZf,MAAM,CAACe,YACX,CAAC;EACL;EACA;EACA,IAAIf,MAAM,CAACiB,KAAK,EAAE;IACdN,MAAM,CACDC,OAAO,CAACZ,MAAM,CAACiB,KAAK,CAAC,CACrBJ,OAAO,CAAC,CAAC,CAACT,IAAI,EAAEc,QAAQ,CAAC,KAAK;MAC3B,IAAIA,QAAQ,CAACC,KAAK,EAAE;QAChBC,YAAK,CAAChB,IAAI,CAAC,CAACiB,IAAI,CAACH,QAAQ,CAACC,KAAK,CAAC;MACpC;MACA,IAAID,QAAQ,CAACI,MAAM,EAAE;QACjBF,YAAK,CAAChB,IAAI,CAAC,CAACmB,OAAO,CAACL,QAAQ,CAACI,MAAM,CAAC;MACxC;IACJ,CAAC,CAAC;EACV;AACJ","ignoreList":[]} \ No newline at end of file diff --git 
a/dist/cjs/plugins/attachments-compression/index.js b/dist/cjs/plugins/attachments-compression/index.js deleted file mode 100644 index 79ba96a5ecd..00000000000 --- a/dist/cjs/plugins/attachments-compression/index.js +++ /dev/null @@ -1,71 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.compressBase64 = compressBase64; -exports.decompressBase64 = decompressBase64; -exports.wrappedAttachmentsCompressionStorage = wrappedAttachmentsCompressionStorage; -var _pluginHelpers = require("../../plugin-helpers.js"); -var _index = require("../utils/index.js"); -/** - * @link https://github.com/WICG/compression/blob/main/explainer.md - */ -async function compressBase64(mode, base64String) { - var arrayBuffer = (0, _index.base64ToArrayBuffer)(base64String); - var stream = (0, _index.ensureNotFalsy)(new Response(arrayBuffer).body).pipeThrough(new CompressionStream(mode)); - var result = await new Response(stream).arrayBuffer(); - return (0, _index.arrayBufferToBase64)(result); -} -async function decompressBase64(mode, base64String) { - var arrayBuffer = (0, _index.base64ToArrayBuffer)(base64String); - var stream = (0, _index.ensureNotFalsy)(new Response(arrayBuffer).body).pipeThrough(new DecompressionStream(mode)); - var result = await new Response(stream).arrayBuffer(); - return (0, _index.arrayBufferToBase64)(result); -} - -/** - * A RxStorage wrapper that compresses attachment data on writes - * and decompresses the data on reads. 
- * - * This is using the CompressionStream API, - * @link https://caniuse.com/?search=compressionstream - */ -function wrappedAttachmentsCompressionStorage(args) { - return Object.assign({}, args.storage, { - async createStorageInstance(params) { - if (!params.schema.attachments || !params.schema.attachments.compression) { - return args.storage.createStorageInstance(params); - } - var mode = params.schema.attachments.compression; - async function modifyToStorage(docData) { - await Promise.all(Object.values(docData._attachments).map(async attachment => { - if (!attachment.data) { - return; - } - var attachmentWriteData = attachment; - attachmentWriteData.data = await compressBase64(mode, attachmentWriteData.data); - })); - return docData; - } - function modifyAttachmentFromStorage(attachmentData) { - return decompressBase64(mode, attachmentData); - } - - /** - * Because this wrapper resolves the attachments.compression, - * we have to remove it before sending it to the underlying RxStorage. - * which allows underlying storages to detect wrong configurations - * like when compression is set to false but no attachment-compression module is used. 
- */ - var childSchema = (0, _index.flatClone)(params.schema); - childSchema.attachments = (0, _index.flatClone)(childSchema.attachments); - delete (0, _index.ensureNotFalsy)(childSchema.attachments).compression; - var instance = await args.storage.createStorageInstance(Object.assign({}, params, { - schema: childSchema - })); - return (0, _pluginHelpers.wrapRxStorageInstance)(params.schema, instance, modifyToStorage, d => d, modifyAttachmentFromStorage); - } - }); -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/attachments-compression/index.js.map b/dist/cjs/plugins/attachments-compression/index.js.map deleted file mode 100644 index 7099576aaa4..00000000000 --- a/dist/cjs/plugins/attachments-compression/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_pluginHelpers","require","_index","compressBase64","mode","base64String","arrayBuffer","base64ToArrayBuffer","stream","ensureNotFalsy","Response","body","pipeThrough","CompressionStream","result","arrayBufferToBase64","decompressBase64","DecompressionStream","wrappedAttachmentsCompressionStorage","args","Object","assign","storage","createStorageInstance","params","schema","attachments","compression","modifyToStorage","docData","Promise","all","values","_attachments","map","attachment","data","attachmentWriteData","modifyAttachmentFromStorage","attachmentData","childSchema","flatClone","instance","wrapRxStorageInstance","d"],"sources":["../../../../src/plugins/attachments-compression/index.ts"],"sourcesContent":["import { wrapRxStorageInstance } from '../../plugin-helpers.ts';\nimport type {\n RxStorage,\n RxStorageInstanceCreationParams,\n RxDocumentWriteData,\n CompressionMode,\n RxAttachmentWriteData\n} from '../../types/index.d.ts';\n\nimport {\n arrayBufferToBase64,\n base64ToArrayBuffer,\n ensureNotFalsy,\n flatClone\n} from '../utils/index.ts';\n\n\n/**\n * @link https://github.com/WICG/compression/blob/main/explainer.md\n 
*/\nexport async function compressBase64(\n mode: CompressionMode,\n base64String: string\n): Promise {\n const arrayBuffer = base64ToArrayBuffer(base64String);\n const stream = ensureNotFalsy(new Response(arrayBuffer).body)\n .pipeThrough(new CompressionStream(mode));\n const result = await new Response(stream).arrayBuffer();\n return arrayBufferToBase64(result);\n}\nexport async function decompressBase64(\n mode: CompressionMode,\n base64String: string\n): Promise {\n const arrayBuffer = base64ToArrayBuffer(base64String);\n const stream = ensureNotFalsy(new Response(arrayBuffer).body)\n .pipeThrough(new DecompressionStream(mode));\n const result = await new Response(stream).arrayBuffer();\n return arrayBufferToBase64(result);\n}\n\n\n/**\n * A RxStorage wrapper that compresses attachment data on writes\n * and decompresses the data on reads.\n *\n * This is using the CompressionStream API,\n * @link https://caniuse.com/?search=compressionstream\n */\nexport function wrappedAttachmentsCompressionStorage(\n args: {\n storage: RxStorage;\n }\n): RxStorage {\n return Object.assign(\n {},\n args.storage,\n {\n async createStorageInstance(\n params: RxStorageInstanceCreationParams\n ) {\n if (\n !params.schema.attachments ||\n !params.schema.attachments.compression\n ) {\n return args.storage.createStorageInstance(params);\n }\n\n const mode = params.schema.attachments.compression;\n\n async function modifyToStorage(docData: RxDocumentWriteData) {\n await Promise.all(\n Object.values(docData._attachments).map(async (attachment) => {\n if (!(attachment as RxAttachmentWriteData).data) {\n return;\n }\n const attachmentWriteData = attachment as RxAttachmentWriteData;\n attachmentWriteData.data = await compressBase64(mode, attachmentWriteData.data);\n })\n );\n return docData;\n }\n function modifyAttachmentFromStorage(attachmentData: string): Promise {\n return decompressBase64(mode, attachmentData);\n }\n\n /**\n * Because this wrapper resolves the 
attachments.compression,\n * we have to remove it before sending it to the underlying RxStorage.\n * which allows underlying storages to detect wrong configurations\n * like when compression is set to false but no attachment-compression module is used.\n */\n const childSchema = flatClone(params.schema);\n childSchema.attachments = flatClone(childSchema.attachments);\n delete ensureNotFalsy(childSchema.attachments).compression;\n\n const instance = await args.storage.createStorageInstance(\n Object.assign(\n {},\n params,\n {\n schema: childSchema\n }\n )\n );\n\n return wrapRxStorageInstance(\n params.schema,\n instance,\n modifyToStorage,\n d => d,\n modifyAttachmentFromStorage\n );\n }\n }\n );\n}\n"],"mappings":";;;;;;;;AAAA,IAAAA,cAAA,GAAAC,OAAA;AASA,IAAAC,MAAA,GAAAD,OAAA;AAQA;AACA;AACA;AACO,eAAeE,cAAcA,CAChCC,IAAqB,EACrBC,YAAoB,EACL;EACf,IAAMC,WAAW,GAAG,IAAAC,0BAAmB,EAACF,YAAY,CAAC;EACrD,IAAMG,MAAM,GAAG,IAAAC,qBAAc,EAAC,IAAIC,QAAQ,CAACJ,WAAW,CAAC,CAACK,IAAI,CAAC,CACxDC,WAAW,CAAC,IAAIC,iBAAiB,CAACT,IAAI,CAAC,CAAC;EAC7C,IAAMU,MAAM,GAAG,MAAM,IAAIJ,QAAQ,CAACF,MAAM,CAAC,CAACF,WAAW,CAAC,CAAC;EACvD,OAAO,IAAAS,0BAAmB,EAACD,MAAM,CAAC;AACtC;AACO,eAAeE,gBAAgBA,CAClCZ,IAAqB,EACrBC,YAAoB,EACL;EACf,IAAMC,WAAW,GAAG,IAAAC,0BAAmB,EAACF,YAAY,CAAC;EACrD,IAAMG,MAAM,GAAG,IAAAC,qBAAc,EAAC,IAAIC,QAAQ,CAACJ,WAAW,CAAC,CAACK,IAAI,CAAC,CACxDC,WAAW,CAAC,IAAIK,mBAAmB,CAACb,IAAI,CAAC,CAAC;EAC/C,IAAMU,MAAM,GAAG,MAAM,IAAIJ,QAAQ,CAACF,MAAM,CAAC,CAACF,WAAW,CAAC,CAAC;EACvD,OAAO,IAAAS,0BAAmB,EAACD,MAAM,CAAC;AACtC;;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACO,SAASI,oCAAoCA,CAChDC,IAEC,EAC4C;EAC7C,OAAOC,MAAM,CAACC,MAAM,CAChB,CAAC,CAAC,EACFF,IAAI,CAACG,OAAO,EACZ;IACI,MAAMC,qBAAqBA,CACvBC,MAAuD,EACzD;MACE,IACI,CAACA,MAAM,CAACC,MAAM,CAACC,WAAW,IAC1B,CAACF,MAAM,CAACC,MAAM,CAACC,WAAW,CAACC,WAAW,EACxC;QACE,OAAOR,IAAI,CAACG,OAAO,CAACC,qBAAqB,CAACC,MAAM,CAAC;MACrD;MAEA,IAAMpB,IAAI,GAAGoB,MAAM,CAACC,MAAM,CAACC,WAAW,CAACC,WAAW;MAElD,eAAeC,eAAeA,CAACC,OAAuC,EAAE;QACpE,MAAMC,OAAO,CAACC,GAAG,CACbX,MAAM,CAACY,MAAM,C
AACH,OAAO,CAACI,YAAY,CAAC,CAACC,GAAG,CAAC,MAAOC,UAAU,IAAK;UAC1D,IAAI,CAAEA,UAAU,CAA2BC,IAAI,EAAE;YAC7C;UACJ;UACA,IAAMC,mBAAmB,GAAGF,UAAmC;UAC/DE,mBAAmB,CAACD,IAAI,GAAG,MAAMjC,cAAc,CAACC,IAAI,EAAEiC,mBAAmB,CAACD,IAAI,CAAC;QACnF,CAAC,CACL,CAAC;QACD,OAAOP,OAAO;MAClB;MACA,SAASS,2BAA2BA,CAACC,cAAsB,EAAmB;QAC1E,OAAOvB,gBAAgB,CAACZ,IAAI,EAAEmC,cAAc,CAAC;MACjD;;MAEA;AAChB;AACA;AACA;AACA;AACA;MACgB,IAAMC,WAAW,GAAG,IAAAC,gBAAS,EAACjB,MAAM,CAACC,MAAM,CAAC;MAC5Ce,WAAW,CAACd,WAAW,GAAG,IAAAe,gBAAS,EAACD,WAAW,CAACd,WAAW,CAAC;MAC5D,OAAO,IAAAjB,qBAAc,EAAC+B,WAAW,CAACd,WAAW,CAAC,CAACC,WAAW;MAE1D,IAAMe,QAAQ,GAAG,MAAMvB,IAAI,CAACG,OAAO,CAACC,qBAAqB,CACrDH,MAAM,CAACC,MAAM,CACT,CAAC,CAAC,EACFG,MAAM,EACN;QACIC,MAAM,EAAEe;MACZ,CACJ,CACJ,CAAC;MAED,OAAO,IAAAG,oCAAqB,EACxBnB,MAAM,CAACC,MAAM,EACbiB,QAAQ,EACRd,eAAe,EACfgB,CAAC,IAAIA,CAAC,EACNN,2BACJ,CAAC;IACL;EACJ,CACJ,CAAC;AACL","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/attachments/attachments-utils.js b/dist/cjs/plugins/attachments/attachments-utils.js deleted file mode 100644 index 7b9fc7f0edc..00000000000 --- a/dist/cjs/plugins/attachments/attachments-utils.js +++ /dev/null @@ -1,47 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.assignMethodsToAttachment = assignMethodsToAttachment; -exports.ensureSchemaSupportsAttachments = ensureSchemaSupportsAttachments; -exports.fillWriteDataForAttachmentsChange = fillWriteDataForAttachmentsChange; -var _rxError = require("../../rx-error.js"); -var _index = require("../utils/index.js"); -function ensureSchemaSupportsAttachments(doc) { - var schemaJson = doc.collection.schema.jsonSchema; - if (!schemaJson.attachments) { - throw (0, _rxError.newRxError)('AT1', { - link: 'https://pubkey.github.io/rxdb/rx-attachment.html' - }); - } -} -function assignMethodsToAttachment(attachment) { - Object.entries(attachment.doc.collection.attachments).forEach(([funName, fun]) => { - Object.defineProperty(attachment, funName, { - get: 
() => fun.bind(attachment) - }); - }); -} - -/** - * Fill up the missing attachment.data of the newDocument - * so that the new document can be send to somewhere else - * which could then receive all required attachments data - * that it did not have before. - */ -async function fillWriteDataForAttachmentsChange(primaryPath, storageInstance, newDocument, originalDocument) { - if (!newDocument._attachments || originalDocument && !originalDocument._attachments) { - throw new Error('_attachments missing'); - } - var docId = newDocument[primaryPath]; - var originalAttachmentsIds = new Set(originalDocument && originalDocument._attachments ? Object.keys(originalDocument._attachments) : []); - await Promise.all(Object.entries(newDocument._attachments).map(async ([key, value]) => { - if ((!originalAttachmentsIds.has(key) || originalDocument && (0, _index.ensureNotFalsy)(originalDocument._attachments)[key].digest !== value.digest) && !value.data) { - var attachmentDataString = await storageInstance.getAttachmentData(docId, key, value.digest); - value.data = attachmentDataString; - } - })); - return newDocument; -} -//# sourceMappingURL=attachments-utils.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/attachments/attachments-utils.js.map b/dist/cjs/plugins/attachments/attachments-utils.js.map deleted file mode 100644 index e616f7af092..00000000000 --- a/dist/cjs/plugins/attachments/attachments-utils.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"attachments-utils.js","names":["_rxError","require","_index","ensureSchemaSupportsAttachments","doc","schemaJson","collection","schema","jsonSchema","attachments","newRxError","link","assignMethodsToAttachment","attachment","Object","entries","forEach","funName","fun","defineProperty","get","bind","fillWriteDataForAttachmentsChange","primaryPath","storageInstance","newDocument","originalDocument","_attachments","Error","docId","originalAttachmentsIds","Set","keys","Promise","all","map","key","value","has","ensureNotFalsy","digest","data","attachmentDataString","getAttachmentData"],"sources":["../../../../src/plugins/attachments/attachments-utils.ts"],"sourcesContent":["import { newRxError } from '../../rx-error.ts';\nimport type {\n RxAttachmentWriteData,\n RxStorageInstance,\n WithDeletedAndAttachments\n} from '../../types/index.d.ts';\nimport { ensureNotFalsy } from '../utils/index.ts';\n\nexport function ensureSchemaSupportsAttachments(doc: any) {\n const schemaJson = doc.collection.schema.jsonSchema;\n if (!schemaJson.attachments) {\n throw newRxError('AT1', {\n link: 'https://pubkey.github.io/rxdb/rx-attachment.html'\n });\n }\n}\n\nexport function assignMethodsToAttachment(attachment: any) {\n Object\n .entries(attachment.doc.collection.attachments)\n .forEach(([funName, fun]) => {\n Object.defineProperty(attachment, funName, {\n get: () => (fun as any).bind(attachment)\n });\n });\n}\n\n/**\n * Fill up the missing attachment.data of the newDocument\n * so that the new document can be send to somewhere else\n * which could then receive all required attachments data\n * that it did not have before.\n */\nexport async function fillWriteDataForAttachmentsChange(\n primaryPath: string,\n storageInstance: RxStorageInstance,\n newDocument: WithDeletedAndAttachments,\n originalDocument?: WithDeletedAndAttachments\n): Promise> {\n\n if (\n !newDocument._attachments ||\n (\n originalDocument &&\n !originalDocument._attachments\n )\n ) {\n throw 
new Error('_attachments missing');\n }\n\n const docId: string = (newDocument as any)[primaryPath];\n const originalAttachmentsIds = new Set(\n originalDocument && originalDocument._attachments\n ? Object.keys(originalDocument._attachments)\n : []\n );\n await Promise.all(\n Object\n .entries(newDocument._attachments)\n .map(async ([key, value]) => {\n if (\n (\n !originalAttachmentsIds.has(key) ||\n (\n originalDocument &&\n ensureNotFalsy(originalDocument._attachments)[key].digest !== value.digest\n )\n ) &&\n !(value as RxAttachmentWriteData).data\n ) {\n const attachmentDataString = await storageInstance.getAttachmentData(\n docId,\n key,\n value.digest\n );\n (value as RxAttachmentWriteData).data = attachmentDataString;\n }\n })\n );\n\n return newDocument;\n}\n"],"mappings":";;;;;;;;AAAA,IAAAA,QAAA,GAAAC,OAAA;AAMA,IAAAC,MAAA,GAAAD,OAAA;AAEO,SAASE,+BAA+BA,CAACC,GAAQ,EAAE;EACtD,IAAMC,UAAU,GAAGD,GAAG,CAACE,UAAU,CAACC,MAAM,CAACC,UAAU;EACnD,IAAI,CAACH,UAAU,CAACI,WAAW,EAAE;IACzB,MAAM,IAAAC,mBAAU,EAAC,KAAK,EAAE;MACpBC,IAAI,EAAE;IACV,CAAC,CAAC;EACN;AACJ;AAEO,SAASC,yBAAyBA,CAACC,UAAe,EAAE;EACvDC,MAAM,CACDC,OAAO,CAACF,UAAU,CAACT,GAAG,CAACE,UAAU,CAACG,WAAW,CAAC,CAC9CO,OAAO,CAAC,CAAC,CAACC,OAAO,EAAEC,GAAG,CAAC,KAAK;IACzBJ,MAAM,CAACK,cAAc,CAACN,UAAU,EAAEI,OAAO,EAAE;MACvCG,GAAG,EAAEA,CAAA,KAAOF,GAAG,CAASG,IAAI,CAACR,UAAU;IAC3C,CAAC,CAAC;EACN,CAAC,CAAC;AACV;;AAEA;AACA;AACA;AACA;AACA;AACA;AACO,eAAeS,iCAAiCA,CACnDC,WAAmB,EACnBC,eAA4D,EAC5DC,WAAiD,EACjDC,gBAAuD,EACV;EAE7C,IACI,CAACD,WAAW,CAACE,YAAY,IAErBD,gBAAgB,IAChB,CAACA,gBAAgB,CAACC,YACrB,EACH;IACE,MAAM,IAAIC,KAAK,CAAC,sBAAsB,CAAC;EAC3C;EAEA,IAAMC,KAAa,GAAIJ,WAAW,CAASF,WAAW,CAAC;EACvD,IAAMO,sBAAsB,GAAG,IAAIC,GAAG,CAClCL,gBAAgB,IAAIA,gBAAgB,CAACC,YAAY,GAC3Cb,MAAM,CAACkB,IAAI,CAACN,gBAAgB,CAACC,YAAY,CAAC,GAC1C,EACV,CAAC;EACD,MAAMM,OAAO,CAACC,GAAG,CACbpB,MAAM,CACDC,OAAO,CAACU,WAAW,CAACE,YAAY,CAAC,CACjCQ,GAAG,CAAC,OAAO,CAACC,GAAG,EAAEC,KAAK,CAAC,KAAK;IACzB,IACI,CACI,CAACP,sBAAsB,CAACQ,GAAG,CAACF,GAAG,CAAC,IAE5BV,gBAAgB,IAChB,IA
AAa,qBAAc,EAACb,gBAAgB,CAACC,YAAY,CAAC,CAACS,GAAG,CAAC,CAACI,MAAM,KAAKH,KAAK,CAACG,MACvE,KAEL,CAAEH,KAAK,CAA2BI,IAAI,EACxC;MACE,IAAMC,oBAAoB,GAAG,MAAMlB,eAAe,CAACmB,iBAAiB,CAChEd,KAAK,EACLO,GAAG,EACHC,KAAK,CAACG,MACV,CAAC;MACAH,KAAK,CAA2BI,IAAI,GAAGC,oBAAoB;IAChE;EACJ,CAAC,CACT,CAAC;EAED,OAAOjB,WAAW;AACtB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/attachments/index.js b/dist/cjs/plugins/attachments/index.js deleted file mode 100644 index 29133a6ad9e..00000000000 --- a/dist/cjs/plugins/attachments/index.js +++ /dev/null @@ -1,201 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _exportNames = { - RxAttachment: true, - fromStorageInstanceResult: true, - putAttachment: true, - getAttachment: true, - allAttachments: true, - preMigrateDocument: true, - postMigrateDocument: true, - RxDBAttachmentsPlugin: true -}; -exports.RxDBAttachmentsPlugin = exports.RxAttachment = void 0; -exports.allAttachments = allAttachments; -exports.fromStorageInstanceResult = fromStorageInstanceResult; -exports.getAttachment = getAttachment; -exports.postMigrateDocument = postMigrateDocument; -exports.preMigrateDocument = preMigrateDocument; -exports.putAttachment = putAttachment; -var _rxjs = require("rxjs"); -var _index = require("../../plugins/utils/index.js"); -var _attachmentsUtils = require("./attachments-utils.js"); -Object.keys(_attachmentsUtils).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _attachmentsUtils[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _attachmentsUtils[key]; - } - }); -}); -/** - * an RxAttachment is basically just the attachment-stub - * wrapped so that you can access the attachment-data - */ -var RxAttachment = exports.RxAttachment = /*#__PURE__*/function () { - function 
RxAttachment({ - doc, - id, - type, - length, - digest - }) { - this.doc = doc; - this.id = id; - this.type = type; - this.length = length; - this.digest = digest; - (0, _attachmentsUtils.assignMethodsToAttachment)(this); - } - var _proto = RxAttachment.prototype; - _proto.remove = function remove() { - return this.doc.collection.incrementalWriteQueue.addWrite(this.doc._data, docWriteData => { - delete docWriteData._attachments[this.id]; - return docWriteData; - }).then(() => {}); - } - - /** - * returns the data for the attachment - */; - _proto.getData = async function getData() { - var plainDataBase64 = await this.doc.collection.storageInstance.getAttachmentData(this.doc.primary, this.id, this.digest); - var ret = await (0, _index.createBlobFromBase64)(plainDataBase64, this.type); - return ret; - }; - _proto.getStringData = async function getStringData() { - var data = await this.getData(); - var asString = await (0, _index.blobToString)(data); - return asString; - }; - return RxAttachment; -}(); -function fromStorageInstanceResult(id, attachmentData, rxDocument) { - return new RxAttachment({ - doc: rxDocument, - id, - type: attachmentData.type, - length: attachmentData.length, - digest: attachmentData.digest - }); -} -async function putAttachment(attachmentData) { - (0, _attachmentsUtils.ensureSchemaSupportsAttachments)(this); - var dataSize = (0, _index.getBlobSize)(attachmentData.data); - var dataString = await (0, _index.blobToBase64String)(attachmentData.data); - var digest = await this.collection.database.hashFunction(dataString); - var id = attachmentData.id; - var type = attachmentData.type; - var data = dataString; - return this.collection.incrementalWriteQueue.addWrite(this._data, docWriteData => { - docWriteData = (0, _index.flatClone)(docWriteData); - docWriteData._attachments = (0, _index.flatClone)(docWriteData._attachments); - docWriteData._attachments[id] = { - length: dataSize, - type, - data, - digest - }; - return docWriteData; - 
}).then(writeResult => { - var newDocument = this.collection._docCache.getCachedRxDocument(writeResult); - var attachmentDataOfId = writeResult._attachments[id]; - var attachment = fromStorageInstanceResult(id, attachmentDataOfId, newDocument); - return attachment; - }); -} - -/** - * get an attachment of the document by its id - */ -function getAttachment(id) { - (0, _attachmentsUtils.ensureSchemaSupportsAttachments)(this); - var docData = this._data; - if (!docData._attachments || !docData._attachments[id]) return null; - var attachmentData = docData._attachments[id]; - var attachment = fromStorageInstanceResult(id, attachmentData, this); - return attachment; -} - -/** - * returns all attachments of the document - */ -function allAttachments() { - (0, _attachmentsUtils.ensureSchemaSupportsAttachments)(this); - var docData = this._data; - - // if there are no attachments, the field is missing - if (!docData._attachments) { - return []; - } - return Object.keys(docData._attachments).map(id => { - return fromStorageInstanceResult(id, docData._attachments[id], this); - }); -} -async function preMigrateDocument(data) { - var attachments = data.docData._attachments; - if (attachments) { - var newAttachments = {}; - await Promise.all(Object.keys(attachments).map(async attachmentId => { - var attachment = attachments[attachmentId]; - var docPrimary = data.docData[data.oldCollection.schema.primaryPath]; - var rawAttachmentData = await data.oldCollection.storageInstance.getAttachmentData(docPrimary, attachmentId, attachment.digest); - var digest = await data.oldCollection.database.hashFunction(rawAttachmentData); - newAttachments[attachmentId] = { - length: attachment.length, - type: attachment.type, - data: rawAttachmentData, - digest - }; - })); - - /** - * Hooks mutate the input - * instead of returning stuff - */ - data.docData._attachments = newAttachments; - } -} -function postMigrateDocument(_action) { - /** - * No longer needed because - * we store the attachments 
data buffers directly in the document. - */ - return _index.PROMISE_RESOLVE_VOID; -} -var RxDBAttachmentsPlugin = exports.RxDBAttachmentsPlugin = { - name: 'attachments', - rxdb: true, - prototypes: { - RxDocument: proto => { - proto.putAttachment = putAttachment; - proto.getAttachment = getAttachment; - proto.allAttachments = allAttachments; - Object.defineProperty(proto, 'allAttachments$', { - get: function allAttachments$() { - return this.$.pipe((0, _rxjs.map)(rxDocument => Object.entries(rxDocument.toJSON(true)._attachments)), (0, _rxjs.map)(entries => { - return entries.map(([id, attachmentData]) => { - return fromStorageInstanceResult(id, attachmentData, this); - }); - })); - } - }); - } - }, - overwritable: {}, - hooks: { - preMigrateDocument: { - after: preMigrateDocument - }, - postMigrateDocument: { - after: postMigrateDocument - } - } -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/attachments/index.js.map b/dist/cjs/plugins/attachments/index.js.map deleted file mode 100644 index 602dd8d0df6..00000000000 --- a/dist/cjs/plugins/attachments/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","names":["_rxjs","require","_index","_attachmentsUtils","Object","keys","forEach","key","prototype","hasOwnProperty","call","_exportNames","exports","defineProperty","enumerable","get","RxAttachment","doc","id","type","length","digest","assignMethodsToAttachment","_proto","remove","collection","incrementalWriteQueue","addWrite","_data","docWriteData","_attachments","then","getData","plainDataBase64","storageInstance","getAttachmentData","primary","ret","createBlobFromBase64","getStringData","data","asString","blobToString","fromStorageInstanceResult","attachmentData","rxDocument","putAttachment","ensureSchemaSupportsAttachments","dataSize","getBlobSize","dataString","blobToBase64String","database","hashFunction","flatClone","writeResult","newDocument","_docCache","getCachedRxDocument","attachmentDataOfId","attachment","getAttachment","docData","allAttachments","map","preMigrateDocument","attachments","newAttachments","Promise","all","attachmentId","docPrimary","oldCollection","schema","primaryPath","rawAttachmentData","postMigrateDocument","_action","PROMISE_RESOLVE_VOID","RxDBAttachmentsPlugin","name","rxdb","prototypes","RxDocument","proto","allAttachments$","$","pipe","entries","toJSON","overwritable","hooks","after"],"sources":["../../../../src/plugins/attachments/index.ts"],"sourcesContent":["import {\n map\n} from 'rxjs';\n\nimport {\n blobToBase64String,\n blobToString,\n createBlobFromBase64,\n flatClone,\n getBlobSize,\n PROMISE_RESOLVE_VOID\n} from '../../plugins/utils/index.ts';\nimport type {\n RxDocument,\n RxPlugin,\n RxDocumentWriteData,\n RxAttachmentData,\n RxDocumentData,\n RxAttachmentCreator,\n RxAttachmentWriteData\n} from '../../types/index.ts';\nimport { assignMethodsToAttachment, ensureSchemaSupportsAttachments } from './attachments-utils.ts';\n\n\n\n/**\n * an RxAttachment is basically just the attachment-stub\n * wrapped so that you can access the attachment-data\n */\nexport class RxAttachment {\n public 
doc: RxDocument;\n public id: string;\n public type: string;\n public length: number;\n public digest: string;\n constructor({\n doc,\n id,\n type,\n length,\n digest\n }: any) {\n this.doc = doc;\n this.id = id;\n this.type = type;\n this.length = length;\n this.digest = digest;\n\n assignMethodsToAttachment(this);\n }\n\n remove(): Promise {\n return this.doc.collection.incrementalWriteQueue.addWrite(\n this.doc._data,\n docWriteData => {\n delete docWriteData._attachments[this.id];\n return docWriteData;\n }\n ).then(() => { });\n }\n\n /**\n * returns the data for the attachment\n */\n async getData(): Promise {\n const plainDataBase64 = await this.doc.collection.storageInstance.getAttachmentData(\n this.doc.primary,\n this.id,\n this.digest\n );\n const ret = await createBlobFromBase64(\n plainDataBase64,\n this.type as any\n );\n return ret;\n }\n\n async getStringData(): Promise {\n const data = await this.getData();\n const asString = await blobToString(data);\n return asString;\n }\n}\n\nexport function fromStorageInstanceResult(\n id: string,\n attachmentData: RxAttachmentData,\n rxDocument: RxDocument\n) {\n return new RxAttachment({\n doc: rxDocument,\n id,\n type: attachmentData.type,\n length: attachmentData.length,\n digest: attachmentData.digest\n });\n}\n\n\n\nexport async function putAttachment(\n this: RxDocument,\n attachmentData: RxAttachmentCreator\n): Promise {\n ensureSchemaSupportsAttachments(this);\n\n const dataSize = getBlobSize(attachmentData.data);\n const dataString = await blobToBase64String(attachmentData.data);\n const digest = await this.collection.database.hashFunction(dataString);\n\n const id = attachmentData.id;\n const type = attachmentData.type;\n const data = dataString;\n\n return this.collection.incrementalWriteQueue.addWrite(\n this._data,\n (docWriteData: RxDocumentWriteData) => {\n docWriteData = flatClone(docWriteData);\n docWriteData._attachments = flatClone(docWriteData._attachments);\n docWriteData._attachments[id] 
= {\n length: dataSize,\n type,\n data,\n digest\n };\n return docWriteData;\n }).then(writeResult => {\n const newDocument = this.collection._docCache.getCachedRxDocument(writeResult);\n const attachmentDataOfId = writeResult._attachments[id];\n const attachment = fromStorageInstanceResult(\n id,\n attachmentDataOfId,\n newDocument\n );\n return attachment;\n });\n}\n\n/**\n * get an attachment of the document by its id\n */\nexport function getAttachment(\n this: RxDocument,\n id: string\n): RxAttachment | null {\n ensureSchemaSupportsAttachments(this);\n const docData: any = this._data;\n if (!docData._attachments || !docData._attachments[id])\n return null;\n\n const attachmentData = docData._attachments[id];\n const attachment = fromStorageInstanceResult(\n id,\n attachmentData,\n this\n );\n return attachment;\n}\n\n/**\n * returns all attachments of the document\n */\nexport function allAttachments(\n this: RxDocument\n): RxAttachment[] {\n ensureSchemaSupportsAttachments(this);\n const docData: any = this._data;\n\n // if there are no attachments, the field is missing\n if (!docData._attachments) {\n return [];\n }\n return Object.keys(docData._attachments)\n .map(id => {\n return fromStorageInstanceResult(\n id,\n docData._attachments[id],\n this\n );\n });\n}\n\nexport async function preMigrateDocument(\n data: {\n docData: RxDocumentData;\n oldCollection: any; // TODO\n }\n): Promise {\n const attachments = data.docData._attachments;\n if (attachments) {\n const newAttachments: { [attachmentId: string]: RxAttachmentWriteData; } = {};\n await Promise.all(\n Object.keys(attachments).map(async (attachmentId) => {\n const attachment: RxAttachmentData = attachments[attachmentId];\n const docPrimary: string = (data.docData as any)[data.oldCollection.schema.primaryPath];\n const rawAttachmentData = await data.oldCollection.storageInstance.getAttachmentData(\n docPrimary,\n attachmentId,\n attachment.digest\n );\n const digest = await 
data.oldCollection.database.hashFunction(rawAttachmentData);\n newAttachments[attachmentId] = {\n length: attachment.length,\n type: attachment.type,\n data: rawAttachmentData,\n digest\n };\n })\n );\n\n /**\n * Hooks mutate the input\n * instead of returning stuff\n */\n (data.docData as RxDocumentWriteData)._attachments = newAttachments;\n }\n}\n\nexport function postMigrateDocument(_action: any): Promise {\n /**\n * No longer needed because\n * we store the attachments data buffers directly in the document.\n */\n return PROMISE_RESOLVE_VOID;\n}\n\nexport const RxDBAttachmentsPlugin: RxPlugin = {\n name: 'attachments',\n rxdb: true,\n prototypes: {\n RxDocument: (proto: any) => {\n proto.putAttachment = putAttachment;\n proto.getAttachment = getAttachment;\n proto.allAttachments = allAttachments;\n Object.defineProperty(proto, 'allAttachments$', {\n get: function allAttachments$(this: RxDocument) {\n return this.$\n .pipe(\n map(rxDocument => Object.entries(\n rxDocument.toJSON(true)._attachments\n )),\n map(entries => {\n return (entries as any)\n .map(([id, attachmentData]: any) => {\n return fromStorageInstanceResult(\n id,\n attachmentData,\n this\n );\n });\n })\n );\n }\n });\n }\n },\n overwritable: {},\n hooks: {\n preMigrateDocument: {\n after: preMigrateDocument\n },\n postMigrateDocument: {\n after: postMigrateDocument\n }\n }\n};\n\n\nexport * from 
'./attachments-utils.ts';\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;AAAA,IAAAA,KAAA,GAAAC,OAAA;AAIA,IAAAC,MAAA,GAAAD,OAAA;AAiBA,IAAAE,iBAAA,GAAAF,OAAA;AAuPAG,MAAA,CAAAC,IAAA,CAAAF,iBAAA,EAAAG,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAJ,iBAAA,CAAAI,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAZ,iBAAA,CAAAI,GAAA;IAAA;EAAA;AAAA;AAnPA;AACA;AACA;AACA;AAHA,IAIaS,YAAY,GAAAJ,OAAA,CAAAI,YAAA;EAMrB,SAAAA,aAAY;IACRC,GAAG;IACHC,EAAE;IACFC,IAAI;IACJC,MAAM;IACNC;EACC,CAAC,EAAE;IACJ,IAAI,CAACJ,GAAG,GAAGA,GAAG;IACd,IAAI,CAACC,EAAE,GAAGA,EAAE;IACZ,IAAI,CAACC,IAAI,GAAGA,IAAI;IAChB,IAAI,CAACC,MAAM,GAAGA,MAAM;IACpB,IAAI,CAACC,MAAM,GAAGA,MAAM;IAEpB,IAAAC,2CAAyB,EAAC,IAAI,CAAC;EACnC;EAAC,IAAAC,MAAA,GAAAP,YAAA,CAAAR,SAAA;EAAAe,MAAA,CAEDC,MAAM,GAAN,SAAAA,OAAA,EAAwB;IACpB,OAAO,IAAI,CAACP,GAAG,CAACQ,UAAU,CAACC,qBAAqB,CAACC,QAAQ,CACrD,IAAI,CAACV,GAAG,CAACW,KAAK,EACdC,YAAY,IAAI;MACZ,OAAOA,YAAY,CAACC,YAAY,CAAC,IAAI,CAACZ,EAAE,CAAC;MACzC,OAAOW,YAAY;IACvB,CACJ,CAAC,CAACE,IAAI,CAAC,MAAM,CAAE,CAAC,CAAC;EACrB;;EAEA;AACJ;AACA,KAFI;EAAAR,MAAA,CAGMS,OAAO,GAAb,eAAAA,QAAA,EAA+B;IAC3B,IAAMC,eAAe,GAAG,MAAM,IAAI,CAAChB,GAAG,CAACQ,UAAU,CAACS,eAAe,CAACC,iBAAiB,CAC/E,IAAI,CAAClB,GAAG,CAACmB,OAAO,EAChB,IAAI,CAAClB,EAAE,EACP,IAAI,CAACG,MACT,CAAC;IACD,IAAMgB,GAAG,GAAG,MAAM,IAAAC,2BAAoB,EAClCL,eAAe,EACf,IAAI,CAACd,IACT,CAAC;IACD,OAAOkB,GAAG;EACd,CAAC;EAAAd,MAAA,CAEKgB,aAAa,GAAnB,eAAAA,cAAA,EAAuC;IACnC,IAAMC,IAAI,GAAG,MAAM,IAAI,CAACR,OAAO,CAAC,CAAC;IACjC,IAAMS,QAAQ,GAAG,MAAM,IAAAC,mBAAY,EAACF,IAAI,CAAC;IACzC,OAAOC,QAAQ;EACnB,CAAC;EAAA,OAAAzB,YAAA;AAAA;AAGE,SAAS2B,yBAAyBA,CACrCzB,EAAU,EACV0B,cAAgC,EAChCC,UAAiC,EACnC;EACE,OAAO,IAAI7B,YAAY,CAAC;IACpBC,GAAG,EAAE4B,UAAU;IACf3B,EAAE;IACFC,IAAI,EAAEyB,cAAc,CAACzB,IAAI;IACzBC,MAAM,EAAEwB,cAAc,CAACxB,MAAM;IAC7BC,MAAM,EAAEuB,cAAc,CAACvB;EAC3B,CAAC,CAAC;AACN;AAIO,eAAeyB,aAAaA,CAE/BF,cAAmC,EACd;EACrB,IAAAG,iDAA+
B,EAAC,IAAI,CAAC;EAErC,IAAMC,QAAQ,GAAG,IAAAC,kBAAW,EAACL,cAAc,CAACJ,IAAI,CAAC;EACjD,IAAMU,UAAU,GAAG,MAAM,IAAAC,yBAAkB,EAACP,cAAc,CAACJ,IAAI,CAAC;EAChE,IAAMnB,MAAM,GAAG,MAAM,IAAI,CAACI,UAAU,CAAC2B,QAAQ,CAACC,YAAY,CAACH,UAAU,CAAC;EAEtE,IAAMhC,EAAE,GAAG0B,cAAc,CAAC1B,EAAE;EAC5B,IAAMC,IAAI,GAAGyB,cAAc,CAACzB,IAAI;EAChC,IAAMqB,IAAI,GAAGU,UAAU;EAEvB,OAAO,IAAI,CAACzB,UAAU,CAACC,qBAAqB,CAACC,QAAQ,CACjD,IAAI,CAACC,KAAK,EACTC,YAA4C,IAAK;IAC9CA,YAAY,GAAG,IAAAyB,gBAAS,EAACzB,YAAY,CAAC;IACtCA,YAAY,CAACC,YAAY,GAAG,IAAAwB,gBAAS,EAACzB,YAAY,CAACC,YAAY,CAAC;IAChED,YAAY,CAACC,YAAY,CAACZ,EAAE,CAAC,GAAG;MAC5BE,MAAM,EAAE4B,QAAQ;MAChB7B,IAAI;MACJqB,IAAI;MACJnB;IACJ,CAAC;IACD,OAAOQ,YAAY;EACvB,CAAC,CAAC,CAACE,IAAI,CAACwB,WAAW,IAAI;IACnB,IAAMC,WAAW,GAAG,IAAI,CAAC/B,UAAU,CAACgC,SAAS,CAACC,mBAAmB,CAACH,WAAW,CAAC;IAC9E,IAAMI,kBAAkB,GAAGJ,WAAW,CAACzB,YAAY,CAACZ,EAAE,CAAC;IACvD,IAAM0C,UAAU,GAAGjB,yBAAyB,CACxCzB,EAAE,EACFyC,kBAAkB,EAClBH,WACJ,CAAC;IACD,OAAOI,UAAU;EACrB,CAAC,CAAC;AACV;;AAEA;AACA;AACA;AACO,SAASC,aAAaA,CAEzB3C,EAAU,EACS;EACnB,IAAA6B,iDAA+B,EAAC,IAAI,CAAC;EACrC,IAAMe,OAAY,GAAG,IAAI,CAAClC,KAAK;EAC/B,IAAI,CAACkC,OAAO,CAAChC,YAAY,IAAI,CAACgC,OAAO,CAAChC,YAAY,CAACZ,EAAE,CAAC,EAClD,OAAO,IAAI;EAEf,IAAM0B,cAAc,GAAGkB,OAAO,CAAChC,YAAY,CAACZ,EAAE,CAAC;EAC/C,IAAM0C,UAAU,GAAGjB,yBAAyB,CACxCzB,EAAE,EACF0B,cAAc,EACd,IACJ,CAAC;EACD,OAAOgB,UAAU;AACrB;;AAEA;AACA;AACA;AACO,SAASG,cAAcA,CAAA,EAEZ;EACd,IAAAhB,iDAA+B,EAAC,IAAI,CAAC;EACrC,IAAMe,OAAY,GAAG,IAAI,CAAClC,KAAK;;EAE/B;EACA,IAAI,CAACkC,OAAO,CAAChC,YAAY,EAAE;IACvB,OAAO,EAAE;EACb;EACA,OAAO1B,MAAM,CAACC,IAAI,CAACyD,OAAO,CAAChC,YAAY,CAAC,CACnCkC,GAAG,CAAC9C,EAAE,IAAI;IACP,OAAOyB,yBAAyB,CAC5BzB,EAAE,EACF4C,OAAO,CAAChC,YAAY,CAACZ,EAAE,CAAC,EACxB,IACJ,CAAC;EACL,CAAC,CAAC;AACV;AAEO,eAAe+C,kBAAkBA,CACpCzB,IAGC,EACY;EACb,IAAM0B,WAAW,GAAG1B,IAAI,CAACsB,OAAO,CAAChC,YAAY;EAC7C,IAAIoC,WAAW,EAAE;IACb,IAAMC,cAAkE,GAAG,CAAC,CAAC;IAC7E,MAAMC,OAAO,CAACC,GAAG,CACbjE,MAAM,CAACC,IAAI,CAAC6D,WAAW,CAAC,CAACF,GAAG,CAAC,MAAOM,YAAY,IAAK;MACjD,IAAMV,UAA4B,GAAGM,WAAW,CAACI,
YAAY,CAAC;MAC9D,IAAMC,UAAkB,GAAI/B,IAAI,CAACsB,OAAO,CAAStB,IAAI,CAACgC,aAAa,CAACC,MAAM,CAACC,WAAW,CAAC;MACvF,IAAMC,iBAAiB,GAAG,MAAMnC,IAAI,CAACgC,aAAa,CAACtC,eAAe,CAACC,iBAAiB,CAChFoC,UAAU,EACVD,YAAY,EACZV,UAAU,CAACvC,MACf,CAAC;MACD,IAAMA,MAAM,GAAG,MAAMmB,IAAI,CAACgC,aAAa,CAACpB,QAAQ,CAACC,YAAY,CAACsB,iBAAiB,CAAC;MAChFR,cAAc,CAACG,YAAY,CAAC,GAAG;QAC3BlD,MAAM,EAAEwC,UAAU,CAACxC,MAAM;QACzBD,IAAI,EAAEyC,UAAU,CAACzC,IAAI;QACrBqB,IAAI,EAAEmC,iBAAiB;QACvBtD;MACJ,CAAC;IACL,CAAC,CACL,CAAC;;IAED;AACR;AACA;AACA;IACSmB,IAAI,CAACsB,OAAO,CAAoChC,YAAY,GAAGqC,cAAc;EAClF;AACJ;AAEO,SAASS,mBAAmBA,CAACC,OAAY,EAAiB;EAC7D;AACJ;AACA;AACA;EACI,OAAOC,2BAAoB;AAC/B;AAEO,IAAMC,qBAA+B,GAAAnE,OAAA,CAAAmE,qBAAA,GAAG;EAC3CC,IAAI,EAAE,aAAa;EACnBC,IAAI,EAAE,IAAI;EACVC,UAAU,EAAE;IACRC,UAAU,EAAGC,KAAU,IAAK;MACxBA,KAAK,CAACtC,aAAa,GAAGA,aAAa;MACnCsC,KAAK,CAACvB,aAAa,GAAGA,aAAa;MACnCuB,KAAK,CAACrB,cAAc,GAAGA,cAAc;MACrC3D,MAAM,CAACS,cAAc,CAACuE,KAAK,EAAE,iBAAiB,EAAE;QAC5CrE,GAAG,EAAE,SAASsE,eAAeA,CAAA,EAAmB;UAC5C,OAAO,IAAI,CAACC,CAAC,CACRC,IAAI,CACD,IAAAvB,SAAG,EAACnB,UAAU,IAAIzC,MAAM,CAACoF,OAAO,CAC5B3C,UAAU,CAAC4C,MAAM,CAAC,IAAI,CAAC,CAAC3D,YAC5B,CAAC,CAAC,EACF,IAAAkC,SAAG,EAACwB,OAAO,IAAI;YACX,OAAQA,OAAO,CACVxB,GAAG,CAAC,CAAC,CAAC9C,EAAE,EAAE0B,cAAc,CAAM,KAAK;cAChC,OAAOD,yBAAyB,CAC5BzB,EAAE,EACF0B,cAAc,EACd,IACJ,CAAC;YACL,CAAC,CAAC;UACV,CAAC,CACL,CAAC;QACT;MACJ,CAAC,CAAC;IACN;EACJ,CAAC;EACD8C,YAAY,EAAE,CAAC,CAAC;EAChBC,KAAK,EAAE;IACH1B,kBAAkB,EAAE;MAChB2B,KAAK,EAAE3B;IACX,CAAC;IACDW,mBAAmB,EAAE;MACjBgB,KAAK,EAAEhB;IACX;EACJ;AACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/backup/file-util.js b/dist/cjs/plugins/backup/file-util.js deleted file mode 100644 index f7e5087f09e..00000000000 --- a/dist/cjs/plugins/backup/file-util.js +++ /dev/null @@ -1,103 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.clearFolder = clearFolder; -exports.deleteFolder = deleteFolder; -exports.documentFolder = documentFolder; 
-exports.ensureFolderExists = ensureFolderExists; -exports.getMeta = getMeta; -exports.metaFileLocation = metaFileLocation; -exports.prepareFolders = prepareFolders; -exports.setMeta = setMeta; -exports.writeJsonToFile = writeJsonToFile; -exports.writeToFile = writeToFile; -var fs = _interopRequireWildcard(require("node:fs")); -var path = _interopRequireWildcard(require("node:path")); -var _index = require("../../plugins/utils/index.js"); -function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); } -function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && {}.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? 
Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; } -/** - * ensure that the given folder exists - */ -function ensureFolderExists(folderPath) { - if (!fs.existsSync(folderPath)) { - fs.mkdirSync(folderPath, { - recursive: true - }); - } -} - -/** - * deletes and recreates the folder - */ -function clearFolder(folderPath) { - deleteFolder(folderPath); - ensureFolderExists(folderPath); -} -function deleteFolder(folderPath) { - // only remove if exists to not raise warning - if (fs.existsSync(folderPath)) { - fs.rmdirSync(folderPath, { - recursive: true - }); - } -} -function prepareFolders(database, options) { - ensureFolderExists(options.directory); - var metaLoc = metaFileLocation(options); - if (!fs.existsSync(metaLoc)) { - var currentTime = (0, _index.now)(); - var metaData = { - createdAt: currentTime, - updatedAt: currentTime, - collectionStates: {} - }; - fs.writeFileSync(metaLoc, JSON.stringify(metaData), 'utf-8'); - } - Object.keys(database.collections).forEach(collectionName => { - ensureFolderExists(path.join(options.directory, collectionName)); - }); -} -async function writeToFile(location, data) { - if (typeof data !== 'string') { - data = await (0, _index.blobToString)(data); - } - return new Promise(function (res, rej) { - fs.writeFile(location, data, 'utf-8', err => { - if (err) { - rej(err); - } else { - res(); - } - }); - }); -} -function writeJsonToFile(location, data) { - return writeToFile(location, JSON.stringify(data)); -} -function metaFileLocation(options) { - return path.join(options.directory, 'backup_meta.json'); -} -function getMeta(options) { - var loc = metaFileLocation(options); - return new Promise((res, rej) => { - fs.readFile(loc, 'utf-8', (err, data) => { - if (err) { - rej(err); - } else { - var metaContent = JSON.parse(data); - res(metaContent); - } - }); - }); -} -function setMeta(options, meta) { - var loc = metaFileLocation(options); - return writeJsonToFile(loc, meta); -} -function 
documentFolder(options, docId) { - return path.join(options.directory, docId); -} -//# sourceMappingURL=file-util.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/backup/file-util.js.map b/dist/cjs/plugins/backup/file-util.js.map deleted file mode 100644 index 4df111a9c62..00000000000 --- a/dist/cjs/plugins/backup/file-util.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"file-util.js","names":["fs","_interopRequireWildcard","require","path","_index","_getRequireWildcardCache","e","WeakMap","r","t","__esModule","default","has","get","n","__proto__","a","Object","defineProperty","getOwnPropertyDescriptor","u","hasOwnProperty","call","i","set","ensureFolderExists","folderPath","existsSync","mkdirSync","recursive","clearFolder","deleteFolder","rmdirSync","prepareFolders","database","options","directory","metaLoc","metaFileLocation","currentTime","now","metaData","createdAt","updatedAt","collectionStates","writeFileSync","JSON","stringify","keys","collections","forEach","collectionName","join","writeToFile","location","data","blobToString","Promise","res","rej","writeFile","err","writeJsonToFile","getMeta","loc","readFile","metaContent","parse","setMeta","meta","documentFolder","docId"],"sources":["../../../../src/plugins/backup/file-util.ts"],"sourcesContent":["import * as fs from 'node:fs';\nimport * as path from 'node:path';\nimport type {\n BackupMetaFileContent,\n BackupOptions,\n RxDatabase\n} from '../../types/index.d.ts';\nimport { blobToString, now } from '../../plugins/utils/index.ts';\n\n/**\n * ensure that the given folder exists\n */\nexport function ensureFolderExists(folderPath: string): void {\n if (!fs.existsSync(folderPath)) {\n fs.mkdirSync(folderPath, { recursive: true });\n }\n}\n\n/**\n * deletes and recreates the folder\n */\nexport function clearFolder(folderPath: string): void {\n deleteFolder(folderPath);\n ensureFolderExists(folderPath);\n}\n\nexport function deleteFolder(folderPath: string): void {\n // only remove if 
exists to not raise warning\n if (fs.existsSync(folderPath)) {\n fs.rmdirSync(folderPath, { recursive: true });\n }\n}\n\nexport function prepareFolders(\n database: RxDatabase,\n options: BackupOptions\n) {\n ensureFolderExists(options.directory);\n\n const metaLoc = metaFileLocation(options);\n\n if (!fs.existsSync(metaLoc)) {\n const currentTime = now();\n const metaData: BackupMetaFileContent = {\n createdAt: currentTime,\n updatedAt: currentTime,\n collectionStates: {}\n };\n fs.writeFileSync(metaLoc, JSON.stringify(metaData), 'utf-8');\n }\n\n Object.keys(database.collections).forEach(collectionName => {\n ensureFolderExists(\n path.join(\n options.directory,\n collectionName\n )\n );\n });\n}\n\nexport async function writeToFile(\n location: string,\n data: string | Blob\n): Promise {\n if (typeof data !== 'string') {\n data = await blobToString(data);\n }\n return new Promise(function (res, rej) {\n fs.writeFile(\n location,\n data as string,\n 'utf-8',\n (err) => {\n if (err) {\n rej(err);\n } else {\n res();\n }\n }\n );\n });\n}\n\nexport function writeJsonToFile(\n location: string,\n data: any\n): Promise {\n return writeToFile(\n location,\n JSON.stringify(data)\n );\n}\n\nexport function metaFileLocation(options: BackupOptions): string {\n return path.join(\n options.directory,\n 'backup_meta.json'\n );\n}\n\nexport function getMeta(options: BackupOptions): Promise {\n const loc = metaFileLocation(options);\n return new Promise((res, rej) => {\n fs.readFile(loc, 'utf-8', (err, data) => {\n if (err) {\n rej(err);\n } else {\n const metaContent = JSON.parse(data);\n res(metaContent);\n }\n });\n });\n}\n\nexport function setMeta(\n options: BackupOptions,\n meta: BackupMetaFileContent\n): Promise {\n const loc = metaFileLocation(options);\n return writeJsonToFile(loc, meta);\n}\n\nexport function documentFolder(\n options: BackupOptions,\n docId: string\n): string {\n return path.join(\n options.directory,\n docId\n 
);\n}\n"],"mappings":";;;;;;;;;;;;;;;AAAA,IAAAA,EAAA,GAAAC,uBAAA,CAAAC,OAAA;AACA,IAAAC,IAAA,GAAAF,uBAAA,CAAAC,OAAA;AAMA,IAAAE,MAAA,GAAAF,OAAA;AAAiE,SAAAG,yBAAAC,CAAA,6BAAAC,OAAA,mBAAAC,CAAA,OAAAD,OAAA,IAAAE,CAAA,OAAAF,OAAA,YAAAF,wBAAA,YAAAA,CAAAC,CAAA,WAAAA,CAAA,GAAAG,CAAA,GAAAD,CAAA,KAAAF,CAAA;AAAA,SAAAL,wBAAAK,CAAA,EAAAE,CAAA,SAAAA,CAAA,IAAAF,CAAA,IAAAA,CAAA,CAAAI,UAAA,SAAAJ,CAAA,eAAAA,CAAA,uBAAAA,CAAA,yBAAAA,CAAA,WAAAK,OAAA,EAAAL,CAAA,QAAAG,CAAA,GAAAJ,wBAAA,CAAAG,CAAA,OAAAC,CAAA,IAAAA,CAAA,CAAAG,GAAA,CAAAN,CAAA,UAAAG,CAAA,CAAAI,GAAA,CAAAP,CAAA,OAAAQ,CAAA,KAAAC,SAAA,UAAAC,CAAA,GAAAC,MAAA,CAAAC,cAAA,IAAAD,MAAA,CAAAE,wBAAA,WAAAC,CAAA,IAAAd,CAAA,oBAAAc,CAAA,OAAAC,cAAA,CAAAC,IAAA,CAAAhB,CAAA,EAAAc,CAAA,SAAAG,CAAA,GAAAP,CAAA,GAAAC,MAAA,CAAAE,wBAAA,CAAAb,CAAA,EAAAc,CAAA,UAAAG,CAAA,KAAAA,CAAA,CAAAV,GAAA,IAAAU,CAAA,CAAAC,GAAA,IAAAP,MAAA,CAAAC,cAAA,CAAAJ,CAAA,EAAAM,CAAA,EAAAG,CAAA,IAAAT,CAAA,CAAAM,CAAA,IAAAd,CAAA,CAAAc,CAAA,YAAAN,CAAA,CAAAH,OAAA,GAAAL,CAAA,EAAAG,CAAA,IAAAA,CAAA,CAAAe,GAAA,CAAAlB,CAAA,EAAAQ,CAAA,GAAAA,CAAA;AAEjE;AACA;AACA;AACO,SAASW,kBAAkBA,CAACC,UAAkB,EAAQ;EACzD,IAAI,CAAC1B,EAAE,CAAC2B,UAAU,CAACD,UAAU,CAAC,EAAE;IAC5B1B,EAAE,CAAC4B,SAAS,CAACF,UAAU,EAAE;MAAEG,SAAS,EAAE;IAAK,CAAC,CAAC;EACjD;AACJ;;AAEA;AACA;AACA;AACO,SAASC,WAAWA,CAACJ,UAAkB,EAAQ;EAClDK,YAAY,CAACL,UAAU,CAAC;EACxBD,kBAAkB,CAACC,UAAU,CAAC;AAClC;AAEO,SAASK,YAAYA,CAACL,UAAkB,EAAQ;EACnD;EACA,IAAI1B,EAAE,CAAC2B,UAAU,CAACD,UAAU,CAAC,EAAE;IAC3B1B,EAAE,CAACgC,SAAS,CAACN,UAAU,EAAE;MAAEG,SAAS,EAAE;IAAK,CAAC,CAAC;EACjD;AACJ;AAEO,SAASI,cAAcA,CAC1BC,QAAoB,EACpBC,OAAsB,EACxB;EACEV,kBAAkB,CAACU,OAAO,CAACC,SAAS,CAAC;EAErC,IAAMC,OAAO,GAAGC,gBAAgB,CAACH,OAAO,CAAC;EAEzC,IAAI,CAACnC,EAAE,CAAC2B,UAAU,CAACU,OAAO,CAAC,EAAE;IACzB,IAAME,WAAW,GAAG,IAAAC,UAAG,EAAC,CAAC;IACzB,IAAMC,QAA+B,GAAG;MACpCC,SAAS,EAAEH,WAAW;MACtBI,SAAS,EAAEJ,WAAW;MACtBK,gBAAgB,EAAE,CAAC;IACvB,CAAC;IACD5C,EAAE,CAAC6C,aAAa,CAACR,OAAO,EAAES,IAAI,CAACC,SAAS,CAACN,QAAQ,CAAC,EAAE,OAAO,CAAC;EAChE;EAEAxB,MAAM,CAAC+B,IAAI,CAACd,QAAQ,CAACe,WAAW,CAAC,CAACC,OA
AO,CAACC,cAAc,IAAI;IACxD1B,kBAAkB,CACdtB,IAAI,CAACiD,IAAI,CACLjB,OAAO,CAACC,SAAS,EACjBe,cACJ,CACJ,CAAC;EACL,CAAC,CAAC;AACN;AAEO,eAAeE,WAAWA,CAC7BC,QAAgB,EAChBC,IAAmB,EACN;EACb,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;IAC1BA,IAAI,GAAG,MAAM,IAAAC,mBAAY,EAACD,IAAI,CAAC;EACnC;EACA,OAAO,IAAIE,OAAO,CAAC,UAAUC,GAAG,EAAEC,GAAG,EAAE;IACnC3D,EAAE,CAAC4D,SAAS,CACRN,QAAQ,EACRC,IAAI,EACJ,OAAO,EACNM,GAAG,IAAK;MACL,IAAIA,GAAG,EAAE;QACLF,GAAG,CAACE,GAAG,CAAC;MACZ,CAAC,MAAM;QACHH,GAAG,CAAC,CAAC;MACT;IACJ,CACJ,CAAC;EACL,CAAC,CAAC;AACN;AAEO,SAASI,eAAeA,CAC3BR,QAAgB,EAChBC,IAAS,EACI;EACb,OAAOF,WAAW,CACdC,QAAQ,EACRR,IAAI,CAACC,SAAS,CAACQ,IAAI,CACvB,CAAC;AACL;AAEO,SAASjB,gBAAgBA,CAACH,OAAsB,EAAU;EAC7D,OAAOhC,IAAI,CAACiD,IAAI,CACZjB,OAAO,CAACC,SAAS,EACjB,kBACJ,CAAC;AACL;AAEO,SAAS2B,OAAOA,CAAC5B,OAAsB,EAAkC;EAC5E,IAAM6B,GAAG,GAAG1B,gBAAgB,CAACH,OAAO,CAAC;EACrC,OAAO,IAAIsB,OAAO,CAAC,CAACC,GAAG,EAAEC,GAAG,KAAK;IAC7B3D,EAAE,CAACiE,QAAQ,CAACD,GAAG,EAAE,OAAO,EAAE,CAACH,GAAG,EAAEN,IAAI,KAAK;MACrC,IAAIM,GAAG,EAAE;QACLF,GAAG,CAACE,GAAG,CAAC;MACZ,CAAC,MAAM;QACH,IAAMK,WAAW,GAAGpB,IAAI,CAACqB,KAAK,CAACZ,IAAI,CAAC;QACpCG,GAAG,CAACQ,WAAW,CAAC;MACpB;IACJ,CAAC,CAAC;EACN,CAAC,CAAC;AACN;AAEO,SAASE,OAAOA,CACnBjC,OAAsB,EACtBkC,IAA2B,EACd;EACb,IAAML,GAAG,GAAG1B,gBAAgB,CAACH,OAAO,CAAC;EACrC,OAAO2B,eAAe,CAACE,GAAG,EAAEK,IAAI,CAAC;AACrC;AAEO,SAASC,cAAcA,CAC1BnC,OAAsB,EACtBoC,KAAa,EACP;EACN,OAAOpE,IAAI,CAACiD,IAAI,CACZjB,OAAO,CAACC,SAAS,EACjBmC,KACJ,CAAC;AACL","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/backup/index.js b/dist/cjs/plugins/backup/index.js deleted file mode 100644 index a7ee8c49e9b..00000000000 --- a/dist/cjs/plugins/backup/index.js +++ /dev/null @@ -1,207 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _exportNames = { - backupSingleDocument: true, - RxBackupState: true, - backup: true, - RxDBBackupPlugin: true -}; -exports.RxDBBackupPlugin = exports.RxBackupState = void 0; -exports.backup = backup; -exports.backupSingleDocument 
= backupSingleDocument; -var path = _interopRequireWildcard(require("node:path")); -var _rxjs = require("rxjs"); -var _index = require("../../plugins/utils/index.js"); -var _fileUtil = require("./file-util.js"); -Object.keys(_fileUtil).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _fileUtil[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _fileUtil[key]; - } - }); -}); -var _rxStorageHelper = require("../../rx-storage-helper.js"); -function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); } -function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && {}.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? 
Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; } -/** - * Backups a single documents, - * returns the paths to all written files - */ -async function backupSingleDocument(rxDocument, options) { - var data = rxDocument.toJSON(true); - var writtenFiles = []; - var docFolder = (0, _fileUtil.documentFolder)(options, rxDocument.primary); - await (0, _fileUtil.clearFolder)(docFolder); - var fileLocation = path.join(docFolder, 'document.json'); - await (0, _fileUtil.writeJsonToFile)(fileLocation, data); - writtenFiles.push(fileLocation); - if (options.attachments) { - var attachmentsFolder = path.join(docFolder, 'attachments'); - (0, _fileUtil.ensureFolderExists)(attachmentsFolder); - var attachments = rxDocument.allAttachments(); - await Promise.all(attachments.map(async attachment => { - var content = await attachment.getData(); - var attachmentFileLocation = path.join(attachmentsFolder, attachment.id); - await (0, _fileUtil.writeToFile)(attachmentFileLocation, content); - writtenFiles.push(attachmentFileLocation); - })); - } - return writtenFiles; -} -var BACKUP_STATES_BY_DB = new WeakMap(); -function addToBackupStates(db, state) { - var ar = (0, _index.getFromMapOrCreate)(BACKUP_STATES_BY_DB, db, () => []); - ar.push(state); -} -var RxBackupState = exports.RxBackupState = /*#__PURE__*/function () { - function RxBackupState(database, options) { - this.isStopped = false; - this.subs = []; - this.persistRunning = _index.PROMISE_RESOLVE_VOID; - this.initialReplicationDone$ = new _rxjs.BehaviorSubject(false); - this.internalWriteEvents$ = new _rxjs.Subject(); - this.writeEvents$ = this.internalWriteEvents$.asObservable(); - this.database = database; - this.options = options; - if (!this.options.batchSize) { - this.options.batchSize = 10; - } - addToBackupStates(database, this); - (0, _fileUtil.prepareFolders)(database, options); - } - - /** - * Persists all data from all collections, - * beginning from the oldest sequence 
checkpoint - * to the newest one. - * Do not call this while it is already running. - * Returns true if there are more documents to process - */ - var _proto = RxBackupState.prototype; - _proto.persistOnce = function persistOnce() { - return this.persistRunning = this.persistRunning.then(() => this._persistOnce()); - }; - _proto._persistOnce = async function _persistOnce() { - var _this = this; - var meta = await (0, _fileUtil.getMeta)(this.options); - await Promise.all(Object.entries(this.database.collections).map(async ([collectionName, collection]) => { - var primaryKey = collection.schema.primaryPath; - var processedDocuments = new Set(); - await this.database.requestIdlePromise(); - if (!meta.collectionStates[collectionName]) { - meta.collectionStates[collectionName] = {}; - } - var lastCheckpoint = meta.collectionStates[collectionName].checkpoint; - var hasMore = true; - var _loop = async function () { - await _this.database.requestIdlePromise(); - var changesResult = await (0, _rxStorageHelper.getChangedDocumentsSince)(collection.storageInstance, _this.options.batchSize ? _this.options.batchSize : 0, lastCheckpoint); - lastCheckpoint = changesResult.documents.length > 0 ? 
changesResult.checkpoint : lastCheckpoint; - meta.collectionStates[collectionName].checkpoint = lastCheckpoint; - var docIds = changesResult.documents.map(doc => doc[primaryKey]).filter(id => { - if (processedDocuments.has(id)) { - return false; - } else { - processedDocuments.add(id); - return true; - } - }).filter((elem, pos, arr) => arr.indexOf(elem) === pos); // unique - await _this.database.requestIdlePromise(); - var docs = await collection.findByIds(docIds).exec(); - if (docs.size === 0) { - hasMore = false; - return 1; // continue - } - await Promise.all(Array.from(docs.values()).map(async doc => { - var writtenFiles = await backupSingleDocument(doc, _this.options); - _this.internalWriteEvents$.next({ - collectionName: collection.name, - documentId: doc.primary, - files: writtenFiles, - deleted: false - }); - })); - // handle deleted documents - await Promise.all(docIds.filter(docId => !docs.has(docId)).map(async docId => { - await (0, _fileUtil.deleteFolder)((0, _fileUtil.documentFolder)(_this.options, docId)); - _this.internalWriteEvents$.next({ - collectionName: collection.name, - documentId: docId, - files: [], - deleted: true - }); - })); - }; - while (hasMore && !this.isStopped) { - if (await _loop()) continue; - } - meta.collectionStates[collectionName].checkpoint = lastCheckpoint; - await (0, _fileUtil.setMeta)(this.options, meta); - })); - if (!this.initialReplicationDone$.getValue()) { - this.initialReplicationDone$.next(true); - } - }; - _proto.watchForChanges = function watchForChanges() { - var collections = Object.values(this.database.collections); - collections.forEach(collection => { - var changes$ = collection.storageInstance.changeStream(); - var sub = changes$.subscribe(() => { - this.persistOnce(); - }); - this.subs.push(sub); - }); - } - - /** - * Returns a promise that resolves when the initial backup is done - * and the filesystem is in sync with the database state - */; - _proto.awaitInitialBackup = function awaitInitialBackup() { - 
return (0, _rxjs.firstValueFrom)(this.initialReplicationDone$.pipe((0, _rxjs.filter)(v => !!v), (0, _rxjs.map)(() => true))); - }; - _proto.cancel = function cancel() { - if (this.isStopped) { - return _index.PROMISE_RESOLVE_FALSE; - } - this.isStopped = true; - this.subs.forEach(sub => sub.unsubscribe()); - return _index.PROMISE_RESOLVE_TRUE; - }; - return RxBackupState; -}(); -function backup(options) { - var backupState = new RxBackupState(this, options); - backupState.persistOnce(); - if (options.live) { - backupState.watchForChanges(); - } - return backupState; -} -var RxDBBackupPlugin = exports.RxDBBackupPlugin = { - name: 'backup', - rxdb: true, - prototypes: { - RxDatabase(proto) { - proto.backup = backup; - } - }, - hooks: { - preDestroyRxDatabase: { - after: function preDestroyRxDatabase(db) { - var states = BACKUP_STATES_BY_DB.get(db); - if (states) { - states.forEach(state => state.cancel()); - } - } - } - } -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/backup/index.js.map b/dist/cjs/plugins/backup/index.js.map deleted file mode 100644 index 86627b9be54..00000000000 --- a/dist/cjs/plugins/backup/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","names":["path","_interopRequireWildcard","require","_rxjs","_index","_fileUtil","Object","keys","forEach","key","prototype","hasOwnProperty","call","_exportNames","exports","defineProperty","enumerable","get","_rxStorageHelper","_getRequireWildcardCache","e","WeakMap","r","t","__esModule","default","has","n","__proto__","a","getOwnPropertyDescriptor","u","i","set","backupSingleDocument","rxDocument","options","data","toJSON","writtenFiles","docFolder","documentFolder","primary","clearFolder","fileLocation","join","writeJsonToFile","push","attachments","attachmentsFolder","ensureFolderExists","allAttachments","Promise","all","map","attachment","content","getData","attachmentFileLocation","id","writeToFile","BACKUP_STATES_BY_DB","addToBackupStates","db","state","ar","getFromMapOrCreate","RxBackupState","database","isStopped","subs","persistRunning","PROMISE_RESOLVE_VOID","initialReplicationDone$","BehaviorSubject","internalWriteEvents$","Subject","writeEvents$","asObservable","batchSize","prepareFolders","_proto","persistOnce","then","_persistOnce","_this","meta","getMeta","entries","collections","collectionName","collection","primaryKey","schema","primaryPath","processedDocuments","Set","requestIdlePromise","collectionStates","lastCheckpoint","checkpoint","hasMore","_loop","changesResult","getChangedDocumentsSince","storageInstance","documents","length","docIds","doc","filter","add","elem","pos","arr","indexOf","docs","findByIds","exec","size","Array","from","values","next","name","documentId","files","deleted","docId","deleteFolder","setMeta","getValue","watchForChanges","changes$","changeStream","sub","subscribe","awaitInitialBackup","firstValueFrom","pipe","v","cancel","PROMISE_RESOLVE_FALSE","unsubscribe","PROMISE_RESOLVE_TRUE","backup","backupState","live","RxDBBackupPlugin","rxdb","prototypes","RxDatabase","proto","hooks","preDestroyRxDatabase","after","states"],"sources":["../../../../src/plugins/backup/index.ts"],"sourcesConte
nt":["import * as path from 'node:path';\nimport {\n BehaviorSubject,\n firstValueFrom,\n Observable,\n Subject,\n Subscription\n} from 'rxjs';\nimport {\n filter,\n map\n} from 'rxjs';\nimport type {\n BackupOptions,\n RxBackupWriteEvent,\n RxCollection,\n RxDatabase,\n RxDocument,\n RxPlugin\n} from '../../types/index.d.ts';\nimport {\n getFromMapOrCreate,\n PROMISE_RESOLVE_FALSE,\n PROMISE_RESOLVE_TRUE,\n PROMISE_RESOLVE_VOID\n} from '../../plugins/utils/index.ts';\nimport {\n clearFolder,\n deleteFolder,\n documentFolder,\n ensureFolderExists,\n getMeta,\n prepareFolders,\n setMeta,\n writeJsonToFile,\n writeToFile\n} from './file-util.ts';\nimport { getChangedDocumentsSince } from '../../rx-storage-helper.ts';\n\n\n/**\n * Backups a single documents,\n * returns the paths to all written files\n */\nexport async function backupSingleDocument(\n rxDocument: RxDocument,\n options: BackupOptions\n): Promise {\n const data = rxDocument.toJSON(true);\n const writtenFiles: string[] = [];\n\n const docFolder = documentFolder(options, rxDocument.primary);\n await clearFolder(docFolder);\n\n const fileLocation = path.join(\n docFolder,\n 'document.json'\n );\n await writeJsonToFile(fileLocation, data);\n writtenFiles.push(fileLocation);\n\n if (options.attachments) {\n const attachmentsFolder = path.join(\n docFolder,\n 'attachments'\n );\n ensureFolderExists(attachmentsFolder);\n const attachments = (rxDocument as RxDocument).allAttachments();\n await Promise.all(\n attachments\n .map(async (attachment) => {\n const content = await attachment.getData();\n const attachmentFileLocation = path.join(\n attachmentsFolder,\n attachment.id\n );\n await writeToFile(attachmentFileLocation, content);\n writtenFiles.push(attachmentFileLocation);\n })\n );\n }\n\n return writtenFiles;\n}\n\nconst BACKUP_STATES_BY_DB: WeakMap = new WeakMap();\nfunction addToBackupStates(db: RxDatabase, state: RxBackupState) {\n const ar = getFromMapOrCreate(\n BACKUP_STATES_BY_DB,\n db,\n () => 
[]\n );\n ar.push(state);\n}\n\nexport class RxBackupState {\n public isStopped: boolean = false;\n private subs: Subscription[] = [];\n private persistRunning: Promise = PROMISE_RESOLVE_VOID;\n private initialReplicationDone$: BehaviorSubject = new BehaviorSubject(false as any);\n\n private readonly internalWriteEvents$: Subject = new Subject();\n public readonly writeEvents$: Observable = this.internalWriteEvents$.asObservable();\n\n constructor(\n public readonly database: RxDatabase,\n public readonly options: BackupOptions\n ) {\n if (!this.options.batchSize) {\n this.options.batchSize = 10;\n }\n addToBackupStates(database, this);\n prepareFolders(database, options);\n }\n\n /**\n * Persists all data from all collections,\n * beginning from the oldest sequence checkpoint\n * to the newest one.\n * Do not call this while it is already running.\n * Returns true if there are more documents to process\n */\n public persistOnce() {\n return this.persistRunning = this.persistRunning.then(() => this._persistOnce());\n }\n\n public async _persistOnce() {\n const meta = await getMeta(this.options);\n\n await Promise.all(\n Object\n .entries(this.database.collections)\n .map(async ([collectionName, collection]) => {\n const primaryKey = collection.schema.primaryPath;\n const processedDocuments: Set = new Set();\n\n await this.database.requestIdlePromise();\n\n if (!meta.collectionStates[collectionName]) {\n meta.collectionStates[collectionName] = {};\n }\n let lastCheckpoint = meta.collectionStates[collectionName].checkpoint;\n\n let hasMore = true;\n while (hasMore && !this.isStopped) {\n await this.database.requestIdlePromise();\n const changesResult = await getChangedDocumentsSince(\n collection.storageInstance,\n this.options.batchSize ? this.options.batchSize : 0,\n lastCheckpoint\n );\n lastCheckpoint = changesResult.documents.length > 0 ? 
changesResult.checkpoint : lastCheckpoint;\n meta.collectionStates[collectionName].checkpoint = lastCheckpoint;\n\n const docIds: string[] = changesResult.documents\n .map(doc => doc[primaryKey])\n .filter(id => {\n if (\n processedDocuments.has(id)\n ) {\n return false;\n } else {\n processedDocuments.add(id);\n return true;\n }\n })\n .filter((elem, pos, arr) => arr.indexOf(elem) === pos); // unique\n await this.database.requestIdlePromise();\n\n const docs: Map = await collection.findByIds(docIds).exec();\n if (docs.size === 0) {\n hasMore = false;\n continue;\n }\n await Promise.all(\n Array\n .from(docs.values())\n .map(async (doc) => {\n const writtenFiles = await backupSingleDocument(doc, this.options);\n this.internalWriteEvents$.next({\n collectionName: collection.name,\n documentId: doc.primary,\n files: writtenFiles,\n deleted: false\n });\n })\n );\n // handle deleted documents\n await Promise.all(\n docIds\n .filter(docId => !docs.has(docId))\n .map(async (docId) => {\n await deleteFolder(documentFolder(this.options, docId));\n this.internalWriteEvents$.next({\n collectionName: collection.name,\n documentId: docId,\n files: [],\n deleted: true\n });\n })\n );\n }\n meta.collectionStates[collectionName].checkpoint = lastCheckpoint;\n await setMeta(this.options, meta);\n })\n );\n\n if (!this.initialReplicationDone$.getValue()) {\n this.initialReplicationDone$.next(true);\n }\n }\n\n public watchForChanges() {\n const collections: RxCollection[] = Object.values(this.database.collections);\n collections.forEach(collection => {\n const changes$ = collection.storageInstance.changeStream();\n const sub = changes$.subscribe(() => {\n this.persistOnce();\n });\n this.subs.push(sub);\n });\n }\n\n /**\n * Returns a promise that resolves when the initial backup is done\n * and the filesystem is in sync with the database state\n */\n public awaitInitialBackup(): Promise {\n return firstValueFrom(\n this.initialReplicationDone$.pipe(\n filter(v => !!v),\n map(() 
=> true)\n )\n );\n }\n\n cancel(): Promise {\n if (this.isStopped) {\n return PROMISE_RESOLVE_FALSE;\n }\n this.isStopped = true;\n this.subs.forEach(sub => sub.unsubscribe());\n return PROMISE_RESOLVE_TRUE;\n }\n}\n\n\nexport function backup(\n this: RxDatabase,\n options: BackupOptions\n): RxBackupState {\n const backupState = new RxBackupState(this, options);\n backupState.persistOnce();\n\n if (options.live) {\n backupState.watchForChanges();\n }\n\n return backupState;\n}\n\nexport * from './file-util.ts';\nexport const RxDBBackupPlugin: RxPlugin = {\n name: 'backup',\n rxdb: true,\n prototypes: {\n RxDatabase(proto: any) {\n proto.backup = backup;\n }\n },\n hooks: {\n preDestroyRxDatabase: {\n after: function preDestroyRxDatabase(db: RxDatabase) {\n const states = BACKUP_STATES_BY_DB.get(db);\n if (states) {\n states.forEach(state => state.cancel());\n }\n }\n }\n }\n};\n"],"mappings":";;;;;;;;;;;;;;AAAA,IAAAA,IAAA,GAAAC,uBAAA,CAAAC,OAAA;AACA,IAAAC,KAAA,GAAAD,OAAA;AAmBA,IAAAE,MAAA,GAAAF,OAAA;AAMA,IAAAG,SAAA,GAAAH,OAAA;AA2OAI,MAAA,CAAAC,IAAA,CAAAF,SAAA,EAAAG,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAJ,SAAA,CAAAI,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAZ,SAAA,CAAAI,GAAA;IAAA;EAAA;AAAA;AAhOA,IAAAS,gBAAA,GAAAhB,OAAA;AAAsE,SAAAiB,yBAAAC,CAAA,6BAAAC,OAAA,mBAAAC,CAAA,OAAAD,OAAA,IAAAE,CAAA,OAAAF,OAAA,YAAAF,wBAAA,YAAAA,CAAAC,CAAA,WAAAA,CAAA,GAAAG,CAAA,GAAAD,CAAA,KAAAF,CAAA;AAAA,SAAAnB,wBAAAmB,CAAA,EAAAE,CAAA,SAAAA,CAAA,IAAAF,CAAA,IAAAA,CAAA,CAAAI,UAAA,SAAAJ,CAAA,eAAAA,CAAA,uBAAAA,CAAA,yBAAAA,CAAA,WAAAK,OAAA,EAAAL,CAAA,QAAAG,CAAA,GAAAJ,wBAAA,CAAAG,CAAA,OAAAC,CAAA,IAAAA,CAAA,CAAAG,GAAA,CAAAN,CAAA,UAAAG,CAAA,CAAAN,GAAA,CAAAG,CAAA,OAAAO,CAAA,KAAAC,SAAA,UAAAC,CAAA,GAAAvB,MAAA,CAAAS,cAAA,IAAAT,MAAA,CAAAwB,wBAAA,WAAAC,CAAA,IAAAX,CAAA,oBAAAW,CAAA,OAAApB,cAAA,CAAAC,IAAA,CAAAQ,CAAA,EAAAW,CAAA,SAAAC,CAAA
,GAAAH,CAAA,GAAAvB,MAAA,CAAAwB,wBAAA,CAAAV,CAAA,EAAAW,CAAA,UAAAC,CAAA,KAAAA,CAAA,CAAAf,GAAA,IAAAe,CAAA,CAAAC,GAAA,IAAA3B,MAAA,CAAAS,cAAA,CAAAY,CAAA,EAAAI,CAAA,EAAAC,CAAA,IAAAL,CAAA,CAAAI,CAAA,IAAAX,CAAA,CAAAW,CAAA,YAAAJ,CAAA,CAAAF,OAAA,GAAAL,CAAA,EAAAG,CAAA,IAAAA,CAAA,CAAAU,GAAA,CAAAb,CAAA,EAAAO,CAAA,GAAAA,CAAA;AAGtE;AACA;AACA;AACA;AACO,eAAeO,oBAAoBA,CACtCC,UAAgC,EAChCC,OAAsB,EACL;EACjB,IAAMC,IAAI,GAAGF,UAAU,CAACG,MAAM,CAAC,IAAI,CAAC;EACpC,IAAMC,YAAsB,GAAG,EAAE;EAEjC,IAAMC,SAAS,GAAG,IAAAC,wBAAc,EAACL,OAAO,EAAED,UAAU,CAACO,OAAO,CAAC;EAC7D,MAAM,IAAAC,qBAAW,EAACH,SAAS,CAAC;EAE5B,IAAMI,YAAY,GAAG5C,IAAI,CAAC6C,IAAI,CAC1BL,SAAS,EACT,eACJ,CAAC;EACD,MAAM,IAAAM,yBAAe,EAACF,YAAY,EAAEP,IAAI,CAAC;EACzCE,YAAY,CAACQ,IAAI,CAACH,YAAY,CAAC;EAE/B,IAAIR,OAAO,CAACY,WAAW,EAAE;IACrB,IAAMC,iBAAiB,GAAGjD,IAAI,CAAC6C,IAAI,CAC/BL,SAAS,EACT,aACJ,CAAC;IACD,IAAAU,4BAAkB,EAACD,iBAAiB,CAAC;IACrC,IAAMD,WAAW,GAAIb,UAAU,CAAgBgB,cAAc,CAAC,CAAC;IAC/D,MAAMC,OAAO,CAACC,GAAG,CACbL,WAAW,CACNM,GAAG,CAAC,MAAOC,UAAU,IAAK;MACvB,IAAMC,OAAO,GAAG,MAAMD,UAAU,CAACE,OAAO,CAAC,CAAC;MAC1C,IAAMC,sBAAsB,GAAG1D,IAAI,CAAC6C,IAAI,CACpCI,iBAAiB,EACjBM,UAAU,CAACI,EACf,CAAC;MACD,MAAM,IAAAC,qBAAW,EAACF,sBAAsB,EAAEF,OAAO,CAAC;MAClDjB,YAAY,CAACQ,IAAI,CAACW,sBAAsB,CAAC;IAC7C,CAAC,CACT,CAAC;EACL;EAEA,OAAOnB,YAAY;AACvB;AAEA,IAAMsB,mBAAyD,GAAG,IAAIxC,OAAO,CAAC,CAAC;AAC/E,SAASyC,iBAAiBA,CAACC,EAAc,EAAEC,KAAoB,EAAE;EAC7D,IAAMC,EAAE,GAAG,IAAAC,yBAAkB,EACzBL,mBAAmB,EACnBE,EAAE,EACF,MAAM,EACV,CAAC;EACDE,EAAE,CAAClB,IAAI,CAACiB,KAAK,CAAC;AAClB;AAAC,IAEYG,aAAa,GAAArD,OAAA,CAAAqD,aAAA;EAStB,SAAAA,cACoBC,QAAoB,EACpBhC,OAAsB,EACxC;IAAA,KAXKiC,SAAS,GAAY,KAAK;IAAA,KACzBC,IAAI,GAAmB,EAAE;IAAA,KACzBC,cAAc,GAAkBC,2BAAoB;IAAA,KACpDC,uBAAuB,GAA6B,IAAIC,qBAAe,CAAC,KAAY,CAAC;IAAA,KAE5EC,oBAAoB,GAAgC,IAAIC,aAAO,CAAC,CAAC;IAAA,KAClEC,YAAY,GAAmC,IAAI,CAACF,oBAAoB,CAACG,YAAY,CAAC,CAAC;IAAA,KAGnFV,QAAoB,GAApBA,QAAoB;IAAA,KACpBhC,OAAsB,GAAtBA,OAAsB;IAEtC,IAAI,CAAC,IAAI,CAACA,OAAO,CAAC2C,SAAS,EAAE;MACzB,IAAI,CAAC3C,OAAO,CAAC2C,SAAS,GAAG,EAAE;IAC/B;IACAjB,iB
AAiB,CAACM,QAAQ,EAAE,IAAI,CAAC;IACjC,IAAAY,wBAAc,EAACZ,QAAQ,EAAEhC,OAAO,CAAC;EACrC;;EAEA;AACJ;AACA;AACA;AACA;AACA;AACA;EANI,IAAA6C,MAAA,GAAAd,aAAA,CAAAzD,SAAA;EAAAuE,MAAA,CAOOC,WAAW,GAAlB,SAAAA,YAAA,EAAqB;IACjB,OAAO,IAAI,CAACX,cAAc,GAAG,IAAI,CAACA,cAAc,CAACY,IAAI,CAAC,MAAM,IAAI,CAACC,YAAY,CAAC,CAAC,CAAC;EACpF,CAAC;EAAAH,MAAA,CAEYG,YAAY,GAAzB,eAAAA,aAAA,EAA4B;IAAA,IAAAC,KAAA;IACxB,IAAMC,IAAI,GAAG,MAAM,IAAAC,iBAAO,EAAC,IAAI,CAACnD,OAAO,CAAC;IAExC,MAAMgB,OAAO,CAACC,GAAG,CACb/C,MAAM,CACDkF,OAAO,CAAC,IAAI,CAACpB,QAAQ,CAACqB,WAAW,CAAC,CAClCnC,GAAG,CAAC,OAAO,CAACoC,cAAc,EAAEC,UAAU,CAAC,KAAK;MACzC,IAAMC,UAAU,GAAGD,UAAU,CAACE,MAAM,CAACC,WAAW;MAChD,IAAMC,kBAA+B,GAAG,IAAIC,GAAG,CAAC,CAAC;MAEjD,MAAM,IAAI,CAAC5B,QAAQ,CAAC6B,kBAAkB,CAAC,CAAC;MAExC,IAAI,CAACX,IAAI,CAACY,gBAAgB,CAACR,cAAc,CAAC,EAAE;QACxCJ,IAAI,CAACY,gBAAgB,CAACR,cAAc,CAAC,GAAG,CAAC,CAAC;MAC9C;MACA,IAAIS,cAAc,GAAGb,IAAI,CAACY,gBAAgB,CAACR,cAAc,CAAC,CAACU,UAAU;MAErE,IAAIC,OAAO,GAAG,IAAI;MAAC,IAAAC,KAAA,kBAAAA,CAAA,EACgB;QAC/B,MAAMjB,KAAI,CAACjB,QAAQ,CAAC6B,kBAAkB,CAAC,CAAC;QACxC,IAAMM,aAAa,GAAG,MAAM,IAAAC,yCAAwB,EAChDb,UAAU,CAACc,eAAe,EAC1BpB,KAAI,CAACjD,OAAO,CAAC2C,SAAS,GAAGM,KAAI,CAACjD,OAAO,CAAC2C,SAAS,GAAG,CAAC,EACnDoB,cACJ,CAAC;QACDA,cAAc,GAAGI,aAAa,CAACG,SAAS,CAACC,MAAM,GAAG,CAAC,GAAGJ,aAAa,CAACH,UAAU,GAAGD,cAAc;QAC/Fb,IAAI,CAACY,gBAAgB,CAACR,cAAc,CAAC,CAACU,UAAU,GAAGD,cAAc;QAEjE,IAAMS,MAAgB,GAAGL,aAAa,CAACG,SAAS,CAC3CpD,GAAG,CAACuD,GAAG,IAAIA,GAAG,CAACjB,UAAU,CAAC,CAAC,CAC3BkB,MAAM,CAACnD,EAAE,IAAI;UACV,IACIoC,kBAAkB,CAACrE,GAAG,CAACiC,EAAE,CAAC,EAC5B;YACE,OAAO,KAAK;UAChB,CAAC,MAAM;YACHoC,kBAAkB,CAACgB,GAAG,CAACpD,EAAE,CAAC;YAC1B,OAAO,IAAI;UACf;QACJ,CAAC,CAAC,CACDmD,MAAM,CAAC,CAACE,IAAI,EAAEC,GAAG,EAAEC,GAAG,KAAKA,GAAG,CAACC,OAAO,CAACH,IAAI,CAAC,KAAKC,GAAG,CAAC,CAAC,CAAC;QAC5D,MAAM5B,KAAI,CAACjB,QAAQ,CAAC6B,kBAAkB,CAAC,CAAC;QAExC,IAAMmB,IAA6B,GAAG,MAAMzB,UAAU,CAAC0B,SAAS,CAACT,MAAM,CAAC,CAACU,IAAI,CAAC,CAAC;QAC/E,IAAIF,IAAI,CAACG,IAAI,KAAK,CAAC,EAAE;UACjBlB,OAAO,GAAG,KAAK;UAAC;QAEpB;QACA,MAAMjD,OAAO,CAACC,GAAG
,CACbmE,KAAK,CACAC,IAAI,CAACL,IAAI,CAACM,MAAM,CAAC,CAAC,CAAC,CACnBpE,GAAG,CAAC,MAAOuD,GAAG,IAAK;UAChB,IAAMtE,YAAY,GAAG,MAAML,oBAAoB,CAAC2E,GAAG,EAAExB,KAAI,CAACjD,OAAO,CAAC;UAClEiD,KAAI,CAACV,oBAAoB,CAACgD,IAAI,CAAC;YAC3BjC,cAAc,EAAEC,UAAU,CAACiC,IAAI;YAC/BC,UAAU,EAAEhB,GAAG,CAACnE,OAAO;YACvBoF,KAAK,EAAEvF,YAAY;YACnBwF,OAAO,EAAE;UACb,CAAC,CAAC;QACN,CAAC,CACT,CAAC;QACD;QACA,MAAM3E,OAAO,CAACC,GAAG,CACbuD,MAAM,CACDE,MAAM,CAACkB,KAAK,IAAI,CAACZ,IAAI,CAAC1F,GAAG,CAACsG,KAAK,CAAC,CAAC,CACjC1E,GAAG,CAAC,MAAO0E,KAAK,IAAK;UAClB,MAAM,IAAAC,sBAAY,EAAC,IAAAxF,wBAAc,EAAC4C,KAAI,CAACjD,OAAO,EAAE4F,KAAK,CAAC,CAAC;UACvD3C,KAAI,CAACV,oBAAoB,CAACgD,IAAI,CAAC;YAC3BjC,cAAc,EAAEC,UAAU,CAACiC,IAAI;YAC/BC,UAAU,EAAEG,KAAK;YACjBF,KAAK,EAAE,EAAE;YACTC,OAAO,EAAE;UACb,CAAC,CAAC;QACN,CAAC,CACT,CAAC;MACL,CAAC;MAzDD,OAAO1B,OAAO,IAAI,CAAC,IAAI,CAAChC,SAAS;QAAA,UAAAiC,KAAA,IA4BzB;MAAS;MA8BjBhB,IAAI,CAACY,gBAAgB,CAACR,cAAc,CAAC,CAACU,UAAU,GAAGD,cAAc;MACjE,MAAM,IAAA+B,iBAAO,EAAC,IAAI,CAAC9F,OAAO,EAAEkD,IAAI,CAAC;IACrC,CAAC,CACT,CAAC;IAED,IAAI,CAAC,IAAI,CAACb,uBAAuB,CAAC0D,QAAQ,CAAC,CAAC,EAAE;MAC1C,IAAI,CAAC1D,uBAAuB,CAACkD,IAAI,CAAC,IAAI,CAAC;IAC3C;EACJ,CAAC;EAAA1C,MAAA,CAEMmD,eAAe,GAAtB,SAAAA,gBAAA,EAAyB;IACrB,IAAM3C,WAA2B,GAAGnF,MAAM,CAACoH,MAAM,CAAC,IAAI,CAACtD,QAAQ,CAACqB,WAAW,CAAC;IAC5EA,WAAW,CAACjF,OAAO,CAACmF,UAAU,IAAI;MAC9B,IAAM0C,QAAQ,GAAG1C,UAAU,CAACc,eAAe,CAAC6B,YAAY,CAAC,CAAC;MAC1D,IAAMC,GAAG,GAAGF,QAAQ,CAACG,SAAS,CAAC,MAAM;QACjC,IAAI,CAACtD,WAAW,CAAC,CAAC;MACtB,CAAC,CAAC;MACF,IAAI,CAACZ,IAAI,CAACvB,IAAI,CAACwF,GAAG,CAAC;IACvB,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA,KAHI;EAAAtD,MAAA,CAIOwD,kBAAkB,GAAzB,SAAAA,mBAAA,EAA8C;IAC1C,OAAO,IAAAC,oBAAc,EACjB,IAAI,CAACjE,uBAAuB,CAACkE,IAAI,CAC7B,IAAA7B,YAAM,EAAC8B,CAAC,IAAI,CAAC,CAACA,CAAC,CAAC,EAChB,IAAAtF,SAAG,EAAC,MAAM,IAAI,CAClB,CACJ,CAAC;EACL,CAAC;EAAA2B,MAAA,CAED4D,MAAM,GAAN,SAAAA,OAAA,EAA2B;IACvB,IAAI,IAAI,CAACxE,SAAS,EAAE;MAChB,OAAOyE,4BAAqB;IAChC;IACA,IAAI,CAACzE,SAAS,GAAG,IAAI;IACrB,IAAI,CAACC,IAAI,CAAC9D,OAAO,CAAC+H,GAAG,IAAIA,GAAG,CAACQ,WAAW
,CAAC,CAAC,CAAC;IAC3C,OAAOC,2BAAoB;EAC/B,CAAC;EAAA,OAAA7E,aAAA;AAAA;AAIE,SAAS8E,MAAMA,CAElB7G,OAAsB,EACT;EACb,IAAM8G,WAAW,GAAG,IAAI/E,aAAa,CAAC,IAAI,EAAE/B,OAAO,CAAC;EACpD8G,WAAW,CAAChE,WAAW,CAAC,CAAC;EAEzB,IAAI9C,OAAO,CAAC+G,IAAI,EAAE;IACdD,WAAW,CAACd,eAAe,CAAC,CAAC;EACjC;EAEA,OAAOc,WAAW;AACtB;AAGO,IAAME,gBAA0B,GAAAtI,OAAA,CAAAsI,gBAAA,GAAG;EACtCxB,IAAI,EAAE,QAAQ;EACdyB,IAAI,EAAE,IAAI;EACVC,UAAU,EAAE;IACRC,UAAUA,CAACC,KAAU,EAAE;MACnBA,KAAK,CAACP,MAAM,GAAGA,MAAM;IACzB;EACJ,CAAC;EACDQ,KAAK,EAAE;IACHC,oBAAoB,EAAE;MAClBC,KAAK,EAAE,SAASD,oBAAoBA,CAAC3F,EAAc,EAAE;QACjD,IAAM6F,MAAM,GAAG/F,mBAAmB,CAAC5C,GAAG,CAAC8C,EAAE,CAAC;QAC1C,IAAI6F,MAAM,EAAE;UACRA,MAAM,CAACpJ,OAAO,CAACwD,KAAK,IAAIA,KAAK,CAAC6E,MAAM,CAAC,CAAC,CAAC;QAC3C;MACJ;IACJ;EACJ;AACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/cleanup/cleanup-helper.js b/dist/cjs/plugins/cleanup/cleanup-helper.js deleted file mode 100644 index 71ef9cba72e..00000000000 --- a/dist/cjs/plugins/cleanup/cleanup-helper.js +++ /dev/null @@ -1,17 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.DEFAULT_CLEANUP_POLICY = void 0; -var DEFAULT_CLEANUP_POLICY = exports.DEFAULT_CLEANUP_POLICY = { - minimumDeletedTime: 1000 * 60 * 60 * 24 * 31, - // one month - minimumCollectionAge: 1000 * 60, - // 60 seconds - runEach: 1000 * 60 * 5, - // 5 minutes - awaitReplicationsInSync: true, - waitForLeadership: true -}; -//# sourceMappingURL=cleanup-helper.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/cleanup/cleanup-helper.js.map b/dist/cjs/plugins/cleanup/cleanup-helper.js.map deleted file mode 100644 index 4faba788238..00000000000 --- a/dist/cjs/plugins/cleanup/cleanup-helper.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"cleanup-helper.js","names":["DEFAULT_CLEANUP_POLICY","exports","minimumDeletedTime","minimumCollectionAge","runEach","awaitReplicationsInSync","waitForLeadership"],"sources":["../../../../src/plugins/cleanup/cleanup-helper.ts"],"sourcesContent":["import type {\n RxCleanupPolicy\n} from '../../types/index.d.ts';\n\nexport const DEFAULT_CLEANUP_POLICY: RxCleanupPolicy = {\n minimumDeletedTime: 1000 * 60 * 60 * 24 * 31, // one month\n minimumCollectionAge: 1000 * 60, // 60 seconds\n runEach: 1000 * 60 * 5, // 5 minutes\n awaitReplicationsInSync: true,\n waitForLeadership: true\n};\n"],"mappings":";;;;;;AAIO,IAAMA,sBAAuC,GAAAC,OAAA,CAAAD,sBAAA,GAAG;EACnDE,kBAAkB,EAAE,IAAI,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE;EAAE;EAC9CC,oBAAoB,EAAE,IAAI,GAAG,EAAE;EAAE;EACjCC,OAAO,EAAE,IAAI,GAAG,EAAE,GAAG,CAAC;EAAE;EACxBC,uBAAuB,EAAE,IAAI;EAC7BC,iBAAiB,EAAE;AACvB,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/cleanup/cleanup-state.js b/dist/cjs/plugins/cleanup/cleanup-state.js deleted file mode 100644 index d0951193a20..00000000000 --- a/dist/cjs/plugins/cleanup/cleanup-state.js +++ /dev/null @@ -1,81 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.cleanupRxState = cleanupRxState; -exports.runCleanupAfterWrite = runCleanupAfterWrite; -exports.startCleanupForRxState = startCleanupForRxState; -var _index = require("../../plugins/utils/index.js"); -var _index2 = require("../replication/index.js"); -var _cleanupHelper = require("./cleanup-helper.js"); -var _cleanup = require("./cleanup.js"); -var RXSTATE_CLEANUP_QUEUE = _index.PROMISE_RESOLVE_TRUE; -async function startCleanupForRxState(state) { - var rxCollection = state.collection; - var rxDatabase = rxCollection.database; - var cleanupPolicy = Object.assign({}, _cleanupHelper.DEFAULT_CLEANUP_POLICY, rxDatabase.cleanupPolicy ? 
rxDatabase.cleanupPolicy : {}); - await (0, _cleanup.initialCleanupWait)(rxCollection, cleanupPolicy); - if (rxCollection.destroyed) { - return; - } - - // initially cleanup the state - await cleanupRxState(state, cleanupPolicy); - - /** - * Afterwards we listen to writes - * and only re-run the cleanup if there was a write - * to the state. - */ - await runCleanupAfterWrite(state, cleanupPolicy); -} -/** - * Runs the cleanup for a single RxState - */ -async function cleanupRxState(state, cleanupPolicy) { - var rxCollection = state.collection; - var rxDatabase = rxCollection.database; - - // run cleanup() until it returns true - var isDone = false; - while (!isDone && !rxCollection.destroyed) { - if (cleanupPolicy.awaitReplicationsInSync) { - var replicationStates = _index2.REPLICATION_STATE_BY_COLLECTION.get(rxCollection); - if (replicationStates) { - await Promise.all(replicationStates.map(replicationState => { - if (!replicationState.isStopped()) { - return replicationState.awaitInSync(); - } - })); - } - } - if (rxCollection.destroyed) { - return; - } - RXSTATE_CLEANUP_QUEUE = RXSTATE_CLEANUP_QUEUE.then(async () => { - if (rxCollection.destroyed) { - return true; - } - await rxDatabase.requestIdlePromise(); - return state._cleanup(); - }); - isDone = await RXSTATE_CLEANUP_QUEUE; - } -} - -/** - * TODO this is not waiting for writes! - * it just runs on interval. 
- */ -async function runCleanupAfterWrite(state, cleanupPolicy) { - var rxCollection = state.collection; - while (!rxCollection.destroyed) { - await rxCollection.promiseWait(cleanupPolicy.runEach); - if (rxCollection.destroyed) { - return; - } - await cleanupRxState(state, cleanupPolicy); - } -} -//# sourceMappingURL=cleanup-state.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/cleanup/cleanup-state.js.map b/dist/cjs/plugins/cleanup/cleanup-state.js.map deleted file mode 100644 index 0a6b7fb0e8f..00000000000 --- a/dist/cjs/plugins/cleanup/cleanup-state.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"cleanup-state.js","names":["_index","require","_index2","_cleanupHelper","_cleanup","RXSTATE_CLEANUP_QUEUE","PROMISE_RESOLVE_TRUE","startCleanupForRxState","state","rxCollection","collection","rxDatabase","database","cleanupPolicy","Object","assign","DEFAULT_CLEANUP_POLICY","initialCleanupWait","destroyed","cleanupRxState","runCleanupAfterWrite","isDone","awaitReplicationsInSync","replicationStates","REPLICATION_STATE_BY_COLLECTION","get","Promise","all","map","replicationState","isStopped","awaitInSync","then","requestIdlePromise","promiseWait","runEach"],"sources":["../../../../src/plugins/cleanup/cleanup-state.ts"],"sourcesContent":["import type { RxCleanupPolicy, RxCollection, RxState } from '../../types/index.d.ts';\nimport { PROMISE_RESOLVE_TRUE } from '../../plugins/utils/index.ts';\nimport { REPLICATION_STATE_BY_COLLECTION } from '../replication/index.ts';\nimport { DEFAULT_CLEANUP_POLICY } from './cleanup-helper.ts';\nimport { initialCleanupWait } from './cleanup.ts';\n\nlet RXSTATE_CLEANUP_QUEUE: Promise = PROMISE_RESOLVE_TRUE;\n\nexport async function startCleanupForRxState(state: RxState) {\n const rxCollection = state.collection;\n const rxDatabase = rxCollection.database;\n const cleanupPolicy = Object.assign(\n {},\n DEFAULT_CLEANUP_POLICY,\n rxDatabase.cleanupPolicy ? 
rxDatabase.cleanupPolicy : {}\n );\n\n await initialCleanupWait(rxCollection, cleanupPolicy);\n if (rxCollection.destroyed) {\n return;\n }\n\n // initially cleanup the state\n await cleanupRxState(state, cleanupPolicy);\n\n /**\n * Afterwards we listen to writes\n * and only re-run the cleanup if there was a write\n * to the state.\n */\n await runCleanupAfterWrite(state, cleanupPolicy);\n}\n/**\n * Runs the cleanup for a single RxState\n */\nexport async function cleanupRxState(\n state: RxState,\n cleanupPolicy: RxCleanupPolicy\n) {\n const rxCollection = state.collection;\n const rxDatabase = rxCollection.database;\n\n // run cleanup() until it returns true\n let isDone = false;\n while (!isDone && !rxCollection.destroyed) {\n if (cleanupPolicy.awaitReplicationsInSync) {\n const replicationStates = REPLICATION_STATE_BY_COLLECTION.get(rxCollection);\n if (replicationStates) {\n await Promise.all(\n replicationStates.map(replicationState => {\n if (!replicationState.isStopped()) {\n return replicationState.awaitInSync();\n }\n })\n );\n }\n }\n if (rxCollection.destroyed) {\n return;\n }\n RXSTATE_CLEANUP_QUEUE = RXSTATE_CLEANUP_QUEUE\n .then(async () => {\n if (rxCollection.destroyed) {\n return true;\n }\n await rxDatabase.requestIdlePromise();\n return state._cleanup();\n });\n isDone = await RXSTATE_CLEANUP_QUEUE;\n }\n}\n\n/**\n * TODO this is not waiting for writes!\n * it just runs on interval.\n */\nexport async function runCleanupAfterWrite(\n state: RxState,\n cleanupPolicy: RxCleanupPolicy\n) {\n const rxCollection = state.collection;\n while (!rxCollection.destroyed) {\n await rxCollection.promiseWait(cleanupPolicy.runEach);\n if (rxCollection.destroyed) {\n return;\n }\n await cleanupRxState(state, cleanupPolicy);\n 
}\n}\n"],"mappings":";;;;;;;;AACA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,OAAA,GAAAD,OAAA;AACA,IAAAE,cAAA,GAAAF,OAAA;AACA,IAAAG,QAAA,GAAAH,OAAA;AAEA,IAAII,qBAAmC,GAAGC,2BAAoB;AAEvD,eAAeC,sBAAsBA,CAACC,KAAgC,EAAE;EAC3E,IAAMC,YAAY,GAAGD,KAAK,CAACE,UAAU;EACrC,IAAMC,UAAU,GAAGF,YAAY,CAACG,QAAQ;EACxC,IAAMC,aAAa,GAAGC,MAAM,CAACC,MAAM,CAC/B,CAAC,CAAC,EACFC,qCAAsB,EACtBL,UAAU,CAACE,aAAa,GAAGF,UAAU,CAACE,aAAa,GAAG,CAAC,CAC3D,CAAC;EAED,MAAM,IAAAI,2BAAkB,EAACR,YAAY,EAAEI,aAAa,CAAC;EACrD,IAAIJ,YAAY,CAACS,SAAS,EAAE;IACxB;EACJ;;EAEA;EACA,MAAMC,cAAc,CAACX,KAAK,EAAEK,aAAa,CAAC;;EAE1C;AACJ;AACA;AACA;AACA;EACI,MAAMO,oBAAoB,CAACZ,KAAK,EAAEK,aAAa,CAAC;AACpD;AACA;AACA;AACA;AACO,eAAeM,cAAcA,CAChCX,KAAgC,EAChCK,aAA8B,EAChC;EACE,IAAMJ,YAAY,GAAGD,KAAK,CAACE,UAAU;EACrC,IAAMC,UAAU,GAAGF,YAAY,CAACG,QAAQ;;EAExC;EACA,IAAIS,MAAM,GAAG,KAAK;EAClB,OAAO,CAACA,MAAM,IAAI,CAACZ,YAAY,CAACS,SAAS,EAAE;IACvC,IAAIL,aAAa,CAACS,uBAAuB,EAAE;MACvC,IAAMC,iBAAiB,GAAGC,uCAA+B,CAACC,GAAG,CAAChB,YAAY,CAAC;MAC3E,IAAIc,iBAAiB,EAAE;QACnB,MAAMG,OAAO,CAACC,GAAG,CACbJ,iBAAiB,CAACK,GAAG,CAACC,gBAAgB,IAAI;UACtC,IAAI,CAACA,gBAAgB,CAACC,SAAS,CAAC,CAAC,EAAE;YAC/B,OAAOD,gBAAgB,CAACE,WAAW,CAAC,CAAC;UACzC;QACJ,CAAC,CACL,CAAC;MACL;IACJ;IACA,IAAItB,YAAY,CAACS,SAAS,EAAE;MACxB;IACJ;IACAb,qBAAqB,GAAGA,qBAAqB,CACxC2B,IAAI,CAAC,YAAY;MACd,IAAIvB,YAAY,CAACS,SAAS,EAAE;QACxB,OAAO,IAAI;MACf;MACA,MAAMP,UAAU,CAACsB,kBAAkB,CAAC,CAAC;MACrC,OAAOzB,KAAK,CAACJ,QAAQ,CAAC,CAAC;IAC3B,CAAC,CAAC;IACNiB,MAAM,GAAG,MAAMhB,qBAAqB;EACxC;AACJ;;AAEA;AACA;AACA;AACA;AACO,eAAee,oBAAoBA,CACtCZ,KAAgC,EAChCK,aAA8B,EAChC;EACE,IAAMJ,YAAY,GAAGD,KAAK,CAACE,UAAU;EACrC,OAAO,CAACD,YAAY,CAACS,SAAS,EAAE;IAC5B,MAAMT,YAAY,CAACyB,WAAW,CAACrB,aAAa,CAACsB,OAAO,CAAC;IACrD,IAAI1B,YAAY,CAACS,SAAS,EAAE;MACxB;IACJ;IACA,MAAMC,cAAc,CAACX,KAAK,EAAEK,aAAa,CAAC;EAC9C;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/cleanup/cleanup.js b/dist/cjs/plugins/cleanup/cleanup.js deleted file mode 100644 index f46ee28acab..00000000000 --- 
a/dist/cjs/plugins/cleanup/cleanup.js +++ /dev/null @@ -1,100 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.cleanupRxCollection = cleanupRxCollection; -exports.initialCleanupWait = initialCleanupWait; -exports.runCleanupAfterDelete = runCleanupAfterDelete; -exports.startCleanupForRxCollection = startCleanupForRxCollection; -var _index = require("../../plugins/utils/index.js"); -var _index2 = require("../replication/index.js"); -var _cleanupHelper = require("./cleanup-helper.js"); -/** - * Even on multiple databases, - * the calls to RxStorage().cleanup() - * must never run in parallel. - * The cleanup is a background task which should - * not affect the performance of other, more important tasks. - */ -var RXSTORAGE_CLEANUP_QUEUE = _index.PROMISE_RESOLVE_TRUE; -async function startCleanupForRxCollection(rxCollection) { - var rxDatabase = rxCollection.database; - var cleanupPolicy = Object.assign({}, _cleanupHelper.DEFAULT_CLEANUP_POLICY, rxDatabase.cleanupPolicy ? rxDatabase.cleanupPolicy : {}); - await initialCleanupWait(rxCollection, cleanupPolicy); - if (rxCollection.destroyed) { - return; - } - - // initially cleanup the collection - await cleanupRxCollection(rxCollection, cleanupPolicy); - - /** - * Afterwards we listen to deletes - * and only re-run the cleanup after - * minimumDeletedTime is reached. - */ - await runCleanupAfterDelete(rxCollection, cleanupPolicy); -} -async function initialCleanupWait(collection, cleanupPolicy) { - /** - * Wait until minimumDatabaseInstanceAge is reached - * or collection is destroyed. 
- */ - await collection.promiseWait(cleanupPolicy.minimumCollectionAge); - if (collection.destroyed) { - return; - } - if (cleanupPolicy.waitForLeadership) { - await collection.database.waitForLeadership(); - } -} - -/** - * Runs the cleanup for a single RxCollection - */ -async function cleanupRxCollection(rxCollection, cleanupPolicy) { - var rxDatabase = rxCollection.database; - var storageInstance = rxCollection.storageInstance; - - // run cleanup() until it returns true - var isDone = false; - while (!isDone && !rxCollection.destroyed) { - if (cleanupPolicy.awaitReplicationsInSync) { - var replicationStates = _index2.REPLICATION_STATE_BY_COLLECTION.get(rxCollection); - if (replicationStates) { - await Promise.all(replicationStates.map(replicationState => { - if (!replicationState.isStopped()) { - return replicationState.awaitInSync(); - } - })); - } - } - if (rxCollection.destroyed) { - return; - } - RXSTORAGE_CLEANUP_QUEUE = RXSTORAGE_CLEANUP_QUEUE.then(async () => { - if (rxCollection.destroyed) { - return true; - } - await rxDatabase.requestIdlePromise(); - return storageInstance.cleanup(cleanupPolicy.minimumDeletedTime); - }); - isDone = await RXSTORAGE_CLEANUP_QUEUE; - } -} - -/** - * TODO this is not waiting for deletes! - * it just runs on interval. 
- */ -async function runCleanupAfterDelete(rxCollection, cleanupPolicy) { - while (!rxCollection.destroyed) { - await rxCollection.promiseWait(cleanupPolicy.runEach); - if (rxCollection.destroyed) { - return; - } - await cleanupRxCollection(rxCollection, cleanupPolicy); - } -} -//# sourceMappingURL=cleanup.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/cleanup/cleanup.js.map b/dist/cjs/plugins/cleanup/cleanup.js.map deleted file mode 100644 index cdc6b0ebd14..00000000000 --- a/dist/cjs/plugins/cleanup/cleanup.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"cleanup.js","names":["_index","require","_index2","_cleanupHelper","RXSTORAGE_CLEANUP_QUEUE","PROMISE_RESOLVE_TRUE","startCleanupForRxCollection","rxCollection","rxDatabase","database","cleanupPolicy","Object","assign","DEFAULT_CLEANUP_POLICY","initialCleanupWait","destroyed","cleanupRxCollection","runCleanupAfterDelete","collection","promiseWait","minimumCollectionAge","waitForLeadership","storageInstance","isDone","awaitReplicationsInSync","replicationStates","REPLICATION_STATE_BY_COLLECTION","get","Promise","all","map","replicationState","isStopped","awaitInSync","then","requestIdlePromise","cleanup","minimumDeletedTime","runEach"],"sources":["../../../../src/plugins/cleanup/cleanup.ts"],"sourcesContent":["import type { RxCleanupPolicy, RxCollection } from '../../types/index.d.ts';\nimport { PROMISE_RESOLVE_TRUE } from '../../plugins/utils/index.ts';\nimport { REPLICATION_STATE_BY_COLLECTION } from '../replication/index.ts';\nimport { DEFAULT_CLEANUP_POLICY } from './cleanup-helper.ts';\n\n/**\n * Even on multiple databases,\n * the calls to RxStorage().cleanup()\n * must never run in parallel.\n * The cleanup is a background task which should\n * not affect the performance of other, more important tasks.\n */\nlet RXSTORAGE_CLEANUP_QUEUE: Promise = PROMISE_RESOLVE_TRUE;\n\nexport async function startCleanupForRxCollection(\n rxCollection: RxCollection\n) {\n const rxDatabase = 
rxCollection.database;\n const cleanupPolicy = Object.assign(\n {},\n DEFAULT_CLEANUP_POLICY,\n rxDatabase.cleanupPolicy ? rxDatabase.cleanupPolicy : {}\n );\n\n\n await initialCleanupWait(rxCollection, cleanupPolicy);\n if (rxCollection.destroyed) {\n return;\n }\n\n // initially cleanup the collection\n await cleanupRxCollection(rxCollection, cleanupPolicy);\n\n /**\n * Afterwards we listen to deletes\n * and only re-run the cleanup after\n * minimumDeletedTime is reached.\n */\n await runCleanupAfterDelete(rxCollection, cleanupPolicy);\n}\n\n\nexport async function initialCleanupWait(collection: RxCollection, cleanupPolicy: RxCleanupPolicy) {\n /**\n * Wait until minimumDatabaseInstanceAge is reached\n * or collection is destroyed.\n */\n await collection.promiseWait(cleanupPolicy.minimumCollectionAge);\n if (collection.destroyed) {\n return;\n }\n\n if (cleanupPolicy.waitForLeadership) {\n await collection.database.waitForLeadership();\n }\n}\n\n/**\n * Runs the cleanup for a single RxCollection\n */\nexport async function cleanupRxCollection(\n rxCollection: RxCollection,\n cleanupPolicy: RxCleanupPolicy\n) {\n const rxDatabase = rxCollection.database;\n const storageInstance = rxCollection.storageInstance;\n\n // run cleanup() until it returns true\n let isDone = false;\n while (!isDone && !rxCollection.destroyed) {\n if (cleanupPolicy.awaitReplicationsInSync) {\n const replicationStates = REPLICATION_STATE_BY_COLLECTION.get(rxCollection);\n if (replicationStates) {\n await Promise.all(\n replicationStates.map(replicationState => {\n if (!replicationState.isStopped()) {\n return replicationState.awaitInSync();\n }\n })\n );\n }\n }\n if (rxCollection.destroyed) {\n return;\n }\n RXSTORAGE_CLEANUP_QUEUE = RXSTORAGE_CLEANUP_QUEUE\n .then(async () => {\n if (rxCollection.destroyed) {\n return true;\n }\n await rxDatabase.requestIdlePromise();\n return storageInstance.cleanup(cleanupPolicy.minimumDeletedTime);\n });\n isDone = await RXSTORAGE_CLEANUP_QUEUE;\n 
}\n}\n\n/**\n * TODO this is not waiting for deletes!\n * it just runs on interval.\n */\nexport async function runCleanupAfterDelete(\n rxCollection: RxCollection,\n cleanupPolicy: RxCleanupPolicy\n) {\n while (!rxCollection.destroyed) {\n await rxCollection.promiseWait(cleanupPolicy.runEach);\n if (rxCollection.destroyed) {\n return;\n }\n await cleanupRxCollection(rxCollection, cleanupPolicy);\n }\n}\n"],"mappings":";;;;;;;;;AACA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,OAAA,GAAAD,OAAA;AACA,IAAAE,cAAA,GAAAF,OAAA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAIG,uBAAyC,GAAGC,2BAAoB;AAE7D,eAAeC,2BAA2BA,CAC7CC,YAA0B,EAC5B;EACE,IAAMC,UAAU,GAAGD,YAAY,CAACE,QAAQ;EACxC,IAAMC,aAAa,GAAGC,MAAM,CAACC,MAAM,CAC/B,CAAC,CAAC,EACFC,qCAAsB,EACtBL,UAAU,CAACE,aAAa,GAAGF,UAAU,CAACE,aAAa,GAAG,CAAC,CAC3D,CAAC;EAGD,MAAMI,kBAAkB,CAACP,YAAY,EAAEG,aAAa,CAAC;EACrD,IAAIH,YAAY,CAACQ,SAAS,EAAE;IACxB;EACJ;;EAEA;EACA,MAAMC,mBAAmB,CAACT,YAAY,EAAEG,aAAa,CAAC;;EAEtD;AACJ;AACA;AACA;AACA;EACI,MAAMO,qBAAqB,CAACV,YAAY,EAAEG,aAAa,CAAC;AAC5D;AAGO,eAAeI,kBAAkBA,CAACI,UAAwB,EAAER,aAA8B,EAAE;EAC/F;AACJ;AACA;AACA;EACI,MAAMQ,UAAU,CAACC,WAAW,CAACT,aAAa,CAACU,oBAAoB,CAAC;EAChE,IAAIF,UAAU,CAACH,SAAS,EAAE;IACtB;EACJ;EAEA,IAAIL,aAAa,CAACW,iBAAiB,EAAE;IACjC,MAAMH,UAAU,CAACT,QAAQ,CAACY,iBAAiB,CAAC,CAAC;EACjD;AACJ;;AAEA;AACA;AACA;AACO,eAAeL,mBAAmBA,CACrCT,YAA0B,EAC1BG,aAA8B,EAChC;EACE,IAAMF,UAAU,GAAGD,YAAY,CAACE,QAAQ;EACxC,IAAMa,eAAe,GAAGf,YAAY,CAACe,eAAe;;EAEpD;EACA,IAAIC,MAAM,GAAG,KAAK;EAClB,OAAO,CAACA,MAAM,IAAI,CAAChB,YAAY,CAACQ,SAAS,EAAE;IACvC,IAAIL,aAAa,CAACc,uBAAuB,EAAE;MACvC,IAAMC,iBAAiB,GAAGC,uCAA+B,CAACC,GAAG,CAACpB,YAAY,CAAC;MAC3E,IAAIkB,iBAAiB,EAAE;QACnB,MAAMG,OAAO,CAACC,GAAG,CACbJ,iBAAiB,CAACK,GAAG,CAACC,gBAAgB,IAAI;UACtC,IAAI,CAACA,gBAAgB,CAACC,SAAS,CAAC,CAAC,EAAE;YAC/B,OAAOD,gBAAgB,CAACE,WAAW,CAAC,CAAC;UACzC;QACJ,CAAC,CACL,CAAC;MACL;IACJ;IACA,IAAI1B,YAAY,CAACQ,SAAS,EAAE;MACxB;IACJ;IACAX,uBAAuB,GAAGA,uBAAuB,CAC5C8B,IAAI,CAAC,YAAY;MACd,IAAI3B,YAAY,CAACQ,SAAS,EAAE;QACxB,OAAO,IAAI;MACf;MACA,MAAMP,UAAU,CAAC2B,kBAAkB,CA
AC,CAAC;MACrC,OAAOb,eAAe,CAACc,OAAO,CAAC1B,aAAa,CAAC2B,kBAAkB,CAAC;IACpE,CAAC,CAAC;IACNd,MAAM,GAAG,MAAMnB,uBAAuB;EAC1C;AACJ;;AAEA;AACA;AACA;AACA;AACO,eAAea,qBAAqBA,CACvCV,YAA0B,EAC1BG,aAA8B,EAChC;EACE,OAAO,CAACH,YAAY,CAACQ,SAAS,EAAE;IAC5B,MAAMR,YAAY,CAACY,WAAW,CAACT,aAAa,CAAC4B,OAAO,CAAC;IACrD,IAAI/B,YAAY,CAACQ,SAAS,EAAE;MACxB;IACJ;IACA,MAAMC,mBAAmB,CAACT,YAAY,EAAEG,aAAa,CAAC;EAC1D;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/cleanup/index.js b/dist/cjs/plugins/cleanup/index.js deleted file mode 100644 index a4cb2967574..00000000000 --- a/dist/cjs/plugins/cleanup/index.js +++ /dev/null @@ -1,56 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _exportNames = { - RxDBCleanupPlugin: true -}; -exports.RxDBCleanupPlugin = void 0; -var _cleanupHelper = require("./cleanup-helper.js"); -var _cleanupState = require("./cleanup-state.js"); -var _cleanup = require("./cleanup.js"); -Object.keys(_cleanup).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _cleanup[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _cleanup[key]; - } - }); -}); -var RxDBCleanupPlugin = exports.RxDBCleanupPlugin = { - name: 'cleanup', - rxdb: true, - prototypes: { - RxCollection: proto => { - proto.cleanup = async function (minimumDeletedTime) { - var cleanupPolicy = Object.assign({}, _cleanupHelper.DEFAULT_CLEANUP_POLICY, this.database.cleanupPolicy ? 
this.database.cleanupPolicy : {}); - if (typeof minimumDeletedTime === 'undefined') { - minimumDeletedTime = cleanupPolicy.minimumDeletedTime; - } - - // run cleanup() until it returns true - var isDone = false; - while (!isDone && !this.destroyed) { - isDone = await this.storageInstance.cleanup(minimumDeletedTime); - } - }; - } - }, - hooks: { - createRxCollection: { - after: i => { - (0, _cleanup.startCleanupForRxCollection)(i.collection); - } - }, - createRxState: { - after: i => { - (0, _cleanupState.startCleanupForRxState)(i.state); - } - } - } -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/cleanup/index.js.map b/dist/cjs/plugins/cleanup/index.js.map deleted file mode 100644 index 8c38ace2521..00000000000 --- a/dist/cjs/plugins/cleanup/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_cleanupHelper","require","_cleanupState","_cleanup","Object","keys","forEach","key","prototype","hasOwnProperty","call","_exportNames","exports","defineProperty","enumerable","get","RxDBCleanupPlugin","name","rxdb","prototypes","RxCollection","proto","cleanup","minimumDeletedTime","cleanupPolicy","assign","DEFAULT_CLEANUP_POLICY","database","isDone","destroyed","storageInstance","hooks","createRxCollection","after","i","startCleanupForRxCollection","collection","createRxState","startCleanupForRxState","state"],"sources":["../../../../src/plugins/cleanup/index.ts"],"sourcesContent":["import type {\n RxCollection,\n RxPlugin\n} from '../../types/index.d.ts';\nimport { DEFAULT_CLEANUP_POLICY } from './cleanup-helper.ts';\nimport { startCleanupForRxState } from './cleanup-state.ts';\nimport { startCleanupForRxCollection } from './cleanup.ts';\n\nexport const RxDBCleanupPlugin: RxPlugin = {\n name: 'cleanup',\n rxdb: true,\n prototypes: {\n RxCollection: (proto: any) => {\n proto.cleanup = async function (this: RxCollection, minimumDeletedTime?: number): Promise {\n const cleanupPolicy = 
Object.assign(\n {},\n DEFAULT_CLEANUP_POLICY,\n this.database.cleanupPolicy ? this.database.cleanupPolicy : {}\n );\n\n if (typeof minimumDeletedTime === 'undefined') {\n minimumDeletedTime = cleanupPolicy.minimumDeletedTime;\n }\n\n // run cleanup() until it returns true\n let isDone = false;\n while (!isDone && !this.destroyed) {\n isDone = await this.storageInstance.cleanup(minimumDeletedTime);\n }\n };\n }\n },\n hooks: {\n createRxCollection: {\n after: (i) => {\n startCleanupForRxCollection(i.collection);\n }\n },\n createRxState: {\n after: (i) => {\n startCleanupForRxState(i.state);\n }\n }\n }\n};\n\nexport * from './cleanup.ts';\n"],"mappings":";;;;;;;;;AAIA,IAAAA,cAAA,GAAAC,OAAA;AACA,IAAAC,aAAA,GAAAD,OAAA;AACA,IAAAE,QAAA,GAAAF,OAAA;AAwCAG,MAAA,CAAAC,IAAA,CAAAF,QAAA,EAAAG,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAJ,QAAA,CAAAI,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAZ,QAAA,CAAAI,GAAA;IAAA;EAAA;AAAA;AAtCO,IAAMS,iBAA2B,GAAAJ,OAAA,CAAAI,iBAAA,GAAG;EACvCC,IAAI,EAAE,SAAS;EACfC,IAAI,EAAE,IAAI;EACVC,UAAU,EAAE;IACRC,YAAY,EAAGC,KAAU,IAAK;MAC1BA,KAAK,CAACC,OAAO,GAAG,gBAAoCC,kBAA2B,EAAiB;QAC5F,IAAMC,aAAa,GAAGpB,MAAM,CAACqB,MAAM,CAC/B,CAAC,CAAC,EACFC,qCAAsB,EACtB,IAAI,CAACC,QAAQ,CAACH,aAAa,GAAG,IAAI,CAACG,QAAQ,CAACH,aAAa,GAAG,CAAC,CACjE,CAAC;QAED,IAAI,OAAOD,kBAAkB,KAAK,WAAW,EAAE;UAC3CA,kBAAkB,GAAGC,aAAa,CAACD,kBAAkB;QACzD;;QAEA;QACA,IAAIK,MAAM,GAAG,KAAK;QAClB,OAAO,CAACA,MAAM,IAAI,CAAC,IAAI,CAACC,SAAS,EAAE;UAC/BD,MAAM,GAAG,MAAM,IAAI,CAACE,eAAe,CAACR,OAAO,CAACC,kBAAkB,CAAC;QACnE;MACJ,CAAC;IACL;EACJ,CAAC;EACDQ,KAAK,EAAE;IACHC,kBAAkB,EAAE;MAChBC,KAAK,EAAGC,CAAC,IAAK;QACV,IAAAC,oCAA2B,EAACD,CAAC,CAACE,UAAU,CAAC;MAC7C;IACJ,CAAC;IACDC,aAAa,EAAE;MACXJ,KAAK,EAAGC,CAAC,IAAK;QACV,IAAAI,oCAAsB,EAACJ,CAAC,CAACK,KAAK,CAAC;MACnC;IACJ;EACJ;AACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git 
a/dist/cjs/plugins/crdt/index.js b/dist/cjs/plugins/crdt/index.js deleted file mode 100644 index 5705e679f72..00000000000 --- a/dist/cjs/plugins/crdt/index.js +++ /dev/null @@ -1,395 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxDBcrdtPlugin = exports.RX_CRDT_CONTEXT = void 0; -exports.getCRDTConflictHandler = getCRDTConflictHandler; -exports.getCRDTSchemaPart = getCRDTSchemaPart; -exports.hashCRDTOperations = hashCRDTOperations; -exports.insertCRDT = insertCRDT; -exports.mergeCRDTFields = mergeCRDTFields; -exports.rebuildFromCRDT = rebuildFromCRDT; -exports.sortOperationComparator = sortOperationComparator; -exports.updateCRDT = updateCRDT; -var _rxError = require("../../rx-error.js"); -var _index = require("../../plugins/utils/index.js"); -var _index2 = require("../../index.js"); -var _mingoUpdater = require("../update/mingo-updater.js"); -async function updateCRDT(entry) { - entry = _index2.overwritable.deepFreezeWhenDevMode(entry); - var jsonSchema = this.collection.schema.jsonSchema; - if (!jsonSchema.crdt) { - throw (0, _rxError.newRxError)('CRDT1', { - schema: jsonSchema, - queryObj: entry - }); - } - var crdtOptions = (0, _index.ensureNotFalsy)(jsonSchema.crdt); - var storageToken = await this.collection.database.storageToken; - return this.incrementalModify(async docData => { - var crdtDocField = (0, _index.clone)((0, _index.getProperty)(docData, crdtOptions.field)); - var operation = { - body: (0, _index.toArray)(entry), - creator: storageToken, - time: (0, _index.now)() - }; - - /** - * A new write will ALWAYS be an operation in the last - * array which was non existing before. 
- */ - var lastAr = [operation]; - crdtDocField.operations.push(lastAr); - crdtDocField.hash = await hashCRDTOperations(this.collection.database.hashFunction, crdtDocField); - docData = runOperationOnDocument(this.collection.schema.jsonSchema, docData, operation); - (0, _index.setProperty)(docData, crdtOptions.field, crdtDocField); - return docData; - }, RX_CRDT_CONTEXT); -} -async function insertCRDT(entry) { - entry = _index2.overwritable.deepFreezeWhenDevMode(entry); - var jsonSchema = this.schema.jsonSchema; - if (!jsonSchema.crdt) { - throw (0, _rxError.newRxError)('CRDT1', { - schema: jsonSchema, - queryObj: entry - }); - } - var crdtOptions = (0, _index.ensureNotFalsy)(jsonSchema.crdt); - var storageToken = await this.database.storageToken; - var operation = { - body: Array.isArray(entry) ? entry : [entry], - creator: storageToken, - time: (0, _index.now)() - }; - var insertData = {}; - insertData = runOperationOnDocument(this.schema.jsonSchema, insertData, operation); - var crdtDocField = { - operations: [], - hash: '' - }; - (0, _index.setProperty)(insertData, crdtOptions.field, crdtDocField); - var lastAr = [operation]; - crdtDocField.operations.push(lastAr); - crdtDocField.hash = await hashCRDTOperations(this.database.hashFunction, crdtDocField); - var result = await this.insert(insertData).catch(async err => { - if (err.code === 'CONFLICT') { - // was a conflict, update document instead of inserting - var doc = await this.findOne(err.parameters.id).exec(true); - return doc.updateCRDT(entry); - } else { - throw err; - } - }); - return result; -} -function sortOperationComparator(a, b) { - return a.creator > b.creator ? 
1 : -1; -} -function runOperationOnDocument(schema, docData, operation) { - var entryParts = operation.body; - entryParts.forEach(entryPart => { - var isMatching; - if (entryPart.selector) { - var query = { - selector: (0, _index.ensureNotFalsy)(entryPart.selector), - sort: [], - skip: 0 - }; - var matcher = (0, _index2.getQueryMatcher)(schema, query); - isMatching = matcher(docData); - } else { - isMatching = true; - } - if (isMatching) { - if (entryPart.ifMatch) { - docData = (0, _mingoUpdater.mingoUpdater)(docData, entryPart.ifMatch); - } - } else { - if (entryPart.ifNotMatch) { - docData = (0, _mingoUpdater.mingoUpdater)(docData, entryPart.ifNotMatch); - } - } - }); - return docData; -} -async function hashCRDTOperations(hashFunction, crdts) { - var hashObj = crdts.operations.map(operations => { - return operations.map(op => op.creator); - }); - var hash = await hashFunction(JSON.stringify(hashObj)); - return hash; -} -function getCRDTSchemaPart() { - var operationSchema = { - type: 'object', - properties: { - body: { - type: 'array', - items: { - type: 'object', - properties: { - selector: { - type: 'object' - }, - ifMatch: { - type: 'object' - }, - ifNotMatch: { - type: 'object' - } - }, - additionalProperties: false - }, - minItems: 1 - }, - creator: { - type: 'string' - }, - time: { - type: 'number', - minimum: 1, - maximum: 1000000000000000, - multipleOf: 0.01 - } - }, - additionalProperties: false, - required: ['body', 'creator', 'time'] - }; - return { - type: 'object', - properties: { - operations: { - type: 'array', - items: { - type: 'array', - items: operationSchema - } - }, - hash: { - type: 'string', - // set a minLength to not accidentally store an empty string - minLength: 2 - } - }, - additionalProperties: false, - required: ['operations', 'hash'] - }; -} -async function mergeCRDTFields(hashFunction, crdtsA, crdtsB) { - // the value with most operations must be A to - // ensure we not miss out rows when iterating over both fields. 
- if (crdtsA.operations.length < crdtsB.operations.length) { - [crdtsA, crdtsB] = [crdtsB, crdtsA]; - } - var ret = { - operations: [], - hash: '' - }; - crdtsA.operations.forEach((row, index) => { - var mergedOps = []; - var ids = new Set(); // used to deduplicate - - row.forEach(op => { - ids.add(op.creator); - mergedOps.push(op); - }); - if (crdtsB.operations[index]) { - crdtsB.operations[index].forEach(op => { - if (!ids.has(op.creator)) { - mergedOps.push(op); - } - }); - } - mergedOps = mergedOps.sort(sortOperationComparator); - ret.operations[index] = mergedOps; - }); - ret.hash = await hashCRDTOperations(hashFunction, ret); - return ret; -} -function rebuildFromCRDT(schema, docData, crdts) { - var base = { - _deleted: false - }; - (0, _index.setProperty)(base, (0, _index.ensureNotFalsy)(schema.crdt).field, crdts); - crdts.operations.forEach(operations => { - operations.forEach(op => { - base = runOperationOnDocument(schema, base, op); - }); - }); - return base; -} -function getCRDTConflictHandler(hashFunction, schema) { - var crdtOptions = (0, _index.ensureNotFalsy)(schema.crdt); - var crdtField = crdtOptions.field; - var getCRDTValue = (0, _index.objectPathMonad)(crdtField); - var conflictHandler = async (i, _context) => { - var newDocCrdt = getCRDTValue(i.newDocumentState); - var masterDocCrdt = getCRDTValue(i.realMasterState); - if (newDocCrdt.hash === masterDocCrdt.hash) { - return Promise.resolve({ - isEqual: true - }); - } - var mergedCrdt = await mergeCRDTFields(hashFunction, newDocCrdt, masterDocCrdt); - var mergedDoc = rebuildFromCRDT(schema, i.newDocumentState, mergedCrdt); - return Promise.resolve({ - isEqual: false, - documentData: mergedDoc - }); - }; - return conflictHandler; -} -var RX_CRDT_CONTEXT = exports.RX_CRDT_CONTEXT = 'rx-crdt'; -var RxDBcrdtPlugin = exports.RxDBcrdtPlugin = { - name: 'crdt', - rxdb: true, - prototypes: { - RxDocument: proto => { - proto.updateCRDT = updateCRDT; - var oldRemove = proto.remove; - proto.remove = 
function () { - if (!this.collection.schema.jsonSchema.crdt) { - return oldRemove.bind(this)(); - } - return this.updateCRDT({ - ifMatch: { - $set: { - _deleted: true - } - } - }); - }; - var oldincrementalPatch = proto.incrementalPatch; - proto.incrementalPatch = function (patch) { - if (!this.collection.schema.jsonSchema.crdt) { - return oldincrementalPatch.bind(this)(patch); - } - return this.updateCRDT({ - ifMatch: { - $set: patch - } - }); - }; - var oldincrementalModify = proto.incrementalModify; - proto.incrementalModify = function (fn, context) { - if (!this.collection.schema.jsonSchema.crdt) { - return oldincrementalModify.bind(this)(fn); - } - if (context === RX_CRDT_CONTEXT) { - return oldincrementalModify.bind(this)(fn); - } else { - throw (0, _rxError.newRxError)('CRDT2', { - id: this.primary, - args: { - context - } - }); - } - }; - }, - RxCollection: proto => { - proto.insertCRDT = insertCRDT; - } - }, - overwritable: {}, - hooks: { - preCreateRxCollection: { - after: data => { - if (!data.schema.crdt) { - return; - } - if (data.conflictHandler) { - throw (0, _rxError.newRxError)('CRDT3', { - collection: data.name, - schema: data.schema - }); - } - data.conflictHandler = getCRDTConflictHandler(data.database.hashFunction, data.schema); - } - }, - createRxCollection: { - after: ({ - collection - }) => { - if (!collection.schema.jsonSchema.crdt) { - return; - } - var crdtOptions = (0, _index.ensureNotFalsy)(collection.schema.jsonSchema.crdt); - var crdtField = crdtOptions.field; - var getCrdt = (0, _index.objectPathMonad)(crdtOptions.field); - - /** - * In dev-mode we have to ensure that all document writes - * have the correct crdt state so that nothing is missed out - * or could accidentally do non-crdt writes to the document. 
- */ - if (_index2.overwritable.isDevMode()) { - var bulkWriteBefore = collection.storageInstance.bulkWrite.bind(collection.storageInstance); - collection.storageInstance.bulkWrite = async function (writes, context) { - await Promise.all(writes.map(async write => { - var newDocState = (0, _index.clone)(write.document); - var crdts = getCrdt(newDocState); - var rebuild = rebuildFromCRDT(collection.schema.jsonSchema, newDocState, crdts); - function docWithoutMeta(doc) { - var ret = {}; - Object.entries(doc).forEach(([k, v]) => { - if (!k.startsWith('_') && typeof v !== 'undefined') { - ret[k] = v; - } - }); - return ret; - } - if (!(0, _index.deepEqual)(docWithoutMeta(newDocState), docWithoutMeta(rebuild))) { - throw (0, _rxError.newRxError)('SNH', { - document: newDocState - }); - } - var recalculatedHash = await hashCRDTOperations(collection.database.hashFunction, crdts); - if (crdts.hash !== recalculatedHash) { - throw (0, _rxError.newRxError)('SNH', { - document: newDocState, - args: { - hash: crdts.hash, - recalculatedHash - } - }); - } - })); - return bulkWriteBefore(writes, context); - }; - } - var bulkInsertBefore = collection.bulkInsert.bind(collection); - collection.bulkInsert = async function (docsData) { - var storageToken = await collection.database.storageToken; - var useDocsData = await Promise.all(docsData.map(async docData => { - var setMe = {}; - Object.entries(docData).forEach(([key, value]) => { - if (!key.startsWith('_') && key !== crdtField) { - setMe[key] = value; - } - }); - var crdtOperations = { - operations: [[{ - creator: storageToken, - body: [{ - ifMatch: { - $set: setMe - } - }], - time: (0, _index.now)() - }]], - hash: '' - }; - crdtOperations.hash = await hashCRDTOperations(collection.database.hashFunction, crdtOperations); - (0, _index.setProperty)(docData, crdtOptions.field, crdtOperations); - return docData; - })); - return bulkInsertBefore(useDocsData); - }; - } - } - } -}; -//# sourceMappingURL=index.js.map \ No newline at end of 
file diff --git a/dist/cjs/plugins/crdt/index.js.map b/dist/cjs/plugins/crdt/index.js.map deleted file mode 100644 index 414a039e614..00000000000 --- a/dist/cjs/plugins/crdt/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_rxError","require","_index","_index2","_mingoUpdater","updateCRDT","entry","overwritable","deepFreezeWhenDevMode","jsonSchema","collection","schema","crdt","newRxError","queryObj","crdtOptions","ensureNotFalsy","storageToken","database","incrementalModify","docData","crdtDocField","clone","getProperty","field","operation","body","toArray","creator","time","now","lastAr","operations","push","hash","hashCRDTOperations","hashFunction","runOperationOnDocument","setProperty","RX_CRDT_CONTEXT","insertCRDT","Array","isArray","insertData","result","insert","catch","err","code","doc","findOne","parameters","id","exec","sortOperationComparator","a","b","entryParts","forEach","entryPart","isMatching","selector","query","sort","skip","matcher","getQueryMatcher","ifMatch","mingoUpdater","ifNotMatch","crdts","hashObj","map","op","JSON","stringify","getCRDTSchemaPart","operationSchema","type","properties","items","additionalProperties","minItems","minimum","maximum","multipleOf","required","minLength","mergeCRDTFields","crdtsA","crdtsB","length","ret","row","index","mergedOps","ids","Set","add","has","rebuildFromCRDT","base","_deleted","getCRDTConflictHandler","crdtField","getCRDTValue","objectPathMonad","conflictHandler","i","_context","newDocCrdt","newDocumentState","masterDocCrdt","realMasterState","Promise","resolve","isEqual","mergedCrdt","mergedDoc","documentData","exports","RxDBcrdtPlugin","name","rxdb","prototypes","RxDocument","proto","oldRemove","remove","bind","$set","oldincrementalPatch","incrementalPatch","patch","oldincrementalModify","fn","context","primary","args","RxCollection","hooks","preCreateRxCollection","after","data","createRxCollection","getCrdt","isDevMode","bulkWriteBefore","storageInstance","bulkWrit
e","writes","all","write","newDocState","document","rebuild","docWithoutMeta","Object","entries","k","v","startsWith","deepEqual","recalculatedHash","bulkInsertBefore","bulkInsert","docsData","useDocsData","setMe","key","value","crdtOperations"],"sources":["../../../../src/plugins/crdt/index.ts"],"sourcesContent":["import { newRxError } from '../../rx-error.ts';\nimport type {\n CRDTDocumentField,\n CRDTEntry,\n CRDTOperation,\n FilledMangoQuery,\n HashFunction,\n JsonSchema,\n RxConflictHandler,\n RxConflictHandlerInput,\n RxDocument,\n RxDocumentData,\n RxJsonSchema,\n RxPlugin,\n WithDeleted\n} from '../../types/index.d.ts';\nimport {\n clone,\n deepEqual,\n ensureNotFalsy,\n getProperty,\n now,\n objectPathMonad,\n setProperty,\n toArray\n} from '../../plugins/utils/index.ts';\nimport {\n getQueryMatcher,\n overwritable,\n RxCollection,\n RxDocumentWriteData,\n RxError\n} from '../../index.ts';\nimport { mingoUpdater } from '../update/mingo-updater.ts';\n\n\n\nexport async function updateCRDT(\n this: RxDocument,\n entry: CRDTEntry | CRDTEntry[]\n) {\n entry = overwritable.deepFreezeWhenDevMode(entry) as any;\n\n const jsonSchema = this.collection.schema.jsonSchema;\n if (!jsonSchema.crdt) {\n throw newRxError('CRDT1', {\n schema: jsonSchema,\n queryObj: entry\n });\n }\n const crdtOptions = ensureNotFalsy(jsonSchema.crdt);\n const storageToken = await this.collection.database.storageToken;\n\n return this.incrementalModify(async (docData) => {\n const crdtDocField: CRDTDocumentField = clone(getProperty(docData as any, crdtOptions.field));\n const operation: CRDTOperation = {\n body: toArray(entry),\n creator: storageToken,\n time: now()\n };\n\n /**\n * A new write will ALWAYS be an operation in the last\n * array which was non existing before.\n */\n const lastAr: CRDTOperation[] = [operation];\n crdtDocField.operations.push(lastAr);\n crdtDocField.hash = await hashCRDTOperations(this.collection.database.hashFunction, crdtDocField);\n\n docData = 
runOperationOnDocument(\n this.collection.schema.jsonSchema,\n docData,\n operation\n );\n setProperty(docData, crdtOptions.field, crdtDocField);\n return docData;\n }, RX_CRDT_CONTEXT);\n}\n\n\nexport async function insertCRDT(\n this: RxCollection,\n entry: CRDTEntry | CRDTEntry[]\n) {\n entry = overwritable.deepFreezeWhenDevMode(entry) as any;\n\n const jsonSchema = this.schema.jsonSchema;\n if (!jsonSchema.crdt) {\n throw newRxError('CRDT1', {\n schema: jsonSchema,\n queryObj: entry\n });\n }\n const crdtOptions = ensureNotFalsy(jsonSchema.crdt);\n const storageToken = await this.database.storageToken;\n const operation: CRDTOperation = {\n body: Array.isArray(entry) ? entry : [entry],\n creator: storageToken,\n time: now()\n };\n\n let insertData: RxDocumentWriteData = {} as any;\n insertData = runOperationOnDocument(\n this.schema.jsonSchema,\n insertData as any,\n operation\n ) as any;\n const crdtDocField: CRDTDocumentField = {\n operations: [],\n hash: ''\n };\n setProperty(insertData as any, crdtOptions.field, crdtDocField);\n\n const lastAr: CRDTOperation[] = [operation];\n crdtDocField.operations.push(lastAr);\n crdtDocField.hash = await hashCRDTOperations(this.database.hashFunction, crdtDocField);\n\n const result = await this.insert(insertData).catch(async (err: RxError) => {\n if (err.code === 'CONFLICT') {\n // was a conflict, update document instead of inserting\n const doc = await this.findOne(err.parameters.id).exec(true);\n return doc.updateCRDT(entry);\n } else {\n throw err;\n }\n });\n return result;\n}\n\n\nexport function sortOperationComparator(a: CRDTOperation, b: CRDTOperation) {\n return a.creator > b.creator ? 
1 : -1;\n}\n\n\nfunction runOperationOnDocument(\n schema: RxJsonSchema>,\n docData: WithDeleted,\n operation: CRDTOperation\n): WithDeleted {\n const entryParts = operation.body;\n entryParts.forEach(entryPart => {\n let isMatching: boolean;\n if (entryPart.selector) {\n const query: FilledMangoQuery = {\n selector: ensureNotFalsy(entryPart.selector as any),\n sort: [],\n skip: 0\n };\n const matcher = getQueryMatcher(schema, query);\n isMatching = matcher(docData as any);\n } else {\n isMatching = true;\n }\n if (isMatching) {\n if (entryPart.ifMatch) {\n docData = mingoUpdater>(docData, entryPart.ifMatch);\n }\n } else {\n if (entryPart.ifNotMatch) {\n docData = mingoUpdater>(docData, entryPart.ifNotMatch);\n }\n }\n });\n return docData;\n}\n\nexport async function hashCRDTOperations(\n hashFunction: HashFunction,\n crdts: CRDTDocumentField\n): Promise {\n const hashObj = crdts.operations.map((operations) => {\n return operations.map(op => op.creator);\n });\n const hash = await hashFunction(JSON.stringify(hashObj));\n return hash;\n}\n\nexport function getCRDTSchemaPart(): JsonSchema> {\n const operationSchema: JsonSchema> = {\n type: 'object',\n properties: {\n body: {\n type: 'array',\n items: {\n type: 'object',\n properties: {\n selector: {\n type: 'object'\n },\n ifMatch: {\n type: 'object'\n },\n ifNotMatch: {\n type: 'object'\n }\n },\n additionalProperties: false\n },\n minItems: 1\n },\n creator: {\n type: 'string'\n },\n time: {\n type: 'number',\n minimum: 1,\n maximum: 1000000000000000,\n multipleOf: 0.01\n }\n },\n additionalProperties: false,\n required: [\n 'body',\n 'creator',\n 'time'\n ]\n };\n return {\n type: 'object',\n properties: {\n operations: {\n type: 'array',\n items: {\n type: 'array',\n items: operationSchema\n }\n },\n hash: {\n type: 'string',\n // set a minLength to not accidentally store an empty string\n minLength: 2\n }\n },\n additionalProperties: false,\n required: ['operations', 'hash']\n };\n}\n\n\nexport async function 
mergeCRDTFields(\n hashFunction: HashFunction,\n crdtsA: CRDTDocumentField,\n crdtsB: CRDTDocumentField\n): Promise> {\n\n // the value with most operations must be A to\n // ensure we not miss out rows when iterating over both fields.\n if (crdtsA.operations.length < crdtsB.operations.length) {\n [crdtsA, crdtsB] = [crdtsB, crdtsA];\n }\n\n const ret: CRDTDocumentField = {\n operations: [],\n hash: ''\n };\n crdtsA.operations.forEach((row, index) => {\n let mergedOps: CRDTOperation[] = [];\n const ids = new Set(); // used to deduplicate\n\n row.forEach(op => {\n ids.add(op.creator);\n mergedOps.push(op);\n });\n if (crdtsB.operations[index]) {\n crdtsB.operations[index].forEach(op => {\n if (!ids.has(op.creator)) {\n mergedOps.push(op);\n }\n });\n }\n mergedOps = mergedOps.sort(sortOperationComparator);\n ret.operations[index] = mergedOps;\n });\n\n\n ret.hash = await hashCRDTOperations(hashFunction, ret);\n return ret;\n}\n\nexport function rebuildFromCRDT(\n schema: RxJsonSchema>,\n docData: WithDeleted | RxDocType,\n crdts: CRDTDocumentField\n): WithDeleted {\n let base: WithDeleted = {\n _deleted: false\n } as any;\n setProperty(base, ensureNotFalsy(schema.crdt).field, crdts);\n crdts.operations.forEach(operations => {\n operations.forEach(op => {\n base = runOperationOnDocument(\n schema,\n base,\n op\n );\n });\n });\n return base;\n}\n\n\nexport function getCRDTConflictHandler(\n hashFunction: HashFunction,\n schema: RxJsonSchema>\n): RxConflictHandler {\n const crdtOptions = ensureNotFalsy(schema.crdt);\n const crdtField = crdtOptions.field;\n const getCRDTValue = objectPathMonad | RxDocType, CRDTDocumentField>(crdtField);\n\n const conflictHandler: RxConflictHandler = async (\n i: RxConflictHandlerInput,\n _context: string\n ) => {\n const newDocCrdt = getCRDTValue(i.newDocumentState);\n const masterDocCrdt = getCRDTValue(i.realMasterState);\n\n if (newDocCrdt.hash === masterDocCrdt.hash) {\n return Promise.resolve({\n isEqual: true\n });\n }\n\n const 
mergedCrdt = await mergeCRDTFields(hashFunction, newDocCrdt, masterDocCrdt);\n const mergedDoc = rebuildFromCRDT(\n schema,\n i.newDocumentState,\n mergedCrdt\n );\n return Promise.resolve({\n isEqual: false,\n documentData: mergedDoc\n });\n };\n\n return conflictHandler;\n}\n\n\nexport const RX_CRDT_CONTEXT = 'rx-crdt';\n\nexport const RxDBcrdtPlugin: RxPlugin = {\n name: 'crdt',\n rxdb: true,\n prototypes: {\n RxDocument: (proto: any) => {\n proto.updateCRDT = updateCRDT;\n\n const oldRemove = proto.remove;\n proto.remove = function (this: RxDocument) {\n if (!this.collection.schema.jsonSchema.crdt) {\n return oldRemove.bind(this)();\n }\n return this.updateCRDT({\n ifMatch: {\n $set: {\n _deleted: true\n }\n }\n });\n };\n\n const oldincrementalPatch = proto.incrementalPatch;\n proto.incrementalPatch = function (this: RxDocument, patch: any) {\n if (!this.collection.schema.jsonSchema.crdt) {\n return oldincrementalPatch.bind(this)(patch);\n }\n return this.updateCRDT({\n ifMatch: {\n $set: patch\n }\n });\n };\n const oldincrementalModify = proto.incrementalModify;\n proto.incrementalModify = function (fn: any, context: string) {\n if (!this.collection.schema.jsonSchema.crdt) {\n return oldincrementalModify.bind(this)(fn);\n }\n if (context === RX_CRDT_CONTEXT) {\n return oldincrementalModify.bind(this)(fn);\n } else {\n throw newRxError('CRDT2', {\n id: this.primary,\n args: { context }\n });\n }\n };\n },\n RxCollection: (proto: any) => {\n proto.insertCRDT = insertCRDT;\n }\n },\n overwritable: {},\n hooks: {\n preCreateRxCollection: {\n after: (data) => {\n if (!data.schema.crdt) {\n return;\n }\n if (data.conflictHandler) {\n throw newRxError('CRDT3', {\n collection: data.name,\n schema: data.schema\n });\n }\n data.conflictHandler = getCRDTConflictHandler(\n data.database.hashFunction,\n data.schema\n );\n }\n },\n createRxCollection: {\n after: ({ collection }) => {\n if (!collection.schema.jsonSchema.crdt) {\n return;\n }\n\n const crdtOptions = 
ensureNotFalsy(collection.schema.jsonSchema.crdt);\n const crdtField = crdtOptions.field;\n const getCrdt = objectPathMonad>(crdtOptions.field);\n\n /**\n * In dev-mode we have to ensure that all document writes\n * have the correct crdt state so that nothing is missed out\n * or could accidentally do non-crdt writes to the document.\n */\n if (overwritable.isDevMode()) {\n const bulkWriteBefore = collection.storageInstance.bulkWrite.bind(collection.storageInstance);\n collection.storageInstance.bulkWrite = async function (writes, context) {\n\n await Promise.all(\n writes.map(async (write) => {\n const newDocState: typeof write.document = clone(write.document);\n const crdts = getCrdt(newDocState);\n\n const rebuild = rebuildFromCRDT(\n collection.schema.jsonSchema,\n newDocState,\n crdts\n );\n\n function docWithoutMeta(doc: any) {\n const ret: any = {};\n Object.entries(doc).forEach(([k, v]) => {\n if (\n !k.startsWith('_') &&\n typeof v !== 'undefined'\n ) {\n ret[k] = v;\n }\n });\n return ret;\n }\n if (!deepEqual(docWithoutMeta(newDocState), docWithoutMeta(rebuild))) {\n throw newRxError('SNH', {\n document: newDocState\n });\n }\n const recalculatedHash = await hashCRDTOperations(collection.database.hashFunction, crdts);\n if (crdts.hash !== recalculatedHash) {\n throw newRxError('SNH', {\n document: newDocState,\n args: { hash: crdts.hash, recalculatedHash }\n });\n }\n })\n );\n\n return bulkWriteBefore(writes, context);\n };\n }\n\n\n const bulkInsertBefore = collection.bulkInsert.bind(collection);\n collection.bulkInsert = async function (docsData: any[]) {\n const storageToken = await collection.database.storageToken;\n const useDocsData = await Promise.all(\n docsData.map(async (docData) => {\n const setMe: Partial> = {};\n Object.entries(docData).forEach(([key, value]) => {\n if (\n !key.startsWith('_') &&\n key !== crdtField\n ) {\n setMe[key] = value;\n }\n });\n\n const crdtOperations: CRDTDocumentField = {\n operations: [\n [{\n creator: 
storageToken,\n body: [{\n ifMatch: {\n $set: setMe\n }\n }],\n time: now()\n }]\n ],\n hash: ''\n };\n crdtOperations.hash = await hashCRDTOperations(collection.database.hashFunction, crdtOperations);\n setProperty(docData, crdtOptions.field, crdtOperations);\n return docData;\n })\n );\n return bulkInsertBefore(useDocsData);\n };\n }\n }\n }\n};\n"],"mappings":";;;;;;;;;;;;;;AAAA,IAAAA,QAAA,GAAAC,OAAA;AAgBA,IAAAC,MAAA,GAAAD,OAAA;AAUA,IAAAE,OAAA,GAAAF,OAAA;AAOA,IAAAG,aAAA,GAAAH,OAAA;AAIO,eAAeI,UAAUA,CAE5BC,KAAoD,EACtD;EACEA,KAAK,GAAGC,oBAAY,CAACC,qBAAqB,CAACF,KAAK,CAAQ;EAExD,IAAMG,UAAU,GAAG,IAAI,CAACC,UAAU,CAACC,MAAM,CAACF,UAAU;EACpD,IAAI,CAACA,UAAU,CAACG,IAAI,EAAE;IAClB,MAAM,IAAAC,mBAAU,EAAC,OAAO,EAAE;MACtBF,MAAM,EAAEF,UAAU;MAClBK,QAAQ,EAAER;IACd,CAAC,CAAC;EACN;EACA,IAAMS,WAAW,GAAG,IAAAC,qBAAc,EAACP,UAAU,CAACG,IAAI,CAAC;EACnD,IAAMK,YAAY,GAAG,MAAM,IAAI,CAACP,UAAU,CAACQ,QAAQ,CAACD,YAAY;EAEhE,OAAO,IAAI,CAACE,iBAAiB,CAAC,MAAOC,OAAO,IAAK;IAC7C,IAAMC,YAA0C,GAAG,IAAAC,YAAK,EAAC,IAAAC,kBAAW,EAACH,OAAO,EAASL,WAAW,CAACS,KAAK,CAAC,CAAC;IACxG,IAAMC,SAAmC,GAAG;MACxCC,IAAI,EAAE,IAAAC,cAAO,EAACrB,KAAK,CAAC;MACpBsB,OAAO,EAAEX,YAAY;MACrBY,IAAI,EAAE,IAAAC,UAAG,EAAC;IACd,CAAC;;IAED;AACR;AACA;AACA;IACQ,IAAMC,MAAkC,GAAG,CAACN,SAAS,CAAC;IACtDJ,YAAY,CAACW,UAAU,CAACC,IAAI,CAACF,MAAM,CAAC;IACpCV,YAAY,CAACa,IAAI,GAAG,MAAMC,kBAAkB,CAAC,IAAI,CAACzB,UAAU,CAACQ,QAAQ,CAACkB,YAAY,EAAEf,YAAY,CAAC;IAEjGD,OAAO,GAAGiB,sBAAsB,CAC5B,IAAI,CAAC3B,UAAU,CAACC,MAAM,CAACF,UAAU,EACjCW,OAAO,EACPK,SACJ,CAAC;IACD,IAAAa,kBAAW,EAAClB,OAAO,EAAEL,WAAW,CAACS,KAAK,EAAEH,YAAY,CAAC;IACrD,OAAOD,OAAO;EAClB,CAAC,EAAEmB,eAAe,CAAC;AACvB;AAGO,eAAeC,UAAUA,CAE5BlC,KAAoD,EACtD;EACEA,KAAK,GAAGC,oBAAY,CAACC,qBAAqB,CAACF,KAAK,CAAQ;EAExD,IAAMG,UAAU,GAAG,IAAI,CAACE,MAAM,CAACF,UAAU;EACzC,IAAI,CAACA,UAAU,CAACG,IAAI,EAAE;IAClB,MAAM,IAAAC,mBAAU,EAAC,OAAO,EAAE;MACtBF,MAAM,EAAEF,UAAU;MAClBK,QAAQ,EAAER;IACd,CAAC,CAAC;EACN;EACA,IAAMS,WAAW,GAAG,IAAAC,qBAAc,EAACP,UAAU,CAACG,IAAI,CAAC;EACnD,IAAMK,YAAY,GAAG,MAAM,IAAI,CAACC,QAAQ,CAACD,YAAY;EACrD
,IAAMQ,SAAmC,GAAG;IACxCC,IAAI,EAAEe,KAAK,CAACC,OAAO,CAACpC,KAAK,CAAC,GAAGA,KAAK,GAAG,CAACA,KAAK,CAAC;IAC5CsB,OAAO,EAAEX,YAAY;IACrBY,IAAI,EAAE,IAAAC,UAAG,EAAC;EACd,CAAC;EAED,IAAIa,UAA0C,GAAG,CAAC,CAAQ;EAC1DA,UAAU,GAAGN,sBAAsB,CAC/B,IAAI,CAAC1B,MAAM,CAACF,UAAU,EACtBkC,UAAU,EACVlB,SACJ,CAAQ;EACR,IAAMJ,YAA0C,GAAG;IAC/CW,UAAU,EAAE,EAAE;IACdE,IAAI,EAAE;EACV,CAAC;EACD,IAAAI,kBAAW,EAACK,UAAU,EAAS5B,WAAW,CAACS,KAAK,EAAEH,YAAY,CAAC;EAE/D,IAAMU,MAAkC,GAAG,CAACN,SAAS,CAAC;EACtDJ,YAAY,CAACW,UAAU,CAACC,IAAI,CAACF,MAAM,CAAC;EACpCV,YAAY,CAACa,IAAI,GAAG,MAAMC,kBAAkB,CAAC,IAAI,CAACjB,QAAQ,CAACkB,YAAY,EAAEf,YAAY,CAAC;EAEtF,IAAMuB,MAAM,GAAG,MAAM,IAAI,CAACC,MAAM,CAACF,UAAU,CAAC,CAACG,KAAK,CAAC,MAAOC,GAAY,IAAK;IACvE,IAAIA,GAAG,CAACC,IAAI,KAAK,UAAU,EAAE;MACzB;MACA,IAAMC,GAAG,GAAG,MAAM,IAAI,CAACC,OAAO,CAACH,GAAG,CAACI,UAAU,CAACC,EAAE,CAAC,CAACC,IAAI,CAAC,IAAI,CAAC;MAC5D,OAAOJ,GAAG,CAAC5C,UAAU,CAACC,KAAK,CAAC;IAChC,CAAC,MAAM;MACH,MAAMyC,GAAG;IACb;EACJ,CAAC,CAAC;EACF,OAAOH,MAAM;AACjB;AAGO,SAASU,uBAAuBA,CAAYC,CAA2B,EAAEC,CAA2B,EAAE;EACzG,OAAOD,CAAC,CAAC3B,OAAO,GAAG4B,CAAC,CAAC5B,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC;AACzC;AAGA,SAASS,sBAAsBA,CAC3B1B,MAA+C,EAC/CS,OAA+B,EAC/BK,SAAmC,EACb;EACtB,IAAMgC,UAAU,GAAGhC,SAAS,CAACC,IAAI;EACjC+B,UAAU,CAACC,OAAO,CAACC,SAAS,IAAI;IAC5B,IAAIC,UAAmB;IACvB,IAAID,SAAS,CAACE,QAAQ,EAAE;MACpB,IAAMC,KAAkC,GAAG;QACvCD,QAAQ,EAAE,IAAA7C,qBAAc,EAAC2C,SAAS,CAACE,QAAe,CAAC;QACnDE,IAAI,EAAE,EAAE;QACRC,IAAI,EAAE;MACV,CAAC;MACD,IAAMC,OAAO,GAAG,IAAAC,uBAAe,EAACvD,MAAM,EAAEmD,KAAK,CAAC;MAC9CF,UAAU,GAAGK,OAAO,CAAC7C,OAAc,CAAC;IACxC,CAAC,MAAM;MACHwC,UAAU,GAAG,IAAI;IACrB;IACA,IAAIA,UAAU,EAAE;MACZ,IAAID,SAAS,CAACQ,OAAO,EAAE;QACnB/C,OAAO,GAAG,IAAAgD,0BAAY,EAAyBhD,OAAO,EAAEuC,SAAS,CAACQ,OAAO,CAAC;MAC9E;IACJ,CAAC,MAAM;MACH,IAAIR,SAAS,CAACU,UAAU,EAAE;QACtBjD,OAAO,GAAG,IAAAgD,0BAAY,EAAyBhD,OAAO,EAAEuC,SAAS,CAACU,UAAU,CAAC;MACjF;IACJ;EACJ,CAAC,CAAC;EACF,OAAOjD,OAAO;AAClB;AAEO,eAAee,kBAAkBA,CACpCC,YAA0B,EAC1BkC,KAA6B,EACd;EACf,IAAMC,OAAO,GAAGD,KAAK,CAACtC,UAAU,CAACwC,GAAG,CAAExC,UAAU,IAAK;IACjD,
OAAOA,UAAU,CAACwC,GAAG,CAACC,EAAE,IAAIA,EAAE,CAAC7C,OAAO,CAAC;EAC3C,CAAC,CAAC;EACF,IAAMM,IAAI,GAAG,MAAME,YAAY,CAACsC,IAAI,CAACC,SAAS,CAACJ,OAAO,CAAC,CAAC;EACxD,OAAOrC,IAAI;AACf;AAEO,SAAS0C,iBAAiBA,CAAA,EAAwD;EACrF,IAAMC,eAAqD,GAAG;IAC1DC,IAAI,EAAE,QAAQ;IACdC,UAAU,EAAE;MACRrD,IAAI,EAAE;QACFoD,IAAI,EAAE,OAAO;QACbE,KAAK,EAAE;UACHF,IAAI,EAAE,QAAQ;UACdC,UAAU,EAAE;YACRlB,QAAQ,EAAE;cACNiB,IAAI,EAAE;YACV,CAAC;YACDX,OAAO,EAAE;cACLW,IAAI,EAAE;YACV,CAAC;YACDT,UAAU,EAAE;cACRS,IAAI,EAAE;YACV;UACJ,CAAC;UACDG,oBAAoB,EAAE;QAC1B,CAAC;QACDC,QAAQ,EAAE;MACd,CAAC;MACDtD,OAAO,EAAE;QACLkD,IAAI,EAAE;MACV,CAAC;MACDjD,IAAI,EAAE;QACFiD,IAAI,EAAE,QAAQ;QACdK,OAAO,EAAE,CAAC;QACVC,OAAO,EAAE,gBAAgB;QACzBC,UAAU,EAAE;MAChB;IACJ,CAAC;IACDJ,oBAAoB,EAAE,KAAK;IAC3BK,QAAQ,EAAE,CACN,MAAM,EACN,SAAS,EACT,MAAM;EAEd,CAAC;EACD,OAAO;IACHR,IAAI,EAAE,QAAQ;IACdC,UAAU,EAAE;MACR/C,UAAU,EAAE;QACR8C,IAAI,EAAE,OAAO;QACbE,KAAK,EAAE;UACHF,IAAI,EAAE,OAAO;UACbE,KAAK,EAAEH;QACX;MACJ,CAAC;MACD3C,IAAI,EAAE;QACF4C,IAAI,EAAE,QAAQ;QACd;QACAS,SAAS,EAAE;MACf;IACJ,CAAC;IACDN,oBAAoB,EAAE,KAAK;IAC3BK,QAAQ,EAAE,CAAC,YAAY,EAAE,MAAM;EACnC,CAAC;AACL;AAGO,eAAeE,eAAeA,CACjCpD,YAA0B,EAC1BqD,MAAoC,EACpCC,MAAoC,EACC;EAErC;EACA;EACA,IAAID,MAAM,CAACzD,UAAU,CAAC2D,MAAM,GAAGD,MAAM,CAAC1D,UAAU,CAAC2D,MAAM,EAAE;IACrD,CAACF,MAAM,EAAEC,MAAM,CAAC,GAAG,CAACA,MAAM,EAAED,MAAM,CAAC;EACvC;EAEA,IAAMG,GAAiC,GAAG;IACtC5D,UAAU,EAAE,EAAE;IACdE,IAAI,EAAE;EACV,CAAC;EACDuD,MAAM,CAACzD,UAAU,CAAC0B,OAAO,CAAC,CAACmC,GAAG,EAAEC,KAAK,KAAK;IACtC,IAAIC,SAAqC,GAAG,EAAE;IAC9C,IAAMC,GAAG,GAAG,IAAIC,GAAG,CAAS,CAAC,CAAC,CAAC;;IAE/BJ,GAAG,CAACnC,OAAO,CAACe,EAAE,IAAI;MACduB,GAAG,CAACE,GAAG,CAACzB,EAAE,CAAC7C,OAAO,CAAC;MACnBmE,SAAS,CAAC9D,IAAI,CAACwC,EAAE,CAAC;IACtB,CAAC,CAAC;IACF,IAAIiB,MAAM,CAAC1D,UAAU,CAAC8D,KAAK,CAAC,EAAE;MAC1BJ,MAAM,CAAC1D,UAAU,CAAC8D,KAAK,CAAC,CAACpC,OAAO,CAACe,EAAE,IAAI;QACnC,IAAI,CAACuB,GAAG,CAACG,GAAG,CAAC1B,EAAE,CAAC7C,OAAO,CAAC,EAAE;UACtBmE,SAAS,CAAC9D,IAAI,CAACwC,EAAE,CAAC;QACtB;MACJ,CAAC,CAAC;IACN;IACAsB,SAAS,GAAGA,SAAS,CAAChC,IAAI,CAACT,uBAAuB,CAAC
;IACnDsC,GAAG,CAAC5D,UAAU,CAAC8D,KAAK,CAAC,GAAGC,SAAS;EACrC,CAAC,CAAC;EAGFH,GAAG,CAAC1D,IAAI,GAAG,MAAMC,kBAAkB,CAACC,YAAY,EAAEwD,GAAG,CAAC;EACtD,OAAOA,GAAG;AACd;AAEO,SAASQ,eAAeA,CAC3BzF,MAA+C,EAC/CS,OAA2C,EAC3CkD,KAAmC,EACb;EACtB,IAAI+B,IAA4B,GAAG;IAC/BC,QAAQ,EAAE;EACd,CAAQ;EACR,IAAAhE,kBAAW,EAAC+D,IAAI,EAAE,IAAArF,qBAAc,EAACL,MAAM,CAACC,IAAI,CAAC,CAACY,KAAK,EAAE8C,KAAK,CAAC;EAC3DA,KAAK,CAACtC,UAAU,CAAC0B,OAAO,CAAC1B,UAAU,IAAI;IACnCA,UAAU,CAAC0B,OAAO,CAACe,EAAE,IAAI;MACrB4B,IAAI,GAAGhE,sBAAsB,CACzB1B,MAAM,EACN0F,IAAI,EACJ5B,EACJ,CAAC;IACL,CAAC,CAAC;EACN,CAAC,CAAC;EACF,OAAO4B,IAAI;AACf;AAGO,SAASE,sBAAsBA,CAClCnE,YAA0B,EAC1BzB,MAA+C,EACnB;EAC5B,IAAMI,WAAW,GAAG,IAAAC,qBAAc,EAACL,MAAM,CAACC,IAAI,CAAC;EAC/C,IAAM4F,SAAS,GAAGzF,WAAW,CAACS,KAAK;EACnC,IAAMiF,YAAY,GAAG,IAAAC,sBAAe,EAAmEF,SAAS,CAAC;EAEjH,IAAMG,eAA6C,GAAG,MAAAA,CAClDC,CAAoC,EACpCC,QAAgB,KACf;IACD,IAAMC,UAAU,GAAGL,YAAY,CAACG,CAAC,CAACG,gBAAgB,CAAC;IACnD,IAAMC,aAAa,GAAGP,YAAY,CAACG,CAAC,CAACK,eAAe,CAAC;IAErD,IAAIH,UAAU,CAAC5E,IAAI,KAAK8E,aAAa,CAAC9E,IAAI,EAAE;MACxC,OAAOgF,OAAO,CAACC,OAAO,CAAC;QACnBC,OAAO,EAAE;MACb,CAAC,CAAC;IACN;IAEA,IAAMC,UAAU,GAAG,MAAM7B,eAAe,CAACpD,YAAY,EAAE0E,UAAU,EAAEE,aAAa,CAAC;IACjF,IAAMM,SAAS,GAAGlB,eAAe,CAC7BzF,MAAM,EACNiG,CAAC,CAACG,gBAAgB,EAClBM,UACJ,CAAC;IACD,OAAOH,OAAO,CAACC,OAAO,CAAC;MACnBC,OAAO,EAAE,KAAK;MACdG,YAAY,EAAED;IAClB,CAAC,CAAC;EACN,CAAC;EAED,OAAOX,eAAe;AAC1B;AAGO,IAAMpE,eAAe,GAAAiF,OAAA,CAAAjF,eAAA,GAAG,SAAS;AAEjC,IAAMkF,cAAwB,GAAAD,OAAA,CAAAC,cAAA,GAAG;EACpCC,IAAI,EAAE,MAAM;EACZC,IAAI,EAAE,IAAI;EACVC,UAAU,EAAE;IACRC,UAAU,EAAGC,KAAU,IAAK;MACxBA,KAAK,CAACzH,UAAU,GAAGA,UAAU;MAE7B,IAAM0H,SAAS,GAAGD,KAAK,CAACE,MAAM;MAC9BF,KAAK,CAACE,MAAM,GAAG,YAA4B;QACvC,IAAI,CAAC,IAAI,CAACtH,UAAU,CAACC,MAAM,CAACF,UAAU,CAACG,IAAI,EAAE;UACzC,OAAOmH,SAAS,CAACE,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;QACjC;QACA,OAAO,IAAI,CAAC5H,UAAU,CAAC;UACnB8D,OAAO,EAAE;YACL+D,IAAI,EAAE;cACF5B,QAAQ,EAAE;YACd;UACJ;QACJ,CAAC,CAAC;MACN,CAAC;MAED,IAAM6B,mBAAmB,GAAGL,KAAK,CAACM,gBAAgB;MAClDN,KAAK,CAACM,gBAAgB,GAAG,UAA4BC,KAA
U,EAAE;QAC7D,IAAI,CAAC,IAAI,CAAC3H,UAAU,CAACC,MAAM,CAACF,UAAU,CAACG,IAAI,EAAE;UACzC,OAAOuH,mBAAmB,CAACF,IAAI,CAAC,IAAI,CAAC,CAACI,KAAK,CAAC;QAChD;QACA,OAAO,IAAI,CAAChI,UAAU,CAAC;UACnB8D,OAAO,EAAE;YACL+D,IAAI,EAAEG;UACV;QACJ,CAAC,CAAC;MACN,CAAC;MACD,IAAMC,oBAAoB,GAAGR,KAAK,CAAC3G,iBAAiB;MACpD2G,KAAK,CAAC3G,iBAAiB,GAAG,UAAUoH,EAAO,EAAEC,OAAe,EAAE;QAC1D,IAAI,CAAC,IAAI,CAAC9H,UAAU,CAACC,MAAM,CAACF,UAAU,CAACG,IAAI,EAAE;UACzC,OAAO0H,oBAAoB,CAACL,IAAI,CAAC,IAAI,CAAC,CAACM,EAAE,CAAC;QAC9C;QACA,IAAIC,OAAO,KAAKjG,eAAe,EAAE;UAC7B,OAAO+F,oBAAoB,CAACL,IAAI,CAAC,IAAI,CAAC,CAACM,EAAE,CAAC;QAC9C,CAAC,MAAM;UACH,MAAM,IAAA1H,mBAAU,EAAC,OAAO,EAAE;YACtBuC,EAAE,EAAE,IAAI,CAACqF,OAAO;YAChBC,IAAI,EAAE;cAAEF;YAAQ;UACpB,CAAC,CAAC;QACN;MACJ,CAAC;IACL,CAAC;IACDG,YAAY,EAAGb,KAAU,IAAK;MAC1BA,KAAK,CAACtF,UAAU,GAAGA,UAAU;IACjC;EACJ,CAAC;EACDjC,YAAY,EAAE,CAAC,CAAC;EAChBqI,KAAK,EAAE;IACHC,qBAAqB,EAAE;MACnBC,KAAK,EAAGC,IAAI,IAAK;QACb,IAAI,CAACA,IAAI,CAACpI,MAAM,CAACC,IAAI,EAAE;UACnB;QACJ;QACA,IAAImI,IAAI,CAACpC,eAAe,EAAE;UACtB,MAAM,IAAA9F,mBAAU,EAAC,OAAO,EAAE;YACtBH,UAAU,EAAEqI,IAAI,CAACrB,IAAI;YACrB/G,MAAM,EAAEoI,IAAI,CAACpI;UACjB,CAAC,CAAC;QACN;QACAoI,IAAI,CAACpC,eAAe,GAAGJ,sBAAsB,CACzCwC,IAAI,CAAC7H,QAAQ,CAACkB,YAAY,EAC1B2G,IAAI,CAACpI,MACT,CAAC;MACL;IACJ,CAAC;IACDqI,kBAAkB,EAAE;MAChBF,KAAK,EAAEA,CAAC;QAAEpI;MAAW,CAAC,KAAK;QACvB,IAAI,CAACA,UAAU,CAACC,MAAM,CAACF,UAAU,CAACG,IAAI,EAAE;UACpC;QACJ;QAEA,IAAMG,WAAW,GAAG,IAAAC,qBAAc,EAACN,UAAU,CAACC,MAAM,CAACF,UAAU,CAACG,IAAI,CAAC;QACrE,IAAM4F,SAAS,GAAGzF,WAAW,CAACS,KAAK;QACnC,IAAMyH,OAAO,GAAG,IAAAvC,sBAAe,EAA8B3F,WAAW,CAACS,KAAK,CAAC;;QAE/E;AAChB;AACA;AACA;AACA;QACgB,IAAIjB,oBAAY,CAAC2I,SAAS,CAAC,CAAC,EAAE;UAC1B,IAAMC,eAAe,GAAGzI,UAAU,CAAC0I,eAAe,CAACC,SAAS,CAACpB,IAAI,CAACvH,UAAU,CAAC0I,eAAe,CAAC;UAC7F1I,UAAU,CAAC0I,eAAe,CAACC,SAAS,GAAG,gBAAgBC,MAAM,EAAEd,OAAO,EAAE;YAEpE,MAAMtB,OAAO,CAACqC,GAAG,CACbD,MAAM,CAAC9E,GAAG,CAAC,MAAOgF,KAAK,IAAK;cACxB,IAAMC,WAAkC,GAAG,IAAAnI,YAAK,EAACkI,KAAK,CAACE,QAAQ,CAAC;cAChE,IAAMpF,KAAK,GAAG2E,OAAO,CAACQ,WAAW,CAAC;cAElC,I
AAME,OAAO,GAAGvD,eAAe,CAC3B1F,UAAU,CAACC,MAAM,CAACF,UAAU,EAC5BgJ,WAAW,EACXnF,KACJ,CAAC;cAED,SAASsF,cAAcA,CAAC3G,GAAQ,EAAE;gBAC9B,IAAM2C,GAAQ,GAAG,CAAC,CAAC;gBACnBiE,MAAM,CAACC,OAAO,CAAC7G,GAAG,CAAC,CAACS,OAAO,CAAC,CAAC,CAACqG,CAAC,EAAEC,CAAC,CAAC,KAAK;kBACpC,IACI,CAACD,CAAC,CAACE,UAAU,CAAC,GAAG,CAAC,IAClB,OAAOD,CAAC,KAAK,WAAW,EAC1B;oBACEpE,GAAG,CAACmE,CAAC,CAAC,GAAGC,CAAC;kBACd;gBACJ,CAAC,CAAC;gBACF,OAAOpE,GAAG;cACd;cACA,IAAI,CAAC,IAAAsE,gBAAS,EAACN,cAAc,CAACH,WAAW,CAAC,EAAEG,cAAc,CAACD,OAAO,CAAC,CAAC,EAAE;gBAClE,MAAM,IAAA9I,mBAAU,EAAC,KAAK,EAAE;kBACpB6I,QAAQ,EAAED;gBACd,CAAC,CAAC;cACN;cACA,IAAMU,gBAAgB,GAAG,MAAMhI,kBAAkB,CAACzB,UAAU,CAACQ,QAAQ,CAACkB,YAAY,EAAEkC,KAAK,CAAC;cAC1F,IAAIA,KAAK,CAACpC,IAAI,KAAKiI,gBAAgB,EAAE;gBACjC,MAAM,IAAAtJ,mBAAU,EAAC,KAAK,EAAE;kBACpB6I,QAAQ,EAAED,WAAW;kBACrBf,IAAI,EAAE;oBAAExG,IAAI,EAAEoC,KAAK,CAACpC,IAAI;oBAAEiI;kBAAiB;gBAC/C,CAAC,CAAC;cACN;YACJ,CAAC,CACL,CAAC;YAED,OAAOhB,eAAe,CAACG,MAAM,EAAEd,OAAO,CAAC;UAC3C,CAAC;QACL;QAGA,IAAM4B,gBAAgB,GAAG1J,UAAU,CAAC2J,UAAU,CAACpC,IAAI,CAACvH,UAAU,CAAC;QAC/DA,UAAU,CAAC2J,UAAU,GAAG,gBAAgBC,QAAe,EAAE;UACrD,IAAMrJ,YAAY,GAAG,MAAMP,UAAU,CAACQ,QAAQ,CAACD,YAAY;UAC3D,IAAMsJ,WAAW,GAAG,MAAMrD,OAAO,CAACqC,GAAG,CACjCe,QAAQ,CAAC9F,GAAG,CAAC,MAAOpD,OAAO,IAAK;YAC5B,IAAMoJ,KAAmC,GAAG,CAAC,CAAC;YAC9CX,MAAM,CAACC,OAAO,CAAC1I,OAAO,CAAC,CAACsC,OAAO,CAAC,CAAC,CAAC+G,GAAG,EAAEC,KAAK,CAAC,KAAK;cAC9C,IACI,CAACD,GAAG,CAACR,UAAU,CAAC,GAAG,CAAC,IACpBQ,GAAG,KAAKjE,SAAS,EACnB;gBACEgE,KAAK,CAACC,GAAG,CAAC,GAAGC,KAAK;cACtB;YACJ,CAAC,CAAC;YAEF,IAAMC,cAAsC,GAAG;cAC3C3I,UAAU,EAAE,CACR,CAAC;gBACGJ,OAAO,EAAEX,YAAY;gBACrBS,IAAI,EAAE,CAAC;kBACHyC,OAAO,EAAE;oBACL+D,IAAI,EAAEsC;kBACV;gBACJ,CAAC,CAAC;gBACF3I,IAAI,EAAE,IAAAC,UAAG,EAAC;cACd,CAAC,CAAC,CACL;cACDI,IAAI,EAAE;YACV,CAAC;YACDyI,cAAc,CAACzI,IAAI,GAAG,MAAMC,kBAAkB,CAACzB,UAAU,CAACQ,QAAQ,CAACkB,YAAY,EAAEuI,cAAc,CAAC;YAChG,IAAArI,kBAAW,EAAClB,OAAO,EAAEL,WAAW,CAACS,KAAK,EAAEmJ,cAAc,CAAC;YACvD,OAAOvJ,OAAO;UAClB,CAAC,CACL,CAAC;UACD,OAAOgJ,gBAAgB,CAACG,WAAW,CAAC;QACxC,CAAC;MACL;IACJ;EA
CJ;AACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/dev-mode/check-document.js b/dist/cjs/plugins/dev-mode/check-document.js deleted file mode 100644 index c60d9090c7b..00000000000 --- a/dist/cjs/plugins/dev-mode/check-document.js +++ /dev/null @@ -1,128 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.checkWriteRows = checkWriteRows; -exports.containsDateInstance = containsDateInstance; -exports.ensurePrimaryKeyValid = ensurePrimaryKeyValid; -var _rxError = require("../../rx-error.js"); -var _rxSchemaHelper = require("../../rx-schema-helper.js"); -function ensurePrimaryKeyValid(primaryKey, docData) { - if (!primaryKey) { - throw (0, _rxError.newRxError)('DOC20', { - primaryKey, - document: docData - }); - } - - /** - * This is required so that we can left-pad - * the primaryKey and we are still able to de-left-pad - * it to get again the original key. - */ - if (primaryKey !== primaryKey.trim()) { - throw (0, _rxError.newRxError)('DOC21', { - primaryKey, - document: docData - }); - } - if (primaryKey.includes('\r') || primaryKey.includes('\n')) { - throw (0, _rxError.newRxError)('DOC22', { - primaryKey, - document: docData - }); - } - if (primaryKey.includes('"')) { - throw (0, _rxError.newRxError)('DOC23', { - primaryKey, - document: docData - }); - } -} - -/** - * Deeply checks if the object contains an - * instance of the JavaScript Date class. 
- * @recursive - */ -function containsDateInstance(obj) { - if (typeof obj !== 'object' || obj === null) { - return false; - } - for (var key in obj) { - if (obj.hasOwnProperty(key)) { - if (obj[key] instanceof Date) { - return true; - } - if (typeof obj[key] === 'object' && containsDateInstance(obj[key])) { - return true; - } - } - } - return false; -} -function checkWriteRows(storageInstance, rows) { - var primaryPath = (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(storageInstance.schema.primaryKey); - var _loop = function (writeRow) { - // ensure that the primary key has not been changed - writeRow.document = (0, _rxSchemaHelper.fillPrimaryKey)(primaryPath, storageInstance.schema, writeRow.document); - - /** - * Ensure that _meta fields have been merged - * and not replaced. - * This is important so that when one plugin A - * sets a _meta field and another plugin B does a write - * to the document, it must be ensured that the - * field of plugin A was not removed. - */ - if (writeRow.previous) { - Object.keys(writeRow.previous._meta).forEach(metaFieldName => { - if (!Object.prototype.hasOwnProperty.call(writeRow.document._meta, metaFieldName)) { - throw (0, _rxError.newRxError)('SNH', { - dataBefore: writeRow.previous, - dataAfter: writeRow.document, - args: { - metaFieldName - } - }); - } - }); - } - - /** - * Ensure it can be structured cloned - */ - try { - /** - * Notice that structuredClone() is not available - * in ReactNative, so we test for JSON.stringify() instead - * @link https://github.com/pubkey/rxdb/issues/5046#issuecomment-1827374498 - */ - if (typeof structuredClone === 'function') { - structuredClone(writeRow); - } else { - JSON.parse(JSON.stringify(writeRow)); - } - } catch (err) { - throw (0, _rxError.newRxError)('DOC24', { - collection: storageInstance.collectionName, - document: writeRow.document - }); - } - - /** - * Ensure it does not contain a Date() object - */ - if (containsDateInstance(writeRow.document)) { - throw (0, 
_rxError.newRxError)('DOC24', { - collection: storageInstance.collectionName, - document: writeRow.document - }); - } - }; - for (var writeRow of rows) { - _loop(writeRow); - } -} -//# sourceMappingURL=check-document.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/dev-mode/check-document.js.map b/dist/cjs/plugins/dev-mode/check-document.js.map deleted file mode 100644 index e74f044a610..00000000000 --- a/dist/cjs/plugins/dev-mode/check-document.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"check-document.js","names":["_rxError","require","_rxSchemaHelper","ensurePrimaryKeyValid","primaryKey","docData","newRxError","document","trim","includes","containsDateInstance","obj","key","hasOwnProperty","Date","checkWriteRows","storageInstance","rows","primaryPath","getPrimaryFieldOfPrimaryKey","schema","_loop","writeRow","fillPrimaryKey","previous","Object","keys","_meta","forEach","metaFieldName","prototype","call","dataBefore","dataAfter","args","structuredClone","JSON","parse","stringify","err","collection","collectionName"],"sources":["../../../../src/plugins/dev-mode/check-document.ts"],"sourcesContent":["import { newRxError } from '../../rx-error.ts';\nimport { fillPrimaryKey, getPrimaryFieldOfPrimaryKey } from '../../rx-schema-helper.ts';\nimport type { BulkWriteRow, RxDocumentData, RxStorageInstance } from '../../types/index.d.ts';\n\nexport function ensurePrimaryKeyValid(\n primaryKey: string,\n docData: RxDocumentData\n) {\n if (!primaryKey) {\n throw newRxError('DOC20', {\n primaryKey,\n document: docData\n });\n }\n\n\n /**\n * This is required so that we can left-pad\n * the primaryKey and we are still able to de-left-pad\n * it to get again the original key.\n */\n if (\n primaryKey !== primaryKey.trim()\n ) {\n throw newRxError('DOC21', {\n primaryKey,\n document: docData\n });\n }\n if (\n primaryKey.includes('\\r') ||\n primaryKey.includes('\\n')\n ) {\n throw newRxError('DOC22', {\n primaryKey,\n document: docData\n });\n }\n if 
(\n primaryKey.includes('\"')\n ) {\n throw newRxError('DOC23', {\n primaryKey,\n document: docData\n });\n }\n}\n\n/**\n * Deeply checks if the object contains an\n * instance of the JavaScript Date class.\n * @recursive\n */\nexport function containsDateInstance(obj: any): boolean {\n if (typeof obj !== 'object' || obj === null) {\n return false;\n }\n for (let key in obj) {\n if (obj.hasOwnProperty(key)) {\n if (obj[key] instanceof Date) {\n return true;\n }\n if (typeof obj[key] === 'object' && containsDateInstance(obj[key])) {\n return true;\n }\n }\n }\n return false;\n}\n\n\nexport function checkWriteRows(\n storageInstance: RxStorageInstance,\n rows: BulkWriteRow[]\n) {\n const primaryPath = getPrimaryFieldOfPrimaryKey(storageInstance.schema.primaryKey);\n for (const writeRow of rows) {\n // ensure that the primary key has not been changed\n writeRow.document = fillPrimaryKey(\n primaryPath,\n storageInstance.schema,\n writeRow.document\n );\n\n\n\n /**\n * Ensure that _meta fields have been merged\n * and not replaced.\n * This is important so that when one plugin A\n * sets a _meta field and another plugin B does a write\n * to the document, it must be ensured that the\n * field of plugin A was not removed.\n */\n if (writeRow.previous) {\n Object.keys(writeRow.previous._meta)\n .forEach(metaFieldName => {\n if (!Object.prototype.hasOwnProperty.call(writeRow.document._meta, metaFieldName)) {\n throw newRxError('SNH', {\n dataBefore: writeRow.previous,\n dataAfter: writeRow.document,\n args: {\n metaFieldName\n }\n });\n }\n });\n }\n\n /**\n * Ensure it can be structured cloned\n */\n try {\n /**\n * Notice that structuredClone() is not available\n * in ReactNative, so we test for JSON.stringify() instead\n * @link https://github.com/pubkey/rxdb/issues/5046#issuecomment-1827374498\n */\n if (typeof structuredClone === 'function') {\n structuredClone(writeRow);\n } else {\n JSON.parse(JSON.stringify(writeRow));\n }\n } catch (err) {\n throw 
newRxError('DOC24', {\n collection: storageInstance.collectionName,\n document: writeRow.document\n });\n }\n\n\n /**\n * Ensure it does not contain a Date() object\n */\n if (containsDateInstance(writeRow.document)) {\n throw newRxError('DOC24', {\n collection: storageInstance.collectionName,\n document: writeRow.document\n });\n }\n }\n\n}\n"],"mappings":";;;;;;;;AAAA,IAAAA,QAAA,GAAAC,OAAA;AACA,IAAAC,eAAA,GAAAD,OAAA;AAGO,SAASE,qBAAqBA,CACjCC,UAAkB,EAClBC,OAA4B,EAC9B;EACE,IAAI,CAACD,UAAU,EAAE;IACb,MAAM,IAAAE,mBAAU,EAAC,OAAO,EAAE;MACtBF,UAAU;MACVG,QAAQ,EAAEF;IACd,CAAC,CAAC;EACN;;EAGA;AACJ;AACA;AACA;AACA;EACI,IACID,UAAU,KAAKA,UAAU,CAACI,IAAI,CAAC,CAAC,EAClC;IACE,MAAM,IAAAF,mBAAU,EAAC,OAAO,EAAE;MACtBF,UAAU;MACVG,QAAQ,EAAEF;IACd,CAAC,CAAC;EACN;EACA,IACID,UAAU,CAACK,QAAQ,CAAC,IAAI,CAAC,IACzBL,UAAU,CAACK,QAAQ,CAAC,IAAI,CAAC,EAC3B;IACE,MAAM,IAAAH,mBAAU,EAAC,OAAO,EAAE;MACtBF,UAAU;MACVG,QAAQ,EAAEF;IACd,CAAC,CAAC;EACN;EACA,IACID,UAAU,CAACK,QAAQ,CAAC,GAAG,CAAC,EAC1B;IACE,MAAM,IAAAH,mBAAU,EAAC,OAAO,EAAE;MACtBF,UAAU;MACVG,QAAQ,EAAEF;IACd,CAAC,CAAC;EACN;AACJ;;AAEA;AACA;AACA;AACA;AACA;AACO,SAASK,oBAAoBA,CAACC,GAAQ,EAAW;EACpD,IAAI,OAAOA,GAAG,KAAK,QAAQ,IAAIA,GAAG,KAAK,IAAI,EAAE;IACzC,OAAO,KAAK;EAChB;EACA,KAAK,IAAIC,GAAG,IAAID,GAAG,EAAE;IACjB,IAAIA,GAAG,CAACE,cAAc,CAACD,GAAG,CAAC,EAAE;MACzB,IAAID,GAAG,CAACC,GAAG,CAAC,YAAYE,IAAI,EAAE;QAC1B,OAAO,IAAI;MACf;MACA,IAAI,OAAOH,GAAG,CAACC,GAAG,CAAC,KAAK,QAAQ,IAAIF,oBAAoB,CAACC,GAAG,CAACC,GAAG,CAAC,CAAC,EAAE;QAChE,OAAO,IAAI;MACf;IACJ;EACJ;EACA,OAAO,KAAK;AAChB;AAGO,SAASG,cAAcA,CAC1BC,eAA4D,EAC5DC,IAA+B,EACjC;EACE,IAAMC,WAAW,GAAG,IAAAC,2CAA2B,EAACH,eAAe,CAACI,MAAM,CAAChB,UAAU,CAAC;EAAC,IAAAiB,KAAA,YAAAA,CAAAC,QAAA,EACtD;IACzB;IACAA,QAAQ,CAACf,QAAQ,GAAG,IAAAgB,8BAAc,EAC9BL,WAAW,EACXF,eAAe,CAACI,MAAM,EACtBE,QAAQ,CAACf,QACb,CAAC;;IAID;AACR;AACA;AACA;AACA;AACA;AACA;AACA;IACQ,IAAIe,QAAQ,CAACE,QAAQ,EAAE;MACnBC,MAAM,CAACC,IAAI,CAACJ,QAAQ,CAACE,QAAQ,CAACG,KAAK,CAAC,CAC/BC,OAAO,CAACC,aAAa,IAAI;QACtB,IAAI,CAACJ,MAAM,CAACK,SAAS,CAACjB,cAAc,CAACkB,IAAI,CAA
CT,QAAQ,CAACf,QAAQ,CAACoB,KAAK,EAAEE,aAAa,CAAC,EAAE;UAC/E,MAAM,IAAAvB,mBAAU,EAAC,KAAK,EAAE;YACpB0B,UAAU,EAAEV,QAAQ,CAACE,QAAQ;YAC7BS,SAAS,EAAEX,QAAQ,CAACf,QAAQ;YAC5B2B,IAAI,EAAE;cACFL;YACJ;UACJ,CAAC,CAAC;QACN;MACJ,CAAC,CAAC;IACV;;IAEA;AACR;AACA;IACQ,IAAI;MACA;AACZ;AACA;AACA;AACA;MACY,IAAI,OAAOM,eAAe,KAAK,UAAU,EAAE;QACvCA,eAAe,CAACb,QAAQ,CAAC;MAC7B,CAAC,MAAM;QACHc,IAAI,CAACC,KAAK,CAACD,IAAI,CAACE,SAAS,CAAChB,QAAQ,CAAC,CAAC;MACxC;IACJ,CAAC,CAAC,OAAOiB,GAAG,EAAE;MACV,MAAM,IAAAjC,mBAAU,EAAC,OAAO,EAAE;QACtBkC,UAAU,EAAExB,eAAe,CAACyB,cAAc;QAC1ClC,QAAQ,EAAEe,QAAQ,CAACf;MACvB,CAAC,CAAC;IACN;;IAGA;AACR;AACA;IACQ,IAAIG,oBAAoB,CAACY,QAAQ,CAACf,QAAQ,CAAC,EAAE;MACzC,MAAM,IAAAD,mBAAU,EAAC,OAAO,EAAE;QACtBkC,UAAU,EAAExB,eAAe,CAACyB,cAAc;QAC1ClC,QAAQ,EAAEe,QAAQ,CAACf;MACvB,CAAC,CAAC;IACN;EACJ,CAAC;EAhED,KAAK,IAAMe,QAAQ,IAAIL,IAAI;IAAAI,KAAA,CAAAC,QAAA;EAAA;AAkE/B","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/dev-mode/check-migration-strategies.js b/dist/cjs/plugins/dev-mode/check-migration-strategies.js deleted file mode 100644 index 486a64cca36..00000000000 --- a/dist/cjs/plugins/dev-mode/check-migration-strategies.js +++ /dev/null @@ -1,43 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.checkMigrationStrategies = checkMigrationStrategies; -var _rxError = require("../../rx-error.js"); -var _rxSchema = require("../../rx-schema.js"); -/** - * checks if the migrationStrategies are ok, throws if not - * @throws {Error|TypeError} if not ok - */ -function checkMigrationStrategies(schema, migrationStrategies) { - // migrationStrategies must be object not array - if (typeof migrationStrategies !== 'object' || Array.isArray(migrationStrategies)) { - throw (0, _rxError.newRxTypeError)('COL11', { - schema - }); - } - var previousVersions = (0, _rxSchema.getPreviousVersions)(schema); - - // for every previousVersion there must be strategy - if (previousVersions.length !== 
Object.keys(migrationStrategies).length) { - throw (0, _rxError.newRxError)('COL12', { - have: Object.keys(migrationStrategies), - should: previousVersions - }); - } - - // every strategy must have number as property and be a function - previousVersions.map(vNr => ({ - v: vNr, - s: migrationStrategies[vNr + 1] - })).filter(strategy => typeof strategy.s !== 'function').forEach(strategy => { - throw (0, _rxError.newRxTypeError)('COL13', { - version: strategy.v, - type: typeof strategy, - schema - }); - }); - return true; -} -//# sourceMappingURL=check-migration-strategies.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/dev-mode/check-migration-strategies.js.map b/dist/cjs/plugins/dev-mode/check-migration-strategies.js.map deleted file mode 100644 index fffedf0d2ee..00000000000 --- a/dist/cjs/plugins/dev-mode/check-migration-strategies.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"check-migration-strategies.js","names":["_rxError","require","_rxSchema","checkMigrationStrategies","schema","migrationStrategies","Array","isArray","newRxTypeError","previousVersions","getPreviousVersions","length","Object","keys","newRxError","have","should","map","vNr","v","s","filter","strategy","forEach","version","type"],"sources":["../../../../src/plugins/dev-mode/check-migration-strategies.ts"],"sourcesContent":["import type { RxJsonSchema, NumberFunctionMap } from '../../types/index.d.ts';\nimport { newRxTypeError, newRxError } from '../../rx-error.ts';\nimport { getPreviousVersions } from '../../rx-schema.ts';\n\n\n/**\n * checks if the migrationStrategies are ok, throws if not\n * @throws {Error|TypeError} if not ok\n */\nexport function checkMigrationStrategies(\n schema: RxJsonSchema,\n migrationStrategies: NumberFunctionMap\n): boolean {\n // migrationStrategies must be object not array\n if (\n typeof migrationStrategies !== 'object' ||\n Array.isArray(migrationStrategies)\n ) {\n throw newRxTypeError('COL11', {\n schema\n });\n }\n\n const 
previousVersions = getPreviousVersions(schema);\n\n // for every previousVersion there must be strategy\n if (\n previousVersions.length !== Object\n .keys(migrationStrategies).length\n ) {\n throw newRxError('COL12', {\n have: Object.keys(migrationStrategies),\n should: previousVersions\n });\n }\n\n // every strategy must have number as property and be a function\n previousVersions\n .map(vNr => ({\n v: vNr,\n s: migrationStrategies[(vNr + 1)]\n }))\n .filter(strategy => typeof strategy.s !== 'function')\n .forEach(strategy => {\n throw newRxTypeError('COL13', {\n version: strategy.v,\n type: typeof strategy,\n schema\n });\n });\n\n return true;\n}\n"],"mappings":";;;;;;AACA,IAAAA,QAAA,GAAAC,OAAA;AACA,IAAAC,SAAA,GAAAD,OAAA;AAGA;AACA;AACA;AACA;AACO,SAASE,wBAAwBA,CACpCC,MAAyB,EACzBC,mBAAsC,EAC/B;EACP;EACA,IACI,OAAOA,mBAAmB,KAAK,QAAQ,IACvCC,KAAK,CAACC,OAAO,CAACF,mBAAmB,CAAC,EACpC;IACE,MAAM,IAAAG,uBAAc,EAAC,OAAO,EAAE;MAC1BJ;IACJ,CAAC,CAAC;EACN;EAEA,IAAMK,gBAAgB,GAAG,IAAAC,6BAAmB,EAACN,MAAM,CAAC;;EAEpD;EACA,IACIK,gBAAgB,CAACE,MAAM,KAAKC,MAAM,CAC7BC,IAAI,CAACR,mBAAmB,CAAC,CAACM,MAAM,EACvC;IACE,MAAM,IAAAG,mBAAU,EAAC,OAAO,EAAE;MACtBC,IAAI,EAAEH,MAAM,CAACC,IAAI,CAACR,mBAAmB,CAAC;MACtCW,MAAM,EAAEP;IACZ,CAAC,CAAC;EACN;;EAEA;EACAA,gBAAgB,CACXQ,GAAG,CAACC,GAAG,KAAK;IACTC,CAAC,EAAED,GAAG;IACNE,CAAC,EAAEf,mBAAmB,CAAEa,GAAG,GAAG,CAAC;EACnC,CAAC,CAAC,CAAC,CACFG,MAAM,CAACC,QAAQ,IAAI,OAAOA,QAAQ,CAACF,CAAC,KAAK,UAAU,CAAC,CACpDG,OAAO,CAACD,QAAQ,IAAI;IACjB,MAAM,IAAAd,uBAAc,EAAC,OAAO,EAAE;MAC1BgB,OAAO,EAAEF,QAAQ,CAACH,CAAC;MACnBM,IAAI,EAAE,OAAOH,QAAQ;MACrBlB;IACJ,CAAC,CAAC;EACN,CAAC,CAAC;EAEN,OAAO,IAAI;AACf","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/dev-mode/check-orm.js b/dist/cjs/plugins/dev-mode/check-orm.js deleted file mode 100644 index 3697ec0abb1..00000000000 --- a/dist/cjs/plugins/dev-mode/check-orm.js +++ /dev/null @@ -1,53 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); 
-exports.checkOrmDocumentMethods = checkOrmDocumentMethods; -exports.checkOrmMethods = checkOrmMethods; -var _rxError = require("../../rx-error.js"); -var _entityProperties = require("./entity-properties.js"); -/** - * checks if the given static methods are allowed - * @throws if not allowed - */ -function checkOrmMethods(statics) { - if (!statics) { - return; - } - Object.entries(statics).forEach(([k, v]) => { - if (typeof k !== 'string') { - throw (0, _rxError.newRxTypeError)('COL14', { - name: k - }); - } - if (k.startsWith('_')) { - throw (0, _rxError.newRxTypeError)('COL15', { - name: k - }); - } - if (typeof v !== 'function') { - throw (0, _rxError.newRxTypeError)('COL16', { - name: k, - type: typeof k - }); - } - if ((0, _entityProperties.rxCollectionProperties)().includes(k) || (0, _entityProperties.rxDocumentProperties)().includes(k)) { - throw (0, _rxError.newRxError)('COL17', { - name: k - }); - } - }); -} -function checkOrmDocumentMethods(schema, methods) { - var topLevelFields = Object.keys(schema.properties); - if (!methods) { - return; - } - Object.keys(methods).filter(funName => topLevelFields.includes(funName)).forEach(funName => { - throw (0, _rxError.newRxError)('COL18', { - funName - }); - }); -} -//# sourceMappingURL=check-orm.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/dev-mode/check-orm.js.map b/dist/cjs/plugins/dev-mode/check-orm.js.map deleted file mode 100644 index d95cca759ed..00000000000 --- a/dist/cjs/plugins/dev-mode/check-orm.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"check-orm.js","names":["_rxError","require","_entityProperties","checkOrmMethods","statics","Object","entries","forEach","k","v","newRxTypeError","name","startsWith","type","rxCollectionProperties","includes","rxDocumentProperties","newRxError","checkOrmDocumentMethods","schema","methods","topLevelFields","keys","properties","filter","funName"],"sources":["../../../../src/plugins/dev-mode/check-orm.ts"],"sourcesContent":["import {\n 
newRxError,\n newRxTypeError\n} from '../../rx-error.ts';\nimport type { KeyFunctionMap, RxJsonSchema } from '../../types/index.d.ts';\nimport { rxCollectionProperties, rxDocumentProperties } from './entity-properties.ts';\n\n/**\n * checks if the given static methods are allowed\n * @throws if not allowed\n */\nexport function checkOrmMethods(statics?: KeyFunctionMap) {\n if (!statics) {\n return;\n }\n Object\n .entries(statics)\n .forEach(([k, v]) => {\n if (typeof k !== 'string') {\n throw newRxTypeError('COL14', {\n name: k\n });\n }\n\n if (k.startsWith('_')) {\n throw newRxTypeError('COL15', {\n name: k\n });\n }\n\n if (typeof v !== 'function') {\n throw newRxTypeError('COL16', {\n name: k,\n type: typeof k\n });\n }\n\n if (\n rxCollectionProperties().includes(k) ||\n rxDocumentProperties().includes(k)\n ) {\n throw newRxError('COL17', {\n name: k\n });\n }\n });\n}\n\n\nexport function checkOrmDocumentMethods(\n schema: RxJsonSchema,\n methods?: any,\n) {\n const topLevelFields = Object.keys(schema.properties) as (keyof RxDocType)[];\n if (!methods) {\n return;\n }\n Object.keys(methods)\n .filter(funName => topLevelFields.includes(funName as any))\n .forEach(funName => {\n throw newRxError('COL18', {\n funName\n });\n 
});\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,QAAA,GAAAC,OAAA;AAKA,IAAAC,iBAAA,GAAAD,OAAA;AAEA;AACA;AACA;AACA;AACO,SAASE,eAAeA,CAACC,OAAwB,EAAE;EACtD,IAAI,CAACA,OAAO,EAAE;IACV;EACJ;EACAC,MAAM,CACDC,OAAO,CAACF,OAAO,CAAC,CAChBG,OAAO,CAAC,CAAC,CAACC,CAAC,EAAEC,CAAC,CAAC,KAAK;IACjB,IAAI,OAAOD,CAAC,KAAK,QAAQ,EAAE;MACvB,MAAM,IAAAE,uBAAc,EAAC,OAAO,EAAE;QAC1BC,IAAI,EAAEH;MACV,CAAC,CAAC;IACN;IAEA,IAAIA,CAAC,CAACI,UAAU,CAAC,GAAG,CAAC,EAAE;MACnB,MAAM,IAAAF,uBAAc,EAAC,OAAO,EAAE;QAC1BC,IAAI,EAAEH;MACV,CAAC,CAAC;IACN;IAEA,IAAI,OAAOC,CAAC,KAAK,UAAU,EAAE;MACzB,MAAM,IAAAC,uBAAc,EAAC,OAAO,EAAE;QAC1BC,IAAI,EAAEH,CAAC;QACPK,IAAI,EAAE,OAAOL;MACjB,CAAC,CAAC;IACN;IAEA,IACI,IAAAM,wCAAsB,EAAC,CAAC,CAACC,QAAQ,CAACP,CAAC,CAAC,IACpC,IAAAQ,sCAAoB,EAAC,CAAC,CAACD,QAAQ,CAACP,CAAC,CAAC,EACpC;MACE,MAAM,IAAAS,mBAAU,EAAC,OAAO,EAAE;QACtBN,IAAI,EAAEH;MACV,CAAC,CAAC;IACN;EACJ,CAAC,CAAC;AACV;AAGO,SAASU,uBAAuBA,CACnCC,MAA+B,EAC/BC,OAAa,EACf;EACE,IAAMC,cAAc,GAAGhB,MAAM,CAACiB,IAAI,CAACH,MAAM,CAACI,UAAU,CAAwB;EAC5E,IAAI,CAACH,OAAO,EAAE;IACV;EACJ;EACAf,MAAM,CAACiB,IAAI,CAACF,OAAO,CAAC,CACfI,MAAM,CAACC,OAAO,IAAIJ,cAAc,CAACN,QAAQ,CAACU,OAAc,CAAC,CAAC,CAC1DlB,OAAO,CAACkB,OAAO,IAAI;IAChB,MAAM,IAAAR,mBAAU,EAAC,OAAO,EAAE;MACtBQ;IACJ,CAAC,CAAC;EACN,CAAC,CAAC;AACV","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/dev-mode/check-query.js b/dist/cjs/plugins/dev-mode/check-query.js deleted file mode 100644 index 6b0aef6fa15..00000000000 --- a/dist/cjs/plugins/dev-mode/check-query.js +++ /dev/null @@ -1,151 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.areSelectorsSatisfiedByIndex = areSelectorsSatisfiedByIndex; -exports.checkMangoQuery = checkMangoQuery; -exports.checkQuery = checkQuery; -exports.ensureObjectDoesNotContainRegExp = ensureObjectDoesNotContainRegExp; -var _rxError = require("../../rx-error.js"); -var _index = require("../utils/index.js"); -var _rxQuery = require("../../rx-query.js"); -/** - * accidentally passing a non-valid 
object into the query params - * is very hard to debug especially when queries are observed - * This is why we do some checks here in dev-mode - */ -function checkQuery(args) { - var isPlainObject = Object.prototype.toString.call(args.queryObj) === '[object Object]'; - if (!isPlainObject) { - throw (0, _rxError.newRxTypeError)('QU11', { - op: args.op, - collection: args.collection.name, - queryObj: args.queryObj - }); - } - var validKeys = ['selector', 'limit', 'skip', 'sort', 'index']; - Object.keys(args.queryObj).forEach(key => { - if (!validKeys.includes(key)) { - throw (0, _rxError.newRxTypeError)('QU11', { - op: args.op, - collection: args.collection.name, - queryObj: args.queryObj, - key, - args: { - validKeys - } - }); - } - }); - - // do not allow skip or limit for count queries - if (args.op === 'count' && (args.queryObj.limit || args.queryObj.skip)) { - throw (0, _rxError.newRxError)('QU15', { - collection: args.collection.name, - query: args.queryObj - }); - } - ensureObjectDoesNotContainRegExp(args.queryObj); -} -function checkMangoQuery(args) { - var schema = args.rxQuery.collection.schema.jsonSchema; - - /** - * Ensure that all top level fields are included in the schema. - * TODO this check can be augmented to also check sub-fields. - */ - var massagedSelector = args.mangoQuery.selector; - var schemaTopLevelFields = Object.keys(schema.properties); - Object.keys(massagedSelector) - // do not check operators - .filter(fieldOrOperator => !fieldOrOperator.startsWith('$')) - // skip this check on non-top-level fields - .filter(field => !field.includes('.')).forEach(field => { - if (!schemaTopLevelFields.includes(field)) { - throw (0, _rxError.newRxError)('QU13', { - schema, - field, - query: args.mangoQuery - }); - } - }); - - /** - * ensure if custom index is set, - * it is also defined in the schema - */ - var schemaIndexes = schema.indexes ? 
schema.indexes : []; - var index = args.mangoQuery.index; - if (index) { - var isInSchema = schemaIndexes.find(schemaIndex => (0, _index.deepEqual)(schemaIndex, index)); - if (!isInSchema) { - throw (0, _rxError.newRxError)('QU12', { - collection: args.rxQuery.collection.name, - query: args.mangoQuery, - schema - }); - } - } - - /** - * Ensure that a count() query can only be used - * with selectors that are fully satisfied by the used index. - */ - if (args.rxQuery.op === 'count') { - if (!areSelectorsSatisfiedByIndex(args.rxQuery.collection.schema.jsonSchema, args.mangoQuery) && !args.rxQuery.collection.database.allowSlowCount) { - throw (0, _rxError.newRxError)('QU14', { - collection: args.rxQuery.collection, - query: args.mangoQuery - }); - } - } - - /** - * Ensure that sort only runs on known fields - * TODO also check nested fields - */ - if (args.mangoQuery.sort) { - args.mangoQuery.sort.map(sortPart => Object.keys(sortPart)[0]).filter(field => !field.includes('.')).forEach(field => { - if (!schemaTopLevelFields.includes(field)) { - throw (0, _rxError.newRxError)('QU13', { - schema, - field, - query: args.mangoQuery - }); - } - }); - } - - // Do not allow RexExp instances - ensureObjectDoesNotContainRegExp(args.mangoQuery); -} -function areSelectorsSatisfiedByIndex(schema, query) { - var preparedQuery = (0, _rxQuery.prepareQuery)(schema, query); - return preparedQuery.queryPlan.selectorSatisfiedByIndex; -} - -/** - * Ensures that the selector does not contain any RegExp instance. 
- * @recursive - */ -function ensureObjectDoesNotContainRegExp(selector) { - if (typeof selector !== 'object' || selector === null) { - return; - } - var keys = Object.keys(selector); - keys.forEach(key => { - var value = selector[key]; - if (value instanceof RegExp) { - throw (0, _rxError.newRxError)('QU16', { - field: key, - query: selector - }); - } else if (Array.isArray(value)) { - value.forEach(item => ensureObjectDoesNotContainRegExp(item)); - } else { - ensureObjectDoesNotContainRegExp(value); - } - }); -} -//# sourceMappingURL=check-query.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/dev-mode/check-query.js.map b/dist/cjs/plugins/dev-mode/check-query.js.map deleted file mode 100644 index fd9165272dd..00000000000 --- a/dist/cjs/plugins/dev-mode/check-query.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"check-query.js","names":["_rxError","require","_index","_rxQuery","checkQuery","args","isPlainObject","Object","prototype","toString","call","queryObj","newRxTypeError","op","collection","name","validKeys","keys","forEach","key","includes","limit","skip","newRxError","query","ensureObjectDoesNotContainRegExp","checkMangoQuery","schema","rxQuery","jsonSchema","massagedSelector","mangoQuery","selector","schemaTopLevelFields","properties","filter","fieldOrOperator","startsWith","field","schemaIndexes","indexes","index","isInSchema","find","schemaIndex","deepEqual","areSelectorsSatisfiedByIndex","database","allowSlowCount","sort","map","sortPart","preparedQuery","prepareQuery","queryPlan","selectorSatisfiedByIndex","value","RegExp","Array","isArray","item"],"sources":["../../../../src/plugins/dev-mode/check-query.ts"],"sourcesContent":["import type {\n RxPluginPreCreateRxQueryArgs,\n MangoQuery,\n RxPluginPrePrepareQueryArgs,\n FilledMangoQuery,\n RxJsonSchema,\n RxDocumentData,\n MangoQuerySelector,\n PreparedQuery\n} from '../../types/index.d.ts';\nimport { newRxError, newRxTypeError } from '../../rx-error.ts';\nimport { deepEqual } 
from '../utils/index.ts';\nimport { prepareQuery } from '../../rx-query.ts';\n\n/**\n * accidentally passing a non-valid object into the query params\n * is very hard to debug especially when queries are observed\n * This is why we do some checks here in dev-mode\n */\nexport function checkQuery(args: RxPluginPreCreateRxQueryArgs) {\n const isPlainObject = Object.prototype.toString.call(args.queryObj) === '[object Object]';\n if (!isPlainObject) {\n throw newRxTypeError('QU11', {\n op: args.op,\n collection: args.collection.name,\n queryObj: args.queryObj\n });\n }\n\n const validKeys: (keyof MangoQuery)[] = [\n 'selector',\n 'limit',\n 'skip',\n 'sort',\n 'index'\n ];\n Object.keys(args.queryObj).forEach(key => {\n if (!(validKeys as string[]).includes(key)) {\n throw newRxTypeError('QU11', {\n op: args.op,\n collection: args.collection.name,\n queryObj: args.queryObj,\n key,\n args: {\n validKeys\n }\n });\n }\n });\n\n // do not allow skip or limit for count queries\n if (\n args.op === 'count' &&\n (\n args.queryObj.limit ||\n args.queryObj.skip\n )\n ) {\n throw newRxError(\n 'QU15',\n {\n collection: args.collection.name,\n query: args.queryObj\n }\n );\n }\n\n ensureObjectDoesNotContainRegExp(args.queryObj);\n}\n\n\nexport function checkMangoQuery(args: RxPluginPrePrepareQueryArgs) {\n const schema = args.rxQuery.collection.schema.jsonSchema;\n\n /**\n * Ensure that all top level fields are included in the schema.\n * TODO this check can be augmented to also check sub-fields.\n */\n const massagedSelector: MangoQuerySelector = args.mangoQuery.selector;\n const schemaTopLevelFields = Object.keys(schema.properties);\n Object.keys(massagedSelector)\n // do not check operators\n .filter(fieldOrOperator => !fieldOrOperator.startsWith('$'))\n // skip this check on non-top-level fields\n .filter(field => !field.includes('.'))\n .forEach(field => {\n if (!schemaTopLevelFields.includes(field)) {\n throw newRxError('QU13', {\n schema,\n field,\n query: 
args.mangoQuery,\n });\n }\n });\n\n /**\n * ensure if custom index is set,\n * it is also defined in the schema\n */\n const schemaIndexes = schema.indexes ? schema.indexes : [];\n const index = args.mangoQuery.index;\n if (index) {\n const isInSchema = schemaIndexes.find(schemaIndex => deepEqual(schemaIndex, index));\n if (!isInSchema) {\n throw newRxError(\n 'QU12',\n {\n collection: args.rxQuery.collection.name,\n query: args.mangoQuery,\n schema\n }\n );\n }\n }\n\n\n /**\n * Ensure that a count() query can only be used\n * with selectors that are fully satisfied by the used index.\n */\n if (args.rxQuery.op === 'count') {\n if (\n !areSelectorsSatisfiedByIndex(\n args.rxQuery.collection.schema.jsonSchema,\n args.mangoQuery\n ) &&\n !args.rxQuery.collection.database.allowSlowCount\n ) {\n throw newRxError('QU14', {\n collection: args.rxQuery.collection,\n query: args.mangoQuery\n });\n }\n }\n\n /**\n * Ensure that sort only runs on known fields\n * TODO also check nested fields\n */\n if (args.mangoQuery.sort) {\n args.mangoQuery.sort\n .map(sortPart => Object.keys(sortPart)[0])\n .filter(field => !field.includes('.'))\n .forEach(field => {\n if (!schemaTopLevelFields.includes(field)) {\n throw newRxError('QU13', {\n schema,\n field,\n query: args.mangoQuery,\n });\n }\n });\n }\n\n // Do not allow RexExp instances\n ensureObjectDoesNotContainRegExp(args.mangoQuery);\n}\n\n\nexport function areSelectorsSatisfiedByIndex(\n schema: RxJsonSchema>,\n query: FilledMangoQuery\n): boolean {\n const preparedQuery: PreparedQuery = prepareQuery(\n schema,\n query\n );\n return preparedQuery.queryPlan.selectorSatisfiedByIndex;\n}\n\n/**\n * Ensures that the selector does not contain any RegExp instance.\n * @recursive\n */\nexport function ensureObjectDoesNotContainRegExp(selector: any) {\n if (typeof selector !== 'object' || selector === null) {\n return;\n }\n const keys = Object.keys(selector);\n keys.forEach(key => {\n const value: any = selector[key];\n if (value 
instanceof RegExp) {\n throw newRxError('QU16', {\n field: key,\n query: selector,\n });\n } else if (Array.isArray(value)) {\n value.forEach(item => ensureObjectDoesNotContainRegExp(item));\n } else {\n ensureObjectDoesNotContainRegExp(value);\n }\n });\n}\n"],"mappings":";;;;;;;;;AAUA,IAAAA,QAAA,GAAAC,OAAA;AACA,IAAAC,MAAA,GAAAD,OAAA;AACA,IAAAE,QAAA,GAAAF,OAAA;AAEA;AACA;AACA;AACA;AACA;AACO,SAASG,UAAUA,CAACC,IAAkC,EAAE;EAC3D,IAAMC,aAAa,GAAGC,MAAM,CAACC,SAAS,CAACC,QAAQ,CAACC,IAAI,CAACL,IAAI,CAACM,QAAQ,CAAC,KAAK,iBAAiB;EACzF,IAAI,CAACL,aAAa,EAAE;IAChB,MAAM,IAAAM,uBAAc,EAAC,MAAM,EAAE;MACzBC,EAAE,EAAER,IAAI,CAACQ,EAAE;MACXC,UAAU,EAAET,IAAI,CAACS,UAAU,CAACC,IAAI;MAChCJ,QAAQ,EAAEN,IAAI,CAACM;IACnB,CAAC,CAAC;EACN;EAEA,IAAMK,SAA+B,GAAG,CACpC,UAAU,EACV,OAAO,EACP,MAAM,EACN,MAAM,EACN,OAAO,CACV;EACDT,MAAM,CAACU,IAAI,CAACZ,IAAI,CAACM,QAAQ,CAAC,CAACO,OAAO,CAACC,GAAG,IAAI;IACtC,IAAI,CAAEH,SAAS,CAAcI,QAAQ,CAACD,GAAG,CAAC,EAAE;MACxC,MAAM,IAAAP,uBAAc,EAAC,MAAM,EAAE;QACzBC,EAAE,EAAER,IAAI,CAACQ,EAAE;QACXC,UAAU,EAAET,IAAI,CAACS,UAAU,CAACC,IAAI;QAChCJ,QAAQ,EAAEN,IAAI,CAACM,QAAQ;QACvBQ,GAAG;QACHd,IAAI,EAAE;UACFW;QACJ;MACJ,CAAC,CAAC;IACN;EACJ,CAAC,CAAC;;EAEF;EACA,IACIX,IAAI,CAACQ,EAAE,KAAK,OAAO,KAEfR,IAAI,CAACM,QAAQ,CAACU,KAAK,IACnBhB,IAAI,CAACM,QAAQ,CAACW,IAAI,CACrB,EACH;IACE,MAAM,IAAAC,mBAAU,EACZ,MAAM,EACN;MACIT,UAAU,EAAET,IAAI,CAACS,UAAU,CAACC,IAAI;MAChCS,KAAK,EAAEnB,IAAI,CAACM;IAChB,CACJ,CAAC;EACL;EAEAc,gCAAgC,CAACpB,IAAI,CAACM,QAAQ,CAAC;AACnD;AAGO,SAASe,eAAeA,CAACrB,IAAiC,EAAE;EAC/D,IAAMsB,MAAM,GAAGtB,IAAI,CAACuB,OAAO,CAACd,UAAU,CAACa,MAAM,CAACE,UAAU;;EAExD;AACJ;AACA;AACA;EACI,IAAMC,gBAAyC,GAAGzB,IAAI,CAAC0B,UAAU,CAACC,QAAQ;EAC1E,IAAMC,oBAAoB,GAAG1B,MAAM,CAACU,IAAI,CAACU,MAAM,CAACO,UAAU,CAAC;EAC3D3B,MAAM,CAACU,IAAI,CAACa,gBAAgB;EACxB;EAAA,CACCK,MAAM,CAACC,eAAe,IAAI,CAACA,eAAe,CAACC,UAAU,CAAC,GAAG,CAAC;EAC3D;EAAA,CACCF,MAAM,CAACG,KAAK,IAAI,CAACA,KAAK,CAAClB,QAAQ,CAAC,GAAG,CAAC,CAAC,CACrCF,OAAO,CAACoB,KAAK,IAAI;IACd,IAAI,CAACL,oBAAoB,CAACb,QAAQ,CAACkB,KAAK,CAAC,EAAE;MACvC,MAAM,IAAAf,mB
AAU,EAAC,MAAM,EAAE;QACrBI,MAAM;QACNW,KAAK;QACLd,KAAK,EAAEnB,IAAI,CAAC0B;MAChB,CAAC,CAAC;IACN;EACJ,CAAC,CAAC;;EAEN;AACJ;AACA;AACA;EACI,IAAMQ,aAAa,GAAGZ,MAAM,CAACa,OAAO,GAAGb,MAAM,CAACa,OAAO,GAAG,EAAE;EAC1D,IAAMC,KAAK,GAAGpC,IAAI,CAAC0B,UAAU,CAACU,KAAK;EACnC,IAAIA,KAAK,EAAE;IACP,IAAMC,UAAU,GAAGH,aAAa,CAACI,IAAI,CAACC,WAAW,IAAI,IAAAC,gBAAS,EAACD,WAAW,EAAEH,KAAK,CAAC,CAAC;IACnF,IAAI,CAACC,UAAU,EAAE;MACb,MAAM,IAAAnB,mBAAU,EACZ,MAAM,EACN;QACIT,UAAU,EAAET,IAAI,CAACuB,OAAO,CAACd,UAAU,CAACC,IAAI;QACxCS,KAAK,EAAEnB,IAAI,CAAC0B,UAAU;QACtBJ;MACJ,CACJ,CAAC;IACL;EACJ;;EAGA;AACJ;AACA;AACA;EACI,IAAItB,IAAI,CAACuB,OAAO,CAACf,EAAE,KAAK,OAAO,EAAE;IAC7B,IACI,CAACiC,4BAA4B,CACzBzC,IAAI,CAACuB,OAAO,CAACd,UAAU,CAACa,MAAM,CAACE,UAAU,EACzCxB,IAAI,CAAC0B,UACT,CAAC,IACD,CAAC1B,IAAI,CAACuB,OAAO,CAACd,UAAU,CAACiC,QAAQ,CAACC,cAAc,EAClD;MACE,MAAM,IAAAzB,mBAAU,EAAC,MAAM,EAAE;QACrBT,UAAU,EAAET,IAAI,CAACuB,OAAO,CAACd,UAAU;QACnCU,KAAK,EAAEnB,IAAI,CAAC0B;MAChB,CAAC,CAAC;IACN;EACJ;;EAEA;AACJ;AACA;AACA;EACI,IAAI1B,IAAI,CAAC0B,UAAU,CAACkB,IAAI,EAAE;IACtB5C,IAAI,CAAC0B,UAAU,CAACkB,IAAI,CACfC,GAAG,CAACC,QAAQ,IAAI5C,MAAM,CAACU,IAAI,CAACkC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CACzChB,MAAM,CAACG,KAAK,IAAI,CAACA,KAAK,CAAClB,QAAQ,CAAC,GAAG,CAAC,CAAC,CACrCF,OAAO,CAACoB,KAAK,IAAI;MACd,IAAI,CAACL,oBAAoB,CAACb,QAAQ,CAACkB,KAAK,CAAC,EAAE;QACvC,MAAM,IAAAf,mBAAU,EAAC,MAAM,EAAE;UACrBI,MAAM;UACNW,KAAK;UACLd,KAAK,EAAEnB,IAAI,CAAC0B;QAChB,CAAC,CAAC;MACN;IACJ,CAAC,CAAC;EACV;;EAEA;EACAN,gCAAgC,CAACpB,IAAI,CAAC0B,UAAU,CAAC;AACrD;AAGO,SAASe,4BAA4BA,CACxCnB,MAA+C,EAC/CH,KAAkC,EAC3B;EACP,IAAM4B,aAAiC,GAAG,IAAAC,qBAAY,EAClD1B,MAAM,EACNH,KACJ,CAAC;EACD,OAAO4B,aAAa,CAACE,SAAS,CAACC,wBAAwB;AAC3D;;AAEA;AACA;AACA;AACA;AACO,SAAS9B,gCAAgCA,CAACO,QAAa,EAAE;EAC5D,IAAI,OAAOA,QAAQ,KAAK,QAAQ,IAAIA,QAAQ,KAAK,IAAI,EAAE;IACnD;EACJ;EACA,IAAMf,IAAI,GAAGV,MAAM,CAACU,IAAI,CAACe,QAAQ,CAAC;EAClCf,IAAI,CAACC,OAAO,CAACC,GAAG,IAAI;IAChB,IAAMqC,KAAU,GAAGxB,QAAQ,CAACb,GAAG,CAAC;IAChC,IAAIqC,KAAK,YAAYC,MAAM,EAAE;MACzB,MAAM,IAAAlC,mBAAU,EAAC,MAAM,EAAE;QACrBe,KAAK
,EAAEnB,GAAG;QACVK,KAAK,EAAEQ;MACX,CAAC,CAAC;IACN,CAAC,MAAM,IAAI0B,KAAK,CAACC,OAAO,CAACH,KAAK,CAAC,EAAE;MAC7BA,KAAK,CAACtC,OAAO,CAAC0C,IAAI,IAAInC,gCAAgC,CAACmC,IAAI,CAAC,CAAC;IACjE,CAAC,MAAM;MACHnC,gCAAgC,CAAC+B,KAAK,CAAC;IAC3C;EACJ,CAAC,CAAC;AACN","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/dev-mode/check-schema.js b/dist/cjs/plugins/dev-mode/check-schema.js deleted file mode 100644 index b18c3d7627b..00000000000 --- a/dist/cjs/plugins/dev-mode/check-schema.js +++ /dev/null @@ -1,506 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.checkFieldNameRegex = checkFieldNameRegex; -exports.checkPrimaryKey = checkPrimaryKey; -exports.checkSchema = checkSchema; -exports.validateFieldsDeep = validateFieldsDeep; -var _rxError = require("../../rx-error.js"); -var _rxSchemaHelper = require("../../rx-schema-helper.js"); -var _index = require("../../plugins/utils/index.js"); -var _entityProperties = require("./entity-properties.js"); -/** - * does additional checks over the schema-json - * to ensure nothing is broken or not supported - */ - -/** - * checks if the fieldname is allowed - * this makes sure that the fieldnames can be transformed into javascript-vars - * and does not conquer the observe$ and populate_ fields - * @throws {Error} - */ -function checkFieldNameRegex(fieldName) { - if (fieldName === '_deleted') { - return; - } - if (['properties'].includes(fieldName)) { - throw (0, _rxError.newRxError)('SC23', { - fieldName - }); - } - var regexStr = '^[a-zA-Z](?:[[a-zA-Z0-9_]*]?[a-zA-Z0-9])?$'; - var regex = new RegExp(regexStr); - if ( - /** - * It must be allowed to set _id as primaryKey. 
- * This makes it sometimes easier to work with RxDB+CouchDB - * @link https://github.com/pubkey/rxdb/issues/681 - */ - fieldName !== '_id' && !fieldName.match(regex)) { - throw (0, _rxError.newRxError)('SC1', { - regex: regexStr, - fieldName - }); - } -} - -/** - * validate that all schema-related things are ok - */ -function validateFieldsDeep(rxJsonSchema) { - var primaryPath = (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(rxJsonSchema.primaryKey); - function checkField(fieldName, schemaObj, path) { - if (typeof fieldName === 'string' && typeof schemaObj === 'object' && !Array.isArray(schemaObj) && path.split('.').pop() !== 'patternProperties') checkFieldNameRegex(fieldName); - - // 'item' only allowed it type=='array' - if (Object.prototype.hasOwnProperty.call(schemaObj, 'item') && schemaObj.type !== 'array') { - throw (0, _rxError.newRxError)('SC2', { - fieldName - }); - } - - /** - * required fields cannot be set via 'required: true', - * but must be set via required: [] - */ - if (Object.prototype.hasOwnProperty.call(schemaObj, 'required') && typeof schemaObj.required === 'boolean') { - throw (0, _rxError.newRxError)('SC24', { - fieldName - }); - } - - // $ref is not allowed - if (Object.prototype.hasOwnProperty.call(schemaObj, '$ref')) { - throw (0, _rxError.newRxError)('SC40', { - fieldName - }); - } - - // if ref given, must be type=='string', type=='array' with string-items or type==['string','null'] - if (Object.prototype.hasOwnProperty.call(schemaObj, 'ref')) { - if (Array.isArray(schemaObj.type)) { - if (schemaObj.type.length > 2 || !schemaObj.type.includes('string') || !schemaObj.type.includes('null')) { - throw (0, _rxError.newRxError)('SC4', { - fieldName - }); - } - } else { - switch (schemaObj.type) { - case 'string': - break; - case 'array': - if (!schemaObj.items || !schemaObj.items.type || schemaObj.items.type !== 'string') { - throw (0, _rxError.newRxError)('SC3', { - fieldName - }); - } - break; - default: - throw (0, 
_rxError.newRxError)('SC4', { - fieldName - }); - } - } - } - var isNested = path.split('.').length >= 2; - - // nested only - if (isNested) { - if (schemaObj.default) { - throw (0, _rxError.newRxError)('SC7', { - path - }); - } - } - - // first level - if (!isNested) { - // if _id is used, it must be primaryKey - if (fieldName === '_id' && primaryPath !== '_id') { - throw (0, _rxError.newRxError)('COL2', { - fieldName - }); - } - - // check underscore fields - if (fieldName.charAt(0) === '_') { - if ( - // exceptional allow underscore on these fields. - fieldName === '_id' || fieldName === '_deleted') { - return; - } - throw (0, _rxError.newRxError)('SC8', { - fieldName - }); - } - } - } - function traverse(currentObj, currentPath) { - if (!currentObj || typeof currentObj !== 'object') { - return; - } - Object.keys(currentObj).forEach(attributeName => { - var schemaObj = currentObj[attributeName]; - if (!currentObj.properties && schemaObj && typeof schemaObj === 'object' && !Array.isArray(currentObj)) { - checkField(attributeName, schemaObj, currentPath); - } - var nextPath = currentPath; - if (attributeName !== 'properties') nextPath = nextPath + '.' 
+ attributeName; - traverse(schemaObj, nextPath); - }); - } - traverse(rxJsonSchema, ''); - return true; -} -function checkPrimaryKey(jsonSchema) { - if (!jsonSchema.primaryKey) { - throw (0, _rxError.newRxError)('SC30', { - schema: jsonSchema - }); - } - function validatePrimarySchemaPart(schemaPart) { - if (!schemaPart) { - throw (0, _rxError.newRxError)('SC33', { - schema: jsonSchema - }); - } - var type = schemaPart.type; - if (!type || !['string', 'number', 'integer'].includes(type)) { - throw (0, _rxError.newRxError)('SC32', { - schema: jsonSchema, - args: { - schemaPart - } - }); - } - } - if (typeof jsonSchema.primaryKey === 'string') { - var key = jsonSchema.primaryKey; - var schemaPart = jsonSchema.properties[key]; - validatePrimarySchemaPart(schemaPart); - } else { - var compositePrimaryKey = jsonSchema.primaryKey; - var keySchemaPart = (0, _rxSchemaHelper.getSchemaByObjectPath)(jsonSchema, compositePrimaryKey.key); - validatePrimarySchemaPart(keySchemaPart); - compositePrimaryKey.fields.forEach(field => { - var schemaPart = (0, _rxSchemaHelper.getSchemaByObjectPath)(jsonSchema, field); - validatePrimarySchemaPart(schemaPart); - }); - } - - /** - * The primary key must have a maxLength set - * which is required by some RxStorage implementations - * to ensure we can craft custom index strings. 
- */ - var primaryPath = (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(jsonSchema.primaryKey); - var primaryPathSchemaPart = jsonSchema.properties[primaryPath]; - if (!primaryPathSchemaPart.maxLength) { - throw (0, _rxError.newRxError)('SC39', { - schema: jsonSchema, - args: { - primaryPathSchemaPart - } - }); - } else if (!isFinite(primaryPathSchemaPart.maxLength)) { - throw (0, _rxError.newRxError)('SC41', { - schema: jsonSchema, - args: { - primaryPathSchemaPart - } - }); - } -} - -/** - * computes real path of the object path in the collection schema - */ -function getSchemaPropertyRealPath(shortPath) { - var pathParts = shortPath.split('.'); - var realPath = ''; - for (var i = 0; i < pathParts.length; i += 1) { - if (pathParts[i] !== '[]') { - realPath = realPath.concat('.properties.'.concat(pathParts[i])); - } else { - realPath = realPath.concat('.items'); - } - } - return (0, _index.trimDots)(realPath); -} - -/** - * does the checking - * @throws {Error} if something is not ok - */ -function checkSchema(jsonSchema) { - if (!jsonSchema.primaryKey) { - throw (0, _rxError.newRxError)('SC30', { - schema: jsonSchema - }); - } - if (!Object.prototype.hasOwnProperty.call(jsonSchema, 'properties')) { - throw (0, _rxError.newRxError)('SC29', { - schema: jsonSchema - }); - } - - // _rev MUST NOT exist, it is added by RxDB - if (jsonSchema.properties._rev) { - throw (0, _rxError.newRxError)('SC10', { - schema: jsonSchema - }); - } - - // check version - if (!Object.prototype.hasOwnProperty.call(jsonSchema, 'version') || typeof jsonSchema.version !== 'number' || jsonSchema.version < 0) { - throw (0, _rxError.newRxError)('SC11', { - version: jsonSchema.version - }); - } - validateFieldsDeep(jsonSchema); - checkPrimaryKey(jsonSchema); - Object.keys(jsonSchema.properties).forEach(key => { - var value = jsonSchema.properties[key]; - // check primary - if (key === jsonSchema.primaryKey) { - if (jsonSchema.indexes && jsonSchema.indexes.includes(key)) { - throw (0, 
_rxError.newRxError)('SC13', { - value, - schema: jsonSchema - }); - } - if (value.unique) { - throw (0, _rxError.newRxError)('SC14', { - value, - schema: jsonSchema - }); - } - if (jsonSchema.encrypted && jsonSchema.encrypted.includes(key)) { - throw (0, _rxError.newRxError)('SC15', { - value, - schema: jsonSchema - }); - } - if (value.type !== 'string') { - throw (0, _rxError.newRxError)('SC16', { - value, - schema: jsonSchema - }); - } - } - - // check if RxDocument-property - if ((0, _entityProperties.rxDocumentProperties)().includes(key)) { - throw (0, _rxError.newRxError)('SC17', { - key, - schema: jsonSchema - }); - } - }); - - // check format of jsonSchema.indexes - if (jsonSchema.indexes) { - // should be an array - if (!(0, _index.isMaybeReadonlyArray)(jsonSchema.indexes)) { - throw (0, _rxError.newRxError)('SC18', { - indexes: jsonSchema.indexes, - schema: jsonSchema - }); - } - jsonSchema.indexes.forEach(index => { - // should contain strings or array of strings - if (!(typeof index === 'string' || Array.isArray(index))) { - throw (0, _rxError.newRxError)('SC19', { - index, - schema: jsonSchema - }); - } - // if is a compound index it must contain strings - if (Array.isArray(index)) { - for (var i = 0; i < index.length; i += 1) { - if (typeof index[i] !== 'string') { - throw (0, _rxError.newRxError)('SC20', { - index, - schema: jsonSchema - }); - } - } - } - - /** - * To be able to craft custom indexable string with compound fields, - * we need to know the maximum fieldlength of the fields values - * when they are transformed to strings. - * Therefore we need to enforce some properties inside of the schema. - */ - var indexAsArray = (0, _index.isMaybeReadonlyArray)(index) ? 
index : [index]; - indexAsArray.forEach(fieldName => { - var schemaPart = (0, _rxSchemaHelper.getSchemaByObjectPath)(jsonSchema, fieldName); - var type = schemaPart.type; - switch (type) { - case 'string': - var maxLength = schemaPart.maxLength; - if (!maxLength) { - throw (0, _rxError.newRxError)('SC34', { - index, - field: fieldName, - schema: jsonSchema - }); - } - break; - case 'number': - case 'integer': - var multipleOf = schemaPart.multipleOf; - if (!multipleOf) { - throw (0, _rxError.newRxError)('SC35', { - index, - field: fieldName, - schema: jsonSchema - }); - } - var maximum = schemaPart.maximum; - var minimum = schemaPart.minimum; - if (typeof maximum === 'undefined' || typeof minimum === 'undefined') { - throw (0, _rxError.newRxError)('SC37', { - index, - field: fieldName, - schema: jsonSchema - }); - } - if (!isFinite(maximum) || !isFinite(minimum)) { - throw (0, _rxError.newRxError)('SC41', { - index, - field: fieldName, - schema: jsonSchema - }); - } - break; - case 'boolean': - /** - * If a boolean field is used as an index, - * it must be required. - */ - var parentPath = ''; - var lastPathPart = fieldName; - if (fieldName.includes('.')) { - var partParts = fieldName.split('.'); - lastPathPart = partParts.pop(); - parentPath = partParts.join('.'); - } - var parentSchemaPart = parentPath === '' ? 
jsonSchema : (0, _rxSchemaHelper.getSchemaByObjectPath)(jsonSchema, parentPath); - if (!parentSchemaPart.required || !parentSchemaPart.required.includes(lastPathPart)) { - throw (0, _rxError.newRxError)('SC38', { - index, - field: fieldName, - schema: jsonSchema - }); - } - break; - default: - throw (0, _rxError.newRxError)('SC36', { - fieldName, - type: schemaPart.type, - schema: jsonSchema - }); - } - }); - }); - } - - // remove backward-compatibility for index: true - Object.keys((0, _index.flattenObject)(jsonSchema)).map(key => { - // flattenObject returns only ending paths, we need all paths pointing to an object - var split = key.split('.'); - split.pop(); // all but last - return split.join('.'); - }).filter(key => key !== '').filter((elem, pos, arr) => arr.indexOf(elem) === pos) // unique - .filter(key => { - // check if this path defines an index - var value = (0, _index.getProperty)(jsonSchema, key); - return value && !!value.index; - }).forEach(key => { - // replace inner properties - key = key.replace('properties.', ''); // first - key = key.replace(/\.properties\./g, '.'); // middle - throw (0, _rxError.newRxError)('SC26', { - index: (0, _index.trimDots)(key), - schema: jsonSchema - }); - }); - - /* check types of the indexes */ - (jsonSchema.indexes || []).reduce((indexPaths, currentIndex) => { - if ((0, _index.isMaybeReadonlyArray)(currentIndex)) { - (0, _index.appendToArray)(indexPaths, currentIndex); - } else { - indexPaths.push(currentIndex); - } - return indexPaths; - }, []).filter((elem, pos, arr) => arr.indexOf(elem) === pos) // from now on working only with unique indexes - .map(indexPath => { - var realPath = getSchemaPropertyRealPath(indexPath); // real path in the collection schema - var schemaObj = (0, _index.getProperty)(jsonSchema, realPath); // get the schema of the indexed property - if (!schemaObj || typeof schemaObj !== 'object') { - throw (0, _rxError.newRxError)('SC21', { - index: indexPath, - schema: jsonSchema - }); - } - return 
{ - indexPath, - schemaObj - }; - }).filter(index => index.schemaObj.type !== 'string' && index.schemaObj.type !== 'integer' && index.schemaObj.type !== 'number' && index.schemaObj.type !== 'boolean').forEach(index => { - throw (0, _rxError.newRxError)('SC22', { - key: index.indexPath, - type: index.schemaObj.type, - schema: jsonSchema - }); - }); - - /** - * TODO - * in 9.0.0 we changed the way encrypted fields are defined - * This check ensures people do not oversee the breaking change - * Remove this check in the future - */ - Object.keys((0, _index.flattenObject)(jsonSchema)).map(key => { - // flattenObject returns only ending paths, we need all paths pointing to an object - var split = key.split('.'); - split.pop(); // all but last - return split.join('.'); - }).filter(key => key !== '' && key !== 'attachments').filter((elem, pos, arr) => arr.indexOf(elem) === pos) // unique - .filter(key => { - // check if this path defines an encrypted field - var value = (0, _index.getProperty)(jsonSchema, key); - return value && !!value.encrypted; - }).forEach(key => { - // replace inner properties - key = key.replace('properties.', ''); // first - key = key.replace(/\.properties\./g, '.'); // middle - throw (0, _rxError.newRxError)('SC27', { - index: (0, _index.trimDots)(key), - schema: jsonSchema - }); - }); - - /* ensure encrypted fields exist in the schema */ - if (jsonSchema.encrypted) { - jsonSchema.encrypted.forEach(propPath => { - // real path in the collection schema - var realPath = getSchemaPropertyRealPath(propPath); - // get the schema of the indexed property - var schemaObj = (0, _index.getProperty)(jsonSchema, realPath); - if (!schemaObj || typeof schemaObj !== 'object') { - throw (0, _rxError.newRxError)('SC28', { - field: propPath, - schema: jsonSchema - }); - } - }); - } -} -//# sourceMappingURL=check-schema.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/dev-mode/check-schema.js.map b/dist/cjs/plugins/dev-mode/check-schema.js.map deleted 
file mode 100644 index a6595c5ddef..00000000000 --- a/dist/cjs/plugins/dev-mode/check-schema.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"check-schema.js","names":["_rxError","require","_rxSchemaHelper","_index","_entityProperties","checkFieldNameRegex","fieldName","includes","newRxError","regexStr","regex","RegExp","match","validateFieldsDeep","rxJsonSchema","primaryPath","getPrimaryFieldOfPrimaryKey","primaryKey","checkField","schemaObj","path","Array","isArray","split","pop","Object","prototype","hasOwnProperty","call","type","required","length","items","isNested","default","charAt","traverse","currentObj","currentPath","keys","forEach","attributeName","properties","nextPath","checkPrimaryKey","jsonSchema","schema","validatePrimarySchemaPart","schemaPart","args","key","compositePrimaryKey","keySchemaPart","getSchemaByObjectPath","fields","field","primaryPathSchemaPart","maxLength","isFinite","getSchemaPropertyRealPath","shortPath","pathParts","realPath","i","concat","trimDots","checkSchema","_rev","version","value","indexes","unique","encrypted","rxDocumentProperties","isMaybeReadonlyArray","index","indexAsArray","multipleOf","maximum","minimum","parentPath","lastPathPart","partParts","join","parentSchemaPart","flattenObject","map","filter","elem","pos","arr","indexOf","getProperty","replace","reduce","indexPaths","currentIndex","appendToArray","push","indexPath","propPath"],"sources":["../../../../src/plugins/dev-mode/check-schema.ts"],"sourcesContent":["/**\n * does additional checks over the schema-json\n * to ensure nothing is broken or not supported\n */\nimport {\n newRxError\n} from '../../rx-error.ts';\nimport { getPrimaryFieldOfPrimaryKey, getSchemaByObjectPath } from '../../rx-schema-helper.ts';\nimport type {\n CompositePrimaryKey,\n JsonSchema,\n JsonSchemaTypes,\n RxJsonSchema,\n TopLevelProperty\n} from '../../types/index.d.ts';\nimport {\n appendToArray,\n flattenObject, getProperty, isMaybeReadonlyArray,\n trimDots\n} from 
'../../plugins/utils/index.ts';\nimport { rxDocumentProperties } from './entity-properties.ts';\n\n/**\n * checks if the fieldname is allowed\n * this makes sure that the fieldnames can be transformed into javascript-vars\n * and does not conquer the observe$ and populate_ fields\n * @throws {Error}\n */\nexport function checkFieldNameRegex(fieldName: string) {\n if (fieldName === '_deleted') {\n return;\n }\n\n if (['properties'].includes(fieldName)) {\n throw newRxError('SC23', {\n fieldName\n });\n }\n\n const regexStr = '^[a-zA-Z](?:[[a-zA-Z0-9_]*]?[a-zA-Z0-9])?$';\n const regex = new RegExp(regexStr);\n if (\n /**\n * It must be allowed to set _id as primaryKey.\n * This makes it sometimes easier to work with RxDB+CouchDB\n * @link https://github.com/pubkey/rxdb/issues/681\n */\n fieldName !== '_id' &&\n !fieldName.match(regex)\n ) {\n throw newRxError('SC1', {\n regex: regexStr,\n fieldName\n });\n }\n}\n\n/**\n * validate that all schema-related things are ok\n */\nexport function validateFieldsDeep(rxJsonSchema: RxJsonSchema): true {\n\n const primaryPath = getPrimaryFieldOfPrimaryKey(rxJsonSchema.primaryKey);\n\n function checkField(\n fieldName: string,\n schemaObj: JsonSchema,\n path: string\n ) {\n if (\n typeof fieldName === 'string' &&\n typeof schemaObj === 'object' &&\n !Array.isArray(schemaObj) &&\n path.split('.').pop() !== 'patternProperties'\n ) checkFieldNameRegex(fieldName);\n\n // 'item' only allowed it type=='array'\n if (Object.prototype.hasOwnProperty.call(schemaObj, 'item') && schemaObj.type !== 'array') {\n throw newRxError('SC2', {\n fieldName\n });\n }\n\n /**\n * required fields cannot be set via 'required: true',\n * but must be set via required: []\n */\n if (\n Object.prototype.hasOwnProperty.call(schemaObj, 'required') &&\n typeof schemaObj.required === 'boolean'\n ) {\n throw newRxError('SC24', {\n fieldName\n });\n }\n\n // $ref is not allowed\n if (Object.prototype.hasOwnProperty.call(schemaObj, '$ref')) {\n throw 
newRxError('SC40', {\n fieldName\n });\n }\n\n\n // if ref given, must be type=='string', type=='array' with string-items or type==['string','null']\n if (Object.prototype.hasOwnProperty.call(schemaObj, 'ref')) {\n if (Array.isArray(schemaObj.type)) {\n if (schemaObj.type.length > 2 || !schemaObj.type.includes('string') || !schemaObj.type.includes('null')) {\n throw newRxError('SC4', {\n fieldName\n });\n }\n } else {\n switch (schemaObj.type) {\n case 'string':\n break;\n case 'array':\n if (\n !schemaObj.items ||\n !(schemaObj.items as any).type ||\n (schemaObj.items as any).type !== 'string'\n ) {\n throw newRxError('SC3', {\n fieldName\n });\n }\n break;\n default:\n throw newRxError('SC4', {\n fieldName\n });\n }\n }\n }\n\n const isNested = path.split('.').length >= 2;\n\n // nested only\n if (isNested) {\n if ((schemaObj as any).default) {\n throw newRxError('SC7', {\n path\n });\n }\n }\n\n // first level\n if (!isNested) {\n\n // if _id is used, it must be primaryKey\n if (\n fieldName === '_id' &&\n primaryPath !== '_id'\n ) {\n throw newRxError('COL2', {\n fieldName\n });\n }\n\n // check underscore fields\n if (fieldName.charAt(0) === '_') {\n if (\n // exceptional allow underscore on these fields.\n fieldName === '_id' ||\n fieldName === '_deleted'\n ) {\n return;\n }\n throw newRxError('SC8', {\n fieldName\n });\n }\n }\n }\n\n function traverse(currentObj: any, currentPath: any) {\n if (!currentObj || typeof currentObj !== 'object') {\n return;\n }\n Object.keys(currentObj).forEach(attributeName => {\n const schemaObj = currentObj[attributeName];\n if (\n !currentObj.properties &&\n schemaObj &&\n typeof schemaObj === 'object' &&\n !Array.isArray(currentObj)\n ) {\n checkField(\n attributeName,\n schemaObj,\n currentPath\n );\n }\n let nextPath = currentPath;\n if (attributeName !== 'properties') nextPath = nextPath + '.' 
+ attributeName;\n traverse(schemaObj, nextPath);\n });\n }\n traverse(rxJsonSchema, '');\n return true;\n}\n\nexport function checkPrimaryKey(\n jsonSchema: RxJsonSchema\n) {\n if (!jsonSchema.primaryKey) {\n throw newRxError('SC30', { schema: jsonSchema });\n }\n\n\n\n function validatePrimarySchemaPart(\n schemaPart: JsonSchema | TopLevelProperty\n ) {\n if (!schemaPart) {\n throw newRxError('SC33', { schema: jsonSchema });\n }\n\n const type: string = schemaPart.type as any;\n if (\n !type ||\n !['string', 'number', 'integer'].includes(type)\n ) {\n throw newRxError('SC32', { schema: jsonSchema, args: { schemaPart } });\n }\n }\n\n if (typeof jsonSchema.primaryKey === 'string') {\n const key = jsonSchema.primaryKey;\n const schemaPart = jsonSchema.properties[key];\n validatePrimarySchemaPart(schemaPart);\n } else {\n const compositePrimaryKey: CompositePrimaryKey = jsonSchema.primaryKey as any;\n\n const keySchemaPart = getSchemaByObjectPath(jsonSchema, compositePrimaryKey.key);\n validatePrimarySchemaPart(keySchemaPart);\n\n compositePrimaryKey.fields.forEach(field => {\n const schemaPart = getSchemaByObjectPath(jsonSchema, field);\n validatePrimarySchemaPart(schemaPart);\n });\n }\n\n\n /**\n * The primary key must have a maxLength set\n * which is required by some RxStorage implementations\n * to ensure we can craft custom index strings.\n */\n const primaryPath = getPrimaryFieldOfPrimaryKey(jsonSchema.primaryKey);\n const primaryPathSchemaPart = jsonSchema.properties[primaryPath];\n if (!primaryPathSchemaPart.maxLength) {\n throw newRxError('SC39', { schema: jsonSchema, args: { primaryPathSchemaPart } });\n } else if (!isFinite(primaryPathSchemaPart.maxLength)) {\n throw newRxError('SC41', { schema: jsonSchema, args: { primaryPathSchemaPart } });\n }\n}\n\n/**\n * computes real path of the object path in the collection schema\n */\nfunction getSchemaPropertyRealPath(shortPath: string) {\n const pathParts = shortPath.split('.');\n let realPath = '';\n for 
(let i = 0; i < pathParts.length; i += 1) {\n if (pathParts[i] !== '[]') {\n realPath = realPath.concat('.properties.'.concat(pathParts[i]));\n } else {\n realPath = realPath.concat('.items');\n }\n }\n return trimDots(realPath);\n}\n\n/**\n * does the checking\n * @throws {Error} if something is not ok\n */\nexport function checkSchema(jsonSchema: RxJsonSchema) {\n\n if (!jsonSchema.primaryKey) {\n throw newRxError('SC30', {\n schema: jsonSchema\n });\n }\n\n if (!Object.prototype.hasOwnProperty.call(jsonSchema, 'properties')) {\n throw newRxError('SC29', {\n schema: jsonSchema\n });\n }\n\n // _rev MUST NOT exist, it is added by RxDB\n if (jsonSchema.properties._rev) {\n throw newRxError('SC10', {\n schema: jsonSchema\n });\n }\n\n // check version\n if (!Object.prototype.hasOwnProperty.call(jsonSchema, 'version') ||\n typeof jsonSchema.version !== 'number' ||\n jsonSchema.version < 0\n ) {\n throw newRxError('SC11', {\n version: jsonSchema.version\n });\n }\n\n validateFieldsDeep(jsonSchema);\n checkPrimaryKey(jsonSchema);\n\n Object.keys(jsonSchema.properties).forEach(key => {\n const value: any = jsonSchema.properties[key];\n // check primary\n if (key === jsonSchema.primaryKey) {\n if (jsonSchema.indexes && jsonSchema.indexes.includes(key)) {\n throw newRxError('SC13', {\n value,\n schema: jsonSchema\n });\n }\n if (value.unique) {\n throw newRxError('SC14', {\n value,\n schema: jsonSchema\n });\n }\n if (jsonSchema.encrypted && jsonSchema.encrypted.includes(key)) {\n throw newRxError('SC15', {\n value,\n schema: jsonSchema\n });\n }\n if (value.type !== 'string') {\n throw newRxError('SC16', {\n value,\n schema: jsonSchema\n });\n }\n }\n\n // check if RxDocument-property\n if (rxDocumentProperties().includes(key)) {\n throw newRxError('SC17', {\n key,\n schema: jsonSchema\n });\n }\n });\n\n // check format of jsonSchema.indexes\n if (jsonSchema.indexes) {\n // should be an array\n if (!isMaybeReadonlyArray(jsonSchema.indexes)) {\n throw newRxError('SC18', 
{\n indexes: jsonSchema.indexes,\n schema: jsonSchema\n });\n }\n\n jsonSchema.indexes.forEach(index => {\n // should contain strings or array of strings\n if (!(typeof index === 'string' || Array.isArray(index))) {\n throw newRxError('SC19', { index, schema: jsonSchema });\n }\n // if is a compound index it must contain strings\n if (Array.isArray(index)) {\n for (let i = 0; i < index.length; i += 1) {\n if (typeof index[i] !== 'string') {\n throw newRxError('SC20', { index, schema: jsonSchema });\n }\n }\n }\n\n /**\n * To be able to craft custom indexable string with compound fields,\n * we need to know the maximum fieldlength of the fields values\n * when they are transformed to strings.\n * Therefore we need to enforce some properties inside of the schema.\n */\n const indexAsArray = isMaybeReadonlyArray(index) ? index : [index];\n indexAsArray.forEach(fieldName => {\n const schemaPart = getSchemaByObjectPath(\n jsonSchema,\n fieldName\n );\n\n\n const type: JsonSchemaTypes = schemaPart.type as any;\n switch (type) {\n case 'string':\n const maxLength = schemaPart.maxLength;\n if (!maxLength) {\n throw newRxError('SC34', {\n index,\n field: fieldName,\n schema: jsonSchema\n });\n }\n break;\n case 'number':\n case 'integer':\n const multipleOf = schemaPart.multipleOf;\n if (!multipleOf) {\n throw newRxError('SC35', {\n index,\n field: fieldName,\n schema: jsonSchema\n });\n }\n const maximum = schemaPart.maximum;\n const minimum = schemaPart.minimum;\n if (\n typeof maximum === 'undefined' ||\n typeof minimum === 'undefined'\n ) {\n throw newRxError('SC37', {\n index,\n field: fieldName,\n schema: jsonSchema\n });\n }\n\n if (\n !isFinite(maximum) ||\n !isFinite(minimum)\n ) {\n throw newRxError('SC41', {\n index,\n field: fieldName,\n schema: jsonSchema\n });\n }\n\n break;\n case 'boolean':\n /**\n * If a boolean field is used as an index,\n * it must be required.\n */\n let parentPath = '';\n let lastPathPart = fieldName;\n if (fieldName.includes('.')) {\n 
const partParts = fieldName.split('.');\n lastPathPart = partParts.pop();\n parentPath = partParts.join('.');\n }\n const parentSchemaPart = parentPath === '' ? jsonSchema : getSchemaByObjectPath(\n jsonSchema,\n parentPath\n );\n\n if (\n !parentSchemaPart.required ||\n !parentSchemaPart.required.includes(lastPathPart)\n ) {\n throw newRxError('SC38', {\n index,\n field: fieldName,\n schema: jsonSchema\n });\n }\n break;\n\n default:\n throw newRxError('SC36', {\n fieldName,\n type: schemaPart.type as any,\n schema: jsonSchema,\n });\n }\n });\n\n });\n }\n\n // remove backward-compatibility for index: true\n Object.keys(flattenObject(jsonSchema))\n .map(key => {\n // flattenObject returns only ending paths, we need all paths pointing to an object\n const split = key.split('.');\n split.pop(); // all but last\n return split.join('.');\n })\n .filter(key => key !== '')\n .filter((elem, pos, arr) => arr.indexOf(elem) === pos) // unique\n .filter(key => { // check if this path defines an index\n const value = getProperty(jsonSchema, key);\n return value && !!value.index;\n })\n .forEach(key => { // replace inner properties\n key = key.replace('properties.', ''); // first\n key = key.replace(/\\.properties\\./g, '.'); // middle\n throw newRxError('SC26', {\n index: trimDots(key),\n schema: jsonSchema\n });\n });\n\n /* check types of the indexes */\n (jsonSchema.indexes || [])\n .reduce((indexPaths: string[], currentIndex) => {\n if (isMaybeReadonlyArray(currentIndex)) {\n appendToArray(indexPaths, currentIndex);\n } else {\n indexPaths.push(currentIndex);\n }\n return indexPaths;\n }, [])\n .filter((elem, pos, arr) => arr.indexOf(elem) === pos) // from now on working only with unique indexes\n .map(indexPath => {\n const realPath = getSchemaPropertyRealPath(indexPath); // real path in the collection schema\n const schemaObj = getProperty(jsonSchema, realPath); // get the schema of the indexed property\n if (!schemaObj || typeof schemaObj !== 'object') {\n throw 
newRxError('SC21', {\n index: indexPath,\n schema: jsonSchema\n });\n }\n return { indexPath, schemaObj };\n })\n .filter(index =>\n index.schemaObj.type !== 'string' &&\n index.schemaObj.type !== 'integer' &&\n index.schemaObj.type !== 'number' &&\n index.schemaObj.type !== 'boolean'\n )\n .forEach(index => {\n throw newRxError('SC22', {\n key: index.indexPath,\n type: index.schemaObj.type,\n schema: jsonSchema\n });\n });\n\n\n /**\n * TODO\n * in 9.0.0 we changed the way encrypted fields are defined\n * This check ensures people do not oversee the breaking change\n * Remove this check in the future\n */\n Object.keys(flattenObject(jsonSchema))\n .map(key => {\n // flattenObject returns only ending paths, we need all paths pointing to an object\n const split = key.split('.');\n split.pop(); // all but last\n return split.join('.');\n })\n .filter(key => key !== '' && key !== 'attachments')\n .filter((elem, pos, arr) => arr.indexOf(elem) === pos) // unique\n .filter(key => {\n // check if this path defines an encrypted field\n const value = getProperty(jsonSchema, key);\n return value && !!value.encrypted;\n })\n .forEach(key => { // replace inner properties\n key = key.replace('properties.', ''); // first\n key = key.replace(/\\.properties\\./g, '.'); // middle\n throw newRxError('SC27', {\n index: trimDots(key),\n schema: jsonSchema\n });\n });\n\n /* ensure encrypted fields exist in the schema */\n if (jsonSchema.encrypted) {\n jsonSchema.encrypted\n .forEach(propPath => {\n // real path in the collection schema\n const realPath = getSchemaPropertyRealPath(propPath);\n // get the schema of the indexed property\n const schemaObj = getProperty(jsonSchema, realPath);\n if (!schemaObj || typeof schemaObj !== 'object') {\n throw newRxError('SC28', {\n field: propPath,\n schema: jsonSchema\n });\n }\n });\n 
}\n}\n"],"mappings":";;;;;;;;;AAIA,IAAAA,QAAA,GAAAC,OAAA;AAGA,IAAAC,eAAA,GAAAD,OAAA;AAQA,IAAAE,MAAA,GAAAF,OAAA;AAKA,IAAAG,iBAAA,GAAAH,OAAA;AApBA;AACA;AACA;AACA;;AAmBA;AACA;AACA;AACA;AACA;AACA;AACO,SAASI,mBAAmBA,CAACC,SAAiB,EAAE;EACnD,IAAIA,SAAS,KAAK,UAAU,EAAE;IAC1B;EACJ;EAEA,IAAI,CAAC,YAAY,CAAC,CAACC,QAAQ,CAACD,SAAS,CAAC,EAAE;IACpC,MAAM,IAAAE,mBAAU,EAAC,MAAM,EAAE;MACrBF;IACJ,CAAC,CAAC;EACN;EAEA,IAAMG,QAAQ,GAAG,4CAA4C;EAC7D,IAAMC,KAAK,GAAG,IAAIC,MAAM,CAACF,QAAQ,CAAC;EAClC;EACI;AACR;AACA;AACA;AACA;EACQH,SAAS,KAAK,KAAK,IACnB,CAACA,SAAS,CAACM,KAAK,CAACF,KAAK,CAAC,EACzB;IACE,MAAM,IAAAF,mBAAU,EAAC,KAAK,EAAE;MACpBE,KAAK,EAAED,QAAQ;MACfH;IACJ,CAAC,CAAC;EACN;AACJ;;AAEA;AACA;AACA;AACO,SAASO,kBAAkBA,CAACC,YAA+B,EAAQ;EAEtE,IAAMC,WAAW,GAAG,IAAAC,2CAA2B,EAACF,YAAY,CAACG,UAAU,CAAC;EAExE,SAASC,UAAUA,CACfZ,SAAiB,EACjBa,SAA0B,EAC1BC,IAAY,EACd;IACE,IACI,OAAOd,SAAS,KAAK,QAAQ,IAC7B,OAAOa,SAAS,KAAK,QAAQ,IAC7B,CAACE,KAAK,CAACC,OAAO,CAACH,SAAS,CAAC,IACzBC,IAAI,CAACG,KAAK,CAAC,GAAG,CAAC,CAACC,GAAG,CAAC,CAAC,KAAK,mBAAmB,EAC/CnB,mBAAmB,CAACC,SAAS,CAAC;;IAEhC;IACA,IAAImB,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACT,SAAS,EAAE,MAAM,CAAC,IAAIA,SAAS,CAACU,IAAI,KAAK,OAAO,EAAE;MACvF,MAAM,IAAArB,mBAAU,EAAC,KAAK,EAAE;QACpBF;MACJ,CAAC,CAAC;IACN;;IAEA;AACR;AACA;AACA;IACQ,IACImB,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACT,SAAS,EAAE,UAAU,CAAC,IAC3D,OAAOA,SAAS,CAACW,QAAQ,KAAK,SAAS,EACzC;MACE,MAAM,IAAAtB,mBAAU,EAAC,MAAM,EAAE;QACrBF;MACJ,CAAC,CAAC;IACN;;IAEA;IACA,IAAImB,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACT,SAAS,EAAE,MAAM,CAAC,EAAE;MACzD,MAAM,IAAAX,mBAAU,EAAC,MAAM,EAAE;QACrBF;MACJ,CAAC,CAAC;IACN;;IAGA;IACA,IAAImB,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACT,SAAS,EAAE,KAAK,CAAC,EAAE;MACxD,IAAIE,KAAK,CAACC,OAAO,CAACH,SAAS,CAACU,IAAI,CAAC,EAAE;QAC/B,IAAIV,SAAS,CAACU,IAAI,CAACE,MAAM,GAAG,CAAC,IAAI,CAACZ,SAAS,CAACU,IAAI,CAACtB,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAACY,SAAS,CAACU,IAAI,CAACtB,QAAQ,CAAC,MAAM,CAAC,EAAE;UACrG,MAAM,IAAAC,mBAAU,EAAC,KAAK,EAAE;YACpBF;UACJ,CAAC,CAAC;QACN;MACJ,CAAC,MAAM;QACH,QAAQa,SAAS,CAACU,IAAI
;UAClB,KAAK,QAAQ;YACT;UACJ,KAAK,OAAO;YACR,IACI,CAACV,SAAS,CAACa,KAAK,IAChB,CAAEb,SAAS,CAACa,KAAK,CAASH,IAAI,IAC7BV,SAAS,CAACa,KAAK,CAASH,IAAI,KAAK,QAAQ,EAC5C;cACE,MAAM,IAAArB,mBAAU,EAAC,KAAK,EAAE;gBACpBF;cACJ,CAAC,CAAC;YACN;YACA;UACJ;YACI,MAAM,IAAAE,mBAAU,EAAC,KAAK,EAAE;cACpBF;YACJ,CAAC,CAAC;QACV;MACJ;IACJ;IAEA,IAAM2B,QAAQ,GAAGb,IAAI,CAACG,KAAK,CAAC,GAAG,CAAC,CAACQ,MAAM,IAAI,CAAC;;IAE5C;IACA,IAAIE,QAAQ,EAAE;MACV,IAAKd,SAAS,CAASe,OAAO,EAAE;QAC5B,MAAM,IAAA1B,mBAAU,EAAC,KAAK,EAAE;UACpBY;QACJ,CAAC,CAAC;MACN;IACJ;;IAEA;IACA,IAAI,CAACa,QAAQ,EAAE;MAEX;MACA,IACI3B,SAAS,KAAK,KAAK,IACnBS,WAAW,KAAK,KAAK,EACvB;QACE,MAAM,IAAAP,mBAAU,EAAC,MAAM,EAAE;UACrBF;QACJ,CAAC,CAAC;MACN;;MAEA;MACA,IAAIA,SAAS,CAAC6B,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;QAC7B;QACI;QACA7B,SAAS,KAAK,KAAK,IACnBA,SAAS,KAAK,UAAU,EAC1B;UACE;QACJ;QACA,MAAM,IAAAE,mBAAU,EAAC,KAAK,EAAE;UACpBF;QACJ,CAAC,CAAC;MACN;IACJ;EACJ;EAEA,SAAS8B,QAAQA,CAACC,UAAe,EAAEC,WAAgB,EAAE;IACjD,IAAI,CAACD,UAAU,IAAI,OAAOA,UAAU,KAAK,QAAQ,EAAE;MAC/C;IACJ;IACAZ,MAAM,CAACc,IAAI,CAACF,UAAU,CAAC,CAACG,OAAO,CAACC,aAAa,IAAI;MAC7C,IAAMtB,SAAS,GAAGkB,UAAU,CAACI,aAAa,CAAC;MAC3C,IACI,CAACJ,UAAU,CAACK,UAAU,IACtBvB,SAAS,IACT,OAAOA,SAAS,KAAK,QAAQ,IAC7B,CAACE,KAAK,CAACC,OAAO,CAACe,UAAU,CAAC,EAC5B;QACEnB,UAAU,CACNuB,aAAa,EACbtB,SAAS,EACTmB,WACJ,CAAC;MACL;MACA,IAAIK,QAAQ,GAAGL,WAAW;MAC1B,IAAIG,aAAa,KAAK,YAAY,EAAEE,QAAQ,GAAGA,QAAQ,GAAG,GAAG,GAAGF,aAAa;MAC7EL,QAAQ,CAACjB,SAAS,EAAEwB,QAAQ,CAAC;IACjC,CAAC,CAAC;EACN;EACAP,QAAQ,CAACtB,YAAY,EAAE,EAAE,CAAC;EAC1B,OAAO,IAAI;AACf;AAEO,SAAS8B,eAAeA,CAC3BC,UAA6B,EAC/B;EACE,IAAI,CAACA,UAAU,CAAC5B,UAAU,EAAE;IACxB,MAAM,IAAAT,mBAAU,EAAC,MAAM,EAAE;MAAEsC,MAAM,EAAED;IAAW,CAAC,CAAC;EACpD;EAIA,SAASE,yBAAyBA,CAC9BC,UAAyC,EAC3C;IACE,IAAI,CAACA,UAAU,EAAE;MACb,MAAM,IAAAxC,mBAAU,EAAC,MAAM,EAAE;QAAEsC,MAAM,EAAED;MAAW,CAAC,CAAC;IACpD;IAEA,IAAMhB,IAAY,GAAGmB,UAAU,CAACnB,IAAW;IAC3C,IACI,CAACA,IAAI,IACL,CAAC,CAAC,QAAQ,EAAE,QAAQ,EAAE,SAAS,CAAC,CAACtB,QAAQ,CAACsB,IAAI,CAAC,EACjD;MACE,MAAM,IAAArB,mBAAU,EAAC,MAAM,EAAE;QAAEsC,MAAM,EAAED,UAAU;QAAEI
,IAAI,EAAE;UAAED;QAAW;MAAE,CAAC,CAAC;IAC1E;EACJ;EAEA,IAAI,OAAOH,UAAU,CAAC5B,UAAU,KAAK,QAAQ,EAAE;IAC3C,IAAMiC,GAAG,GAAGL,UAAU,CAAC5B,UAAU;IACjC,IAAM+B,UAAU,GAAGH,UAAU,CAACH,UAAU,CAACQ,GAAG,CAAC;IAC7CH,yBAAyB,CAACC,UAAU,CAAC;EACzC,CAAC,MAAM;IACH,IAAMG,mBAA6C,GAAGN,UAAU,CAAC5B,UAAiB;IAElF,IAAMmC,aAAa,GAAG,IAAAC,qCAAqB,EAACR,UAAU,EAAEM,mBAAmB,CAACD,GAAG,CAAC;IAChFH,yBAAyB,CAACK,aAAa,CAAC;IAExCD,mBAAmB,CAACG,MAAM,CAACd,OAAO,CAACe,KAAK,IAAI;MACxC,IAAMP,UAAU,GAAG,IAAAK,qCAAqB,EAACR,UAAU,EAAEU,KAAK,CAAC;MAC3DR,yBAAyB,CAACC,UAAU,CAAC;IACzC,CAAC,CAAC;EACN;;EAGA;AACJ;AACA;AACA;AACA;EACI,IAAMjC,WAAW,GAAG,IAAAC,2CAA2B,EAAC6B,UAAU,CAAC5B,UAAU,CAAC;EACtE,IAAMuC,qBAAqB,GAAGX,UAAU,CAACH,UAAU,CAAC3B,WAAW,CAAC;EAChE,IAAI,CAACyC,qBAAqB,CAACC,SAAS,EAAE;IAClC,MAAM,IAAAjD,mBAAU,EAAC,MAAM,EAAE;MAAEsC,MAAM,EAAED,UAAU;MAAEI,IAAI,EAAE;QAAEO;MAAsB;IAAE,CAAC,CAAC;EACrF,CAAC,MAAM,IAAI,CAACE,QAAQ,CAACF,qBAAqB,CAACC,SAAS,CAAC,EAAE;IACnD,MAAM,IAAAjD,mBAAU,EAAC,MAAM,EAAE;MAAEsC,MAAM,EAAED,UAAU;MAAEI,IAAI,EAAE;QAAEO;MAAsB;IAAE,CAAC,CAAC;EACrF;AACJ;;AAEA;AACA;AACA;AACA,SAASG,yBAAyBA,CAACC,SAAiB,EAAE;EAClD,IAAMC,SAAS,GAAGD,SAAS,CAACrC,KAAK,CAAC,GAAG,CAAC;EACtC,IAAIuC,QAAQ,GAAG,EAAE;EACjB,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGF,SAAS,CAAC9B,MAAM,EAAEgC,CAAC,IAAI,CAAC,EAAE;IAC1C,IAAIF,SAAS,CAACE,CAAC,CAAC,KAAK,IAAI,EAAE;MACvBD,QAAQ,GAAGA,QAAQ,CAACE,MAAM,CAAC,cAAc,CAACA,MAAM,CAACH,SAAS,CAACE,CAAC,CAAC,CAAC,CAAC;IACnE,CAAC,MAAM;MACHD,QAAQ,GAAGA,QAAQ,CAACE,MAAM,CAAC,QAAQ,CAAC;IACxC;EACJ;EACA,OAAO,IAAAC,eAAQ,EAACH,QAAQ,CAAC;AAC7B;;AAEA;AACA;AACA;AACA;AACO,SAASI,WAAWA,CAACrB,UAA6B,EAAE;EAEvD,IAAI,CAACA,UAAU,CAAC5B,UAAU,EAAE;IACxB,MAAM,IAAAT,mBAAU,EAAC,MAAM,EAAE;MACrBsC,MAAM,EAAED;IACZ,CAAC,CAAC;EACN;EAEA,IAAI,CAACpB,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACiB,UAAU,EAAE,YAAY,CAAC,EAAE;IACjE,MAAM,IAAArC,mBAAU,EAAC,MAAM,EAAE;MACrBsC,MAAM,EAAED;IACZ,CAAC,CAAC;EACN;;EAEA;EACA,IAAIA,UAAU,CAACH,UAAU,CAACyB,IAAI,EAAE;IAC5B,MAAM,IAAA3D,mBAAU,EAAC,MAAM,EAAE;MACrBsC,MAAM,EAAED;IACZ,CAAC,CAAC;EACN;;EAEA;EACA,IAAI,CAAC
pB,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACiB,UAAU,EAAE,SAAS,CAAC,IAC5D,OAAOA,UAAU,CAACuB,OAAO,KAAK,QAAQ,IACtCvB,UAAU,CAACuB,OAAO,GAAG,CAAC,EACxB;IACE,MAAM,IAAA5D,mBAAU,EAAC,MAAM,EAAE;MACrB4D,OAAO,EAAEvB,UAAU,CAACuB;IACxB,CAAC,CAAC;EACN;EAEAvD,kBAAkB,CAACgC,UAAU,CAAC;EAC9BD,eAAe,CAACC,UAAU,CAAC;EAE3BpB,MAAM,CAACc,IAAI,CAACM,UAAU,CAACH,UAAU,CAAC,CAACF,OAAO,CAACU,GAAG,IAAI;IAC9C,IAAMmB,KAAU,GAAGxB,UAAU,CAACH,UAAU,CAACQ,GAAG,CAAC;IAC7C;IACA,IAAIA,GAAG,KAAKL,UAAU,CAAC5B,UAAU,EAAE;MAC/B,IAAI4B,UAAU,CAACyB,OAAO,IAAIzB,UAAU,CAACyB,OAAO,CAAC/D,QAAQ,CAAC2C,GAAG,CAAC,EAAE;QACxD,MAAM,IAAA1C,mBAAU,EAAC,MAAM,EAAE;UACrB6D,KAAK;UACLvB,MAAM,EAAED;QACZ,CAAC,CAAC;MACN;MACA,IAAIwB,KAAK,CAACE,MAAM,EAAE;QACd,MAAM,IAAA/D,mBAAU,EAAC,MAAM,EAAE;UACrB6D,KAAK;UACLvB,MAAM,EAAED;QACZ,CAAC,CAAC;MACN;MACA,IAAIA,UAAU,CAAC2B,SAAS,IAAI3B,UAAU,CAAC2B,SAAS,CAACjE,QAAQ,CAAC2C,GAAG,CAAC,EAAE;QAC5D,MAAM,IAAA1C,mBAAU,EAAC,MAAM,EAAE;UACrB6D,KAAK;UACLvB,MAAM,EAAED;QACZ,CAAC,CAAC;MACN;MACA,IAAIwB,KAAK,CAACxC,IAAI,KAAK,QAAQ,EAAE;QACzB,MAAM,IAAArB,mBAAU,EAAC,MAAM,EAAE;UACrB6D,KAAK;UACLvB,MAAM,EAAED;QACZ,CAAC,CAAC;MACN;IACJ;;IAEA;IACA,IAAI,IAAA4B,sCAAoB,EAAC,CAAC,CAAClE,QAAQ,CAAC2C,GAAG,CAAC,EAAE;MACtC,MAAM,IAAA1C,mBAAU,EAAC,MAAM,EAAE;QACrB0C,GAAG;QACHJ,MAAM,EAAED;MACZ,CAAC,CAAC;IACN;EACJ,CAAC,CAAC;;EAEF;EACA,IAAIA,UAAU,CAACyB,OAAO,EAAE;IACpB;IACA,IAAI,CAAC,IAAAI,2BAAoB,EAAC7B,UAAU,CAACyB,OAAO,CAAC,EAAE;MAC3C,MAAM,IAAA9D,mBAAU,EAAC,MAAM,EAAE;QACrB8D,OAAO,EAAEzB,UAAU,CAACyB,OAAO;QAC3BxB,MAAM,EAAED;MACZ,CAAC,CAAC;IACN;IAEAA,UAAU,CAACyB,OAAO,CAAC9B,OAAO,CAACmC,KAAK,IAAI;MAChC;MACA,IAAI,EAAE,OAAOA,KAAK,KAAK,QAAQ,IAAItD,KAAK,CAACC,OAAO,CAACqD,KAAK,CAAC,CAAC,EAAE;QACtD,MAAM,IAAAnE,mBAAU,EAAC,MAAM,EAAE;UAAEmE,KAAK;UAAE7B,MAAM,EAAED;QAAW,CAAC,CAAC;MAC3D;MACA;MACA,IAAIxB,KAAK,CAACC,OAAO,CAACqD,KAAK,CAAC,EAAE;QACtB,KAAK,IAAIZ,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGY,KAAK,CAAC5C,MAAM,EAAEgC,CAAC,IAAI,CAAC,EAAE;UACtC,IAAI,OAAOY,KAAK,CAACZ,CAAC,CAAC,KAAK,QAAQ,EAAE;YAC9B,MAAM,IAAAvD,mBAAU,EAAC,MAAM,EAAE;cAAEmE,KAAK;cAAE7B,MAAM,EAAE
D;YAAW,CAAC,CAAC;UAC3D;QACJ;MACJ;;MAEA;AACZ;AACA;AACA;AACA;AACA;MACY,IAAM+B,YAAY,GAAG,IAAAF,2BAAoB,EAACC,KAAK,CAAC,GAAGA,KAAK,GAAG,CAACA,KAAK,CAAC;MAClEC,YAAY,CAACpC,OAAO,CAAClC,SAAS,IAAI;QAC9B,IAAM0C,UAAU,GAAG,IAAAK,qCAAqB,EACpCR,UAAU,EACVvC,SACJ,CAAC;QAGD,IAAMuB,IAAqB,GAAGmB,UAAU,CAACnB,IAAW;QACpD,QAAQA,IAAI;UACR,KAAK,QAAQ;YACT,IAAM4B,SAAS,GAAGT,UAAU,CAACS,SAAS;YACtC,IAAI,CAACA,SAAS,EAAE;cACZ,MAAM,IAAAjD,mBAAU,EAAC,MAAM,EAAE;gBACrBmE,KAAK;gBACLpB,KAAK,EAAEjD,SAAS;gBAChBwC,MAAM,EAAED;cACZ,CAAC,CAAC;YACN;YACA;UACJ,KAAK,QAAQ;UACb,KAAK,SAAS;YACV,IAAMgC,UAAU,GAAG7B,UAAU,CAAC6B,UAAU;YACxC,IAAI,CAACA,UAAU,EAAE;cACb,MAAM,IAAArE,mBAAU,EAAC,MAAM,EAAE;gBACrBmE,KAAK;gBACLpB,KAAK,EAAEjD,SAAS;gBAChBwC,MAAM,EAAED;cACZ,CAAC,CAAC;YACN;YACA,IAAMiC,OAAO,GAAG9B,UAAU,CAAC8B,OAAO;YAClC,IAAMC,OAAO,GAAG/B,UAAU,CAAC+B,OAAO;YAClC,IACI,OAAOD,OAAO,KAAK,WAAW,IAC9B,OAAOC,OAAO,KAAK,WAAW,EAChC;cACE,MAAM,IAAAvE,mBAAU,EAAC,MAAM,EAAE;gBACrBmE,KAAK;gBACLpB,KAAK,EAAEjD,SAAS;gBAChBwC,MAAM,EAAED;cACZ,CAAC,CAAC;YACN;YAEA,IACI,CAACa,QAAQ,CAACoB,OAAO,CAAC,IAClB,CAACpB,QAAQ,CAACqB,OAAO,CAAC,EACpB;cACE,MAAM,IAAAvE,mBAAU,EAAC,MAAM,EAAE;gBACrBmE,KAAK;gBACLpB,KAAK,EAAEjD,SAAS;gBAChBwC,MAAM,EAAED;cACZ,CAAC,CAAC;YACN;YAEA;UACJ,KAAK,SAAS;YACV;AACxB;AACA;AACA;YACwB,IAAImC,UAAU,GAAG,EAAE;YACnB,IAAIC,YAAY,GAAG3E,SAAS;YAC5B,IAAIA,SAAS,CAACC,QAAQ,CAAC,GAAG,CAAC,EAAE;cACzB,IAAM2E,SAAS,GAAG5E,SAAS,CAACiB,KAAK,CAAC,GAAG,CAAC;cACtC0D,YAAY,GAAGC,SAAS,CAAC1D,GAAG,CAAC,CAAC;cAC9BwD,UAAU,GAAGE,SAAS,CAACC,IAAI,CAAC,GAAG,CAAC;YACpC;YACA,IAAMC,gBAAgB,GAAGJ,UAAU,KAAK,EAAE,GAAGnC,UAAU,GAAG,IAAAQ,qCAAqB,EAC3ER,UAAU,EACVmC,UACJ,CAAC;YAED,IACI,CAACI,gBAAgB,CAACtD,QAAQ,IAC1B,CAACsD,gBAAgB,CAACtD,QAAQ,CAACvB,QAAQ,CAAC0E,YAAY,CAAC,EACnD;cACE,MAAM,IAAAzE,mBAAU,EAAC,MAAM,EAAE;gBACrBmE,KAAK;gBACLpB,KAAK,EAAEjD,SAAS;gBAChBwC,MAAM,EAAED;cACZ,CAAC,CAAC;YACN;YACA;UAEJ;YACI,MAAM,IAAArC,mBAAU,EAAC,MAAM,EAAE;cACrBF,SAAS;cACTuB,IAAI,EAAEmB,UAAU,CAACnB,IAAW;cAC5BiB,MAAM,EAAED;YACZ,CAAC,CAAC;QACV;MACJ,CAAC,CAAC;IAEN,CAAC,CAAC;EACN;;EAEA;EACApB,MAAM,
CAACc,IAAI,CAAC,IAAA8C,oBAAa,EAACxC,UAAU,CAAC,CAAC,CACjCyC,GAAG,CAACpC,GAAG,IAAI;IACR;IACA,IAAM3B,KAAK,GAAG2B,GAAG,CAAC3B,KAAK,CAAC,GAAG,CAAC;IAC5BA,KAAK,CAACC,GAAG,CAAC,CAAC,CAAC,CAAC;IACb,OAAOD,KAAK,CAAC4D,IAAI,CAAC,GAAG,CAAC;EAC1B,CAAC,CAAC,CACDI,MAAM,CAACrC,GAAG,IAAIA,GAAG,KAAK,EAAE,CAAC,CACzBqC,MAAM,CAAC,CAACC,IAAI,EAAEC,GAAG,EAAEC,GAAG,KAAKA,GAAG,CAACC,OAAO,CAACH,IAAI,CAAC,KAAKC,GAAG,CAAC,CAAC;EAAA,CACtDF,MAAM,CAACrC,GAAG,IAAI;IAAE;IACb,IAAMmB,KAAK,GAAG,IAAAuB,kBAAW,EAAC/C,UAAU,EAAEK,GAAG,CAAC;IAC1C,OAAOmB,KAAK,IAAI,CAAC,CAACA,KAAK,CAACM,KAAK;EACjC,CAAC,CAAC,CACDnC,OAAO,CAACU,GAAG,IAAI;IAAE;IACdA,GAAG,GAAGA,GAAG,CAAC2C,OAAO,CAAC,aAAa,EAAE,EAAE,CAAC,CAAC,CAAC;IACtC3C,GAAG,GAAGA,GAAG,CAAC2C,OAAO,CAAC,iBAAiB,EAAE,GAAG,CAAC,CAAC,CAAC;IAC3C,MAAM,IAAArF,mBAAU,EAAC,MAAM,EAAE;MACrBmE,KAAK,EAAE,IAAAV,eAAQ,EAACf,GAAG,CAAC;MACpBJ,MAAM,EAAED;IACZ,CAAC,CAAC;EACN,CAAC,CAAC;;EAEN;EACA,CAACA,UAAU,CAACyB,OAAO,IAAI,EAAE,EACpBwB,MAAM,CAAC,CAACC,UAAoB,EAAEC,YAAY,KAAK;IAC5C,IAAI,IAAAtB,2BAAoB,EAACsB,YAAY,CAAC,EAAE;MACpC,IAAAC,oBAAa,EAACF,UAAU,EAAEC,YAAY,CAAC;IAC3C,CAAC,MAAM;MACHD,UAAU,CAACG,IAAI,CAACF,YAAY,CAAC;IACjC;IACA,OAAOD,UAAU;EACrB,CAAC,EAAE,EAAE,CAAC,CACLR,MAAM,CAAC,CAACC,IAAI,EAAEC,GAAG,EAAEC,GAAG,KAAKA,GAAG,CAACC,OAAO,CAACH,IAAI,CAAC,KAAKC,GAAG,CAAC,CAAC;EAAA,CACtDH,GAAG,CAACa,SAAS,IAAI;IACd,IAAMrC,QAAQ,GAAGH,yBAAyB,CAACwC,SAAS,CAAC,CAAC,CAAC;IACvD,IAAMhF,SAAS,GAAG,IAAAyE,kBAAW,EAAC/C,UAAU,EAAEiB,QAAQ,CAAC,CAAC,CAAC;IACrD,IAAI,CAAC3C,SAAS,IAAI,OAAOA,SAAS,KAAK,QAAQ,EAAE;MAC7C,MAAM,IAAAX,mBAAU,EAAC,MAAM,EAAE;QACrBmE,KAAK,EAAEwB,SAAS;QAChBrD,MAAM,EAAED;MACZ,CAAC,CAAC;IACN;IACA,OAAO;MAAEsD,SAAS;MAAEhF;IAAU,CAAC;EACnC,CAAC,CAAC,CACDoE,MAAM,CAACZ,KAAK,IACTA,KAAK,CAACxD,SAAS,CAACU,IAAI,KAAK,QAAQ,IACjC8C,KAAK,CAACxD,SAAS,CAACU,IAAI,KAAK,SAAS,IAClC8C,KAAK,CAACxD,SAAS,CAACU,IAAI,KAAK,QAAQ,IACjC8C,KAAK,CAACxD,SAAS,CAACU,IAAI,KAAK,SAC7B,CAAC,CACAW,OAAO,CAACmC,KAAK,IAAI;IACd,MAAM,IAAAnE,mBAAU,EAAC,MAAM,EAAE;MACrB0C,GAAG,EAAEyB,KAAK,CAACwB,SAAS;MACpBtE,IAAI,EAAE8C,KAAK,CAACxD,SAAS,CAA
CU,IAAI;MAC1BiB,MAAM,EAAED;IACZ,CAAC,CAAC;EACN,CAAC,CAAC;;EAGN;AACJ;AACA;AACA;AACA;AACA;EACIpB,MAAM,CAACc,IAAI,CAAC,IAAA8C,oBAAa,EAACxC,UAAU,CAAC,CAAC,CACjCyC,GAAG,CAACpC,GAAG,IAAI;IACR;IACA,IAAM3B,KAAK,GAAG2B,GAAG,CAAC3B,KAAK,CAAC,GAAG,CAAC;IAC5BA,KAAK,CAACC,GAAG,CAAC,CAAC,CAAC,CAAC;IACb,OAAOD,KAAK,CAAC4D,IAAI,CAAC,GAAG,CAAC;EAC1B,CAAC,CAAC,CACDI,MAAM,CAACrC,GAAG,IAAIA,GAAG,KAAK,EAAE,IAAIA,GAAG,KAAK,aAAa,CAAC,CAClDqC,MAAM,CAAC,CAACC,IAAI,EAAEC,GAAG,EAAEC,GAAG,KAAKA,GAAG,CAACC,OAAO,CAACH,IAAI,CAAC,KAAKC,GAAG,CAAC,CAAC;EAAA,CACtDF,MAAM,CAACrC,GAAG,IAAI;IACX;IACA,IAAMmB,KAAK,GAAG,IAAAuB,kBAAW,EAAC/C,UAAU,EAAEK,GAAG,CAAC;IAC1C,OAAOmB,KAAK,IAAI,CAAC,CAACA,KAAK,CAACG,SAAS;EACrC,CAAC,CAAC,CACDhC,OAAO,CAACU,GAAG,IAAI;IAAE;IACdA,GAAG,GAAGA,GAAG,CAAC2C,OAAO,CAAC,aAAa,EAAE,EAAE,CAAC,CAAC,CAAC;IACtC3C,GAAG,GAAGA,GAAG,CAAC2C,OAAO,CAAC,iBAAiB,EAAE,GAAG,CAAC,CAAC,CAAC;IAC3C,MAAM,IAAArF,mBAAU,EAAC,MAAM,EAAE;MACrBmE,KAAK,EAAE,IAAAV,eAAQ,EAACf,GAAG,CAAC;MACpBJ,MAAM,EAAED;IACZ,CAAC,CAAC;EACN,CAAC,CAAC;;EAEN;EACA,IAAIA,UAAU,CAAC2B,SAAS,EAAE;IACtB3B,UAAU,CAAC2B,SAAS,CACfhC,OAAO,CAAC4D,QAAQ,IAAI;MACjB;MACA,IAAMtC,QAAQ,GAAGH,yBAAyB,CAACyC,QAAQ,CAAC;MACpD;MACA,IAAMjF,SAAS,GAAG,IAAAyE,kBAAW,EAAC/C,UAAU,EAAEiB,QAAQ,CAAC;MACnD,IAAI,CAAC3C,SAAS,IAAI,OAAOA,SAAS,KAAK,QAAQ,EAAE;QAC7C,MAAM,IAAAX,mBAAU,EAAC,MAAM,EAAE;UACrB+C,KAAK,EAAE6C,QAAQ;UACftD,MAAM,EAAED;QACZ,CAAC,CAAC;MACN;IACJ,CAAC,CAAC;EACV;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/dev-mode/entity-properties.js b/dist/cjs/plugins/dev-mode/entity-properties.js deleted file mode 100644 index f2f733b2937..00000000000 --- a/dist/cjs/plugins/dev-mode/entity-properties.js +++ /dev/null @@ -1,56 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.rxCollectionProperties = rxCollectionProperties; -exports.rxDatabaseProperties = rxDatabaseProperties; -exports.rxDocumentProperties = rxDocumentProperties; -var _rxCollection = require("../../rx-collection.js"); 
-var _rxDatabase = require("../../rx-database.js"); -var _rxDocument = require("../../rx-document.js"); -/** - * returns all possible properties of a RxCollection-instance - */ -var _rxCollectionProperties; -function rxCollectionProperties() { - if (!_rxCollectionProperties) { - var pseudoInstance = new _rxCollection.RxCollectionBase(); - var ownProperties = Object.getOwnPropertyNames(pseudoInstance); - var prototypeProperties = Object.getOwnPropertyNames(Object.getPrototypeOf(pseudoInstance)); - _rxCollectionProperties = [...ownProperties, ...prototypeProperties]; - } - return _rxCollectionProperties; -} - -/** - * returns all possible properties of a RxDatabase-instance - */ -var _rxDatabaseProperties; -function rxDatabaseProperties() { - if (!_rxDatabaseProperties) { - var pseudoInstance = new _rxDatabase.RxDatabaseBase('pseudoInstance', 'memory'); - var ownProperties = Object.getOwnPropertyNames(pseudoInstance); - var prototypeProperties = Object.getOwnPropertyNames(Object.getPrototypeOf(pseudoInstance)); - _rxDatabaseProperties = [...ownProperties, ...prototypeProperties]; - pseudoInstance.destroy(); - } - return _rxDatabaseProperties; -} - -/** - * returns all possible properties of a RxDocument - */ -var pseudoConstructor = (0, _rxDocument.createRxDocumentConstructor)(_rxDocument.basePrototype); -var pseudoRxDocument = new pseudoConstructor(); -var _rxDocumentProperties; -function rxDocumentProperties() { - if (!_rxDocumentProperties) { - var reserved = ['deleted', 'synced']; - var ownProperties = Object.getOwnPropertyNames(pseudoRxDocument); - var prototypeProperties = Object.getOwnPropertyNames(_rxDocument.basePrototype); - _rxDocumentProperties = [...ownProperties, ...prototypeProperties, ...reserved]; - } - return _rxDocumentProperties; -} -//# sourceMappingURL=entity-properties.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/dev-mode/entity-properties.js.map b/dist/cjs/plugins/dev-mode/entity-properties.js.map deleted file mode 100644 
index a619d01dfbf..00000000000 --- a/dist/cjs/plugins/dev-mode/entity-properties.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"entity-properties.js","names":["_rxCollection","require","_rxDatabase","_rxDocument","_rxCollectionProperties","rxCollectionProperties","pseudoInstance","RxCollectionBase","ownProperties","Object","getOwnPropertyNames","prototypeProperties","getPrototypeOf","_rxDatabaseProperties","rxDatabaseProperties","RxDatabaseBase","destroy","pseudoConstructor","createRxDocumentConstructor","basePrototype","pseudoRxDocument","_rxDocumentProperties","rxDocumentProperties","reserved"],"sources":["../../../../src/plugins/dev-mode/entity-properties.ts"],"sourcesContent":["import { RxCollectionBase } from '../../rx-collection.ts';\nimport { RxDatabaseBase } from '../../rx-database.ts';\nimport { createRxDocumentConstructor, basePrototype } from '../../rx-document.ts';\n\n/**\n * returns all possible properties of a RxCollection-instance\n */\nlet _rxCollectionProperties: string[];\nexport function rxCollectionProperties(): string[] {\n if (!_rxCollectionProperties) {\n const pseudoInstance = new (RxCollectionBase as any)();\n const ownProperties = Object.getOwnPropertyNames(pseudoInstance);\n const prototypeProperties = Object.getOwnPropertyNames(\n Object.getPrototypeOf(pseudoInstance)\n );\n _rxCollectionProperties = [...ownProperties, ...prototypeProperties];\n }\n return _rxCollectionProperties;\n}\n\n\n/**\n * returns all possible properties of a RxDatabase-instance\n */\nlet _rxDatabaseProperties: string[];\nexport function rxDatabaseProperties(): string[] {\n if (!_rxDatabaseProperties) {\n const pseudoInstance: RxDatabaseBase = new (RxDatabaseBase as any)(\n 'pseudoInstance',\n 'memory'\n );\n const ownProperties = Object.getOwnPropertyNames(pseudoInstance);\n const prototypeProperties = Object.getOwnPropertyNames(\n Object.getPrototypeOf(pseudoInstance)\n );\n _rxDatabaseProperties = [...ownProperties, ...prototypeProperties];\n 
pseudoInstance.destroy();\n }\n return _rxDatabaseProperties;\n}\n\n/**\n * returns all possible properties of a RxDocument\n */\nconst pseudoConstructor = createRxDocumentConstructor(basePrototype);\nconst pseudoRxDocument = new (pseudoConstructor as any)();\nlet _rxDocumentProperties: string[];\nexport function rxDocumentProperties(): string[] {\n if (!_rxDocumentProperties) {\n const reserved = ['deleted', 'synced'];\n const ownProperties = Object.getOwnPropertyNames(pseudoRxDocument);\n const prototypeProperties = Object.getOwnPropertyNames(basePrototype);\n _rxDocumentProperties = [...ownProperties, ...prototypeProperties, ...reserved];\n }\n return _rxDocumentProperties;\n}\n"],"mappings":";;;;;;;;AAAA,IAAAA,aAAA,GAAAC,OAAA;AACA,IAAAC,WAAA,GAAAD,OAAA;AACA,IAAAE,WAAA,GAAAF,OAAA;AAEA;AACA;AACA;AACA,IAAIG,uBAAiC;AAC9B,SAASC,sBAAsBA,CAAA,EAAa;EAC/C,IAAI,CAACD,uBAAuB,EAAE;IAC1B,IAAME,cAAc,GAAG,IAAKC,8BAAgB,CAAS,CAAC;IACtD,IAAMC,aAAa,GAAGC,MAAM,CAACC,mBAAmB,CAACJ,cAAc,CAAC;IAChE,IAAMK,mBAAmB,GAAGF,MAAM,CAACC,mBAAmB,CAClDD,MAAM,CAACG,cAAc,CAACN,cAAc,CACxC,CAAC;IACDF,uBAAuB,GAAG,CAAC,GAAGI,aAAa,EAAE,GAAGG,mBAAmB,CAAC;EACxE;EACA,OAAOP,uBAAuB;AAClC;;AAGA;AACA;AACA;AACA,IAAIS,qBAA+B;AAC5B,SAASC,oBAAoBA,CAAA,EAAa;EAC7C,IAAI,CAACD,qBAAqB,EAAE;IACxB,IAAMP,cAAwC,GAAG,IAAKS,0BAAc,CAChE,gBAAgB,EAChB,QACJ,CAAC;IACD,IAAMP,aAAa,GAAGC,MAAM,CAACC,mBAAmB,CAACJ,cAAc,CAAC;IAChE,IAAMK,mBAAmB,GAAGF,MAAM,CAACC,mBAAmB,CAClDD,MAAM,CAACG,cAAc,CAACN,cAAc,CACxC,CAAC;IACDO,qBAAqB,GAAG,CAAC,GAAGL,aAAa,EAAE,GAAGG,mBAAmB,CAAC;IAClEL,cAAc,CAACU,OAAO,CAAC,CAAC;EAC5B;EACA,OAAOH,qBAAqB;AAChC;;AAEA;AACA;AACA;AACA,IAAMI,iBAAiB,GAAG,IAAAC,uCAA2B,EAACC,yBAAa,CAAC;AACpE,IAAMC,gBAAgB,GAAG,IAAKH,iBAAiB,CAAS,CAAC;AACzD,IAAII,qBAA+B;AAC5B,SAASC,oBAAoBA,CAAA,EAAa;EAC7C,IAAI,CAACD,qBAAqB,EAAE;IACxB,IAAME,QAAQ,GAAG,CAAC,SAAS,EAAE,QAAQ,CAAC;IACtC,IAAMf,aAAa,GAAGC,MAAM,CAACC,mBAAmB,CAACU,gBAAgB,CAAC;IAClE,IAAMT,mBAAmB,GAAGF,MAAM,CAACC,mBAAmB,CAACS,yBAAa,CAAC;IACrEE,qBAAqB,GAAG,CAAC,GAAGb,aAAa,EAAE,GAAGG,mBAAmB,EAA
E,GAAGY,QAAQ,CAAC;EACnF;EACA,OAAOF,qBAAqB;AAChC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/dev-mode/error-messages.js b/dist/cjs/plugins/dev-mode/error-messages.js deleted file mode 100644 index 000bf3ec6de..00000000000 --- a/dist/cjs/plugins/dev-mode/error-messages.js +++ /dev/null @@ -1,237 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.ERROR_MESSAGES = void 0; -/** - * this plugin adds the error-messages - * without it, only error-codes will be shown - * This is mainly because error-string are hard to compress and we need a smaller build - */ - -var ERROR_MESSAGES = exports.ERROR_MESSAGES = { - // util.js / config - UT1: 'given name is no string or empty', - UT2: "collection- and database-names must match the regex to be compatible with couchdb databases.\n See https://neighbourhood.ie/blog/2020/10/13/everything-you-need-to-know-about-couchdb-database-names/\n info: if your database-name specifies a folder, the name must contain the slash-char '/' or '\\'", - UT3: 'replication-direction must either be push or pull or both. But not none', - UT4: 'given leveldown is no valid adapter', - UT5: 'keyCompression is set to true in the schema but no key-compression handler is used in the storage', - UT6: 'schema contains encrypted fields but no encryption handler is used in the storage', - UT7: 'attachments.compression is enabled but no attachment-compression plugin is used', - // plugins - PL1: 'Given plugin is not RxDB plugin.', - // removed in 14.0.0 - PouchDB RxStorage was removed - PL2: 'You tried importing a RxDB plugin to pouchdb. 
Use addRxPlugin() instead.', - PL3: 'A plugin with the same name was already added but it was not the exact same JavaScript object', - // pouch-db.js - // removed in 12.0.0 - P1: 'PouchDB.getBatch: limit must be > 2', - P2: 'bulkWrite() cannot be called with an empty array', - // removed in 12.0.0 - P3: 'bulkAddRevisions cannot be called with an empty array', - - // rx-query - QU1: 'RxQuery._execOverDatabase(): op not known', - // removed in 9.0.0 - QU2: 'limit() must get a number', - // removed in 9.0.0 - QU3: 'skip() must get a number', - QU4: 'RxQuery.regex(): You cannot use .regex() on the primary field', - QU5: 'RxQuery.sort(): does not work because key is not defined in the schema', - QU6: 'RxQuery.limit(): cannot be called on .findOne()', - // removed in 12.0.0 (should by ensured by the typings) - QU7: 'query must be an object', - // removed in 12.0.0 (should by ensured by the typings) - QU8: 'query cannot be an array', - QU9: 'throwIfMissing can only be used in findOne queries', - QU10: 'result empty and throwIfMissing: true', - QU11: 'RxQuery: no valid query params given', - QU12: 'Given index is not in schema', - QU13: 'A top level field of the query is not included in the schema', - QU14: 'Running a count() query in slow mode is now allowed. Either run a count() query with a selector that fully matches an index ' + 'or set allowSlowCount=true when calling the createRxDatabase', - QU15: 'For count queries it is not allowed to use skip or limit', - QU16: '$regex queries must be defined by a string, not an RegExp instance. ' + 'This is because RegExp objects cannot be JSON stringified and also they are mutable which would be dangerous', - // mquery.js - MQ1: 'path must be a string or object', - MQ2: 'Invalid argument', - MQ3: 'Invalid sort() argument. Must be a string, object, or array', - MQ4: 'Invalid argument. 
Expected instanceof mquery or plain object', - MQ5: 'method must be used after where() when called with these arguments', - MQ6: 'Can\'t mix sort syntaxes. Use either array or object | .sort([[\'field\', 1], [\'test\', -1]]) | .sort({ field: 1, test: -1 })', - MQ7: 'Invalid sort value', - MQ8: 'Can\'t mix sort syntaxes. Use either array or object', - // rx-database - DB1: 'RxDocument.prepare(): another instance on this adapter has a different password', - DB2: 'RxDatabase.addCollections(): collection-names cannot start with underscore _', - DB3: 'RxDatabase.addCollections(): collection already exists. use myDatabase[collectionName] to get it', - DB4: 'RxDatabase.addCollections(): schema is missing', - DB5: 'RxDatabase.addCollections(): collection-name not allowed', - DB6: 'RxDatabase.addCollections(): another instance created this collection with a different schema. Read this https://rxdb.info/questions-answers.html?console=qa#cant-change-the-schema ', - // removed in 13.0.0 (now part of the encryption plugin) DB7: 'RxDatabase.addCollections(): schema encrypted but no password given', - DB8: 'createRxDatabase(): A RxDatabase with the same name and adapter already exists.\n' + 'Make sure to use this combination only once or set ignoreDuplicate to true if you do this intentional-\n' + 'This often happens in react projects with hot reload that reloads the code without reloading the process.', - // removed in 14.0.0 - PouchDB RxStorage is removed - DB9: 'createRxDatabase(): Adapter not added. Use addPouchPlugin(require(\'pouchdb-adapter-[adaptername]\'));', - // removed in 14.0.0 - PouchDB RxStorage is removed DB10: 'createRxDatabase(): To use leveldown-adapters, you have to add the leveldb-plugin. 
Use addPouchPlugin(require(\'pouchdb-adapter-leveldb\'));', - DB11: 'createRxDatabase(): Invalid db-name, folder-paths must not have an ending slash', - DB12: 'RxDatabase.addCollections(): could not write to internal store', - DB13: 'createRxDatabase(): Invalid db-name or collection name, name contains the dollar sign', - DB14: 'no custom reactivity factory added on database creation', - // rx-collection - COL1: 'RxDocument.insert() You cannot insert an existing document', - COL2: 'RxCollection.insert() fieldName ._id can only be used as primaryKey', - COL3: 'RxCollection.upsert() does not work without primary', - COL4: 'RxCollection.incrementalUpsert() does not work without primary', - COL5: 'RxCollection.find() if you want to search by _id, use .findOne(_id)', - COL6: 'RxCollection.findOne() needs a queryObject or string', - COL7: 'hook must be a function', - COL8: 'hooks-when not known', - COL9: 'RxCollection.addHook() hook-name not known', - COL10: 'RxCollection .postCreate-hooks cannot be async', - COL11: 'migrationStrategies must be an object', - COL12: 'A migrationStrategy is missing or too much', - COL13: 'migrationStrategy must be a function', - COL14: 'given static method-name is not a string', - COL15: 'static method-names cannot start with underscore _', - COL16: 'given static method is not a function', - COL17: 'RxCollection.ORM: statics-name not allowed', - COL18: 'collection-method not allowed because fieldname is in the schema', - // removed in 14.0.0, use CONFLICT instead - COL19: 'Document update conflict. When changing a document you must work on the previous revision', - COL20: 'Storage write error', - COL21: 'The RxCollection is destroyed or removed already, either from this JavaScript realm or from another, like a browser tab', - CONFLICT: 'Document update conflict. 
When changing a document you must work on the previous revision', - // rx-document.js - DOC1: 'RxDocument.get$ cannot get observable of in-array fields because order cannot be guessed', - DOC2: 'cannot observe primary path', - DOC3: 'final fields cannot be observed', - DOC4: 'RxDocument.get$ cannot observe a non-existed field', - DOC5: 'RxDocument.populate() cannot populate a non-existed field', - DOC6: 'RxDocument.populate() cannot populate because path has no ref', - DOC7: 'RxDocument.populate() ref-collection not in database', - DOC8: 'RxDocument.set(): primary-key cannot be modified', - DOC9: 'final fields cannot be modified', - DOC10: 'RxDocument.set(): cannot set childpath when rootPath not selected', - DOC11: 'RxDocument.save(): can\'t save deleted document', - // removed in 10.0.0 DOC12: 'RxDocument.save(): error', - DOC13: 'RxDocument.remove(): Document is already deleted', - DOC14: 'RxDocument.destroy() does not exist', - DOC15: 'query cannot be an array', - DOC16: 'Since version 8.0.0 RxDocument.set() can only be called on temporary RxDocuments', - DOC17: 'Since version 8.0.0 RxDocument.save() can only be called on non-temporary documents', - DOC18: 'Document property for composed primary key is missing', - DOC19: 'Value of primary key(s) cannot be changed', - DOC20: 'PrimaryKey missing', - DOC21: 'PrimaryKey must be equal to PrimaryKey.trim(). It cannot start or end with a whitespace', - DOC22: 'PrimaryKey must not contain a linebreak', - DOC23: 'PrimaryKey must not contain a double-quote ["]', - DOC24: 'Given document data could not be structured cloned. This happens if you pass non-plain-json data into it, like a Date() object or a Function. 
' + 'In vue.js this happens if you use ref() on the document data which transforms it into a Proxy object.', - // data-migrator.js - DM1: 'migrate() Migration has already run', - DM2: 'migration of document failed final document does not match final schema', - DM3: 'migration already running', - DM4: 'Migration errored', - DM5: 'Cannot open database state with newer RxDB version. You have to migrate your database state first. See https://rxdb.info/migration-storage.html?console=storage ', - // plugins/attachments.js - AT1: 'to use attachments, please define this in your schema', - // plugins/encryption-crypto-js.js - EN1: 'password is not valid', - EN2: 'validatePassword: min-length of password not complied', - EN3: 'Schema contains encrypted properties but no password is given', - EN4: 'Password not valid', - // plugins/json-dump.js - JD1: 'You must create the collections before you can import their data', - JD2: 'RxCollection.importJSON(): the imported json relies on a different schema', - JD3: 'RxCollection.importJSON(): json.passwordHash does not match the own', - // plugins/leader-election.js - - // plugins/local-documents.js - LD1: 'RxDocument.allAttachments$ can\'t use attachments on local documents', - LD2: 'RxDocument.get(): objPath must be a string', - LD3: 'RxDocument.get$ cannot get observable of in-array fields because order cannot be guessed', - LD4: 'cannot observe primary path', - LD5: 'RxDocument.set() id cannot be modified', - LD6: 'LocalDocument: Function is not usable on local documents', - LD7: 'Local document already exists', - LD8: 'localDocuments not activated. 
Set localDocuments=true on creation, when you want to store local documents on the RxDatabase or RxCollection.', - // plugins/replication.js - RC1: 'Replication: already added', - RC2: 'replicateCouchDB() query must be from the same RxCollection', - // removed in 14.0.0 - PouchDB RxStorage is removed RC3: 'RxCollection.syncCouchDB() Do not use a collection\'s pouchdb as remote, use the collection instead', - RC4: 'RxCouchDBReplicationState.awaitInitialReplication() cannot await initial replication when live: true', - RC5: 'RxCouchDBReplicationState.awaitInitialReplication() cannot await initial replication if multiInstance because the replication might run on another instance', - RC6: 'syncFirestore() serverTimestampField MUST NOT be part of the collections schema and MUST NOT be nested.', - RC7: 'SimplePeer requires to have process.nextTick() polyfilled, see https://rxdb.info/replication-webrtc.html?console=webrtc ', - RC_PULL: 'RxReplication pull handler threw an error - see .errors for more details', - RC_STREAM: 'RxReplication pull stream$ threw an error - see .errors for more details', - RC_PUSH: 'RxReplication push handler threw an error - see .errors for more details', - RC_PUSH_NO_AR: 'RxReplication push handler did not return an array with the conflicts', - RC_WEBRTC_PEER: 'RxReplication WebRTC Peer has error', - RC_COUCHDB_1: 'replicateCouchDB() url must end with a slash like \'https://example.com/mydatabase/\'', - RC_COUCHDB_2: 'replicateCouchDB() did not get valid result with rows.', - RC_OUTDATED: 'Outdated client, update required. Replication was canceled', - RC_UNAUTHORIZED: 'Unauthorized client, update the replicationState.headers to set correct auth data', - RC_FORBIDDEN: 'Client behaves wrong so the replication was canceled. 
Mostly happens if the client tries to write data that it is not allowed to', - // plugins/dev-mode/check-schema.js - SC1: 'fieldnames do not match the regex', - SC2: 'SchemaCheck: name \'item\' reserved for array-fields', - SC3: 'SchemaCheck: fieldname has a ref-array but items-type is not string', - SC4: 'SchemaCheck: fieldname has a ref but is not type string, [string,null] or array', - SC6: 'SchemaCheck: primary can only be defined at top-level', - SC7: 'SchemaCheck: default-values can only be defined at top-level', - SC8: 'SchemaCheck: first level-fields cannot start with underscore _', - SC10: 'SchemaCheck: schema defines ._rev, this will be done automatically', - SC11: 'SchemaCheck: schema needs a number >=0 as version', - // removed in 10.0.0 - SC12: 'SchemaCheck: primary can only be defined once', - SC13: 'SchemaCheck: primary is always index, do not declare it as index', - SC14: 'SchemaCheck: primary is always unique, do not declare it as index', - SC15: 'SchemaCheck: primary cannot be encrypted', - SC16: 'SchemaCheck: primary must have type: string', - SC17: 'SchemaCheck: top-level fieldname is not allowed', - SC18: 'SchemaCheck: indexes must be an array', - SC19: 'SchemaCheck: indexes must contain strings or arrays of strings', - SC20: 'SchemaCheck: indexes.array must contain strings', - SC21: 'SchemaCheck: given index is not defined in schema', - SC22: 'SchemaCheck: given indexKey is not type:string', - SC23: 'SchemaCheck: fieldname is not allowed', - SC24: 'SchemaCheck: required fields must be set via array. 
See https://spacetelescope.github.io/understanding-json-schema/reference/object.html#required', - SC25: 'SchemaCheck: compoundIndexes needs to be specified in the indexes field', - SC26: 'SchemaCheck: indexes needs to be specified at collection schema level', - SC27: 'SchemaCheck: encrypted fields need to be specified at collection schema level', - SC28: 'SchemaCheck: encrypted fields is not defined in the schema', - SC29: 'SchemaCheck: missing object key \'properties\'', - SC30: 'SchemaCheck: primaryKey is required', - SC32: 'SchemaCheck: primary field must have the type string/number/integer', - SC33: 'SchemaCheck: used primary key is not a property in the schema', - SC34: 'Fields of type string that are used in an index, must have set the maxLength attribute in the schema', - SC35: 'Fields of type number/integer that are used in an index, must have set the multipleOf attribute in the schema', - SC36: 'A field of this type cannot be used as index', - SC37: 'Fields of type number that are used in an index, must have set the minimum and maximum attribute in the schema', - SC38: 'Fields of type boolean that are used in an index, must be required in the schema', - SC39: 'The primary key must have the maxLength attribute set', - SC40: '$ref fields in the schema are not allowed. RxDB cannot resolve related schemas because it would have a negative performance impact.' + 'It would have to run http requests on runtime. $ref fields should be resolved during build time.', - SC41: 'minimum, maximum and maxLength values for indexes must be real numbers, not Infinity or -Infinity', - // plugins/dev-mode - // removed in 13.9.0, use PL3 instead - DEV1: 'dev-mode added multiple times', - - // plugins/validate.js - VD1: 'Sub-schema not found, does the schemaPath exists in your schema?', - VD2: 'object does not match schema', - // plugins/in-memory.js - // removed in 14.0.0 - PouchDB RxStorage is removed IM1: 'InMemory: Memory-Adapter must be added. 
Use addPouchPlugin(require(\'pouchdb-adapter-memory\'));', - // removed in 14.0.0 - PouchDB RxStorage is removed IM2: 'inMemoryCollection.sync(): Do not replicate with the in-memory instance. Replicate with the parent instead', - - // plugins/server.js - S1: 'You cannot create collections after calling RxDatabase.server()', - // plugins/replication-graphql.js - GQL1: 'GraphQL replication: cannot find sub schema by key', - // removed in 13.0.0, use RC_PULL instead - GQL2: 'GraphQL replication: unknown errors occurred in replication pull - see innerErrors for more details', - GQL3: 'GraphQL replication: pull returns more documents then batchSize', - // removed in 13.0.0, use RC_PUSH instead - GQL4: 'GraphQL replication: unknown errors occurred in replication push - see innerErrors for more details', - - // plugins/crdt/ - CRDT1: 'CRDT operations cannot be used because the crdt options are not set in the schema.', - CRDT2: 'RxDocument.incrementalModify() cannot be used when CRDTs are activated.', - CRDT3: 'To use CRDTs you MUST NOT set a conflictHandler because the default CRDT conflict handler must be used', - // plugins/storage-dexie/ - // removed in 15.0.0, added boolean index support to dexie storage - DXE1: 'The dexie.js RxStorage does not support boolean indexes, see https://rxdb.info/rx-storage-dexie.html#boolean-index', - - /** - * Should never be thrown, use this for - * null checks etc. so you do not have to increase the - * build size with error message strings. 
- */ - SNH: 'This should never happen' -}; -//# sourceMappingURL=error-messages.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/dev-mode/error-messages.js.map b/dist/cjs/plugins/dev-mode/error-messages.js.map deleted file mode 100644 index 8b197165cc7..00000000000 --- a/dist/cjs/plugins/dev-mode/error-messages.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"error-messages.js","names":["ERROR_MESSAGES","exports","UT1","UT2","UT3","UT4","UT5","UT6","UT7","PL1","PL3","P2","QU1","QU4","QU5","QU6","QU9","QU10","QU11","QU12","QU13","QU14","QU15","QU16","MQ1","MQ2","MQ3","MQ4","MQ5","MQ6","MQ7","MQ8","DB1","DB2","DB3","DB4","DB5","DB6","DB8","DB11","DB12","DB13","DB14","COL1","COL2","COL3","COL4","COL5","COL6","COL7","COL8","COL9","COL10","COL11","COL12","COL13","COL14","COL15","COL16","COL17","COL18","COL20","COL21","CONFLICT","DOC1","DOC2","DOC3","DOC4","DOC5","DOC6","DOC7","DOC8","DOC9","DOC10","DOC11","DOC13","DOC14","DOC15","DOC16","DOC17","DOC18","DOC19","DOC20","DOC21","DOC22","DOC23","DOC24","DM1","DM2","DM3","DM4","DM5","AT1","EN1","EN2","EN3","EN4","JD1","JD2","JD3","LD1","LD2","LD3","LD4","LD5","LD6","LD7","LD8","RC1","RC2","RC4","RC5","RC6","RC7","RC_PULL","RC_STREAM","RC_PUSH","RC_PUSH_NO_AR","RC_WEBRTC_PEER","RC_COUCHDB_1","RC_COUCHDB_2","RC_OUTDATED","RC_UNAUTHORIZED","RC_FORBIDDEN","SC1","SC2","SC3","SC4","SC6","SC7","SC8","SC10","SC11","SC13","SC14","SC15","SC16","SC17","SC18","SC19","SC20","SC21","SC22","SC23","SC24","SC25","SC26","SC27","SC28","SC29","SC30","SC32","SC33","SC34","SC35","SC36","SC37","SC38","SC39","SC40","SC41","VD1","VD2","S1","GQL1","GQL3","CRDT1","CRDT2","CRDT3","SNH"],"sources":["../../../../src/plugins/dev-mode/error-messages.ts"],"sourcesContent":["/**\n * this plugin adds the error-messages\n * without it, only error-codes will be shown\n * This is mainly because error-string are hard to compress and we need a smaller build\n */\n\n\nexport const ERROR_MESSAGES = {\n // util.js / config\n UT1: 'given name 
is no string or empty',\n UT2: `collection- and database-names must match the regex to be compatible with couchdb databases.\n See https://neighbourhood.ie/blog/2020/10/13/everything-you-need-to-know-about-couchdb-database-names/\n info: if your database-name specifies a folder, the name must contain the slash-char '/' or '\\\\'`,\n UT3: 'replication-direction must either be push or pull or both. But not none',\n UT4: 'given leveldown is no valid adapter',\n UT5: 'keyCompression is set to true in the schema but no key-compression handler is used in the storage',\n UT6: 'schema contains encrypted fields but no encryption handler is used in the storage',\n UT7: 'attachments.compression is enabled but no attachment-compression plugin is used',\n\n // plugins\n PL1: 'Given plugin is not RxDB plugin.',\n // removed in 14.0.0 - PouchDB RxStorage was removed - PL2: 'You tried importing a RxDB plugin to pouchdb. Use addRxPlugin() instead.',\n PL3: 'A plugin with the same name was already added but it was not the exact same JavaScript object',\n\n // pouch-db.js\n // removed in 12.0.0 - P1: 'PouchDB.getBatch: limit must be > 2',\n P2: 'bulkWrite() cannot be called with an empty array',\n // removed in 12.0.0 - P3: 'bulkAddRevisions cannot be called with an empty array',\n\n // rx-query\n QU1: 'RxQuery._execOverDatabase(): op not known',\n // removed in 9.0.0 - QU2: 'limit() must get a number',\n // removed in 9.0.0 - QU3: 'skip() must get a number',\n QU4: 'RxQuery.regex(): You cannot use .regex() on the primary field',\n QU5: 'RxQuery.sort(): does not work because key is not defined in the schema',\n QU6: 'RxQuery.limit(): cannot be called on .findOne()',\n // removed in 12.0.0 (should by ensured by the typings) - QU7: 'query must be an object',\n // removed in 12.0.0 (should by ensured by the typings) - QU8: 'query cannot be an array',\n QU9: 'throwIfMissing can only be used in findOne queries',\n QU10: 'result empty and throwIfMissing: true',\n QU11: 'RxQuery: no valid 
query params given',\n QU12: 'Given index is not in schema',\n QU13: 'A top level field of the query is not included in the schema',\n QU14: 'Running a count() query in slow mode is now allowed. Either run a count() query with a selector that fully matches an index ' +\n 'or set allowSlowCount=true when calling the createRxDatabase',\n QU15: 'For count queries it is not allowed to use skip or limit',\n QU16: '$regex queries must be defined by a string, not an RegExp instance. ' +\n 'This is because RegExp objects cannot be JSON stringified and also they are mutable which would be dangerous',\n\n // mquery.js\n MQ1: 'path must be a string or object',\n MQ2: 'Invalid argument',\n MQ3: 'Invalid sort() argument. Must be a string, object, or array',\n MQ4: 'Invalid argument. Expected instanceof mquery or plain object',\n MQ5: 'method must be used after where() when called with these arguments',\n MQ6: 'Can\\'t mix sort syntaxes. Use either array or object | .sort([[\\'field\\', 1], [\\'test\\', -1]]) | .sort({ field: 1, test: -1 })',\n MQ7: 'Invalid sort value',\n MQ8: 'Can\\'t mix sort syntaxes. Use either array or object',\n\n // rx-database\n DB1: 'RxDocument.prepare(): another instance on this adapter has a different password',\n DB2: 'RxDatabase.addCollections(): collection-names cannot start with underscore _',\n DB3: 'RxDatabase.addCollections(): collection already exists. use myDatabase[collectionName] to get it',\n DB4: 'RxDatabase.addCollections(): schema is missing',\n DB5: 'RxDatabase.addCollections(): collection-name not allowed',\n DB6: 'RxDatabase.addCollections(): another instance created this collection with a different schema. 
Read this https://rxdb.info/questions-answers.html?console=qa#cant-change-the-schema ',\n // removed in 13.0.0 (now part of the encryption plugin) DB7: 'RxDatabase.addCollections(): schema encrypted but no password given',\n DB8: 'createRxDatabase(): A RxDatabase with the same name and adapter already exists.\\n' +\n 'Make sure to use this combination only once or set ignoreDuplicate to true if you do this intentional-\\n' +\n 'This often happens in react projects with hot reload that reloads the code without reloading the process.',\n // removed in 14.0.0 - PouchDB RxStorage is removed - DB9: 'createRxDatabase(): Adapter not added. Use addPouchPlugin(require(\\'pouchdb-adapter-[adaptername]\\'));',\n // removed in 14.0.0 - PouchDB RxStorage is removed DB10: 'createRxDatabase(): To use leveldown-adapters, you have to add the leveldb-plugin. Use addPouchPlugin(require(\\'pouchdb-adapter-leveldb\\'));',\n DB11: 'createRxDatabase(): Invalid db-name, folder-paths must not have an ending slash',\n DB12: 'RxDatabase.addCollections(): could not write to internal store',\n DB13: 'createRxDatabase(): Invalid db-name or collection name, name contains the dollar sign',\n DB14: 'no custom reactivity factory added on database creation',\n\n // rx-collection\n COL1: 'RxDocument.insert() You cannot insert an existing document',\n COL2: 'RxCollection.insert() fieldName ._id can only be used as primaryKey',\n COL3: 'RxCollection.upsert() does not work without primary',\n COL4: 'RxCollection.incrementalUpsert() does not work without primary',\n COL5: 'RxCollection.find() if you want to search by _id, use .findOne(_id)',\n COL6: 'RxCollection.findOne() needs a queryObject or string',\n COL7: 'hook must be a function',\n COL8: 'hooks-when not known',\n COL9: 'RxCollection.addHook() hook-name not known',\n COL10: 'RxCollection .postCreate-hooks cannot be async',\n COL11: 'migrationStrategies must be an object',\n COL12: 'A migrationStrategy is missing or too much',\n COL13: 
'migrationStrategy must be a function',\n COL14: 'given static method-name is not a string',\n COL15: 'static method-names cannot start with underscore _',\n COL16: 'given static method is not a function',\n COL17: 'RxCollection.ORM: statics-name not allowed',\n COL18: 'collection-method not allowed because fieldname is in the schema',\n // removed in 14.0.0, use CONFLICT instead - COL19: 'Document update conflict. When changing a document you must work on the previous revision',\n COL20: 'Storage write error',\n COL21: 'The RxCollection is destroyed or removed already, either from this JavaScript realm or from another, like a browser tab',\n CONFLICT: 'Document update conflict. When changing a document you must work on the previous revision',\n\n // rx-document.js\n DOC1: 'RxDocument.get$ cannot get observable of in-array fields because order cannot be guessed',\n DOC2: 'cannot observe primary path',\n DOC3: 'final fields cannot be observed',\n DOC4: 'RxDocument.get$ cannot observe a non-existed field',\n DOC5: 'RxDocument.populate() cannot populate a non-existed field',\n DOC6: 'RxDocument.populate() cannot populate because path has no ref',\n DOC7: 'RxDocument.populate() ref-collection not in database',\n DOC8: 'RxDocument.set(): primary-key cannot be modified',\n DOC9: 'final fields cannot be modified',\n DOC10: 'RxDocument.set(): cannot set childpath when rootPath not selected',\n DOC11: 'RxDocument.save(): can\\'t save deleted document',\n // removed in 10.0.0 DOC12: 'RxDocument.save(): error',\n DOC13: 'RxDocument.remove(): Document is already deleted',\n DOC14: 'RxDocument.destroy() does not exist',\n DOC15: 'query cannot be an array',\n DOC16: 'Since version 8.0.0 RxDocument.set() can only be called on temporary RxDocuments',\n DOC17: 'Since version 8.0.0 RxDocument.save() can only be called on non-temporary documents',\n DOC18: 'Document property for composed primary key is missing',\n DOC19: 'Value of primary key(s) cannot be changed',\n DOC20: 
'PrimaryKey missing',\n DOC21: 'PrimaryKey must be equal to PrimaryKey.trim(). It cannot start or end with a whitespace',\n DOC22: 'PrimaryKey must not contain a linebreak',\n DOC23: 'PrimaryKey must not contain a double-quote [\"]',\n DOC24: 'Given document data could not be structured cloned. This happens if you pass non-plain-json data into it, like a Date() object or a Function. ' +\n 'In vue.js this happens if you use ref() on the document data which transforms it into a Proxy object.',\n\n // data-migrator.js\n DM1: 'migrate() Migration has already run',\n DM2: 'migration of document failed final document does not match final schema',\n DM3: 'migration already running',\n DM4: 'Migration errored',\n DM5: 'Cannot open database state with newer RxDB version. You have to migrate your database state first. See https://rxdb.info/migration-storage.html?console=storage ',\n\n // plugins/attachments.js\n AT1: 'to use attachments, please define this in your schema',\n\n // plugins/encryption-crypto-js.js\n EN1: 'password is not valid',\n EN2: 'validatePassword: min-length of password not complied',\n EN3: 'Schema contains encrypted properties but no password is given',\n EN4: 'Password not valid',\n\n // plugins/json-dump.js\n JD1: 'You must create the collections before you can import their data',\n JD2: 'RxCollection.importJSON(): the imported json relies on a different schema',\n JD3: 'RxCollection.importJSON(): json.passwordHash does not match the own',\n\n // plugins/leader-election.js\n\n // plugins/local-documents.js\n LD1: 'RxDocument.allAttachments$ can\\'t use attachments on local documents',\n LD2: 'RxDocument.get(): objPath must be a string',\n LD3: 'RxDocument.get$ cannot get observable of in-array fields because order cannot be guessed',\n LD4: 'cannot observe primary path',\n LD5: 'RxDocument.set() id cannot be modified',\n LD6: 'LocalDocument: Function is not usable on local documents',\n LD7: 'Local document already exists',\n LD8: 'localDocuments not 
activated. Set localDocuments=true on creation, when you want to store local documents on the RxDatabase or RxCollection.',\n\n // plugins/replication.js\n RC1: 'Replication: already added',\n RC2: 'replicateCouchDB() query must be from the same RxCollection',\n // removed in 14.0.0 - PouchDB RxStorage is removed RC3: 'RxCollection.syncCouchDB() Do not use a collection\\'s pouchdb as remote, use the collection instead',\n RC4: 'RxCouchDBReplicationState.awaitInitialReplication() cannot await initial replication when live: true',\n RC5: 'RxCouchDBReplicationState.awaitInitialReplication() cannot await initial replication if multiInstance because the replication might run on another instance',\n RC6: 'syncFirestore() serverTimestampField MUST NOT be part of the collections schema and MUST NOT be nested.',\n RC7: 'SimplePeer requires to have process.nextTick() polyfilled, see https://rxdb.info/replication-webrtc.html?console=webrtc ',\n RC_PULL: 'RxReplication pull handler threw an error - see .errors for more details',\n RC_STREAM: 'RxReplication pull stream$ threw an error - see .errors for more details',\n RC_PUSH: 'RxReplication push handler threw an error - see .errors for more details',\n RC_PUSH_NO_AR: 'RxReplication push handler did not return an array with the conflicts',\n RC_WEBRTC_PEER: 'RxReplication WebRTC Peer has error',\n RC_COUCHDB_1: 'replicateCouchDB() url must end with a slash like \\'https://example.com/mydatabase/\\'',\n RC_COUCHDB_2: 'replicateCouchDB() did not get valid result with rows.',\n RC_OUTDATED: 'Outdated client, update required. Replication was canceled',\n RC_UNAUTHORIZED: 'Unauthorized client, update the replicationState.headers to set correct auth data',\n RC_FORBIDDEN: 'Client behaves wrong so the replication was canceled. 
Mostly happens if the client tries to write data that it is not allowed to',\n\n // plugins/dev-mode/check-schema.js\n SC1: 'fieldnames do not match the regex',\n SC2: 'SchemaCheck: name \\'item\\' reserved for array-fields',\n SC3: 'SchemaCheck: fieldname has a ref-array but items-type is not string',\n SC4: 'SchemaCheck: fieldname has a ref but is not type string, [string,null] or array',\n SC6: 'SchemaCheck: primary can only be defined at top-level',\n SC7: 'SchemaCheck: default-values can only be defined at top-level',\n SC8: 'SchemaCheck: first level-fields cannot start with underscore _',\n SC10: 'SchemaCheck: schema defines ._rev, this will be done automatically',\n SC11: 'SchemaCheck: schema needs a number >=0 as version',\n // removed in 10.0.0 - SC12: 'SchemaCheck: primary can only be defined once',\n SC13: 'SchemaCheck: primary is always index, do not declare it as index',\n SC14: 'SchemaCheck: primary is always unique, do not declare it as index',\n SC15: 'SchemaCheck: primary cannot be encrypted',\n SC16: 'SchemaCheck: primary must have type: string',\n SC17: 'SchemaCheck: top-level fieldname is not allowed',\n SC18: 'SchemaCheck: indexes must be an array',\n SC19: 'SchemaCheck: indexes must contain strings or arrays of strings',\n SC20: 'SchemaCheck: indexes.array must contain strings',\n SC21: 'SchemaCheck: given index is not defined in schema',\n SC22: 'SchemaCheck: given indexKey is not type:string',\n SC23: 'SchemaCheck: fieldname is not allowed',\n SC24: 'SchemaCheck: required fields must be set via array. 
See https://spacetelescope.github.io/understanding-json-schema/reference/object.html#required',\n SC25: 'SchemaCheck: compoundIndexes needs to be specified in the indexes field',\n SC26: 'SchemaCheck: indexes needs to be specified at collection schema level',\n SC27: 'SchemaCheck: encrypted fields need to be specified at collection schema level',\n SC28: 'SchemaCheck: encrypted fields is not defined in the schema',\n SC29: 'SchemaCheck: missing object key \\'properties\\'',\n SC30: 'SchemaCheck: primaryKey is required',\n SC32: 'SchemaCheck: primary field must have the type string/number/integer',\n SC33: 'SchemaCheck: used primary key is not a property in the schema',\n SC34: 'Fields of type string that are used in an index, must have set the maxLength attribute in the schema',\n SC35: 'Fields of type number/integer that are used in an index, must have set the multipleOf attribute in the schema',\n SC36: 'A field of this type cannot be used as index',\n SC37: 'Fields of type number that are used in an index, must have set the minimum and maximum attribute in the schema',\n SC38: 'Fields of type boolean that are used in an index, must be required in the schema',\n SC39: 'The primary key must have the maxLength attribute set',\n SC40: '$ref fields in the schema are not allowed. RxDB cannot resolve related schemas because it would have a negative performance impact.' +\n 'It would have to run http requests on runtime. $ref fields should be resolved during build time.',\n SC41: 'minimum, maximum and maxLength values for indexes must be real numbers, not Infinity or -Infinity',\n\n // plugins/dev-mode\n // removed in 13.9.0, use PL3 instead - DEV1: 'dev-mode added multiple times',\n\n // plugins/validate.js\n VD1: 'Sub-schema not found, does the schemaPath exists in your schema?',\n VD2: 'object does not match schema',\n\n // plugins/in-memory.js\n // removed in 14.0.0 - PouchDB RxStorage is removed IM1: 'InMemory: Memory-Adapter must be added. 
Use addPouchPlugin(require(\\'pouchdb-adapter-memory\\'));',\n // removed in 14.0.0 - PouchDB RxStorage is removed IM2: 'inMemoryCollection.sync(): Do not replicate with the in-memory instance. Replicate with the parent instead',\n\n // plugins/server.js\n S1: 'You cannot create collections after calling RxDatabase.server()',\n\n // plugins/replication-graphql.js\n GQL1: 'GraphQL replication: cannot find sub schema by key',\n // removed in 13.0.0, use RC_PULL instead - GQL2: 'GraphQL replication: unknown errors occurred in replication pull - see innerErrors for more details',\n GQL3: 'GraphQL replication: pull returns more documents then batchSize',\n // removed in 13.0.0, use RC_PUSH instead - GQL4: 'GraphQL replication: unknown errors occurred in replication push - see innerErrors for more details',\n\n // plugins/crdt/\n CRDT1: 'CRDT operations cannot be used because the crdt options are not set in the schema.',\n CRDT2: 'RxDocument.incrementalModify() cannot be used when CRDTs are activated.',\n CRDT3: 'To use CRDTs you MUST NOT set a conflictHandler because the default CRDT conflict handler must be used',\n\n // plugins/storage-dexie/\n // removed in 15.0.0, added boolean index support to dexie storage - DXE1: 'The dexie.js RxStorage does not support boolean indexes, see https://rxdb.info/rx-storage-dexie.html#boolean-index',\n\n /**\n * Should never be thrown, use this for\n * null checks etc. 
so you do not have to increase the\n * build size with error message strings.\n */\n SNH: 'This should never happen'\n};\n"],"mappings":";;;;;;AAAA;AACA;AACA;AACA;AACA;;AAGO,IAAMA,cAAc,GAAAC,OAAA,CAAAD,cAAA,GAAG;EAC1B;EACAE,GAAG,EAAE,kCAAkC;EACvCC,GAAG,kTAE8F;EACjGC,GAAG,EAAE,yEAAyE;EAC9EC,GAAG,EAAE,qCAAqC;EAC1CC,GAAG,EAAE,mGAAmG;EACxGC,GAAG,EAAE,mFAAmF;EACxFC,GAAG,EAAE,iFAAiF;EAEtF;EACAC,GAAG,EAAE,kCAAkC;EACvC;EACAC,GAAG,EAAE,+FAA+F;EAEpG;EACA;EACAC,EAAE,EAAE,kDAAkD;EACtD;;EAEA;EACAC,GAAG,EAAE,2CAA2C;EAChD;EACA;EACAC,GAAG,EAAE,+DAA+D;EACpEC,GAAG,EAAE,wEAAwE;EAC7EC,GAAG,EAAE,iDAAiD;EACtD;EACA;EACAC,GAAG,EAAE,oDAAoD;EACzDC,IAAI,EAAE,uCAAuC;EAC7CC,IAAI,EAAE,sCAAsC;EAC5CC,IAAI,EAAE,8BAA8B;EACpCC,IAAI,EAAE,8DAA8D;EACpEC,IAAI,EAAE,8HAA8H,GAChI,8DAA8D;EAClEC,IAAI,EAAE,0DAA0D;EAChEC,IAAI,EAAE,sEAAsE,GACxE,8GAA8G;EAElH;EACAC,GAAG,EAAE,iCAAiC;EACtCC,GAAG,EAAE,kBAAkB;EACvBC,GAAG,EAAE,6DAA6D;EAClEC,GAAG,EAAE,8DAA8D;EACnEC,GAAG,EAAE,oEAAoE;EACzEC,GAAG,EAAE,gIAAgI;EACrIC,GAAG,EAAE,oBAAoB;EACzBC,GAAG,EAAE,sDAAsD;EAE3D;EACAC,GAAG,EAAE,iFAAiF;EACtFC,GAAG,EAAE,8EAA8E;EACnFC,GAAG,EAAE,kGAAkG;EACvGC,GAAG,EAAE,gDAAgD;EACrDC,GAAG,EAAE,0DAA0D;EAC/DC,GAAG,EAAE,sLAAsL;EAC3L;EACAC,GAAG,EAAE,mFAAmF,GACpF,0GAA0G,GAC1G,2GAA2G;EAC/G;EACA;EACAC,IAAI,EAAE,iFAAiF;EACvFC,IAAI,EAAE,gEAAgE;EACtEC,IAAI,EAAE,uFAAuF;EAC7FC,IAAI,EAAE,yDAAyD;EAE/D;EACAC,IAAI,EAAE,4DAA4D;EAClEC,IAAI,EAAE,qEAAqE;EAC3EC,IAAI,EAAE,qDAAqD;EAC3DC,IAAI,EAAE,gEAAgE;EACtEC,IAAI,EAAE,qEAAqE;EAC3EC,IAAI,EAAE,sDAAsD;EAC5DC,IAAI,EAAE,yBAAyB;EAC/BC,IAAI,EAAE,sBAAsB;EAC5BC,IAAI,EAAE,4CAA4C;EAClDC,KAAK,EAAE,gDAAgD;EACvDC,KAAK,EAAE,uCAAuC;EAC9CC,KAAK,EAAE,4CAA4C;EACnDC,KAAK,EAAE,sCAAsC;EAC7CC,KAAK,EAAE,0CAA0C;EACjDC,KAAK,EAAE,oDAAoD;EAC3DC,KAAK,EAAE,uCAAuC;EAC9CC,KAAK,EAAE,4CAA4C;EACnDC,KAAK,EAAE,kEAAkE;EACzE;EACAC,KAAK,EAAE,qBAAqB;EAC5BC,KAAK,EAAE,yHAAyH;EAChIC,QAAQ,EAAE,2FAA2F;EAErG;EACAC,IAAI,EAAE,0FAA0F;EAChGC,IAAI,EAAE,6BAA6B;EACnCC,IAAI,EAAE,iCAAiC;EACvCC,IAAI,EAAE,oDAAoD;EAC1DC,IAAI,EAAE,2DAA2D;EACjEC,IAAI,EAAE,+DAA+D;EACrEC,IAAI,E
AAE,sDAAsD;EAC5DC,IAAI,EAAE,kDAAkD;EACxDC,IAAI,EAAE,iCAAiC;EACvCC,KAAK,EAAE,mEAAmE;EAC1EC,KAAK,EAAE,iDAAiD;EACxD;EACAC,KAAK,EAAE,kDAAkD;EACzDC,KAAK,EAAE,qCAAqC;EAC5CC,KAAK,EAAE,0BAA0B;EACjCC,KAAK,EAAE,kFAAkF;EACzFC,KAAK,EAAE,qFAAqF;EAC5FC,KAAK,EAAE,uDAAuD;EAC9DC,KAAK,EAAE,2CAA2C;EAClDC,KAAK,EAAE,oBAAoB;EAC3BC,KAAK,EAAE,yFAAyF;EAChGC,KAAK,EAAE,yCAAyC;EAChDC,KAAK,EAAE,gDAAgD;EACvDC,KAAK,EAAE,gJAAgJ,GACnJ,uGAAuG;EAE3G;EACAC,GAAG,EAAE,qCAAqC;EAC1CC,GAAG,EAAE,yEAAyE;EAC9EC,GAAG,EAAE,2BAA2B;EAChCC,GAAG,EAAE,mBAAmB;EACxBC,GAAG,EAAE,kKAAkK;EAEvK;EACAC,GAAG,EAAE,uDAAuD;EAE5D;EACAC,GAAG,EAAE,uBAAuB;EAC5BC,GAAG,EAAE,uDAAuD;EAC5DC,GAAG,EAAE,+DAA+D;EACpEC,GAAG,EAAE,oBAAoB;EAEzB;EACAC,GAAG,EAAE,kEAAkE;EACvEC,GAAG,EAAE,2EAA2E;EAChFC,GAAG,EAAE,qEAAqE;EAE1E;;EAEA;EACAC,GAAG,EAAE,sEAAsE;EAC3EC,GAAG,EAAE,4CAA4C;EACjDC,GAAG,EAAE,0FAA0F;EAC/FC,GAAG,EAAE,6BAA6B;EAClCC,GAAG,EAAE,wCAAwC;EAC7CC,GAAG,EAAE,0DAA0D;EAC/DC,GAAG,EAAE,+BAA+B;EACpCC,GAAG,EAAE,8IAA8I;EAEnJ;EACAC,GAAG,EAAE,4BAA4B;EACjCC,GAAG,EAAE,6DAA6D;EAClE;EACAC,GAAG,EAAE,sGAAsG;EAC3GC,GAAG,EAAE,6JAA6J;EAClKC,GAAG,EAAE,yGAAyG;EAC9GC,GAAG,EAAE,0HAA0H;EAC/HC,OAAO,EAAE,0EAA0E;EACnFC,SAAS,EAAE,0EAA0E;EACrFC,OAAO,EAAE,0EAA0E;EACnFC,aAAa,EAAE,uEAAuE;EACtFC,cAAc,EAAE,qCAAqC;EACrDC,YAAY,EAAE,uFAAuF;EACrGC,YAAY,EAAE,wDAAwD;EACtEC,WAAW,EAAE,4DAA4D;EACzEC,eAAe,EAAE,mFAAmF;EACpGC,YAAY,EAAE,kIAAkI;EAEhJ;EACAC,GAAG,EAAE,mCAAmC;EACxCC,GAAG,EAAE,sDAAsD;EAC3DC,GAAG,EAAE,qEAAqE;EAC1EC,GAAG,EAAE,yFAAyF;EAC9FC,GAAG,EAAE,uDAAuD;EAC5DC,GAAG,EAAE,8DAA8D;EACnEC,GAAG,EAAE,gEAAgE;EACrEC,IAAI,EAAE,oEAAoE;EAC1EC,IAAI,EAAE,mDAAmD;EACzD;EACAC,IAAI,EAAE,kEAAkE;EACxEC,IAAI,EAAE,mEAAmE;EACzEC,IAAI,EAAE,0CAA0C;EAChDC,IAAI,EAAE,6CAA6C;EACnDC,IAAI,EAAE,iDAAiD;EACvDC,IAAI,EAAE,uCAAuC;EAC7CC,IAAI,EAAE,gEAAgE;EACtEC,IAAI,EAAE,iDAAiD;EACvDC,IAAI,EAAE,mDAAmD;EACzDC,IAAI,EAAE,gDAAgD;EACtDC,IAAI,EAAE,uCAAuC;EAC7CC,IAAI,EAAE,mJAAmJ;EACzJC,IAAI,EAAE,yEAAyE;EAC/EC,IAAI,EAAE,uEAAuE;EAC7EC,IAAI,EAAE,+EAA+E;EACrFC,IAAI,EAAE,4DAA4D;EAClEC,IAAI,EAAE,gDAAgD;EACtDC,IAAI,EAAE,qCAAqC;
EAC3CC,IAAI,EAAE,qEAAqE;EAC3EC,IAAI,EAAE,+DAA+D;EACrEC,IAAI,EAAE,sGAAsG;EAC5GC,IAAI,EAAE,+GAA+G;EACrHC,IAAI,EAAE,8CAA8C;EACpDC,IAAI,EAAE,gHAAgH;EACtHC,IAAI,EAAE,kFAAkF;EACxFC,IAAI,EAAE,uDAAuD;EAC7DC,IAAI,EAAE,qIAAqI,GACvI,kGAAkG;EACtGC,IAAI,EAAE,mGAAmG;EAEzG;EACA;;EAEA;EACAC,GAAG,EAAE,kEAAkE;EACvEC,GAAG,EAAE,8BAA8B;EAEnC;EACA;EACA;;EAEA;EACAC,EAAE,EAAE,iEAAiE;EAErE;EACAC,IAAI,EAAE,oDAAoD;EAC1D;EACAC,IAAI,EAAE,iEAAiE;EACvE;;EAEA;EACAC,KAAK,EAAE,oFAAoF;EAC3FC,KAAK,EAAE,yEAAyE;EAChFC,KAAK,EAAE,wGAAwG;EAE/G;EACA;;EAEA;AACJ;AACA;AACA;AACA;EACIC,GAAG,EAAE;AACT,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/dev-mode/index.js b/dist/cjs/plugins/dev-mode/index.js deleted file mode 100644 index 49c6c102949..00000000000 --- a/dist/cjs/plugins/dev-mode/index.js +++ /dev/null @@ -1,166 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _exportNames = { - disableWarnings: true, - deepFreezeWhenDevMode: true, - DEV_MODE_PLUGIN_NAME: true, - RxDBDevModePlugin: true -}; -exports.RxDBDevModePlugin = exports.DEV_MODE_PLUGIN_NAME = void 0; -exports.deepFreezeWhenDevMode = deepFreezeWhenDevMode; -exports.disableWarnings = disableWarnings; -var _errorMessages = require("./error-messages.js"); -var _checkSchema = require("./check-schema.js"); -Object.keys(_checkSchema).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _checkSchema[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _checkSchema[key]; - } - }); -}); -var _checkOrm = require("./check-orm.js"); -var _checkMigrationStrategies = require("./check-migration-strategies.js"); -var _unallowedProperties = require("./unallowed-properties.js"); -Object.keys(_unallowedProperties).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - 
if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _unallowedProperties[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _unallowedProperties[key]; - } - }); -}); -var _checkQuery = require("./check-query.js"); -Object.keys(_checkQuery).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _checkQuery[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _checkQuery[key]; - } - }); -}); -var _rxError = require("../../rx-error.js"); -var _index = require("../../plugins/utils/index.js"); -var _checkDocument = require("./check-document.js"); -var showDevModeWarning = true; - -/** - * Suppresses the warning message shown in the console, typically invoked once the developer (hello!) - * has acknowledged it. - */ -function disableWarnings() { - showDevModeWarning = false; -} - -/** - * Deep freezes and object when in dev-mode. - * Deep-Freezing has the same performance as deep-cloning, so we only do that in dev-mode. 
- * Also we can ensure the readonly state via typescript - * @link https://developer.mozilla.org/de/docs/Web/JavaScript/Reference/Global_Objects/Object/freeze - */ -function deepFreezeWhenDevMode(obj) { - // direct return if not suitable for deepFreeze() - if (!obj || typeof obj === 'string' || typeof obj === 'number') { - return obj; - } - return (0, _index.deepFreeze)(obj); -} -var DEV_MODE_PLUGIN_NAME = exports.DEV_MODE_PLUGIN_NAME = 'dev-mode'; -var RxDBDevModePlugin = exports.RxDBDevModePlugin = { - name: DEV_MODE_PLUGIN_NAME, - rxdb: true, - init: () => { - if (showDevModeWarning) { - console.warn(['-------------- RxDB dev-mode warning -------------------------------', 'you are seeing this because you use the RxDB dev-mode plugin https://rxdb.info/dev-mode.html?console=dev-mode ', 'This is great in development mode, because it will run many checks to ensure', 'that you use RxDB correct. If you see this in production mode,', 'you did something wrong because the dev-mode plugin will decrease the performance.', '', '🤗 Hint: To get the most out of RxDB, check out the Premium Plugins', 'to get access to faster storages and more professional features: https://rxdb.info/premium?console=dev-mode ', '', 'You can disable this warning by calling disableWarnings() from the dev-mode plugin.', - // '', - // 'Also take part in the RxDB User Survey: https://rxdb.info/survey.html', - '---------------------------------------------------------------------'].join('\n')); - } - }, - overwritable: { - isDevMode() { - return true; - }, - deepFreezeWhenDevMode, - tunnelErrorMessage(code) { - if (!_errorMessages.ERROR_MESSAGES[code]) { - console.error('RxDB: Error-Code not known: ' + code); - throw new Error('Error-Code ' + code + ' not known, contact the maintainer'); - } - return _errorMessages.ERROR_MESSAGES[code]; - } - }, - hooks: { - preCreateRxSchema: { - after: _checkSchema.checkSchema - }, - preCreateRxDatabase: { - after: function (args) { - (0, 
_unallowedProperties.ensureDatabaseNameIsValid)(args); - } - }, - preCreateRxCollection: { - after: function (args) { - (0, _unallowedProperties.ensureCollectionNameValid)(args); - (0, _checkOrm.checkOrmDocumentMethods)(args.schema, args.methods); - if (args.name.charAt(0) === '_') { - throw (0, _rxError.newRxError)('DB2', { - name: args.name - }); - } - if (!args.schema) { - throw (0, _rxError.newRxError)('DB4', { - name: args.name, - args - }); - } - } - }, - createRxDocument: { - before: function (doc) { - (0, _checkDocument.ensurePrimaryKeyValid)(doc.primary, doc.toJSON(true)); - } - }, - preCreateRxQuery: { - after: function (args) { - (0, _checkQuery.checkQuery)(args); - } - }, - prePrepareQuery: { - after: args => { - (0, _checkQuery.checkMangoQuery)(args); - } - }, - preStorageWrite: { - before: args => { - (0, _checkDocument.checkWriteRows)(args.storageInstance, args.rows); - } - }, - createRxCollection: { - after: args => { - // check ORM-methods - (0, _checkOrm.checkOrmMethods)(args.creator.statics); - (0, _checkOrm.checkOrmMethods)(args.creator.methods); - (0, _checkOrm.checkOrmMethods)(args.creator.attachments); - - // check migration strategies - if (args.creator.schema && args.creator.migrationStrategies) { - (0, _checkMigrationStrategies.checkMigrationStrategies)(args.creator.schema, args.creator.migrationStrategies); - } - } - } - } -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/dev-mode/index.js.map b/dist/cjs/plugins/dev-mode/index.js.map deleted file mode 100644 index aae3b4969ee..00000000000 --- a/dist/cjs/plugins/dev-mode/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","names":["_errorMessages","require","_checkSchema","Object","keys","forEach","key","prototype","hasOwnProperty","call","_exportNames","exports","defineProperty","enumerable","get","_checkOrm","_checkMigrationStrategies","_unallowedProperties","_checkQuery","_rxError","_index","_checkDocument","showDevModeWarning","disableWarnings","deepFreezeWhenDevMode","obj","deepFreeze","DEV_MODE_PLUGIN_NAME","RxDBDevModePlugin","name","rxdb","init","console","warn","join","overwritable","isDevMode","tunnelErrorMessage","code","ERROR_MESSAGES","error","Error","hooks","preCreateRxSchema","after","checkSchema","preCreateRxDatabase","args","ensureDatabaseNameIsValid","preCreateRxCollection","ensureCollectionNameValid","checkOrmDocumentMethods","schema","methods","charAt","newRxError","createRxDocument","before","doc","ensurePrimaryKeyValid","primary","toJSON","preCreateRxQuery","checkQuery","prePrepareQuery","checkMangoQuery","preStorageWrite","checkWriteRows","storageInstance","rows","createRxCollection","checkOrmMethods","creator","statics","attachments","migrationStrategies","checkMigrationStrategies"],"sources":["../../../../src/plugins/dev-mode/index.ts"],"sourcesContent":["import type {\n RxPlugin,\n RxCollectionCreator,\n RxDatabaseCreator,\n RxErrorKey,\n RxDocument\n} from '../../types/index.d.ts';\n\nimport {\n ERROR_MESSAGES\n} from './error-messages.ts';\nimport {\n checkSchema\n} from './check-schema.ts';\nimport {\n checkOrmDocumentMethods,\n checkOrmMethods\n} from './check-orm.ts';\nimport { checkMigrationStrategies } from './check-migration-strategies.ts';\nimport {\n ensureCollectionNameValid,\n ensureDatabaseNameIsValid\n} from './unallowed-properties.ts';\nimport { checkMangoQuery, checkQuery } from './check-query.ts';\nimport { newRxError } from '../../rx-error.ts';\nimport { DeepReadonly } from '../../types/util.ts';\nimport { deepFreeze } from '../../plugins/utils/index.ts';\nimport { checkWriteRows, ensurePrimaryKeyValid } 
from './check-document.ts';\n\nexport * from './check-schema.ts';\nexport * from './unallowed-properties.ts';\nexport * from './check-query.ts';\n\nlet showDevModeWarning = true;\n\n/**\n * Suppresses the warning message shown in the console, typically invoked once the developer (hello!) \n * has acknowledged it.\n */\nexport function disableWarnings() {\n showDevModeWarning = false;\n}\n\n/**\n * Deep freezes and object when in dev-mode.\n * Deep-Freezing has the same performance as deep-cloning, so we only do that in dev-mode.\n * Also we can ensure the readonly state via typescript\n * @link https://developer.mozilla.org/de/docs/Web/JavaScript/Reference/Global_Objects/Object/freeze\n */\nexport function deepFreezeWhenDevMode(obj: T): DeepReadonly {\n // direct return if not suitable for deepFreeze()\n if (\n !obj ||\n typeof obj === 'string' ||\n typeof obj === 'number'\n ) {\n return obj as any;\n }\n\n return deepFreeze(obj) as any;\n}\n\n\nexport const DEV_MODE_PLUGIN_NAME = 'dev-mode';\nexport const RxDBDevModePlugin: RxPlugin = {\n name: DEV_MODE_PLUGIN_NAME,\n rxdb: true,\n init: () => {\n if (showDevModeWarning) {\n console.warn(\n [\n '-------------- RxDB dev-mode warning -------------------------------',\n 'you are seeing this because you use the RxDB dev-mode plugin https://rxdb.info/dev-mode.html?console=dev-mode ',\n 'This is great in development mode, because it will run many checks to ensure',\n 'that you use RxDB correct. 
If you see this in production mode,',\n 'you did something wrong because the dev-mode plugin will decrease the performance.',\n '',\n '🤗 Hint: To get the most out of RxDB, check out the Premium Plugins',\n 'to get access to faster storages and more professional features: https://rxdb.info/premium?console=dev-mode ',\n '',\n 'You can disable this warning by calling disableWarnings() from the dev-mode plugin.',\n // '',\n // 'Also take part in the RxDB User Survey: https://rxdb.info/survey.html',\n '---------------------------------------------------------------------'\n ].join('\\n')\n );\n }\n },\n overwritable: {\n isDevMode() {\n return true;\n },\n deepFreezeWhenDevMode,\n tunnelErrorMessage(code: RxErrorKey) {\n if (!ERROR_MESSAGES[code]) {\n console.error('RxDB: Error-Code not known: ' + code);\n throw new Error('Error-Code ' + code + ' not known, contact the maintainer');\n }\n return ERROR_MESSAGES[code];\n }\n },\n hooks: {\n preCreateRxSchema: {\n after: checkSchema\n },\n preCreateRxDatabase: {\n after: function (args: RxDatabaseCreator) {\n ensureDatabaseNameIsValid(args);\n }\n },\n preCreateRxCollection: {\n after: function (args: RxCollectionCreator & { name: string; }) {\n ensureCollectionNameValid(args);\n checkOrmDocumentMethods(args.schema as any, args.methods);\n if (args.name.charAt(0) === '_') {\n throw newRxError('DB2', {\n name: args.name\n });\n }\n if (!args.schema) {\n throw newRxError('DB4', {\n name: args.name,\n args\n });\n }\n }\n },\n createRxDocument: {\n before: function (doc: RxDocument) {\n ensurePrimaryKeyValid(doc.primary, doc.toJSON(true));\n }\n },\n preCreateRxQuery: {\n after: function (args) {\n checkQuery(args);\n }\n },\n prePrepareQuery: {\n after: (args) => {\n checkMangoQuery(args);\n }\n },\n preStorageWrite: {\n before: (args) => {\n checkWriteRows(args.storageInstance, args.rows);\n }\n },\n createRxCollection: {\n after: (args) => {\n // check ORM-methods\n checkOrmMethods(args.creator.statics);\n 
checkOrmMethods(args.creator.methods);\n checkOrmMethods(args.creator.attachments);\n\n // check migration strategies\n if (args.creator.schema && args.creator.migrationStrategies) {\n checkMigrationStrategies(\n args.creator.schema,\n args.creator.migrationStrategies\n );\n }\n }\n }\n }\n};\n"],"mappings":";;;;;;;;;;;;;;AAQA,IAAAA,cAAA,GAAAC,OAAA;AAGA,IAAAC,YAAA,GAAAD,OAAA;AAkBAE,MAAA,CAAAC,IAAA,CAAAF,YAAA,EAAAG,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAJ,YAAA,CAAAI,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAZ,YAAA,CAAAI,GAAA;IAAA;EAAA;AAAA;AAfA,IAAAS,SAAA,GAAAd,OAAA;AAIA,IAAAe,yBAAA,GAAAf,OAAA;AACA,IAAAgB,oBAAA,GAAAhB,OAAA;AAWAE,MAAA,CAAAC,IAAA,CAAAa,oBAAA,EAAAZ,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAW,oBAAA,CAAAX,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAG,oBAAA,CAAAX,GAAA;IAAA;EAAA;AAAA;AAPA,IAAAY,WAAA,GAAAjB,OAAA;AAQAE,MAAA,CAAAC,IAAA,CAAAc,WAAA,EAAAb,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAY,WAAA,CAAAZ,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAI,WAAA,CAAAZ,GAAA;IAAA;EAAA;AAAA;AAPA,IAAAa,QAAA,GAAAlB,OAAA;AAEA,IAAAmB,MAAA,GAAAnB,OAAA;AACA,IAAAoB,cAAA,GAAApB,OAAA;AAMA,IAAIqB,kBAAkB,GAAG,IAAI;;AAE7B;AACA;AACA;AACA;AACO,SAASC,eAAeA,CAAA,EAAG;EAC9BD,kBAAkB,GAAG,KAAK;AAC9B;;AAEA;AACA;AACA;AACA;AACA;AACA;AACO,SAASE,qBAAqBA,CAAIC,GAAM,EAAmB;EAC9D;EACA,IACI,CAACA,GAAG,IACJ,OAAOA,GAAG,KAAK,QAAQ,IACvB,OAAOA,GAAG,KAAK,QAAQ,EACzB;IACE,OAAOA,GAAG;EACd;EAEA,OAAO,IAAAC,iBAAU,EAACD,GAAG,CAAC;AAC1B;AAGO,IAAME,oBAAoB,GAAAhB,OAAA,CAAAgB,oBAAA,GAAG,UAAU;AACvC,IA
AMC,iBAA2B,GAAAjB,OAAA,CAAAiB,iBAAA,GAAG;EACvCC,IAAI,EAAEF,oBAAoB;EAC1BG,IAAI,EAAE,IAAI;EACVC,IAAI,EAAEA,CAAA,KAAM;IACR,IAAIT,kBAAkB,EAAE;MACpBU,OAAO,CAACC,IAAI,CACZ,CACI,sEAAsE,EACtE,gHAAgH,EAChH,8EAA8E,EAC9E,gEAAgE,EAChE,oFAAoF,EACpF,EAAE,EACF,qEAAqE,EACrE,8GAA8G,EAC9G,EAAE,EACF,qFAAqF;MACrF;MACA;MACA,uEAAuE,CAC1E,CAACC,IAAI,CAAC,IAAI,CACf,CAAC;IACD;EACJ,CAAC;EACDC,YAAY,EAAE;IACVC,SAASA,CAAA,EAAG;MACR,OAAO,IAAI;IACf,CAAC;IACDZ,qBAAqB;IACrBa,kBAAkBA,CAACC,IAAgB,EAAE;MACjC,IAAI,CAACC,6BAAc,CAACD,IAAI,CAAC,EAAE;QACvBN,OAAO,CAACQ,KAAK,CAAC,8BAA8B,GAAGF,IAAI,CAAC;QACpD,MAAM,IAAIG,KAAK,CAAC,aAAa,GAAGH,IAAI,GAAG,oCAAoC,CAAC;MAChF;MACA,OAAOC,6BAAc,CAACD,IAAI,CAAC;IAC/B;EACJ,CAAC;EACDI,KAAK,EAAE;IACHC,iBAAiB,EAAE;MACfC,KAAK,EAAEC;IACX,CAAC;IACDC,mBAAmB,EAAE;MACjBF,KAAK,EAAE,SAAAA,CAAUG,IAAiC,EAAE;QAChD,IAAAC,8CAAyB,EAACD,IAAI,CAAC;MACnC;IACJ,CAAC;IACDE,qBAAqB,EAAE;MACnBL,KAAK,EAAE,SAAAA,CAAUG,IAAkD,EAAE;QACjE,IAAAG,8CAAyB,EAACH,IAAI,CAAC;QAC/B,IAAAI,iCAAuB,EAACJ,IAAI,CAACK,MAAM,EAASL,IAAI,CAACM,OAAO,CAAC;QACzD,IAAIN,IAAI,CAAClB,IAAI,CAACyB,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;UAC7B,MAAM,IAAAC,mBAAU,EAAC,KAAK,EAAE;YACpB1B,IAAI,EAAEkB,IAAI,CAAClB;UACf,CAAC,CAAC;QACN;QACA,IAAI,CAACkB,IAAI,CAACK,MAAM,EAAE;UACd,MAAM,IAAAG,mBAAU,EAAC,KAAK,EAAE;YACpB1B,IAAI,EAAEkB,IAAI,CAAClB,IAAI;YACfkB;UACJ,CAAC,CAAC;QACN;MACJ;IACJ,CAAC;IACDS,gBAAgB,EAAE;MACdC,MAAM,EAAE,SAAAA,CAAUC,GAAe,EAAE;QAC/B,IAAAC,oCAAqB,EAACD,GAAG,CAACE,OAAO,EAAEF,GAAG,CAACG,MAAM,CAAC,IAAI,CAAC,CAAC;MACxD;IACJ,CAAC;IACDC,gBAAgB,EAAE;MACdlB,KAAK,EAAE,SAAAA,CAAUG,IAAI,EAAE;QACnB,IAAAgB,sBAAU,EAAChB,IAAI,CAAC;MACpB;IACJ,CAAC;IACDiB,eAAe,EAAE;MACbpB,KAAK,EAAGG,IAAI,IAAK;QACb,IAAAkB,2BAAe,EAAClB,IAAI,CAAC;MACzB;IACJ,CAAC;IACDmB,eAAe,EAAE;MACbT,MAAM,EAAGV,IAAI,IAAK;QACd,IAAAoB,6BAAc,EAACpB,IAAI,CAACqB,eAAe,EAAErB,IAAI,CAACsB,IAAI,CAAC;MACnD;IACJ,CAAC;IACDC,kBAAkB,EAAE;MAChB1B,KAAK,EAAGG,IAAI,IAAK;QACb;QACA,IAAAwB,yBAAe,EAACxB,IAAI,CAACyB,OAAO,CAACC,OAAO,CAAC;QACrC,IAAAF,yBAAe,EAACxB,IAAI,CAACyB,OAAO,CAACnB,OAAO,CAAC;QACrC,IAAAkB
,yBAAe,EAACxB,IAAI,CAACyB,OAAO,CAACE,WAAW,CAAC;;QAEzC;QACA,IAAI3B,IAAI,CAACyB,OAAO,CAACpB,MAAM,IAAIL,IAAI,CAACyB,OAAO,CAACG,mBAAmB,EAAE;UACzD,IAAAC,kDAAwB,EACpB7B,IAAI,CAACyB,OAAO,CAACpB,MAAM,EACnBL,IAAI,CAACyB,OAAO,CAACG,mBACjB,CAAC;QACL;MACJ;IACJ;EACJ;AACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/dev-mode/unallowed-properties.js b/dist/cjs/plugins/dev-mode/unallowed-properties.js deleted file mode 100644 index a45f06e7978..00000000000 --- a/dist/cjs/plugins/dev-mode/unallowed-properties.js +++ /dev/null @@ -1,82 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.ensureCollectionNameValid = ensureCollectionNameValid; -exports.ensureDatabaseNameIsValid = ensureDatabaseNameIsValid; -exports.validateDatabaseName = validateDatabaseName; -var _rxError = require("../../rx-error.js"); -var _entityProperties = require("./entity-properties.js"); -var _index = require("../../plugins/utils/index.js"); -/** - * if the name of a collection - * clashes with a property of RxDatabase, - * we get problems so this function prohibits this - */ -function ensureCollectionNameValid(args) { - if ((0, _entityProperties.rxDatabaseProperties)().includes(args.name)) { - throw (0, _rxError.newRxError)('DB5', { - name: args.name - }); - } - validateDatabaseName(args.name); -} -function ensureDatabaseNameIsValid(args) { - validateDatabaseName(args.name); - if (args.name.includes('$')) { - throw (0, _rxError.newRxError)('DB13', { - name: args.name - }); - } - - /** - * The server-plugin has problems when a path with and ending slash is given - * So we do not allow this. 
- * @link https://github.com/pubkey/rxdb/issues/2251 - */ - if ((0, _index.isFolderPath)(args.name)) { - if (args.name.endsWith('/') || args.name.endsWith('\\')) { - throw (0, _rxError.newRxError)('DB11', { - name: args.name - }); - } - } -} -var validCouchDBStringRegexStr = '^[a-z][_$a-z0-9\\-]*$'; -var validCouchDBStringRegex = new RegExp(validCouchDBStringRegexStr); - -/** - * Validates that a given string is ok to be used with couchdb-collection-names. - * We only allow these strings as database- or collection names because it ensures - * that you later do not get in trouble when you want to use the database together witch couchdb. - * - * @link https://docs.couchdb.org/en/stable/api/database/common.html - * @link https://neighbourhood.ie/blog/2020/10/13/everything-you-need-to-know-about-couchdb-database-names/ - * @throws {RxError} - */ -function validateDatabaseName(name) { - if (typeof name !== 'string' || name.length === 0) { - throw (0, _rxError.newRxTypeError)('UT1', { - name - }); - } - - // do not check, if foldername is given - if ((0, _index.isFolderPath)(name)) { - return true; - } - if (!name.match(validCouchDBStringRegex) && - /** - * The string ':memory:' is used in the SQLite RxStorage - * to persist data into a memory state. Often used in tests. 
- */ - name !== ':memory:') { - throw (0, _rxError.newRxError)('UT2', { - regex: validCouchDBStringRegexStr, - givenName: name - }); - } - return true; -} -//# sourceMappingURL=unallowed-properties.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/dev-mode/unallowed-properties.js.map b/dist/cjs/plugins/dev-mode/unallowed-properties.js.map deleted file mode 100644 index acb70eb215b..00000000000 --- a/dist/cjs/plugins/dev-mode/unallowed-properties.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"unallowed-properties.js","names":["_rxError","require","_entityProperties","_index","ensureCollectionNameValid","args","rxDatabaseProperties","includes","name","newRxError","validateDatabaseName","ensureDatabaseNameIsValid","isFolderPath","endsWith","validCouchDBStringRegexStr","validCouchDBStringRegex","RegExp","length","newRxTypeError","match","regex","givenName"],"sources":["../../../../src/plugins/dev-mode/unallowed-properties.ts"],"sourcesContent":["import type { RxCollectionCreator, RxDatabaseCreator } from '../../types/index.d.ts';\nimport { newRxError, newRxTypeError } from '../../rx-error.ts';\nimport { rxDatabaseProperties } from './entity-properties.ts';\nimport { isFolderPath } from '../../plugins/utils/index.ts';\n\n/**\n * if the name of a collection\n * clashes with a property of RxDatabase,\n * we get problems so this function prohibits this\n */\nexport function ensureCollectionNameValid(\n args: RxCollectionCreator & { name: string; }\n) {\n if (rxDatabaseProperties().includes(args.name)) {\n throw newRxError('DB5', {\n name: args.name\n });\n }\n validateDatabaseName(args.name);\n}\n\nexport function ensureDatabaseNameIsValid(args: RxDatabaseCreator) {\n\n validateDatabaseName(args.name);\n\n if(args.name.includes('$')){\n throw newRxError('DB13', {\n name: args.name,\n });\n }\n\n /**\n * The server-plugin has problems when a path with and ending slash is given\n * So we do not allow this.\n * @link 
https://github.com/pubkey/rxdb/issues/2251\n */\n if (isFolderPath(args.name)) {\n if (args.name.endsWith('/') || args.name.endsWith('\\\\')) {\n throw newRxError('DB11', {\n name: args.name,\n });\n }\n }\n}\n\n\n\nconst validCouchDBStringRegexStr = '^[a-z][_$a-z0-9\\\\-]*$';\nconst validCouchDBStringRegex = new RegExp(validCouchDBStringRegexStr);\n\n/**\n * Validates that a given string is ok to be used with couchdb-collection-names.\n * We only allow these strings as database- or collection names because it ensures\n * that you later do not get in trouble when you want to use the database together witch couchdb.\n *\n * @link https://docs.couchdb.org/en/stable/api/database/common.html\n * @link https://neighbourhood.ie/blog/2020/10/13/everything-you-need-to-know-about-couchdb-database-names/\n * @throws {RxError}\n */\nexport function validateDatabaseName(name: string): true {\n if (\n typeof name !== 'string' ||\n name.length === 0\n ) {\n throw newRxTypeError('UT1', {\n name\n });\n }\n\n\n // do not check, if foldername is given\n if (isFolderPath(name)) {\n return true;\n }\n\n if (\n !name.match(validCouchDBStringRegex) &&\n /**\n * The string ':memory:' is used in the SQLite RxStorage\n * to persist data into a memory state. 
Often used in tests.\n */\n name !== ':memory:'\n ) {\n throw newRxError('UT2', {\n regex: validCouchDBStringRegexStr,\n givenName: name,\n });\n }\n\n return true;\n}\n"],"mappings":";;;;;;;;AACA,IAAAA,QAAA,GAAAC,OAAA;AACA,IAAAC,iBAAA,GAAAD,OAAA;AACA,IAAAE,MAAA,GAAAF,OAAA;AAEA;AACA;AACA;AACA;AACA;AACO,SAASG,yBAAyBA,CACrCC,IAA6C,EAC/C;EACE,IAAI,IAAAC,sCAAoB,EAAC,CAAC,CAACC,QAAQ,CAACF,IAAI,CAACG,IAAI,CAAC,EAAE;IAC5C,MAAM,IAAAC,mBAAU,EAAC,KAAK,EAAE;MACpBD,IAAI,EAAEH,IAAI,CAACG;IACf,CAAC,CAAC;EACN;EACAE,oBAAoB,CAACL,IAAI,CAACG,IAAI,CAAC;AACnC;AAEO,SAASG,yBAAyBA,CAACN,IAAiC,EAAE;EAEzEK,oBAAoB,CAACL,IAAI,CAACG,IAAI,CAAC;EAE/B,IAAGH,IAAI,CAACG,IAAI,CAACD,QAAQ,CAAC,GAAG,CAAC,EAAC;IACvB,MAAM,IAAAE,mBAAU,EAAC,MAAM,EAAE;MACrBD,IAAI,EAAEH,IAAI,CAACG;IACf,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA;AACA;EACI,IAAI,IAAAI,mBAAY,EAACP,IAAI,CAACG,IAAI,CAAC,EAAE;IACzB,IAAIH,IAAI,CAACG,IAAI,CAACK,QAAQ,CAAC,GAAG,CAAC,IAAIR,IAAI,CAACG,IAAI,CAACK,QAAQ,CAAC,IAAI,CAAC,EAAE;MACrD,MAAM,IAAAJ,mBAAU,EAAC,MAAM,EAAE;QACrBD,IAAI,EAAEH,IAAI,CAACG;MACf,CAAC,CAAC;IACN;EACJ;AACJ;AAIA,IAAMM,0BAA0B,GAAG,uBAAuB;AAC1D,IAAMC,uBAAuB,GAAG,IAAIC,MAAM,CAACF,0BAA0B,CAAC;;AAEtE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACO,SAASJ,oBAAoBA,CAACF,IAAY,EAAQ;EACrD,IACI,OAAOA,IAAI,KAAK,QAAQ,IACxBA,IAAI,CAACS,MAAM,KAAK,CAAC,EACnB;IACE,MAAM,IAAAC,uBAAc,EAAC,KAAK,EAAE;MACxBV;IACJ,CAAC,CAAC;EACN;;EAGA;EACA,IAAI,IAAAI,mBAAY,EAACJ,IAAI,CAAC,EAAE;IACpB,OAAO,IAAI;EACf;EAEA,IACI,CAACA,IAAI,CAACW,KAAK,CAACJ,uBAAuB,CAAC;EACpC;AACR;AACA;AACA;EACQP,IAAI,KAAK,UAAU,EACrB;IACE,MAAM,IAAAC,mBAAU,EAAC,KAAK,EAAE;MACpBW,KAAK,EAAEN,0BAA0B;MACjCO,SAAS,EAAEb;IACf,CAAC,CAAC;EACN;EAEA,OAAO,IAAI;AACf","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/electron/electron-helper.js b/dist/cjs/plugins/electron/electron-helper.js deleted file mode 100644 index 41d8b84fe68..00000000000 --- a/dist/cjs/plugins/electron/electron-helper.js +++ /dev/null @@ -1,9 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - 
value: true -}); -exports.IPC_RENDERER_TO_MAIN = exports.IPC_RENDERER_KEY_PREFIX = void 0; -var IPC_RENDERER_KEY_PREFIX = exports.IPC_RENDERER_KEY_PREFIX = 'rxdb-ipc-renderer-storage'; -var IPC_RENDERER_TO_MAIN = exports.IPC_RENDERER_TO_MAIN = 'rxdb-renderer-to-main'; -//# sourceMappingURL=electron-helper.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/electron/electron-helper.js.map b/dist/cjs/plugins/electron/electron-helper.js.map deleted file mode 100644 index 3175df3d42f..00000000000 --- a/dist/cjs/plugins/electron/electron-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"electron-helper.js","names":["IPC_RENDERER_KEY_PREFIX","exports","IPC_RENDERER_TO_MAIN"],"sources":["../../../../src/plugins/electron/electron-helper.ts"],"sourcesContent":["export const IPC_RENDERER_KEY_PREFIX = 'rxdb-ipc-renderer-storage';\nexport const IPC_RENDERER_TO_MAIN = 'rxdb-renderer-to-main';\n"],"mappings":";;;;;;AAAO,IAAMA,uBAAuB,GAAAC,OAAA,CAAAD,uBAAA,GAAG,2BAA2B;AAC3D,IAAME,oBAAoB,GAAAD,OAAA,CAAAC,oBAAA,GAAG,uBAAuB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/electron/index.js b/dist/cjs/plugins/electron/index.js deleted file mode 100644 index 7aaeb57ec8c..00000000000 --- a/dist/cjs/plugins/electron/index.js +++ /dev/null @@ -1,39 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _rxStorageIpcRenderer = require("./rx-storage-ipc-renderer.js"); -Object.keys(_rxStorageIpcRenderer).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _rxStorageIpcRenderer[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxStorageIpcRenderer[key]; - } - }); -}); -var _rxStorageIpcMain = require("./rx-storage-ipc-main.js"); -Object.keys(_rxStorageIpcMain).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && 
exports[key] === _rxStorageIpcMain[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxStorageIpcMain[key]; - } - }); -}); -var _electronHelper = require("./electron-helper.js"); -Object.keys(_electronHelper).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _electronHelper[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _electronHelper[key]; - } - }); -}); -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/electron/index.js.map b/dist/cjs/plugins/electron/index.js.map deleted file mode 100644 index de620fbcfa7..00000000000 --- a/dist/cjs/plugins/electron/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_rxStorageIpcRenderer","require","Object","keys","forEach","key","exports","defineProperty","enumerable","get","_rxStorageIpcMain","_electronHelper"],"sources":["../../../../src/plugins/electron/index.ts"],"sourcesContent":["export * from './rx-storage-ipc-renderer.ts';\nexport * from './rx-storage-ipc-main.ts';\nexport * from 
'./electron-helper.ts';\n"],"mappings":";;;;;AAAA,IAAAA,qBAAA,GAAAC,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAH,qBAAA,EAAAI,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAL,qBAAA,CAAAK,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAT,qBAAA,CAAAK,GAAA;IAAA;EAAA;AAAA;AACA,IAAAK,iBAAA,GAAAT,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAO,iBAAA,EAAAN,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAK,iBAAA,CAAAL,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAC,iBAAA,CAAAL,GAAA;IAAA;EAAA;AAAA;AACA,IAAAM,eAAA,GAAAV,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAQ,eAAA,EAAAP,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAM,eAAA,CAAAN,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAE,eAAA,CAAAN,GAAA;IAAA;EAAA;AAAA","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/electron/rx-storage-ipc-main.js b/dist/cjs/plugins/electron/rx-storage-ipc-main.js deleted file mode 100644 index 951cf28c7d1..00000000000 --- a/dist/cjs/plugins/electron/rx-storage-ipc-main.js +++ /dev/null @@ -1,45 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.exposeIpcMainRxStorage = exposeIpcMainRxStorage; -var _rxjs = require("rxjs"); -var _electronHelper = require("./electron-helper.js"); -var _index = require("../storage-remote/index.js"); -/** - * This file contains everything - * that is supposed to run inside of the electron main process - */ - -function exposeIpcMainRxStorage(args) { - var channelId = [_electronHelper.IPC_RENDERER_KEY_PREFIX, args.key].join('|'); - var messages$ = new _rxjs.Subject(); - var openRenderers = new Set(); - args.ipcMain.on(channelId, (event, message) => { - addOpenRenderer(event.sender); - if (message) { - messages$.next(message); - } - }); - var addOpenRenderer 
= renderer => { - if (openRenderers.has(renderer)) return; - openRenderers.add(renderer); - renderer.on('destroyed', () => openRenderers.delete(renderer)); - }; - var send = msg => { - /** - * TODO we could improve performance - * by only sending the message to the 'correct' sender. - */ - openRenderers.forEach(sender => { - sender.send(channelId, msg); - }); - }; - (0, _index.exposeRxStorageRemote)({ - storage: args.storage, - messages$, - send - }); -} -//# sourceMappingURL=rx-storage-ipc-main.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/electron/rx-storage-ipc-main.js.map b/dist/cjs/plugins/electron/rx-storage-ipc-main.js.map deleted file mode 100644 index b1c0116a193..00000000000 --- a/dist/cjs/plugins/electron/rx-storage-ipc-main.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-storage-ipc-main.js","names":["_rxjs","require","_electronHelper","_index","exposeIpcMainRxStorage","args","channelId","IPC_RENDERER_KEY_PREFIX","key","join","messages$","Subject","openRenderers","Set","ipcMain","on","event","message","addOpenRenderer","sender","next","renderer","has","add","delete","send","msg","forEach","exposeRxStorageRemote","storage"],"sources":["../../../../src/plugins/electron/rx-storage-ipc-main.ts"],"sourcesContent":["/**\n * This file contains everything\n * that is supposed to run inside of the electron main process\n */\nimport type {\n RxStorage\n} from '../../types/index.d.ts';\nimport { Subject } from 'rxjs';\nimport {\n IPC_RENDERER_KEY_PREFIX\n} from './electron-helper.ts';\nimport {\n exposeRxStorageRemote,\n RxStorageRemoteExposeSettings,\n MessageToRemote\n} from '../storage-remote/index.ts';\nexport function exposeIpcMainRxStorage(\n args: {\n key: string;\n storage: RxStorage;\n ipcMain: any;\n }\n) {\n const channelId = [\n IPC_RENDERER_KEY_PREFIX,\n args.key,\n ].join('|');\n const messages$ = new Subject();\n const openRenderers: Set = new Set();\n args.ipcMain.on(\n channelId,\n (event: any, message: any) => {\n 
addOpenRenderer(event.sender);\n if (message) {\n messages$.next(message);\n }\n }\n );\n const addOpenRenderer = (renderer: any) => {\n if (openRenderers.has(renderer)) return;\n openRenderers.add(renderer);\n renderer.on('destroyed', () => openRenderers.delete(renderer));\n };\n const send: RxStorageRemoteExposeSettings['send'] = (msg) => {\n /**\n * TODO we could improve performance\n * by only sending the message to the 'correct' sender.\n */\n openRenderers.forEach(sender => {\n sender.send(channelId, msg);\n });\n };\n exposeRxStorageRemote({\n storage: args.storage,\n messages$,\n send\n });\n}\n"],"mappings":";;;;;;AAOA,IAAAA,KAAA,GAAAC,OAAA;AACA,IAAAC,eAAA,GAAAD,OAAA;AAGA,IAAAE,MAAA,GAAAF,OAAA;AAXA;AACA;AACA;AACA;;AAaO,SAASG,sBAAsBA,CAClCC,IAIC,EACH;EACE,IAAMC,SAAS,GAAG,CACdC,uCAAuB,EACvBF,IAAI,CAACG,GAAG,CACX,CAACC,IAAI,CAAC,GAAG,CAAC;EACX,IAAMC,SAAS,GAAG,IAAIC,aAAO,CAAkB,CAAC;EAChD,IAAMC,aAAuB,GAAG,IAAIC,GAAG,CAAC,CAAC;EACzCR,IAAI,CAACS,OAAO,CAACC,EAAE,CACXT,SAAS,EACT,CAACU,KAAU,EAAEC,OAAY,KAAK;IAC1BC,eAAe,CAACF,KAAK,CAACG,MAAM,CAAC;IAC7B,IAAIF,OAAO,EAAE;MACTP,SAAS,CAACU,IAAI,CAACH,OAAO,CAAC;IAC3B;EACJ,CACJ,CAAC;EACD,IAAMC,eAAe,GAAIG,QAAa,IAAK;IACvC,IAAIT,aAAa,CAACU,GAAG,CAACD,QAAQ,CAAC,EAAE;IACjCT,aAAa,CAACW,GAAG,CAACF,QAAQ,CAAC;IAC3BA,QAAQ,CAACN,EAAE,CAAC,WAAW,EAAE,MAAMH,aAAa,CAACY,MAAM,CAACH,QAAQ,CAAC,CAAC;EAClE,CAAC;EACD,IAAMI,IAA2C,GAAIC,GAAG,IAAK;IACzD;AACR;AACA;AACA;IACQd,aAAa,CAACe,OAAO,CAACR,MAAM,IAAI;MAC5BA,MAAM,CAACM,IAAI,CAACnB,SAAS,EAAEoB,GAAG,CAAC;IAC/B,CAAC,CAAC;EACN,CAAC;EACD,IAAAE,4BAAqB,EAAC;IAClBC,OAAO,EAAExB,IAAI,CAACwB,OAAO;IACrBnB,SAAS;IACTe;EACJ,CAAC,CAAC;AACN","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/electron/rx-storage-ipc-renderer.js b/dist/cjs/plugins/electron/rx-storage-ipc-renderer.js deleted file mode 100644 index 2a63c3f610c..00000000000 --- a/dist/cjs/plugins/electron/rx-storage-ipc-renderer.js +++ /dev/null @@ -1,37 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { 
- value: true -}); -exports.getRxStorageIpcRenderer = getRxStorageIpcRenderer; -var _rxjs = require("rxjs"); -var _index = require("../storage-remote/index.js"); -var _electronHelper = require("./electron-helper.js"); -var _index2 = require("../utils/index.js"); -function getRxStorageIpcRenderer(settings) { - var channelId = [_electronHelper.IPC_RENDERER_KEY_PREFIX, settings.key].join('|'); - var storage = (0, _index.getRxStorageRemote)({ - identifier: 'electron-ipc-renderer', - mode: settings.mode, - messageChannelCreator() { - var messages$ = new _rxjs.Subject(); - var listener = (_event, message) => { - messages$.next(message); - }; - settings.ipcRenderer.on(channelId, listener); - settings.ipcRenderer.postMessage(channelId, false); - return Promise.resolve({ - messages$, - send(msg) { - settings.ipcRenderer.postMessage(channelId, msg); - }, - close() { - settings.ipcRenderer.removeListener(channelId, listener); - return _index2.PROMISE_RESOLVE_VOID; - } - }); - } - }); - return storage; -} -//# sourceMappingURL=rx-storage-ipc-renderer.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/electron/rx-storage-ipc-renderer.js.map b/dist/cjs/plugins/electron/rx-storage-ipc-renderer.js.map deleted file mode 100644 index 95c6e011b0a..00000000000 --- a/dist/cjs/plugins/electron/rx-storage-ipc-renderer.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-storage-ipc-renderer.js","names":["_rxjs","require","_index","_electronHelper","_index2","getRxStorageIpcRenderer","settings","channelId","IPC_RENDERER_KEY_PREFIX","key","join","storage","getRxStorageRemote","identifier","mode","messageChannelCreator","messages$","Subject","listener","_event","message","next","ipcRenderer","on","postMessage","Promise","resolve","send","msg","close","removeListener","PROMISE_RESOLVE_VOID"],"sources":["../../../../src/plugins/electron/rx-storage-ipc-renderer.ts"],"sourcesContent":["import { Subject } from 'rxjs';\nimport {\n getRxStorageRemote,\n RxStorageRemote,\n 
RxStorageRemoteSettings,\n MessageFromRemote\n} from '../storage-remote/index.ts';\nimport {\n IPC_RENDERER_KEY_PREFIX\n} from './electron-helper.ts';\nimport { PROMISE_RESOLVE_VOID } from '../utils/index.ts';\n\nexport type RxStorageIpcRendererSettings = {\n /**\n * Set the same key on both sides\n * to ensure that messages do not get mixed\n * up when you use more then one storage.\n */\n key: string;\n ipcRenderer: any;\n mode: RxStorageRemoteSettings['mode'];\n};\n\nexport type RxStorageIpcRenderer = RxStorageRemote;\nexport function getRxStorageIpcRenderer(\n settings: RxStorageIpcRendererSettings\n): RxStorageIpcRenderer {\n const channelId = [\n IPC_RENDERER_KEY_PREFIX,\n settings.key\n ].join('|');\n\n const storage = getRxStorageRemote({\n identifier: 'electron-ipc-renderer',\n mode: settings.mode,\n messageChannelCreator() {\n const messages$ = new Subject();\n const listener = (_event: any, message: any) => {\n messages$.next(message);\n };\n settings.ipcRenderer.on(channelId, listener);\n settings.ipcRenderer.postMessage(\n channelId,\n false\n );\n return Promise.resolve({\n messages$,\n send(msg) {\n settings.ipcRenderer.postMessage(\n channelId,\n msg\n );\n },\n close() {\n settings.ipcRenderer.removeListener(channelId, listener);\n return PROMISE_RESOLVE_VOID;\n }\n });\n },\n });\n return 
storage;\n}\n"],"mappings":";;;;;;AAAA,IAAAA,KAAA,GAAAC,OAAA;AACA,IAAAC,MAAA,GAAAD,OAAA;AAMA,IAAAE,eAAA,GAAAF,OAAA;AAGA,IAAAG,OAAA,GAAAH,OAAA;AAcO,SAASI,uBAAuBA,CACnCC,QAAsC,EAClB;EACpB,IAAMC,SAAS,GAAG,CACdC,uCAAuB,EACvBF,QAAQ,CAACG,GAAG,CACf,CAACC,IAAI,CAAC,GAAG,CAAC;EAEX,IAAMC,OAAO,GAAG,IAAAC,yBAAkB,EAAC;IAC/BC,UAAU,EAAE,uBAAuB;IACnCC,IAAI,EAAER,QAAQ,CAACQ,IAAI;IACnBC,qBAAqBA,CAAA,EAAG;MACpB,IAAMC,SAAS,GAAG,IAAIC,aAAO,CAAoB,CAAC;MAClD,IAAMC,QAAQ,GAAGA,CAACC,MAAW,EAAEC,OAAY,KAAK;QAC5CJ,SAAS,CAACK,IAAI,CAACD,OAAO,CAAC;MAC3B,CAAC;MACDd,QAAQ,CAACgB,WAAW,CAACC,EAAE,CAAChB,SAAS,EAAEW,QAAQ,CAAC;MAC5CZ,QAAQ,CAACgB,WAAW,CAACE,WAAW,CAC5BjB,SAAS,EACT,KACJ,CAAC;MACD,OAAOkB,OAAO,CAACC,OAAO,CAAC;QACnBV,SAAS;QACTW,IAAIA,CAACC,GAAG,EAAE;UACNtB,QAAQ,CAACgB,WAAW,CAACE,WAAW,CAC5BjB,SAAS,EACTqB,GACJ,CAAC;QACL,CAAC;QACDC,KAAKA,CAAA,EAAG;UACJvB,QAAQ,CAACgB,WAAW,CAACQ,cAAc,CAACvB,SAAS,EAAEW,QAAQ,CAAC;UACxD,OAAOa,4BAAoB;QAC/B;MACJ,CAAC,CAAC;IACN;EACJ,CAAC,CAAC;EACF,OAAOpB,OAAO;AAClB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/encryption-crypto-js/index.js b/dist/cjs/plugins/encryption-crypto-js/index.js deleted file mode 100644 index 6b6831e299d..00000000000 --- a/dist/cjs/plugins/encryption-crypto-js/index.js +++ /dev/null @@ -1,147 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.MINIMUM_PASSWORD_LENGTH = void 0; -exports.decryptString = decryptString; -exports.encryptString = encryptString; -exports.wrappedKeyEncryptionCryptoJsStorage = wrappedKeyEncryptionCryptoJsStorage; -var _cryptoJs = _interopRequireDefault(require("crypto-js")); -var _pluginHelpers = require("../../plugin-helpers.js"); -var _rxError = require("../../rx-error.js"); -var _rxStorageHelper = require("../../rx-storage-helper.js"); -var _index = require("../../plugins/utils/index.js"); -/** - * this plugin adds the encryption-capabilities to rxdb - 
* It's using crypto-js/aes for password-encryption - * @link https://github.com/brix/crypto-js - */ - -var { - AES, - enc: cryptoEnc -} = _cryptoJs.default; -var MINIMUM_PASSWORD_LENGTH = exports.MINIMUM_PASSWORD_LENGTH = 8; -function encryptString(value, password) { - var encrypted = AES.encrypt(value, password); - return encrypted.toString(); -} -function decryptString(cipherText, password) { - /** - * Trying to decrypt non-strings - * will cause no errors and will be hard to debug. - * So instead we do this check here. - */ - if (typeof cipherText !== 'string') { - throw (0, _rxError.newRxError)('SNH', { - args: { - cipherText - } - }); - } - var decrypted = AES.decrypt(cipherText, password); - var ret = decrypted.toString(cryptoEnc.Utf8); - return ret; -} -function wrappedKeyEncryptionCryptoJsStorage(args) { - return Object.assign({}, args.storage, { - async createStorageInstance(params) { - if (typeof params.password !== 'undefined') { - validatePassword(params.password); - } - if (!(0, _rxStorageHelper.hasEncryption)(params.schema)) { - var retInstance = await args.storage.createStorageInstance(params); - return retInstance; - } - if (!params.password) { - throw (0, _rxError.newRxError)('EN3', { - database: params.databaseName, - collection: params.collectionName, - schema: params.schema - }); - } - var password = params.password; - var schemaWithoutEncrypted = (0, _index.clone)(params.schema); - delete schemaWithoutEncrypted.encrypted; - if (schemaWithoutEncrypted.attachments) { - schemaWithoutEncrypted.attachments.encrypted = false; - } - var instance = await args.storage.createStorageInstance(Object.assign({}, params, { - schema: schemaWithoutEncrypted - })); - function modifyToStorage(docData) { - docData = cloneWithoutAttachments(docData); - (0, _index.ensureNotFalsy)(params.schema.encrypted).forEach(path => { - var value = (0, _index.getProperty)(docData, path); - if (typeof value === 'undefined') { - return; - } - var stringValue = 
JSON.stringify(value); - var encrypted = encryptString(stringValue, password); - (0, _index.setProperty)(docData, path, encrypted); - }); - - // handle attachments - if (params.schema.attachments && params.schema.attachments.encrypted) { - var newAttachments = {}; - Object.entries(docData._attachments).forEach(([id, attachment]) => { - var useAttachment = (0, _index.flatClone)(attachment); - if (useAttachment.data) { - var dataString = useAttachment.data; - useAttachment.data = (0, _index.b64EncodeUnicode)(encryptString(dataString, password)); - } - newAttachments[id] = useAttachment; - }); - docData._attachments = newAttachments; - } - return docData; - } - function modifyFromStorage(docData) { - docData = cloneWithoutAttachments(docData); - (0, _index.ensureNotFalsy)(params.schema.encrypted).forEach(path => { - var value = (0, _index.getProperty)(docData, path); - if (typeof value === 'undefined') { - return; - } - var decrypted = decryptString(value, password); - var decryptedParsed = JSON.parse(decrypted); - (0, _index.setProperty)(docData, path, decryptedParsed); - }); - return docData; - } - function modifyAttachmentFromStorage(attachmentData) { - if (params.schema.attachments && params.schema.attachments.encrypted) { - var decrypted = decryptString((0, _index.b64DecodeUnicode)(attachmentData), password); - return decrypted; - } else { - return attachmentData; - } - } - return (0, _pluginHelpers.wrapRxStorageInstance)(params.schema, instance, modifyToStorage, modifyFromStorage, modifyAttachmentFromStorage); - } - }); -} -function cloneWithoutAttachments(data) { - var attachments = data._attachments; - data = (0, _index.flatClone)(data); - delete data._attachments; - data = (0, _index.clone)(data); - data._attachments = attachments; - return data; -} -function validatePassword(password) { - if (typeof password !== 'string') { - throw (0, _rxError.newRxTypeError)('EN1', { - password - }); - } - if (password.length < MINIMUM_PASSWORD_LENGTH) { - throw (0, 
_rxError.newRxError)('EN2', { - minPassLength: MINIMUM_PASSWORD_LENGTH, - password - }); - } -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/encryption-crypto-js/index.js.map b/dist/cjs/plugins/encryption-crypto-js/index.js.map deleted file mode 100644 index 1267b8e71d0..00000000000 --- a/dist/cjs/plugins/encryption-crypto-js/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_cryptoJs","_interopRequireDefault","require","_pluginHelpers","_rxError","_rxStorageHelper","_index","AES","enc","cryptoEnc","pkg","MINIMUM_PASSWORD_LENGTH","exports","encryptString","value","password","encrypted","encrypt","toString","decryptString","cipherText","newRxError","args","decrypted","decrypt","ret","Utf8","wrappedKeyEncryptionCryptoJsStorage","Object","assign","storage","createStorageInstance","params","validatePassword","hasEncryption","schema","retInstance","database","databaseName","collection","collectionName","schemaWithoutEncrypted","clone","attachments","instance","modifyToStorage","docData","cloneWithoutAttachments","ensureNotFalsy","forEach","path","getProperty","stringValue","JSON","stringify","setProperty","newAttachments","entries","_attachments","id","attachment","useAttachment","flatClone","data","dataString","b64EncodeUnicode","modifyFromStorage","decryptedParsed","parse","modifyAttachmentFromStorage","attachmentData","b64DecodeUnicode","wrapRxStorageInstance","newRxTypeError","length","minPassLength"],"sources":["../../../../src/plugins/encryption-crypto-js/index.ts"],"sourcesContent":["/**\n * this plugin adds the encryption-capabilities to rxdb\n * It's using crypto-js/aes for password-encryption\n * @link https://github.com/brix/crypto-js\n */\nimport pkg from 'crypto-js';\nconst { AES, enc: cryptoEnc } = pkg;\n\nimport { wrapRxStorageInstance } from '../../plugin-helpers.ts';\nimport { newRxError, newRxTypeError } from '../../rx-error.ts';\nimport { hasEncryption } from 
'../../rx-storage-helper.ts';\nimport type {\n InternalStoreDocType,\n RxAttachmentWriteData,\n RxDocumentData,\n RxDocumentWriteData,\n RxJsonSchema,\n RxStorage,\n RxStorageInstanceCreationParams\n} from '../../types/index.d.ts';\nimport {\n b64DecodeUnicode,\n b64EncodeUnicode,\n clone,\n ensureNotFalsy,\n flatClone,\n getProperty,\n setProperty\n} from '../../plugins/utils/index.ts';\n\nexport const MINIMUM_PASSWORD_LENGTH: 8 = 8;\n\n\nexport function encryptString(value: string, password: string): string {\n const encrypted = AES.encrypt(value, password);\n return encrypted.toString();\n}\n\nexport function decryptString(cipherText: string, password: any): string {\n /**\n * Trying to decrypt non-strings\n * will cause no errors and will be hard to debug.\n * So instead we do this check here.\n */\n if (typeof cipherText !== 'string') {\n throw newRxError('SNH', {\n args: {\n cipherText\n }\n });\n }\n\n const decrypted = AES.decrypt(cipherText, password);\n const ret = decrypted.toString(cryptoEnc.Utf8);\n return ret;\n}\n\nexport type InternalStorePasswordDocType = InternalStoreDocType<{\n hash: string;\n}>;\n\nexport function wrappedKeyEncryptionCryptoJsStorage(\n args: {\n storage: RxStorage;\n }\n): RxStorage {\n return Object.assign(\n {},\n args.storage,\n {\n async createStorageInstance(\n params: RxStorageInstanceCreationParams\n ) {\n if (typeof params.password !== 'undefined') {\n validatePassword(params.password as any);\n }\n\n if (!hasEncryption(params.schema)) {\n const retInstance = await args.storage.createStorageInstance(params);\n return retInstance;\n }\n\n if (!params.password) {\n throw newRxError('EN3', {\n database: params.databaseName,\n collection: params.collectionName,\n schema: params.schema\n });\n }\n const password = params.password;\n\n const schemaWithoutEncrypted: RxJsonSchema> = clone(params.schema);\n delete schemaWithoutEncrypted.encrypted;\n if (schemaWithoutEncrypted.attachments) {\n 
schemaWithoutEncrypted.attachments.encrypted = false;\n }\n\n const instance = await args.storage.createStorageInstance(\n Object.assign(\n {},\n params,\n {\n schema: schemaWithoutEncrypted\n }\n )\n );\n\n function modifyToStorage(docData: RxDocumentWriteData) {\n docData = cloneWithoutAttachments(docData);\n ensureNotFalsy(params.schema.encrypted)\n .forEach(path => {\n const value = getProperty(docData, path);\n if (typeof value === 'undefined') {\n return;\n }\n\n const stringValue = JSON.stringify(value);\n const encrypted = encryptString(stringValue, password);\n setProperty(docData, path, encrypted);\n });\n\n // handle attachments\n if (\n params.schema.attachments &&\n params.schema.attachments.encrypted\n ) {\n const newAttachments: typeof docData._attachments = {};\n Object.entries(docData._attachments).forEach(([id, attachment]) => {\n const useAttachment: RxAttachmentWriteData = flatClone(attachment) as any;\n if (useAttachment.data) {\n const dataString = useAttachment.data;\n useAttachment.data = b64EncodeUnicode(encryptString(dataString, password));\n }\n newAttachments[id] = useAttachment;\n });\n docData._attachments = newAttachments;\n }\n return docData;\n }\n function modifyFromStorage(docData: RxDocumentData): Promise> {\n docData = cloneWithoutAttachments(docData);\n ensureNotFalsy(params.schema.encrypted)\n .forEach(path => {\n const value = getProperty(docData, path);\n if (typeof value === 'undefined') {\n return;\n }\n const decrypted = decryptString(value, password);\n const decryptedParsed = JSON.parse(decrypted);\n setProperty(docData, path, decryptedParsed);\n });\n return docData;\n }\n\n function modifyAttachmentFromStorage(attachmentData: string): string {\n if (\n params.schema.attachments &&\n params.schema.attachments.encrypted\n ) {\n const decrypted = decryptString(b64DecodeUnicode(attachmentData), password);\n return decrypted;\n } else {\n return attachmentData;\n }\n }\n\n return wrapRxStorageInstance(\n params.schema,\n 
instance,\n modifyToStorage,\n modifyFromStorage,\n modifyAttachmentFromStorage\n );\n }\n }\n );\n}\n\n\n\n\n\nfunction cloneWithoutAttachments(data: RxDocumentWriteData): RxDocumentData {\n const attachments = data._attachments;\n data = flatClone(data);\n delete (data as any)._attachments;\n data = clone(data);\n data._attachments = attachments;\n return data as any;\n}\n\nfunction validatePassword(password: string) {\n if (typeof password !== 'string') {\n throw newRxTypeError('EN1', {\n password\n });\n }\n if (password.length < MINIMUM_PASSWORD_LENGTH) {\n throw newRxError('EN2', {\n minPassLength: MINIMUM_PASSWORD_LENGTH,\n password\n });\n }\n}\n"],"mappings":";;;;;;;;;;AAKA,IAAAA,SAAA,GAAAC,sBAAA,CAAAC,OAAA;AAGA,IAAAC,cAAA,GAAAD,OAAA;AACA,IAAAE,QAAA,GAAAF,OAAA;AACA,IAAAG,gBAAA,GAAAH,OAAA;AAUA,IAAAI,MAAA,GAAAJ,OAAA;AApBA;AACA;AACA;AACA;AACA;;AAEA,IAAM;EAAEK,GAAG;EAAEC,GAAG,EAAEC;AAAU,CAAC,GAAGC,iBAAG;AAwB5B,IAAMC,uBAA0B,GAAAC,OAAA,CAAAD,uBAAA,GAAG,CAAC;AAGpC,SAASE,aAAaA,CAACC,KAAa,EAAEC,QAAgB,EAAU;EACnE,IAAMC,SAAS,GAAGT,GAAG,CAACU,OAAO,CAACH,KAAK,EAAEC,QAAQ,CAAC;EAC9C,OAAOC,SAAS,CAACE,QAAQ,CAAC,CAAC;AAC/B;AAEO,SAASC,aAAaA,CAACC,UAAkB,EAAEL,QAAa,EAAU;EACrE;AACJ;AACA;AACA;AACA;EACI,IAAI,OAAOK,UAAU,KAAK,QAAQ,EAAE;IAChC,MAAM,IAAAC,mBAAU,EAAC,KAAK,EAAE;MACpBC,IAAI,EAAE;QACFF;MACJ;IACJ,CAAC,CAAC;EACN;EAEA,IAAMG,SAAS,GAAGhB,GAAG,CAACiB,OAAO,CAACJ,UAAU,EAAEL,QAAQ,CAAC;EACnD,IAAMU,GAAG,GAAGF,SAAS,CAACL,QAAQ,CAACT,SAAS,CAACiB,IAAI,CAAC;EAC9C,OAAOD,GAAG;AACd;AAMO,SAASE,mCAAmCA,CAC/CL,IAEC,EAC4C;EAC7C,OAAOM,MAAM,CAACC,MAAM,CAChB,CAAC,CAAC,EACFP,IAAI,CAACQ,OAAO,EACZ;IACI,MAAMC,qBAAqBA,CACvBC,MAAuD,EACzD;MACE,IAAI,OAAOA,MAAM,CAACjB,QAAQ,KAAK,WAAW,EAAE;QACxCkB,gBAAgB,CAACD,MAAM,CAACjB,QAAe,CAAC;MAC5C;MAEA,IAAI,CAAC,IAAAmB,8BAAa,EAACF,MAAM,CAACG,MAAM,CAAC,EAAE;QAC/B,IAAMC,WAAW,GAAG,MAAMd,IAAI,CAACQ,OAAO,CAACC,qBAAqB,CAACC,MAAM,CAAC;QACpE,OAAOI,WAAW;MACtB;MAEA,IAAI,CAACJ,MAAM,CAACjB,QAAQ,EAAE;QAClB,MAAM,IAAAM,mBAAU,EAAC,KAAK,EAAE;UACpBgB,QAAQ,EAAEL,MAAM,CAACM,YAAY;UAC7BC,UAA
U,EAAEP,MAAM,CAACQ,cAAc;UACjCL,MAAM,EAAEH,MAAM,CAACG;QACnB,CAAC,CAAC;MACN;MACA,IAAMpB,QAAQ,GAAGiB,MAAM,CAACjB,QAAQ;MAEhC,IAAM0B,sBAA+D,GAAG,IAAAC,YAAK,EAACV,MAAM,CAACG,MAAM,CAAC;MAC5F,OAAOM,sBAAsB,CAACzB,SAAS;MACvC,IAAIyB,sBAAsB,CAACE,WAAW,EAAE;QACpCF,sBAAsB,CAACE,WAAW,CAAC3B,SAAS,GAAG,KAAK;MACxD;MAEA,IAAM4B,QAAQ,GAAG,MAAMtB,IAAI,CAACQ,OAAO,CAACC,qBAAqB,CACrDH,MAAM,CAACC,MAAM,CACT,CAAC,CAAC,EACFG,MAAM,EACN;QACIG,MAAM,EAAEM;MACZ,CACJ,CACJ,CAAC;MAED,SAASI,eAAeA,CAACC,OAAuC,EAAE;QAC9DA,OAAO,GAAGC,uBAAuB,CAACD,OAAO,CAAC;QAC1C,IAAAE,qBAAc,EAAChB,MAAM,CAACG,MAAM,CAACnB,SAAS,CAAC,CAClCiC,OAAO,CAACC,IAAI,IAAI;UACb,IAAMpC,KAAK,GAAG,IAAAqC,kBAAW,EAACL,OAAO,EAAEI,IAAI,CAAC;UACxC,IAAI,OAAOpC,KAAK,KAAK,WAAW,EAAE;YAC9B;UACJ;UAEA,IAAMsC,WAAW,GAAGC,IAAI,CAACC,SAAS,CAACxC,KAAK,CAAC;UACzC,IAAME,SAAS,GAAGH,aAAa,CAACuC,WAAW,EAAErC,QAAQ,CAAC;UACtD,IAAAwC,kBAAW,EAACT,OAAO,EAAEI,IAAI,EAAElC,SAAS,CAAC;QACzC,CAAC,CAAC;;QAEN;QACA,IACIgB,MAAM,CAACG,MAAM,CAACQ,WAAW,IACzBX,MAAM,CAACG,MAAM,CAACQ,WAAW,CAAC3B,SAAS,EACrC;UACE,IAAMwC,cAA2C,GAAG,CAAC,CAAC;UACtD5B,MAAM,CAAC6B,OAAO,CAACX,OAAO,CAACY,YAAY,CAAC,CAACT,OAAO,CAAC,CAAC,CAACU,EAAE,EAAEC,UAAU,CAAC,KAAK;YAC/D,IAAMC,aAAoC,GAAG,IAAAC,gBAAS,EAACF,UAAU,CAAQ;YACzE,IAAIC,aAAa,CAACE,IAAI,EAAE;cACpB,IAAMC,UAAU,GAAGH,aAAa,CAACE,IAAI;cACrCF,aAAa,CAACE,IAAI,GAAG,IAAAE,uBAAgB,EAACpD,aAAa,CAACmD,UAAU,EAAEjD,QAAQ,CAAC,CAAC;YAC9E;YACAyC,cAAc,CAACG,EAAE,CAAC,GAAGE,aAAa;UACtC,CAAC,CAAC;UACFf,OAAO,CAACY,YAAY,GAAGF,cAAc;QACzC;QACA,OAAOV,OAAO;MAClB;MACA,SAASoB,iBAAiBA,CAACpB,OAA4B,EAAsC;QACzFA,OAAO,GAAGC,uBAAuB,CAACD,OAAO,CAAC;QAC1C,IAAAE,qBAAc,EAAChB,MAAM,CAACG,MAAM,CAACnB,SAAS,CAAC,CAClCiC,OAAO,CAACC,IAAI,IAAI;UACb,IAAMpC,KAAK,GAAG,IAAAqC,kBAAW,EAACL,OAAO,EAAEI,IAAI,CAAC;UACxC,IAAI,OAAOpC,KAAK,KAAK,WAAW,EAAE;YAC9B;UACJ;UACA,IAAMS,SAAS,GAAGJ,aAAa,CAACL,KAAK,EAAEC,QAAQ,CAAC;UAChD,IAAMoD,eAAe,GAAGd,IAAI,CAACe,KAAK,CAAC7C,SAAS,CAAC;UAC7C,IAAAgC,kBAAW,EAACT,OAAO,EAAEI,IAAI,EAAEiB,eAAe,CAAC;QAC/C,CAAC,CAAC;QACN,OAAOrB,OAAO;MAClB;MAEA,SAASuB,2BAA2BA,CAACC,cAAsB,EAAU;QAC
jE,IACItC,MAAM,CAACG,MAAM,CAACQ,WAAW,IACzBX,MAAM,CAACG,MAAM,CAACQ,WAAW,CAAC3B,SAAS,EACrC;UACE,IAAMO,SAAS,GAAGJ,aAAa,CAAC,IAAAoD,uBAAgB,EAACD,cAAc,CAAC,EAAEvD,QAAQ,CAAC;UAC3E,OAAOQ,SAAS;QACpB,CAAC,MAAM;UACH,OAAO+C,cAAc;QACzB;MACJ;MAEA,OAAO,IAAAE,oCAAqB,EACxBxC,MAAM,CAACG,MAAM,EACbS,QAAQ,EACRC,eAAe,EACfqB,iBAAiB,EACjBG,2BACJ,CAAC;IACL;EACJ,CACJ,CAAC;AACL;AAMA,SAAStB,uBAAuBA,CAAIgB,IAA4B,EAAqB;EACjF,IAAMpB,WAAW,GAAGoB,IAAI,CAACL,YAAY;EACrCK,IAAI,GAAG,IAAAD,gBAAS,EAACC,IAAI,CAAC;EACtB,OAAQA,IAAI,CAASL,YAAY;EACjCK,IAAI,GAAG,IAAArB,YAAK,EAACqB,IAAI,CAAC;EAClBA,IAAI,CAACL,YAAY,GAAGf,WAAW;EAC/B,OAAOoB,IAAI;AACf;AAEA,SAAS9B,gBAAgBA,CAAClB,QAAgB,EAAE;EACxC,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;IAC9B,MAAM,IAAA0D,uBAAc,EAAC,KAAK,EAAE;MACxB1D;IACJ,CAAC,CAAC;EACN;EACA,IAAIA,QAAQ,CAAC2D,MAAM,GAAG/D,uBAAuB,EAAE;IAC3C,MAAM,IAAAU,mBAAU,EAAC,KAAK,EAAE;MACpBsD,aAAa,EAAEhE,uBAAuB;MACtCI;IACJ,CAAC,CAAC;EACN;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/flutter/index.js b/dist/cjs/plugins/flutter/index.js deleted file mode 100644 index ecf8f130872..00000000000 --- a/dist/cjs/plugins/flutter/index.js +++ /dev/null @@ -1,62 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.getLokijsAdapterFlutter = getLokijsAdapterFlutter; -exports.setFlutterRxDatabaseConnector = setFlutterRxDatabaseConnector; -function setFlutterRxDatabaseConnector(createDB) { - process.init = async databaseName => { - var db = await createDB(databaseName); - db.eventBulks$.subscribe(eventBulk => { - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - sendRxDBEvent(JSON.stringify(eventBulk)); - }); - process.db = db; - var collections = []; - Object.entries(db.collections).forEach(([collectionName, collection]) => { - collections.push({ - name: collectionName, - primaryKey: collection.schema.primaryPath - }); - }); - return { - databaseName, - collections - }; - }; -} - -/** - * Create a simple lokijs adapter so that 
we can persist string via flutter - * @link https://github.com/techfort/LokiJS/blob/master/tutorials/Persistence%20Adapters.md#creating-your-own-basic-persistence-adapter - */ -function getLokijsAdapterFlutter() { - var ret = { - async loadDatabase(databaseName, callback) { - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - var serializedDb = await readKeyValue(databaseName); - var success = true; - if (success) { - callback(serializedDb); - } else { - callback(new Error('There was a problem loading the database')); - } - }, - async saveDatabase(databaseName, dbstring, callback) { - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - await persistKeyValue(databaseName, dbstring); - var success = true; // make your own determinations - if (success) { - callback(null); - } else { - callback(new Error('An error was encountered loading " + dbname + " database.')); - } - } - }; - return ret; -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/flutter/index.js.map b/dist/cjs/plugins/flutter/index.js.map deleted file mode 100644 index 015aa06ceae..00000000000 --- a/dist/cjs/plugins/flutter/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["setFlutterRxDatabaseConnector","createDB","process","init","databaseName","db","eventBulks$","subscribe","eventBulk","sendRxDBEvent","JSON","stringify","collections","Object","entries","forEach","collectionName","collection","push","name","primaryKey","schema","primaryPath","getLokijsAdapterFlutter","ret","loadDatabase","callback","serializedDb","readKeyValue","success","Error","saveDatabase","dbstring","persistKeyValue"],"sources":["../../../../src/plugins/flutter/index.ts"],"sourcesContent":["import type {\n RxDatabase\n} from '../../types/index.d.ts';\n\nexport type CreateRxDatabaseFunctionType = (databaseName: string) => Promise;\n\nexport function setFlutterRxDatabaseConnector(\n createDB: 
CreateRxDatabaseFunctionType\n) {\n (process as any).init = async (databaseName: string) => {\n const db = await createDB(databaseName);\n db.eventBulks$.subscribe(eventBulk => {\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n // @ts-ignore\n sendRxDBEvent(JSON.stringify(eventBulk));\n });\n (process as any).db = db;\n const collections: { name: string; primaryKey: string; }[] = [];\n Object.entries(db.collections).forEach(([collectionName, collection]) => {\n collections.push({\n name: collectionName,\n primaryKey: collection.schema.primaryPath\n });\n });\n return {\n databaseName,\n collections\n };\n };\n}\n\n/**\n * Create a simple lokijs adapter so that we can persist string via flutter\n * @link https://github.com/techfort/LokiJS/blob/master/tutorials/Persistence%20Adapters.md#creating-your-own-basic-persistence-adapter\n */\nexport function getLokijsAdapterFlutter() {\n const ret = {\n async loadDatabase(databaseName: string, callback: (v: string | Error) => {}) {\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n // @ts-ignore\n const serializedDb: string = await readKeyValue(databaseName);\n\n const success = true;\n if (success) {\n callback(serializedDb);\n } else {\n callback(new Error('There was a problem loading the database'));\n }\n },\n async saveDatabase(databaseName: string, dbstring: string, callback: (v: string | Error | null) => {}) {\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n // @ts-ignore\n await persistKeyValue(databaseName, dbstring);\n\n const success = true; // make your own determinations\n if (success) {\n callback(null);\n } else {\n callback(new Error('An error was encountered loading \" + dbname + \" database.'));\n }\n }\n };\n return 
ret;\n}\n"],"mappings":";;;;;;;AAMO,SAASA,6BAA6BA,CACzCC,QAAsC,EACxC;EACGC,OAAO,CAASC,IAAI,GAAG,MAAOC,YAAoB,IAAK;IACpD,IAAMC,EAAE,GAAG,MAAMJ,QAAQ,CAACG,YAAY,CAAC;IACvCC,EAAE,CAACC,WAAW,CAACC,SAAS,CAACC,SAAS,IAAI;MAClC;MACA;MACAC,aAAa,CAACC,IAAI,CAACC,SAAS,CAACH,SAAS,CAAC,CAAC;IAC5C,CAAC,CAAC;IACDN,OAAO,CAASG,EAAE,GAAGA,EAAE;IACxB,IAAMO,WAAoD,GAAG,EAAE;IAC/DC,MAAM,CAACC,OAAO,CAACT,EAAE,CAACO,WAAW,CAAC,CAACG,OAAO,CAAC,CAAC,CAACC,cAAc,EAAEC,UAAU,CAAC,KAAK;MACrEL,WAAW,CAACM,IAAI,CAAC;QACbC,IAAI,EAAEH,cAAc;QACpBI,UAAU,EAAEH,UAAU,CAACI,MAAM,CAACC;MAClC,CAAC,CAAC;IACN,CAAC,CAAC;IACF,OAAO;MACHlB,YAAY;MACZQ;IACJ,CAAC;EACL,CAAC;AACL;;AAEA;AACA;AACA;AACA;AACO,SAASW,uBAAuBA,CAAA,EAAG;EACtC,IAAMC,GAAG,GAAG;IACR,MAAMC,YAAYA,CAACrB,YAAoB,EAAEsB,QAAmC,EAAE;MAC1E;MACA;MACA,IAAMC,YAAoB,GAAG,MAAMC,YAAY,CAACxB,YAAY,CAAC;MAE7D,IAAMyB,OAAO,GAAG,IAAI;MACpB,IAAIA,OAAO,EAAE;QACTH,QAAQ,CAACC,YAAY,CAAC;MAC1B,CAAC,MAAM;QACHD,QAAQ,CAAC,IAAII,KAAK,CAAC,0CAA0C,CAAC,CAAC;MACnE;IACJ,CAAC;IACD,MAAMC,YAAYA,CAAC3B,YAAoB,EAAE4B,QAAgB,EAAEN,QAA0C,EAAE;MACnG;MACA;MACA,MAAMO,eAAe,CAAC7B,YAAY,EAAE4B,QAAQ,CAAC;MAE7C,IAAMH,OAAO,GAAG,IAAI,CAAC,CAAE;MACvB,IAAIA,OAAO,EAAE;QACTH,QAAQ,CAAC,IAAI,CAAC;MAClB,CAAC,MAAM;QACHA,QAAQ,CAAC,IAAII,KAAK,CAAC,2DAA2D,CAAC,CAAC;MACpF;IACJ;EACJ,CAAC;EACD,OAAON,GAAG;AACd","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/json-dump/index.js b/dist/cjs/plugins/json-dump/index.js deleted file mode 100644 index 085727593a1..00000000000 --- a/dist/cjs/plugins/json-dump/index.js +++ /dev/null @@ -1,94 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxDBJsonDumpPlugin = void 0; -var _rxQuery = require("../../rx-query.js"); -var _rxError = require("../../rx-error.js"); -var _index = require("../../plugins/utils/index.js"); -/** - * this plugin adds the json export/import capabilities to RxDB - */ - -function dumpRxDatabase(collections) { - var json = { - name: this.name, - instanceToken: this.token, - 
collections: [] - }; - var useCollections = Object.keys(this.collections).filter(colName => !collections || collections.includes(colName)).filter(colName => colName.charAt(0) !== '_').map(colName => this.collections[colName]); - return Promise.all(useCollections.map(col => col.exportJSON())).then(cols => { - json.collections = cols; - return json; - }); -} -var importDumpRxDatabase = function (dump) { - /** - * collections must be created before the import - * because we do not know about the other collection-settings here - */ - var missingCollections = dump.collections.filter(col => !this.collections[col.name]).map(col => col.name); - if (missingCollections.length > 0) { - throw (0, _rxError.newRxError)('JD1', { - missingCollections - }); - } - return Promise.all(dump.collections.map(colDump => this.collections[colDump.name].importJSON(colDump))); -}; -var dumpRxCollection = async function () { - var json = { - name: this.name, - schemaHash: await this.schema.hash, - docs: [] - }; - var query = (0, _rxQuery.createRxQuery)('find', (0, _rxQuery._getDefaultQuery)(), this); - return (0, _rxQuery.queryCollection)(query).then(docs => { - json.docs = docs.map(docData => { - docData = (0, _index.flatClone)(docData); - delete docData._rev; - delete docData._attachments; - return docData; - }); - return json; - }); -}; -async function importDumpRxCollection(exportedJSON) { - // check schemaHash - if (exportedJSON.schemaHash !== (await this.schema.hash)) { - throw (0, _rxError.newRxError)('JD2', { - schemaHash: exportedJSON.schemaHash, - own: this.schema.hash - }); - } - var docs = exportedJSON.docs; - return this.storageInstance.bulkWrite(docs.map(docData => { - var document = Object.assign({}, docData, { - _meta: { - lwt: (0, _index.now)() - }, - _rev: (0, _index.getDefaultRevision)(), - _attachments: {}, - _deleted: false - }); - return { - document - }; - }), 'json-dump-import'); -} -var RxDBJsonDumpPlugin = exports.RxDBJsonDumpPlugin = { - name: 'json-dump', - rxdb: 
true, - prototypes: { - RxDatabase: proto => { - proto.exportJSON = dumpRxDatabase; - proto.importJSON = importDumpRxDatabase; - }, - RxCollection: proto => { - proto.exportJSON = dumpRxCollection; - proto.importJSON = importDumpRxCollection; - } - }, - overwritable: {} -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/json-dump/index.js.map b/dist/cjs/plugins/json-dump/index.js.map deleted file mode 100644 index 226eb6c9709..00000000000 --- a/dist/cjs/plugins/json-dump/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_rxQuery","require","_rxError","_index","dumpRxDatabase","collections","json","name","instanceToken","token","useCollections","Object","keys","filter","colName","includes","charAt","map","Promise","all","col","exportJSON","then","cols","importDumpRxDatabase","dump","missingCollections","length","newRxError","colDump","importJSON","dumpRxCollection","schemaHash","schema","hash","docs","query","createRxQuery","_getDefaultQuery","queryCollection","docData","flatClone","_rev","_attachments","importDumpRxCollection","exportedJSON","own","storageInstance","bulkWrite","document","assign","_meta","lwt","now","getDefaultRevision","_deleted","RxDBJsonDumpPlugin","exports","rxdb","prototypes","RxDatabase","proto","RxCollection","overwritable"],"sources":["../../../../src/plugins/json-dump/index.ts"],"sourcesContent":["/**\n * this plugin adds the json export/import capabilities to RxDB\n */\nimport {\n createRxQuery,\n queryCollection,\n _getDefaultQuery\n} from '../../rx-query.ts';\nimport {\n newRxError\n} from '../../rx-error.ts';\nimport type {\n RxDatabase,\n RxCollection,\n RxPlugin,\n RxDocumentData\n} from '../../types/index.d.ts';\nimport {\n flatClone,\n getDefaultRevision,\n now\n} from '../../plugins/utils/index.ts';\n\nfunction dumpRxDatabase(\n this: RxDatabase,\n collections?: string[]\n): Promise {\n const json: any = {\n name: this.name,\n instanceToken: 
this.token,\n collections: []\n };\n\n const useCollections = Object.keys(this.collections)\n .filter(colName => !collections || collections.includes(colName))\n .filter(colName => colName.charAt(0) !== '_')\n .map(colName => this.collections[colName]);\n\n return Promise.all(\n useCollections\n .map(col => col.exportJSON())\n ).then(cols => {\n json.collections = cols;\n return json;\n });\n}\n\nconst importDumpRxDatabase = function (\n this: RxDatabase,\n dump: any\n) {\n /**\n * collections must be created before the import\n * because we do not know about the other collection-settings here\n */\n const missingCollections = dump.collections\n .filter((col: any) => !this.collections[col.name])\n .map((col: any) => col.name);\n if (missingCollections.length > 0) {\n throw newRxError('JD1', {\n missingCollections\n });\n }\n\n return Promise.all(\n dump.collections\n .map((colDump: any) => this.collections[colDump.name].importJSON(colDump))\n );\n};\n\nconst dumpRxCollection = async function (\n this: RxCollection\n) {\n const json: any = {\n name: this.name,\n schemaHash: await this.schema.hash,\n docs: []\n };\n\n const query = createRxQuery(\n 'find',\n _getDefaultQuery(),\n this\n );\n return queryCollection(query)\n .then((docs: any) => {\n json.docs = docs.map((docData: any) => {\n docData = flatClone(docData);\n delete docData._rev;\n delete docData._attachments;\n return docData;\n });\n return json;\n });\n};\n\nasync function importDumpRxCollection(\n this: RxCollection,\n exportedJSON: any\n): Promise {\n // check schemaHash\n if (exportedJSON.schemaHash !== await this.schema.hash) {\n throw newRxError('JD2', {\n schemaHash: exportedJSON.schemaHash,\n own: this.schema.hash\n });\n }\n\n const docs: RxDocType[] = exportedJSON.docs;\n return this.storageInstance.bulkWrite(\n docs.map(docData => {\n const document: RxDocumentData = Object.assign(\n {},\n docData,\n {\n _meta: {\n lwt: now()\n },\n _rev: getDefaultRevision(),\n _attachments: {},\n _deleted: 
false\n }\n );\n return {\n document\n };\n }),\n 'json-dump-import'\n );\n}\n\nexport const RxDBJsonDumpPlugin: RxPlugin = {\n name: 'json-dump',\n rxdb: true,\n prototypes: {\n RxDatabase: (proto: any) => {\n proto.exportJSON = dumpRxDatabase;\n proto.importJSON = importDumpRxDatabase;\n },\n RxCollection: (proto: any) => {\n proto.exportJSON = dumpRxCollection;\n proto.importJSON = importDumpRxCollection;\n }\n },\n overwritable: {}\n};\n"],"mappings":";;;;;;AAGA,IAAAA,QAAA,GAAAC,OAAA;AAKA,IAAAC,QAAA,GAAAD,OAAA;AASA,IAAAE,MAAA,GAAAF,OAAA;AAjBA;AACA;AACA;;AAqBA,SAASG,cAAcA,CAEnBC,WAAsB,EACV;EACZ,IAAMC,IAAS,GAAG;IACdC,IAAI,EAAE,IAAI,CAACA,IAAI;IACfC,aAAa,EAAE,IAAI,CAACC,KAAK;IACzBJ,WAAW,EAAE;EACjB,CAAC;EAED,IAAMK,cAAc,GAAGC,MAAM,CAACC,IAAI,CAAC,IAAI,CAACP,WAAW,CAAC,CAC/CQ,MAAM,CAACC,OAAO,IAAI,CAACT,WAAW,IAAIA,WAAW,CAACU,QAAQ,CAACD,OAAO,CAAC,CAAC,CAChED,MAAM,CAACC,OAAO,IAAIA,OAAO,CAACE,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAC5CC,GAAG,CAACH,OAAO,IAAI,IAAI,CAACT,WAAW,CAACS,OAAO,CAAC,CAAC;EAE9C,OAAOI,OAAO,CAACC,GAAG,CACdT,cAAc,CACTO,GAAG,CAACG,GAAG,IAAIA,GAAG,CAACC,UAAU,CAAC,CAAC,CACpC,CAAC,CAACC,IAAI,CAACC,IAAI,IAAI;IACXjB,IAAI,CAACD,WAAW,GAAGkB,IAAI;IACvB,OAAOjB,IAAI;EACf,CAAC,CAAC;AACN;AAEA,IAAMkB,oBAAoB,GAAG,SAAAA,CAEzBC,IAAS,EACX;EACE;AACJ;AACA;AACA;EACI,IAAMC,kBAAkB,GAAGD,IAAI,CAACpB,WAAW,CACtCQ,MAAM,CAAEO,GAAQ,IAAK,CAAC,IAAI,CAACf,WAAW,CAACe,GAAG,CAACb,IAAI,CAAC,CAAC,CACjDU,GAAG,CAAEG,GAAQ,IAAKA,GAAG,CAACb,IAAI,CAAC;EAChC,IAAImB,kBAAkB,CAACC,MAAM,GAAG,CAAC,EAAE;IAC/B,MAAM,IAAAC,mBAAU,EAAC,KAAK,EAAE;MACpBF;IACJ,CAAC,CAAC;EACN;EAEA,OAAOR,OAAO,CAACC,GAAG,CACdM,IAAI,CAACpB,WAAW,CACXY,GAAG,CAAEY,OAAY,IAAK,IAAI,CAACxB,WAAW,CAACwB,OAAO,CAACtB,IAAI,CAAC,CAACuB,UAAU,CAACD,OAAO,CAAC,CACjF,CAAC;AACL,CAAC;AAED,IAAME,gBAAgB,GAAG,eAAAA,CAAA,EAEvB;EACE,IAAMzB,IAAS,GAAG;IACdC,IAAI,EAAE,IAAI,CAACA,IAAI;IACfyB,UAAU,EAAE,MAAM,IAAI,CAACC,MAAM,CAACC,IAAI;IAClCC,IAAI,EAAE;EACV,CAAC;EAED,IAAMC,KAAK,GAAG,IAAAC,sBAAa,EACvB,MAAM,EACN,IAAAC,yBAAgB,EAAC,CAAC,EAClB,IACJ,CAAC;EACD,OAAO,IAAAC,w
BAAe,EAACH,KAAK,CAAC,CACxBd,IAAI,CAAEa,IAAS,IAAK;IACjB7B,IAAI,CAAC6B,IAAI,GAAGA,IAAI,CAAClB,GAAG,CAAEuB,OAAY,IAAK;MACnCA,OAAO,GAAG,IAAAC,gBAAS,EAACD,OAAO,CAAC;MAC5B,OAAOA,OAAO,CAACE,IAAI;MACnB,OAAOF,OAAO,CAACG,YAAY;MAC3B,OAAOH,OAAO;IAClB,CAAC,CAAC;IACF,OAAOlC,IAAI;EACf,CAAC,CAAC;AACV,CAAC;AAED,eAAesC,sBAAsBA,CAEjCC,YAAiB,EACL;EACZ;EACA,IAAIA,YAAY,CAACb,UAAU,MAAK,MAAM,IAAI,CAACC,MAAM,CAACC,IAAI,GAAE;IACpD,MAAM,IAAAN,mBAAU,EAAC,KAAK,EAAE;MACpBI,UAAU,EAAEa,YAAY,CAACb,UAAU;MACnCc,GAAG,EAAE,IAAI,CAACb,MAAM,CAACC;IACrB,CAAC,CAAC;EACN;EAEA,IAAMC,IAAiB,GAAGU,YAAY,CAACV,IAAI;EAC3C,OAAO,IAAI,CAACY,eAAe,CAACC,SAAS,CACjCb,IAAI,CAAClB,GAAG,CAACuB,OAAO,IAAI;IAChB,IAAMS,QAAmC,GAAGtC,MAAM,CAACuC,MAAM,CACrD,CAAC,CAAC,EACFV,OAAO,EACP;MACIW,KAAK,EAAE;QACHC,GAAG,EAAE,IAAAC,UAAG,EAAC;MACb,CAAC;MACDX,IAAI,EAAE,IAAAY,yBAAkB,EAAC,CAAC;MAC1BX,YAAY,EAAE,CAAC,CAAC;MAChBY,QAAQ,EAAE;IACd,CACJ,CAAC;IACD,OAAO;MACHN;IACJ,CAAC;EACL,CAAC,CAAC,EACF,kBACJ,CAAC;AACL;AAEO,IAAMO,kBAA4B,GAAAC,OAAA,CAAAD,kBAAA,GAAG;EACxCjD,IAAI,EAAE,WAAW;EACjBmD,IAAI,EAAE,IAAI;EACVC,UAAU,EAAE;IACRC,UAAU,EAAGC,KAAU,IAAK;MACxBA,KAAK,CAACxC,UAAU,GAAGjB,cAAc;MACjCyD,KAAK,CAAC/B,UAAU,GAAGN,oBAAoB;IAC3C,CAAC;IACDsC,YAAY,EAAGD,KAAU,IAAK;MAC1BA,KAAK,CAACxC,UAAU,GAAGU,gBAAgB;MACnC8B,KAAK,CAAC/B,UAAU,GAAGc,sBAAsB;IAC7C;EACJ,CAAC;EACDmB,YAAY,EAAE,CAAC;AACnB,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/key-compression/index.js b/dist/cjs/plugins/key-compression/index.js deleted file mode 100644 index 0638dfbdfcf..00000000000 --- a/dist/cjs/plugins/key-compression/index.js +++ /dev/null @@ -1,136 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.compressDocumentData = compressDocumentData; -exports.decompressDocumentData = decompressDocumentData; -exports.getCompressionStateByRxJsonSchema = getCompressionStateByRxJsonSchema; -exports.wrappedKeyCompressionStorage = wrappedKeyCompressionStorage; -var _jsonschemaKeyCompression = 
require("jsonschema-key-compression"); -var _overwritable = require("../../overwritable.js"); -var _pluginHelpers = require("../../plugin-helpers.js"); -var _rxSchemaHelper = require("../../rx-schema-helper.js"); -var _rxStorageHelper = require("../../rx-storage-helper.js"); -var _index = require("../../plugins/utils/index.js"); -var _rxQuery = require("../../rx-query.js"); -/** - * this plugin adds the keycompression-capabilities to rxdb - * if you don't use this, ensure that you set disableKeyCompression to false in your schema - */ - -/** - * Cache the compression table and the compressed schema - * by the storage instance for better performance. - */ -var COMPRESSION_STATE_BY_SCHEMA = new WeakMap(); -function getCompressionStateByRxJsonSchema(schema) { - /** - * Because we cache the state by the JsonSchema, - * it must be assured that the given schema object - * is never mutated. - */ - _overwritable.overwritable.deepFreezeWhenDevMode(schema); - return (0, _index.getFromMapOrCreate)(COMPRESSION_STATE_BY_SCHEMA, schema, () => { - var compressionSchema = (0, _index.flatClone)(schema); - delete compressionSchema.primaryKey; - var table = (0, _jsonschemaKeyCompression.createCompressionTable)(compressionSchema, _jsonschemaKeyCompression.DEFAULT_COMPRESSION_FLAG, [ - /** - * Do not compress the primary field - * for easier debugging. 
- */ - (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(schema.primaryKey), '_rev', '_attachments', '_deleted', '_meta']); - delete compressionSchema.primaryKey; - var compressedSchema = (0, _jsonschemaKeyCompression.createCompressedJsonSchema)(table, compressionSchema); - - // also compress primary key - if (typeof schema.primaryKey !== 'string') { - var composedPrimary = schema.primaryKey; - var newComposedPrimary = { - key: (0, _jsonschemaKeyCompression.compressedPath)(table, composedPrimary.key), - fields: composedPrimary.fields.map(field => (0, _jsonschemaKeyCompression.compressedPath)(table, field)), - separator: composedPrimary.separator - }; - compressedSchema.primaryKey = newComposedPrimary; - } else { - compressedSchema.primaryKey = (0, _jsonschemaKeyCompression.compressedPath)(table, schema.primaryKey); - } - - /** - * the key compression module does not know about indexes - * in the schema, so we have to also compress them here. - */ - if (schema.indexes) { - var newIndexes = schema.indexes.map(idx => { - if ((0, _index.isMaybeReadonlyArray)(idx)) { - return idx.map(subIdx => (0, _jsonschemaKeyCompression.compressedPath)(table, subIdx)); - } else { - return (0, _jsonschemaKeyCompression.compressedPath)(table, idx); - } - }); - compressedSchema.indexes = newIndexes; - } - var compressionState = { - table, - schema, - compressedSchema - }; - return compressionState; - }); -} -function wrappedKeyCompressionStorage(args) { - return Object.assign({}, args.storage, { - async createStorageInstance(params) { - if (!params.schema.keyCompression) { - return args.storage.createStorageInstance(params); - } - var compressionState = getCompressionStateByRxJsonSchema(params.schema); - function modifyToStorage(docData) { - var ret = compressDocumentData(compressionState, docData); - return ret; - } - function modifyFromStorage(docData) { - return decompressDocumentData(compressionState, docData); - } - - /** - * Because this wrapper resolves the key-compression, - * we 
can set the flag to false - * which allows underlying storages to detect wrong configurations - * like when keyCompression is set to false but no key-compression module is used. - */ - var childSchema = (0, _index.flatClone)(compressionState.compressedSchema); - childSchema.keyCompression = false; - var instance = await args.storage.createStorageInstance(Object.assign({}, params, { - schema: childSchema - })); - var wrappedInstance = (0, _pluginHelpers.wrapRxStorageInstance)(params.schema, instance, modifyToStorage, modifyFromStorage); - var overwriteMethods = ['query', 'count']; - overwriteMethods.forEach(methodName => { - var methodBefore = wrappedInstance[methodName].bind(wrappedInstance); - wrappedInstance[methodName] = async preparedQuery => { - var compressedQuery = (0, _jsonschemaKeyCompression.compressQuery)(compressionState.table, preparedQuery.query); - var compressedPreparedQuery = (0, _rxQuery.prepareQuery)(compressionState.compressedSchema, compressedQuery); - return methodBefore(compressedPreparedQuery); - }; - }); - return wrappedInstance; - } - }); -} -function compressDocumentData(compressionState, docData) { - /** - * Do not send attachments to compressObject() - * because it will deep clone which does not work on Blob or Buffer. 
- */ - docData = (0, _rxStorageHelper.flatCloneDocWithMeta)(docData); - var attachments = docData._attachments; - delete docData._attachments; - docData = (0, _jsonschemaKeyCompression.compressObject)(compressionState.table, docData); - docData._attachments = attachments; - return docData; -} -function decompressDocumentData(compressionState, docData) { - return (0, _jsonschemaKeyCompression.decompressObject)(compressionState.table, docData); -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/key-compression/index.js.map b/dist/cjs/plugins/key-compression/index.js.map deleted file mode 100644 index e8854f3deda..00000000000 --- a/dist/cjs/plugins/key-compression/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_jsonschemaKeyCompression","require","_overwritable","_pluginHelpers","_rxSchemaHelper","_rxStorageHelper","_index","_rxQuery","COMPRESSION_STATE_BY_SCHEMA","WeakMap","getCompressionStateByRxJsonSchema","schema","overwritable","deepFreezeWhenDevMode","getFromMapOrCreate","compressionSchema","flatClone","primaryKey","table","createCompressionTable","DEFAULT_COMPRESSION_FLAG","getPrimaryFieldOfPrimaryKey","compressedSchema","createCompressedJsonSchema","composedPrimary","newComposedPrimary","key","compressedPath","fields","map","field","separator","indexes","newIndexes","idx","isMaybeReadonlyArray","subIdx","compressionState","wrappedKeyCompressionStorage","args","Object","assign","storage","createStorageInstance","params","keyCompression","modifyToStorage","docData","ret","compressDocumentData","modifyFromStorage","decompressDocumentData","childSchema","instance","wrappedInstance","wrapRxStorageInstance","overwriteMethods","forEach","methodName","methodBefore","bind","preparedQuery","compressedQuery","compressQuery","query","compressedPreparedQuery","prepareQuery","flatCloneDocWithMeta","attachments","_attachments","compressObject","decompressObject"],"sources":["../../../../src/plugi
ns/key-compression/index.ts"],"sourcesContent":["/**\n * this plugin adds the keycompression-capabilities to rxdb\n * if you don't use this, ensure that you set disableKeyCompression to false in your schema\n */\n\nimport {\n createCompressionTable,\n CompressionTable,\n JsonSchema as KeyCompressionJsonSchema,\n compressObject,\n decompressObject,\n compressedPath,\n DEFAULT_COMPRESSION_FLAG,\n createCompressedJsonSchema,\n compressQuery\n} from 'jsonschema-key-compression';\nimport {\n overwritable\n} from '../../overwritable.ts';\nimport { wrapRxStorageInstance } from '../../plugin-helpers.ts';\nimport { getPrimaryFieldOfPrimaryKey } from '../../rx-schema-helper.ts';\nimport { flatCloneDocWithMeta } from '../../rx-storage-helper.ts';\n\nimport type {\n RxJsonSchema,\n CompositePrimaryKey,\n RxStorage,\n RxStorageInstanceCreationParams,\n RxDocumentData,\n FilledMangoQuery,\n PreparedQuery,\n RxDocumentWriteData\n} from '../../types/index.d.ts';\nimport {\n clone,\n flatClone,\n getFromMapOrCreate,\n isMaybeReadonlyArray\n} from '../../plugins/utils/index.ts';\nimport { prepareQuery } from '../../rx-query.ts';\n\ndeclare type CompressionState = {\n table: CompressionTable;\n schema: RxJsonSchema;\n compressedSchema: RxJsonSchema;\n};\n\n/**\n * Cache the compression table and the compressed schema\n * by the storage instance for better performance.\n */\nconst COMPRESSION_STATE_BY_SCHEMA: WeakMap<\n RxJsonSchema,\n CompressionState\n> = new WeakMap();\n\n\nexport function getCompressionStateByRxJsonSchema(\n schema: RxJsonSchema\n): CompressionState {\n /**\n * Because we cache the state by the JsonSchema,\n * it must be assured that the given schema object\n * is never mutated.\n */\n overwritable.deepFreezeWhenDevMode(schema);\n\n return getFromMapOrCreate(\n COMPRESSION_STATE_BY_SCHEMA,\n schema,\n () => {\n const compressionSchema: KeyCompressionJsonSchema = flatClone(schema) as any;\n delete (compressionSchema as any).primaryKey;\n\n const table = 
createCompressionTable(\n compressionSchema,\n DEFAULT_COMPRESSION_FLAG,\n [\n /**\n * Do not compress the primary field\n * for easier debugging.\n */\n getPrimaryFieldOfPrimaryKey(schema.primaryKey),\n '_rev',\n '_attachments',\n '_deleted',\n '_meta'\n ]\n );\n\n delete (compressionSchema as any).primaryKey;\n const compressedSchema: RxJsonSchema = createCompressedJsonSchema(\n table,\n compressionSchema\n ) as RxJsonSchema;\n\n // also compress primary key\n if (typeof schema.primaryKey !== 'string') {\n const composedPrimary: CompositePrimaryKey = schema.primaryKey;\n const newComposedPrimary: CompositePrimaryKey = {\n key: compressedPath(table, composedPrimary.key as string),\n fields: composedPrimary.fields.map(field => compressedPath(table, field as string)),\n separator: composedPrimary.separator\n };\n compressedSchema.primaryKey = newComposedPrimary;\n } else {\n compressedSchema.primaryKey = compressedPath(table, schema.primaryKey);\n }\n\n /**\n * the key compression module does not know about indexes\n * in the schema, so we have to also compress them here.\n */\n if (schema.indexes) {\n const newIndexes = schema.indexes.map(idx => {\n if (isMaybeReadonlyArray(idx)) {\n return idx.map(subIdx => compressedPath(table, subIdx));\n } else {\n return compressedPath(table, idx);\n }\n });\n compressedSchema.indexes = newIndexes;\n }\n\n const compressionState = {\n table,\n schema,\n compressedSchema\n };\n return compressionState;\n }\n );\n}\n\nexport function wrappedKeyCompressionStorage(\n args: {\n storage: RxStorage;\n }\n): RxStorage {\n return Object.assign(\n {},\n args.storage,\n {\n async createStorageInstance(\n params: RxStorageInstanceCreationParams\n ) {\n if (!params.schema.keyCompression) {\n return args.storage.createStorageInstance(params);\n }\n\n const compressionState = getCompressionStateByRxJsonSchema(params.schema);\n function modifyToStorage(docData: RxDocumentWriteData) {\n const ret = compressDocumentData(compressionState, 
docData);\n return ret;\n }\n function modifyFromStorage(docData: RxDocumentData): Promise> {\n return decompressDocumentData(compressionState, docData);\n }\n\n /**\n * Because this wrapper resolves the key-compression,\n * we can set the flag to false\n * which allows underlying storages to detect wrong configurations\n * like when keyCompression is set to false but no key-compression module is used.\n */\n const childSchema = flatClone(compressionState.compressedSchema);\n childSchema.keyCompression = false;\n\n const instance = await args.storage.createStorageInstance(\n Object.assign(\n {},\n params,\n {\n schema: childSchema\n }\n )\n );\n\n const wrappedInstance = wrapRxStorageInstance(\n params.schema,\n instance,\n modifyToStorage,\n modifyFromStorage\n );\n\n\n const overwriteMethods = ['query', 'count'] as const;\n overwriteMethods.forEach(methodName => {\n const methodBefore = wrappedInstance[methodName].bind(wrappedInstance);\n (wrappedInstance as any)[methodName] = async (preparedQuery: PreparedQuery) => {\n const compressedQuery: FilledMangoQuery = compressQuery(\n compressionState.table,\n preparedQuery.query as any\n ) as any;\n\n const compressedPreparedQuery = prepareQuery(\n compressionState.compressedSchema,\n compressedQuery\n );\n return methodBefore(compressedPreparedQuery);\n }\n });\n\n return wrappedInstance;\n }\n }\n );\n}\n\nexport function compressDocumentData(\n compressionState: CompressionState,\n docData: RxDocumentData\n): RxDocumentData {\n /**\n * Do not send attachments to compressObject()\n * because it will deep clone which does not work on Blob or Buffer.\n */\n docData = flatCloneDocWithMeta(docData);\n const attachments = docData._attachments;\n delete docData._attachments;\n\n docData = compressObject(\n compressionState.table,\n docData\n );\n docData._attachments = attachments;\n return docData;\n}\n\nexport function decompressDocumentData(\n compressionState: CompressionState,\n docData: RxDocumentData\n): 
RxDocumentData {\n return decompressObject(\n compressionState.table,\n docData\n );\n}\n"],"mappings":";;;;;;;;;AAKA,IAAAA,yBAAA,GAAAC,OAAA;AAWA,IAAAC,aAAA,GAAAD,OAAA;AAGA,IAAAE,cAAA,GAAAF,OAAA;AACA,IAAAG,eAAA,GAAAH,OAAA;AACA,IAAAI,gBAAA,GAAAJ,OAAA;AAYA,IAAAK,MAAA,GAAAL,OAAA;AAMA,IAAAM,QAAA,GAAAN,OAAA;AAvCA;AACA;AACA;AACA;;AA4CA;AACA;AACA;AACA;AACA,IAAMO,2BAGL,GAAG,IAAIC,OAAO,CAAC,CAAC;AAGV,SAASC,iCAAiCA,CAC7CC,MAAyB,EACT;EAChB;AACJ;AACA;AACA;AACA;EACIC,0BAAY,CAACC,qBAAqB,CAACF,MAAM,CAAC;EAE1C,OAAO,IAAAG,yBAAkB,EACrBN,2BAA2B,EAC3BG,MAAM,EACN,MAAM;IACF,IAAMI,iBAA2C,GAAG,IAAAC,gBAAS,EAACL,MAAM,CAAQ;IAC5E,OAAQI,iBAAiB,CAASE,UAAU;IAE5C,IAAMC,KAAK,GAAG,IAAAC,gDAAsB,EAChCJ,iBAAiB,EACjBK,kDAAwB,EACxB;IACI;AACpB;AACA;AACA;IACoB,IAAAC,2CAA2B,EAACV,MAAM,CAACM,UAAU,CAAC,EAC9C,MAAM,EACN,cAAc,EACd,UAAU,EACV,OAAO,CAEf,CAAC;IAED,OAAQF,iBAAiB,CAASE,UAAU;IAC5C,IAAMK,gBAAmC,GAAG,IAAAC,oDAA0B,EAClEL,KAAK,EACLH,iBACJ,CAAsB;;IAEtB;IACA,IAAI,OAAOJ,MAAM,CAACM,UAAU,KAAK,QAAQ,EAAE;MACvC,IAAMO,eAAyC,GAAGb,MAAM,CAACM,UAAU;MACnE,IAAMQ,kBAA4C,GAAG;QACjDC,GAAG,EAAE,IAAAC,wCAAc,EAACT,KAAK,EAAEM,eAAe,CAACE,GAAa,CAAC;QACzDE,MAAM,EAAEJ,eAAe,CAACI,MAAM,CAACC,GAAG,CAACC,KAAK,IAAI,IAAAH,wCAAc,EAACT,KAAK,EAAEY,KAAe,CAAC,CAAC;QACnFC,SAAS,EAAEP,eAAe,CAACO;MAC/B,CAAC;MACDT,gBAAgB,CAACL,UAAU,GAAGQ,kBAAkB;IACpD,CAAC,MAAM;MACHH,gBAAgB,CAACL,UAAU,GAAG,IAAAU,wCAAc,EAACT,KAAK,EAAEP,MAAM,CAACM,UAAU,CAAC;IAC1E;;IAEA;AACZ;AACA;AACA;IACY,IAAIN,MAAM,CAACqB,OAAO,EAAE;MAChB,IAAMC,UAAU,GAAGtB,MAAM,CAACqB,OAAO,CAACH,GAAG,CAACK,GAAG,IAAI;QACzC,IAAI,IAAAC,2BAAoB,EAACD,GAAG,CAAC,EAAE;UAC3B,OAAOA,GAAG,CAACL,GAAG,CAACO,MAAM,IAAI,IAAAT,wCAAc,EAACT,KAAK,EAAEkB,MAAM,CAAC,CAAC;QAC3D,CAAC,MAAM;UACH,OAAO,IAAAT,wCAAc,EAACT,KAAK,EAAEgB,GAAG,CAAC;QACrC;MACJ,CAAC,CAAC;MACFZ,gBAAgB,CAACU,OAAO,GAAGC,UAAU;IACzC;IAEA,IAAMI,gBAAgB,GAAG;MACrBnB,KAAK;MACLP,MAAM;MACNW;IACJ,CAAC;IACD,OAAOe,gBAAgB;EAC3B,CACJ,CAAC;AACL;AAEO,SAASC,4BAA4BA,CACxCC,IAEC,EAC4C;EAC7C,OAAOC,MAAM,CAACC,MAAM,CAChB,CAAC,CAAC,EACFF,IAAI,CAACG,OAAO,EACZ;IACI,MAAMC,qBAAqBA,C
ACvBC,MAAuD,EACzD;MACE,IAAI,CAACA,MAAM,CAACjC,MAAM,CAACkC,cAAc,EAAE;QAC/B,OAAON,IAAI,CAACG,OAAO,CAACC,qBAAqB,CAACC,MAAM,CAAC;MACrD;MAEA,IAAMP,gBAAgB,GAAG3B,iCAAiC,CAACkC,MAAM,CAACjC,MAAM,CAAC;MACzE,SAASmC,eAAeA,CAACC,OAAuC,EAAE;QAC9D,IAAMC,GAAG,GAAGC,oBAAoB,CAACZ,gBAAgB,EAAEU,OAAO,CAAC;QAC3D,OAAOC,GAAG;MACd;MACA,SAASE,iBAAiBA,CAACH,OAA4B,EAAsC;QACzF,OAAOI,sBAAsB,CAACd,gBAAgB,EAAEU,OAAO,CAAC;MAC5D;;MAEA;AAChB;AACA;AACA;AACA;AACA;MACgB,IAAMK,WAAW,GAAG,IAAApC,gBAAS,EAACqB,gBAAgB,CAACf,gBAAgB,CAAC;MAChE8B,WAAW,CAACP,cAAc,GAAG,KAAK;MAElC,IAAMQ,QAAQ,GAAG,MAAMd,IAAI,CAACG,OAAO,CAACC,qBAAqB,CACrDH,MAAM,CAACC,MAAM,CACT,CAAC,CAAC,EACFG,MAAM,EACN;QACIjC,MAAM,EAAEyC;MACZ,CACJ,CACJ,CAAC;MAED,IAAME,eAAe,GAAG,IAAAC,oCAAqB,EACzCX,MAAM,CAACjC,MAAM,EACb0C,QAAQ,EACRP,eAAe,EACfI,iBACJ,CAAC;MAGD,IAAMM,gBAAgB,GAAG,CAAC,OAAO,EAAE,OAAO,CAAU;MACpDA,gBAAgB,CAACC,OAAO,CAACC,UAAU,IAAI;QACnC,IAAMC,YAAY,GAAGL,eAAe,CAACI,UAAU,CAAC,CAACE,IAAI,CAACN,eAAe,CAAC;QACrEA,eAAe,CAASI,UAAU,CAAC,GAAG,MAAOG,aAAuC,IAAK;UACtF,IAAMC,eAA4C,GAAG,IAAAC,uCAAa,EAC9D1B,gBAAgB,CAACnB,KAAK,EACtB2C,aAAa,CAACG,KAClB,CAAQ;UAER,IAAMC,uBAAuB,GAAG,IAAAC,qBAAY,EACxC7B,gBAAgB,CAACf,gBAAgB,EACjCwC,eACJ,CAAC;UACD,OAAOH,YAAY,CAACM,uBAAuB,CAAC;QAChD,CAAC;MACL,CAAC,CAAC;MAEF,OAAOX,eAAe;IAC1B;EACJ,CACJ,CAAC;AACL;AAEO,SAASL,oBAAoBA,CAChCZ,gBAAkC,EAClCU,OAA4B,EACT;EACnB;AACJ;AACA;AACA;EACIA,OAAO,GAAG,IAAAoB,qCAAoB,EAACpB,OAAO,CAAC;EACvC,IAAMqB,WAAW,GAAGrB,OAAO,CAACsB,YAAY;EACxC,OAAOtB,OAAO,CAACsB,YAAY;EAE3BtB,OAAO,GAAG,IAAAuB,wCAAc,EACpBjC,gBAAgB,CAACnB,KAAK,EACtB6B,OACJ,CAAC;EACDA,OAAO,CAACsB,YAAY,GAAGD,WAAW;EAClC,OAAOrB,OAAO;AAClB;AAEO,SAASI,sBAAsBA,CAClCd,gBAAkC,EAClCU,OAA4B,EACT;EACnB,OAAO,IAAAwB,0CAAgB,EACnBlC,gBAAgB,CAACnB,KAAK,EACtB6B,OACJ,CAAC;AACL","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/leader-election/index.js b/dist/cjs/plugins/leader-election/index.js deleted file mode 100644 index 333ef9cfca8..00000000000 --- a/dist/cjs/plugins/leader-election/index.js +++ /dev/null @@ -1,98 +0,0 @@ 
-"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxDBLeaderElectionPlugin = void 0; -exports.getForDatabase = getForDatabase; -exports.getLeaderElectorByBroadcastChannel = getLeaderElectorByBroadcastChannel; -exports.isLeader = isLeader; -exports.onDestroy = onDestroy; -exports.rxdb = exports.prototypes = void 0; -exports.waitForLeadership = waitForLeadership; -var _broadcastChannel = require("broadcast-channel"); -var _rxStorageMultiinstance = require("../../rx-storage-multiinstance.js"); -var _index = require("../utils/index.js"); -/** - * this plugin adds the leader-election-capabilities to rxdb - */ - -var LEADER_ELECTORS_OF_DB = new WeakMap(); -var LEADER_ELECTOR_BY_BROADCAST_CHANNEL = new WeakMap(); - -/** - * Returns the leader elector of a broadcast channel. - * Used to ensure we reuse the same elector for the channel each time. - */ -function getLeaderElectorByBroadcastChannel(broadcastChannel) { - return (0, _index.getFromMapOrCreate)(LEADER_ELECTOR_BY_BROADCAST_CHANNEL, broadcastChannel, () => (0, _broadcastChannel.createLeaderElection)(broadcastChannel)); -} - -/** - * @overwrites RxDatabase().leaderElector for caching - */ -function getForDatabase() { - var broadcastChannel = (0, _rxStorageMultiinstance.getBroadcastChannelReference)(this.storage.name, this.token, this.name, this); - - /** - * Clean up the reference on RxDatabase.destroy() - */ - var oldDestroy = this.destroy.bind(this); - this.destroy = function () { - (0, _rxStorageMultiinstance.removeBroadcastChannelReference)(this.token, this); - return oldDestroy(); - }; - var elector = getLeaderElectorByBroadcastChannel(broadcastChannel); - if (!elector) { - elector = getLeaderElectorByBroadcastChannel(broadcastChannel); - LEADER_ELECTORS_OF_DB.set(this, elector); - } - - /** - * Overwrite for caching - */ - this.leaderElector = () => elector; - return elector; -} -function isLeader() { - if (!this.multiInstance) { - return true; - } - return 
this.leaderElector().isLeader; -} -function waitForLeadership() { - if (!this.multiInstance) { - return _index.PROMISE_RESOLVE_TRUE; - } else { - return this.leaderElector().awaitLeadership().then(() => true); - } -} - -/** - * runs when the database gets destroyed - */ -function onDestroy(db) { - var has = LEADER_ELECTORS_OF_DB.get(db); - if (has) { - has.die(); - } -} -var rxdb = exports.rxdb = true; -var prototypes = exports.prototypes = { - RxDatabase: proto => { - proto.leaderElector = getForDatabase; - proto.isLeader = isLeader; - proto.waitForLeadership = waitForLeadership; - } -}; -var RxDBLeaderElectionPlugin = exports.RxDBLeaderElectionPlugin = { - name: 'leader-election', - rxdb, - prototypes, - hooks: { - preDestroyRxDatabase: { - after: onDestroy - } - } -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/leader-election/index.js.map b/dist/cjs/plugins/leader-election/index.js.map deleted file mode 100644 index 801313a32b6..00000000000 --- a/dist/cjs/plugins/leader-election/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_broadcastChannel","require","_rxStorageMultiinstance","_index","LEADER_ELECTORS_OF_DB","WeakMap","LEADER_ELECTOR_BY_BROADCAST_CHANNEL","getLeaderElectorByBroadcastChannel","broadcastChannel","getFromMapOrCreate","createLeaderElection","getForDatabase","getBroadcastChannelReference","storage","name","token","oldDestroy","destroy","bind","removeBroadcastChannelReference","elector","set","leaderElector","isLeader","multiInstance","waitForLeadership","PROMISE_RESOLVE_TRUE","awaitLeadership","then","onDestroy","db","has","get","die","rxdb","exports","prototypes","RxDatabase","proto","RxDBLeaderElectionPlugin","hooks","preDestroyRxDatabase","after"],"sources":["../../../../src/plugins/leader-election/index.ts"],"sourcesContent":["/**\n * this plugin adds the leader-election-capabilities to rxdb\n */\n\nimport {\n createLeaderElection,\n LeaderElector,\n 
BroadcastChannel\n} from 'broadcast-channel';\nimport {\n getBroadcastChannelReference,\n removeBroadcastChannelReference\n} from '../../rx-storage-multiinstance.ts';\n\nimport type {\n RxDatabase,\n RxPlugin\n} from '../../types/index.d.ts';\nimport { PROMISE_RESOLVE_TRUE, getFromMapOrCreate } from '../utils/index.ts';\n\nconst LEADER_ELECTORS_OF_DB: WeakMap = new WeakMap();\nconst LEADER_ELECTOR_BY_BROADCAST_CHANNEL: WeakMap = new WeakMap();\n\n\n/**\n * Returns the leader elector of a broadcast channel.\n * Used to ensure we reuse the same elector for the channel each time.\n */\nexport function getLeaderElectorByBroadcastChannel(broadcastChannel: BroadcastChannel): LeaderElector {\n return getFromMapOrCreate(\n LEADER_ELECTOR_BY_BROADCAST_CHANNEL,\n broadcastChannel,\n () => createLeaderElection(broadcastChannel)\n );\n}\n\n/**\n * @overwrites RxDatabase().leaderElector for caching\n */\nexport function getForDatabase(this: RxDatabase): LeaderElector {\n\n\n const broadcastChannel = getBroadcastChannelReference(\n this.storage.name,\n this.token,\n this.name,\n this\n );\n\n /**\n * Clean up the reference on RxDatabase.destroy()\n */\n const oldDestroy = this.destroy.bind(this);\n this.destroy = function () {\n removeBroadcastChannelReference(this.token, this);\n return oldDestroy();\n };\n\n\n let elector = getLeaderElectorByBroadcastChannel(broadcastChannel);\n if (!elector) {\n elector = getLeaderElectorByBroadcastChannel(broadcastChannel);\n LEADER_ELECTORS_OF_DB.set(\n this,\n elector\n );\n }\n\n /**\n * Overwrite for caching\n */\n this.leaderElector = () => elector;\n\n return elector;\n}\n\nexport function isLeader(this: RxDatabase): boolean {\n if (!this.multiInstance) {\n return true;\n }\n return this.leaderElector().isLeader;\n}\n\nexport function waitForLeadership(this: RxDatabase): Promise {\n if (!this.multiInstance) {\n return PROMISE_RESOLVE_TRUE;\n } else {\n return this.leaderElector()\n .awaitLeadership()\n .then(() => true);\n 
}\n}\n\n/**\n * runs when the database gets destroyed\n */\nexport function onDestroy(db: RxDatabase) {\n const has = LEADER_ELECTORS_OF_DB.get(db);\n if (has) {\n has.die();\n }\n}\n\nexport const rxdb = true;\nexport const prototypes = {\n RxDatabase: (proto: any) => {\n proto.leaderElector = getForDatabase;\n proto.isLeader = isLeader;\n proto.waitForLeadership = waitForLeadership;\n }\n};\n\nexport const RxDBLeaderElectionPlugin: RxPlugin = {\n name: 'leader-election',\n rxdb,\n prototypes,\n hooks: {\n preDestroyRxDatabase: {\n after: onDestroy\n }\n }\n};\n"],"mappings":";;;;;;;;;;;;AAIA,IAAAA,iBAAA,GAAAC,OAAA;AAKA,IAAAC,uBAAA,GAAAD,OAAA;AASA,IAAAE,MAAA,GAAAF,OAAA;AAlBA;AACA;AACA;;AAkBA,IAAMG,qBAAyD,GAAG,IAAIC,OAAO,CAAC,CAAC;AAC/E,IAAMC,mCAA6E,GAAG,IAAID,OAAO,CAAC,CAAC;;AAGnG;AACA;AACA;AACA;AACO,SAASE,kCAAkCA,CAACC,gBAAkC,EAAiB;EAClG,OAAO,IAAAC,yBAAkB,EACrBH,mCAAmC,EACnCE,gBAAgB,EAChB,MAAM,IAAAE,sCAAoB,EAACF,gBAAgB,CAC/C,CAAC;AACL;;AAEA;AACA;AACA;AACO,SAASG,cAAcA,CAAA,EAAkC;EAG5D,IAAMH,gBAAgB,GAAG,IAAAI,oDAA4B,EACjD,IAAI,CAACC,OAAO,CAACC,IAAI,EACjB,IAAI,CAACC,KAAK,EACV,IAAI,CAACD,IAAI,EACT,IACJ,CAAC;;EAED;AACJ;AACA;EACI,IAAME,UAAU,GAAG,IAAI,CAACC,OAAO,CAACC,IAAI,CAAC,IAAI,CAAC;EAC1C,IAAI,CAACD,OAAO,GAAG,YAAY;IACvB,IAAAE,uDAA+B,EAAC,IAAI,CAACJ,KAAK,EAAE,IAAI,CAAC;IACjD,OAAOC,UAAU,CAAC,CAAC;EACvB,CAAC;EAGD,IAAII,OAAO,GAAGb,kCAAkC,CAACC,gBAAgB,CAAC;EAClE,IAAI,CAACY,OAAO,EAAE;IACVA,OAAO,GAAGb,kCAAkC,CAACC,gBAAgB,CAAC;IAC9DJ,qBAAqB,CAACiB,GAAG,CACrB,IAAI,EACJD,OACJ,CAAC;EACL;;EAEA;AACJ;AACA;EACI,IAAI,CAACE,aAAa,GAAG,MAAMF,OAAO;EAElC,OAAOA,OAAO;AAClB;AAEO,SAASG,QAAQA,CAAA,EAA4B;EAChD,IAAI,CAAC,IAAI,CAACC,aAAa,EAAE;IACrB,OAAO,IAAI;EACf;EACA,OAAO,IAAI,CAACF,aAAa,CAAC,CAAC,CAACC,QAAQ;AACxC;AAEO,SAASE,iBAAiBA,CAAA,EAAqC;EAClE,IAAI,CAAC,IAAI,CAACD,aAAa,EAAE;IACrB,OAAOE,2BAAoB;EAC/B,CAAC,MAAM;IACH,OAAO,IAAI,CAACJ,aAAa,CAAC,CAAC,CACtBK,eAAe,CAAC,CAAC,CACjBC,IAAI,CAAC,MAAM,IAAI,CAAC;EACzB;AACJ;;AAEA;AACA;AACA;AACO,SAASC,SAASA,CAACC,EAAc,EAAE;EACtC,IAAMC,GAAG,GAAG3B,qBAAqB,C
AAC4B,GAAG,CAACF,EAAE,CAAC;EACzC,IAAIC,GAAG,EAAE;IACLA,GAAG,CAACE,GAAG,CAAC,CAAC;EACb;AACJ;AAEO,IAAMC,IAAI,GAAAC,OAAA,CAAAD,IAAA,GAAG,IAAI;AACjB,IAAME,UAAU,GAAAD,OAAA,CAAAC,UAAA,GAAG;EACtBC,UAAU,EAAGC,KAAU,IAAK;IACxBA,KAAK,CAAChB,aAAa,GAAGX,cAAc;IACpC2B,KAAK,CAACf,QAAQ,GAAGA,QAAQ;IACzBe,KAAK,CAACb,iBAAiB,GAAGA,iBAAiB;EAC/C;AACJ,CAAC;AAEM,IAAMc,wBAAkC,GAAAJ,OAAA,CAAAI,wBAAA,GAAG;EAC9CzB,IAAI,EAAE,iBAAiB;EACvBoB,IAAI;EACJE,UAAU;EACVI,KAAK,EAAE;IACHC,oBAAoB,EAAE;MAClBC,KAAK,EAAEb;IACX;EACJ;AACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/local-documents/index.js b/dist/cjs/plugins/local-documents/index.js deleted file mode 100644 index 81b884158fb..00000000000 --- a/dist/cjs/plugins/local-documents/index.js +++ /dev/null @@ -1,109 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _exportNames = { - RxDBLocalDocumentsPlugin: true -}; -exports.RxDBLocalDocumentsPlugin = void 0; -var _localDocuments = require("./local-documents.js"); -Object.keys(_localDocuments).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _localDocuments[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _localDocuments[key]; - } - }); -}); -var _localDocumentsHelper = require("./local-documents-helper.js"); -Object.keys(_localDocumentsHelper).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _localDocumentsHelper[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _localDocumentsHelper[key]; - } - }); -}); -var _rxLocalDocument = require("./rx-local-document.js"); -Object.keys(_rxLocalDocument).forEach(function (key) { - if (key 
=== "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _rxLocalDocument[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxLocalDocument[key]; - } - }); -}); -var RxDBLocalDocumentsPlugin = exports.RxDBLocalDocumentsPlugin = { - name: 'local-documents', - rxdb: true, - prototypes: { - RxCollection: proto => { - proto.insertLocal = _localDocuments.insertLocal; - proto.upsertLocal = _localDocuments.upsertLocal; - proto.getLocal = _localDocuments.getLocal; - proto.getLocal$ = _localDocuments.getLocal$; - }, - RxDatabase: proto => { - proto.insertLocal = _localDocuments.insertLocal; - proto.upsertLocal = _localDocuments.upsertLocal; - proto.getLocal = _localDocuments.getLocal; - proto.getLocal$ = _localDocuments.getLocal$; - } - }, - hooks: { - createRxDatabase: { - before: args => { - if (args.creator.localDocuments) { - /** - * We do not have to await - * the creation to speed up initial page load. - */ - /* await */ - (0, _localDocumentsHelper.createLocalDocStateByParent)(args.database); - } - } - }, - createRxCollection: { - before: args => { - if (args.creator.localDocuments) { - /** - * We do not have to await - * the creation to speed up initial page load. 
- */ - /* await */ - (0, _localDocumentsHelper.createLocalDocStateByParent)(args.collection); - } - } - }, - preDestroyRxDatabase: { - after: db => { - return (0, _localDocumentsHelper.closeStateByParent)(db); - } - }, - postDestroyRxCollection: { - after: collection => (0, _localDocumentsHelper.closeStateByParent)(collection) - }, - postRemoveRxDatabase: { - after: args => { - return (0, _localDocumentsHelper.removeLocalDocumentsStorageInstance)(args.storage, args.databaseName, ''); - } - }, - postRemoveRxCollection: { - after: args => { - return (0, _localDocumentsHelper.removeLocalDocumentsStorageInstance)(args.storage, args.databaseName, args.collectionName); - } - } - }, - overwritable: {} -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/local-documents/index.js.map b/dist/cjs/plugins/local-documents/index.js.map deleted file mode 100644 index 583a7765524..00000000000 --- a/dist/cjs/plugins/local-documents/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_localDocuments","require","Object","keys","forEach","key","prototype","hasOwnProperty","call","_exportNames","exports","defineProperty","enumerable","get","_localDocumentsHelper","_rxLocalDocument","RxDBLocalDocumentsPlugin","name","rxdb","prototypes","RxCollection","proto","insertLocal","upsertLocal","getLocal","getLocal$","RxDatabase","hooks","createRxDatabase","before","args","creator","localDocuments","createLocalDocStateByParent","database","createRxCollection","collection","preDestroyRxDatabase","after","db","closeStateByParent","postDestroyRxCollection","postRemoveRxDatabase","removeLocalDocumentsStorageInstance","storage","databaseName","postRemoveRxCollection","collectionName","overwritable"],"sources":["../../../../src/plugins/local-documents/index.ts"],"sourcesContent":["import type {\n RxPlugin\n} from '../../types/index.d.ts';\nimport {\n getLocal,\n getLocal$,\n insertLocal,\n upsertLocal\n} from 
'./local-documents.ts';\nimport {\n closeStateByParent,\n createLocalDocStateByParent,\n removeLocalDocumentsStorageInstance\n} from './local-documents-helper.ts';\n\nexport * from './local-documents-helper.ts';\nexport * from './local-documents.ts';\nexport * from './rx-local-document.ts';\nexport type {\n LocalDocumentParent,\n LocalDocumentState,\n RxLocalDocument,\n RxLocalDocumentData\n} from '../../types/plugins/local-documents.d.ts';\n\n\nexport const RxDBLocalDocumentsPlugin: RxPlugin = {\n name: 'local-documents',\n rxdb: true,\n prototypes: {\n RxCollection: (proto: any) => {\n proto.insertLocal = insertLocal;\n proto.upsertLocal = upsertLocal;\n proto.getLocal = getLocal;\n proto.getLocal$ = getLocal$;\n },\n RxDatabase: (proto: any) => {\n proto.insertLocal = insertLocal;\n proto.upsertLocal = upsertLocal;\n proto.getLocal = getLocal;\n proto.getLocal$ = getLocal$;\n }\n },\n hooks: {\n createRxDatabase: {\n before: args => {\n if (args.creator.localDocuments) {\n /**\n * We do not have to await\n * the creation to speed up initial page load.\n */\n /* await */ createLocalDocStateByParent(args.database);\n }\n }\n },\n createRxCollection: {\n before: args => {\n if (args.creator.localDocuments) {\n /**\n * We do not have to await\n * the creation to speed up initial page load.\n */\n /* await */ createLocalDocStateByParent(args.collection);\n }\n }\n },\n preDestroyRxDatabase: {\n after: db => {\n return closeStateByParent(db);\n }\n },\n postDestroyRxCollection: {\n after: collection => closeStateByParent(collection)\n },\n postRemoveRxDatabase: {\n after: args => {\n return removeLocalDocumentsStorageInstance(\n args.storage,\n args.databaseName,\n ''\n );\n }\n },\n postRemoveRxCollection: {\n after: args => {\n return removeLocalDocumentsStorageInstance(\n args.storage,\n args.databaseName,\n args.collectionName\n );\n }\n }\n },\n overwritable: 
{}\n};\n"],"mappings":";;;;;;;;;AAGA,IAAAA,eAAA,GAAAC,OAAA;AAaAC,MAAA,CAAAC,IAAA,CAAAH,eAAA,EAAAI,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAL,eAAA,CAAAK,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAb,eAAA,CAAAK,GAAA;IAAA;EAAA;AAAA;AAPA,IAAAS,qBAAA,GAAAb,OAAA;AAMAC,MAAA,CAAAC,IAAA,CAAAW,qBAAA,EAAAV,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAS,qBAAA,CAAAT,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAC,qBAAA,CAAAT,GAAA;IAAA;EAAA;AAAA;AAEA,IAAAU,gBAAA,GAAAd,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAY,gBAAA,EAAAX,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAU,gBAAA,CAAAV,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAE,gBAAA,CAAAV,GAAA;IAAA;EAAA;AAAA;AASO,IAAMW,wBAAkC,GAAAN,OAAA,CAAAM,wBAAA,GAAG;EAC9CC,IAAI,EAAE,iBAAiB;EACvBC,IAAI,EAAE,IAAI;EACVC,UAAU,EAAE;IACRC,YAAY,EAAGC,KAAU,IAAK;MAC1BA,KAAK,CAACC,WAAW,GAAGA,2BAAW;MAC/BD,KAAK,CAACE,WAAW,GAAGA,2BAAW;MAC/BF,KAAK,CAACG,QAAQ,GAAGA,wBAAQ;MACzBH,KAAK,CAACI,SAAS,GAAGA,yBAAS;IAC/B,CAAC;IACDC,UAAU,EAAGL,KAAU,IAAK;MACxBA,KAAK,CAACC,WAAW,GAAGA,2BAAW;MAC/BD,KAAK,CAACE,WAAW,GAAGA,2BAAW;MAC/BF,KAAK,CAACG,QAAQ,GAAGA,wBAAQ;MACzBH,KAAK,CAACI,SAAS,GAAGA,yBAAS;IAC/B;EACJ,CAAC;EACDE,KAAK,EAAE;IACHC,gBAAgB,EAAE;MACdC,MAAM,EAAEC,IAAI,IAAI;QACZ,IAAIA,IAAI,CAACC,OAAO,CAACC,cAAc,EAAE;UAC7B;AACpB;AACA;AACA;UACoB;UAAY,IAAAC,iDAA2B,EAACH,IAAI,CAACI,QAAQ,CAAC;QAC1D;MACJ;IACJ,CAAC;IACDC,kBAAkB,EAAE;MAChBN,MAAM,EAAEC,IAAI,IAAI;QACZ,IAAIA,IAAI,CAACC,OAAO,CAACC,cAAc,EAAE;UAC7B;AACpB;AACA;AACA;UACoB;UAAY,IAAAC,iDAA2B,EAACH,IAAI,CAACM,UAAU,CAAC;QAC5D;MACJ;IACJ,CA
AC;IACDC,oBAAoB,EAAE;MAClBC,KAAK,EAAEC,EAAE,IAAI;QACT,OAAO,IAAAC,wCAAkB,EAACD,EAAE,CAAC;MACjC;IACJ,CAAC;IACDE,uBAAuB,EAAE;MACrBH,KAAK,EAAEF,UAAU,IAAI,IAAAI,wCAAkB,EAACJ,UAAU;IACtD,CAAC;IACDM,oBAAoB,EAAE;MAClBJ,KAAK,EAAER,IAAI,IAAI;QACX,OAAO,IAAAa,yDAAmC,EACtCb,IAAI,CAACc,OAAO,EACZd,IAAI,CAACe,YAAY,EACjB,EACJ,CAAC;MACL;IACJ,CAAC;IACDC,sBAAsB,EAAE;MACpBR,KAAK,EAAER,IAAI,IAAI;QACX,OAAO,IAAAa,yDAAmC,EACtCb,IAAI,CAACc,OAAO,EACZd,IAAI,CAACe,YAAY,EACjBf,IAAI,CAACiB,cACT,CAAC;MACL;IACJ;EACJ,CAAC;EACDC,YAAY,EAAE,CAAC;AACnB,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/local-documents/local-documents-helper.js b/dist/cjs/plugins/local-documents/local-documents-helper.js deleted file mode 100644 index 7c5e38f0e7e..00000000000 --- a/dist/cjs/plugins/local-documents/local-documents-helper.js +++ /dev/null @@ -1,139 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RX_LOCAL_DOCUMENT_SCHEMA = exports.LOCAL_DOC_STATE_BY_PARENT_RESOLVED = exports.LOCAL_DOC_STATE_BY_PARENT = void 0; -exports.closeStateByParent = closeStateByParent; -exports.createLocalDocStateByParent = createLocalDocStateByParent; -exports.createLocalDocumentStorageInstance = createLocalDocumentStorageInstance; -exports.getCollectionLocalInstanceName = getCollectionLocalInstanceName; -exports.getLocalDocStateByParent = getLocalDocStateByParent; -exports.removeLocalDocumentsStorageInstance = removeLocalDocumentsStorageInstance; -var _rxjs = require("rxjs"); -var _docCache = require("../../doc-cache.js"); -var _incrementalWrite = require("../../incremental-write.js"); -var _rxError = require("../../rx-error.js"); -var _rxSchemaHelper = require("../../rx-schema-helper.js"); -var _rxStorageHelper = require("../../rx-storage-helper.js"); -var _index = require("../../plugins/utils/index.js"); -var _rxLocalDocument = require("./rx-local-document.js"); -var _overwritable = require("../../overwritable.js"); -var LOCAL_DOC_STATE_BY_PARENT = 
exports.LOCAL_DOC_STATE_BY_PARENT = new WeakMap(); -var LOCAL_DOC_STATE_BY_PARENT_RESOLVED = exports.LOCAL_DOC_STATE_BY_PARENT_RESOLVED = new WeakMap(); -function createLocalDocStateByParent(parent) { - var database = parent.database ? parent.database : parent; - var collectionName = parent.database ? parent.name : ''; - var statePromise = (async () => { - var storageInstance = await createLocalDocumentStorageInstance(database.token, database.storage, database.name, collectionName, database.instanceCreationOptions, database.multiInstance); - storageInstance = (0, _rxStorageHelper.getWrappedStorageInstance)(database, storageInstance, RX_LOCAL_DOCUMENT_SCHEMA); - var docCache = new _docCache.DocumentCache('id', parent.$.pipe((0, _rxjs.filter)(cE => cE.isLocal)), docData => (0, _rxLocalDocument.createRxLocalDocument)(docData, parent)); - var incrementalWriteQueue = new _incrementalWrite.IncrementalWriteQueue(storageInstance, 'id', () => {}, () => {}); - - /** - * Emit the changestream into the collections change stream - */ - var databaseStorageToken = await database.storageToken; - var subLocalDocs = storageInstance.changeStream().subscribe(eventBulk => { - var events = new Array(eventBulk.events.length); - var rawEvents = eventBulk.events; - var collectionName = parent.database ? parent.name : undefined; - for (var index = 0; index < rawEvents.length; index++) { - var event = rawEvents[index]; - events[index] = { - documentId: event.documentId, - collectionName, - isLocal: true, - operation: event.operation, - documentData: _overwritable.overwritable.deepFreezeWhenDevMode(event.documentData), - previousDocumentData: _overwritable.overwritable.deepFreezeWhenDevMode(event.previousDocumentData) - }; - } - var changeEventBulk = { - id: eventBulk.id, - internal: false, - collectionName: parent.database ? 
parent.name : undefined, - storageToken: databaseStorageToken, - events, - databaseToken: database.token, - checkpoint: eventBulk.checkpoint, - context: eventBulk.context, - endTime: eventBulk.endTime, - startTime: eventBulk.startTime - }; - database.$emit(changeEventBulk); - }); - parent._subs.push(subLocalDocs); - var state = { - database, - parent, - storageInstance, - docCache, - incrementalWriteQueue - }; - LOCAL_DOC_STATE_BY_PARENT_RESOLVED.set(parent, state); - return state; - })(); - LOCAL_DOC_STATE_BY_PARENT.set(parent, statePromise); -} -function getLocalDocStateByParent(parent) { - var statePromise = LOCAL_DOC_STATE_BY_PARENT.get(parent); - if (!statePromise) { - var database = parent.database ? parent.database : parent; - var collectionName = parent.database ? parent.name : ''; - throw (0, _rxError.newRxError)('LD8', { - database: database.name, - collection: collectionName - }); - } - return statePromise; -} -function createLocalDocumentStorageInstance(databaseInstanceToken, storage, databaseName, collectionName, instanceCreationOptions, multiInstance) { - return storage.createStorageInstance({ - databaseInstanceToken, - databaseName: databaseName, - /** - * Use a different collection name for the local documents instance - * so that the local docs can be kept while deleting the normal instance - * after migration. 
- */ - collectionName: getCollectionLocalInstanceName(collectionName), - schema: RX_LOCAL_DOCUMENT_SCHEMA, - options: instanceCreationOptions, - multiInstance, - devMode: _overwritable.overwritable.isDevMode() - }); -} -function closeStateByParent(parent) { - var statePromise = LOCAL_DOC_STATE_BY_PARENT.get(parent); - if (statePromise) { - LOCAL_DOC_STATE_BY_PARENT.delete(parent); - return statePromise.then(state => state.storageInstance.close()); - } -} -async function removeLocalDocumentsStorageInstance(storage, databaseName, collectionName) { - var databaseInstanceToken = (0, _index.randomCouchString)(10); - var storageInstance = await createLocalDocumentStorageInstance(databaseInstanceToken, storage, databaseName, collectionName, {}, false); - await storageInstance.remove(); -} -function getCollectionLocalInstanceName(collectionName) { - return 'plugin-local-documents-' + collectionName; -} -var RX_LOCAL_DOCUMENT_SCHEMA = exports.RX_LOCAL_DOCUMENT_SCHEMA = (0, _rxSchemaHelper.fillWithDefaultSettings)({ - title: 'RxLocalDocument', - version: 0, - primaryKey: 'id', - type: 'object', - properties: { - id: { - type: 'string', - maxLength: 128 - }, - data: { - type: 'object', - additionalProperties: true - } - }, - required: ['id', 'data'] -}); -//# sourceMappingURL=local-documents-helper.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/local-documents/local-documents-helper.js.map b/dist/cjs/plugins/local-documents/local-documents-helper.js.map deleted file mode 100644 index 1989a443385..00000000000 --- a/dist/cjs/plugins/local-documents/local-documents-helper.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"local-documents-helper.js","names":["_rxjs","require","_docCache","_incrementalWrite","_rxError","_rxSchemaHelper","_rxStorageHelper","_index","_rxLocalDocument","_overwritable","LOCAL_DOC_STATE_BY_PARENT","exports","WeakMap","LOCAL_DOC_STATE_BY_PARENT_RESOLVED","createLocalDocStateByParent","parent","database","collectionName","name","statePromise","storageInstance","createLocalDocumentStorageInstance","token","storage","instanceCreationOptions","multiInstance","getWrappedStorageInstance","RX_LOCAL_DOCUMENT_SCHEMA","docCache","DocumentCache","$","pipe","filter","cE","isLocal","docData","createRxLocalDocument","incrementalWriteQueue","IncrementalWriteQueue","databaseStorageToken","storageToken","subLocalDocs","changeStream","subscribe","eventBulk","events","Array","length","rawEvents","undefined","index","event","documentId","operation","documentData","overwritable","deepFreezeWhenDevMode","previousDocumentData","changeEventBulk","id","internal","databaseToken","checkpoint","context","endTime","startTime","$emit","_subs","push","state","set","getLocalDocStateByParent","get","newRxError","collection","databaseInstanceToken","databaseName","createStorageInstance","getCollectionLocalInstanceName","schema","options","devMode","isDevMode","closeStateByParent","delete","then","close","removeLocalDocumentsStorageInstance","randomCouchString","remove","fillWithDefaultSettings","title","version","primaryKey","type","properties","maxLength","data","additionalProperties","required"],"sources":["../../../../src/plugins/local-documents/local-documents-helper.ts"],"sourcesContent":["import { filter } from 'rxjs';\nimport { DocumentCache } from '../../doc-cache.ts';\nimport { IncrementalWriteQueue } from '../../incremental-write.ts';\nimport { newRxError } from '../../rx-error.ts';\nimport { fillWithDefaultSettings } from '../../rx-schema-helper.ts';\nimport {\n getWrappedStorageInstance\n} from '../../rx-storage-helper.ts';\nimport type {\n 
LocalDocumentParent,\n LocalDocumentState,\n RxChangeEvent,\n RxChangeEventBulk,\n RxDatabase,\n RxDocumentData,\n RxJsonSchema,\n RxLocalDocumentData,\n RxStorage\n} from '../../types/index.d.ts';\nimport { randomCouchString } from '../../plugins/utils/index.ts';\nimport { createRxLocalDocument } from './rx-local-document.ts';\nimport { overwritable } from '../../overwritable.ts';\n\nexport const LOCAL_DOC_STATE_BY_PARENT: WeakMap> = new WeakMap();\nexport const LOCAL_DOC_STATE_BY_PARENT_RESOLVED: WeakMap = new WeakMap();\n\nexport function createLocalDocStateByParent(parent: LocalDocumentParent): void {\n const database: RxDatabase = parent.database ? parent.database : parent as any;\n const collectionName = parent.database ? parent.name : '';\n const statePromise = (async () => {\n let storageInstance = await createLocalDocumentStorageInstance(\n database.token,\n database.storage,\n database.name,\n collectionName,\n database.instanceCreationOptions,\n database.multiInstance\n );\n storageInstance = getWrappedStorageInstance(\n database,\n storageInstance,\n RX_LOCAL_DOCUMENT_SCHEMA\n );\n const docCache = new DocumentCache(\n 'id',\n parent.$.pipe(\n filter(cE => (cE as RxChangeEvent).isLocal)\n ),\n docData => createRxLocalDocument(docData, parent) as any\n );\n\n const incrementalWriteQueue = new IncrementalWriteQueue(\n storageInstance,\n 'id',\n () => { },\n () => { }\n );\n\n /**\n * Emit the changestream into the collections change stream\n */\n const databaseStorageToken = await database.storageToken;\n const subLocalDocs = storageInstance.changeStream().subscribe(eventBulk => {\n const events = new Array(eventBulk.events.length);\n const rawEvents = eventBulk.events;\n const collectionName = parent.database ? 
parent.name : undefined;\n for (let index = 0; index < rawEvents.length; index++) {\n const event = rawEvents[index];\n events[index] = {\n documentId: event.documentId,\n collectionName,\n isLocal: true,\n operation: event.operation,\n documentData: overwritable.deepFreezeWhenDevMode(event.documentData) as any,\n previousDocumentData: overwritable.deepFreezeWhenDevMode(event.previousDocumentData) as any\n };\n }\n const changeEventBulk: RxChangeEventBulk = {\n id: eventBulk.id,\n internal: false,\n collectionName: parent.database ? parent.name : undefined,\n storageToken: databaseStorageToken,\n events,\n databaseToken: database.token,\n checkpoint: eventBulk.checkpoint,\n context: eventBulk.context,\n endTime: eventBulk.endTime,\n startTime: eventBulk.startTime\n };\n database.$emit(changeEventBulk);\n });\n parent._subs.push(subLocalDocs);\n\n const state = {\n database,\n parent,\n storageInstance,\n docCache,\n incrementalWriteQueue\n };\n LOCAL_DOC_STATE_BY_PARENT_RESOLVED.set(parent, state);\n return state;\n })();\n LOCAL_DOC_STATE_BY_PARENT.set(parent, statePromise);\n}\n\nexport function getLocalDocStateByParent(parent: LocalDocumentParent): Promise {\n const statePromise = LOCAL_DOC_STATE_BY_PARENT.get(parent);\n if (!statePromise) {\n const database: RxDatabase = parent.database ? parent.database : parent as any;\n const collectionName = parent.database ? 
parent.name : '';\n throw newRxError('LD8', {\n database: database.name,\n collection: collectionName\n });\n }\n return statePromise;\n}\n\nexport function createLocalDocumentStorageInstance(\n databaseInstanceToken: string,\n storage: RxStorage,\n databaseName: string,\n collectionName: string,\n instanceCreationOptions: any,\n multiInstance: boolean\n) {\n return storage.createStorageInstance({\n databaseInstanceToken,\n databaseName: databaseName,\n /**\n * Use a different collection name for the local documents instance\n * so that the local docs can be kept while deleting the normal instance\n * after migration.\n */\n collectionName: getCollectionLocalInstanceName(collectionName),\n schema: RX_LOCAL_DOCUMENT_SCHEMA,\n options: instanceCreationOptions,\n multiInstance,\n devMode: overwritable.isDevMode()\n });\n}\n\nexport function closeStateByParent(parent: LocalDocumentParent) {\n const statePromise = LOCAL_DOC_STATE_BY_PARENT.get(parent);\n if (statePromise) {\n LOCAL_DOC_STATE_BY_PARENT.delete(parent);\n return statePromise.then(state => state.storageInstance.close());\n }\n}\n\nexport async function removeLocalDocumentsStorageInstance(\n storage: RxStorage,\n databaseName: string,\n collectionName: string\n) {\n const databaseInstanceToken = randomCouchString(10);\n const storageInstance = await createLocalDocumentStorageInstance(\n databaseInstanceToken,\n storage,\n databaseName,\n collectionName,\n {},\n false\n );\n await storageInstance.remove();\n}\n\nexport function getCollectionLocalInstanceName(collectionName: string): string {\n return 'plugin-local-documents-' + collectionName;\n}\n\nexport const RX_LOCAL_DOCUMENT_SCHEMA: RxJsonSchema> = fillWithDefaultSettings({\n title: 'RxLocalDocument',\n version: 0,\n primaryKey: 'id',\n type: 'object',\n properties: {\n id: {\n type: 'string',\n maxLength: 128\n },\n data: {\n type: 'object',\n additionalProperties: true\n }\n },\n required: [\n 'id',\n 'data'\n 
]\n});\n"],"mappings":";;;;;;;;;;;;AAAA,IAAAA,KAAA,GAAAC,OAAA;AACA,IAAAC,SAAA,GAAAD,OAAA;AACA,IAAAE,iBAAA,GAAAF,OAAA;AACA,IAAAG,QAAA,GAAAH,OAAA;AACA,IAAAI,eAAA,GAAAJ,OAAA;AACA,IAAAK,gBAAA,GAAAL,OAAA;AAcA,IAAAM,MAAA,GAAAN,OAAA;AACA,IAAAO,gBAAA,GAAAP,OAAA;AACA,IAAAQ,aAAA,GAAAR,OAAA;AAEO,IAAMS,yBAAoF,GAAAC,OAAA,CAAAD,yBAAA,GAAG,IAAIE,OAAO,CAAC,CAAC;AAC1G,IAAMC,kCAAoF,GAAAF,OAAA,CAAAE,kCAAA,GAAG,IAAID,OAAO,CAAC,CAAC;AAE1G,SAASE,2BAA2BA,CAACC,MAA2B,EAAQ;EAC3E,IAAMC,QAAoB,GAAGD,MAAM,CAACC,QAAQ,GAAGD,MAAM,CAACC,QAAQ,GAAGD,MAAa;EAC9E,IAAME,cAAc,GAAGF,MAAM,CAACC,QAAQ,GAAGD,MAAM,CAACG,IAAI,GAAG,EAAE;EACzD,IAAMC,YAAY,GAAG,CAAC,YAAY;IAC9B,IAAIC,eAAe,GAAG,MAAMC,kCAAkC,CAC1DL,QAAQ,CAACM,KAAK,EACdN,QAAQ,CAACO,OAAO,EAChBP,QAAQ,CAACE,IAAI,EACbD,cAAc,EACdD,QAAQ,CAACQ,uBAAuB,EAChCR,QAAQ,CAACS,aACb,CAAC;IACDL,eAAe,GAAG,IAAAM,0CAAyB,EACvCV,QAAQ,EACRI,eAAe,EACfO,wBACJ,CAAC;IACD,IAAMC,QAAQ,GAAG,IAAIC,uBAAa,CAC9B,IAAI,EACJd,MAAM,CAACe,CAAC,CAACC,IAAI,CACT,IAAAC,YAAM,EAACC,EAAE,IAAKA,EAAE,CAAwBC,OAAO,CACnD,CAAC,EACDC,OAAO,IAAI,IAAAC,sCAAqB,EAACD,OAAO,EAAEpB,MAAM,CACpD,CAAC;IAED,IAAMsB,qBAAqB,GAAG,IAAIC,uCAAqB,CACnDlB,eAAe,EACf,IAAI,EACJ,MAAM,CAAE,CAAC,EACT,MAAM,CAAE,CACZ,CAAC;;IAED;AACR;AACA;IACQ,IAAMmB,oBAAoB,GAAG,MAAMvB,QAAQ,CAACwB,YAAY;IACxD,IAAMC,YAAY,GAAGrB,eAAe,CAACsB,YAAY,CAAC,CAAC,CAACC,SAAS,CAACC,SAAS,IAAI;MACvE,IAAMC,MAAM,GAAG,IAAIC,KAAK,CAACF,SAAS,CAACC,MAAM,CAACE,MAAM,CAAC;MACjD,IAAMC,SAAS,GAAGJ,SAAS,CAACC,MAAM;MAClC,IAAM5B,cAAc,GAAGF,MAAM,CAACC,QAAQ,GAAGD,MAAM,CAACG,IAAI,GAAG+B,SAAS;MAChE,KAAK,IAAIC,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGF,SAAS,CAACD,MAAM,EAAEG,KAAK,EAAE,EAAE;QACnD,IAAMC,KAAK,GAAGH,SAAS,CAACE,KAAK,CAAC;QAC9BL,MAAM,CAACK,KAAK,CAAC,GAAG;UACZE,UAAU,EAAED,KAAK,CAACC,UAAU;UAC5BnC,cAAc;UACdiB,OAAO,EAAE,IAAI;UACbmB,SAAS,EAAEF,KAAK,CAACE,SAAS;UAC1BC,YAAY,EAAEC,0BAAY,CAACC,qBAAqB,CAACL,KAAK,CAACG,YAAY,CAAQ;UAC3EG,oBAAoB,EAAEF,0BAAY,CAACC,qBAAqB,CAACL,KAAK,CAACM,oBAAoB;QACvF,CAAC;MACL;MACA,IAAMC,eAAuD,GAAG;QAC5DC,EAAE,EAAEf,SAAS,CAACe,EAAE;QAChBC,QAAQ,EAAE,KAAK;QACf3C,cAAc,EAAEF,
MAAM,CAACC,QAAQ,GAAGD,MAAM,CAACG,IAAI,GAAG+B,SAAS;QACzDT,YAAY,EAAED,oBAAoB;QAClCM,MAAM;QACNgB,aAAa,EAAE7C,QAAQ,CAACM,KAAK;QAC7BwC,UAAU,EAAElB,SAAS,CAACkB,UAAU;QAChCC,OAAO,EAAEnB,SAAS,CAACmB,OAAO;QAC1BC,OAAO,EAAEpB,SAAS,CAACoB,OAAO;QAC1BC,SAAS,EAAErB,SAAS,CAACqB;MACzB,CAAC;MACDjD,QAAQ,CAACkD,KAAK,CAACR,eAAe,CAAC;IACnC,CAAC,CAAC;IACF3C,MAAM,CAACoD,KAAK,CAACC,IAAI,CAAC3B,YAAY,CAAC;IAE/B,IAAM4B,KAAK,GAAG;MACVrD,QAAQ;MACRD,MAAM;MACNK,eAAe;MACfQ,QAAQ;MACRS;IACJ,CAAC;IACDxB,kCAAkC,CAACyD,GAAG,CAACvD,MAAM,EAAEsD,KAAK,CAAC;IACrD,OAAOA,KAAK;EAChB,CAAC,EAAE,CAAC;EACJ3D,yBAAyB,CAAC4D,GAAG,CAACvD,MAAM,EAAEI,YAAY,CAAC;AACvD;AAEO,SAASoD,wBAAwBA,CAACxD,MAA2B,EAA+B;EAC/F,IAAMI,YAAY,GAAGT,yBAAyB,CAAC8D,GAAG,CAACzD,MAAM,CAAC;EAC1D,IAAI,CAACI,YAAY,EAAE;IACf,IAAMH,QAAoB,GAAGD,MAAM,CAACC,QAAQ,GAAGD,MAAM,CAACC,QAAQ,GAAGD,MAAa;IAC9E,IAAME,cAAc,GAAGF,MAAM,CAACC,QAAQ,GAAGD,MAAM,CAACG,IAAI,GAAG,EAAE;IACzD,MAAM,IAAAuD,mBAAU,EAAC,KAAK,EAAE;MACpBzD,QAAQ,EAAEA,QAAQ,CAACE,IAAI;MACvBwD,UAAU,EAAEzD;IAChB,CAAC,CAAC;EACN;EACA,OAAOE,YAAY;AACvB;AAEO,SAASE,kCAAkCA,CAC9CsD,qBAA6B,EAC7BpD,OAA4B,EAC5BqD,YAAoB,EACpB3D,cAAsB,EACtBO,uBAA4B,EAC5BC,aAAsB,EACxB;EACE,OAAOF,OAAO,CAACsD,qBAAqB,CAAsB;IACtDF,qBAAqB;IACrBC,YAAY,EAAEA,YAAY;IAC1B;AACR;AACA;AACA;AACA;IACQ3D,cAAc,EAAE6D,8BAA8B,CAAC7D,cAAc,CAAC;IAC9D8D,MAAM,EAAEpD,wBAAwB;IAChCqD,OAAO,EAAExD,uBAAuB;IAChCC,aAAa;IACbwD,OAAO,EAAE1B,0BAAY,CAAC2B,SAAS,CAAC;EACpC,CAAC,CAAC;AACN;AAEO,SAASC,kBAAkBA,CAACpE,MAA2B,EAAE;EAC5D,IAAMI,YAAY,GAAGT,yBAAyB,CAAC8D,GAAG,CAACzD,MAAM,CAAC;EAC1D,IAAII,YAAY,EAAE;IACdT,yBAAyB,CAAC0E,MAAM,CAACrE,MAAM,CAAC;IACxC,OAAOI,YAAY,CAACkE,IAAI,CAAChB,KAAK,IAAIA,KAAK,CAACjD,eAAe,CAACkE,KAAK,CAAC,CAAC,CAAC;EACpE;AACJ;AAEO,eAAeC,mCAAmCA,CACrDhE,OAA4B,EAC5BqD,YAAoB,EACpB3D,cAAsB,EACxB;EACE,IAAM0D,qBAAqB,GAAG,IAAAa,wBAAiB,EAAC,EAAE,CAAC;EACnD,IAAMpE,eAAe,GAAG,MAAMC,kCAAkC,CAC5DsD,qBAAqB,EACrBpD,OAAO,EACPqD,YAAY,EACZ3D,cAAc,EACd,CAAC,CAAC,EACF,KACJ,CAAC;EACD,MAAMG,eAAe,CAACqE,MAAM,CAAC,CAAC;AAClC;AAEO,SAASX,8BAA8BA,CAAC7D,cAAsB,EAAU;EAC3E,OAAO,yBAAyB
,GAAGA,cAAc;AACrD;AAEO,IAAMU,wBAA2E,GAAAhB,OAAA,CAAAgB,wBAAA,GAAG,IAAA+D,uCAAuB,EAAC;EAC/GC,KAAK,EAAE,iBAAiB;EACxBC,OAAO,EAAE,CAAC;EACVC,UAAU,EAAE,IAAI;EAChBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRpC,EAAE,EAAE;MACAmC,IAAI,EAAE,QAAQ;MACdE,SAAS,EAAE;IACf,CAAC;IACDC,IAAI,EAAE;MACFH,IAAI,EAAE,QAAQ;MACdI,oBAAoB,EAAE;IAC1B;EACJ,CAAC;EACDC,QAAQ,EAAE,CACN,IAAI,EACJ,MAAM;AAEd,CAAC,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/local-documents/local-documents.js b/dist/cjs/plugins/local-documents/local-documents.js deleted file mode 100644 index 9056bebf120..00000000000 --- a/dist/cjs/plugins/local-documents/local-documents.js +++ /dev/null @@ -1,107 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.getLocal = getLocal; -exports.getLocal$ = getLocal$; -exports.insertLocal = insertLocal; -exports.upsertLocal = upsertLocal; -var _index = require("../../plugins/utils/index.js"); -var _rxjs = require("rxjs"); -var _localDocumentsHelper = require("./local-documents-helper.js"); -var _rxStorageHelper = require("../../rx-storage-helper.js"); -/** - * save the local-document-data - * throws if already exists - */ -async function insertLocal(id, data) { - var state = await (0, _localDocumentsHelper.getLocalDocStateByParent)(this); - - // create new one - var docData = { - id: id, - data, - _deleted: false, - _meta: (0, _index.getDefaultRxDocumentMeta)(), - _rev: (0, _index.getDefaultRevision)(), - _attachments: {} - }; - return (0, _rxStorageHelper.writeSingle)(state.storageInstance, { - document: docData - }, 'local-document-insert').then(newDocData => state.docCache.getCachedRxDocument(newDocData)); -} - -/** - * save the local-document-data - * overwrites existing if exists - */ -function upsertLocal(id, data) { - return this.getLocal(id).then(existing => { - if (!existing) { - // create new one - var docPromise = this.insertLocal(id, data); - return docPromise; - } else { - // update existing - 
return existing.incrementalModify(() => { - return data; - }); - } - }); -} -async function getLocal(id) { - var state = await (0, _localDocumentsHelper.getLocalDocStateByParent)(this); - var docCache = state.docCache; - - // check in doc-cache - var found = docCache.getLatestDocumentDataIfExists(id); - if (found) { - return Promise.resolve(docCache.getCachedRxDocument(found)); - } - - // if not found, check in storage instance - return (0, _rxStorageHelper.getSingleDocument)(state.storageInstance, id).then(docData => { - if (!docData) { - return null; - } - return state.docCache.getCachedRxDocument(docData); - }); -} -function getLocal$(id) { - return this.$.pipe((0, _rxjs.startWith)(null), (0, _rxjs.mergeMap)(async cE => { - if (cE) { - return { - changeEvent: cE - }; - } else { - var doc = await this.getLocal(id); - return { - doc: doc - }; - } - }), (0, _rxjs.mergeMap)(async changeEventOrDoc => { - if (changeEventOrDoc.changeEvent) { - var cE = changeEventOrDoc.changeEvent; - if (!cE.isLocal || cE.documentId !== id) { - return { - use: false - }; - } else { - var doc = await this.getLocal(id); - return { - use: true, - doc: doc - }; - } - } else { - return { - use: true, - doc: changeEventOrDoc.doc - }; - } - }), (0, _rxjs.filter)(filterFlagged => filterFlagged.use), (0, _rxjs.map)(filterFlagged => { - return filterFlagged.doc; - })); -} -//# sourceMappingURL=local-documents.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/local-documents/local-documents.js.map b/dist/cjs/plugins/local-documents/local-documents.js.map deleted file mode 100644 index e7948c88325..00000000000 --- a/dist/cjs/plugins/local-documents/local-documents.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"local-documents.js","names":["_index","require","_rxjs","_localDocumentsHelper","_rxStorageHelper","insertLocal","id","data","state","getLocalDocStateByParent","docData","_deleted","_meta","getDefaultRxDocumentMeta","_rev","getDefaultRevision","_attachments","writeSingle","storageInstance","document","then","newDocData","docCache","getCachedRxDocument","upsertLocal","getLocal","existing","docPromise","incrementalModify","found","getLatestDocumentDataIfExists","Promise","resolve","getSingleDocument","getLocal$","$","pipe","startWith","mergeMap","cE","changeEvent","doc","changeEventOrDoc","isLocal","documentId","use","filter","filterFlagged","map"],"sources":["../../../../src/plugins/local-documents/local-documents.ts"],"sourcesContent":["import {\n getDefaultRevision,\n getDefaultRxDocumentMeta\n} from '../../plugins/utils/index.ts';\n\nimport type {\n RxChangeEvent,\n RxCollection,\n RxDatabase,\n RxDocument,\n RxDocumentWriteData,\n RxLocalDocument,\n RxLocalDocumentData\n} from '../../types/index.d.ts';\n\nimport {\n filter,\n map,\n startWith,\n mergeMap\n} from 'rxjs';\nimport { Observable } from 'rxjs';\n\nimport { getLocalDocStateByParent } from './local-documents-helper.ts';\nimport { getSingleDocument, writeSingle } from '../../rx-storage-helper.ts';\n\n\n\n/**\n * save the local-document-data\n * throws if already exists\n */\nexport async function insertLocal = any, Reactivity = unknown>(\n this: RxDatabase | RxCollection,\n id: string,\n data: DocData\n): Promise> {\n const state = await getLocalDocStateByParent(this);\n\n // create new one\n const docData: RxDocumentWriteData> = {\n id: id,\n data,\n _deleted: false,\n _meta: getDefaultRxDocumentMeta(),\n _rev: getDefaultRevision(),\n _attachments: {}\n };\n\n return writeSingle(\n state.storageInstance,\n {\n document: docData\n },\n 'local-document-insert'\n ).then(newDocData => state.docCache.getCachedRxDocument(newDocData) as any);\n}\n\n/**\n * save the local-document-data\n * 
overwrites existing if exists\n */\nexport function upsertLocal = any, Reactivity = unknown>(\n this: any,\n id: string,\n data: DocData\n): Promise> {\n return this.getLocal(id)\n .then((existing: RxDocument) => {\n if (!existing) {\n // create new one\n const docPromise = this.insertLocal(id, data);\n return docPromise;\n } else {\n // update existing\n return existing.incrementalModify(() => {\n return data;\n });\n }\n });\n}\n\nexport async function getLocal(this: any, id: string): Promise | null> {\n const state = await getLocalDocStateByParent(this);\n const docCache = state.docCache;\n\n // check in doc-cache\n const found = docCache.getLatestDocumentDataIfExists(id);\n if (found) {\n return Promise.resolve(\n docCache.getCachedRxDocument(found) as any\n );\n }\n\n // if not found, check in storage instance\n return getSingleDocument(state.storageInstance, id)\n .then((docData) => {\n if (!docData) {\n return null;\n }\n return state.docCache.getCachedRxDocument(docData) as any;\n });\n}\n\nexport function getLocal$(this: RxCollection, id: string): Observable | null> {\n return this.$.pipe(\n startWith(null),\n mergeMap(async (cE: RxChangeEvent | null) => {\n if (cE) {\n return {\n changeEvent: cE\n };\n } else {\n const doc = await this.getLocal(id);\n return {\n doc: doc\n };\n }\n }),\n mergeMap(async (changeEventOrDoc) => {\n if (changeEventOrDoc.changeEvent) {\n const cE = changeEventOrDoc.changeEvent;\n if (!cE.isLocal || cE.documentId !== id) {\n return {\n use: false\n };\n } else {\n const doc = await this.getLocal(id);\n return {\n use: true,\n doc: doc\n };\n }\n } else {\n return {\n use: true,\n doc: changeEventOrDoc.doc\n };\n }\n }),\n filter(filterFlagged => filterFlagged.use),\n map(filterFlagged => {\n return filterFlagged.doc as any;\n })\n 
);\n}\n"],"mappings":";;;;;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AAeA,IAAAC,KAAA,GAAAD,OAAA;AAQA,IAAAE,qBAAA,GAAAF,OAAA;AACA,IAAAG,gBAAA,GAAAH,OAAA;AAIA;AACA;AACA;AACA;AACO,eAAeI,WAAWA,CAE7BC,EAAU,EACVC,IAAa,EACqC;EAClD,IAAMC,KAAK,GAAG,MAAM,IAAAC,8CAAwB,EAAC,IAAI,CAAC;;EAElD;EACA,IAAMC,OAA0D,GAAG;IAC/DJ,EAAE,EAAEA,EAAE;IACNC,IAAI;IACJI,QAAQ,EAAE,KAAK;IACfC,KAAK,EAAE,IAAAC,+BAAwB,EAAC,CAAC;IACjCC,IAAI,EAAE,IAAAC,yBAAkB,EAAC,CAAC;IAC1BC,YAAY,EAAE,CAAC;EACnB,CAAC;EAED,OAAO,IAAAC,4BAAW,EACdT,KAAK,CAACU,eAAe,EACrB;IACIC,QAAQ,EAAET;EACd,CAAC,EACD,uBACJ,CAAC,CAACU,IAAI,CAACC,UAAU,IAAIb,KAAK,CAACc,QAAQ,CAACC,mBAAmB,CAACF,UAAU,CAAQ,CAAC;AAC/E;;AAEA;AACA;AACA;AACA;AACO,SAASG,WAAWA,CAEvBlB,EAAU,EACVC,IAAa,EACqC;EAClD,OAAO,IAAI,CAACkB,QAAQ,CAACnB,EAAE,CAAC,CACnBc,IAAI,CAAEM,QAAoB,IAAK;IAC5B,IAAI,CAACA,QAAQ,EAAE;MACX;MACA,IAAMC,UAAU,GAAG,IAAI,CAACtB,WAAW,CAACC,EAAE,EAAEC,IAAI,CAAC;MAC7C,OAAOoB,UAAU;IACrB,CAAC,MAAM;MACH;MACA,OAAOD,QAAQ,CAACE,iBAAiB,CAAC,MAAM;QACpC,OAAOrB,IAAI;MACf,CAAC,CAAC;IACN;EACJ,CAAC,CAAC;AACV;AAEO,eAAekB,QAAQA,CAAiDnB,EAAU,EAA6D;EAClJ,IAAME,KAAK,GAAG,MAAM,IAAAC,8CAAwB,EAAC,IAAI,CAAC;EAClD,IAAMa,QAAQ,GAAGd,KAAK,CAACc,QAAQ;;EAE/B;EACA,IAAMO,KAAK,GAAGP,QAAQ,CAACQ,6BAA6B,CAACxB,EAAE,CAAC;EACxD,IAAIuB,KAAK,EAAE;IACP,OAAOE,OAAO,CAACC,OAAO,CAClBV,QAAQ,CAACC,mBAAmB,CAACM,KAAK,CACtC,CAAC;EACL;;EAEA;EACA,OAAO,IAAAI,kCAAiB,EAACzB,KAAK,CAACU,eAAe,EAAEZ,EAAE,CAAC,CAC9Cc,IAAI,CAAEV,OAAO,IAAK;IACf,IAAI,CAACA,OAAO,EAAE;MACV,OAAO,IAAI;IACf;IACA,OAAOF,KAAK,CAACc,QAAQ,CAACC,mBAAmB,CAACb,OAAO,CAAC;EACtD,CAAC,CAAC;AACV;AAEO,SAASwB,SAASA,CAA0D5B,EAAU,EAAgE;EACzJ,OAAO,IAAI,CAAC6B,CAAC,CAACC,IAAI,CACd,IAAAC,eAAS,EAAC,IAAI,CAAC,EACf,IAAAC,cAAQ,EAAC,MAAOC,EAA6C,IAAK;IAC9D,IAAIA,EAAE,EAAE;MACJ,OAAO;QACHC,WAAW,EAAED;MACjB,CAAC;IACL,CAAC,MAAM;MACH,IAAME,GAAG,GAAG,MAAM,IAAI,CAAChB,QAAQ,CAACnB,EAAE,CAAC;MACnC,OAAO;QACHmC,GAAG,EAAEA;MACT,CAAC;IACL;EACJ,CAAC,CAAC,EACF,IAAAH,cAAQ,EAAC,MAAOI,gBAAgB,IAAK;IACjC,IAAIA,gBAAgB,CAACF,WAAW,EAAE;MAC9B,IAAMD,EAAE,GAAGG,gBAAgB,CAACF,WAAW;MACvC,IAAI,CAACD,EAA
E,CAACI,OAAO,IAAIJ,EAAE,CAACK,UAAU,KAAKtC,EAAE,EAAE;QACrC,OAAO;UACHuC,GAAG,EAAE;QACT,CAAC;MACL,CAAC,MAAM;QACH,IAAMJ,GAAG,GAAG,MAAM,IAAI,CAAChB,QAAQ,CAACnB,EAAE,CAAC;QACnC,OAAO;UACHuC,GAAG,EAAE,IAAI;UACTJ,GAAG,EAAEA;QACT,CAAC;MACL;IACJ,CAAC,MAAM;MACH,OAAO;QACHI,GAAG,EAAE,IAAI;QACTJ,GAAG,EAAEC,gBAAgB,CAACD;MAC1B,CAAC;IACL;EACJ,CAAC,CAAC,EACF,IAAAK,YAAM,EAACC,aAAa,IAAIA,aAAa,CAACF,GAAG,CAAC,EAC1C,IAAAG,SAAG,EAACD,aAAa,IAAI;IACjB,OAAOA,aAAa,CAACN,GAAG;EAC5B,CAAC,CACL,CAAC;AACL","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/local-documents/rx-local-document.js b/dist/cjs/plugins/local-documents/rx-local-document.js deleted file mode 100644 index d71bae66d37..00000000000 --- a/dist/cjs/plugins/local-documents/rx-local-document.js +++ /dev/null @@ -1,186 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.createRxLocalDocument = createRxLocalDocument; -exports.getRxDatabaseFromLocalDocument = getRxDatabaseFromLocalDocument; -var _inheritsLoose2 = _interopRequireDefault(require("@babel/runtime/helpers/inheritsLoose")); -var _rxjs = require("rxjs"); -var _overwritable = require("../../overwritable.js"); -var _rxChangeEvent = require("../../rx-change-event.js"); -var _rxDocument = require("../../rx-document.js"); -var _rxError = require("../../rx-error.js"); -var _rxStorageHelper = require("../../rx-storage-helper.js"); -var _index = require("../../plugins/utils/index.js"); -var _localDocumentsHelper = require("./local-documents-helper.js"); -var _rxDatabase = require("../../rx-database.js"); -var RxDocumentParent = (0, _rxDocument.createRxDocumentConstructor)(); -var RxLocalDocumentClass = /*#__PURE__*/function (_RxDocumentParent) { - function RxLocalDocumentClass(id, jsonData, parent) { - var _this2; - _this2 = _RxDocumentParent.call(this, null, jsonData) || this; - _this2.id = id; - _this2.parent = parent; - 
return _this2; - } - (0, _inheritsLoose2.default)(RxLocalDocumentClass, _RxDocumentParent); - return RxLocalDocumentClass; -}(RxDocumentParent); -var RxLocalDocumentPrototype = { - get isLocal() { - return true; - }, - // - // overwrites - // - get allAttachments$() { - // this is overwritten here because we cannot re-set getters on the prototype - throw (0, _rxError.newRxError)('LD1', { - document: this - }); - }, - get primaryPath() { - return 'id'; - }, - get primary() { - return this.id; - }, - get $() { - var _this = this; - var state = (0, _index.getFromMapOrThrow)(_localDocumentsHelper.LOCAL_DOC_STATE_BY_PARENT_RESOLVED, this.parent); - return _this.parent.$.pipe((0, _rxjs.filter)(changeEvent => changeEvent.documentId === this.primary), (0, _rxjs.filter)(changeEvent => changeEvent.isLocal), (0, _rxjs.map)(changeEvent => (0, _rxChangeEvent.getDocumentDataOfRxChangeEvent)(changeEvent)), (0, _rxjs.startWith)(state.docCache.getLatestDocumentData(this.primary)), (0, _rxjs.distinctUntilChanged)((prev, curr) => prev._rev === curr._rev), (0, _rxjs.map)(docData => state.docCache.getCachedRxDocument(docData)), (0, _rxjs.shareReplay)(_index.RXJS_SHARE_REPLAY_DEFAULTS)); - }, - get $$() { - var _this = this; - var db = getRxDatabaseFromLocalDocument(_this); - var reactivity = db.getReactivityFactory(); - return reactivity.fromObservable(_this.$, _this.getLatest()._data, db); - }, - get deleted$$() { - var _this = this; - var db = getRxDatabaseFromLocalDocument(_this); - var reactivity = db.getReactivityFactory(); - return reactivity.fromObservable(_this.deleted$, _this.getLatest().deleted, db); - }, - getLatest() { - var state = (0, _index.getFromMapOrThrow)(_localDocumentsHelper.LOCAL_DOC_STATE_BY_PARENT_RESOLVED, this.parent); - var latestDocData = state.docCache.getLatestDocumentData(this.primary); - return state.docCache.getCachedRxDocument(latestDocData); - }, - get(objPath) { - objPath = 'data.' 
+ objPath; - if (!this._data) { - return undefined; - } - if (typeof objPath !== 'string') { - throw (0, _rxError.newRxTypeError)('LD2', { - objPath - }); - } - var valueObj = (0, _index.getProperty)(this._data, objPath); - valueObj = _overwritable.overwritable.deepFreezeWhenDevMode(valueObj); - return valueObj; - }, - get$(objPath) { - objPath = 'data.' + objPath; - if (_overwritable.overwritable.isDevMode()) { - if (objPath.includes('.item.')) { - throw (0, _rxError.newRxError)('LD3', { - objPath - }); - } - if (objPath === this.primaryPath) { - throw (0, _rxError.newRxError)('LD4'); - } - } - return this.$.pipe((0, _rxjs.map)(localDocument => localDocument._data), (0, _rxjs.map)(data => (0, _index.getProperty)(data, objPath)), (0, _rxjs.distinctUntilChanged)()); - }, - get$$(objPath) { - var db = getRxDatabaseFromLocalDocument(this); - var reactivity = db.getReactivityFactory(); - return reactivity.fromObservable(this.get$(objPath), this.getLatest().get(objPath), db); - }, - async incrementalModify(mutationFunction) { - var state = await (0, _localDocumentsHelper.getLocalDocStateByParent)(this.parent); - return state.incrementalWriteQueue.addWrite(this._data, async docData => { - docData.data = await mutationFunction(docData.data, this); - return docData; - }).then(result => state.docCache.getCachedRxDocument(result)); - }, - incrementalPatch(patch) { - return this.incrementalModify(docData => { - Object.entries(patch).forEach(([k, v]) => { - docData[k] = v; - }); - return docData; - }); - }, - async _saveData(newData) { - var state = await (0, _localDocumentsHelper.getLocalDocStateByParent)(this.parent); - var oldData = this._data; - newData.id = this.id; - return state.storageInstance.bulkWrite([{ - previous: oldData, - document: newData - }], 'local-document-save-data').then(res => { - var docResult = res.success[0]; - if (!docResult) { - throw res.error[0]; - } - newData = (0, _index.flatClone)(newData); - newData._rev = docResult._rev; - }); - }, - async 
remove() { - var state = await (0, _localDocumentsHelper.getLocalDocStateByParent)(this.parent); - var writeData = (0, _index.flatClone)(this._data); - writeData._deleted = true; - return (0, _rxStorageHelper.writeSingle)(state.storageInstance, { - previous: this._data, - document: writeData - }, 'local-document-remove').then(writeResult => state.docCache.getCachedRxDocument(writeResult)); - } -}; -var INIT_DONE = false; -var _init = () => { - if (INIT_DONE) return;else INIT_DONE = true; - - // add functions of RxDocument - var docBaseProto = _rxDocument.basePrototype; - var props = Object.getOwnPropertyNames(docBaseProto); - props.forEach(key => { - var exists = Object.getOwnPropertyDescriptor(RxLocalDocumentPrototype, key); - if (exists) return; - var desc = Object.getOwnPropertyDescriptor(docBaseProto, key); - Object.defineProperty(RxLocalDocumentPrototype, key, desc); - }); - - /** - * Overwrite things that do not work on local documents - * with a throwing function. - */ - var getThrowingFun = k => () => { - throw (0, _rxError.newRxError)('LD6', { - functionName: k - }); - }; - ['populate', 'update', 'putAttachment', 'getAttachment', 'allAttachments'].forEach(k => RxLocalDocumentPrototype[k] = getThrowingFun(k)); -}; -function createRxLocalDocument(data, parent) { - _init(); - var newDoc = new RxLocalDocumentClass(data.id, data, parent); - Object.setPrototypeOf(newDoc, RxLocalDocumentPrototype); - newDoc.prototype = RxLocalDocumentPrototype; - return newDoc; -} -function getRxDatabaseFromLocalDocument(doc) { - var parent = doc.parent; - if ((0, _rxDatabase.isRxDatabase)(parent)) { - return parent; - } else { - return parent.database; - } -} -//# sourceMappingURL=rx-local-document.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/local-documents/rx-local-document.js.map b/dist/cjs/plugins/local-documents/rx-local-document.js.map deleted file mode 100644 index 633fe7fe41b..00000000000 --- a/dist/cjs/plugins/local-documents/rx-local-document.js.map 
+++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-local-document.js","names":["_rxjs","require","_overwritable","_rxChangeEvent","_rxDocument","_rxError","_rxStorageHelper","_index","_localDocumentsHelper","_rxDatabase","RxDocumentParent","createRxDocumentConstructor","RxLocalDocumentClass","_RxDocumentParent","id","jsonData","parent","_this2","call","_inheritsLoose2","default","RxLocalDocumentPrototype","isLocal","allAttachments$","newRxError","document","primaryPath","primary","$","_this","state","getFromMapOrThrow","LOCAL_DOC_STATE_BY_PARENT_RESOLVED","pipe","filter","changeEvent","documentId","map","getDocumentDataOfRxChangeEvent","startWith","docCache","getLatestDocumentData","distinctUntilChanged","prev","curr","_rev","docData","getCachedRxDocument","shareReplay","RXJS_SHARE_REPLAY_DEFAULTS","$$","db","getRxDatabaseFromLocalDocument","reactivity","getReactivityFactory","fromObservable","getLatest","_data","deleted$$","deleted$","deleted","latestDocData","get","objPath","undefined","newRxTypeError","valueObj","getProperty","overwritable","deepFreezeWhenDevMode","get$","isDevMode","includes","localDocument","data","get$$","incrementalModify","mutationFunction","getLocalDocStateByParent","incrementalWriteQueue","addWrite","then","result","incrementalPatch","patch","Object","entries","forEach","k","v","_saveData","newData","oldData","storageInstance","bulkWrite","previous","res","docResult","success","error","flatClone","remove","writeData","_deleted","writeSingle","writeResult","INIT_DONE","_init","docBaseProto","basePrototype","props","getOwnPropertyNames","key","exists","getOwnPropertyDescriptor","desc","defineProperty","getThrowingFun","functionName","createRxLocalDocument","newDoc","setPrototypeOf","prototype","doc","isRxDatabase","database"],"sources":["../../../../src/plugins/local-documents/rx-local-document.ts"],"sourcesContent":["import { Observable } from 'rxjs';\nimport {\n distinctUntilChanged,\n filter,\n map,\n shareReplay,\n startWith\n} from 
'rxjs';\nimport { overwritable } from '../../overwritable.ts';\nimport { getDocumentDataOfRxChangeEvent } from '../../rx-change-event.ts';\nimport {\n basePrototype,\n createRxDocumentConstructor\n} from '../../rx-document.ts';\nimport {\n newRxError,\n newRxTypeError\n} from '../../rx-error.ts';\nimport { writeSingle } from '../../rx-storage-helper.ts';\nimport type {\n LocalDocumentModifyFunction,\n RxCollection,\n RxDatabase,\n RxDocument,\n RxDocumentData,\n RxDocumentWriteData,\n RxLocalDocument,\n RxLocalDocumentData\n} from '../../types/index.d.ts';\nimport {\n flatClone,\n getDefaultRevision,\n getDefaultRxDocumentMeta,\n getFromMapOrThrow,\n getProperty,\n RXJS_SHARE_REPLAY_DEFAULTS\n} from '../../plugins/utils/index.ts';\nimport { getLocalDocStateByParent, LOCAL_DOC_STATE_BY_PARENT_RESOLVED } from './local-documents-helper.ts';\nimport { isRxDatabase } from '../../rx-database.ts';\n\nconst RxDocumentParent = createRxDocumentConstructor() as any;\n\nclass RxLocalDocumentClass extends RxDocumentParent {\n constructor(\n public readonly id: string,\n jsonData: DocData,\n public readonly parent: RxCollection | RxDatabase\n ) {\n super(null, jsonData);\n }\n}\n\n\n\nconst RxLocalDocumentPrototype: any = {\n get isLocal() {\n return true;\n },\n\n //\n // overwrites\n //\n get allAttachments$() {\n // this is overwritten here because we cannot re-set getters on the prototype\n throw newRxError('LD1', {\n document: this\n });\n },\n get primaryPath() {\n return 'id';\n },\n get primary() {\n return this.id;\n },\n get $(): Observable> {\n const _this: RxLocalDocumentClass = this as any;\n const state = getFromMapOrThrow(LOCAL_DOC_STATE_BY_PARENT_RESOLVED, this.parent);\n return _this.parent.$.pipe(\n filter(changeEvent => changeEvent.documentId === this.primary),\n filter(changeEvent => changeEvent.isLocal),\n map(changeEvent => getDocumentDataOfRxChangeEvent(changeEvent)),\n startWith(state.docCache.getLatestDocumentData(this.primary)),\n 
distinctUntilChanged((prev, curr) => prev._rev === curr._rev),\n map(docData => state.docCache.getCachedRxDocument(docData)),\n shareReplay(RXJS_SHARE_REPLAY_DEFAULTS)\n ) as Observable;\n },\n get $$(): any {\n const _this: RxLocalDocumentClass = this as any;\n const db = getRxDatabaseFromLocalDocument(_this);\n const reactivity = db.getReactivityFactory();\n return reactivity.fromObservable(\n _this.$,\n _this.getLatest()._data,\n db\n );\n },\n get deleted$$() {\n const _this: RxLocalDocumentClass = this as any;\n const db = getRxDatabaseFromLocalDocument(_this);\n const reactivity = db.getReactivityFactory();\n return reactivity.fromObservable(\n _this.deleted$,\n _this.getLatest().deleted,\n db\n );\n },\n getLatest(this: RxLocalDocument): RxLocalDocument {\n const state = getFromMapOrThrow(LOCAL_DOC_STATE_BY_PARENT_RESOLVED, this.parent);\n const latestDocData = state.docCache.getLatestDocumentData(this.primary);\n return state.docCache.getCachedRxDocument(latestDocData) as any;\n },\n get(this: RxDocument, objPath: string) {\n objPath = 'data.' + objPath;\n\n if (!this._data) {\n return undefined;\n }\n if (typeof objPath !== 'string') {\n throw newRxTypeError('LD2', {\n objPath\n });\n }\n\n let valueObj = getProperty(this._data, objPath);\n valueObj = overwritable.deepFreezeWhenDevMode(valueObj);\n return valueObj;\n },\n get$(this: RxDocument, objPath: string) {\n objPath = 'data.' 
+ objPath;\n\n if (overwritable.isDevMode()) {\n if (objPath.includes('.item.')) {\n throw newRxError('LD3', {\n objPath\n });\n }\n if (objPath === this.primaryPath) {\n throw newRxError('LD4');\n }\n }\n return this.$\n .pipe(\n map(localDocument => localDocument._data),\n map(data => getProperty(data, objPath)),\n distinctUntilChanged()\n );\n },\n get$$(this: RxDocument, objPath: string) {\n const db = getRxDatabaseFromLocalDocument(this as any);\n const reactivity = db.getReactivityFactory();\n return reactivity.fromObservable(\n this.get$(objPath),\n this.getLatest().get(objPath),\n db\n );\n },\n async incrementalModify(\n this: RxLocalDocument,\n mutationFunction: LocalDocumentModifyFunction\n ) {\n const state = await getLocalDocStateByParent(this.parent);\n\n return state.incrementalWriteQueue.addWrite(\n this._data as any,\n async (docData) => {\n docData.data = await mutationFunction(docData.data, this);\n return docData;\n }\n ).then(result => state.docCache.getCachedRxDocument(result as any));\n },\n incrementalPatch(patch: Partial) {\n return this.incrementalModify((docData: any) => {\n Object\n .entries(patch)\n .forEach(([k, v]) => {\n docData[k] = v;\n });\n return docData;\n });\n },\n async _saveData(this: RxLocalDocument, newData: RxDocumentData) {\n const state = await getLocalDocStateByParent(this.parent);\n const oldData: RxDocumentData = this._data;\n newData.id = (this as any).id;\n return state.storageInstance.bulkWrite([{\n previous: oldData,\n document: newData\n }], 'local-document-save-data')\n .then((res) => {\n const docResult = res.success[0];\n if (!docResult) {\n throw res.error[0];\n }\n newData = flatClone(newData);\n newData._rev = docResult._rev;\n });\n },\n\n async remove(this: RxLocalDocument): Promise> {\n const state = await getLocalDocStateByParent(this.parent);\n const writeData = flatClone(this._data);\n writeData._deleted = true;\n return writeSingle(state.storageInstance, {\n previous: this._data,\n document: 
writeData\n }, 'local-document-remove')\n .then((writeResult) => state.docCache.getCachedRxDocument(writeResult) as any);\n }\n};\n\n\n\nlet INIT_DONE = false;\nconst _init = () => {\n if (INIT_DONE) return;\n else INIT_DONE = true;\n\n // add functions of RxDocument\n const docBaseProto = basePrototype;\n const props = Object.getOwnPropertyNames(docBaseProto);\n props.forEach(key => {\n const exists = Object.getOwnPropertyDescriptor(RxLocalDocumentPrototype, key);\n if (exists) return;\n const desc: any = Object.getOwnPropertyDescriptor(docBaseProto, key);\n Object.defineProperty(RxLocalDocumentPrototype, key, desc);\n });\n\n\n /**\n * Overwrite things that do not work on local documents\n * with a throwing function.\n */\n const getThrowingFun = (k: string) => () => {\n throw newRxError('LD6', {\n functionName: k\n });\n };\n [\n 'populate',\n 'update',\n 'putAttachment',\n 'getAttachment',\n 'allAttachments'\n ].forEach((k: string) => RxLocalDocumentPrototype[k] = getThrowingFun(k));\n};\n\n\n\nexport function createRxLocalDocument(\n data: RxDocumentData>,\n parent: any\n): RxLocalDocument {\n _init();\n const newDoc = new RxLocalDocumentClass(data.id, data, parent);\n Object.setPrototypeOf(newDoc, RxLocalDocumentPrototype);\n newDoc.prototype = RxLocalDocumentPrototype;\n return newDoc as any;\n}\n\n\nexport function getRxDatabaseFromLocalDocument(doc: RxLocalDocument | RxLocalDocumentClass) {\n const parent = doc.parent;\n if (isRxDatabase(parent)) {\n return parent;\n } else {\n return (parent as RxCollection).database;\n 
}\n}\n"],"mappings":";;;;;;;;;AACA,IAAAA,KAAA,GAAAC,OAAA;AAOA,IAAAC,aAAA,GAAAD,OAAA;AACA,IAAAE,cAAA,GAAAF,OAAA;AACA,IAAAG,WAAA,GAAAH,OAAA;AAIA,IAAAI,QAAA,GAAAJ,OAAA;AAIA,IAAAK,gBAAA,GAAAL,OAAA;AAWA,IAAAM,MAAA,GAAAN,OAAA;AAQA,IAAAO,qBAAA,GAAAP,OAAA;AACA,IAAAQ,WAAA,GAAAR,OAAA;AAEA,IAAMS,gBAAgB,GAAG,IAAAC,uCAA2B,EAAC,CAAQ;AAAC,IAExDC,oBAAoB,0BAAAC,iBAAA;EACtB,SAAAD,qBACoBE,EAAU,EAC1BC,QAAiB,EACDC,MAAiC,EACnD;IAAA,IAAAC,MAAA;IACEA,MAAA,GAAAJ,iBAAA,CAAAK,IAAA,OAAM,IAAI,EAAEH,QAAQ,CAAC;IAACE,MAAA,CAJNH,EAAU,GAAVA,EAAU;IAAAG,MAAA,CAEVD,MAAiC,GAAjCA,MAAiC;IAAA,OAAAC,MAAA;EAGrD;EAAC,IAAAE,eAAA,CAAAC,OAAA,EAAAR,oBAAA,EAAAC,iBAAA;EAAA,OAAAD,oBAAA;AAAA,EAP6CF,gBAAgB;AAYlE,IAAMW,wBAA6B,GAAG;EAClC,IAAIC,OAAOA,CAAA,EAAG;IACV,OAAO,IAAI;EACf,CAAC;EAED;EACA;EACA;EACA,IAAIC,eAAeA,CAAA,EAAG;IAClB;IACA,MAAM,IAAAC,mBAAU,EAAC,KAAK,EAAE;MACpBC,QAAQ,EAAE;IACd,CAAC,CAAC;EACN,CAAC;EACD,IAAIC,WAAWA,CAAA,EAAG;IACd,OAAO,IAAI;EACf,CAAC;EACD,IAAIC,OAAOA,CAAA,EAAG;IACV,OAAO,IAAI,CAACb,EAAE;EAClB,CAAC;EACD,IAAIc,CAACA,CAAA,EAA0C;IAC3C,IAAMC,KAA2B,GAAG,IAAW;IAC/C,IAAMC,KAAK,GAAG,IAAAC,wBAAiB,EAACC,wDAAkC,EAAE,IAAI,CAAChB,MAAM,CAAC;IAChF,OAAOa,KAAK,CAACb,MAAM,CAACY,CAAC,CAACK,IAAI,CACtB,IAAAC,YAAM,EAACC,WAAW,IAAIA,WAAW,CAACC,UAAU,KAAK,IAAI,CAACT,OAAO,CAAC,EAC9D,IAAAO,YAAM,EAACC,WAAW,IAAIA,WAAW,CAACb,OAAO,CAAC,EAC1C,IAAAe,SAAG,EAACF,WAAW,IAAI,IAAAG,6CAA8B,EAACH,WAAW,CAAC,CAAC,EAC/D,IAAAI,eAAS,EAACT,KAAK,CAACU,QAAQ,CAACC,qBAAqB,CAAC,IAAI,CAACd,OAAO,CAAC,CAAC,EAC7D,IAAAe,0BAAoB,EAAC,CAACC,IAAI,EAAEC,IAAI,KAAKD,IAAI,CAACE,IAAI,KAAKD,IAAI,CAACC,IAAI,CAAC,EAC7D,IAAAR,SAAG,EAACS,OAAO,IAAIhB,KAAK,CAACU,QAAQ,CAACO,mBAAmB,CAACD,OAAO,CAAC,CAAC,EAC3D,IAAAE,iBAAW,EAACC,iCAA0B,CAC1C,CAAC;EACL,CAAC;EACD,IAAIC,EAAEA,CAAA,EAAQ;IACV,IAAMrB,KAA2B,GAAG,IAAW;IAC/C,IAAMsB,EAAE,GAAGC,8BAA8B,CAACvB,KAAK,CAAC;IAChD,IAAMwB,UAAU,GAAGF,EAAE,CAACG,oBAAoB,CAAC,CAAC;IAC5C,OAAOD,UAAU,CAACE,cAAc,CAC5B1B,KAAK,CAACD,CAAC,EACPC,KAAK,CAAC2B,SAAS,CAAC,CAAC,CAACC,KAAK,EACvBN,EACJ,CAAC;EACL,CAAC;EACD,IAAIO,SAASA,CAAA,EAAG;IACZ,IAAM7B,KAA2B,
GAAG,IAAW;IAC/C,IAAMsB,EAAE,GAAGC,8BAA8B,CAACvB,KAAK,CAAC;IAChD,IAAMwB,UAAU,GAAGF,EAAE,CAACG,oBAAoB,CAAC,CAAC;IAC5C,OAAOD,UAAU,CAACE,cAAc,CAC5B1B,KAAK,CAAC8B,QAAQ,EACd9B,KAAK,CAAC2B,SAAS,CAAC,CAAC,CAACI,OAAO,EACzBT,EACJ,CAAC;EACL,CAAC;EACDK,SAASA,CAAA,EAAmD;IACxD,IAAM1B,KAAK,GAAG,IAAAC,wBAAiB,EAACC,wDAAkC,EAAE,IAAI,CAAChB,MAAM,CAAC;IAChF,IAAM6C,aAAa,GAAG/B,KAAK,CAACU,QAAQ,CAACC,qBAAqB,CAAC,IAAI,CAACd,OAAO,CAAC;IACxE,OAAOG,KAAK,CAACU,QAAQ,CAACO,mBAAmB,CAACc,aAAa,CAAC;EAC5D,CAAC;EACDC,GAAGA,CAAmBC,OAAe,EAAE;IACnCA,OAAO,GAAG,OAAO,GAAGA,OAAO;IAE3B,IAAI,CAAC,IAAI,CAACN,KAAK,EAAE;MACb,OAAOO,SAAS;IACpB;IACA,IAAI,OAAOD,OAAO,KAAK,QAAQ,EAAE;MAC7B,MAAM,IAAAE,uBAAc,EAAC,KAAK,EAAE;QACxBF;MACJ,CAAC,CAAC;IACN;IAEA,IAAIG,QAAQ,GAAG,IAAAC,kBAAW,EAAC,IAAI,CAACV,KAAK,EAAEM,OAAO,CAAC;IAC/CG,QAAQ,GAAGE,0BAAY,CAACC,qBAAqB,CAACH,QAAQ,CAAC;IACvD,OAAOA,QAAQ;EACnB,CAAC;EACDI,IAAIA,CAAmBP,OAAe,EAAE;IACpCA,OAAO,GAAG,OAAO,GAAGA,OAAO;IAE3B,IAAIK,0BAAY,CAACG,SAAS,CAAC,CAAC,EAAE;MAC1B,IAAIR,OAAO,CAACS,QAAQ,CAAC,QAAQ,CAAC,EAAE;QAC5B,MAAM,IAAAhD,mBAAU,EAAC,KAAK,EAAE;UACpBuC;QACJ,CAAC,CAAC;MACN;MACA,IAAIA,OAAO,KAAK,IAAI,CAACrC,WAAW,EAAE;QAC9B,MAAM,IAAAF,mBAAU,EAAC,KAAK,CAAC;MAC3B;IACJ;IACA,OAAO,IAAI,CAACI,CAAC,CACRK,IAAI,CACD,IAAAI,SAAG,EAACoC,aAAa,IAAIA,aAAa,CAAChB,KAAK,CAAC,EACzC,IAAApB,SAAG,EAACqC,IAAI,IAAI,IAAAP,kBAAW,EAACO,IAAI,EAAEX,OAAO,CAAC,CAAC,EACvC,IAAArB,0BAAoB,EAAC,CACzB,CAAC;EACT,CAAC;EACDiC,KAAKA,CAAmBZ,OAAe,EAAE;IACrC,IAAMZ,EAAE,GAAGC,8BAA8B,CAAC,IAAW,CAAC;IACtD,IAAMC,UAAU,GAAGF,EAAE,CAACG,oBAAoB,CAAC,CAAC;IAC5C,OAAOD,UAAU,CAACE,cAAc,CAC5B,IAAI,CAACe,IAAI,CAACP,OAAO,CAAC,EAClB,IAAI,CAACP,SAAS,CAAC,CAAC,CAACM,GAAG,CAACC,OAAO,CAAC,EAC7BZ,EACJ,CAAC;EACL,CAAC;EACD,MAAMyB,iBAAiBA,CAEnBC,gBAAkD,EACpD;IACE,IAAM/C,KAAK,GAAG,MAAM,IAAAgD,8CAAwB,EAAC,IAAI,CAAC9D,MAAM,CAAC;IAEzD,OAAOc,KAAK,CAACiD,qBAAqB,CAACC,QAAQ,CACvC,IAAI,CAACvB,KAAK,EACV,MAAOX,OAAO,IAAK;MACfA,OAAO,CAAC4B,IAAI,GAAG,MAAMG,gBAAgB,CAAC/B,OAAO,CAAC4B,IAAI,EAAE,IAAI,CAAC;MACzD,OAAO5B,OAAO;IAClB,CACJ,CAAC,CAACmC,IAAI,CAACC,MAAM,IAAIp
D,KAAK,CAACU,QAAQ,CAACO,mBAAmB,CAACmC,MAAa,CAAC,CAAC;EACvE,CAAC;EACDC,gBAAgBA,CAACC,KAAmB,EAAE;IAClC,OAAO,IAAI,CAACR,iBAAiB,CAAE9B,OAAY,IAAK;MAC5CuC,MAAM,CACDC,OAAO,CAACF,KAAK,CAAC,CACdG,OAAO,CAAC,CAAC,CAACC,CAAC,EAAEC,CAAC,CAAC,KAAK;QACjB3C,OAAO,CAAC0C,CAAC,CAAC,GAAGC,CAAC;MAClB,CAAC,CAAC;MACN,OAAO3C,OAAO;IAClB,CAAC,CAAC;EACN,CAAC;EACD,MAAM4C,SAASA,CAA6BC,OAA4C,EAAE;IACtF,IAAM7D,KAAK,GAAG,MAAM,IAAAgD,8CAAwB,EAAC,IAAI,CAAC9D,MAAM,CAAC;IACzD,IAAM4E,OAA4C,GAAG,IAAI,CAACnC,KAAK;IAC/DkC,OAAO,CAAC7E,EAAE,GAAI,IAAI,CAASA,EAAE;IAC7B,OAAOgB,KAAK,CAAC+D,eAAe,CAACC,SAAS,CAAC,CAAC;MACpCC,QAAQ,EAAEH,OAAO;MACjBnE,QAAQ,EAAEkE;IACd,CAAC,CAAC,EAAE,0BAA0B,CAAC,CAC1BV,IAAI,CAAEe,GAAG,IAAK;MACX,IAAMC,SAAS,GAAGD,GAAG,CAACE,OAAO,CAAC,CAAC,CAAC;MAChC,IAAI,CAACD,SAAS,EAAE;QACZ,MAAMD,GAAG,CAACG,KAAK,CAAC,CAAC,CAAC;MACtB;MACAR,OAAO,GAAG,IAAAS,gBAAS,EAACT,OAAO,CAAC;MAC5BA,OAAO,CAAC9C,IAAI,GAAGoD,SAAS,CAACpD,IAAI;IACjC,CAAC,CAAC;EACV,CAAC;EAED,MAAMwD,MAAMA,CAAA,EAA4D;IACpE,IAAMvE,KAAK,GAAG,MAAM,IAAAgD,8CAAwB,EAAC,IAAI,CAAC9D,MAAM,CAAC;IACzD,IAAMsF,SAAS,GAAG,IAAAF,gBAAS,EAAC,IAAI,CAAC3C,KAAK,CAAC;IACvC6C,SAAS,CAACC,QAAQ,GAAG,IAAI;IACzB,OAAO,IAAAC,4BAAW,EAAC1E,KAAK,CAAC+D,eAAe,EAAE;MACtCE,QAAQ,EAAE,IAAI,CAACtC,KAAK;MACpBhC,QAAQ,EAAE6E;IACd,CAAC,EAAE,uBAAuB,CAAC,CACtBrB,IAAI,CAAEwB,WAAW,IAAK3E,KAAK,CAACU,QAAQ,CAACO,mBAAmB,CAAC0D,WAAW,CAAQ,CAAC;EACtF;AACJ,CAAC;AAID,IAAIC,SAAS,GAAG,KAAK;AACrB,IAAMC,KAAK,GAAGA,CAAA,KAAM;EAChB,IAAID,SAAS,EAAE,OAAO,KACjBA,SAAS,GAAG,IAAI;;EAErB;EACA,IAAME,YAAY,GAAGC,yBAAa;EAClC,IAAMC,KAAK,GAAGzB,MAAM,CAAC0B,mBAAmB,CAACH,YAAY,CAAC;EACtDE,KAAK,CAACvB,OAAO,CAACyB,GAAG,IAAI;IACjB,IAAMC,MAAM,GAAG5B,MAAM,CAAC6B,wBAAwB,CAAC7F,wBAAwB,EAAE2F,GAAG,CAAC;IAC7E,IAAIC,MAAM,EAAE;IACZ,IAAME,IAAS,GAAG9B,MAAM,CAAC6B,wBAAwB,CAACN,YAAY,EAAEI,GAAG,CAAC;IACpE3B,MAAM,CAAC+B,cAAc,CAAC/F,wBAAwB,EAAE2F,GAAG,EAAEG,IAAI,CAAC;EAC9D,CAAC,CAAC;;EAGF;AACJ;AACA;AACA;EACI,IAAME,cAAc,GAAI7B,CAAS,IAAK,MAAM;IACxC,MAAM,IAAAhE,mBAAU,EAAC,KAAK,EAAE;MACpB8F,YAAY,EAAE9B;IAClB,CAAC,CAAC;EACN,CAAC;EACD,CACI,UAA
U,EACV,QAAQ,EACR,eAAe,EACf,eAAe,EACf,gBAAgB,CACnB,CAACD,OAAO,CAAEC,CAAS,IAAKnE,wBAAwB,CAACmE,CAAC,CAAC,GAAG6B,cAAc,CAAC7B,CAAC,CAAC,CAAC;AAC7E,CAAC;AAIM,SAAS+B,qBAAqBA,CACjC7C,IAAkD,EAClD1D,MAAW,EACa;EACxB2F,KAAK,CAAC,CAAC;EACP,IAAMa,MAAM,GAAG,IAAI5G,oBAAoB,CAAC8D,IAAI,CAAC5D,EAAE,EAAE4D,IAAI,EAAE1D,MAAM,CAAC;EAC9DqE,MAAM,CAACoC,cAAc,CAACD,MAAM,EAAEnG,wBAAwB,CAAC;EACvDmG,MAAM,CAACE,SAAS,GAAGrG,wBAAwB;EAC3C,OAAOmG,MAAM;AACjB;AAGO,SAASpE,8BAA8BA,CAACuE,GAAgD,EAAE;EAC7F,IAAM3G,MAAM,GAAG2G,GAAG,CAAC3G,MAAM;EACzB,IAAI,IAAA4G,wBAAY,EAAC5G,MAAM,CAAC,EAAE;IACtB,OAAOA,MAAM;EACjB,CAAC,MAAM;IACH,OAAQA,MAAM,CAAkB6G,QAAQ;EAC5C;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/migration-schema/index.js b/dist/cjs/plugins/migration-schema/index.js deleted file mode 100644 index d7b1b72e08e..00000000000 --- a/dist/cjs/plugins/migration-schema/index.js +++ /dev/null @@ -1,84 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _exportNames = { - DATA_MIGRATOR_BY_COLLECTION: true, - RxDBMigrationPlugin: true, - RxDBMigrationSchemaPlugin: true -}; -exports.RxDBMigrationSchemaPlugin = exports.RxDBMigrationPlugin = exports.DATA_MIGRATOR_BY_COLLECTION = void 0; -var _rxjs = require("rxjs"); -var _index = require("../../plugins/utils/index.js"); -var _rxMigrationState = require("./rx-migration-state.js"); -Object.keys(_rxMigrationState).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _rxMigrationState[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxMigrationState[key]; - } - }); -}); -var _migrationHelpers = require("./migration-helpers.js"); -Object.keys(_migrationHelpers).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, 
key)) return; - if (key in exports && exports[key] === _migrationHelpers[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _migrationHelpers[key]; - } - }); -}); -var _plugin = require("../../plugin.js"); -var _index2 = require("../local-documents/index.js"); -var _migrationTypes = require("./migration-types.js"); -Object.keys(_migrationTypes).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _migrationTypes[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _migrationTypes[key]; - } - }); -}); -var DATA_MIGRATOR_BY_COLLECTION = exports.DATA_MIGRATOR_BY_COLLECTION = new WeakMap(); -var RxDBMigrationPlugin = exports.RxDBMigrationPlugin = { - name: 'migration-schema', - rxdb: true, - init() { - (0, _plugin.addRxPlugin)(_index2.RxDBLocalDocumentsPlugin); - }, - hooks: { - preDestroyRxDatabase: { - after: _migrationHelpers.onDatabaseDestroy - } - }, - prototypes: { - RxDatabase: proto => { - proto.migrationStates = function () { - return (0, _migrationHelpers.getMigrationStateByDatabase)(this).pipe((0, _rxjs.shareReplay)(_index.RXJS_SHARE_REPLAY_DEFAULTS)); - }; - }, - RxCollection: proto => { - proto.getMigrationState = function () { - return (0, _index.getFromMapOrCreate)(DATA_MIGRATOR_BY_COLLECTION, this, () => new _rxMigrationState.RxMigrationState(this.asRxCollection, this.migrationStrategies)); - }; - proto.migrationNeeded = function () { - if (this.schema.version === 0) { - return _index.PROMISE_RESOLVE_FALSE; - } - return (0, _migrationHelpers.mustMigrate)(this.getMigrationState()); - }; - } - } -}; -var RxDBMigrationSchemaPlugin = exports.RxDBMigrationSchemaPlugin = RxDBMigrationPlugin; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/migration-schema/index.js.map 
b/dist/cjs/plugins/migration-schema/index.js.map deleted file mode 100644 index 0bdf7c931d5..00000000000 --- a/dist/cjs/plugins/migration-schema/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_rxjs","require","_index","_rxMigrationState","Object","keys","forEach","key","prototype","hasOwnProperty","call","_exportNames","exports","defineProperty","enumerable","get","_migrationHelpers","_plugin","_index2","_migrationTypes","DATA_MIGRATOR_BY_COLLECTION","WeakMap","RxDBMigrationPlugin","name","rxdb","init","addRxPlugin","RxDBLocalDocumentsPlugin","hooks","preDestroyRxDatabase","after","onDatabaseDestroy","prototypes","RxDatabase","proto","migrationStates","getMigrationStateByDatabase","pipe","shareReplay","RXJS_SHARE_REPLAY_DEFAULTS","RxCollection","getMigrationState","getFromMapOrCreate","RxMigrationState","asRxCollection","migrationStrategies","migrationNeeded","schema","version","PROMISE_RESOLVE_FALSE","mustMigrate","RxDBMigrationSchemaPlugin"],"sources":["../../../../src/plugins/migration-schema/index.ts"],"sourcesContent":["import {\n Observable\n} from 'rxjs';\nimport {\n shareReplay\n} from 'rxjs';\nimport type {\n RxPlugin,\n RxCollection,\n RxDatabase\n} from '../../types/index.ts';\nimport {\n getFromMapOrCreate,\n PROMISE_RESOLVE_FALSE,\n RXJS_SHARE_REPLAY_DEFAULTS\n} from '../../plugins/utils/index.ts';\nimport {\n RxMigrationState\n} from './rx-migration-state.ts';\nimport {\n getMigrationStateByDatabase,\n mustMigrate,\n onDatabaseDestroy\n} from './migration-helpers.ts';\nimport { addRxPlugin } from '../../plugin.ts';\nimport { RxDBLocalDocumentsPlugin } from '../local-documents/index.ts';\n\nexport const DATA_MIGRATOR_BY_COLLECTION: WeakMap = new WeakMap();\n\nexport const RxDBMigrationPlugin: RxPlugin = {\n name: 'migration-schema',\n rxdb: true,\n init() {\n addRxPlugin(RxDBLocalDocumentsPlugin);\n },\n hooks: {\n preDestroyRxDatabase: {\n after: onDatabaseDestroy\n }\n },\n prototypes: {\n RxDatabase: (proto: any) 
=> {\n proto.migrationStates = function (this: RxDatabase): Observable {\n return getMigrationStateByDatabase(this).pipe(\n shareReplay(RXJS_SHARE_REPLAY_DEFAULTS)\n );\n };\n },\n RxCollection: (proto: any) => {\n proto.getMigrationState = function (this: RxCollection): RxMigrationState {\n return getFromMapOrCreate(\n DATA_MIGRATOR_BY_COLLECTION,\n this,\n () => new RxMigrationState(\n this.asRxCollection,\n this.migrationStrategies\n )\n );\n };\n proto.migrationNeeded = function (this: RxCollection) {\n if (this.schema.version === 0) {\n return PROMISE_RESOLVE_FALSE;\n }\n return mustMigrate(this.getMigrationState());\n };\n }\n }\n};\n\nexport const RxDBMigrationSchemaPlugin = RxDBMigrationPlugin;\n\n\nexport * from './rx-migration-state.ts';\nexport * from './migration-helpers.ts';\nexport * from './migration-types.ts';\n"],"mappings":";;;;;;;;;;;AAGA,IAAAA,KAAA,GAAAC,OAAA;AAQA,IAAAC,MAAA,GAAAD,OAAA;AAKA,IAAAE,iBAAA,GAAAF,OAAA;AAwDAG,MAAA,CAAAC,IAAA,CAAAF,iBAAA,EAAAG,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAJ,iBAAA,CAAAI,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAZ,iBAAA,CAAAI,GAAA;IAAA;EAAA;AAAA;AArDA,IAAAS,iBAAA,GAAAf,OAAA;AAsDAG,MAAA,CAAAC,IAAA,CAAAW,iBAAA,EAAAV,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAS,iBAAA,CAAAT,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAC,iBAAA,CAAAT,GAAA;IAAA;EAAA;AAAA;AAjDA,IAAAU,OAAA,GAAAhB,OAAA;AACA,IAAAiB,OAAA,GAAAjB,OAAA;AAiDA,IAAAkB,eAAA,GAAAlB,OAAA;AAAAG,MAAA,CAAAC,IAAA,CAAAc,eAAA,EAAAb,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAY,eAAA,CAAAZ,GAAA;EAAAH,MAAA,CAAA
S,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAI,eAAA,CAAAZ,GAAA;IAAA;EAAA;AAAA;AA/CO,IAAMa,2BAAoE,GAAAR,OAAA,CAAAQ,2BAAA,GAAG,IAAIC,OAAO,CAAC,CAAC;AAE1F,IAAMC,mBAA6B,GAAAV,OAAA,CAAAU,mBAAA,GAAG;EACzCC,IAAI,EAAE,kBAAkB;EACxBC,IAAI,EAAE,IAAI;EACVC,IAAIA,CAAA,EAAG;IACH,IAAAC,mBAAW,EAACC,gCAAwB,CAAC;EACzC,CAAC;EACDC,KAAK,EAAE;IACHC,oBAAoB,EAAE;MAClBC,KAAK,EAAEC;IACX;EACJ,CAAC;EACDC,UAAU,EAAE;IACRC,UAAU,EAAGC,KAAU,IAAK;MACxBA,KAAK,CAACC,eAAe,GAAG,YAA4D;QAChF,OAAO,IAAAC,6CAA2B,EAAC,IAAI,CAAC,CAACC,IAAI,CACzC,IAAAC,iBAAW,EAACC,iCAA0B,CAC1C,CAAC;MACL,CAAC;IACL,CAAC;IACDC,YAAY,EAAGN,KAAU,IAAK;MAC1BA,KAAK,CAACO,iBAAiB,GAAG,YAAgD;QACtE,OAAO,IAAAC,yBAAkB,EACrBtB,2BAA2B,EAC3B,IAAI,EACJ,MAAM,IAAIuB,kCAAgB,CACtB,IAAI,CAACC,cAAc,EACnB,IAAI,CAACC,mBACT,CACJ,CAAC;MACL,CAAC;MACDX,KAAK,CAACY,eAAe,GAAG,YAA8B;QAClD,IAAI,IAAI,CAACC,MAAM,CAACC,OAAO,KAAK,CAAC,EAAE;UAC3B,OAAOC,4BAAqB;QAChC;QACA,OAAO,IAAAC,6BAAW,EAAC,IAAI,CAACT,iBAAiB,CAAC,CAAC,CAAC;MAChD,CAAC;IACL;EACJ;AACJ,CAAC;AAEM,IAAMU,yBAAyB,GAAAvC,OAAA,CAAAuC,yBAAA,GAAG7B,mBAAmB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/migration-schema/migration-helpers.js b/dist/cjs/plugins/migration-schema/migration-helpers.js deleted file mode 100644 index 118dcfd2071..00000000000 --- a/dist/cjs/plugins/migration-schema/migration-helpers.js +++ /dev/null @@ -1,106 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.MIGRATION_DEFAULT_BATCH_SIZE = exports.DATA_MIGRATION_STATE_SUBJECT_BY_DATABASE = void 0; -exports.addMigrationStateToDatabase = addMigrationStateToDatabase; -exports.getMigrationStateByDatabase = getMigrationStateByDatabase; -exports.getOldCollectionMeta = getOldCollectionMeta; -exports.migrateDocumentData = migrateDocumentData; -exports.mustMigrate = mustMigrate; -exports.onDatabaseDestroy = onDatabaseDestroy; -exports.runStrategyIfNotNull = runStrategyIfNotNull; -var _rxjs = require("rxjs"); -var _rxDatabaseInternalStore = 
require("../../rx-database-internal-store.js"); -var _rxSchema = require("../../rx-schema.js"); -var _index = require("../utils/index.js"); -async function getOldCollectionMeta(migrationState) { - var collectionDocKeys = (0, _rxSchema.getPreviousVersions)(migrationState.collection.schema.jsonSchema).map(version => migrationState.collection.name + '-' + version); - var found = await migrationState.database.internalStore.findDocumentsById(collectionDocKeys.map(key => (0, _rxDatabaseInternalStore.getPrimaryKeyOfInternalDocument)(key, _rxDatabaseInternalStore.INTERNAL_CONTEXT_COLLECTION)), false); - if (found.length > 1) { - throw new Error('more than one old collection meta found'); - } - return found[0]; -} - -/** - * runs the doc-data through all following migrationStrategies - * so it will match the newest schema. - * @throws Error if final doc does not match final schema or migrationStrategy crashes - * @return final object or null if migrationStrategy deleted it - */ -function migrateDocumentData(collection, docSchemaVersion, docData) { - /** - * We cannot deep-clone Blob or Buffer - * so we just flat clone it here - * and attach it to the deep cloned document data. 
- */ - var attachmentsBefore = (0, _index.flatClone)(docData._attachments); - var mutateableDocData = (0, _index.clone)(docData); - var meta = mutateableDocData._meta; - delete mutateableDocData._meta; - mutateableDocData._attachments = attachmentsBefore; - var nextVersion = docSchemaVersion + 1; - - // run the document through migrationStrategies - var currentPromise = Promise.resolve(mutateableDocData); - var _loop = function () { - var version = nextVersion; - currentPromise = currentPromise.then(docOrNull => runStrategyIfNotNull(collection, version, docOrNull)); - nextVersion++; - }; - while (nextVersion <= collection.schema.version) { - _loop(); - } - return currentPromise.then(doc => { - if (doc === null) { - return _index.PROMISE_RESOLVE_NULL; - } - doc._meta = meta; - return doc; - }); -} -function runStrategyIfNotNull(collection, version, docOrNull) { - if (docOrNull === null) { - return _index.PROMISE_RESOLVE_NULL; - } else { - var ret = collection.migrationStrategies[version](docOrNull, collection); - var retPromise = (0, _index.toPromise)(ret); - return retPromise; - } -} - -/** - * returns true if a migration is needed - */ -async function mustMigrate(migrationState) { - if (migrationState.collection.schema.version === 0) { - return _index.PROMISE_RESOLVE_FALSE; - } - var oldColDoc = await getOldCollectionMeta(migrationState); - return !!oldColDoc; -} -var MIGRATION_DEFAULT_BATCH_SIZE = exports.MIGRATION_DEFAULT_BATCH_SIZE = 200; -var DATA_MIGRATION_STATE_SUBJECT_BY_DATABASE = exports.DATA_MIGRATION_STATE_SUBJECT_BY_DATABASE = new WeakMap(); -function addMigrationStateToDatabase(migrationState) { - var allSubject = getMigrationStateByDatabase(migrationState.database); - var allList = allSubject.getValue().slice(0); - allList.push(migrationState); - allSubject.next(allList); -} -function getMigrationStateByDatabase(database) { - return (0, _index.getFromMapOrCreate)(DATA_MIGRATION_STATE_SUBJECT_BY_DATABASE, database, () => new 
_rxjs.BehaviorSubject([])); -} - -/** - * Complete on database destroy - * so people do not have to unsubscribe - */ -function onDatabaseDestroy(database) { - var subject = DATA_MIGRATION_STATE_SUBJECT_BY_DATABASE.get(database); - if (subject) { - subject.complete(); - } -} -//# sourceMappingURL=migration-helpers.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/migration-schema/migration-helpers.js.map b/dist/cjs/plugins/migration-schema/migration-helpers.js.map deleted file mode 100644 index f7c527ac125..00000000000 --- a/dist/cjs/plugins/migration-schema/migration-helpers.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"migration-helpers.js","names":["_rxjs","require","_rxDatabaseInternalStore","_rxSchema","_index","getOldCollectionMeta","migrationState","collectionDocKeys","getPreviousVersions","collection","schema","jsonSchema","map","version","name","found","database","internalStore","findDocumentsById","key","getPrimaryKeyOfInternalDocument","INTERNAL_CONTEXT_COLLECTION","length","Error","migrateDocumentData","docSchemaVersion","docData","attachmentsBefore","flatClone","_attachments","mutateableDocData","clone","meta","_meta","nextVersion","currentPromise","Promise","resolve","_loop","then","docOrNull","runStrategyIfNotNull","doc","PROMISE_RESOLVE_NULL","ret","migrationStrategies","retPromise","toPromise","mustMigrate","PROMISE_RESOLVE_FALSE","oldColDoc","MIGRATION_DEFAULT_BATCH_SIZE","exports","DATA_MIGRATION_STATE_SUBJECT_BY_DATABASE","WeakMap","addMigrationStateToDatabase","allSubject","getMigrationStateByDatabase","allList","getValue","slice","push","next","getFromMapOrCreate","BehaviorSubject","onDatabaseDestroy","subject","get","complete"],"sources":["../../../../src/plugins/migration-schema/migration-helpers.ts"],"sourcesContent":["import { BehaviorSubject } from 'rxjs';\nimport {\n INTERNAL_CONTEXT_COLLECTION,\n getPrimaryKeyOfInternalDocument\n} from '../../rx-database-internal-store.ts';\nimport { getPreviousVersions } from 
'../../rx-schema.ts';\nimport type {\n InternalStoreCollectionDocType,\n RxCollection,\n RxDatabase,\n RxDocumentData\n} from '../../types/index.d.ts';\nimport {\n PROMISE_RESOLVE_FALSE,\n PROMISE_RESOLVE_NULL,\n clone,\n flatClone,\n getFromMapOrCreate,\n toPromise\n} from '../utils/index.ts';\nimport { RxMigrationState } from './rx-migration-state.ts';\n\nexport async function getOldCollectionMeta(\n migrationState: RxMigrationState\n): Promise> {\n\n const collectionDocKeys = getPreviousVersions(migrationState.collection.schema.jsonSchema)\n .map(version => migrationState.collection.name + '-' + version);\n\n const found = await migrationState.database.internalStore.findDocumentsById(\n collectionDocKeys.map(key => getPrimaryKeyOfInternalDocument(\n key,\n INTERNAL_CONTEXT_COLLECTION\n )),\n false\n );\n if (found.length > 1) {\n throw new Error('more than one old collection meta found');\n }\n return found[0];\n}\n\n\n/**\n * runs the doc-data through all following migrationStrategies\n * so it will match the newest schema.\n * @throws Error if final doc does not match final schema or migrationStrategy crashes\n * @return final object or null if migrationStrategy deleted it\n */\nexport function migrateDocumentData(\n collection: RxCollection,\n docSchemaVersion: number,\n docData: any\n): Promise {\n /**\n * We cannot deep-clone Blob or Buffer\n * so we just flat clone it here\n * and attach it to the deep cloned document data.\n */\n const attachmentsBefore = flatClone(docData._attachments);\n const mutateableDocData = clone(docData);\n const meta = mutateableDocData._meta;\n delete mutateableDocData._meta;\n mutateableDocData._attachments = attachmentsBefore;\n\n let nextVersion = docSchemaVersion + 1;\n\n // run the document through migrationStrategies\n let currentPromise = Promise.resolve(mutateableDocData);\n while (nextVersion <= collection.schema.version) {\n const version = nextVersion;\n currentPromise = currentPromise.then(docOrNull => 
runStrategyIfNotNull(\n collection,\n version,\n docOrNull\n ));\n nextVersion++;\n }\n\n return currentPromise.then(doc => {\n if (doc === null) {\n return PROMISE_RESOLVE_NULL;\n }\n doc._meta = meta;\n return doc;\n });\n}\n\nexport function runStrategyIfNotNull(\n collection: RxCollection,\n version: number,\n docOrNull: any | null\n): Promise {\n if (docOrNull === null) {\n return PROMISE_RESOLVE_NULL;\n } else {\n const ret = collection.migrationStrategies[version](docOrNull, collection);\n const retPromise = toPromise(ret);\n return retPromise;\n }\n}\n\n/**\n * returns true if a migration is needed\n */\nexport async function mustMigrate(\n migrationState: RxMigrationState\n): Promise {\n if (migrationState.collection.schema.version === 0) {\n return PROMISE_RESOLVE_FALSE;\n }\n const oldColDoc = await getOldCollectionMeta(migrationState);\n return !!oldColDoc;\n}\nexport const MIGRATION_DEFAULT_BATCH_SIZE = 200;\n\n\nexport type MigrationStateWithCollection = {\n collection: RxCollection;\n migrationState: RxMigrationState;\n};\n\nexport const DATA_MIGRATION_STATE_SUBJECT_BY_DATABASE = new WeakMap>();\nexport function addMigrationStateToDatabase(\n migrationState: RxMigrationState\n) {\n const allSubject = getMigrationStateByDatabase(migrationState.database);\n const allList = allSubject.getValue().slice(0);\n allList.push(migrationState);\n allSubject.next(allList);\n}\nexport function getMigrationStateByDatabase(database: RxDatabase): BehaviorSubject {\n return getFromMapOrCreate(\n DATA_MIGRATION_STATE_SUBJECT_BY_DATABASE,\n database,\n () => new BehaviorSubject([])\n );\n}\n\n/**\n * Complete on database destroy\n * so people do not have to unsubscribe\n */\nexport function onDatabaseDestroy(database: RxDatabase) {\n const subject = DATA_MIGRATION_STATE_SUBJECT_BY_DATABASE.get(database);\n if (subject) {\n subject.complete();\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;AAAA,IAAAA,KAAA,GAAAC,OAAA;AACA,IAAAC,wBAAA,GAAAD,OAAA;AAIA,IAAAE,SAAA,GAAAF,OAAA;AAOA,IAAAG,MAAA,GAAAH,OAAA;AAUO,eAAeI,oBAAoBA,CACtCC,cAAgC,EACuB;EAEvD,IAAMC,iBAAiB,GAAG,IAAAC,6BAAmB,EAACF,cAAc,CAACG,UAAU,CAACC,MAAM,CAACC,UAAU,CAAC,CACrFC,GAAG,CAACC,OAAO,IAAIP,cAAc,CAACG,UAAU,CAACK,IAAI,GAAG,GAAG,GAAGD,OAAO,CAAC;EAEnE,IAAME,KAAK,GAAG,MAAMT,cAAc,CAACU,QAAQ,CAACC,aAAa,CAACC,iBAAiB,CACvEX,iBAAiB,CAACK,GAAG,CAACO,GAAG,IAAI,IAAAC,wDAA+B,EACxDD,GAAG,EACHE,oDACJ,CAAC,CAAC,EACF,KACJ,CAAC;EACD,IAAIN,KAAK,CAACO,MAAM,GAAG,CAAC,EAAE;IAClB,MAAM,IAAIC,KAAK,CAAC,yCAAyC,CAAC;EAC9D;EACA,OAAOR,KAAK,CAAC,CAAC,CAAC;AACnB;;AAGA;AACA;AACA;AACA;AACA;AACA;AACO,SAASS,mBAAmBA,CAC/Bf,UAAwB,EACxBgB,gBAAwB,EACxBC,OAAY,EACO;EACnB;AACJ;AACA;AACA;AACA;EACI,IAAMC,iBAAiB,GAAG,IAAAC,gBAAS,EAACF,OAAO,CAACG,YAAY,CAAC;EACzD,IAAMC,iBAAiB,GAAG,IAAAC,YAAK,EAACL,OAAO,CAAC;EACxC,IAAMM,IAAI,GAAGF,iBAAiB,CAACG,KAAK;EACpC,OAAOH,iBAAiB,CAACG,KAAK;EAC9BH,iBAAiB,CAACD,YAAY,GAAGF,iBAAiB;EAElD,IAAIO,WAAW,GAAGT,gBAAgB,GAAG,CAAC;;EAEtC;EACA,IAAIU,cAAc,GAAGC,OAAO,CAACC,OAAO,CAACP,iBAAiB,CAAC;EAAC,IAAAQ,KAAA,YAAAA,CAAA,EACP;IAC7C,IAAMzB,OAAO,GAAGqB,WAAW;IAC3BC,cAAc,GAAGA,cAAc,CAACI,IAAI,CAACC,SAAS,IAAIC,oBAAoB,CAClEhC,UAAU,EACVI,OAAO,EACP2B,SACJ,CAAC,CAAC;IACFN,WAAW,EAAE;EACjB,CAAC;EARD,OAAOA,WAAW,IAAIzB,UAAU,CAACC,MAAM,CAACG,OAAO;IAAAyB,KAAA;EAAA;EAU/C,OAAOH,cAAc,CAACI,IAAI,CAACG,GAAG,IAAI;IAC9B,IAAIA,GAAG,KAAK,IAAI,EAAE;MACd,OAAOC,2BAAoB;IAC/B;IACAD,GAAG,CAACT,KAAK,GAAGD,IAAI;IAChB,OAAOU,GAAG;EACd,CAAC,CAAC;AACN;AAEO,SAASD,oBAAoBA,CAChChC,UAAwB,EACxBI,OAAe,EACf2B,SAAqB,EACF;EACnB,IAAIA,SAAS,KAAK,IAAI,EAAE;IACpB,OAAOG,2BAAoB;EAC/B,CAAC,MAAM;IACH,IAAMC,GAAG,GAAGnC,UAAU,CAACoC,mBAAmB,CAAChC,OAAO,CAAC,CAAC2B,SAAS,EAAE/B,UAAU,CAAC;IAC1E,IAAMqC,UAAU,GAAG,IAAAC,gBAAS,EAACH,GAAG,CAAC;IACjC,OAAOE,UAAU;EACrB;AACJ;;AAEA;AACA;AACA;AACO,eAAeE,WAAWA,CAC7B1C,cAAgC,EAChB;EAChB,IAAIA,cAAc,CAACG,UAAU,CAACC,MAAM,CAACG,OAAO,KAAK,CAAC,EAAE;IAChD,OAAOoC,4BAAqB;EAChC;EACA,IAAMC,SAAS,GAAG,MAAM7C,oBAAoB,CAAC
C,cAAc,CAAC;EAC5D,OAAO,CAAC,CAAC4C,SAAS;AACtB;AACO,IAAMC,4BAA4B,GAAAC,OAAA,CAAAD,4BAAA,GAAG,GAAG;AAQxC,IAAME,wCAAwC,GAAAD,OAAA,CAAAC,wCAAA,GAAG,IAAIC,OAAO,CAAkD,CAAC;AAC/G,SAASC,2BAA2BA,CACvCjD,cAAgC,EAClC;EACE,IAAMkD,UAAU,GAAGC,2BAA2B,CAACnD,cAAc,CAACU,QAAQ,CAAC;EACvE,IAAM0C,OAAO,GAAGF,UAAU,CAACG,QAAQ,CAAC,CAAC,CAACC,KAAK,CAAC,CAAC,CAAC;EAC9CF,OAAO,CAACG,IAAI,CAACvD,cAAc,CAAC;EAC5BkD,UAAU,CAACM,IAAI,CAACJ,OAAO,CAAC;AAC5B;AACO,SAASD,2BAA2BA,CAACzC,QAAoB,EAAuC;EACnG,OAAO,IAAA+C,yBAAkB,EACrBV,wCAAwC,EACxCrC,QAAQ,EACR,MAAM,IAAIgD,qBAAe,CAAqB,EAAE,CACpD,CAAC;AACL;;AAEA;AACA;AACA;AACA;AACO,SAASC,iBAAiBA,CAACjD,QAAoB,EAAE;EACpD,IAAMkD,OAAO,GAAGb,wCAAwC,CAACc,GAAG,CAACnD,QAAQ,CAAC;EACtE,IAAIkD,OAAO,EAAE;IACTA,OAAO,CAACE,QAAQ,CAAC,CAAC;EACtB;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/migration-schema/migration-types.js b/dist/cjs/plugins/migration-schema/migration-types.js deleted file mode 100644 index 9c3e0685361..00000000000 --- a/dist/cjs/plugins/migration-schema/migration-types.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=migration-types.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/migration-schema/migration-types.js.map b/dist/cjs/plugins/migration-schema/migration-types.js.map deleted file mode 100644 index 9fd87e6a5e9..00000000000 --- a/dist/cjs/plugins/migration-schema/migration-types.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"migration-types.js","names":[],"sources":["../../../../src/plugins/migration-schema/migration-types.ts"],"sourcesContent":["import type {\n InternalStoreDocType,\n PlainJsonError\n} from '../../types/index.d.ts';\n\nexport type RxMigrationStatus = {\n collectionName: string;\n status: 'RUNNING' | 'DONE' | 'ERROR';\n error?: PlainJsonError;\n\n /**\n * Counters so that you can display\n * the migration state to your user in the UI\n * and show a loading bar.\n */\n count: {\n /**\n * Total amount of documents that\n * have to be migrated\n */\n 
total: number;\n /**\n * Amount of documents that have been migrated already\n * = success + purged\n */\n handled: number;\n /**\n * Total percentage [0-100]\n */\n percent: number;\n };\n};\n\n\n/**\n * To be shared between browser tabs,\n * the migration status is written into a document in the internal storage of the database.\n */\nexport type RxMigrationStatusDocument = InternalStoreDocType;\n\n\nexport type MigrationStatusUpdate = (before: RxMigrationStatus) => RxMigrationStatus;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/migration-schema/rx-migration-state.js b/dist/cjs/plugins/migration-schema/rx-migration-state.js deleted file mode 100644 index 5697c26424b..00000000000 --- a/dist/cjs/plugins/migration-schema/rx-migration-state.js +++ /dev/null @@ -1,353 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxMigrationState = void 0; -var _rxjs = require("rxjs"); -var _rxError = require("../../rx-error.js"); -var _migrationHelpers = require("./migration-helpers.js"); -var _index = require("../utils/index.js"); -var _rxStorageHelper = require("../../rx-storage-helper.js"); -var _broadcastChannel = require("broadcast-channel"); -var _index2 = require("../../replication-protocol/index.js"); -var _overwritable = require("../../overwritable.js"); -var _rxDatabaseInternalStore = require("../../rx-database-internal-store.js"); -var _rxQuery = require("../../rx-query.js"); -var _rxQueryHelper = require("../../rx-query-helper.js"); -var RxMigrationState = exports.RxMigrationState = /*#__PURE__*/function () { - function RxMigrationState(collection, migrationStrategies, statusDocKey = [collection.name, 'v', collection.schema.version].join('-')) { - this.started = false; - this.updateStatusHandlers = []; - this.updateStatusQueue = _index.PROMISE_RESOLVE_TRUE; - this.collection = collection; - this.migrationStrategies = migrationStrategies; - this.statusDocKey = 
statusDocKey; - this.database = collection.database; - this.oldCollectionMeta = (0, _migrationHelpers.getOldCollectionMeta)(this); - this.mustMigrate = (0, _migrationHelpers.mustMigrate)(this); - this.statusDocId = (0, _rxDatabaseInternalStore.getPrimaryKeyOfInternalDocument)(this.statusDocKey, _rxDatabaseInternalStore.INTERNAL_CONTEXT_MIGRATION_STATUS); - (0, _migrationHelpers.addMigrationStateToDatabase)(this); - this.$ = (0, _rxStorageHelper.observeSingle)(this.database.internalStore, this.statusDocId).pipe((0, _rxjs.filter)(d => !!d), (0, _rxjs.map)(d => (0, _index.ensureNotFalsy)(d).data), (0, _rxjs.shareReplay)(_index.RXJS_SHARE_REPLAY_DEFAULTS)); - } - var _proto = RxMigrationState.prototype; - _proto.getStatus = function getStatus() { - return (0, _rxjs.firstValueFrom)(this.$); - } - - /** - * Starts the migration. - * Returns void so that people to not get the idea to await - * this function. - * Instead use migratePromise() if you want to await - * the migration. This ensures it works even if the migration - * is run on a different browser tab. - */; - _proto.startMigration = async function startMigration(batchSize = _migrationHelpers.MIGRATION_DEFAULT_BATCH_SIZE) { - var must = await this.mustMigrate; - if (!must) { - return; - } - if (this.started) { - throw (0, _rxError.newRxError)('DM1'); - } - this.started = true; - var broadcastChannel = undefined; - /** - * To ensure that multiple tabs do not migrate the same collection, - * we use a new broadcastChannel/leaderElector for each collection. - * This is required because collections can be added dynamically and - * not all tabs might know about this collection. 
- */ - if (this.database.multiInstance) { - broadcastChannel = new _broadcastChannel.BroadcastChannel(['rx-migration-state', this.database.name, this.collection.name, this.collection.schema.version].join('|')); - var leaderElector = (0, _broadcastChannel.createLeaderElection)(broadcastChannel); - await leaderElector.awaitLeadership(); - } - - /** - * Instead of writing a custom migration protocol, - * we do a push-only replication from the old collection data to the new one. - * This also ensure that restarting the replication works without problems. - */ - var oldCollectionMeta = await this.oldCollectionMeta; - var oldStorageInstance = await this.database.storage.createStorageInstance({ - databaseName: this.database.name, - collectionName: this.collection.name, - databaseInstanceToken: this.database.token, - multiInstance: this.database.multiInstance, - options: {}, - schema: oldCollectionMeta.data.schema, - password: this.database.password, - devMode: _overwritable.overwritable.isDevMode() - }); - var connectedInstances = await this.getConnectedStorageInstances(); - - /** - * Initially write the migration status into a meta document. - */ - var totalCount = await this.countAllDoucments([oldStorageInstance].concat(connectedInstances.map(r => r.oldStorage))); - await this.updateStatus(s => { - s.count.total = totalCount; - return s; - }); - try { - /** - * First migrate the connected storages, - * afterwards migrate the normal collection. 
- */ - await Promise.all(connectedInstances.map(async connectedInstance => { - await (0, _rxDatabaseInternalStore.addConnectedStorageToCollection)(this.collection, connectedInstance.newStorage.collectionName, connectedInstance.newStorage.schema); - await this.migrateStorage(connectedInstance.oldStorage, connectedInstance.newStorage, batchSize); - await connectedInstance.newStorage.close(); - })); - await this.migrateStorage(oldStorageInstance, - /** - * Use the originalStorageInstance here - * so that the _meta.lwt time keeps the same - * and our replication checkpoints still point to the - * correct checkpoint. - */ - this.collection.storageInstance.originalStorageInstance, batchSize); - } catch (err) { - await oldStorageInstance.close(); - await this.updateStatus(s => { - s.status = 'ERROR'; - s.error = (0, _index.errorToPlainJson)(err); - return s; - }); - return; - } - - // remove old collection meta doc - await (0, _rxStorageHelper.writeSingle)(this.database.internalStore, { - previous: oldCollectionMeta, - document: Object.assign({}, oldCollectionMeta, { - _deleted: true - }) - }, 'rx-migration-remove-collection-meta'); - await this.updateStatus(s => { - s.status = 'DONE'; - return s; - }); - if (broadcastChannel) { - await broadcastChannel.close(); - } - }; - _proto.updateStatus = function updateStatus(handler) { - this.updateStatusHandlers.push(handler); - this.updateStatusQueue = this.updateStatusQueue.then(async () => { - if (this.updateStatusHandlers.length === 0) { - return; - } - // re-run until no conflict - var useHandlers = this.updateStatusHandlers; - this.updateStatusHandlers = []; - while (true) { - var previous = await (0, _rxStorageHelper.getSingleDocument)(this.database.internalStore, this.statusDocId); - var newDoc = (0, _index.clone)(previous); - if (!previous) { - newDoc = { - id: this.statusDocId, - key: this.statusDocKey, - context: _rxDatabaseInternalStore.INTERNAL_CONTEXT_MIGRATION_STATUS, - data: { - collectionName: 
this.collection.name, - status: 'RUNNING', - count: { - total: 0, - handled: 0, - percent: 0 - } - }, - _deleted: false, - _meta: (0, _index.getDefaultRxDocumentMeta)(), - _rev: (0, _index.getDefaultRevision)(), - _attachments: {} - }; - } - var status = (0, _index.ensureNotFalsy)(newDoc).data; - for (var oneHandler of useHandlers) { - status = oneHandler(status); - } - status.count.percent = Math.round(status.count.handled / status.count.total * 100); - if (newDoc && previous && (0, _index.deepEqual)(newDoc.data, previous.data)) { - break; - } - try { - await (0, _rxStorageHelper.writeSingle)(this.database.internalStore, { - previous, - document: (0, _index.ensureNotFalsy)(newDoc) - }, _rxDatabaseInternalStore.INTERNAL_CONTEXT_MIGRATION_STATUS); - - // write successful - break; - } catch (err) { - // ignore conflicts - if (!(0, _rxError.isBulkWriteConflictError)(err)) { - throw err; - } - } - } - }); - return this.updateStatusQueue; - }; - _proto.migrateStorage = async function migrateStorage(oldStorage, newStorage, batchSize) { - var replicationMetaStorageInstance = await this.database.storage.createStorageInstance({ - databaseName: this.database.name, - collectionName: 'rx-migration-state-meta-' + this.collection.name + '-' + this.collection.schema.version, - databaseInstanceToken: this.database.token, - multiInstance: this.database.multiInstance, - options: {}, - schema: (0, _index2.getRxReplicationMetaInstanceSchema)(oldStorage.schema, (0, _rxStorageHelper.hasEncryption)(oldStorage.schema)), - password: this.database.password, - devMode: _overwritable.overwritable.isDevMode() - }); - var replicationHandlerBase = (0, _index2.rxStorageInstanceToReplicationHandler)(newStorage, - /** - * Ignore push-conflicts. - * If this happens we drop the 'old' document state. 
- */ - _index2.defaultConflictHandler, this.database.token, true); - var replicationState = (0, _index2.replicateRxStorageInstance)({ - keepMeta: true, - identifier: ['rx-migration-state', this.collection.name, oldStorage.schema.version, this.collection.schema.version].join('-'), - replicationHandler: { - masterChangesSince() { - return Promise.resolve({ - checkpoint: null, - documents: [] - }); - }, - masterWrite: async rows => { - rows = await Promise.all(rows.map(async row => { - var newDocData = row.newDocumentState; - if (newStorage.schema.title === _index2.META_INSTANCE_SCHEMA_TITLE) { - newDocData = row.newDocumentState.docData; - if (row.newDocumentState.isCheckpoint === '1') { - return { - assumedMasterState: undefined, - newDocumentState: row.newDocumentState - }; - } - } - var migratedDocData = await (0, _migrationHelpers.migrateDocumentData)(this.collection, oldStorage.schema.version, newDocData); - var newRow = { - // drop the assumed master state, we do not have to care about conflicts here. - assumedMasterState: undefined, - newDocumentState: newStorage.schema.title === _index2.META_INSTANCE_SCHEMA_TITLE ? 
Object.assign({}, row.newDocumentState, { - docData: migratedDocData - }) : migratedDocData - }; - return newRow; - })); - - // filter out the documents where the migration strategy returned null - rows = rows.filter(row => !!row.newDocumentState); - var result = await replicationHandlerBase.masterWrite(rows); - return result; - }, - masterChangeStream$: new _rxjs.Subject().asObservable() - }, - forkInstance: oldStorage, - metaInstance: replicationMetaStorageInstance, - pushBatchSize: batchSize, - pullBatchSize: 0, - conflictHandler: _index2.defaultConflictHandler, - hashFunction: this.database.hashFunction - }); - var hasError = false; - replicationState.events.error.subscribe(err => hasError = err); - - // update replication status on each change - replicationState.events.processed.up.subscribe(() => { - this.updateStatus(status => { - status.count.handled = status.count.handled + 1; - return status; - }); - }); - await (0, _index2.awaitRxStorageReplicationFirstInSync)(replicationState); - await (0, _index2.cancelRxStorageReplication)(replicationState); - await this.updateStatusQueue; - if (hasError) { - await replicationMetaStorageInstance.close(); - throw hasError; - } - - // cleanup old storages - await Promise.all([oldStorage.remove(), replicationMetaStorageInstance.remove()]); - }; - _proto.countAllDoucments = async function countAllDoucments(storageInstances) { - var ret = 0; - await Promise.all(storageInstances.map(async instance => { - var preparedQuery = (0, _rxQuery.prepareQuery)(instance.schema, (0, _rxQueryHelper.normalizeMangoQuery)(instance.schema, { - selector: {} - })); - var countResult = await instance.count(preparedQuery); - ret += countResult.count; - })); - return ret; - }; - _proto.getConnectedStorageInstances = async function getConnectedStorageInstances() { - var oldCollectionMeta = await this.oldCollectionMeta; - var ret = []; - await Promise.all(await Promise.all(oldCollectionMeta.data.connectedStorages.map(async connectedStorage => { - 
// atm we can only migrate replication states. - if (connectedStorage.schema.title !== _index2.META_INSTANCE_SCHEMA_TITLE) { - throw new Error('unknown migration handling for schema'); - } - var newSchema = (0, _index2.getRxReplicationMetaInstanceSchema)((0, _index.clone)(this.collection.schema.jsonSchema), (0, _rxStorageHelper.hasEncryption)(connectedStorage.schema)); - newSchema.version = this.collection.schema.version; - var [oldStorage, newStorage] = await Promise.all([this.database.storage.createStorageInstance({ - databaseInstanceToken: this.database.token, - databaseName: this.database.name, - devMode: _overwritable.overwritable.isDevMode(), - multiInstance: this.database.multiInstance, - options: {}, - schema: connectedStorage.schema, - password: this.database.password, - collectionName: connectedStorage.collectionName - }), this.database.storage.createStorageInstance({ - databaseInstanceToken: this.database.token, - databaseName: this.database.name, - devMode: _overwritable.overwritable.isDevMode(), - multiInstance: this.database.multiInstance, - options: {}, - schema: newSchema, - password: this.database.password, - collectionName: connectedStorage.collectionName - })]); - ret.push({ - oldStorage, - newStorage - }); - }))); - return ret; - }; - _proto.migratePromise = async function migratePromise(batchSize) { - this.startMigration(batchSize); - var must = await this.mustMigrate; - if (!must) { - return { - status: 'DONE', - collectionName: this.collection.name, - count: { - handled: 0, - percent: 0, - total: 0 - } - }; - } - var result = await Promise.race([(0, _rxjs.firstValueFrom)(this.$.pipe((0, _rxjs.filter)(d => d.status === 'DONE'))), (0, _rxjs.firstValueFrom)(this.$.pipe((0, _rxjs.filter)(d => d.status === 'ERROR')))]); - if (result.status === 'ERROR') { - throw (0, _rxError.newRxError)('DM4', { - collection: this.collection.name, - error: result.error - }); - } else { - return result; - } - }; - return RxMigrationState; -}(); -//# 
sourceMappingURL=rx-migration-state.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/migration-schema/rx-migration-state.js.map b/dist/cjs/plugins/migration-schema/rx-migration-state.js.map deleted file mode 100644 index d83b926c0d4..00000000000 --- a/dist/cjs/plugins/migration-schema/rx-migration-state.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-migration-state.js","names":["_rxjs","require","_rxError","_migrationHelpers","_index","_rxStorageHelper","_broadcastChannel","_index2","_overwritable","_rxDatabaseInternalStore","_rxQuery","_rxQueryHelper","RxMigrationState","exports","collection","migrationStrategies","statusDocKey","name","schema","version","join","started","updateStatusHandlers","updateStatusQueue","PROMISE_RESOLVE_TRUE","database","oldCollectionMeta","getOldCollectionMeta","mustMigrate","statusDocId","getPrimaryKeyOfInternalDocument","INTERNAL_CONTEXT_MIGRATION_STATUS","addMigrationStateToDatabase","$","observeSingle","internalStore","pipe","filter","d","map","ensureNotFalsy","data","shareReplay","RXJS_SHARE_REPLAY_DEFAULTS","_proto","prototype","getStatus","firstValueFrom","startMigration","batchSize","MIGRATION_DEFAULT_BATCH_SIZE","must","newRxError","broadcastChannel","undefined","multiInstance","BroadcastChannel","leaderElector","createLeaderElection","awaitLeadership","oldStorageInstance","storage","createStorageInstance","databaseName","collectionName","databaseInstanceToken","token","options","password","devMode","overwritable","isDevMode","connectedInstances","getConnectedStorageInstances","totalCount","countAllDoucments","concat","r","oldStorage","updateStatus","s","count","total","Promise","all","connectedInstance","addConnectedStorageToCollection","newStorage","migrateStorage","close","storageInstance","originalStorageInstance","err","status","error","errorToPlainJson","writeSingle","previous","document","Object","assign","_deleted","handler","push","then","length","useHandlers","getSingleDocument","newDoc","c
lone","id","key","context","handled","percent","_meta","getDefaultRxDocumentMeta","_rev","getDefaultRevision","_attachments","oneHandler","Math","round","deepEqual","isBulkWriteConflictError","replicationMetaStorageInstance","getRxReplicationMetaInstanceSchema","hasEncryption","replicationHandlerBase","rxStorageInstanceToReplicationHandler","defaultConflictHandler","replicationState","replicateRxStorageInstance","keepMeta","identifier","replicationHandler","masterChangesSince","resolve","checkpoint","documents","masterWrite","rows","row","newDocData","newDocumentState","title","META_INSTANCE_SCHEMA_TITLE","docData","isCheckpoint","assumedMasterState","migratedDocData","migrateDocumentData","newRow","result","masterChangeStream$","Subject","asObservable","forkInstance","metaInstance","pushBatchSize","pullBatchSize","conflictHandler","hashFunction","hasError","events","subscribe","processed","up","awaitRxStorageReplicationFirstInSync","cancelRxStorageReplication","remove","storageInstances","ret","instance","preparedQuery","prepareQuery","normalizeMangoQuery","selector","countResult","connectedStorages","connectedStorage","Error","newSchema","jsonSchema","migratePromise","race"],"sources":["../../../../src/plugins/migration-schema/rx-migration-state.ts"],"sourcesContent":["import {\n Observable,\n Subject,\n filter,\n firstValueFrom,\n map,\n shareReplay\n} from 'rxjs';\nimport {\n isBulkWriteConflictError,\n newRxError\n} from '../../rx-error.ts';\nimport type {\n NumberFunctionMap,\n RxCollection,\n RxDatabase,\n RxError,\n RxReplicationWriteToMasterRow,\n RxStorageInstance,\n RxTypeError\n} from '../../types/index.d.ts';\nimport {\n MIGRATION_DEFAULT_BATCH_SIZE,\n addMigrationStateToDatabase,\n getOldCollectionMeta,\n migrateDocumentData,\n mustMigrate\n} from './migration-helpers.ts';\nimport {\n PROMISE_RESOLVE_TRUE,\n RXJS_SHARE_REPLAY_DEFAULTS,\n clone,\n deepEqual,\n ensureNotFalsy,\n errorToPlainJson,\n getDefaultRevision,\n getDefaultRxDocumentMeta\n} from 
'../utils/index.ts';\nimport type {\n MigrationStatusUpdate,\n RxMigrationStatus,\n RxMigrationStatusDocument\n} from './migration-types.ts';\nimport {\n getSingleDocument,\n hasEncryption,\n observeSingle,\n writeSingle\n} from '../../rx-storage-helper.ts';\nimport {\n BroadcastChannel,\n createLeaderElection\n} from 'broadcast-channel';\nimport {\n META_INSTANCE_SCHEMA_TITLE,\n awaitRxStorageReplicationFirstInSync,\n cancelRxStorageReplication,\n defaultConflictHandler,\n getRxReplicationMetaInstanceSchema,\n replicateRxStorageInstance,\n rxStorageInstanceToReplicationHandler\n} from '../../replication-protocol/index.ts';\nimport { overwritable } from '../../overwritable.ts';\nimport {\n INTERNAL_CONTEXT_MIGRATION_STATUS,\n addConnectedStorageToCollection,\n getPrimaryKeyOfInternalDocument\n} from '../../rx-database-internal-store.ts';\nimport { prepareQuery } from '../../rx-query.ts';\nimport { normalizeMangoQuery } from '../../rx-query-helper.ts';\n\n\n\nexport class RxMigrationState {\n\n public database: RxDatabase;\n\n\n private started: boolean = false;\n public readonly oldCollectionMeta: ReturnType;\n public readonly mustMigrate: ReturnType;\n public readonly statusDocId: string;\n public readonly $: Observable;\n\n constructor(\n public readonly collection: RxCollection,\n public readonly migrationStrategies: NumberFunctionMap,\n public readonly statusDocKey = [\n collection.name,\n 'v',\n collection.schema.version\n ].join('-'),\n ) {\n this.database = collection.database;\n this.oldCollectionMeta = getOldCollectionMeta(this);\n this.mustMigrate = mustMigrate(this);\n this.statusDocId = getPrimaryKeyOfInternalDocument(\n this.statusDocKey,\n INTERNAL_CONTEXT_MIGRATION_STATUS\n );\n addMigrationStateToDatabase(this);\n\n this.$ = observeSingle(\n this.database.internalStore,\n this.statusDocId\n ).pipe(\n filter(d => !!d),\n map(d => ensureNotFalsy(d).data),\n shareReplay(RXJS_SHARE_REPLAY_DEFAULTS)\n );\n }\n\n getStatus() {\n return 
firstValueFrom(this.$);\n }\n\n\n /**\n * Starts the migration.\n * Returns void so that people to not get the idea to await\n * this function.\n * Instead use migratePromise() if you want to await\n * the migration. This ensures it works even if the migration\n * is run on a different browser tab.\n */\n async startMigration(batchSize: number = MIGRATION_DEFAULT_BATCH_SIZE): Promise {\n const must = await this.mustMigrate;\n if (!must) {\n return;\n }\n if (this.started) {\n throw newRxError('DM1');\n }\n this.started = true;\n\n\n let broadcastChannel: BroadcastChannel | undefined = undefined;\n /**\n * To ensure that multiple tabs do not migrate the same collection,\n * we use a new broadcastChannel/leaderElector for each collection.\n * This is required because collections can be added dynamically and\n * not all tabs might know about this collection.\n */\n if (this.database.multiInstance) {\n broadcastChannel = new BroadcastChannel([\n 'rx-migration-state',\n this.database.name,\n this.collection.name,\n this.collection.schema.version\n ].join('|'));\n const leaderElector = createLeaderElection(broadcastChannel);\n await leaderElector.awaitLeadership();\n }\n\n /**\n * Instead of writing a custom migration protocol,\n * we do a push-only replication from the old collection data to the new one.\n * This also ensure that restarting the replication works without problems.\n */\n const oldCollectionMeta = await this.oldCollectionMeta;\n const oldStorageInstance = await this.database.storage.createStorageInstance({\n databaseName: this.database.name,\n collectionName: this.collection.name,\n databaseInstanceToken: this.database.token,\n multiInstance: this.database.multiInstance,\n options: {},\n schema: oldCollectionMeta.data.schema,\n password: this.database.password,\n devMode: overwritable.isDevMode()\n });\n\n\n const connectedInstances = await this.getConnectedStorageInstances();\n\n\n /**\n * Initially write the migration status into a meta document.\n */\n 
const totalCount = await this.countAllDoucments(\n [oldStorageInstance].concat(connectedInstances.map(r => r.oldStorage))\n );\n await this.updateStatus(s => {\n s.count.total = totalCount;\n return s;\n });\n\n\n try {\n /**\n * First migrate the connected storages,\n * afterwards migrate the normal collection.\n */\n await Promise.all(\n connectedInstances.map(async (connectedInstance) => {\n await addConnectedStorageToCollection(\n this.collection,\n connectedInstance.newStorage.collectionName,\n connectedInstance.newStorage.schema\n );\n await this.migrateStorage(\n connectedInstance.oldStorage,\n connectedInstance.newStorage,\n batchSize\n );\n await connectedInstance.newStorage.close();\n })\n );\n\n await this.migrateStorage(\n oldStorageInstance,\n /**\n * Use the originalStorageInstance here\n * so that the _meta.lwt time keeps the same\n * and our replication checkpoints still point to the\n * correct checkpoint.\n */\n this.collection.storageInstance.originalStorageInstance,\n batchSize\n );\n } catch (err) {\n await oldStorageInstance.close();\n await this.updateStatus(s => {\n s.status = 'ERROR';\n s.error = errorToPlainJson(err as Error);\n return s;\n });\n return;\n }\n\n\n // remove old collection meta doc\n await writeSingle(\n this.database.internalStore,\n {\n previous: oldCollectionMeta,\n document: Object.assign(\n {},\n oldCollectionMeta,\n {\n _deleted: true\n }\n )\n },\n 'rx-migration-remove-collection-meta'\n );\n\n await this.updateStatus(s => {\n s.status = 'DONE';\n return s;\n });\n if (broadcastChannel) {\n await broadcastChannel.close();\n }\n }\n\n public updateStatusHandlers: MigrationStatusUpdate[] = [];\n public updateStatusQueue: Promise = PROMISE_RESOLVE_TRUE;\n public updateStatus(\n handler: MigrationStatusUpdate\n ) {\n this.updateStatusHandlers.push(handler);\n this.updateStatusQueue = this.updateStatusQueue.then(async () => {\n if (this.updateStatusHandlers.length === 0) {\n return;\n }\n // re-run until no conflict\n 
const useHandlers = this.updateStatusHandlers;\n this.updateStatusHandlers = [];\n while (true) {\n const previous = await getSingleDocument(\n this.database.internalStore,\n this.statusDocId\n );\n let newDoc = clone(previous);\n if (!previous) {\n newDoc = {\n id: this.statusDocId,\n key: this.statusDocKey,\n context: INTERNAL_CONTEXT_MIGRATION_STATUS,\n data: {\n collectionName: this.collection.name,\n status: 'RUNNING',\n count: {\n total: 0,\n handled: 0,\n percent: 0\n }\n },\n _deleted: false,\n _meta: getDefaultRxDocumentMeta(),\n _rev: getDefaultRevision(),\n _attachments: {}\n };\n }\n\n let status = ensureNotFalsy(newDoc).data;\n for (const oneHandler of useHandlers) {\n status = oneHandler(status);\n }\n status.count.percent = Math.round((status.count.handled / status.count.total) * 100);\n\n if (\n newDoc && previous &&\n deepEqual(newDoc.data, previous.data)\n ) {\n break;\n }\n\n\n try {\n await writeSingle(\n this.database.internalStore,\n {\n previous,\n document: ensureNotFalsy(newDoc)\n },\n INTERNAL_CONTEXT_MIGRATION_STATUS\n );\n\n // write successful\n break;\n } catch (err) {\n // ignore conflicts\n if (!isBulkWriteConflictError(err)) {\n throw err;\n }\n }\n }\n });\n return this.updateStatusQueue;\n }\n\n\n public async migrateStorage(\n oldStorage: RxStorageInstance,\n newStorage: RxStorageInstance,\n batchSize: number\n ) {\n const replicationMetaStorageInstance = await this.database.storage.createStorageInstance({\n databaseName: this.database.name,\n collectionName: 'rx-migration-state-meta-' + this.collection.name + '-' + this.collection.schema.version,\n databaseInstanceToken: this.database.token,\n multiInstance: this.database.multiInstance,\n options: {},\n schema: getRxReplicationMetaInstanceSchema(oldStorage.schema, hasEncryption(oldStorage.schema)),\n password: this.database.password,\n devMode: overwritable.isDevMode()\n });\n\n const replicationHandlerBase = rxStorageInstanceToReplicationHandler(\n newStorage,\n /**\n * Ignore 
push-conflicts.\n * If this happens we drop the 'old' document state.\n */\n defaultConflictHandler,\n this.database.token,\n true\n );\n\n const replicationState = replicateRxStorageInstance({\n keepMeta: true,\n identifier: [\n 'rx-migration-state',\n this.collection.name,\n oldStorage.schema.version,\n this.collection.schema.version\n ].join('-'),\n replicationHandler: {\n masterChangesSince() {\n return Promise.resolve({\n checkpoint: null,\n documents: []\n });\n },\n masterWrite: async (rows) => {\n rows = await Promise.all(\n rows\n .map(async (row) => {\n let newDocData = row.newDocumentState;\n if (newStorage.schema.title === META_INSTANCE_SCHEMA_TITLE) {\n newDocData = row.newDocumentState.docData;\n if (row.newDocumentState.isCheckpoint === '1') {\n return {\n assumedMasterState: undefined,\n newDocumentState: row.newDocumentState\n };\n }\n }\n const migratedDocData: RxReplicationWriteToMasterRow = await migrateDocumentData(\n this.collection,\n oldStorage.schema.version,\n newDocData\n );\n const newRow: RxReplicationWriteToMasterRow = {\n // drop the assumed master state, we do not have to care about conflicts here.\n assumedMasterState: undefined,\n newDocumentState: newStorage.schema.title === META_INSTANCE_SCHEMA_TITLE\n ? 
Object.assign({}, row.newDocumentState, { docData: migratedDocData })\n : migratedDocData\n };\n return newRow;\n })\n );\n\n // filter out the documents where the migration strategy returned null\n rows = rows.filter(row => !!row.newDocumentState);\n\n const result = await replicationHandlerBase.masterWrite(rows);\n return result;\n },\n masterChangeStream$: new Subject().asObservable()\n },\n forkInstance: oldStorage,\n metaInstance: replicationMetaStorageInstance,\n pushBatchSize: batchSize,\n pullBatchSize: 0,\n conflictHandler: defaultConflictHandler,\n hashFunction: this.database.hashFunction\n });\n\n\n let hasError: RxError | RxTypeError | false = false;\n replicationState.events.error.subscribe(err => hasError = err);\n\n // update replication status on each change\n replicationState.events.processed.up.subscribe(() => {\n this.updateStatus(status => {\n status.count.handled = status.count.handled + 1;\n return status;\n });\n });\n\n await awaitRxStorageReplicationFirstInSync(replicationState);\n await cancelRxStorageReplication(replicationState);\n\n await this.updateStatusQueue;\n if (hasError) {\n await replicationMetaStorageInstance.close();\n throw hasError;\n }\n\n // cleanup old storages\n await Promise.all([\n oldStorage.remove(),\n replicationMetaStorageInstance.remove()\n ]);\n }\n\n public async countAllDoucments(\n storageInstances: RxStorageInstance[]\n ): Promise {\n let ret = 0;\n await Promise.all(\n storageInstances.map(async (instance) => {\n\n const preparedQuery = prepareQuery(\n instance.schema,\n normalizeMangoQuery(\n instance.schema,\n {\n selector: {}\n }\n )\n );\n const countResult = await instance.count(preparedQuery);\n ret += countResult.count;\n })\n );\n return ret;\n }\n\n public async getConnectedStorageInstances() {\n const oldCollectionMeta = await this.oldCollectionMeta;\n const ret: {\n oldStorage: RxStorageInstance;\n newStorage: RxStorageInstance;\n }[] = [];\n\n await Promise.all(\n await Promise.all(\n 
oldCollectionMeta\n .data\n .connectedStorages\n .map(async (connectedStorage) => {\n\n // atm we can only migrate replication states.\n if (connectedStorage.schema.title !== META_INSTANCE_SCHEMA_TITLE) {\n throw new Error('unknown migration handling for schema');\n }\n\n const newSchema = getRxReplicationMetaInstanceSchema(\n clone(this.collection.schema.jsonSchema),\n hasEncryption(connectedStorage.schema)\n );\n newSchema.version = this.collection.schema.version;\n const [oldStorage, newStorage] = await Promise.all([\n this.database.storage.createStorageInstance({\n databaseInstanceToken: this.database.token,\n databaseName: this.database.name,\n devMode: overwritable.isDevMode(),\n multiInstance: this.database.multiInstance,\n options: {},\n schema: connectedStorage.schema,\n password: this.database.password,\n collectionName: connectedStorage.collectionName\n }),\n this.database.storage.createStorageInstance({\n databaseInstanceToken: this.database.token,\n databaseName: this.database.name,\n devMode: overwritable.isDevMode(),\n multiInstance: this.database.multiInstance,\n options: {},\n schema: newSchema,\n password: this.database.password,\n collectionName: connectedStorage.collectionName\n })\n ]);\n ret.push({ oldStorage, newStorage });\n })\n )\n );\n\n return ret;\n }\n\n\n\n async migratePromise(batchSize?: number): Promise {\n this.startMigration(batchSize);\n const must = await this.mustMigrate;\n if (!must) {\n return {\n status: 'DONE',\n collectionName: this.collection.name,\n count: {\n handled: 0,\n percent: 0,\n total: 0\n }\n };\n }\n\n const result = await Promise.race([\n firstValueFrom(\n this.$.pipe(\n filter(d => d.status === 'DONE')\n )\n ),\n firstValueFrom(\n this.$.pipe(\n filter(d => d.status === 'ERROR')\n )\n )\n ]);\n\n if (result.status === 'ERROR') {\n throw newRxError('DM4', {\n collection: this.collection.name,\n error: result.error\n });\n } else {\n return result;\n }\n\n 
}\n}\n"],"mappings":";;;;;;AAAA,IAAAA,KAAA,GAAAC,OAAA;AAQA,IAAAC,QAAA,GAAAD,OAAA;AAaA,IAAAE,iBAAA,GAAAF,OAAA;AAOA,IAAAG,MAAA,GAAAH,OAAA;AAeA,IAAAI,gBAAA,GAAAJ,OAAA;AAMA,IAAAK,iBAAA,GAAAL,OAAA;AAIA,IAAAM,OAAA,GAAAN,OAAA;AASA,IAAAO,aAAA,GAAAP,OAAA;AACA,IAAAQ,wBAAA,GAAAR,OAAA;AAKA,IAAAS,QAAA,GAAAT,OAAA;AACA,IAAAU,cAAA,GAAAV,OAAA;AAA+D,IAIlDW,gBAAgB,GAAAC,OAAA,CAAAD,gBAAA;EAWzB,SAAAA,iBACoBE,UAAwB,EACxBC,mBAAsC,EACtCC,YAAY,GAAG,CAC3BF,UAAU,CAACG,IAAI,EACf,GAAG,EACHH,UAAU,CAACI,MAAM,CAACC,OAAO,CAC5B,CAACC,IAAI,CAAC,GAAG,CAAC,EACb;IAAA,KAdMC,OAAO,GAAY,KAAK;IAAA,KAiLzBC,oBAAoB,GAA4B,EAAE;IAAA,KAClDC,iBAAiB,GAAiBC,2BAAoB;IAAA,KA3KzCV,UAAwB,GAAxBA,UAAwB;IAAA,KACxBC,mBAAsC,GAAtCA,mBAAsC;IAAA,KACtCC,YAAY,GAAZA,YAAY;IAM5B,IAAI,CAACS,QAAQ,GAAGX,UAAU,CAACW,QAAQ;IACnC,IAAI,CAACC,iBAAiB,GAAG,IAAAC,sCAAoB,EAAC,IAAI,CAAC;IACnD,IAAI,CAACC,WAAW,GAAG,IAAAA,6BAAW,EAAC,IAAI,CAAC;IACpC,IAAI,CAACC,WAAW,GAAG,IAAAC,wDAA+B,EAC9C,IAAI,CAACd,YAAY,EACjBe,0DACJ,CAAC;IACD,IAAAC,6CAA2B,EAAC,IAAI,CAAC;IAEjC,IAAI,CAACC,CAAC,GAAG,IAAAC,8BAAa,EAClB,IAAI,CAACT,QAAQ,CAACU,aAAa,EAC3B,IAAI,CAACN,WACT,CAAC,CAACO,IAAI,CACF,IAAAC,YAAM,EAACC,CAAC,IAAI,CAAC,CAACA,CAAC,CAAC,EAChB,IAAAC,SAAG,EAACD,CAAC,IAAI,IAAAE,qBAAc,EAACF,CAAC,CAAC,CAACG,IAAI,CAAC,EAChC,IAAAC,iBAAW,EAACC,iCAA0B,CAC1C,CAAC;EACL;EAAC,IAAAC,MAAA,GAAAhC,gBAAA,CAAAiC,SAAA;EAAAD,MAAA,CAEDE,SAAS,GAAT,SAAAA,UAAA,EAAY;IACR,OAAO,IAAAC,oBAAc,EAAC,IAAI,CAACd,CAAC,CAAC;EACjC;;EAGA;AACJ;AACA;AACA;AACA;AACA;AACA;AACA,KAPI;EAAAW,MAAA,CAQMI,cAAc,GAApB,eAAAA,eAAqBC,SAAiB,GAAGC,8CAA4B,EAAiB;IAClF,IAAMC,IAAI,GAAG,MAAM,IAAI,CAACvB,WAAW;IACnC,IAAI,CAACuB,IAAI,EAAE;MACP;IACJ;IACA,IAAI,IAAI,CAAC9B,OAAO,EAAE;MACd,MAAM,IAAA+B,mBAAU,EAAC,KAAK,CAAC;IAC3B;IACA,IAAI,CAAC/B,OAAO,GAAG,IAAI;IAGnB,IAAIgC,gBAA8C,GAAGC,SAAS;IAC9D;AACR;AACA;AACA;AACA;AACA;IACQ,IAAI,IAAI,CAAC7B,QAAQ,CAAC8B,aAAa,EAAE;MAC7BF,gBAAgB,GAAG,IAAIG,kCAAgB,CAAC,CACpC,oBAAoB,EACpB,IAAI,CAAC/B,QAAQ,CAACR,IAAI,EAClB,IAAI,CAACH,UAAU,CAACG,IAAI,EACpB,IAAI,CAACH,UAAU,CAACI,MAAM,CAACC,OAAO,CACjC,CAACC,IAAI,CAAC,GA
AG,CAAC,CAAC;MACZ,IAAMqC,aAAa,GAAG,IAAAC,sCAAoB,EAACL,gBAAgB,CAAC;MAC5D,MAAMI,aAAa,CAACE,eAAe,CAAC,CAAC;IACzC;;IAEA;AACR;AACA;AACA;AACA;IACQ,IAAMjC,iBAAiB,GAAG,MAAM,IAAI,CAACA,iBAAiB;IACtD,IAAMkC,kBAAkB,GAAG,MAAM,IAAI,CAACnC,QAAQ,CAACoC,OAAO,CAACC,qBAAqB,CAAC;MACzEC,YAAY,EAAE,IAAI,CAACtC,QAAQ,CAACR,IAAI;MAChC+C,cAAc,EAAE,IAAI,CAAClD,UAAU,CAACG,IAAI;MACpCgD,qBAAqB,EAAE,IAAI,CAACxC,QAAQ,CAACyC,KAAK;MAC1CX,aAAa,EAAE,IAAI,CAAC9B,QAAQ,CAAC8B,aAAa;MAC1CY,OAAO,EAAE,CAAC,CAAC;MACXjD,MAAM,EAAEQ,iBAAiB,CAACe,IAAI,CAACvB,MAAM;MACrCkD,QAAQ,EAAE,IAAI,CAAC3C,QAAQ,CAAC2C,QAAQ;MAChCC,OAAO,EAAEC,0BAAY,CAACC,SAAS,CAAC;IACpC,CAAC,CAAC;IAGF,IAAMC,kBAAkB,GAAG,MAAM,IAAI,CAACC,4BAA4B,CAAC,CAAC;;IAGpE;AACR;AACA;IACQ,IAAMC,UAAU,GAAG,MAAM,IAAI,CAACC,iBAAiB,CAC3C,CAACf,kBAAkB,CAAC,CAACgB,MAAM,CAACJ,kBAAkB,CAACjC,GAAG,CAACsC,CAAC,IAAIA,CAAC,CAACC,UAAU,CAAC,CACzE,CAAC;IACD,MAAM,IAAI,CAACC,YAAY,CAACC,CAAC,IAAI;MACzBA,CAAC,CAACC,KAAK,CAACC,KAAK,GAAGR,UAAU;MAC1B,OAAOM,CAAC;IACZ,CAAC,CAAC;IAGF,IAAI;MACA;AACZ;AACA;AACA;MACY,MAAMG,OAAO,CAACC,GAAG,CACbZ,kBAAkB,CAACjC,GAAG,CAAC,MAAO8C,iBAAiB,IAAK;QAChD,MAAM,IAAAC,wDAA+B,EACjC,IAAI,CAACxE,UAAU,EACfuE,iBAAiB,CAACE,UAAU,CAACvB,cAAc,EAC3CqB,iBAAiB,CAACE,UAAU,CAACrE,MACjC,CAAC;QACD,MAAM,IAAI,CAACsE,cAAc,CACrBH,iBAAiB,CAACP,UAAU,EAC5BO,iBAAiB,CAACE,UAAU,EAC5BtC,SACJ,CAAC;QACD,MAAMoC,iBAAiB,CAACE,UAAU,CAACE,KAAK,CAAC,CAAC;MAC9C,CAAC,CACL,CAAC;MAED,MAAM,IAAI,CAACD,cAAc,CACrB5B,kBAAkB;MAClB;AAChB;AACA;AACA;AACA;AACA;MACgB,IAAI,CAAC9C,UAAU,CAAC4E,eAAe,CAACC,uBAAuB,EACvD1C,SACJ,CAAC;IACL,CAAC,CAAC,OAAO2C,GAAG,EAAE;MACV,MAAMhC,kBAAkB,CAAC6B,KAAK,CAAC,CAAC;MAChC,MAAM,IAAI,CAACV,YAAY,CAACC,CAAC,IAAI;QACzBA,CAAC,CAACa,MAAM,GAAG,OAAO;QAClBb,CAAC,CAACc,KAAK,GAAG,IAAAC,uBAAgB,EAACH,GAAY,CAAC;QACxC,OAAOZ,CAAC;MACZ,CAAC,CAAC;MACF;IACJ;;IAGA;IACA,MAAM,IAAAgB,4BAAW,EACb,IAAI,CAACvE,QAAQ,CAACU,aAAa,EAC3B;MACI8D,QAAQ,EAAEvE,iBAAiB;MAC3BwE,QAAQ,EAAEC,MAAM,CAACC,MAAM,CACnB,CAAC,CAAC,EACF1E,iBAAiB,EACjB;QACI2E,QAAQ,EAAE;MACd,CACJ;IACJ,CAAC,EACD,qCACJ,CAAC;IAED,MAAM,IAAI,CAACtB,Y
AAY,CAACC,CAAC,IAAI;MACzBA,CAAC,CAACa,MAAM,GAAG,MAAM;MACjB,OAAOb,CAAC;IACZ,CAAC,CAAC;IACF,IAAI3B,gBAAgB,EAAE;MAClB,MAAMA,gBAAgB,CAACoC,KAAK,CAAC,CAAC;IAClC;EACJ,CAAC;EAAA7C,MAAA,CAIMmC,YAAY,GAAnB,SAAAA,aACIuB,OAA8B,EAChC;IACE,IAAI,CAAChF,oBAAoB,CAACiF,IAAI,CAACD,OAAO,CAAC;IACvC,IAAI,CAAC/E,iBAAiB,GAAG,IAAI,CAACA,iBAAiB,CAACiF,IAAI,CAAC,YAAY;MAC7D,IAAI,IAAI,CAAClF,oBAAoB,CAACmF,MAAM,KAAK,CAAC,EAAE;QACxC;MACJ;MACA;MACA,IAAMC,WAAW,GAAG,IAAI,CAACpF,oBAAoB;MAC7C,IAAI,CAACA,oBAAoB,GAAG,EAAE;MAC9B,OAAO,IAAI,EAAE;QACT,IAAM2E,QAAQ,GAAG,MAAM,IAAAU,kCAAiB,EACpC,IAAI,CAAClF,QAAQ,CAACU,aAAa,EAC3B,IAAI,CAACN,WACT,CAAC;QACD,IAAI+E,MAAM,GAAG,IAAAC,YAAK,EAACZ,QAAQ,CAAC;QAC5B,IAAI,CAACA,QAAQ,EAAE;UACXW,MAAM,GAAG;YACLE,EAAE,EAAE,IAAI,CAACjF,WAAW;YACpBkF,GAAG,EAAE,IAAI,CAAC/F,YAAY;YACtBgG,OAAO,EAAEjF,0DAAiC;YAC1CU,IAAI,EAAE;cACFuB,cAAc,EAAE,IAAI,CAAClD,UAAU,CAACG,IAAI;cACpC4E,MAAM,EAAE,SAAS;cACjBZ,KAAK,EAAE;gBACHC,KAAK,EAAE,CAAC;gBACR+B,OAAO,EAAE,CAAC;gBACVC,OAAO,EAAE;cACb;YACJ,CAAC;YACDb,QAAQ,EAAE,KAAK;YACfc,KAAK,EAAE,IAAAC,+BAAwB,EAAC,CAAC;YACjCC,IAAI,EAAE,IAAAC,yBAAkB,EAAC,CAAC;YAC1BC,YAAY,EAAE,CAAC;UACnB,CAAC;QACL;QAEA,IAAI1B,MAAM,GAAG,IAAArD,qBAAc,EAACoE,MAAM,CAAC,CAACnE,IAAI;QACxC,KAAK,IAAM+E,UAAU,IAAId,WAAW,EAAE;UAClCb,MAAM,GAAG2B,UAAU,CAAC3B,MAAM,CAAC;QAC/B;QACAA,MAAM,CAACZ,KAAK,CAACiC,OAAO,GAAGO,IAAI,CAACC,KAAK,CAAE7B,MAAM,CAACZ,KAAK,CAACgC,OAAO,GAAGpB,MAAM,CAACZ,KAAK,CAACC,KAAK,GAAI,GAAG,CAAC;QAEpF,IACI0B,MAAM,IAAIX,QAAQ,IAClB,IAAA0B,gBAAS,EAACf,MAAM,CAACnE,IAAI,EAAEwD,QAAQ,CAACxD,IAAI,CAAC,EACvC;UACE;QACJ;QAGA,IAAI;UACA,MAAM,IAAAuD,4BAAW,EACb,IAAI,CAACvE,QAAQ,CAACU,aAAa,EAC3B;YACI8D,QAAQ;YACRC,QAAQ,EAAE,IAAA1D,qBAAc,EAACoE,MAAM;UACnC,CAAC,EACD7E,0DACJ,CAAC;;UAED;UACA;QACJ,CAAC,CAAC,OAAO6D,GAAG,EAAE;UACV;UACA,IAAI,CAAC,IAAAgC,iCAAwB,EAAChC,GAAG,CAAC,EAAE;YAChC,MAAMA,GAAG;UACb;QACJ;MACJ;IACJ,CAAC,CAAC;IACF,OAAO,IAAI,CAACrE,iBAAiB;EACjC,CAAC;EAAAqB,MAAA,CAGY4C,cAAc,GAA3B,eAAAA,eACIV,UAA4C,EAC5CS,UAA4C,EAC5CtC,SAAiB,EACnB;IACE,IAAM4E,8BAA8B,GAAG,MAAM,IAAI,CAACpG,QAAQ,CAACoC,OAAO
,CAACC,qBAAqB,CAAC;MACrFC,YAAY,EAAE,IAAI,CAACtC,QAAQ,CAACR,IAAI;MAChC+C,cAAc,EAAE,0BAA0B,GAAG,IAAI,CAAClD,UAAU,CAACG,IAAI,GAAG,GAAG,GAAG,IAAI,CAACH,UAAU,CAACI,MAAM,CAACC,OAAO;MACxG8C,qBAAqB,EAAE,IAAI,CAACxC,QAAQ,CAACyC,KAAK;MAC1CX,aAAa,EAAE,IAAI,CAAC9B,QAAQ,CAAC8B,aAAa;MAC1CY,OAAO,EAAE,CAAC,CAAC;MACXjD,MAAM,EAAE,IAAA4G,0CAAkC,EAAChD,UAAU,CAAC5D,MAAM,EAAE,IAAA6G,8BAAa,EAACjD,UAAU,CAAC5D,MAAM,CAAC,CAAC;MAC/FkD,QAAQ,EAAE,IAAI,CAAC3C,QAAQ,CAAC2C,QAAQ;MAChCC,OAAO,EAAEC,0BAAY,CAACC,SAAS,CAAC;IACpC,CAAC,CAAC;IAEF,IAAMyD,sBAAsB,GAAG,IAAAC,6CAAqC,EAChE1C,UAAU;IACV;AACZ;AACA;AACA;IACY2C,8BAAsB,EACtB,IAAI,CAACzG,QAAQ,CAACyC,KAAK,EACnB,IACJ,CAAC;IAED,IAAMiE,gBAAgB,GAAG,IAAAC,kCAA0B,EAAC;MAChDC,QAAQ,EAAE,IAAI;MACdC,UAAU,EAAE,CACR,oBAAoB,EACpB,IAAI,CAACxH,UAAU,CAACG,IAAI,EACpB6D,UAAU,CAAC5D,MAAM,CAACC,OAAO,EACzB,IAAI,CAACL,UAAU,CAACI,MAAM,CAACC,OAAO,CACjC,CAACC,IAAI,CAAC,GAAG,CAAC;MACXmH,kBAAkB,EAAE;QAChBC,kBAAkBA,CAAA,EAAG;UACjB,OAAOrD,OAAO,CAACsD,OAAO,CAAC;YACnBC,UAAU,EAAE,IAAI;YAChBC,SAAS,EAAE;UACf,CAAC,CAAC;QACN,CAAC;QACDC,WAAW,EAAE,MAAOC,IAAI,IAAK;UACzBA,IAAI,GAAG,MAAM1D,OAAO,CAACC,GAAG,CACpByD,IAAI,CACCtG,GAAG,CAAC,MAAOuG,GAAG,IAAK;YAChB,IAAIC,UAAU,GAAGD,GAAG,CAACE,gBAAgB;YACrC,IAAIzD,UAAU,CAACrE,MAAM,CAAC+H,KAAK,KAAKC,kCAA0B,EAAE;cACxDH,UAAU,GAAGD,GAAG,CAACE,gBAAgB,CAACG,OAAO;cACzC,IAAIL,GAAG,CAACE,gBAAgB,CAACI,YAAY,KAAK,GAAG,EAAE;gBAC3C,OAAO;kBACHC,kBAAkB,EAAE/F,SAAS;kBAC7B0F,gBAAgB,EAAEF,GAAG,CAACE;gBAC1B,CAAC;cACL;YACJ;YACA,IAAMM,eAAmD,GAAG,MAAM,IAAAC,qCAAmB,EACjF,IAAI,CAACzI,UAAU,EACfgE,UAAU,CAAC5D,MAAM,CAACC,OAAO,EACzB4H,UACJ,CAAC;YACD,IAAMS,MAA0C,GAAG;cAC/C;cACAH,kBAAkB,EAAE/F,SAAS;cAC7B0F,gBAAgB,EAAEzD,UAAU,CAACrE,MAAM,CAAC+H,KAAK,KAAKC,kCAA0B,GAClE/C,MAAM,CAACC,MAAM,CAAC,CAAC,CAAC,EAAE0C,GAAG,CAACE,gBAAgB,EAAE;gBAAEG,OAAO,EAAEG;cAAgB,CAAC,CAAC,GACrEA;YACV,CAAC;YACD,OAAOE,MAAM;UACjB,CAAC,CACT,CAAC;;UAED;UACAX,IAAI,GAAGA,IAAI,CAACxG,MAAM,CAACyG,GAAG,IAAI,CAAC,CAACA,GAAG,CAACE,gBAAgB,CAAC;UAEjD,IAAMS,MAAM,GAAG,MAAMzB,sBAAsB,CAACY,WAAW,CAACC,IAAI,CAAC;UAC7D,OAAOY,MAAM;
QACjB,CAAC;QACDC,mBAAmB,EAAE,IAAIC,aAAO,CAAM,CAAC,CAACC,YAAY,CAAC;MACzD,CAAC;MACDC,YAAY,EAAE/E,UAAU;MACxBgF,YAAY,EAAEjC,8BAA8B;MAC5CkC,aAAa,EAAE9G,SAAS;MACxB+G,aAAa,EAAE,CAAC;MAChBC,eAAe,EAAE/B,8BAAsB;MACvCgC,YAAY,EAAE,IAAI,CAACzI,QAAQ,CAACyI;IAChC,CAAC,CAAC;IAGF,IAAIC,QAAuC,GAAG,KAAK;IACnDhC,gBAAgB,CAACiC,MAAM,CAACtE,KAAK,CAACuE,SAAS,CAACzE,GAAG,IAAIuE,QAAQ,GAAGvE,GAAG,CAAC;;IAE9D;IACAuC,gBAAgB,CAACiC,MAAM,CAACE,SAAS,CAACC,EAAE,CAACF,SAAS,CAAC,MAAM;MACjD,IAAI,CAACtF,YAAY,CAACc,MAAM,IAAI;QACxBA,MAAM,CAACZ,KAAK,CAACgC,OAAO,GAAGpB,MAAM,CAACZ,KAAK,CAACgC,OAAO,GAAG,CAAC;QAC/C,OAAOpB,MAAM;MACjB,CAAC,CAAC;IACN,CAAC,CAAC;IAEF,MAAM,IAAA2E,4CAAoC,EAACrC,gBAAgB,CAAC;IAC5D,MAAM,IAAAsC,kCAA0B,EAACtC,gBAAgB,CAAC;IAElD,MAAM,IAAI,CAAC5G,iBAAiB;IAC5B,IAAI4I,QAAQ,EAAE;MACV,MAAMtC,8BAA8B,CAACpC,KAAK,CAAC,CAAC;MAC5C,MAAM0E,QAAQ;IAClB;;IAEA;IACA,MAAMhF,OAAO,CAACC,GAAG,CAAC,CACdN,UAAU,CAAC4F,MAAM,CAAC,CAAC,EACnB7C,8BAA8B,CAAC6C,MAAM,CAAC,CAAC,CAC1C,CAAC;EACN,CAAC;EAAA9H,MAAA,CAEY+B,iBAAiB,GAA9B,eAAAA,kBACIgG,gBAAoD,EACrC;IACf,IAAIC,GAAG,GAAG,CAAC;IACX,MAAMzF,OAAO,CAACC,GAAG,CACbuF,gBAAgB,CAACpI,GAAG,CAAC,MAAOsI,QAAQ,IAAK;MAErC,IAAMC,aAAa,GAAG,IAAAC,qBAAY,EAC9BF,QAAQ,CAAC3J,MAAM,EACf,IAAA8J,kCAAmB,EACfH,QAAQ,CAAC3J,MAAM,EACf;QACI+J,QAAQ,EAAE,CAAC;MACf,CACJ,CACJ,CAAC;MACD,IAAMC,WAAW,GAAG,MAAML,QAAQ,CAAC5F,KAAK,CAAC6F,aAAa,CAAC;MACvDF,GAAG,IAAIM,WAAW,CAACjG,KAAK;IAC5B,CAAC,CACL,CAAC;IACD,OAAO2F,GAAG;EACd,CAAC;EAAAhI,MAAA,CAEY6B,4BAA4B,GAAzC,eAAAA,6BAAA,EAA4C;IACxC,IAAM/C,iBAAiB,GAAG,MAAM,IAAI,CAACA,iBAAiB;IACtD,IAAMkJ,GAGH,GAAG,EAAE;IAER,MAAMzF,OAAO,CAACC,GAAG,CACb,MAAMD,OAAO,CAACC,GAAG,CACb1D,iBAAiB,CACZe,IAAI,CACJ0I,iBAAiB,CACjB5I,GAAG,CAAC,MAAO6I,gBAAgB,IAAK;MAE7B;MACA,IAAIA,gBAAgB,CAAClK,MAAM,CAAC+H,KAAK,KAAKC,kCAA0B,EAAE;QAC9D,MAAM,IAAImC,KAAK,CAAC,uCAAuC,CAAC;MAC5D;MAEA,IAAMC,SAAS,GAAG,IAAAxD,0CAAkC,EAChD,IAAAjB,YAAK,EAAC,IAAI,CAAC/F,UAAU,CAACI,MAAM,CAACqK,UAAU,CAAC,EACxC,IAAAxD,8BAAa,EAACqD,gBAAgB,CAAClK,MAAM,CACzC,CAAC;MACDoK,SAAS,CAACnK,OAAO,GAAG,IAAI,CAACL,UAAU,CAACI,MAAM,CAACC,O
AAO;MAClD,IAAM,CAAC2D,UAAU,EAAES,UAAU,CAAC,GAAG,MAAMJ,OAAO,CAACC,GAAG,CAAC,CAC/C,IAAI,CAAC3D,QAAQ,CAACoC,OAAO,CAACC,qBAAqB,CAAC;QACxCG,qBAAqB,EAAE,IAAI,CAACxC,QAAQ,CAACyC,KAAK;QAC1CH,YAAY,EAAE,IAAI,CAACtC,QAAQ,CAACR,IAAI;QAChCoD,OAAO,EAAEC,0BAAY,CAACC,SAAS,CAAC,CAAC;QACjChB,aAAa,EAAE,IAAI,CAAC9B,QAAQ,CAAC8B,aAAa;QAC1CY,OAAO,EAAE,CAAC,CAAC;QACXjD,MAAM,EAAEkK,gBAAgB,CAAClK,MAAM;QAC/BkD,QAAQ,EAAE,IAAI,CAAC3C,QAAQ,CAAC2C,QAAQ;QAChCJ,cAAc,EAAEoH,gBAAgB,CAACpH;MACrC,CAAC,CAAC,EACF,IAAI,CAACvC,QAAQ,CAACoC,OAAO,CAACC,qBAAqB,CAAC;QACxCG,qBAAqB,EAAE,IAAI,CAACxC,QAAQ,CAACyC,KAAK;QAC1CH,YAAY,EAAE,IAAI,CAACtC,QAAQ,CAACR,IAAI;QAChCoD,OAAO,EAAEC,0BAAY,CAACC,SAAS,CAAC,CAAC;QACjChB,aAAa,EAAE,IAAI,CAAC9B,QAAQ,CAAC8B,aAAa;QAC1CY,OAAO,EAAE,CAAC,CAAC;QACXjD,MAAM,EAAEoK,SAAS;QACjBlH,QAAQ,EAAE,IAAI,CAAC3C,QAAQ,CAAC2C,QAAQ;QAChCJ,cAAc,EAAEoH,gBAAgB,CAACpH;MACrC,CAAC,CAAC,CACL,CAAC;MACF4G,GAAG,CAACrE,IAAI,CAAC;QAAEzB,UAAU;QAAES;MAAW,CAAC,CAAC;IACxC,CAAC,CACT,CACJ,CAAC;IAED,OAAOqF,GAAG;EACd,CAAC;EAAAhI,MAAA,CAIK4I,cAAc,GAApB,eAAAA,eAAqBvI,SAAkB,EAA8B;IACjE,IAAI,CAACD,cAAc,CAACC,SAAS,CAAC;IAC9B,IAAME,IAAI,GAAG,MAAM,IAAI,CAACvB,WAAW;IACnC,IAAI,CAACuB,IAAI,EAAE;MACP,OAAO;QACH0C,MAAM,EAAE,MAAM;QACd7B,cAAc,EAAE,IAAI,CAAClD,UAAU,CAACG,IAAI;QACpCgE,KAAK,EAAE;UACHgC,OAAO,EAAE,CAAC;UACVC,OAAO,EAAE,CAAC;UACVhC,KAAK,EAAE;QACX;MACJ,CAAC;IACL;IAEA,IAAMuE,MAAM,GAAG,MAAMtE,OAAO,CAACsG,IAAI,CAAC,CAC9B,IAAA1I,oBAAc,EACV,IAAI,CAACd,CAAC,CAACG,IAAI,CACP,IAAAC,YAAM,EAACC,CAAC,IAAIA,CAAC,CAACuD,MAAM,KAAK,MAAM,CACnC,CACJ,CAAC,EACD,IAAA9C,oBAAc,EACV,IAAI,CAACd,CAAC,CAACG,IAAI,CACP,IAAAC,YAAM,EAACC,CAAC,IAAIA,CAAC,CAACuD,MAAM,KAAK,OAAO,CACpC,CACJ,CAAC,CACJ,CAAC;IAEF,IAAI4D,MAAM,CAAC5D,MAAM,KAAK,OAAO,EAAE;MAC3B,MAAM,IAAAzC,mBAAU,EAAC,KAAK,EAAE;QACpBtC,UAAU,EAAE,IAAI,CAACA,UAAU,CAACG,IAAI;QAChC6E,KAAK,EAAE2D,MAAM,CAAC3D;MAClB,CAAC,CAAC;IACN,CAAC,MAAM;MACH,OAAO2D,MAAM;IACjB;EAEJ,CAAC;EAAA,OAAA7I,gBAAA;AAAA","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/migration-storage/index.js 
b/dist/cjs/plugins/migration-storage/index.js deleted file mode 100644 index ed998f6e44c..00000000000 --- a/dist/cjs/plugins/migration-storage/index.js +++ /dev/null @@ -1,216 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.migrateCollection = migrateCollection; -exports.migrateStorage = migrateStorage; -var _index = require("../../index.js"); -/** - * Migrates collections of RxDB version A and puts them - * into a RxDatabase that is created with version B. - * This function only works from the previous major version upwards. - * Do not use it to migrate like rxdb v9 to v14. - */ -async function migrateStorage(params) { - var collections = Object.values(params.database.collections); - var batchSize = params.batchSize ? params.batchSize : 10; - if (params.parallel) { - await Promise.all(collections.map(collection => migrateCollection(collection, params.oldDatabaseName, params.oldStorage, batchSize, params.afterMigrateBatch, params.logFunction))); - } else { - for (var collection of collections) { - await migrateCollection(collection, params.oldDatabaseName, params.oldStorage, batchSize, params.afterMigrateBatch, params.logFunction); - } - } -} -async function migrateCollection(collection, oldDatabaseName, oldStorage, batchSize, afterMigrateBatch, -// to log each step, pass console.log.bind(console) here. -logFunction) { - function log(message) { - if (logFunction) { - logFunction('migrateCollection(' + collection.name + ')' + message); - } - } - log('start migrateCollection()'); - var schema = collection.schema.jsonSchema; - var primaryPath = collection.schema.primaryPath; - var oldDatabaseInstanceToken = (0, _index.randomCouchString)(10); - - /** - * In RxDB v15 we changed how the indexes are created. - * Before (v14), the storage prepended the _deleted field - * to all indexes. - * In v15, RxDB will prepend the _deleted field BEFORE sending - * it to the storage. 
Therefore we have to strip these fields - * when crating v14 storage instances. - */ - if (!oldStorage.rxdbVersion && schema.indexes) { - schema = (0, _index.clone)(schema); - schema.indexes = (0, _index.ensureNotFalsy)(schema.indexes).map(index => { - index = (0, _index.toArray)(index).filter(field => field !== '_deleted'); - if (index.includes('_meta.lwt')) { - return null; - } - return index; - }).filter(_index.arrayFilterNotEmpty); - } - var oldStorageInstance = await oldStorage.createStorageInstance({ - databaseName: oldDatabaseName, - collectionName: collection.name, - multiInstance: false, - options: {}, - schema: schema, - databaseInstanceToken: oldDatabaseInstanceToken, - devMode: false - }); - var plainQuery = { - selector: { - _deleted: { - $eq: false - } - }, - limit: batchSize, - sort: [{ - [primaryPath]: 'asc' - }], - skip: 0 - }; - - /** - * In RxDB v15 we removed statics.prepareQuery() - * But to be downwards compatible, still use that - * when migrating from an old storage. - * TODO remove this in the next major version. v16. - */ - var preparedQuery; - if (oldStorage.statics && oldStorage.statics.prepareQuery) { - preparedQuery = oldStorage.statics.prepareQuery(schema, plainQuery); - } else { - preparedQuery = (0, _index.prepareQuery)(schema, plainQuery); - } - var _loop = async function () { - log('loop once'); - /** - * Get a batch of documents - */ - var queryResult = await oldStorageInstance.query(preparedQuery); - var docs = queryResult.documents; - if (docs.length === 0) { - /** - * No more documents to migrate - */ - log('migration of collection done'); - await oldStorageInstance.remove(); - return { - v: void 0 - }; - } - var docsNonMutated = (0, _index.clone)(docs); - - /** - * Get attachments - * if defined in the schema. 
- */ - if (schema.attachments) { - await Promise.all(docs.map(async doc => { - var docId = doc[primaryPath]; - await Promise.all(Object.entries(doc._attachments).map(async ([attachmentId, attachmentMeta]) => { - var attachmentData = await oldStorageInstance.getAttachmentData(docId, attachmentId, attachmentMeta.digest); - var attachmentDataString = await (0, _index.blobToBase64String)(attachmentData); - doc._attachments[attachmentId] = { - data: attachmentDataString, - digest: attachmentMeta.digest, - length: attachmentMeta.length, - type: attachmentMeta.type - }; - })); - })); - log('got attachments'); - } - - /** - * Insert the documents to the new storage - */ - var insertToNewWriteRows = docs.map(document => { - return { - document - }; - }); - var writeToNewResult = await collection.storageInstance.bulkWrite(insertToNewWriteRows, 'migrate-storage'); - log('written batch to new storage'); - - // TODO we should throw on non-conflict errors here. - // if (Object.keys(writeToNewResult.error).length > 0) { - // throw new Error('could not write to new storage'); - // } - - /** - * Remove the docs from the old storage - */ - var writeToOldRows = docs.map((_doc, idx) => { - var previous = docsNonMutated[idx]; - if (!previous._meta) { - previous._meta = { - lwt: new Date().getTime() - }; - } - var newDoc = (0, _index.clone)(previous); - newDoc._deleted = true; - if (!newDoc._meta) { - newDoc._meta = { - lwt: new Date().getTime() - }; - } - newDoc._meta.lwt = new Date().getTime() + 1; - newDoc._rev = (0, _index.createRevision)(oldDatabaseInstanceToken, previous); - return { - previous, - document: newDoc - }; - }); - try { - var writeToOldResult = await oldStorageInstance.bulkWrite(writeToOldRows, 'migrate-between-rxdb-versions'); - if (Object.keys(writeToOldResult.error).length > 0) { - console.dir({ - writeToOldRows, - errors: writeToOldResult.error - }); - throw new Error('got error while deleting migrated documents on the old storage'); - } - } catch (err) { - 
log('could not delete on old instance'); - console.dir(err); - throw err; - } - log('deleted batch on old storage'); - await oldStorageInstance.cleanup(0).catch(() => { - /** - * Migration from RxDB v14 to v15 had problem running the cleanup() - * on the old storage because the indexing structure changed. - * Because the periodic cleanup during migration - * is an optional step, we just log instead of throwing an error. - * @link https://github.com/pubkey/rxdb/issues/5565 - * - * TODO remove this in the next major version - */ - log('oldStorageInstance.cleanup(0) has thrown'); - }); - - // run the handler if provided - if (afterMigrateBatch) { - await afterMigrateBatch({ - databaseName: collection.database.name, - collectionName: collection.name, - oldDatabaseName, - insertToNewWriteRows, - writeToNewResult - }); - } - }, - _ret; - while (true) { - _ret = await _loop(); - if (_ret) return _ret.v; - } -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/migration-storage/index.js.map b/dist/cjs/plugins/migration-storage/index.js.map deleted file mode 100644 index 94bb49c7c0c..00000000000 --- a/dist/cjs/plugins/migration-storage/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","names":["_index","require","migrateStorage","params","collections","Object","values","database","batchSize","parallel","Promise","all","map","collection","migrateCollection","oldDatabaseName","oldStorage","afterMigrateBatch","logFunction","log","message","name","schema","jsonSchema","primaryPath","oldDatabaseInstanceToken","randomCouchString","rxdbVersion","indexes","clone","ensureNotFalsy","index","toArray","filter","field","includes","arrayFilterNotEmpty","oldStorageInstance","createStorageInstance","databaseName","collectionName","multiInstance","options","databaseInstanceToken","devMode","plainQuery","selector","_deleted","$eq","limit","sort","skip","preparedQuery","statics","prepareQuery","_loop","queryResult","query","docs","documents","length","remove","v","docsNonMutated","attachments","doc","docId","entries","_attachments","attachmentId","attachmentMeta","attachmentData","getAttachmentData","digest","attachmentDataString","blobToBase64String","data","type","insertToNewWriteRows","document","writeToNewResult","storageInstance","bulkWrite","writeToOldRows","_doc","idx","previous","_meta","lwt","Date","getTime","newDoc","_rev","createRevision","writeToOldResult","keys","error","console","dir","errors","Error","err","cleanup","catch","_ret"],"sources":["../../../../src/plugins/migration-storage/index.ts"],"sourcesContent":["import {\n RxDatabase,\n RxCollection,\n createRevision,\n clone,\n BulkWriteRow,\n RxStorageBulkWriteResponse,\n randomCouchString,\n RxStorage,\n blobToBase64String,\n prepareQuery,\n PreparedQuery,\n FilledMangoQuery,\n ensureNotFalsy,\n toArray,\n arrayFilterNotEmpty\n} from '../../index.ts';\n\nexport type RxStorageOld = RxStorage | any;\n\nexport type AfterMigrateBatchHandlerInput = {\n databaseName: string;\n collectionName: string;\n oldDatabaseName: string;\n insertToNewWriteRows: BulkWriteRow[];\n writeToNewResult: RxStorageBulkWriteResponse;\n};\nexport type AfterMigrateBatchHandler = (\n input: 
AfterMigrateBatchHandlerInput\n) => any | Promise;\n\n\nexport type MigrateStorageParams = {\n database: RxDatabase;\n /**\n * Using the migration plugin requires you\n * to rename your new old database.\n * The original name of the v11 database must be provided here.\n */\n oldDatabaseName: string;\n oldStorage: RxStorageOld;\n batchSize?: number;\n parallel?: boolean;\n afterMigrateBatch?: AfterMigrateBatchHandler;\n // to log each step, pass console.log.bind(console) here.\n logFunction?: (message: string) => void;\n}\n\n/**\n * Migrates collections of RxDB version A and puts them\n * into a RxDatabase that is created with version B.\n * This function only works from the previous major version upwards.\n * Do not use it to migrate like rxdb v9 to v14. \n */\nexport async function migrateStorage(\n params: MigrateStorageParams\n): Promise {\n const collections = Object.values(params.database.collections);\n const batchSize = params.batchSize ? params.batchSize : 10;\n if (params.parallel) {\n await Promise.all(\n collections.map(collection => migrateCollection(\n collection,\n params.oldDatabaseName,\n params.oldStorage,\n batchSize,\n params.afterMigrateBatch,\n params.logFunction\n ))\n );\n } else {\n for (const collection of collections) {\n await migrateCollection(\n collection,\n params.oldDatabaseName,\n params.oldStorage,\n batchSize,\n params.afterMigrateBatch,\n params.logFunction\n );\n }\n }\n}\n\nexport async function migrateCollection(\n collection: RxCollection,\n oldDatabaseName: string,\n oldStorage: RxStorageOld,\n batchSize: number,\n afterMigrateBatch?: AfterMigrateBatchHandler,\n // to log each step, pass console.log.bind(console) here.\n logFunction?: (message: string) => void\n) {\n function log(message: string) {\n if (logFunction) {\n logFunction('migrateCollection(' + collection.name + ')' + message);\n }\n }\n log('start migrateCollection()');\n let schema = collection.schema.jsonSchema;\n const primaryPath = 
collection.schema.primaryPath;\n const oldDatabaseInstanceToken = randomCouchString(10);\n\n\n /**\n * In RxDB v15 we changed how the indexes are created.\n * Before (v14), the storage prepended the _deleted field\n * to all indexes.\n * In v15, RxDB will prepend the _deleted field BEFORE sending\n * it to the storage. Therefore we have to strip these fields\n * when crating v14 storage instances.\n */\n if (!oldStorage.rxdbVersion && schema.indexes) {\n schema = clone(schema);\n schema.indexes = ensureNotFalsy(schema.indexes).map(index => {\n index = toArray(index).filter(field => field !== '_deleted');\n if (index.includes('_meta.lwt')) {\n return null;\n }\n return index;\n }).filter(arrayFilterNotEmpty);\n\n }\n\n const oldStorageInstance = await oldStorage.createStorageInstance({\n databaseName: oldDatabaseName,\n collectionName: collection.name,\n multiInstance: false,\n options: {},\n schema: schema,\n databaseInstanceToken: oldDatabaseInstanceToken,\n devMode: false\n });\n\n\n const plainQuery: FilledMangoQuery = {\n selector: {\n _deleted: {\n $eq: false\n }\n } as any,\n limit: batchSize,\n sort: [{ [primaryPath]: 'asc' } as any],\n skip: 0\n };\n\n /**\n * In RxDB v15 we removed statics.prepareQuery()\n * But to be downwards compatible, still use that\n * when migrating from an old storage.\n * TODO remove this in the next major version. 
v16.\n */\n let preparedQuery: PreparedQuery;\n if (oldStorage.statics && oldStorage.statics.prepareQuery) {\n preparedQuery = oldStorage.statics.prepareQuery(\n schema,\n plainQuery\n );\n } else {\n preparedQuery = prepareQuery(\n schema,\n plainQuery\n );\n }\n\n while (true) {\n log('loop once');\n /**\n * Get a batch of documents\n */\n const queryResult = await oldStorageInstance.query(preparedQuery);\n const docs = queryResult.documents;\n if (docs.length === 0) {\n /**\n * No more documents to migrate\n */\n log('migration of collection done');\n await oldStorageInstance.remove();\n return;\n }\n\n const docsNonMutated = clone(docs);\n\n /**\n * Get attachments\n * if defined in the schema.\n */\n if (schema.attachments) {\n await Promise.all(\n docs.map(async (doc: any) => {\n const docId: string = (doc as any)[primaryPath];\n await Promise.all(\n Object.entries(doc._attachments).map(async ([attachmentId, attachmentMeta]) => {\n const attachmentData = await oldStorageInstance.getAttachmentData(\n docId,\n attachmentId,\n (attachmentMeta as any).digest\n );\n const attachmentDataString = await blobToBase64String(attachmentData);\n (doc as any)._attachments[attachmentId] = {\n data: attachmentDataString,\n digest: (attachmentMeta as any).digest,\n length: (attachmentMeta as any).length,\n type: (attachmentMeta as any).type\n }\n })\n );\n })\n );\n log('got attachments');\n }\n\n /**\n * Insert the documents to the new storage\n */\n const insertToNewWriteRows: BulkWriteRow[] = docs.map((document: any) => {\n return { document };\n });\n const writeToNewResult: RxStorageBulkWriteResponse = await collection.storageInstance.bulkWrite(\n insertToNewWriteRows,\n 'migrate-storage'\n );\n log('written batch to new storage');\n\n // TODO we should throw on non-conflict errors here.\n // if (Object.keys(writeToNewResult.error).length > 0) {\n // throw new Error('could not write to new storage');\n // }\n\n /**\n * Remove the docs from the old storage\n */\n const 
writeToOldRows = docs.map((_doc: any, idx: number) => {\n const previous = docsNonMutated[idx];\n if (!previous._meta) {\n previous._meta = {\n lwt: new Date().getTime()\n };\n }\n\n const newDoc: typeof previous = clone(previous);\n newDoc._deleted = true;\n if (!newDoc._meta) {\n newDoc._meta = {\n lwt: new Date().getTime()\n };\n }\n newDoc._meta.lwt = new Date().getTime() + 1;\n newDoc._rev = createRevision(\n oldDatabaseInstanceToken,\n previous\n );\n\n return {\n previous,\n document: newDoc,\n }\n });\n try {\n const writeToOldResult = await oldStorageInstance.bulkWrite(\n writeToOldRows,\n 'migrate-between-rxdb-versions'\n );\n if (Object.keys(writeToOldResult.error).length > 0) {\n console.dir({\n writeToOldRows,\n errors: writeToOldResult.error\n });\n throw new Error('got error while deleting migrated documents on the old storage');\n }\n } catch (err) {\n log('could not delete on old instance');\n console.dir(err);\n throw err;\n }\n log('deleted batch on old storage');\n await oldStorageInstance.cleanup(0)\n .catch(() => {\n /**\n * Migration from RxDB v14 to v15 had problem running the cleanup()\n * on the old storage because the indexing structure changed.\n * Because the periodic cleanup during migration\n * is an optional step, we just log instead of throwing an error.\n * @link https://github.com/pubkey/rxdb/issues/5565\n * \n * TODO remove this in the next major version\n */\n log('oldStorageInstance.cleanup(0) has thrown');\n });\n\n // run the handler if provided\n if (afterMigrateBatch) {\n await afterMigrateBatch({\n databaseName: collection.database.name,\n collectionName: collection.name,\n oldDatabaseName,\n insertToNewWriteRows,\n writeToNewResult\n });\n }\n 
}\n}\n\n\n\n\n\n"],"mappings":";;;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AAgDA;AACA;AACA;AACA;AACA;AACA;AACO,eAAeC,cAAcA,CAChCC,MAA4B,EACf;EACb,IAAMC,WAAW,GAAGC,MAAM,CAACC,MAAM,CAACH,MAAM,CAACI,QAAQ,CAACH,WAAW,CAAC;EAC9D,IAAMI,SAAS,GAAGL,MAAM,CAACK,SAAS,GAAGL,MAAM,CAACK,SAAS,GAAG,EAAE;EAC1D,IAAIL,MAAM,CAACM,QAAQ,EAAE;IACjB,MAAMC,OAAO,CAACC,GAAG,CACbP,WAAW,CAACQ,GAAG,CAACC,UAAU,IAAIC,iBAAiB,CAC3CD,UAAU,EACVV,MAAM,CAACY,eAAe,EACtBZ,MAAM,CAACa,UAAU,EACjBR,SAAS,EACTL,MAAM,CAACc,iBAAiB,EACxBd,MAAM,CAACe,WACX,CAAC,CACL,CAAC;EACL,CAAC,MAAM;IACH,KAAK,IAAML,UAAU,IAAIT,WAAW,EAAE;MAClC,MAAMU,iBAAiB,CACnBD,UAAU,EACVV,MAAM,CAACY,eAAe,EACtBZ,MAAM,CAACa,UAAU,EACjBR,SAAS,EACTL,MAAM,CAACc,iBAAiB,EACxBd,MAAM,CAACe,WACX,CAAC;IACL;EACJ;AACJ;AAEO,eAAeJ,iBAAiBA,CACnCD,UAAmC,EACnCE,eAAuB,EACvBC,UAAkC,EAClCR,SAAiB,EACjBS,iBAA4C;AAC5C;AACAC,WAAuC,EACzC;EACE,SAASC,GAAGA,CAACC,OAAe,EAAE;IAC1B,IAAIF,WAAW,EAAE;MACbA,WAAW,CAAC,oBAAoB,GAAGL,UAAU,CAACQ,IAAI,GAAG,GAAG,GAAGD,OAAO,CAAC;IACvE;EACJ;EACAD,GAAG,CAAC,2BAA2B,CAAC;EAChC,IAAIG,MAAM,GAAGT,UAAU,CAACS,MAAM,CAACC,UAAU;EACzC,IAAMC,WAAW,GAAGX,UAAU,CAACS,MAAM,CAACE,WAAW;EACjD,IAAMC,wBAAwB,GAAG,IAAAC,wBAAiB,EAAC,EAAE,CAAC;;EAGtD;AACJ;AACA;AACA;AACA;AACA;AACA;AACA;EACI,IAAI,CAACV,UAAU,CAACW,WAAW,IAAIL,MAAM,CAACM,OAAO,EAAE;IAC3CN,MAAM,GAAG,IAAAO,YAAK,EAACP,MAAM,CAAC;IACtBA,MAAM,CAACM,OAAO,GAAG,IAAAE,qBAAc,EAACR,MAAM,CAACM,OAAO,CAAC,CAAChB,GAAG,CAACmB,KAAK,IAAI;MACzDA,KAAK,GAAG,IAAAC,cAAO,EAACD,KAAK,CAAC,CAACE,MAAM,CAACC,KAAK,IAAIA,KAAK,KAAK,UAAU,CAAC;MAC5D,IAAIH,KAAK,CAACI,QAAQ,CAAC,WAAW,CAAC,EAAE;QAC7B,OAAO,IAAI;MACf;MACA,OAAOJ,KAAK;IAChB,CAAC,CAAC,CAACE,MAAM,CAACG,0BAAmB,CAAC;EAElC;EAEA,IAAMC,kBAAkB,GAAG,MAAMrB,UAAU,CAACsB,qBAAqB,CAAC;IAC9DC,YAAY,EAAExB,eAAe;IAC7ByB,cAAc,EAAE3B,UAAU,CAACQ,IAAI;IAC/BoB,aAAa,EAAE,KAAK;IACpBC,OAAO,EAAE,CAAC,CAAC;IACXpB,MAAM,EAAEA,MAAM;IACdqB,qBAAqB,EAAElB,wBAAwB;IAC/CmB,OAAO,EAAE;EACb,CAAC,CAAC;EAGF,IAAMC,UAAuC,GAAG;IAC5CC,QAAQ,EAAE;MACNC,QAAQ,EAAE;QACNC,GAAG,EAAE;MACT;IACJ,CAAQ;IACRC,KAAK,EAAEzC,SAAS;IAChB0C,IAAI,EAAE,CAAC;MA
AE,CAAC1B,WAAW,GAAG;IAAM,CAAC,CAAQ;IACvC2B,IAAI,EAAE;EACV,CAAC;;EAED;AACJ;AACA;AACA;AACA;AACA;EACI,IAAIC,aAAuC;EAC3C,IAAIpC,UAAU,CAACqC,OAAO,IAAIrC,UAAU,CAACqC,OAAO,CAACC,YAAY,EAAE;IACvDF,aAAa,GAAGpC,UAAU,CAACqC,OAAO,CAACC,YAAY,CAC3ChC,MAAM,EACNuB,UACJ,CAAC;EACL,CAAC,MAAM;IACHO,aAAa,GAAG,IAAAE,mBAAY,EACxBhC,MAAM,EACNuB,UACJ,CAAC;EACL;EAAC,IAAAU,KAAA,kBAAAA,CAAA,EAEY;MACTpC,GAAG,CAAC,WAAW,CAAC;MAChB;AACR;AACA;MACQ,IAAMqC,WAAW,GAAG,MAAMnB,kBAAkB,CAACoB,KAAK,CAACL,aAAa,CAAC;MACjE,IAAMM,IAAI,GAAGF,WAAW,CAACG,SAAS;MAClC,IAAID,IAAI,CAACE,MAAM,KAAK,CAAC,EAAE;QACnB;AACZ;AACA;QACYzC,GAAG,CAAC,8BAA8B,CAAC;QACnC,MAAMkB,kBAAkB,CAACwB,MAAM,CAAC,CAAC;QAAC;UAAAC,CAAA;QAAA;MAEtC;MAEA,IAAMC,cAAc,GAAG,IAAAlC,YAAK,EAAC6B,IAAI,CAAC;;MAElC;AACR;AACA;AACA;MACQ,IAAIpC,MAAM,CAAC0C,WAAW,EAAE;QACpB,MAAMtD,OAAO,CAACC,GAAG,CACb+C,IAAI,CAAC9C,GAAG,CAAC,MAAOqD,GAAQ,IAAK;UACzB,IAAMC,KAAa,GAAID,GAAG,CAASzC,WAAW,CAAC;UAC/C,MAAMd,OAAO,CAACC,GAAG,CACbN,MAAM,CAAC8D,OAAO,CAACF,GAAG,CAACG,YAAY,CAAC,CAACxD,GAAG,CAAC,OAAO,CAACyD,YAAY,EAAEC,cAAc,CAAC,KAAK;YAC3E,IAAMC,cAAc,GAAG,MAAMlC,kBAAkB,CAACmC,iBAAiB,CAC7DN,KAAK,EACLG,YAAY,EACXC,cAAc,CAASG,MAC5B,CAAC;YACD,IAAMC,oBAAoB,GAAG,MAAM,IAAAC,yBAAkB,EAACJ,cAAc,CAAC;YACpEN,GAAG,CAASG,YAAY,CAACC,YAAY,CAAC,GAAG;cACtCO,IAAI,EAAEF,oBAAoB;cAC1BD,MAAM,EAAGH,cAAc,CAASG,MAAM;cACtCb,MAAM,EAAGU,cAAc,CAASV,MAAM;cACtCiB,IAAI,EAAGP,cAAc,CAASO;YAClC,CAAC;UACL,CAAC,CACL,CAAC;QACL,CAAC,CACL,CAAC;QACD1D,GAAG,CAAC,iBAAiB,CAAC;MAC1B;;MAEA;AACR;AACA;MACQ,IAAM2D,oBAAyC,GAAGpB,IAAI,CAAC9C,GAAG,CAAEmE,QAAa,IAAK;QAC1E,OAAO;UAAEA;QAAS,CAAC;MACvB,CAAC,CAAC;MACF,IAAMC,gBAAiD,GAAG,MAAMnE,UAAU,CAACoE,eAAe,CAACC,SAAS,CAChGJ,oBAAoB,EACpB,iBACJ,CAAC;MACD3D,GAAG,CAAC,8BAA8B,CAAC;;MAEnC;MACA;MACA;MACA;;MAEA;AACR;AACA;MACQ,IAAMgE,cAAc,GAAGzB,IAAI,CAAC9C,GAAG,CAAC,CAACwE,IAAS,EAAEC,GAAW,KAAK;QACxD,IAAMC,QAAQ,GAAGvB,cAAc,CAACsB,GAAG,CAAC;QACpC,IAAI,CAACC,QAAQ,CAACC,KAAK,EAAE;UACjBD,QAAQ,CAACC,KAAK,GAAG;YACbC,GAAG,EAAE,IAAIC,IAAI,CAAC,CAAC,CAACC,OAAO,CAAC;UAC5B,CAAC;QACL;QAEA,IAAMC,MAAuB,GAAG,IAAA9D
,YAAK,EAACyD,QAAQ,CAAC;QAC/CK,MAAM,CAAC5C,QAAQ,GAAG,IAAI;QACtB,IAAI,CAAC4C,MAAM,CAACJ,KAAK,EAAE;UACfI,MAAM,CAACJ,KAAK,GAAG;YACXC,GAAG,EAAE,IAAIC,IAAI,CAAC,CAAC,CAACC,OAAO,CAAC;UAC5B,CAAC;QACL;QACAC,MAAM,CAACJ,KAAK,CAACC,GAAG,GAAG,IAAIC,IAAI,CAAC,CAAC,CAACC,OAAO,CAAC,CAAC,GAAG,CAAC;QAC3CC,MAAM,CAACC,IAAI,GAAG,IAAAC,qBAAc,EACxBpE,wBAAwB,EACxB6D,QACJ,CAAC;QAED,OAAO;UACHA,QAAQ;UACRP,QAAQ,EAAEY;QACd,CAAC;MACL,CAAC,CAAC;MACF,IAAI;QACA,IAAMG,gBAAgB,GAAG,MAAMzD,kBAAkB,CAAC6C,SAAS,CACvDC,cAAc,EACd,+BACJ,CAAC;QACD,IAAI9E,MAAM,CAAC0F,IAAI,CAACD,gBAAgB,CAACE,KAAK,CAAC,CAACpC,MAAM,GAAG,CAAC,EAAE;UAChDqC,OAAO,CAACC,GAAG,CAAC;YACRf,cAAc;YACdgB,MAAM,EAAEL,gBAAgB,CAACE;UAC7B,CAAC,CAAC;UACF,MAAM,IAAII,KAAK,CAAC,gEAAgE,CAAC;QACrF;MACJ,CAAC,CAAC,OAAOC,GAAG,EAAE;QACVlF,GAAG,CAAC,kCAAkC,CAAC;QACvC8E,OAAO,CAACC,GAAG,CAACG,GAAG,CAAC;QAChB,MAAMA,GAAG;MACb;MACAlF,GAAG,CAAC,8BAA8B,CAAC;MACnC,MAAMkB,kBAAkB,CAACiE,OAAO,CAAC,CAAC,CAAC,CAC9BC,KAAK,CAAC,MAAM;QACT;AAChB;AACA;AACA;AACA;AACA;AACA;AACA;AACA;QACgBpF,GAAG,CAAC,0CAA0C,CAAC;MACnD,CAAC,CAAC;;MAEN;MACA,IAAIF,iBAAiB,EAAE;QACnB,MAAMA,iBAAiB,CAAC;UACpBsB,YAAY,EAAE1B,UAAU,CAACN,QAAQ,CAACc,IAAI;UACtCmB,cAAc,EAAE3B,UAAU,CAACQ,IAAI;UAC/BN,eAAe;UACf+D,oBAAoB;UACpBE;QACJ,CAAC,CAAC;MACN;IACJ,CAAC;IAAAwB,IAAA;EAvID,OAAO,IAAI;IAAAA,IAAA,SAAAjD,KAAA;IAAA,IAAAiD,IAAA,SAAAA,IAAA,CAAA1C,CAAA;EAAA;AAwIf","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/query-builder/index.js b/dist/cjs/plugins/query-builder/index.js deleted file mode 100644 index 573a24d52a0..00000000000 --- a/dist/cjs/plugins/query-builder/index.js +++ /dev/null @@ -1,62 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _exportNames = { - runBuildingStep: true, - applyBuildingStep: true, - RxDBQueryBuilderPlugin: true -}; -exports.RxDBQueryBuilderPlugin = void 0; -exports.applyBuildingStep = applyBuildingStep; -exports.runBuildingStep = runBuildingStep; -var _nosqlQueryBuilder = 
require("./mquery/nosql-query-builder.js"); -Object.keys(_nosqlQueryBuilder).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _nosqlQueryBuilder[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _nosqlQueryBuilder[key]; - } - }); -}); -var _rxQuery = require("../../rx-query.js"); -var _index = require("../../plugins/utils/index.js"); -// if the query-builder plugin is used, we have to save its last path -var RXQUERY_OTHER_FLAG = 'queryBuilderPath'; -function runBuildingStep(rxQuery, functionName, value) { - var queryBuilder = (0, _nosqlQueryBuilder.createQueryBuilder)((0, _index.clone)(rxQuery.mangoQuery), rxQuery.other[RXQUERY_OTHER_FLAG]); - queryBuilder[functionName](value); // run - - var queryBuilderJson = queryBuilder.toJSON(); - return (0, _rxQuery.createRxQuery)(rxQuery.op, queryBuilderJson.query, rxQuery.collection, { - ...rxQuery.other, - [RXQUERY_OTHER_FLAG]: queryBuilderJson.path - }); -} -function applyBuildingStep(proto, functionName) { - proto[functionName] = function (value) { - return runBuildingStep(this, functionName, value); - }; -} -var RxDBQueryBuilderPlugin = exports.RxDBQueryBuilderPlugin = { - name: 'query-builder', - rxdb: true, - prototypes: { - RxQuery(proto) { - ['where', 'equals', 'eq', 'or', 'nor', 'and', 'mod', 'exists', 'elemMatch', 'sort'].forEach(attribute => { - applyBuildingStep(proto, attribute); - }); - _nosqlQueryBuilder.OTHER_MANGO_ATTRIBUTES.forEach(attribute => { - applyBuildingStep(proto, attribute); - }); - _nosqlQueryBuilder.OTHER_MANGO_OPERATORS.forEach(operator => { - applyBuildingStep(proto, operator); - }); - } - } -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/query-builder/index.js.map b/dist/cjs/plugins/query-builder/index.js.map deleted file mode 100644 index 
4d74551a994..00000000000 --- a/dist/cjs/plugins/query-builder/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_nosqlQueryBuilder","require","Object","keys","forEach","key","prototype","hasOwnProperty","call","_exportNames","exports","defineProperty","enumerable","get","_rxQuery","_index","RXQUERY_OTHER_FLAG","runBuildingStep","rxQuery","functionName","value","queryBuilder","createQueryBuilder","clone","mangoQuery","other","queryBuilderJson","toJSON","createRxQuery","op","query","collection","path","applyBuildingStep","proto","RxDBQueryBuilderPlugin","name","rxdb","prototypes","RxQuery","attribute","OTHER_MANGO_ATTRIBUTES","OTHER_MANGO_OPERATORS","operator"],"sources":["../../../../src/plugins/query-builder/index.ts"],"sourcesContent":["import {\n createQueryBuilder,\n OTHER_MANGO_ATTRIBUTES,\n OTHER_MANGO_OPERATORS\n} from './mquery/nosql-query-builder.ts';\nimport type { RxPlugin, RxQuery } from '../../types/index.d.ts';\nimport { createRxQuery } from '../../rx-query.ts';\nimport { clone } from '../../plugins/utils/index.ts';\n\n// if the query-builder plugin is used, we have to save its last path\nconst RXQUERY_OTHER_FLAG = 'queryBuilderPath';\n\nexport function runBuildingStep(\n rxQuery: RxQuery,\n functionName: string,\n value: any\n): RxQuery {\n const queryBuilder = createQueryBuilder(clone(rxQuery.mangoQuery), rxQuery.other[RXQUERY_OTHER_FLAG]);\n\n (queryBuilder as any)[functionName](value); // run\n\n const queryBuilderJson = queryBuilder.toJSON();\n\n return createRxQuery(\n rxQuery.op,\n queryBuilderJson.query,\n rxQuery.collection,\n {\n ...rxQuery.other,\n [RXQUERY_OTHER_FLAG]: queryBuilderJson.path\n }\n ) as RxQuery;\n}\n\nexport function applyBuildingStep(\n proto: any,\n functionName: string\n): void {\n proto[functionName] = function (this: RxQuery, value: any) {\n return runBuildingStep(this, functionName, value);\n };\n}\n\nexport * from './mquery/nosql-query-builder.ts';\n\nexport const 
RxDBQueryBuilderPlugin: RxPlugin = {\n name: 'query-builder',\n rxdb: true,\n prototypes: {\n RxQuery(proto: any) {\n [\n 'where',\n 'equals',\n 'eq',\n 'or',\n 'nor',\n 'and',\n 'mod',\n 'exists',\n 'elemMatch',\n 'sort'\n ].forEach(attribute => {\n applyBuildingStep(proto, attribute);\n });\n OTHER_MANGO_ATTRIBUTES.forEach(attribute => {\n applyBuildingStep(proto, attribute);\n });\n OTHER_MANGO_OPERATORS.forEach(operator => {\n applyBuildingStep(proto, operator);\n });\n }\n }\n};\n"],"mappings":";;;;;;;;;;;;;AAAA,IAAAA,kBAAA,GAAAC,OAAA;AA2CAC,MAAA,CAAAC,IAAA,CAAAH,kBAAA,EAAAI,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAL,kBAAA,CAAAK,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAb,kBAAA,CAAAK,GAAA;IAAA;EAAA;AAAA;AArCA,IAAAS,QAAA,GAAAb,OAAA;AACA,IAAAc,MAAA,GAAAd,OAAA;AAEA;AACA,IAAMe,kBAAkB,GAAG,kBAAkB;AAEtC,SAASC,eAAeA,CAC3BC,OAA+C,EAC/CC,YAAoB,EACpBC,KAAU,EAC4B;EACtC,IAAMC,YAAY,GAAG,IAAAC,qCAAkB,EAAC,IAAAC,YAAK,EAACL,OAAO,CAACM,UAAU,CAAC,EAAEN,OAAO,CAACO,KAAK,CAACT,kBAAkB,CAAC,CAAC;EAEpGK,YAAY,CAASF,YAAY,CAAC,CAACC,KAAK,CAAC,CAAC,CAAC;;EAE5C,IAAMM,gBAAgB,GAAGL,YAAY,CAACM,MAAM,CAAC,CAAC;EAE9C,OAAO,IAAAC,sBAAa,EAChBV,OAAO,CAACW,EAAE,EACVH,gBAAgB,CAACI,KAAK,EACtBZ,OAAO,CAACa,UAAU,EAClB;IACI,GAAGb,OAAO,CAACO,KAAK;IAChB,CAACT,kBAAkB,GAAGU,gBAAgB,CAACM;EAC3C,CACJ,CAAC;AACL;AAEO,SAASC,iBAAiBA,CAC7BC,KAAU,EACVf,YAAoB,EAChB;EACJe,KAAK,CAACf,YAAY,CAAC,GAAG,UAAyBC,KAAU,EAAE;IACvD,OAAOH,eAAe,CAAC,IAAI,EAAEE,YAAY,EAAEC,KAAK,CAAC;EACrD,CAAC;AACL;AAIO,IAAMe,sBAAgC,GAAAzB,OAAA,CAAAyB,sBAAA,GAAG;EAC5CC,IAAI,EAAE,eAAe;EACrBC,IAAI,EAAE,IAAI;EACVC,UAAU,EAAE;IACRC,OAAOA,CAACL,KAAU,EAAE;MAChB,CACI,OAAO,EACP,QAAQ,EACR,IAAI,EACJ,IAAI,EACJ,KAAK,EACL,KAAK,EACL,KAAK,EACL,QAAQ,EACR,WAAW,EACX,MAAM,CACT,CAAC9B,OAAO,CAACoC,SAAS,IAAI;QACnBP,iBAAiB,CAACC,KAAK,EAAEM,SAAS,CAAC;MACvC,CAAC,CAAC;MACFC,yCAAsB,CAACrC,OAAO,CAACoC,SAAS,IA
AI;QACxCP,iBAAiB,CAACC,KAAK,EAAEM,SAAS,CAAC;MACvC,CAAC,CAAC;MACFE,wCAAqB,CAACtC,OAAO,CAACuC,QAAQ,IAAI;QACtCV,iBAAiB,CAACC,KAAK,EAAES,QAAQ,CAAC;MACtC,CAAC,CAAC;IACN;EACJ;AACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/query-builder/mquery/mquery-utils.js b/dist/cjs/plugins/query-builder/mquery/mquery-utils.js deleted file mode 100644 index ea44bec5ed1..00000000000 --- a/dist/cjs/plugins/query-builder/mquery/mquery-utils.js +++ /dev/null @@ -1,40 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.isObject = isObject; -exports.merge = merge; -/** - * this is copied from - * @link https://github.com/aheckmann/mquery/blob/master/lib/utils.js - */ - -/** - * @link https://github.com/aheckmann/mquery/commit/792e69fd0a7281a0300be5cade5a6d7c1d468ad4 - */ -var SPECIAL_PROPERTIES = ['__proto__', 'constructor', 'prototype']; - -/** - * Merges 'from' into 'to' without overwriting existing properties. - */ -function merge(to, from) { - Object.keys(from).forEach(key => { - if (SPECIAL_PROPERTIES.includes(key)) { - return; - } - if (typeof to[key] === 'undefined') { - to[key] = from[key]; - } else { - if (isObject(from[key])) merge(to[key], from[key]);else to[key] = from[key]; - } - }); -} - -/** - * Determines if `arg` is an object. 
- */ -function isObject(arg) { - return '[object Object]' === arg.toString(); -} -//# sourceMappingURL=mquery-utils.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/query-builder/mquery/mquery-utils.js.map b/dist/cjs/plugins/query-builder/mquery/mquery-utils.js.map deleted file mode 100644 index 7938f212749..00000000000 --- a/dist/cjs/plugins/query-builder/mquery/mquery-utils.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"mquery-utils.js","names":["SPECIAL_PROPERTIES","merge","to","from","Object","keys","forEach","key","includes","isObject","arg","toString"],"sources":["../../../../../src/plugins/query-builder/mquery/mquery-utils.ts"],"sourcesContent":["/**\n * this is copied from\n * @link https://github.com/aheckmann/mquery/blob/master/lib/utils.js\n */\n\n\n/**\n * @link https://github.com/aheckmann/mquery/commit/792e69fd0a7281a0300be5cade5a6d7c1d468ad4\n */\nconst SPECIAL_PROPERTIES = ['__proto__', 'constructor', 'prototype'];\n\n/**\n * Merges 'from' into 'to' without overwriting existing properties.\n */\nexport function merge(to: any, from: any): any {\n Object.keys(from)\n .forEach(key => {\n if (SPECIAL_PROPERTIES.includes(key)) {\n return;\n }\n if (typeof to[key] === 'undefined') {\n to[key] = from[key];\n } else {\n if (isObject(from[key]))\n merge(to[key], from[key]);\n else\n to[key] = from[key];\n }\n });\n}\n\n/**\n * Determines if `arg` is an object.\n */\nexport function isObject(arg: Object | any[] | String | Function | RegExp | any): boolean {\n return '[object Object]' === 
arg.toString();\n}\n"],"mappings":";;;;;;;AAAA;AACA;AACA;AACA;;AAGA;AACA;AACA;AACA,IAAMA,kBAAkB,GAAG,CAAC,WAAW,EAAE,aAAa,EAAE,WAAW,CAAC;;AAEpE;AACA;AACA;AACO,SAASC,KAAKA,CAACC,EAAO,EAAEC,IAAS,EAAO;EAC3CC,MAAM,CAACC,IAAI,CAACF,IAAI,CAAC,CACZG,OAAO,CAACC,GAAG,IAAI;IACZ,IAAIP,kBAAkB,CAACQ,QAAQ,CAACD,GAAG,CAAC,EAAE;MAClC;IACJ;IACA,IAAI,OAAOL,EAAE,CAACK,GAAG,CAAC,KAAK,WAAW,EAAE;MAChCL,EAAE,CAACK,GAAG,CAAC,GAAGJ,IAAI,CAACI,GAAG,CAAC;IACvB,CAAC,MAAM;MACH,IAAIE,QAAQ,CAACN,IAAI,CAACI,GAAG,CAAC,CAAC,EACnBN,KAAK,CAACC,EAAE,CAACK,GAAG,CAAC,EAAEJ,IAAI,CAACI,GAAG,CAAC,CAAC,CAAC,KAE1BL,EAAE,CAACK,GAAG,CAAC,GAAGJ,IAAI,CAACI,GAAG,CAAC;IAC3B;EACJ,CAAC,CAAC;AACV;;AAEA;AACA;AACA;AACO,SAASE,QAAQA,CAACC,GAAsD,EAAW;EACtF,OAAO,iBAAiB,KAAKA,GAAG,CAACC,QAAQ,CAAC,CAAC;AAC/C","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/query-builder/mquery/nosql-query-builder.js b/dist/cjs/plugins/query-builder/mquery/nosql-query-builder.js deleted file mode 100644 index dfa63b6c730..00000000000 --- a/dist/cjs/plugins/query-builder/mquery/nosql-query-builder.js +++ /dev/null @@ -1,486 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.OTHER_MANGO_OPERATORS = exports.OTHER_MANGO_ATTRIBUTES = exports.NoSqlQueryBuilderClass = void 0; -exports.canMerge = canMerge; -exports.createQueryBuilder = createQueryBuilder; -exports.mQuerySortToRxDBSort = mQuerySortToRxDBSort; -var _mqueryUtils = require("./mquery-utils.js"); -var _rxError = require("../../../rx-error.js"); -/** - * this is based on - * @link https://github.com/aheckmann/mquery/blob/master/lib/mquery.js - */ -var NoSqlQueryBuilderClass = exports.NoSqlQueryBuilderClass = /*#__PURE__*/function () { - /** - * MQuery constructor used for building queries. 
- * - * ####Example: - * var query = new MQuery({ name: 'mquery' }); - * query.where('age').gte(21).exec(callback); - * - */ - function NoSqlQueryBuilderClass(mangoQuery, _path) { - this.options = {}; - this._conditions = {}; - this._fields = {}; - this._path = _path; - if (mangoQuery) { - var queryBuilder = this; - if (mangoQuery.selector) { - queryBuilder.find(mangoQuery.selector); - } - if (mangoQuery.limit) { - queryBuilder.limit(mangoQuery.limit); - } - if (mangoQuery.skip) { - queryBuilder.skip(mangoQuery.skip); - } - if (mangoQuery.sort) { - mangoQuery.sort.forEach(s => queryBuilder.sort(s)); - } - } - } - - /** - * Specifies a `path` for use with chaining. - */ - var _proto = NoSqlQueryBuilderClass.prototype; - _proto.where = function where(_path, _val) { - if (!arguments.length) return this; - var type = typeof arguments[0]; - if ('string' === type) { - this._path = arguments[0]; - if (2 === arguments.length) { - this._conditions[this._path] = arguments[1]; - } - return this; - } - if ('object' === type && !Array.isArray(arguments[0])) { - return this.merge(arguments[0]); - } - throw (0, _rxError.newRxTypeError)('MQ1', { - path: arguments[0] - }); - } - - /** - * Specifies the complementary comparison value for paths specified with `where()` - * ####Example - * User.where('age').equals(49); - */; - _proto.equals = function equals(val) { - this._ensurePath('equals'); - var path = this._path; - this._conditions[path] = val; - return this; - } - - /** - * Specifies the complementary comparison value for paths specified with `where()` - * This is alias of `equals` - */; - _proto.eq = function eq(val) { - this._ensurePath('eq'); - var path = this._path; - this._conditions[path] = val; - return this; - } - - /** - * Specifies arguments for an `$or` condition. 
- * ####Example - * query.or([{ color: 'red' }, { status: 'emergency' }]) - */; - _proto.or = function or(array) { - var or = this._conditions.$or || (this._conditions.$or = []); - if (!Array.isArray(array)) array = [array]; - or.push.apply(or, array); - return this; - } - - /** - * Specifies arguments for a `$nor` condition. - * ####Example - * query.nor([{ color: 'green' }, { status: 'ok' }]) - */; - _proto.nor = function nor(array) { - var nor = this._conditions.$nor || (this._conditions.$nor = []); - if (!Array.isArray(array)) array = [array]; - nor.push.apply(nor, array); - return this; - } - - /** - * Specifies arguments for a `$and` condition. - * ####Example - * query.and([{ color: 'green' }, { status: 'ok' }]) - * @see $and http://docs.mongodb.org/manual/reference/operator/and/ - */; - _proto.and = function and(array) { - var and = this._conditions.$and || (this._conditions.$and = []); - if (!Array.isArray(array)) array = [array]; - and.push.apply(and, array); - return this; - } - - /** - * Specifies a `$mod` condition - */; - _proto.mod = function mod(_path, _val) { - var val; - var path; - if (1 === arguments.length) { - this._ensurePath('mod'); - val = arguments[0]; - path = this._path; - } else if (2 === arguments.length && !Array.isArray(arguments[1])) { - this._ensurePath('mod'); - val = arguments.slice(); - path = this._path; - } else if (3 === arguments.length) { - val = arguments.slice(1); - path = arguments[0]; - } else { - val = arguments[1]; - path = arguments[0]; - } - var conds = this._conditions[path] || (this._conditions[path] = {}); - conds.$mod = val; - return this; - } - - /** - * Specifies an `$exists` condition - * ####Example - * // { name: { $exists: true }} - * Thing.where('name').exists() - * Thing.where('name').exists(true) - * Thing.find().exists('name') - */; - _proto.exists = function exists(_path, _val) { - var path; - var val; - if (0 === arguments.length) { - this._ensurePath('exists'); - path = this._path; - val = true; - } 
else if (1 === arguments.length) { - if ('boolean' === typeof arguments[0]) { - this._ensurePath('exists'); - path = this._path; - val = arguments[0]; - } else { - path = arguments[0]; - val = true; - } - } else if (2 === arguments.length) { - path = arguments[0]; - val = arguments[1]; - } - var conds = this._conditions[path] || (this._conditions[path] = {}); - conds.$exists = val; - return this; - } - - /** - * Specifies an `$elemMatch` condition - * ####Example - * query.elemMatch('comment', { author: 'autobot', votes: {$gte: 5}}) - * query.where('comment').elemMatch({ author: 'autobot', votes: {$gte: 5}}) - * query.elemMatch('comment', function (elem) { - * elem.where('author').equals('autobot'); - * elem.where('votes').gte(5); - * }) - * query.where('comment').elemMatch(function (elem) { - * elem.where({ author: 'autobot' }); - * elem.where('votes').gte(5); - * }) - */; - _proto.elemMatch = function elemMatch(_path, _criteria) { - if (null === arguments[0]) throw (0, _rxError.newRxTypeError)('MQ2'); - var fn; - var path; - var criteria; - if ('function' === typeof arguments[0]) { - this._ensurePath('elemMatch'); - path = this._path; - fn = arguments[0]; - } else if ((0, _mqueryUtils.isObject)(arguments[0])) { - this._ensurePath('elemMatch'); - path = this._path; - criteria = arguments[0]; - } else if ('function' === typeof arguments[1]) { - path = arguments[0]; - fn = arguments[1]; - } else if (arguments[1] && (0, _mqueryUtils.isObject)(arguments[1])) { - path = arguments[0]; - criteria = arguments[1]; - } else throw (0, _rxError.newRxTypeError)('MQ2'); - if (fn) { - criteria = new NoSqlQueryBuilderClass(); - fn(criteria); - criteria = criteria._conditions; - } - var conds = this._conditions[path] || (this._conditions[path] = {}); - conds.$elemMatch = criteria; - return this; - } - - /** - * Sets the sort order - * If an object is passed, values allowed are 'asc', 'desc', 'ascending', 'descending', 1, and -1. 
- * If a string is passed, it must be a space delimited list of path names. - * The sort order of each path is ascending unless the path name is prefixed with `-` which will be treated as descending. - * ####Example - * query.sort({ field: 'asc', test: -1 }); - * query.sort('field -test'); - * query.sort([['field', 1], ['test', -1]]); - */; - _proto.sort = function sort(arg) { - if (!arg) return this; - var len; - var type = typeof arg; - // .sort([['field', 1], ['test', -1]]) - if (Array.isArray(arg)) { - len = arg.length; - for (var i = 0; i < arg.length; ++i) { - _pushArr(this.options, arg[i][0], arg[i][1]); - } - return this; - } - - // .sort('field -test') - if (1 === arguments.length && 'string' === type) { - arg = arg.split(/\s+/); - len = arg.length; - for (var _i = 0; _i < len; ++_i) { - var field = arg[_i]; - if (!field) continue; - var ascend = '-' === field[0] ? -1 : 1; - if (ascend === -1) field = field.substring(1); - push(this.options, field, ascend); - } - return this; - } - - // .sort({ field: 1, test: -1 }) - if ((0, _mqueryUtils.isObject)(arg)) { - var keys = Object.keys(arg); - keys.forEach(field => push(this.options, field, arg[field])); - return this; - } - throw (0, _rxError.newRxTypeError)('MQ3', { - args: arguments - }); - } - - /** - * Merges another MQuery or conditions object into this one. - * - * When a MQuery is passed, conditions, field selection and options are merged. 
- * - */; - _proto.merge = function merge(source) { - if (!source) { - return this; - } - if (!canMerge(source)) { - throw (0, _rxError.newRxTypeError)('MQ4', { - source - }); - } - if (source instanceof NoSqlQueryBuilderClass) { - // if source has a feature, apply it to ourselves - - if (source._conditions) (0, _mqueryUtils.merge)(this._conditions, source._conditions); - if (source._fields) { - if (!this._fields) this._fields = {}; - (0, _mqueryUtils.merge)(this._fields, source._fields); - } - if (source.options) { - if (!this.options) this.options = {}; - (0, _mqueryUtils.merge)(this.options, source.options); - } - if (source._distinct) this._distinct = source._distinct; - return this; - } - - // plain object - (0, _mqueryUtils.merge)(this._conditions, source); - return this; - } - - /** - * Finds documents. - * ####Example - * query.find() - * query.find({ name: 'Burning Lights' }) - */; - _proto.find = function find(criteria) { - if (canMerge(criteria)) { - this.merge(criteria); - } - return this; - } - - /** - * Make sure _path is set. - * - * @param {String} method - */; - _proto._ensurePath = function _ensurePath(method) { - if (!this._path) { - throw (0, _rxError.newRxError)('MQ5', { - method - }); - } - }; - _proto.toJSON = function toJSON() { - var query = { - selector: this._conditions - }; - if (this.options.skip) { - query.skip = this.options.skip; - } - if (this.options.limit) { - query.limit = this.options.limit; - } - if (this.options.sort) { - query.sort = mQuerySortToRxDBSort(this.options.sort); - } - return { - query, - path: this._path - }; - }; - return NoSqlQueryBuilderClass; -}(); -function mQuerySortToRxDBSort(sort) { - return Object.entries(sort).map(([k, v]) => { - var direction = v === 1 ? 
'asc' : 'desc'; - var part = { - [k]: direction - }; - return part; - }); -} - -/** - * Because some prototype-methods are generated, - * we have to define the type of NoSqlQueryBuilder here - */ - -/** - * limit, skip, maxScan, batchSize, comment - * - * Sets these associated options. - * - * query.comment('feed query'); - */ -var OTHER_MANGO_ATTRIBUTES = exports.OTHER_MANGO_ATTRIBUTES = ['limit', 'skip', 'maxScan', 'batchSize', 'comment']; -OTHER_MANGO_ATTRIBUTES.forEach(function (method) { - NoSqlQueryBuilderClass.prototype[method] = function (v) { - this.options[method] = v; - return this; - }; -}); - -/** - * gt, gte, lt, lte, ne, in, nin, all, regex, size, maxDistance - * - * Thing.where('type').nin(array) - */ -var OTHER_MANGO_OPERATORS = exports.OTHER_MANGO_OPERATORS = ['gt', 'gte', 'lt', 'lte', 'ne', 'in', 'nin', 'all', 'regex', 'size']; -OTHER_MANGO_OPERATORS.forEach(function ($conditional) { - NoSqlQueryBuilderClass.prototype[$conditional] = function () { - var path; - var val; - if (1 === arguments.length) { - this._ensurePath($conditional); - val = arguments[0]; - path = this._path; - } else { - val = arguments[1]; - path = arguments[0]; - } - var conds = this._conditions[path] === null || typeof this._conditions[path] === 'object' ? 
this._conditions[path] : this._conditions[path] = {}; - if ($conditional === 'regex') { - if (val instanceof RegExp) { - throw (0, _rxError.newRxError)('QU16', { - field: path, - query: this._conditions - }); - } - if (typeof val === 'string') { - conds['$' + $conditional] = val; - } else { - conds['$' + $conditional] = val.$regex; - if (val.$options) { - conds.$options = val.$options; - } - } - } else { - conds['$' + $conditional] = val; - } - return this; - }; -}); -function push(opts, field, value) { - if (Array.isArray(opts.sort)) { - throw (0, _rxError.newRxTypeError)('MQ6', { - opts, - field, - value - }); - } - if (value && value.$meta) { - var sort = opts.sort || (opts.sort = {}); - sort[field] = { - $meta: value.$meta - }; - return; - } - var val = String(value || 1).toLowerCase(); - if (!/^(?:ascending|asc|descending|desc|1|-1)$/.test(val)) { - if (Array.isArray(value)) value = '[' + value + ']'; - throw (0, _rxError.newRxTypeError)('MQ7', { - field, - value - }); - } - // store `sort` in a sane format - var s = opts.sort || (opts.sort = {}); - var valueStr = value.toString().replace('asc', '1').replace('ascending', '1').replace('desc', '-1').replace('descending', '-1'); - s[field] = parseInt(valueStr, 10); -} -function _pushArr(opts, field, value) { - opts.sort = opts.sort || []; - if (!Array.isArray(opts.sort)) { - throw (0, _rxError.newRxTypeError)('MQ8', { - opts, - field, - value - }); - } - - /* const valueStr = value.toString() - .replace('asc', '1') - .replace('ascending', '1') - .replace('desc', '-1') - .replace('descending', '-1');*/ - opts.sort.push([field, value]); -} - -/** - * Determines if `conds` can be merged using `mquery().merge()` - */ -function canMerge(conds) { - return conds instanceof NoSqlQueryBuilderClass || (0, _mqueryUtils.isObject)(conds); -} -function createQueryBuilder(query, path) { - return new NoSqlQueryBuilderClass(query, path); -} -//# sourceMappingURL=nosql-query-builder.js.map \ No newline at end of file diff --git 
a/dist/cjs/plugins/query-builder/mquery/nosql-query-builder.js.map b/dist/cjs/plugins/query-builder/mquery/nosql-query-builder.js.map deleted file mode 100644 index d9d5ec3ab49..00000000000 --- a/dist/cjs/plugins/query-builder/mquery/nosql-query-builder.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"nosql-query-builder.js","names":["_mqueryUtils","require","_rxError","NoSqlQueryBuilderClass","exports","mangoQuery","_path","options","_conditions","_fields","queryBuilder","selector","find","limit","skip","sort","forEach","s","_proto","prototype","where","_val","arguments","length","type","Array","isArray","merge","newRxTypeError","path","equals","val","_ensurePath","eq","or","array","$or","push","apply","nor","$nor","and","$and","mod","slice","conds","$mod","exists","$exists","elemMatch","_criteria","fn","criteria","isObject","$elemMatch","arg","len","i","_pushArr","split","field","ascend","substring","keys","Object","args","source","canMerge","_distinct","method","newRxError","toJSON","query","mQuerySortToRxDBSort","entries","map","k","v","direction","part","OTHER_MANGO_ATTRIBUTES","OTHER_MANGO_OPERATORS","$conditional","RegExp","$regex","$options","opts","value","$meta","String","toLowerCase","test","valueStr","toString","replace","parseInt","createQueryBuilder"],"sources":["../../../../../src/plugins/query-builder/mquery/nosql-query-builder.ts"],"sourcesContent":["/**\n * this is based on\n * @link https://github.com/aheckmann/mquery/blob/master/lib/mquery.js\n */\nimport {\n isObject,\n merge\n} from './mquery-utils.ts';\nimport {\n newRxTypeError,\n newRxError\n} from '../../../rx-error.ts';\nimport type {\n MangoQuery,\n MangoQuerySelector,\n MangoQuerySortPart,\n MangoQuerySortDirection\n} from '../../../types/index.d.ts';\n\n\ndeclare type MQueryOptions = {\n limit?: number;\n skip?: number;\n sort?: any;\n};\n\nexport class NoSqlQueryBuilderClass {\n\n public options: MQueryOptions = {};\n public _conditions: MangoQuerySelector = {};\n public 
_fields: any = {};\n private _distinct: any;\n\n /**\n * MQuery constructor used for building queries.\n *\n * ####Example:\n * var query = new MQuery({ name: 'mquery' });\n * query.where('age').gte(21).exec(callback);\n *\n */\n constructor(\n mangoQuery?: MangoQuery,\n public _path?: any\n ) {\n if (mangoQuery) {\n const queryBuilder: NoSqlQueryBuilder = this as any;\n\n if (mangoQuery.selector) {\n queryBuilder.find(mangoQuery.selector);\n }\n if (mangoQuery.limit) {\n queryBuilder.limit(mangoQuery.limit);\n }\n if (mangoQuery.skip) {\n queryBuilder.skip(mangoQuery.skip);\n }\n if (mangoQuery.sort) {\n mangoQuery.sort.forEach(s => queryBuilder.sort(s));\n }\n }\n }\n\n /**\n * Specifies a `path` for use with chaining.\n */\n where(_path: string, _val?: MangoQuerySelector): NoSqlQueryBuilder {\n if (!arguments.length) return this as any;\n const type = typeof arguments[0];\n if ('string' === type) {\n this._path = arguments[0];\n if (2 === arguments.length) {\n (this._conditions as any)[this._path] = arguments[1];\n }\n return this as any;\n }\n\n if ('object' === type && !Array.isArray(arguments[0])) {\n return this.merge(arguments[0]);\n }\n\n throw newRxTypeError('MQ1', {\n path: arguments[0]\n });\n }\n\n /**\n * Specifies the complementary comparison value for paths specified with `where()`\n * ####Example\n * User.where('age').equals(49);\n */\n equals(val: any): NoSqlQueryBuilder {\n this._ensurePath('equals');\n const path = this._path;\n (this._conditions as any)[path] = val;\n return this as any;\n }\n\n /**\n * Specifies the complementary comparison value for paths specified with `where()`\n * This is alias of `equals`\n */\n eq(val: any): NoSqlQueryBuilder {\n this._ensurePath('eq');\n const path = this._path;\n (this._conditions as any)[path] = val;\n return this as any;\n }\n\n /**\n * Specifies arguments for an `$or` condition.\n * ####Example\n * query.or([{ color: 'red' }, { status: 'emergency' }])\n */\n or(array: any[]): NoSqlQueryBuilder {\n 
const or = this._conditions.$or || (this._conditions.$or = []);\n if (!Array.isArray(array)) array = [array];\n or.push.apply(or, array);\n return this as any;\n }\n\n /**\n * Specifies arguments for a `$nor` condition.\n * ####Example\n * query.nor([{ color: 'green' }, { status: 'ok' }])\n */\n nor(array: any[]): NoSqlQueryBuilder {\n const nor = this._conditions.$nor || (this._conditions.$nor = []);\n if (!Array.isArray(array)) array = [array];\n nor.push.apply(nor, array);\n return this as any;\n }\n\n /**\n * Specifies arguments for a `$and` condition.\n * ####Example\n * query.and([{ color: 'green' }, { status: 'ok' }])\n * @see $and http://docs.mongodb.org/manual/reference/operator/and/\n */\n and(array: any[]): NoSqlQueryBuilder {\n const and = this._conditions.$and || (this._conditions.$and = []);\n if (!Array.isArray(array)) array = [array];\n and.push.apply(and, array);\n return this as any;\n }\n\n /**\n * Specifies a `$mod` condition\n */\n mod(_path: string, _val: number): NoSqlQueryBuilder {\n let val;\n let path;\n\n if (1 === arguments.length) {\n this._ensurePath('mod');\n val = arguments[0];\n path = this._path;\n } else if (2 === arguments.length && !Array.isArray(arguments[1])) {\n this._ensurePath('mod');\n val = (arguments as any).slice();\n path = this._path;\n } else if (3 === arguments.length) {\n val = (arguments as any).slice(1);\n path = arguments[0];\n } else {\n val = arguments[1];\n path = arguments[0];\n }\n\n const conds = (this._conditions as any)[path] || ((this._conditions as any)[path] = {});\n conds.$mod = val;\n return this as any;\n }\n\n /**\n * Specifies an `$exists` condition\n * ####Example\n * // { name: { $exists: true }}\n * Thing.where('name').exists()\n * Thing.where('name').exists(true)\n * Thing.find().exists('name')\n */\n exists(_path: string, _val: number): NoSqlQueryBuilder {\n let path;\n let val;\n if (0 === arguments.length) {\n this._ensurePath('exists');\n path = this._path;\n val = true;\n } else if (1 
=== arguments.length) {\n if ('boolean' === typeof arguments[0]) {\n this._ensurePath('exists');\n path = this._path;\n val = arguments[0];\n } else {\n path = arguments[0];\n val = true;\n }\n } else if (2 === arguments.length) {\n path = arguments[0];\n val = arguments[1];\n }\n\n const conds = (this._conditions as any)[path] || ((this._conditions as any)[path] = {});\n conds.$exists = val;\n return this as any;\n }\n\n /**\n * Specifies an `$elemMatch` condition\n * ####Example\n * query.elemMatch('comment', { author: 'autobot', votes: {$gte: 5}})\n * query.where('comment').elemMatch({ author: 'autobot', votes: {$gte: 5}})\n * query.elemMatch('comment', function (elem) {\n * elem.where('author').equals('autobot');\n * elem.where('votes').gte(5);\n * })\n * query.where('comment').elemMatch(function (elem) {\n * elem.where({ author: 'autobot' });\n * elem.where('votes').gte(5);\n * })\n */\n elemMatch(_path: string, _criteria: any): NoSqlQueryBuilder {\n if (null === arguments[0])\n throw newRxTypeError('MQ2');\n\n let fn;\n let path;\n let criteria;\n\n if ('function' === typeof arguments[0]) {\n this._ensurePath('elemMatch');\n path = this._path;\n fn = arguments[0];\n } else if (isObject(arguments[0])) {\n this._ensurePath('elemMatch');\n path = this._path;\n criteria = arguments[0];\n } else if ('function' === typeof arguments[1]) {\n path = arguments[0];\n fn = arguments[1];\n } else if (arguments[1] && isObject(arguments[1])) {\n path = arguments[0];\n criteria = arguments[1];\n } else\n throw newRxTypeError('MQ2');\n\n if (fn) {\n criteria = new NoSqlQueryBuilderClass;\n fn(criteria);\n criteria = criteria._conditions;\n }\n\n const conds = (this._conditions as any)[path] || ((this._conditions as any)[path] = {});\n conds.$elemMatch = criteria;\n return this as any;\n }\n\n /**\n * Sets the sort order\n * If an object is passed, values allowed are 'asc', 'desc', 'ascending', 'descending', 1, and -1.\n * If a string is passed, it must be a space delimited 
list of path names.\n * The sort order of each path is ascending unless the path name is prefixed with `-` which will be treated as descending.\n * ####Example\n * query.sort({ field: 'asc', test: -1 });\n * query.sort('field -test');\n * query.sort([['field', 1], ['test', -1]]);\n */\n sort(arg: any): NoSqlQueryBuilder {\n if (!arg) return this as any;\n let len;\n const type = typeof arg;\n // .sort([['field', 1], ['test', -1]])\n if (Array.isArray(arg)) {\n len = arg.length;\n for (let i = 0; i < arg.length; ++i) {\n _pushArr(this.options, arg[i][0], arg[i][1]);\n }\n\n return this as any;\n }\n\n // .sort('field -test')\n if (1 === arguments.length && 'string' === type) {\n arg = arg.split(/\\s+/);\n len = arg.length;\n for (let i = 0; i < len; ++i) {\n let field = arg[i];\n if (!field) continue;\n const ascend = '-' === field[0] ? -1 : 1;\n if (ascend === -1) field = field.substring(1);\n push(this.options, field, ascend);\n }\n\n return this as any;\n }\n\n // .sort({ field: 1, test: -1 })\n if (isObject(arg)) {\n const keys = Object.keys(arg);\n keys.forEach(field => push(this.options, field, arg[field]));\n return this as any;\n }\n\n throw newRxTypeError('MQ3', {\n args: arguments\n });\n }\n\n /**\n * Merges another MQuery or conditions object into this one.\n *\n * When a MQuery is passed, conditions, field selection and options are merged.\n *\n */\n merge(source: any): NoSqlQueryBuilder {\n if (!source) {\n return this as any;\n }\n\n if (!canMerge(source)) {\n throw newRxTypeError('MQ4', {\n source\n });\n }\n\n if (source instanceof NoSqlQueryBuilderClass) {\n // if source has a feature, apply it to ourselves\n\n if (source._conditions)\n merge(this._conditions, source._conditions);\n\n if (source._fields) {\n if (!this._fields) this._fields = {};\n merge(this._fields, source._fields);\n }\n\n if (source.options) {\n if (!this.options) this.options = {};\n merge(this.options, source.options);\n }\n\n if (source._distinct)\n this._distinct = 
source._distinct;\n\n return this as any;\n }\n\n // plain object\n merge(this._conditions, source);\n\n return this as any;\n }\n\n /**\n * Finds documents.\n * ####Example\n * query.find()\n * query.find({ name: 'Burning Lights' })\n */\n find(criteria: any): NoSqlQueryBuilder {\n if (canMerge(criteria)) {\n this.merge(criteria);\n }\n\n return this as any;\n }\n\n /**\n * Make sure _path is set.\n *\n * @param {String} method\n */\n _ensurePath(method: any) {\n if (!this._path) {\n throw newRxError('MQ5', {\n method\n });\n }\n }\n\n toJSON(): {\n query: MangoQuery;\n path?: string;\n } {\n const query: MangoQuery = {\n selector: this._conditions,\n };\n\n if (this.options.skip) {\n query.skip = this.options.skip;\n }\n if (this.options.limit) {\n query.limit = this.options.limit;\n }\n if (this.options.sort) {\n query.sort = mQuerySortToRxDBSort(this.options.sort);\n }\n\n return {\n query,\n path: this._path\n };\n }\n}\n\nexport function mQuerySortToRxDBSort(\n sort: { [k: string]: 1 | -1; }\n): MangoQuerySortPart[] {\n return Object.entries(sort).map(([k, v]) => {\n const direction: MangoQuerySortDirection = v === 1 ? 
'asc' : 'desc';\n const part: MangoQuerySortPart = { [k]: direction } as any;\n return part;\n });\n}\n\n/**\n * Because some prototype-methods are generated,\n * we have to define the type of NoSqlQueryBuilder here\n */\n\nexport interface NoSqlQueryBuilder extends NoSqlQueryBuilderClass {\n maxScan: ReturnSelfNumberFunction;\n batchSize: ReturnSelfNumberFunction;\n limit: ReturnSelfNumberFunction;\n skip: ReturnSelfNumberFunction;\n comment: ReturnSelfFunction;\n\n gt: ReturnSelfFunction;\n gte: ReturnSelfFunction;\n lt: ReturnSelfFunction;\n lte: ReturnSelfFunction;\n ne: ReturnSelfFunction;\n in: ReturnSelfFunction;\n nin: ReturnSelfFunction;\n all: ReturnSelfFunction;\n regex: ReturnSelfFunction;\n size: ReturnSelfFunction;\n\n}\n\ndeclare type ReturnSelfFunction = (v: any) => NoSqlQueryBuilder;\ndeclare type ReturnSelfNumberFunction = (v: number | null) => NoSqlQueryBuilder;\n\n/**\n * limit, skip, maxScan, batchSize, comment\n *\n * Sets these associated options.\n *\n * query.comment('feed query');\n */\nexport const OTHER_MANGO_ATTRIBUTES = ['limit', 'skip', 'maxScan', 'batchSize', 'comment'];\nOTHER_MANGO_ATTRIBUTES.forEach(function (method) {\n (NoSqlQueryBuilderClass.prototype as any)[method] = function (v: any) {\n this.options[method] = v;\n return this;\n };\n});\n\n\n/**\n * gt, gte, lt, lte, ne, in, nin, all, regex, size, maxDistance\n *\n * Thing.where('type').nin(array)\n */\nexport const OTHER_MANGO_OPERATORS = [\n 'gt', 'gte', 'lt', 'lte', 'ne',\n 'in', 'nin', 'all', 'regex', 'size'\n];\nOTHER_MANGO_OPERATORS.forEach(function ($conditional) {\n (NoSqlQueryBuilderClass.prototype as any)[$conditional] = function () {\n let path;\n let val;\n\n if (1 === arguments.length) {\n this._ensurePath($conditional);\n val = arguments[0];\n path = this._path;\n } else {\n val = arguments[1];\n path = arguments[0];\n }\n\n const conds = this._conditions[path] === null || typeof this._conditions[path] === 'object' ?\n this._conditions[path] :\n 
(this._conditions[path] = {});\n\n\n\n if ($conditional === 'regex') {\n if (val instanceof RegExp) {\n throw newRxError('QU16', {\n field: path,\n query: this._conditions,\n });\n }\n if (typeof val === 'string') {\n conds['$' + $conditional] = val;\n } else {\n conds['$' + $conditional] = val.$regex;\n if (val.$options) {\n conds.$options = val.$options;\n }\n }\n } else {\n conds['$' + $conditional] = val;\n }\n\n return this;\n };\n});\n\n\nfunction push(opts: any, field: string, value: any) {\n if (Array.isArray(opts.sort)) {\n throw newRxTypeError('MQ6', {\n opts,\n field,\n value\n });\n }\n\n if (value && value.$meta) {\n const sort = opts.sort || (opts.sort = {});\n sort[field] = {\n $meta: value.$meta\n };\n return;\n }\n\n const val = String(value || 1).toLowerCase();\n if (!/^(?:ascending|asc|descending|desc|1|-1)$/.test(val)) {\n if (Array.isArray(value)) value = '[' + value + ']';\n throw newRxTypeError('MQ7', {\n field,\n value\n });\n }\n // store `sort` in a sane format\n const s = opts.sort || (opts.sort = {});\n const valueStr = value.toString()\n .replace('asc', '1')\n .replace('ascending', '1')\n .replace('desc', '-1')\n .replace('descending', '-1');\n s[field] = parseInt(valueStr, 10);\n}\n\nfunction _pushArr(opts: any, field: string, value: any) {\n opts.sort = opts.sort || [];\n if (!Array.isArray(opts.sort)) {\n throw newRxTypeError('MQ8', {\n opts,\n field,\n value\n });\n }\n\n /* const valueStr = value.toString()\n .replace('asc', '1')\n .replace('ascending', '1')\n .replace('desc', '-1')\n .replace('descending', '-1');*/\n opts.sort.push([field, value]);\n}\n\n\n/**\n * Determines if `conds` can be merged using `mquery().merge()`\n */\nexport function canMerge(conds: any): boolean {\n return conds instanceof NoSqlQueryBuilderClass || isObject(conds);\n}\n\n\nexport function createQueryBuilder(query?: MangoQuery, path?: any): NoSqlQueryBuilder {\n return new NoSqlQueryBuilderClass(query, path) as 
NoSqlQueryBuilder;\n}\n"],"mappings":";;;;;;;;;AAIA,IAAAA,YAAA,GAAAC,OAAA;AAIA,IAAAC,QAAA,GAAAD,OAAA;AARA;AACA;AACA;AACA;AAHA,IA0BaE,sBAAsB,GAAAC,OAAA,CAAAD,sBAAA;EAO/B;AACJ;AACA;AACA;AACA;AACA;AACA;AACA;EACI,SAAAA,uBACIE,UAAgC,EACzBC,KAAW,EACpB;IAAA,KAhBKC,OAAO,GAAkB,CAAC,CAAC;IAAA,KAC3BC,WAAW,GAAgC,CAAC,CAAC;IAAA,KAC7CC,OAAO,GAAQ,CAAC,CAAC;IAAA,KAabH,KAAW,GAAXA,KAAW;IAElB,IAAID,UAAU,EAAE;MACZ,IAAMK,YAAwC,GAAG,IAAW;MAE5D,IAAIL,UAAU,CAACM,QAAQ,EAAE;QACrBD,YAAY,CAACE,IAAI,CAACP,UAAU,CAACM,QAAQ,CAAC;MAC1C;MACA,IAAIN,UAAU,CAACQ,KAAK,EAAE;QAClBH,YAAY,CAACG,KAAK,CAACR,UAAU,CAACQ,KAAK,CAAC;MACxC;MACA,IAAIR,UAAU,CAACS,IAAI,EAAE;QACjBJ,YAAY,CAACI,IAAI,CAACT,UAAU,CAACS,IAAI,CAAC;MACtC;MACA,IAAIT,UAAU,CAACU,IAAI,EAAE;QACjBV,UAAU,CAACU,IAAI,CAACC,OAAO,CAACC,CAAC,IAAIP,YAAY,CAACK,IAAI,CAACE,CAAC,CAAC,CAAC;MACtD;IACJ;EACJ;;EAEA;AACJ;AACA;EAFI,IAAAC,MAAA,GAAAf,sBAAA,CAAAgB,SAAA;EAAAD,MAAA,CAGAE,KAAK,GAAL,SAAAA,MAAMd,KAAa,EAAEe,IAAkC,EAA8B;IACjF,IAAI,CAACC,SAAS,CAACC,MAAM,EAAE,OAAO,IAAI;IAClC,IAAMC,IAAI,GAAG,OAAOF,SAAS,CAAC,CAAC,CAAC;IAChC,IAAI,QAAQ,KAAKE,IAAI,EAAE;MACnB,IAAI,CAAClB,KAAK,GAAGgB,SAAS,CAAC,CAAC,CAAC;MACzB,IAAI,CAAC,KAAKA,SAAS,CAACC,MAAM,EAAE;QACvB,IAAI,CAACf,WAAW,CAAS,IAAI,CAACF,KAAK,CAAC,GAAGgB,SAAS,CAAC,CAAC,CAAC;MACxD;MACA,OAAO,IAAI;IACf;IAEA,IAAI,QAAQ,KAAKE,IAAI,IAAI,CAACC,KAAK,CAACC,OAAO,CAACJ,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;MACnD,OAAO,IAAI,CAACK,KAAK,CAACL,SAAS,CAAC,CAAC,CAAC,CAAC;IACnC;IAEA,MAAM,IAAAM,uBAAc,EAAC,KAAK,EAAE;MACxBC,IAAI,EAAEP,SAAS,CAAC,CAAC;IACrB,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA;AACA,KAJI;EAAAJ,MAAA,CAKAY,MAAM,GAAN,SAAAA,OAAOC,GAAQ,EAA8B;IACzC,IAAI,CAACC,WAAW,CAAC,QAAQ,CAAC;IAC1B,IAAMH,IAAI,GAAG,IAAI,CAACvB,KAAK;IACtB,IAAI,CAACE,WAAW,CAASqB,IAAI,CAAC,GAAGE,GAAG;IACrC,OAAO,IAAI;EACf;;EAEA;AACJ;AACA;AACA,KAHI;EAAAb,MAAA,CAIAe,EAAE,GAAF,SAAAA,GAAGF,GAAQ,EAA8B;IACrC,IAAI,CAACC,WAAW,CAAC,IAAI,CAAC;IACtB,IAAMH,IAAI,GAAG,IAAI,CAACvB,KAAK;IACtB,IAAI,CAACE,WAAW,CAASqB,IAAI,CAAC,GAAGE,GAAG;IACrC,OAAO,IAAI;EACf;;EAEA;AACJ;AACA;AACA;AACA,KAJI;EAAAb,MAAA,CA
KAgB,EAAE,GAAF,SAAAA,GAAGC,KAAY,EAA8B;IACzC,IAAMD,EAAE,GAAG,IAAI,CAAC1B,WAAW,CAAC4B,GAAG,KAAK,IAAI,CAAC5B,WAAW,CAAC4B,GAAG,GAAG,EAAE,CAAC;IAC9D,IAAI,CAACX,KAAK,CAACC,OAAO,CAACS,KAAK,CAAC,EAAEA,KAAK,GAAG,CAACA,KAAK,CAAC;IAC1CD,EAAE,CAACG,IAAI,CAACC,KAAK,CAACJ,EAAE,EAAEC,KAAK,CAAC;IACxB,OAAO,IAAI;EACf;;EAEA;AACJ;AACA;AACA;AACA,KAJI;EAAAjB,MAAA,CAKAqB,GAAG,GAAH,SAAAA,IAAIJ,KAAY,EAA8B;IAC1C,IAAMI,GAAG,GAAG,IAAI,CAAC/B,WAAW,CAACgC,IAAI,KAAK,IAAI,CAAChC,WAAW,CAACgC,IAAI,GAAG,EAAE,CAAC;IACjE,IAAI,CAACf,KAAK,CAACC,OAAO,CAACS,KAAK,CAAC,EAAEA,KAAK,GAAG,CAACA,KAAK,CAAC;IAC1CI,GAAG,CAACF,IAAI,CAACC,KAAK,CAACC,GAAG,EAAEJ,KAAK,CAAC;IAC1B,OAAO,IAAI;EACf;;EAEA;AACJ;AACA;AACA;AACA;AACA,KALI;EAAAjB,MAAA,CAMAuB,GAAG,GAAH,SAAAA,IAAIN,KAAY,EAA8B;IAC1C,IAAMM,GAAG,GAAG,IAAI,CAACjC,WAAW,CAACkC,IAAI,KAAK,IAAI,CAAClC,WAAW,CAACkC,IAAI,GAAG,EAAE,CAAC;IACjE,IAAI,CAACjB,KAAK,CAACC,OAAO,CAACS,KAAK,CAAC,EAAEA,KAAK,GAAG,CAACA,KAAK,CAAC;IAC1CM,GAAG,CAACJ,IAAI,CAACC,KAAK,CAACG,GAAG,EAAEN,KAAK,CAAC;IAC1B,OAAO,IAAI;EACf;;EAEA;AACJ;AACA,KAFI;EAAAjB,MAAA,CAGAyB,GAAG,GAAH,SAAAA,IAAIrC,KAAa,EAAEe,IAAY,EAA8B;IACzD,IAAIU,GAAG;IACP,IAAIF,IAAI;IAER,IAAI,CAAC,KAAKP,SAAS,CAACC,MAAM,EAAE;MACxB,IAAI,CAACS,WAAW,CAAC,KAAK,CAAC;MACvBD,GAAG,GAAGT,SAAS,CAAC,CAAC,CAAC;MAClBO,IAAI,GAAG,IAAI,CAACvB,KAAK;IACrB,CAAC,MAAM,IAAI,CAAC,KAAKgB,SAAS,CAACC,MAAM,IAAI,CAACE,KAAK,CAACC,OAAO,CAACJ,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;MAC/D,IAAI,CAACU,WAAW,CAAC,KAAK,CAAC;MACvBD,GAAG,GAAIT,SAAS,CAASsB,KAAK,CAAC,CAAC;MAChCf,IAAI,GAAG,IAAI,CAACvB,KAAK;IACrB,CAAC,MAAM,IAAI,CAAC,KAAKgB,SAAS,CAACC,MAAM,EAAE;MAC/BQ,GAAG,GAAIT,SAAS,CAASsB,KAAK,CAAC,CAAC,CAAC;MACjCf,IAAI,GAAGP,SAAS,CAAC,CAAC,CAAC;IACvB,CAAC,MAAM;MACHS,GAAG,GAAGT,SAAS,CAAC,CAAC,CAAC;MAClBO,IAAI,GAAGP,SAAS,CAAC,CAAC,CAAC;IACvB;IAEA,IAAMuB,KAAK,GAAI,IAAI,CAACrC,WAAW,CAASqB,IAAI,CAAC,KAAM,IAAI,CAACrB,WAAW,CAASqB,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;IACvFgB,KAAK,CAACC,IAAI,GAAGf,GAAG;IAChB,OAAO,IAAI;EACf;;EAEA;AACJ;AACA;AACA;AACA;AACA;AACA;AACA,KAPI;EAAAb,MAAA,CAQA6B,MAAM,GAAN,SAAAA,OAAOzC,KAAa,EA
AEe,IAAY,EAA8B;IAC5D,IAAIQ,IAAI;IACR,IAAIE,GAAG;IACP,IAAI,CAAC,KAAKT,SAAS,CAACC,MAAM,EAAE;MACxB,IAAI,CAACS,WAAW,CAAC,QAAQ,CAAC;MAC1BH,IAAI,GAAG,IAAI,CAACvB,KAAK;MACjByB,GAAG,GAAG,IAAI;IACd,CAAC,MAAM,IAAI,CAAC,KAAKT,SAAS,CAACC,MAAM,EAAE;MAC/B,IAAI,SAAS,KAAK,OAAOD,SAAS,CAAC,CAAC,CAAC,EAAE;QACnC,IAAI,CAACU,WAAW,CAAC,QAAQ,CAAC;QAC1BH,IAAI,GAAG,IAAI,CAACvB,KAAK;QACjByB,GAAG,GAAGT,SAAS,CAAC,CAAC,CAAC;MACtB,CAAC,MAAM;QACHO,IAAI,GAAGP,SAAS,CAAC,CAAC,CAAC;QACnBS,GAAG,GAAG,IAAI;MACd;IACJ,CAAC,MAAM,IAAI,CAAC,KAAKT,SAAS,CAACC,MAAM,EAAE;MAC/BM,IAAI,GAAGP,SAAS,CAAC,CAAC,CAAC;MACnBS,GAAG,GAAGT,SAAS,CAAC,CAAC,CAAC;IACtB;IAEA,IAAMuB,KAAK,GAAI,IAAI,CAACrC,WAAW,CAASqB,IAAI,CAAC,KAAM,IAAI,CAACrB,WAAW,CAASqB,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;IACvFgB,KAAK,CAACG,OAAO,GAAGjB,GAAG;IACnB,OAAO,IAAI;EACf;;EAEA;AACJ;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,KAbI;EAAAb,MAAA,CAcA+B,SAAS,GAAT,SAAAA,UAAU3C,KAAa,EAAE4C,SAAc,EAA8B;IACjE,IAAI,IAAI,KAAK5B,SAAS,CAAC,CAAC,CAAC,EACrB,MAAM,IAAAM,uBAAc,EAAC,KAAK,CAAC;IAE/B,IAAIuB,EAAE;IACN,IAAItB,IAAI;IACR,IAAIuB,QAAQ;IAEZ,IAAI,UAAU,KAAK,OAAO9B,SAAS,CAAC,CAAC,CAAC,EAAE;MACpC,IAAI,CAACU,WAAW,CAAC,WAAW,CAAC;MAC7BH,IAAI,GAAG,IAAI,CAACvB,KAAK;MACjB6C,EAAE,GAAG7B,SAAS,CAAC,CAAC,CAAC;IACrB,CAAC,MAAM,IAAI,IAAA+B,qBAAQ,EAAC/B,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;MAC/B,IAAI,CAACU,WAAW,CAAC,WAAW,CAAC;MAC7BH,IAAI,GAAG,IAAI,CAACvB,KAAK;MACjB8C,QAAQ,GAAG9B,SAAS,CAAC,CAAC,CAAC;IAC3B,CAAC,MAAM,IAAI,UAAU,KAAK,OAAOA,SAAS,CAAC,CAAC,CAAC,EAAE;MAC3CO,IAAI,GAAGP,SAAS,CAAC,CAAC,CAAC;MACnB6B,EAAE,GAAG7B,SAAS,CAAC,CAAC,CAAC;IACrB,CAAC,MAAM,IAAIA,SAAS,CAAC,CAAC,CAAC,IAAI,IAAA+B,qBAAQ,EAAC/B,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;MAC/CO,IAAI,GAAGP,SAAS,CAAC,CAAC,CAAC;MACnB8B,QAAQ,GAAG9B,SAAS,CAAC,CAAC,CAAC;IAC3B,CAAC,MACG,MAAM,IAAAM,uBAAc,EAAC,KAAK,CAAC;IAE/B,IAAIuB,EAAE,EAAE;MACJC,QAAQ,GAAG,IAAIjD,sBAAsB,CAAD,CAAC;MACrCgD,EAAE,CAACC,QAAQ,CAAC;MACZA,QAAQ,GAAGA,QAAQ,CAAC5C,WAAW;IACnC;IAEA,IAAMqC,KAAK,GAAI,IAAI,CAACrC,WAAW,CAASqB,IAAI,CAAC,KAAM,IAAI,CAACrB,WAAW,CAASqB,IAAI,CAAC,GAAG,CAA
C,CAAC,CAAC;IACvFgB,KAAK,CAACS,UAAU,GAAGF,QAAQ;IAC3B,OAAO,IAAI;EACf;;EAEA;AACJ;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,KATI;EAAAlC,MAAA,CAUAH,IAAI,GAAJ,SAAAA,KAAKwC,GAAQ,EAA8B;IACvC,IAAI,CAACA,GAAG,EAAE,OAAO,IAAI;IACrB,IAAIC,GAAG;IACP,IAAMhC,IAAI,GAAG,OAAO+B,GAAG;IACvB;IACA,IAAI9B,KAAK,CAACC,OAAO,CAAC6B,GAAG,CAAC,EAAE;MACpBC,GAAG,GAAGD,GAAG,CAAChC,MAAM;MAChB,KAAK,IAAIkC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGF,GAAG,CAAChC,MAAM,EAAE,EAAEkC,CAAC,EAAE;QACjCC,QAAQ,CAAC,IAAI,CAACnD,OAAO,EAAEgD,GAAG,CAACE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAEF,GAAG,CAACE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;MAChD;MAEA,OAAO,IAAI;IACf;;IAEA;IACA,IAAI,CAAC,KAAKnC,SAAS,CAACC,MAAM,IAAI,QAAQ,KAAKC,IAAI,EAAE;MAC7C+B,GAAG,GAAGA,GAAG,CAACI,KAAK,CAAC,KAAK,CAAC;MACtBH,GAAG,GAAGD,GAAG,CAAChC,MAAM;MAChB,KAAK,IAAIkC,EAAC,GAAG,CAAC,EAAEA,EAAC,GAAGD,GAAG,EAAE,EAAEC,EAAC,EAAE;QAC1B,IAAIG,KAAK,GAAGL,GAAG,CAACE,EAAC,CAAC;QAClB,IAAI,CAACG,KAAK,EAAE;QACZ,IAAMC,MAAM,GAAG,GAAG,KAAKD,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC;QACxC,IAAIC,MAAM,KAAK,CAAC,CAAC,EAAED,KAAK,GAAGA,KAAK,CAACE,SAAS,CAAC,CAAC,CAAC;QAC7CzB,IAAI,CAAC,IAAI,CAAC9B,OAAO,EAAEqD,KAAK,EAAEC,MAAM,CAAC;MACrC;MAEA,OAAO,IAAI;IACf;;IAEA;IACA,IAAI,IAAAR,qBAAQ,EAACE,GAAG,CAAC,EAAE;MACf,IAAMQ,IAAI,GAAGC,MAAM,CAACD,IAAI,CAACR,GAAG,CAAC;MAC7BQ,IAAI,CAAC/C,OAAO,CAAC4C,KAAK,IAAIvB,IAAI,CAAC,IAAI,CAAC9B,OAAO,EAAEqD,KAAK,EAAEL,GAAG,CAACK,KAAK,CAAC,CAAC,CAAC;MAC5D,OAAO,IAAI;IACf;IAEA,MAAM,IAAAhC,uBAAc,EAAC,KAAK,EAAE;MACxBqC,IAAI,EAAE3C;IACV,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA;AACA;AACA,KALI;EAAAJ,MAAA,CAMAS,KAAK,GAAL,SAAAA,MAAMuC,MAAW,EAA8B;IAC3C,IAAI,CAACA,MAAM,EAAE;MACT,OAAO,IAAI;IACf;IAEA,IAAI,CAACC,QAAQ,CAACD,MAAM,CAAC,EAAE;MACnB,MAAM,IAAAtC,uBAAc,EAAC,KAAK,EAAE;QACxBsC;MACJ,CAAC,CAAC;IACN;IAEA,IAAIA,MAAM,YAAY/D,sBAAsB,EAAE;MAC1C;;MAEA,IAAI+D,MAAM,CAAC1D,WAAW,EAClB,IAAAmB,kBAAK,EAAC,IAAI,CAACnB,WAAW,EAAE0D,MAAM,CAAC1D,WAAW,CAAC;MAE/C,IAAI0D,MAAM,CAACzD,OAAO,EAAE;QAChB,IAAI,CAAC,IAAI,CAACA,OAAO,EAAE,IAAI,CAACA,OAAO,GAAG,CAAC,CAAC;QACpC,IAAAkB,kBAAK,EAAC,IAAI,CAAClB,OAAO,EAAEyD,MA
AM,CAACzD,OAAO,CAAC;MACvC;MAEA,IAAIyD,MAAM,CAAC3D,OAAO,EAAE;QAChB,IAAI,CAAC,IAAI,CAACA,OAAO,EAAE,IAAI,CAACA,OAAO,GAAG,CAAC,CAAC;QACpC,IAAAoB,kBAAK,EAAC,IAAI,CAACpB,OAAO,EAAE2D,MAAM,CAAC3D,OAAO,CAAC;MACvC;MAEA,IAAI2D,MAAM,CAACE,SAAS,EAChB,IAAI,CAACA,SAAS,GAAGF,MAAM,CAACE,SAAS;MAErC,OAAO,IAAI;IACf;;IAEA;IACA,IAAAzC,kBAAK,EAAC,IAAI,CAACnB,WAAW,EAAE0D,MAAM,CAAC;IAE/B,OAAO,IAAI;EACf;;EAEA;AACJ;AACA;AACA;AACA;AACA,KALI;EAAAhD,MAAA,CAMAN,IAAI,GAAJ,SAAAA,KAAKwC,QAAa,EAA8B;IAC5C,IAAIe,QAAQ,CAACf,QAAQ,CAAC,EAAE;MACpB,IAAI,CAACzB,KAAK,CAACyB,QAAQ,CAAC;IACxB;IAEA,OAAO,IAAI;EACf;;EAEA;AACJ;AACA;AACA;AACA,KAJI;EAAAlC,MAAA,CAKAc,WAAW,GAAX,SAAAA,YAAYqC,MAAW,EAAE;IACrB,IAAI,CAAC,IAAI,CAAC/D,KAAK,EAAE;MACb,MAAM,IAAAgE,mBAAU,EAAC,KAAK,EAAE;QACpBD;MACJ,CAAC,CAAC;IACN;EACJ,CAAC;EAAAnD,MAAA,CAEDqD,MAAM,GAAN,SAAAA,OAAA,EAGE;IACE,IAAMC,KAA0B,GAAG;MAC/B7D,QAAQ,EAAE,IAAI,CAACH;IACnB,CAAC;IAED,IAAI,IAAI,CAACD,OAAO,CAACO,IAAI,EAAE;MACnB0D,KAAK,CAAC1D,IAAI,GAAG,IAAI,CAACP,OAAO,CAACO,IAAI;IAClC;IACA,IAAI,IAAI,CAACP,OAAO,CAACM,KAAK,EAAE;MACpB2D,KAAK,CAAC3D,KAAK,GAAG,IAAI,CAACN,OAAO,CAACM,KAAK;IACpC;IACA,IAAI,IAAI,CAACN,OAAO,CAACQ,IAAI,EAAE;MACnByD,KAAK,CAACzD,IAAI,GAAG0D,oBAAoB,CAAC,IAAI,CAAClE,OAAO,CAACQ,IAAI,CAAC;IACxD;IAEA,OAAO;MACHyD,KAAK;MACL3C,IAAI,EAAE,IAAI,CAACvB;IACf,CAAC;EACL,CAAC;EAAA,OAAAH,sBAAA;AAAA;AAGE,SAASsE,oBAAoBA,CAChC1D,IAA8B,EACD;EAC7B,OAAOiD,MAAM,CAACU,OAAO,CAAC3D,IAAI,CAAC,CAAC4D,GAAG,CAAC,CAAC,CAACC,CAAC,EAAEC,CAAC,CAAC,KAAK;IACxC,IAAMC,SAAkC,GAAGD,CAAC,KAAK,CAAC,GAAG,KAAK,GAAG,MAAM;IACnE,IAAME,IAAiC,GAAG;MAAE,CAACH,CAAC,GAAGE;IAAU,CAAQ;IACnE,OAAOC,IAAI;EACf,CAAC,CAAC;AACN;;AAEA;AACA;AACA;AACA;;AAyBA;AACA;AACA;AACA;AACA;AACA;AACA;AACO,IAAMC,sBAAsB,GAAA5E,OAAA,CAAA4E,sBAAA,GAAG,CAAC,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,WAAW,EAAE,SAAS,CAAC;AAC1FA,sBAAsB,CAAChE,OAAO,CAAC,UAAUqD,MAAM,EAAE;EAC5ClE,sBAAsB,CAACgB,SAAS,CAASkD,MAAM,CAAC,GAAG,UAAUQ,CAAM,EAAE;IAClE,IAAI,CAACtE,OAAO,CAAC8D,MAAM,CAAC,GAAGQ,CAAC;IACxB,OAAO,IAAI;EACf,CAAC;AACL,CAAC,CAAC;;AAGF;AACA;AACA;AACA;AACA;AACO,IAAMI,qBAA
qB,GAAA7E,OAAA,CAAA6E,qBAAA,GAAG,CACjC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAC9B,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,CACtC;AACDA,qBAAqB,CAACjE,OAAO,CAAC,UAAUkE,YAAY,EAAE;EACjD/E,sBAAsB,CAACgB,SAAS,CAAS+D,YAAY,CAAC,GAAG,YAAY;IAClE,IAAIrD,IAAI;IACR,IAAIE,GAAG;IAEP,IAAI,CAAC,KAAKT,SAAS,CAACC,MAAM,EAAE;MACxB,IAAI,CAACS,WAAW,CAACkD,YAAY,CAAC;MAC9BnD,GAAG,GAAGT,SAAS,CAAC,CAAC,CAAC;MAClBO,IAAI,GAAG,IAAI,CAACvB,KAAK;IACrB,CAAC,MAAM;MACHyB,GAAG,GAAGT,SAAS,CAAC,CAAC,CAAC;MAClBO,IAAI,GAAGP,SAAS,CAAC,CAAC,CAAC;IACvB;IAEA,IAAMuB,KAAK,GAAG,IAAI,CAACrC,WAAW,CAACqB,IAAI,CAAC,KAAK,IAAI,IAAI,OAAO,IAAI,CAACrB,WAAW,CAACqB,IAAI,CAAC,KAAK,QAAQ,GACvF,IAAI,CAACrB,WAAW,CAACqB,IAAI,CAAC,GACrB,IAAI,CAACrB,WAAW,CAACqB,IAAI,CAAC,GAAG,CAAC,CAAE;IAIjC,IAAIqD,YAAY,KAAK,OAAO,EAAE;MAC1B,IAAInD,GAAG,YAAYoD,MAAM,EAAE;QACvB,MAAM,IAAAb,mBAAU,EAAC,MAAM,EAAE;UACrBV,KAAK,EAAE/B,IAAI;UACX2C,KAAK,EAAE,IAAI,CAAChE;QAChB,CAAC,CAAC;MACN;MACA,IAAI,OAAOuB,GAAG,KAAK,QAAQ,EAAE;QACzBc,KAAK,CAAC,GAAG,GAAGqC,YAAY,CAAC,GAAGnD,GAAG;MACnC,CAAC,MAAM;QACHc,KAAK,CAAC,GAAG,GAAGqC,YAAY,CAAC,GAAGnD,GAAG,CAACqD,MAAM;QACtC,IAAIrD,GAAG,CAACsD,QAAQ,EAAE;UACdxC,KAAK,CAACwC,QAAQ,GAAGtD,GAAG,CAACsD,QAAQ;QACjC;MACJ;IACJ,CAAC,MAAM;MACHxC,KAAK,CAAC,GAAG,GAAGqC,YAAY,CAAC,GAAGnD,GAAG;IACnC;IAEA,OAAO,IAAI;EACf,CAAC;AACL,CAAC,CAAC;AAGF,SAASM,IAAIA,CAACiD,IAAS,EAAE1B,KAAa,EAAE2B,KAAU,EAAE;EAChD,IAAI9D,KAAK,CAACC,OAAO,CAAC4D,IAAI,CAACvE,IAAI,CAAC,EAAE;IAC1B,MAAM,IAAAa,uBAAc,EAAC,KAAK,EAAE;MACxB0D,IAAI;MACJ1B,KAAK;MACL2B;IACJ,CAAC,CAAC;EACN;EAEA,IAAIA,KAAK,IAAIA,KAAK,CAACC,KAAK,EAAE;IACtB,IAAMzE,IAAI,GAAGuE,IAAI,CAACvE,IAAI,KAAKuE,IAAI,CAACvE,IAAI,GAAG,CAAC,CAAC,CAAC;IAC1CA,IAAI,CAAC6C,KAAK,CAAC,GAAG;MACV4B,KAAK,EAAED,KAAK,CAACC;IACjB,CAAC;IACD;EACJ;EAEA,IAAMzD,GAAG,GAAG0D,MAAM,CAACF,KAAK,IAAI,CAAC,CAAC,CAACG,WAAW,CAAC,CAAC;EAC5C,IAAI,CAAC,0CAA0C,CAACC,IAAI,CAAC5D,GAAG,CAAC,EAAE;IACvD,IAAIN,KAAK,CAACC,OAAO,CAAC6D,KAAK,CAAC,EAAEA,KAAK,GAAG,GAAG,GAAGA,KAAK,GAAG,GAAG;IACnD,MAAM,IAAA3D,uBAAc,EAAC,KAAK,EAAE;MACxBgC,KAAK;MACL2B;IAC
J,CAAC,CAAC;EACN;EACA;EACA,IAAMtE,CAAC,GAAGqE,IAAI,CAACvE,IAAI,KAAKuE,IAAI,CAACvE,IAAI,GAAG,CAAC,CAAC,CAAC;EACvC,IAAM6E,QAAQ,GAAGL,KAAK,CAACM,QAAQ,CAAC,CAAC,CAC5BC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CACnBA,OAAO,CAAC,WAAW,EAAE,GAAG,CAAC,CACzBA,OAAO,CAAC,MAAM,EAAE,IAAI,CAAC,CACrBA,OAAO,CAAC,YAAY,EAAE,IAAI,CAAC;EAChC7E,CAAC,CAAC2C,KAAK,CAAC,GAAGmC,QAAQ,CAACH,QAAQ,EAAE,EAAE,CAAC;AACrC;AAEA,SAASlC,QAAQA,CAAC4B,IAAS,EAAE1B,KAAa,EAAE2B,KAAU,EAAE;EACpDD,IAAI,CAACvE,IAAI,GAAGuE,IAAI,CAACvE,IAAI,IAAI,EAAE;EAC3B,IAAI,CAACU,KAAK,CAACC,OAAO,CAAC4D,IAAI,CAACvE,IAAI,CAAC,EAAE;IAC3B,MAAM,IAAAa,uBAAc,EAAC,KAAK,EAAE;MACxB0D,IAAI;MACJ1B,KAAK;MACL2B;IACJ,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA;AACA;EACID,IAAI,CAACvE,IAAI,CAACsB,IAAI,CAAC,CAACuB,KAAK,EAAE2B,KAAK,CAAC,CAAC;AAClC;;AAGA;AACA;AACA;AACO,SAASpB,QAAQA,CAACtB,KAAU,EAAW;EAC1C,OAAOA,KAAK,YAAY1C,sBAAsB,IAAI,IAAAkD,qBAAQ,EAACR,KAAK,CAAC;AACrE;AAGO,SAASmD,kBAAkBA,CAAUxB,KAA2B,EAAE3C,IAAU,EAA8B;EAC7G,OAAO,IAAI1B,sBAAsB,CAACqE,KAAK,EAAE3C,IAAI,CAAC;AAClD","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-couchdb/couchdb-helper.js b/dist/cjs/plugins/replication-couchdb/couchdb-helper.js deleted file mode 100644 index 292035b6730..00000000000 --- a/dist/cjs/plugins/replication-couchdb/couchdb-helper.js +++ /dev/null @@ -1,77 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.COUCHDB_NEW_REPLICATION_PLUGIN_IDENTITY_PREFIX = void 0; -exports.couchDBDocToRxDocData = couchDBDocToRxDocData; -exports.couchSwapIdToPrimary = couchSwapIdToPrimary; -exports.couchSwapPrimaryToId = couchSwapPrimaryToId; -exports.getDefaultFetch = getDefaultFetch; -exports.getFetchWithCouchDBAuthorization = getFetchWithCouchDBAuthorization; -exports.mergeUrlQueryParams = mergeUrlQueryParams; -var _index = require("../../plugins/utils/index.js"); -var COUCHDB_NEW_REPLICATION_PLUGIN_IDENTITY_PREFIX = exports.COUCHDB_NEW_REPLICATION_PLUGIN_IDENTITY_PREFIX = 'couchdb'; -function 
mergeUrlQueryParams(params) { - return Object.entries(params).filter(([_k, value]) => typeof value !== 'undefined').map(([key, value]) => key + '=' + value).join('&'); -} -function couchDBDocToRxDocData(primaryPath, couchDocData) { - var doc = couchSwapIdToPrimary(primaryPath, couchDocData); - - // ensure deleted flag is set. - doc._deleted = !!doc._deleted; - delete doc._rev; - return doc; -} -function couchSwapIdToPrimary(primaryKey, docData) { - if (primaryKey === '_id' || docData[primaryKey]) { - return (0, _index.flatClone)(docData); - } - docData = (0, _index.flatClone)(docData); - docData[primaryKey] = docData._id; - delete docData._id; - return docData; -} - -/** - * Swaps the primaryKey of the document - * to the _id property. - */ -function couchSwapPrimaryToId(primaryKey, docData) { - // optimisation shortcut - if (primaryKey === '_id') { - return docData; - } - var idValue = docData[primaryKey]; - var ret = (0, _index.flatClone)(docData); - delete ret[primaryKey]; - ret._id = idValue; - return ret; -} -function getDefaultFetch() { - if (typeof window === 'object' && window['fetch']) { - /** - * @link https://stackoverflow.com/a/47180009/3443137 - */ - return window.fetch.bind(window); - } else { - return fetch; - } -} - -/** - * Returns a fetch handler that contains the username and password - * in the Authorization header - */ -function getFetchWithCouchDBAuthorization(username, password) { - var ret = (url, options) => { - options = Object.assign({}, options); - if (!options.headers) { - options.headers = {}; - } - options.headers['Authorization'] = 'Basic ' + (0, _index.b64EncodeUnicode)(username + ':' + password); - return fetch(url, options); - }; - return ret; -} -//# sourceMappingURL=couchdb-helper.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-couchdb/couchdb-helper.js.map b/dist/cjs/plugins/replication-couchdb/couchdb-helper.js.map deleted file mode 100644 index db8b3f029aa..00000000000 --- 
a/dist/cjs/plugins/replication-couchdb/couchdb-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"couchdb-helper.js","names":["_index","require","COUCHDB_NEW_REPLICATION_PLUGIN_IDENTITY_PREFIX","exports","mergeUrlQueryParams","params","Object","entries","filter","_k","value","map","key","join","couchDBDocToRxDocData","primaryPath","couchDocData","doc","couchSwapIdToPrimary","_deleted","_rev","primaryKey","docData","flatClone","_id","couchSwapPrimaryToId","idValue","ret","getDefaultFetch","window","fetch","bind","getFetchWithCouchDBAuthorization","username","password","url","options","assign","headers","b64EncodeUnicode"],"sources":["../../../../src/plugins/replication-couchdb/couchdb-helper.ts"],"sourcesContent":["import type {\n RxDocumentData,\n StringKeys,\n WithDeleted\n} from '../../types/index.d.ts';\nimport { b64EncodeUnicode, flatClone } from '../../plugins/utils/index.ts';\nimport { URLQueryParams } from './couchdb-types.ts';\n\n\nexport const COUCHDB_NEW_REPLICATION_PLUGIN_IDENTITY_PREFIX = 'couchdb';\n\n\nexport function mergeUrlQueryParams(\n params: URLQueryParams\n): string {\n return Object.entries(params)\n .filter(([_k, value]) => typeof value !== 'undefined')\n .map(([key, value]) => key + '=' + value)\n .join('&');\n}\n\nexport function couchDBDocToRxDocData(\n primaryPath: string,\n couchDocData: any\n): WithDeleted {\n const doc = couchSwapIdToPrimary(primaryPath as any, couchDocData);\n\n // ensure deleted flag is set.\n doc._deleted = !!doc._deleted;\n\n delete doc._rev;\n\n return doc;\n}\n\n\nexport function couchSwapIdToPrimary(\n primaryKey: StringKeys>,\n docData: any\n): any {\n if (primaryKey === '_id' || docData[primaryKey]) {\n return flatClone(docData);\n }\n docData = flatClone(docData);\n docData[primaryKey] = docData._id;\n delete docData._id;\n\n return docData;\n}\n\n/**\n * Swaps the primaryKey of the document\n * to the _id property.\n */\nexport function couchSwapPrimaryToId(\n primaryKey: StringKeys>,\n 
docData: any\n): RxDocType & { _id: string; } {\n // optimisation shortcut\n if (primaryKey === '_id') {\n return docData;\n }\n\n const idValue = docData[primaryKey];\n const ret = flatClone(docData);\n delete ret[primaryKey];\n ret._id = idValue;\n return ret;\n}\n\n\nexport function getDefaultFetch() {\n if (\n typeof window === 'object' &&\n (window as any)['fetch']\n ) {\n /**\n * @link https://stackoverflow.com/a/47180009/3443137\n */\n return window.fetch.bind(window);\n } else {\n return fetch;\n }\n}\n\n/**\n * Returns a fetch handler that contains the username and password\n * in the Authorization header\n */\nexport function getFetchWithCouchDBAuthorization(username: string, password: string): typeof fetch {\n const ret: typeof fetch = (url, options) => {\n options = Object.assign({}, options);\n if (!options.headers) {\n options.headers = {};\n }\n (options as any).headers['Authorization'] = 'Basic ' + b64EncodeUnicode(username + ':' + password);\n return fetch(url as any, options);\n };\n return 
ret;\n}\n"],"mappings":";;;;;;;;;;;;AAKA,IAAAA,MAAA,GAAAC,OAAA;AAIO,IAAMC,8CAA8C,GAAAC,OAAA,CAAAD,8CAAA,GAAG,SAAS;AAGhE,SAASE,mBAAmBA,CAC/BC,MAAsB,EAChB;EACN,OAAOC,MAAM,CAACC,OAAO,CAACF,MAAM,CAAC,CACxBG,MAAM,CAAC,CAAC,CAACC,EAAE,EAAEC,KAAK,CAAC,KAAK,OAAOA,KAAK,KAAK,WAAW,CAAC,CACrDC,GAAG,CAAC,CAAC,CAACC,GAAG,EAAEF,KAAK,CAAC,KAAKE,GAAG,GAAG,GAAG,GAAGF,KAAK,CAAC,CACxCG,IAAI,CAAC,GAAG,CAAC;AAClB;AAEO,SAASC,qBAAqBA,CACjCC,WAAmB,EACnBC,YAAiB,EACK;EACtB,IAAMC,GAAG,GAAGC,oBAAoB,CAACH,WAAW,EAASC,YAAY,CAAC;;EAElE;EACAC,GAAG,CAACE,QAAQ,GAAG,CAAC,CAACF,GAAG,CAACE,QAAQ;EAE7B,OAAOF,GAAG,CAACG,IAAI;EAEf,OAAOH,GAAG;AACd;AAGO,SAASC,oBAAoBA,CAChCG,UAAyC,EACzCC,OAAY,EACT;EACH,IAAID,UAAU,KAAK,KAAK,IAAIC,OAAO,CAACD,UAAU,CAAC,EAAE;IAC7C,OAAO,IAAAE,gBAAS,EAACD,OAAO,CAAC;EAC7B;EACAA,OAAO,GAAG,IAAAC,gBAAS,EAACD,OAAO,CAAC;EAC5BA,OAAO,CAACD,UAAU,CAAC,GAAGC,OAAO,CAACE,GAAG;EACjC,OAAOF,OAAO,CAACE,GAAG;EAElB,OAAOF,OAAO;AAClB;;AAEA;AACA;AACA;AACA;AACO,SAASG,oBAAoBA,CAChCJ,UAAiD,EACjDC,OAAY,EACgB;EAC5B;EACA,IAAID,UAAU,KAAK,KAAK,EAAE;IACtB,OAAOC,OAAO;EAClB;EAEA,IAAMI,OAAO,GAAGJ,OAAO,CAACD,UAAU,CAAC;EACnC,IAAMM,GAAG,GAAG,IAAAJ,gBAAS,EAACD,OAAO,CAAC;EAC9B,OAAOK,GAAG,CAACN,UAAU,CAAC;EACtBM,GAAG,CAACH,GAAG,GAAGE,OAAO;EACjB,OAAOC,GAAG;AACd;AAGO,SAASC,eAAeA,CAAA,EAAG;EAC9B,IACI,OAAOC,MAAM,KAAK,QAAQ,IACzBA,MAAM,CAAS,OAAO,CAAC,EAC1B;IACE;AACR;AACA;IACQ,OAAOA,MAAM,CAACC,KAAK,CAACC,IAAI,CAACF,MAAM,CAAC;EACpC,CAAC,MAAM;IACH,OAAOC,KAAK;EAChB;AACJ;;AAEA;AACA;AACA;AACA;AACO,SAASE,gCAAgCA,CAACC,QAAgB,EAAEC,QAAgB,EAAgB;EAC/F,IAAMP,GAAiB,GAAGA,CAACQ,GAAG,EAAEC,OAAO,KAAK;IACxCA,OAAO,GAAG9B,MAAM,CAAC+B,MAAM,CAAC,CAAC,CAAC,EAAED,OAAO,CAAC;IACpC,IAAI,CAACA,OAAO,CAACE,OAAO,EAAE;MAClBF,OAAO,CAACE,OAAO,GAAG,CAAC,CAAC;IACxB;IACCF,OAAO,CAASE,OAAO,CAAC,eAAe,CAAC,GAAG,QAAQ,GAAG,IAAAC,uBAAgB,EAACN,QAAQ,GAAG,GAAG,GAAGC,QAAQ,CAAC;IAClG,OAAOJ,KAAK,CAACK,GAAG,EAASC,OAAO,CAAC;EACrC,CAAC;EACD,OAAOT,GAAG;AACd","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-couchdb/couchdb-types.js 
b/dist/cjs/plugins/replication-couchdb/couchdb-types.js deleted file mode 100644 index a06421ac32c..00000000000 --- a/dist/cjs/plugins/replication-couchdb/couchdb-types.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=couchdb-types.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-couchdb/couchdb-types.js.map b/dist/cjs/plugins/replication-couchdb/couchdb-types.js.map deleted file mode 100644 index fc2fde9f850..00000000000 --- a/dist/cjs/plugins/replication-couchdb/couchdb-types.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"couchdb-types.js","names":[],"sources":["../../../../src/plugins/replication-couchdb/couchdb-types.ts"],"sourcesContent":["import type {\n ById,\n ReplicationOptions,\n ReplicationPullOptions,\n ReplicationPushOptions\n} from '../../types/index.d.ts';\n\nexport type CouchDBCheckpointType = {\n sequence: number;\n};\n\nexport type FetchMethodType = typeof fetch;\nexport type SyncOptionsCouchDB = Omit<\n ReplicationOptions,\n 'pull' | 'push'\n> & {\n url: string;\n /**\n * Here you can set a custom fetch method\n * to use http headers or credentials when doing requests.\n */\n fetch?: FetchMethodType;\n pull?: Omit, 'handler' | 'stream$'> & {\n /**\n * Heartbeat time in milliseconds\n * for the long polling of the changestream.\n */\n heartbeat?: number;\n };\n push?: Omit, 'handler'>;\n};\n\n\nexport type URLQueryParams = ById;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-couchdb/index.js b/dist/cjs/plugins/replication-couchdb/index.js deleted file mode 100644 index 68b186666fc..00000000000 --- a/dist/cjs/plugins/replication-couchdb/index.js +++ /dev/null @@ -1,290 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _exportNames = { - RxCouchDBReplicationState: true, - replicateCouchDB: true -}; 
-exports.RxCouchDBReplicationState = void 0; -exports.replicateCouchDB = replicateCouchDB; -var _inheritsLoose2 = _interopRequireDefault(require("@babel/runtime/helpers/inheritsLoose")); -var _index = require("../../plugins/utils/index.js"); -var _index2 = require("../leader-election/index.js"); -var _index3 = require("../replication/index.js"); -var _index4 = require("../../index.js"); -var _rxjs = require("rxjs"); -var _couchdbHelper = require("./couchdb-helper.js"); -Object.keys(_couchdbHelper).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _couchdbHelper[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _couchdbHelper[key]; - } - }); -}); -var _replicationHelper = require("../replication/replication-helper.js"); -var _couchdbTypes = require("./couchdb-types.js"); -Object.keys(_couchdbTypes).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _couchdbTypes[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _couchdbTypes[key]; - } - }); -}); -/** - * This plugin can be used to sync collections with a remote CouchDB endpoint. 
- */ -var RxCouchDBReplicationState = exports.RxCouchDBReplicationState = /*#__PURE__*/function (_RxReplicationState) { - function RxCouchDBReplicationState(url, fetch, replicationIdentifier, collection, pull, push, live = true, retryTime = 1000 * 5, autoStart = true) { - var _this; - _this = _RxReplicationState.call(this, replicationIdentifier, collection, '_deleted', pull, push, live, retryTime, autoStart) || this; - _this.url = url; - _this.fetch = fetch; - _this.replicationIdentifier = replicationIdentifier; - _this.collection = collection; - _this.pull = pull; - _this.push = push; - _this.live = live; - _this.retryTime = retryTime; - _this.autoStart = autoStart; - return _this; - } - (0, _inheritsLoose2.default)(RxCouchDBReplicationState, _RxReplicationState); - return RxCouchDBReplicationState; -}(_index3.RxReplicationState); -function replicateCouchDB(options) { - var collection = options.collection; - var conflictHandler = collection.conflictHandler; - (0, _index4.addRxPlugin)(_index2.RxDBLeaderElectionPlugin); - var primaryPath = options.collection.schema.primaryPath; - if (!options.url.endsWith('/')) { - throw (0, _index4.newRxError)('RC_COUCHDB_1', { - args: { - collection: options.collection.name, - url: options.url - } - }); - } - options = (0, _index.flatClone)(options); - if (!options.url.endsWith('/')) { - options.url = options.url + '/'; - } - options.waitForLeadership = typeof options.waitForLeadership === 'undefined' ? true : options.waitForLeadership; - var pullStream$ = new _rxjs.Subject(); - var replicationPrimitivesPull; - if (options.pull) { - replicationPrimitivesPull = { - async handler(lastPulledCheckpoint, batchSize) { - /** - * @link https://docs.couchdb.org/en/3.2.2-docs/api/database/changes.html - */ - var url = options.url + '_changes?' + (0, _couchdbHelper.mergeUrlQueryParams)({ - style: 'all_docs', - feed: 'normal', - include_docs: true, - since: lastPulledCheckpoint ? 
lastPulledCheckpoint.sequence : 0, - heartbeat: options.pull && options.pull.heartbeat ? options.pull.heartbeat : 60000, - limit: batchSize, - seq_interval: batchSize - }); - var response = await replicationState.fetch(url); - var jsonResponse = await response.json(); - if (!jsonResponse.results) { - throw (0, _index4.newRxError)('RC_COUCHDB_2', { - args: { - jsonResponse - } - }); - } - var documents = jsonResponse.results.map(row => (0, _couchdbHelper.couchDBDocToRxDocData)(collection.schema.primaryPath, (0, _index.ensureNotFalsy)(row.doc))); - return { - documents, - checkpoint: { - sequence: jsonResponse.last_seq - } - }; - }, - batchSize: (0, _index.ensureNotFalsy)(options.pull).batchSize, - modifier: (0, _index.ensureNotFalsy)(options.pull).modifier, - stream$: pullStream$.asObservable(), - initialCheckpoint: options.pull.initialCheckpoint - }; - } - var replicationPrimitivesPush; - if (options.push) { - replicationPrimitivesPush = { - async handler(rows) { - var conflicts = []; - var pushRowsById = new Map(); - rows.forEach(row => { - var id = row.newDocumentState[primaryPath]; - pushRowsById.set(id, row); - }); - - /** - * First get the current master state from the remote - * to check for conflicts - */ - var docsByIdResponse = await replicationState.fetch(options.url + '_all_docs?' 
+ (0, _couchdbHelper.mergeUrlQueryParams)({}), { - method: 'POST', - headers: { - 'content-type': 'application/json' - }, - body: JSON.stringify({ - keys: rows.map(row => row.newDocumentState[primaryPath]), - include_docs: true, - deleted: 'ok' - }) - }); - var docsByIdRows = await docsByIdResponse.json(); - var nonConflictRows = []; - var remoteRevById = new Map(); - await Promise.all(docsByIdRows.rows.map(async row => { - if (!row.doc) { - nonConflictRows.push((0, _index.getFromMapOrThrow)(pushRowsById, row.key)); - return; - } - var realMasterState = (0, _couchdbHelper.couchDBDocToRxDocData)(primaryPath, row.doc); - var pushRow = (0, _index.getFromMapOrThrow)(pushRowsById, row.id); - if (pushRow.assumedMasterState && (await conflictHandler({ - realMasterState, - newDocumentState: pushRow.assumedMasterState - }, 'couchdb-push-1')).isEqual) { - remoteRevById.set(row.id, row.doc._rev); - nonConflictRows.push(pushRow); - } else { - conflicts.push(realMasterState); - } - })); - - /** - * @link https://docs.couchdb.org/en/3.2.2-docs/api/database/bulk-api.html#db-bulk-docs - */ - var url = options.url + '_bulk_docs?' 
+ (0, _couchdbHelper.mergeUrlQueryParams)({}); - var body = { - docs: nonConflictRows.map(row => { - var docId = row.newDocumentState[primaryPath]; - var sendDoc = (0, _index.flatClone)(row.newDocumentState); - if (remoteRevById.has(docId)) { - sendDoc._rev = (0, _index.getFromMapOrThrow)(remoteRevById, docId); - } - return (0, _couchdbHelper.couchSwapPrimaryToId)(collection.schema.primaryPath, sendDoc); - }) - }; - var response = await replicationState.fetch(url, { - method: 'POST', - headers: { - 'content-type': 'application/json' - }, - body: JSON.stringify(body) - }); - var responseJson = await response.json(); - - // get conflicting writes - var conflictAgainIds = []; - responseJson.forEach(writeResultRow => { - var isConflict = writeResultRow.error === 'conflict'; - if (!writeResultRow.ok && !isConflict) { - throw (0, _index4.newRxError)('SNH', { - args: { - writeResultRow - } - }); - } - if (isConflict) { - conflictAgainIds.push(writeResultRow.id); - } - }); - if (conflictAgainIds.length === 0) { - return conflicts; - } - var getConflictDocsUrl = options.url + '_all_docs?' + (0, _couchdbHelper.mergeUrlQueryParams)({ - include_docs: true, - keys: JSON.stringify(conflictAgainIds) - }); - var conflictResponse = await replicationState.fetch(getConflictDocsUrl); - var conflictResponseJson = await conflictResponse.json(); - conflictResponseJson.rows.forEach(conflictAgainRow => { - conflicts.push((0, _couchdbHelper.couchDBDocToRxDocData)(collection.schema.primaryPath, conflictAgainRow.doc)); - }); - return conflicts; - }, - batchSize: options.push.batchSize, - modifier: options.push.modifier, - initialCheckpoint: options.push.initialCheckpoint - }; - } - var replicationState = new RxCouchDBReplicationState(options.url, options.fetch ? 
options.fetch : (0, _couchdbHelper.getDefaultFetch)(), options.replicationIdentifier, collection, replicationPrimitivesPull, replicationPrimitivesPush, options.live, options.retryTime, options.autoStart); - - /** - * Use long polling to get live changes for the pull.stream$ - */ - if (options.live && options.pull) { - var startBefore = replicationState.start.bind(replicationState); - replicationState.start = () => { - var since = 'now'; - var batchSize = options.pull && options.pull.batchSize ? options.pull.batchSize : 20; - (async () => { - var lastRequestStartTime = (0, _index.now)(); - while (!replicationState.isStopped()) { - var _url = options.url + '_changes?' + (0, _couchdbHelper.mergeUrlQueryParams)({ - style: 'all_docs', - feed: 'longpoll', - since, - include_docs: true, - heartbeat: options.pull && options.pull.heartbeat ? options.pull.heartbeat : 60000, - limit: batchSize, - seq_interval: batchSize - }); - var jsonResponse = void 0; - try { - lastRequestStartTime = (0, _index.now)(); - jsonResponse = await (await replicationState.fetch(_url)).json(); - } catch (err) { - replicationState.subjects.error.next((0, _index4.newRxError)('RC_STREAM', { - args: { - url: _url - }, - error: (0, _index.errorToPlainJson)(err) - })); - if (lastRequestStartTime < (0, _index.now)() - replicationState.retryTime) { - /** - * Last request start was long ago, - * so we directly retry. - * This mostly happens on timeouts - * which are normal behavior for long polling requests. - */ - await (0, _index.promiseWait)(0); - } else { - // await next tick here otherwise we could go in to a 100% CPU blocking cycle. 
- await (0, _replicationHelper.awaitRetry)(collection, replicationState.retryTime); - } - continue; - } - var documents = jsonResponse.results.map(row => (0, _couchdbHelper.couchDBDocToRxDocData)(collection.schema.primaryPath, (0, _index.ensureNotFalsy)(row.doc))); - since = jsonResponse.last_seq; - pullStream$.next({ - documents, - checkpoint: { - sequence: jsonResponse.last_seq - } - }); - } - })(); - return startBefore(); - }; - } - (0, _index3.startReplicationOnLeaderShip)(options.waitForLeadership, replicationState); - return replicationState; -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-couchdb/index.js.map b/dist/cjs/plugins/replication-couchdb/index.js.map deleted file mode 100644 index 59b79208e75..00000000000 --- a/dist/cjs/plugins/replication-couchdb/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_index","require","_index2","_index3","_index4","_rxjs","_couchdbHelper","Object","keys","forEach","key","prototype","hasOwnProperty","call","_exportNames","exports","defineProperty","enumerable","get","_replicationHelper","_couchdbTypes","RxCouchDBReplicationState","_RxReplicationState","url","fetch","replicationIdentifier","collection","pull","push","live","retryTime","autoStart","_this","_inheritsLoose2","default","RxReplicationState","replicateCouchDB","options","conflictHandler","addRxPlugin","RxDBLeaderElectionPlugin","primaryPath","schema","endsWith","newRxError","args","name","flatClone","waitForLeadership","pullStream$","Subject","replicationPrimitivesPull","handler","lastPulledCheckpoint","batchSize","mergeUrlQueryParams","style","feed","include_docs","since","sequence","heartbeat","limit","seq_interval","response","replicationState","jsonResponse","json","results","documents","map","row","couchDBDocToRxDocData","ensureNotFalsy","doc","checkpoint","last_seq","modifier","stream$","asObservable","initialCheckpoint","replicationPrimitivesPush","rows","conflic
ts","pushRowsById","Map","id","newDocumentState","set","docsByIdResponse","method","headers","body","JSON","stringify","deleted","docsByIdRows","nonConflictRows","remoteRevById","Promise","all","getFromMapOrThrow","realMasterState","pushRow","assumedMasterState","isEqual","_rev","docs","docId","sendDoc","has","couchSwapPrimaryToId","responseJson","conflictAgainIds","writeResultRow","isConflict","error","ok","length","getConflictDocsUrl","conflictResponse","conflictResponseJson","conflictAgainRow","getDefaultFetch","startBefore","start","bind","lastRequestStartTime","now","isStopped","err","subjects","next","errorToPlainJson","promiseWait","awaitRetry","startReplicationOnLeaderShip"],"sources":["../../../../src/plugins/replication-couchdb/index.ts"],"sourcesContent":["/**\n * This plugin can be used to sync collections with a remote CouchDB endpoint.\n */\nimport {\n ensureNotFalsy,\n errorToPlainJson,\n flatClone,\n getFromMapOrThrow,\n now,\n promiseWait\n} from '../../plugins/utils/index.ts';\n\nimport { RxDBLeaderElectionPlugin } from '../leader-election/index.ts';\nimport type {\n RxCollection,\n ReplicationPullOptions,\n ReplicationPushOptions,\n RxReplicationWriteToMasterRow,\n RxReplicationPullStreamItem,\n CouchdbChangesResult,\n CouchBulkDocResultRow,\n CouchAllDocsResponse,\n RxConflictHandler\n} from '../../types/index.d.ts';\nimport {\n RxReplicationState,\n startReplicationOnLeaderShip\n} from '../replication/index.ts';\nimport {\n addRxPlugin,\n newRxError,\n WithDeleted\n} from '../../index.ts';\n\nimport { Subject } from 'rxjs';\nimport type {\n CouchDBCheckpointType,\n FetchMethodType,\n SyncOptionsCouchDB\n} from './couchdb-types.ts';\nimport {\n couchDBDocToRxDocData,\n mergeUrlQueryParams,\n couchSwapPrimaryToId,\n getDefaultFetch\n} from './couchdb-helper.ts';\nimport { awaitRetry } from '../replication/replication-helper.ts';\n\nexport * from './couchdb-helper.ts';\nexport * from './couchdb-types.ts';\n\nexport class RxCouchDBReplicationState 
extends RxReplicationState {\n constructor(\n public readonly url: string,\n public fetch: FetchMethodType,\n public readonly replicationIdentifier: string,\n public readonly collection: RxCollection,\n public readonly pull?: ReplicationPullOptions,\n public readonly push?: ReplicationPushOptions,\n public readonly live: boolean = true,\n public retryTime: number = 1000 * 5,\n public autoStart: boolean = true\n ) {\n super(\n replicationIdentifier,\n collection,\n '_deleted',\n pull,\n push,\n live,\n retryTime,\n autoStart\n );\n }\n}\n\nexport function replicateCouchDB(\n options: SyncOptionsCouchDB\n) {\n const collection = options.collection;\n const conflictHandler: RxConflictHandler = collection.conflictHandler;\n addRxPlugin(RxDBLeaderElectionPlugin);\n const primaryPath = options.collection.schema.primaryPath;\n\n if (!options.url.endsWith('/')) {\n throw newRxError('RC_COUCHDB_1', {\n args: {\n collection: options.collection.name,\n url: options.url\n }\n });\n }\n\n options = flatClone(options);\n if (!options.url.endsWith('/')) {\n options.url = options.url + '/';\n }\n options.waitForLeadership = typeof options.waitForLeadership === 'undefined' ? true : options.waitForLeadership;\n const pullStream$: Subject> = new Subject();\n let replicationPrimitivesPull: ReplicationPullOptions | undefined;\n if (options.pull) {\n replicationPrimitivesPull = {\n async handler(\n lastPulledCheckpoint: CouchDBCheckpointType | undefined,\n batchSize: number\n ) {\n /**\n * @link https://docs.couchdb.org/en/3.2.2-docs/api/database/changes.html\n */\n const url = options.url + '_changes?' + mergeUrlQueryParams({\n style: 'all_docs',\n feed: 'normal',\n include_docs: true,\n since: lastPulledCheckpoint ? lastPulledCheckpoint.sequence : 0,\n heartbeat: options.pull && options.pull.heartbeat ? 
options.pull.heartbeat : 60000,\n limit: batchSize,\n seq_interval: batchSize\n });\n\n const response = await replicationState.fetch(url);\n const jsonResponse: CouchdbChangesResult = await response.json();\n if (!jsonResponse.results) {\n throw newRxError('RC_COUCHDB_2', {\n args: { jsonResponse }\n });\n }\n const documents: WithDeleted[] = jsonResponse.results\n .map(row => couchDBDocToRxDocData(collection.schema.primaryPath, ensureNotFalsy(row.doc)));\n return {\n documents,\n checkpoint: {\n sequence: jsonResponse.last_seq\n }\n };\n },\n batchSize: ensureNotFalsy(options.pull).batchSize,\n modifier: ensureNotFalsy(options.pull).modifier,\n stream$: pullStream$.asObservable(),\n initialCheckpoint: options.pull.initialCheckpoint\n };\n }\n\n let replicationPrimitivesPush: ReplicationPushOptions | undefined;\n if (options.push) {\n replicationPrimitivesPush = {\n async handler(\n rows: RxReplicationWriteToMasterRow[]\n ) {\n const conflicts: WithDeleted[] = [];\n const pushRowsById = new Map>();\n rows.forEach(row => {\n const id = (row.newDocumentState as any)[primaryPath];\n pushRowsById.set(id, row);\n });\n\n /**\n * First get the current master state from the remote\n * to check for conflicts\n */\n const docsByIdResponse = await replicationState.fetch(\n options.url + '_all_docs?' 
+ mergeUrlQueryParams({}),\n {\n method: 'POST',\n headers: {\n 'content-type': 'application/json'\n },\n body: JSON.stringify({\n keys: rows.map(row => (row.newDocumentState as any)[primaryPath]),\n include_docs: true,\n deleted: 'ok'\n })\n }\n );\n const docsByIdRows: CouchAllDocsResponse = await docsByIdResponse.json();\n const nonConflictRows: typeof rows = [];\n const remoteRevById = new Map();\n await Promise.all(\n docsByIdRows.rows.map(async (row) => {\n if (!row.doc) {\n nonConflictRows.push(getFromMapOrThrow(pushRowsById, row.key));\n return;\n }\n const realMasterState: WithDeleted = couchDBDocToRxDocData(primaryPath, row.doc);\n const pushRow = getFromMapOrThrow(pushRowsById, row.id);\n\n if (\n pushRow.assumedMasterState &&\n (await conflictHandler({\n realMasterState,\n newDocumentState: pushRow.assumedMasterState\n }, 'couchdb-push-1')).isEqual\n ) {\n remoteRevById.set(row.id, row.doc._rev);\n nonConflictRows.push(pushRow);\n } else {\n conflicts.push(realMasterState);\n }\n })\n );\n\n /**\n * @link https://docs.couchdb.org/en/3.2.2-docs/api/database/bulk-api.html#db-bulk-docs\n */\n const url = options.url + '_bulk_docs?' 
+ mergeUrlQueryParams({});\n const body = {\n docs: nonConflictRows.map(row => {\n const docId = (row.newDocumentState as any)[primaryPath];\n const sendDoc = flatClone(row.newDocumentState);\n if (remoteRevById.has(docId)) {\n (sendDoc as any)._rev = getFromMapOrThrow(remoteRevById, docId);\n }\n return couchSwapPrimaryToId(collection.schema.primaryPath, sendDoc);\n })\n };\n\n const response = await replicationState.fetch(\n url,\n {\n method: 'POST',\n headers: {\n 'content-type': 'application/json'\n },\n body: JSON.stringify(body)\n }\n );\n const responseJson: CouchBulkDocResultRow[] = await response.json();\n\n // get conflicting writes\n const conflictAgainIds: string[] = [];\n responseJson.forEach(writeResultRow => {\n const isConflict = writeResultRow.error === 'conflict';\n if (!writeResultRow.ok && !isConflict) {\n throw newRxError('SNH', { args: { writeResultRow } });\n }\n if (isConflict) {\n conflictAgainIds.push(writeResultRow.id);\n }\n });\n\n if (conflictAgainIds.length === 0) {\n return conflicts;\n }\n\n const getConflictDocsUrl = options.url + '_all_docs?' + mergeUrlQueryParams({\n include_docs: true,\n keys: JSON.stringify(conflictAgainIds)\n });\n const conflictResponse = await replicationState.fetch(getConflictDocsUrl);\n const conflictResponseJson: CouchAllDocsResponse = await conflictResponse.json();\n conflictResponseJson.rows.forEach(conflictAgainRow => {\n conflicts.push(couchDBDocToRxDocData(collection.schema.primaryPath, conflictAgainRow.doc));\n });\n\n return conflicts;\n },\n batchSize: options.push.batchSize,\n modifier: options.push.modifier,\n initialCheckpoint: options.push.initialCheckpoint\n };\n }\n\n const replicationState = new RxCouchDBReplicationState(\n options.url,\n options.fetch ? 
options.fetch : getDefaultFetch(),\n options.replicationIdentifier,\n collection,\n replicationPrimitivesPull,\n replicationPrimitivesPush,\n options.live,\n options.retryTime,\n options.autoStart\n );\n\n /**\n * Use long polling to get live changes for the pull.stream$\n */\n if (options.live && options.pull) {\n const startBefore = replicationState.start.bind(replicationState);\n replicationState.start = () => {\n let since: string | number = 'now';\n const batchSize = options.pull && options.pull.batchSize ? options.pull.batchSize : 20;\n\n (async () => {\n let lastRequestStartTime = now();\n while (!replicationState.isStopped()) {\n const url = options.url + '_changes?' + mergeUrlQueryParams({\n style: 'all_docs',\n feed: 'longpoll',\n since,\n include_docs: true,\n heartbeat: options.pull && options.pull.heartbeat ? options.pull.heartbeat : 60000,\n limit: batchSize,\n seq_interval: batchSize\n });\n\n let jsonResponse: CouchdbChangesResult;\n try {\n lastRequestStartTime = now();\n jsonResponse = await (await replicationState.fetch(url)).json();\n } catch (err: any) {\n replicationState.subjects.error.next(\n newRxError('RC_STREAM', {\n args: { url },\n error: errorToPlainJson(err)\n })\n );\n\n if (lastRequestStartTime < (now() - replicationState.retryTime)) {\n /**\n * Last request start was long ago,\n * so we directly retry.\n * This mostly happens on timeouts\n * which are normal behavior for long polling requests.\n */\n await promiseWait(0);\n } else {\n // await next tick here otherwise we could go in to a 100% CPU blocking cycle.\n await awaitRetry(\n collection,\n replicationState.retryTime\n );\n }\n continue;\n }\n const documents: WithDeleted[] = jsonResponse.results\n .map(row => couchDBDocToRxDocData(collection.schema.primaryPath, ensureNotFalsy(row.doc)));\n since = jsonResponse.last_seq;\n\n pullStream$.next({\n documents,\n checkpoint: {\n sequence: jsonResponse.last_seq\n }\n });\n }\n })();\n return startBefore();\n };\n }\n\n 
startReplicationOnLeaderShip(options.waitForLeadership, replicationState);\n\n return replicationState;\n}\n"],"mappings":";;;;;;;;;;;;;AAGA,IAAAA,MAAA,GAAAC,OAAA;AASA,IAAAC,OAAA,GAAAD,OAAA;AAYA,IAAAE,OAAA,GAAAF,OAAA;AAIA,IAAAG,OAAA,GAAAH,OAAA;AAMA,IAAAI,KAAA,GAAAJ,OAAA;AAMA,IAAAK,cAAA,GAAAL,OAAA;AAQAM,MAAA,CAAAC,IAAA,CAAAF,cAAA,EAAAG,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAJ,cAAA,CAAAI,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAZ,cAAA,CAAAI,GAAA;IAAA;EAAA;AAAA;AAFA,IAAAS,kBAAA,GAAAlB,OAAA;AAGA,IAAAmB,aAAA,GAAAnB,OAAA;AAAAM,MAAA,CAAAC,IAAA,CAAAY,aAAA,EAAAX,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAU,aAAA,CAAAV,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAE,aAAA,CAAAV,GAAA;IAAA;EAAA;AAAA;AAjDA;AACA;AACA;AAFA,IAmDaW,yBAAyB,GAAAN,OAAA,CAAAM,yBAAA,0BAAAC,mBAAA;EAClC,SAAAD,0BACoBE,GAAW,EACpBC,KAAsB,EACbC,qBAA6B,EAC7BC,UAAmC,EACnCC,IAA+D,EAC/DC,IAAwC,EACxCC,IAAa,GAAG,IAAI,EAC7BC,SAAiB,GAAG,IAAI,GAAG,CAAC,EAC5BC,SAAkB,GAAG,IAAI,EAClC;IAAA,IAAAC,KAAA;IACEA,KAAA,GAAAV,mBAAA,CAAAT,IAAA,OACIY,qBAAqB,EACrBC,UAAU,EACV,UAAU,EACVC,IAAI,EACJC,IAAI,EACJC,IAAI,EACJC,SAAS,EACTC,SACJ,CAAC;IAACC,KAAA,CAnBcT,GAAW,GAAXA,GAAW;IAAAS,KAAA,CACpBR,KAAsB,GAAtBA,KAAsB;IAAAQ,KAAA,CACbP,qBAA6B,GAA7BA,qBAA6B;IAAAO,KAAA,CAC7BN,UAAmC,GAAnCA,UAAmC;IAAAM,KAAA,CACnCL,IAA+D,GAA/DA,IAA+D;IAAAK,KAAA,CAC/DJ,IAAwC,GAAxCA,IAAwC;IAAAI,KAAA,CACxCH,IAAa,GAAbA,IAAa;IAAAG,KAAA,CACtBF,SAAiB,GAAjBA,SAAiB;IAAAE,KAAA,CACjBD,SAAkB,GAAlBA,SAAkB;IAAA,OAAAC,KAAA;EAY7B;EAAC,IAAAC,eAAA,CAAAC,OAAA,EAAAb,yBAAA,EAAAC,mBAAA;EAAA,OAAAD,yBAAA;AAAA,EAtBqDc,0BAAkB;AAyBrE,SAASC,gBAAgBA,CAC5BC,OAAsC,EACxC;EACE,IAAMX,UAAU,GAAGW,OAAO,CAACX,UAAU;EACrC,IAAMY,eAA2C,GAAGZ,UAAU,CAACY,eAAe;EAC9E,IAAA
C,mBAAW,EAACC,gCAAwB,CAAC;EACrC,IAAMC,WAAW,GAAGJ,OAAO,CAACX,UAAU,CAACgB,MAAM,CAACD,WAAW;EAEzD,IAAI,CAACJ,OAAO,CAACd,GAAG,CAACoB,QAAQ,CAAC,GAAG,CAAC,EAAE;IAC5B,MAAM,IAAAC,kBAAU,EAAC,cAAc,EAAE;MAC7BC,IAAI,EAAE;QACFnB,UAAU,EAAEW,OAAO,CAACX,UAAU,CAACoB,IAAI;QACnCvB,GAAG,EAAEc,OAAO,CAACd;MACjB;IACJ,CAAC,CAAC;EACN;EAEAc,OAAO,GAAG,IAAAU,gBAAS,EAACV,OAAO,CAAC;EAC5B,IAAI,CAACA,OAAO,CAACd,GAAG,CAACoB,QAAQ,CAAC,GAAG,CAAC,EAAE;IAC5BN,OAAO,CAACd,GAAG,GAAGc,OAAO,CAACd,GAAG,GAAG,GAAG;EACnC;EACAc,OAAO,CAACW,iBAAiB,GAAG,OAAOX,OAAO,CAACW,iBAAiB,KAAK,WAAW,GAAG,IAAI,GAAGX,OAAO,CAACW,iBAAiB;EAC/G,IAAMC,WAAmF,GAAG,IAAIC,aAAO,CAAC,CAAC;EACzG,IAAIC,yBAA+F;EACnG,IAAId,OAAO,CAACV,IAAI,EAAE;IACdwB,yBAAyB,GAAG;MACxB,MAAMC,OAAOA,CACTC,oBAAuD,EACvDC,SAAiB,EACnB;QACE;AAChB;AACA;QACgB,IAAM/B,GAAG,GAAGc,OAAO,CAACd,GAAG,GAAG,WAAW,GAAG,IAAAgC,kCAAmB,EAAC;UACxDC,KAAK,EAAE,UAAU;UACjBC,IAAI,EAAE,QAAQ;UACdC,YAAY,EAAE,IAAI;UAClBC,KAAK,EAAEN,oBAAoB,GAAGA,oBAAoB,CAACO,QAAQ,GAAG,CAAC;UAC/DC,SAAS,EAAExB,OAAO,CAACV,IAAI,IAAIU,OAAO,CAACV,IAAI,CAACkC,SAAS,GAAGxB,OAAO,CAACV,IAAI,CAACkC,SAAS,GAAG,KAAK;UAClFC,KAAK,EAAER,SAAS;UAChBS,YAAY,EAAET;QAClB,CAAC,CAAC;QAEF,IAAMU,QAAQ,GAAG,MAAMC,gBAAgB,CAACzC,KAAK,CAACD,GAAG,CAAC;QAClD,IAAM2C,YAAkC,GAAG,MAAMF,QAAQ,CAACG,IAAI,CAAC,CAAC;QAChE,IAAI,CAACD,YAAY,CAACE,OAAO,EAAE;UACvB,MAAM,IAAAxB,kBAAU,EAAC,cAAc,EAAE;YAC7BC,IAAI,EAAE;cAAEqB;YAAa;UACzB,CAAC,CAAC;QACN;QACA,IAAMG,SAAmC,GAAGH,YAAY,CAACE,OAAO,CAC3DE,GAAG,CAACC,GAAG,IAAI,IAAAC,oCAAqB,EAAC9C,UAAU,CAACgB,MAAM,CAACD,WAAW,EAAE,IAAAgC,qBAAc,EAACF,GAAG,CAACG,GAAG,CAAC,CAAC,CAAC;QAC9F,OAAO;UACHL,SAAS;UACTM,UAAU,EAAE;YACRf,QAAQ,EAAEM,YAAY,CAACU;UAC3B;QACJ,CAAC;MACL,CAAC;MACDtB,SAAS,EAAE,IAAAmB,qBAAc,EAACpC,OAAO,CAACV,IAAI,CAAC,CAAC2B,SAAS;MACjDuB,QAAQ,EAAE,IAAAJ,qBAAc,EAACpC,OAAO,CAACV,IAAI,CAAC,CAACkD,QAAQ;MAC/CC,OAAO,EAAE7B,WAAW,CAAC8B,YAAY,CAAC,CAAC;MACnCC,iBAAiB,EAAE3C,OAAO,CAACV,IAAI,CAACqD;IACpC,CAAC;EACL;EAEA,IAAIC,yBAAwE;EAC5E,IAAI5C,OAAO,CAACT,IAAI,EAAE;IACdqD,yBAAyB,GAAG;MACxB,MAAM7B,OAAOA,CACT8B,IAAgD,EAClD;QACE,IAAMC,
SAAmC,GAAG,EAAE;QAC9C,IAAMC,YAAY,GAAG,IAAIC,GAAG,CAAmD,CAAC;QAChFH,IAAI,CAACzE,OAAO,CAAC8D,GAAG,IAAI;UAChB,IAAMe,EAAE,GAAIf,GAAG,CAACgB,gBAAgB,CAAS9C,WAAW,CAAC;UACrD2C,YAAY,CAACI,GAAG,CAACF,EAAE,EAAEf,GAAG,CAAC;QAC7B,CAAC,CAAC;;QAEF;AAChB;AACA;AACA;QACgB,IAAMkB,gBAAgB,GAAG,MAAMxB,gBAAgB,CAACzC,KAAK,CACjDa,OAAO,CAACd,GAAG,GAAG,YAAY,GAAG,IAAAgC,kCAAmB,EAAC,CAAC,CAAC,CAAC,EACpD;UACImC,MAAM,EAAE,MAAM;UACdC,OAAO,EAAE;YACL,cAAc,EAAE;UACpB,CAAC;UACDC,IAAI,EAAEC,IAAI,CAACC,SAAS,CAAC;YACjBtF,IAAI,EAAE0E,IAAI,CAACZ,GAAG,CAACC,GAAG,IAAKA,GAAG,CAACgB,gBAAgB,CAAS9C,WAAW,CAAC,CAAC;YACjEiB,YAAY,EAAE,IAAI;YAClBqC,OAAO,EAAE;UACb,CAAC;QACL,CACJ,CAAC;QACD,IAAMC,YAAkC,GAAG,MAAMP,gBAAgB,CAACtB,IAAI,CAAC,CAAC;QACxE,IAAM8B,eAA4B,GAAG,EAAE;QACvC,IAAMC,aAAa,GAAG,IAAIb,GAAG,CAAiB,CAAC;QAC/C,MAAMc,OAAO,CAACC,GAAG,CACbJ,YAAY,CAACd,IAAI,CAACZ,GAAG,CAAC,MAAOC,GAAG,IAAK;UACjC,IAAI,CAACA,GAAG,CAACG,GAAG,EAAE;YACVuB,eAAe,CAACrE,IAAI,CAAC,IAAAyE,wBAAiB,EAACjB,YAAY,EAAEb,GAAG,CAAC7D,GAAG,CAAC,CAAC;YAC9D;UACJ;UACA,IAAM4F,eAAuC,GAAG,IAAA9B,oCAAqB,EAAC/B,WAAW,EAAE8B,GAAG,CAACG,GAAG,CAAC;UAC3F,IAAM6B,OAAO,GAAG,IAAAF,wBAAiB,EAACjB,YAAY,EAAEb,GAAG,CAACe,EAAE,CAAC;UAEvD,IACIiB,OAAO,CAACC,kBAAkB,IAC1B,CAAC,MAAMlE,eAAe,CAAC;YACnBgE,eAAe;YACff,gBAAgB,EAAEgB,OAAO,CAACC;UAC9B,CAAC,EAAE,gBAAgB,CAAC,EAAEC,OAAO,EAC/B;YACEP,aAAa,CAACV,GAAG,CAACjB,GAAG,CAACe,EAAE,EAAEf,GAAG,CAACG,GAAG,CAACgC,IAAI,CAAC;YACvCT,eAAe,CAACrE,IAAI,CAAC2E,OAAO,CAAC;UACjC,CAAC,MAAM;YACHpB,SAAS,CAACvD,IAAI,CAAC0E,eAAe,CAAC;UACnC;QACJ,CAAC,CACL,CAAC;;QAED;AAChB;AACA;QACgB,IAAM/E,GAAG,GAAGc,OAAO,CAACd,GAAG,GAAG,aAAa,GAAG,IAAAgC,kCAAmB,EAAC,CAAC,CAAC,CAAC;QACjE,IAAMqC,IAAI,GAAG;UACTe,IAAI,EAAEV,eAAe,CAAC3B,GAAG,CAACC,GAAG,IAAI;YAC7B,IAAMqC,KAAK,GAAIrC,GAAG,CAACgB,gBAAgB,CAAS9C,WAAW,CAAC;YACxD,IAAMoE,OAAO,GAAG,IAAA9D,gBAAS,EAACwB,GAAG,CAACgB,gBAAgB,CAAC;YAC/C,IAAIW,aAAa,CAACY,GAAG,CAACF,KAAK,CAAC,EAAE;cACzBC,OAAO,CAASH,IAAI,GAAG,IAAAL,wBAAiB,EAACH,aAAa,EAAEU,KAAK,CAAC;YACnE;YACA,OAAO,IAAAG,mCAAoB,EAACrF,UAAU,CAACgB,MAAM,CAACD,WAAW,EAAEoE,OAAO,CAA
C;UACvE,CAAC;QACL,CAAC;QAED,IAAM7C,QAAQ,GAAG,MAAMC,gBAAgB,CAACzC,KAAK,CACzCD,GAAG,EACH;UACImE,MAAM,EAAE,MAAM;UACdC,OAAO,EAAE;YACL,cAAc,EAAE;UACpB,CAAC;UACDC,IAAI,EAAEC,IAAI,CAACC,SAAS,CAACF,IAAI;QAC7B,CACJ,CAAC;QACD,IAAMoB,YAAqC,GAAG,MAAMhD,QAAQ,CAACG,IAAI,CAAC,CAAC;;QAEnE;QACA,IAAM8C,gBAA0B,GAAG,EAAE;QACrCD,YAAY,CAACvG,OAAO,CAACyG,cAAc,IAAI;UACnC,IAAMC,UAAU,GAAGD,cAAc,CAACE,KAAK,KAAK,UAAU;UACtD,IAAI,CAACF,cAAc,CAACG,EAAE,IAAI,CAACF,UAAU,EAAE;YACnC,MAAM,IAAAvE,kBAAU,EAAC,KAAK,EAAE;cAAEC,IAAI,EAAE;gBAAEqE;cAAe;YAAE,CAAC,CAAC;UACzD;UACA,IAAIC,UAAU,EAAE;YACZF,gBAAgB,CAACrF,IAAI,CAACsF,cAAc,CAAC5B,EAAE,CAAC;UAC5C;QACJ,CAAC,CAAC;QAEF,IAAI2B,gBAAgB,CAACK,MAAM,KAAK,CAAC,EAAE;UAC/B,OAAOnC,SAAS;QACpB;QAEA,IAAMoC,kBAAkB,GAAGlF,OAAO,CAACd,GAAG,GAAG,YAAY,GAAG,IAAAgC,kCAAmB,EAAC;UACxEG,YAAY,EAAE,IAAI;UAClBlD,IAAI,EAAEqF,IAAI,CAACC,SAAS,CAACmB,gBAAgB;QACzC,CAAC,CAAC;QACF,IAAMO,gBAAgB,GAAG,MAAMvD,gBAAgB,CAACzC,KAAK,CAAC+F,kBAAkB,CAAC;QACzE,IAAME,oBAA0C,GAAG,MAAMD,gBAAgB,CAACrD,IAAI,CAAC,CAAC;QAChFsD,oBAAoB,CAACvC,IAAI,CAACzE,OAAO,CAACiH,gBAAgB,IAAI;UAClDvC,SAAS,CAACvD,IAAI,CAAC,IAAA4C,oCAAqB,EAAC9C,UAAU,CAACgB,MAAM,CAACD,WAAW,EAAEiF,gBAAgB,CAAChD,GAAG,CAAC,CAAC;QAC9F,CAAC,CAAC;QAEF,OAAOS,SAAS;MACpB,CAAC;MACD7B,SAAS,EAAEjB,OAAO,CAACT,IAAI,CAAC0B,SAAS;MACjCuB,QAAQ,EAAExC,OAAO,CAACT,IAAI,CAACiD,QAAQ;MAC/BG,iBAAiB,EAAE3C,OAAO,CAACT,IAAI,CAACoD;IACpC,CAAC;EACL;EAEA,IAAMf,gBAAgB,GAAG,IAAI5C,yBAAyB,CAClDgB,OAAO,CAACd,GAAG,EACXc,OAAO,CAACb,KAAK,GAAGa,OAAO,CAACb,KAAK,GAAG,IAAAmG,8BAAe,EAAC,CAAC,EACjDtF,OAAO,CAACZ,qBAAqB,EAC7BC,UAAU,EACVyB,yBAAyB,EACzB8B,yBAAyB,EACzB5C,OAAO,CAACR,IAAI,EACZQ,OAAO,CAACP,SAAS,EACjBO,OAAO,CAACN,SACZ,CAAC;;EAED;AACJ;AACA;EACI,IAAIM,OAAO,CAACR,IAAI,IAAIQ,OAAO,CAACV,IAAI,EAAE;IAC9B,IAAMiG,WAAW,GAAG3D,gBAAgB,CAAC4D,KAAK,CAACC,IAAI,CAAC7D,gBAAgB,CAAC;IACjEA,gBAAgB,CAAC4D,KAAK,GAAG,MAAM;MAC3B,IAAIlE,KAAsB,GAAG,KAAK;MAClC,IAAML,SAAS,GAAGjB,OAAO,CAACV,IAAI,IAAIU,OAAO,CAACV,IAAI,CAAC2B,SAAS,GAAGjB,OAAO,CAACV,IAAI,CAAC2B,SAAS,GAAG,EAAE;MAEtF,CAAC,YAAY;QACT,IAAIyE,oBAAoB,
GAAG,IAAAC,UAAG,EAAC,CAAC;QAChC,OAAO,CAAC/D,gBAAgB,CAACgE,SAAS,CAAC,CAAC,EAAE;UAClC,IAAM1G,IAAG,GAAGc,OAAO,CAACd,GAAG,GAAG,WAAW,GAAG,IAAAgC,kCAAmB,EAAC;YACxDC,KAAK,EAAE,UAAU;YACjBC,IAAI,EAAE,UAAU;YAChBE,KAAK;YACLD,YAAY,EAAE,IAAI;YAClBG,SAAS,EAAExB,OAAO,CAACV,IAAI,IAAIU,OAAO,CAACV,IAAI,CAACkC,SAAS,GAAGxB,OAAO,CAACV,IAAI,CAACkC,SAAS,GAAG,KAAK;YAClFC,KAAK,EAAER,SAAS;YAChBS,YAAY,EAAET;UAClB,CAAC,CAAC;UAEF,IAAIY,YAAkC;UACtC,IAAI;YACA6D,oBAAoB,GAAG,IAAAC,UAAG,EAAC,CAAC;YAC5B9D,YAAY,GAAG,MAAM,CAAC,MAAMD,gBAAgB,CAACzC,KAAK,CAACD,IAAG,CAAC,EAAE4C,IAAI,CAAC,CAAC;UACnE,CAAC,CAAC,OAAO+D,GAAQ,EAAE;YACfjE,gBAAgB,CAACkE,QAAQ,CAACf,KAAK,CAACgB,IAAI,CAChC,IAAAxF,kBAAU,EAAC,WAAW,EAAE;cACpBC,IAAI,EAAE;gBAAEtB,GAAG,EAAHA;cAAI,CAAC;cACb6F,KAAK,EAAE,IAAAiB,uBAAgB,EAACH,GAAG;YAC/B,CAAC,CACL,CAAC;YAED,IAAIH,oBAAoB,GAAI,IAAAC,UAAG,EAAC,CAAC,GAAG/D,gBAAgB,CAACnC,SAAU,EAAE;cAC7D;AAC5B;AACA;AACA;AACA;AACA;cAC4B,MAAM,IAAAwG,kBAAW,EAAC,CAAC,CAAC;YACxB,CAAC,MAAM;cACH;cACA,MAAM,IAAAC,6BAAU,EACZ7G,UAAU,EACVuC,gBAAgB,CAACnC,SACrB,CAAC;YACL;YACA;UACJ;UACA,IAAMuC,SAAmC,GAAGH,YAAY,CAACE,OAAO,CAC3DE,GAAG,CAACC,GAAG,IAAI,IAAAC,oCAAqB,EAAC9C,UAAU,CAACgB,MAAM,CAACD,WAAW,EAAE,IAAAgC,qBAAc,EAACF,GAAG,CAACG,GAAG,CAAC,CAAC,CAAC;UAC9Ff,KAAK,GAAGO,YAAY,CAACU,QAAQ;UAE7B3B,WAAW,CAACmF,IAAI,CAAC;YACb/D,SAAS;YACTM,UAAU,EAAE;cACRf,QAAQ,EAAEM,YAAY,CAACU;YAC3B;UACJ,CAAC,CAAC;QACN;MACJ,CAAC,EAAE,CAAC;MACJ,OAAOgD,WAAW,CAAC,CAAC;IACxB,CAAC;EACL;EAEA,IAAAY,oCAA4B,EAACnG,OAAO,CAACW,iBAAiB,EAAEiB,gBAAgB,CAAC;EAEzE,OAAOA,gBAAgB;AAC3B","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-firestore/firestore-helper.js b/dist/cjs/plugins/replication-firestore/firestore-helper.js deleted file mode 100644 index 815e0c003c6..00000000000 --- a/dist/cjs/plugins/replication-firestore/firestore-helper.js +++ /dev/null @@ -1,58 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.firestoreRowToDocData = firestoreRowToDocData; -exports.getContentByIds = 
getContentByIds; -exports.getFirestoreSortFieldValue = getFirestoreSortFieldValue; -exports.isoStringToServerTimestamp = isoStringToServerTimestamp; -exports.serverTimestampToIsoString = serverTimestampToIsoString; -exports.stripPrimaryKey = stripPrimaryKey; -exports.stripServerTimestampField = stripServerTimestampField; -var _firestore = require("firebase/firestore"); -var _index = require("../../plugins/utils/index.js"); -function getFirestoreSortFieldValue(docData, primaryKey) { - var timeString = (0, _index.now)() + ''; - return 'rxdb-' + timeString.padStart(15, '0') + '-' + docData[primaryKey]; -} -function stripServerTimestampField(serverTimestampField, docData) { - var data = (0, _index.flatClone)(docData); - delete data[serverTimestampField]; - return data; -} -function serverTimestampToIsoString(serverTimestampField, docData) { - var timestamp = docData[serverTimestampField]; - var date = timestamp.toDate(); - return date.toISOString(); -} -function isoStringToServerTimestamp(isoString) { - var date = new Date(isoString); - return _firestore.Timestamp.fromDate(date); -} -function firestoreRowToDocData(serverTimestampField, primaryPath, row) { - var docData = stripServerTimestampField(serverTimestampField, row.data()); - docData[primaryPath] = row.id; - return docData; -} -function stripPrimaryKey(primaryPath, docData) { - docData = (0, _index.flatClone)(docData); - delete docData[primaryPath]; - return docData; -} - -// https://stackoverflow.com/questions/61354866/is-there-a-workaround-for-the-firebase-query-in-limit-to-10 -function getContentByIds(ids, getQuery) { - var batches = []; - while (ids.length) { - // firestore limits batches to 10 - var batch = ids.splice(0, 10); - - // add the batch request to to a queue - batches.push(getQuery(batch)); - } - - // after all of the data is fetched, return it - return Promise.all(batches).then(content => content.map(i => i.docs).flat()); -} -//# sourceMappingURL=firestore-helper.js.map \ No newline at end of 
file diff --git a/dist/cjs/plugins/replication-firestore/firestore-helper.js.map b/dist/cjs/plugins/replication-firestore/firestore-helper.js.map deleted file mode 100644 index 26adb1e064a..00000000000 --- a/dist/cjs/plugins/replication-firestore/firestore-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"firestore-helper.js","names":["_firestore","require","_index","getFirestoreSortFieldValue","docData","primaryKey","timeString","now","padStart","stripServerTimestampField","serverTimestampField","data","flatClone","serverTimestampToIsoString","timestamp","date","toDate","toISOString","isoStringToServerTimestamp","isoString","Date","Timestamp","fromDate","firestoreRowToDocData","primaryPath","row","id","stripPrimaryKey","getContentByIds","ids","getQuery","batches","length","batch","splice","push","Promise","all","then","content","map","i","docs","flat"],"sources":["../../../../src/plugins/replication-firestore/firestore-helper.ts"],"sourcesContent":["import {\n QueryDocumentSnapshot,\n Timestamp\n} from 'firebase/firestore';\nimport type {\n WithDeleted\n} from '../../types/index.d.ts';\nimport { flatClone, now } from '../../plugins/utils/index.ts';\nimport type { GetQuery } from './firestore-types.ts';\n\n\nexport function getFirestoreSortFieldValue(docData: any, primaryKey: string): string {\n const timeString = now() + '';\n return 'rxdb-' + timeString.padStart(15, '0') + '-' + docData[primaryKey];\n}\n\nexport function stripServerTimestampField(\n serverTimestampField: string,\n docData: RxDocType\n): WithDeleted {\n const data = flatClone(docData);\n delete (data as any)[serverTimestampField];\n return data as any;\n}\n\n\nexport function serverTimestampToIsoString(serverTimestampField: string, docData: any): string {\n const timestamp = (docData as any)[serverTimestampField];\n const date: Date = timestamp.toDate();\n return date.toISOString();\n}\n\nexport function isoStringToServerTimestamp(isoString: string): Timestamp {\n const date = new 
Date(isoString);\n return Timestamp.fromDate(date);\n}\n\nexport function firestoreRowToDocData(\n serverTimestampField: string,\n primaryPath: string,\n row: QueryDocumentSnapshot\n): WithDeleted {\n const docData = stripServerTimestampField(\n serverTimestampField,\n row.data()\n );\n (docData as any)[primaryPath] = row.id;\n return docData;\n}\n\nexport function stripPrimaryKey(\n primaryPath: string,\n docData: any\n): any {\n docData = flatClone(docData);\n delete (docData as any)[primaryPath];\n return docData;\n}\n\n// https://stackoverflow.com/questions/61354866/is-there-a-workaround-for-the-firebase-query-in-limit-to-10\nexport function getContentByIds(ids: string[], getQuery: GetQuery): Promise[]> {\n const batches = [];\n\n while (ids.length) {\n // firestore limits batches to 10\n const batch = ids.splice(0, 10);\n\n // add the batch request to to a queue\n batches.push(getQuery(batch));\n }\n\n // after all of the data is fetched, return it\n return Promise.all(batches).then((content) => content.map(i => 
i.docs).flat());\n}\n"],"mappings":";;;;;;;;;;;;AAAA,IAAAA,UAAA,GAAAC,OAAA;AAOA,IAAAC,MAAA,GAAAD,OAAA;AAIO,SAASE,0BAA0BA,CAACC,OAAY,EAAEC,UAAkB,EAAU;EACjF,IAAMC,UAAU,GAAG,IAAAC,UAAG,EAAC,CAAC,GAAG,EAAE;EAC7B,OAAO,OAAO,GAAGD,UAAU,CAACE,QAAQ,CAAC,EAAE,EAAE,GAAG,CAAC,GAAG,GAAG,GAAGJ,OAAO,CAACC,UAAU,CAAC;AAC7E;AAEO,SAASI,yBAAyBA,CACrCC,oBAA4B,EAC5BN,OAAkB,EACI;EACtB,IAAMO,IAAI,GAAG,IAAAC,gBAAS,EAACR,OAAO,CAAC;EAC/B,OAAQO,IAAI,CAASD,oBAAoB,CAAC;EAC1C,OAAOC,IAAI;AACf;AAGO,SAASE,0BAA0BA,CAACH,oBAA4B,EAAEN,OAAY,EAAU;EAC3F,IAAMU,SAAS,GAAIV,OAAO,CAASM,oBAAoB,CAAC;EACxD,IAAMK,IAAU,GAAGD,SAAS,CAACE,MAAM,CAAC,CAAC;EACrC,OAAOD,IAAI,CAACE,WAAW,CAAC,CAAC;AAC7B;AAEO,SAASC,0BAA0BA,CAACC,SAAiB,EAAa;EACrE,IAAMJ,IAAI,GAAG,IAAIK,IAAI,CAACD,SAAS,CAAC;EAChC,OAAOE,oBAAS,CAACC,QAAQ,CAACP,IAAI,CAAC;AACnC;AAEO,SAASQ,qBAAqBA,CACjCb,oBAA4B,EAC5Bc,WAAmB,EACnBC,GAAqC,EACf;EACtB,IAAMrB,OAAO,GAAGK,yBAAyB,CACrCC,oBAAoB,EACpBe,GAAG,CAACd,IAAI,CAAC,CACb,CAAC;EACAP,OAAO,CAASoB,WAAW,CAAC,GAAGC,GAAG,CAACC,EAAE;EACtC,OAAOtB,OAAO;AAClB;AAEO,SAASuB,eAAeA,CAC3BH,WAAmB,EACnBpB,OAAY,EACT;EACHA,OAAO,GAAG,IAAAQ,gBAAS,EAACR,OAAO,CAAC;EAC5B,OAAQA,OAAO,CAASoB,WAAW,CAAC;EACpC,OAAOpB,OAAO;AAClB;;AAEA;AACO,SAASwB,eAAeA,CAAYC,GAAa,EAAEC,QAA6B,EAA+C;EAClI,IAAMC,OAAO,GAAG,EAAE;EAElB,OAAOF,GAAG,CAACG,MAAM,EAAE;IACf;IACA,IAAMC,KAAK,GAAGJ,GAAG,CAACK,MAAM,CAAC,CAAC,EAAE,EAAE,CAAC;;IAE/B;IACAH,OAAO,CAACI,IAAI,CAACL,QAAQ,CAACG,KAAK,CAAC,CAAC;EACjC;;EAEA;EACA,OAAOG,OAAO,CAACC,GAAG,CAACN,OAAO,CAAC,CAACO,IAAI,CAAEC,OAAO,IAAKA,OAAO,CAACC,GAAG,CAACC,CAAC,IAAIA,CAAC,CAACC,IAAI,CAAC,CAACC,IAAI,CAAC,CAAC,CAAC;AAClF","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-firestore/firestore-types.js b/dist/cjs/plugins/replication-firestore/firestore-types.js deleted file mode 100644 index 74386df0d84..00000000000 --- a/dist/cjs/plugins/replication-firestore/firestore-types.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=firestore-types.js.map \ No newline at end of file diff --git 
a/dist/cjs/plugins/replication-firestore/firestore-types.js.map b/dist/cjs/plugins/replication-firestore/firestore-types.js.map deleted file mode 100644 index 4af79d887b5..00000000000 --- a/dist/cjs/plugins/replication-firestore/firestore-types.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"firestore-types.js","names":[],"sources":["../../../../src/plugins/replication-firestore/firestore-types.ts"],"sourcesContent":["import type {\n MaybePromise,\n ReplicationOptions,\n ReplicationPullOptions,\n ReplicationPushOptions,\n WithDeleted\n} from '../../types/index.d.ts';\n\nimport type {\n CollectionReference,\n Firestore,\n QueryFieldFilterConstraint,\n QuerySnapshot\n} from 'firebase/firestore';\n\nexport type FirestoreCheckpointType = {\n id: string;\n /**\n * Firestore internally sets the time to an object like\n * {\n * \"seconds\": 1669807105,\n * \"nanoseconds\": 476000000\n * }\n * But to be able to query that, we have to use a date string\n * like '2022-11-30T11:18:25.141Z'\n * so we store that string instead.\n */\n serverTimestamp: string;\n};\nexport type FirestoreCollection = CollectionReference;\n\nexport type FirestoreOptions = {\n projectId: string;\n collection: FirestoreCollection;\n database: Firestore;\n};\n\nexport type FirestoreSyncPullOptions =\n Omit, 'handler' | 'stream$'>\n & {\n filter?: QueryFieldFilterConstraint | QueryFieldFilterConstraint[];\n };\n\nexport type FirestoreSyncPushOptions = Omit, 'handler'>\n & {\n filter?(item: WithDeleted): MaybePromise;\n };\n\nexport type SyncOptionsFirestore = Omit<\n ReplicationOptions,\n 'pull' | 'push'\n> & {\n firestore: FirestoreOptions;\n /**\n * In firestore it is not possible to read out\n * the internally used write timestamp.\n * Even if we could read it out, it is not indexed which\n * is required for fetch 'changes-since-x'.\n * So instead we have to rely on a custom user defined field\n * that contains the server time which is set by firestore via serverTimestamp()\n * IMPORTANT: 
The serverTimestampField MUST NOT be part of the collections RxJsonSchema!\n * [default='serverTimestamp']\n * @link https://groups.google.com/g/firebase-talk/c/tAmPzPei-mE\n */\n serverTimestampField?: string;\n pull?: FirestoreSyncPullOptions;\n push?: FirestoreSyncPushOptions;\n};\n\nexport type GetQuery = (ids: string[]) => Promise>;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-firestore/index.js b/dist/cjs/plugins/replication-firestore/index.js deleted file mode 100644 index 9c6f2628663..00000000000 --- a/dist/cjs/plugins/replication-firestore/index.js +++ /dev/null @@ -1,261 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _exportNames = { - RxFirestoreReplicationState: true, - replicateFirestore: true -}; -exports.RxFirestoreReplicationState = void 0; -exports.replicateFirestore = replicateFirestore; -var _inheritsLoose2 = _interopRequireDefault(require("@babel/runtime/helpers/inheritsLoose")); -var _index = require("../../plugins/utils/index.js"); -var _firestore = require("firebase/firestore"); -var _index2 = require("../leader-election/index.js"); -var _index3 = require("../replication/index.js"); -var _index4 = require("../../index.js"); -var _rxjs = require("rxjs"); -var _firestoreHelper = require("./firestore-helper.js"); -Object.keys(_firestoreHelper).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _firestoreHelper[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _firestoreHelper[key]; - } - }); -}); -var _firestoreTypes = require("./firestore-types.js"); -Object.keys(_firestoreTypes).forEach(function (key) { - if (key === "default" || key === "__esModule") 
return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _firestoreTypes[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _firestoreTypes[key]; - } - }); -}); -var RxFirestoreReplicationState = exports.RxFirestoreReplicationState = /*#__PURE__*/function (_RxReplicationState) { - function RxFirestoreReplicationState(firestore, replicationIdentifierHash, collection, pull, push, live = true, retryTime = 1000 * 5, autoStart = true) { - var _this; - _this = _RxReplicationState.call(this, replicationIdentifierHash, collection, '_deleted', pull, push, live, retryTime, autoStart) || this; - _this.firestore = firestore; - _this.replicationIdentifierHash = replicationIdentifierHash; - _this.collection = collection; - _this.pull = pull; - _this.push = push; - _this.live = live; - _this.retryTime = retryTime; - _this.autoStart = autoStart; - return _this; - } - (0, _inheritsLoose2.default)(RxFirestoreReplicationState, _RxReplicationState); - return RxFirestoreReplicationState; -}(_index3.RxReplicationState); -function replicateFirestore(options) { - var collection = options.collection; - (0, _index4.addRxPlugin)(_index2.RxDBLeaderElectionPlugin); - var pullStream$ = new _rxjs.Subject(); - var replicationPrimitivesPull; - options.live = typeof options.live === 'undefined' ? true : options.live; - options.waitForLeadership = typeof options.waitForLeadership === 'undefined' ? true : options.waitForLeadership; - var serverTimestampField = typeof options.serverTimestampField === 'undefined' ? 'serverTimestamp' : options.serverTimestampField; - options.serverTimestampField = serverTimestampField; - var primaryPath = collection.schema.primaryPath; - - /** - * The serverTimestampField MUST NOT be part of the collections RxJsonSchema. 
- */ - var schemaPart = (0, _index4.getSchemaByObjectPath)(collection.schema.jsonSchema, serverTimestampField); - if (schemaPart || - // also must not be nested. - serverTimestampField.includes('.')) { - throw (0, _index4.newRxError)('RC6', { - field: serverTimestampField, - schema: collection.schema.jsonSchema - }); - } - var pullFilters = options.pull?.filter !== undefined ? (0, _index.toArray)(options.pull.filter) : []; - var pullQuery = (0, _firestore.query)(options.firestore.collection, ...pullFilters); - if (options.pull) { - replicationPrimitivesPull = { - async handler(lastPulledCheckpoint, batchSize) { - var newerQuery; - var sameTimeQuery; - if (lastPulledCheckpoint) { - var lastServerTimestamp = (0, _firestoreHelper.isoStringToServerTimestamp)(lastPulledCheckpoint.serverTimestamp); - newerQuery = (0, _firestore.query)(pullQuery, (0, _firestore.where)(serverTimestampField, '>', lastServerTimestamp), (0, _firestore.orderBy)(serverTimestampField, 'asc'), (0, _firestore.limit)(batchSize)); - sameTimeQuery = (0, _firestore.query)(pullQuery, (0, _firestore.where)(serverTimestampField, '==', lastServerTimestamp), (0, _firestore.where)(primaryPath, '>', lastPulledCheckpoint.id), (0, _firestore.orderBy)(primaryPath, 'asc'), (0, _firestore.limit)(batchSize)); - } else { - newerQuery = (0, _firestore.query)(pullQuery, (0, _firestore.orderBy)(serverTimestampField, 'asc'), (0, _firestore.limit)(batchSize)); - } - var mustsReRun = true; - var useDocs = []; - while (mustsReRun) { - /** - * Local writes that have not been persisted to the server - * are in pending state and do not have a correct serverTimestamp set. - * We have to ensure we only use document states that are in sync with the server. 
- * @link https://medium.com/firebase-developers/the-secrets-of-firestore-fieldvalue-servertimestamp-revealed-29dd7a38a82b - */ - await (0, _firestore.waitForPendingWrites)(options.firestore.database); - await (0, _firestore.runTransaction)(options.firestore.database, async _tx => { - useDocs = []; - var [newerQueryResult, sameTimeQueryResult] = await Promise.all([(0, _firestore.getDocs)(newerQuery), sameTimeQuery ? (0, _firestore.getDocs)(sameTimeQuery) : undefined]); - if (newerQueryResult.metadata.hasPendingWrites || sameTimeQuery && (0, _index.ensureNotFalsy)(sameTimeQueryResult).metadata.hasPendingWrites) { - return; - } else { - mustsReRun = false; - if (sameTimeQuery) { - useDocs = (0, _index.ensureNotFalsy)(sameTimeQueryResult).docs; - } - var missingAmount = batchSize - useDocs.length; - if (missingAmount > 0) { - var additionalDocs = newerQueryResult.docs.slice(0, missingAmount).filter(x => !!x); - (0, _index.appendToArray)(useDocs, additionalDocs); - } - } - }); - } - if (useDocs.length === 0) { - return { - checkpoint: lastPulledCheckpoint ?? 
null, - documents: [] - }; - } - var lastDoc = (0, _index.ensureNotFalsy)((0, _index.lastOfArray)(useDocs)); - var documents = useDocs.map(row => (0, _firestoreHelper.firestoreRowToDocData)(serverTimestampField, primaryPath, row)); - var newCheckpoint = { - id: lastDoc.id, - serverTimestamp: (0, _firestoreHelper.serverTimestampToIsoString)(serverTimestampField, lastDoc.data()) - }; - var ret = { - documents: documents, - checkpoint: newCheckpoint - }; - return ret; - }, - batchSize: (0, _index.ensureNotFalsy)(options.pull).batchSize, - modifier: (0, _index.ensureNotFalsy)(options.pull).modifier, - stream$: pullStream$.asObservable() - }; - } - var replicationPrimitivesPush; - if (options.push) { - var pushFilter = options.push?.filter; - replicationPrimitivesPush = { - async handler(rows) { - if (pushFilter !== undefined) { - rows = await (0, _index.asyncFilter)(rows, row => pushFilter(row.newDocumentState)); - } - var writeRowsById = {}; - var docIds = rows.map(row => { - var docId = row.newDocumentState[primaryPath]; - writeRowsById[docId] = row; - return docId; - }); - await (0, _firestore.waitForPendingWrites)(options.firestore.database); - var conflicts = []; - - /** - * Everything must run INSIDE of the transaction - * because on tx-errors, firebase will re-run the transaction on some cases. - * @link https://firebase.google.com/docs/firestore/manage-data/transactions#transaction_failure - * @link https://firebase.google.com/docs/firestore/manage-data/transactions - */ - await (0, _firestore.runTransaction)(options.firestore.database, async _tx => { - conflicts = []; // reset in case the tx has re-run. 
- /** - * @link https://stackoverflow.com/a/48423626/3443137 - */ - - var getQuery = ids => { - return (0, _firestore.getDocs)((0, _firestore.query)(options.firestore.collection, (0, _firestore.where)((0, _firestore.documentId)(), 'in', ids))); - }; - var docsInDbResult = await (0, _firestoreHelper.getContentByIds)(docIds, getQuery); - var docsInDbById = {}; - docsInDbResult.forEach(row => { - var docDataInDb = (0, _firestoreHelper.stripServerTimestampField)(serverTimestampField, row.data()); - var docId = row.id; - docDataInDb[primaryPath] = docId; - docsInDbById[docId] = docDataInDb; - }); - - /** - * @link https://firebase.google.com/docs/firestore/manage-data/transactions#batched-writes - */ - var batch = (0, _firestore.writeBatch)(options.firestore.database); - var hasWrite = false; - await Promise.all(Object.entries(writeRowsById).map(async ([docId, writeRow]) => { - var docInDb = docsInDbById[docId]; - if (docInDb && (!writeRow.assumedMasterState || (await collection.conflictHandler({ - newDocumentState: docInDb, - realMasterState: writeRow.assumedMasterState - }, 'replication-firestore-push')).isEqual === false)) { - // conflict - conflicts.push(docInDb); - } else { - // no conflict - hasWrite = true; - var docRef = (0, _firestore.doc)(options.firestore.collection, docId); - var writeDocData = (0, _index.flatClone)(writeRow.newDocumentState); - writeDocData[serverTimestampField] = (0, _firestore.serverTimestamp)(); - if (!docInDb) { - // insert - batch.set(docRef, (0, _firestoreHelper.stripPrimaryKey)(primaryPath, writeDocData)); - } else { - // update - batch.update(docRef, (0, _firestoreHelper.stripPrimaryKey)(primaryPath, writeDocData)); - } - } - })); - if (hasWrite) { - await batch.commit(); - } - }); - await (0, _firestore.waitForPendingWrites)(options.firestore.database); - return conflicts; - }, - batchSize: options.push.batchSize, - modifier: options.push.modifier - }; - } - var replicationState = new RxFirestoreReplicationState(options.firestore, 
options.replicationIdentifier, collection, replicationPrimitivesPull, replicationPrimitivesPush, options.live, options.retryTime, options.autoStart); - - /** - * Use long polling to get live changes for the pull.stream$ - */ - if (options.live && options.pull) { - var startBefore = replicationState.start.bind(replicationState); - var cancelBefore = replicationState.cancel.bind(replicationState); - replicationState.start = () => { - var lastChangeQuery = (0, _firestore.query)(pullQuery, (0, _firestore.orderBy)(serverTimestampField, 'desc'), (0, _firestore.limit)(1)); - var unsubscribe = (0, _firestore.onSnapshot)(lastChangeQuery, _querySnapshot => { - /** - * There is no good way to observe the event stream in firestore. - * So instead we listen to any write to the collection - * and then emit a 'RESYNC' flag. - */ - replicationState.reSync(); - }, error => { - replicationState.subjects.error.next((0, _index4.newRxError)('RC_STREAM', { - error: (0, _index.errorToPlainJson)(error) - })); - }); - replicationState.cancel = () => { - unsubscribe(); - return cancelBefore(); - }; - return startBefore(); - }; - } - (0, _index3.startReplicationOnLeaderShip)(options.waitForLeadership, replicationState); - return replicationState; -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-firestore/index.js.map b/dist/cjs/plugins/replication-firestore/index.js.map deleted file mode 100644 index 7f936d7a055..00000000000 --- a/dist/cjs/plugins/replication-firestore/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","names":["_index","require","_firestore","_index2","_index3","_index4","_rxjs","_firestoreHelper","Object","keys","forEach","key","prototype","hasOwnProperty","call","_exportNames","exports","defineProperty","enumerable","get","_firestoreTypes","RxFirestoreReplicationState","_RxReplicationState","firestore","replicationIdentifierHash","collection","pull","push","live","retryTime","autoStart","_this","_inheritsLoose2","default","RxReplicationState","replicateFirestore","options","addRxPlugin","RxDBLeaderElectionPlugin","pullStream$","Subject","replicationPrimitivesPull","waitForLeadership","serverTimestampField","primaryPath","schema","schemaPart","getSchemaByObjectPath","jsonSchema","includes","newRxError","field","pullFilters","filter","undefined","toArray","pullQuery","query","handler","lastPulledCheckpoint","batchSize","newerQuery","sameTimeQuery","lastServerTimestamp","isoStringToServerTimestamp","serverTimestamp","where","orderBy","limit","id","mustsReRun","useDocs","waitForPendingWrites","database","runTransaction","_tx","newerQueryResult","sameTimeQueryResult","Promise","all","getDocs","metadata","hasPendingWrites","ensureNotFalsy","docs","missingAmount","length","additionalDocs","slice","x","appendToArray","checkpoint","documents","lastDoc","lastOfArray","map","row","firestoreRowToDocData","newCheckpoint","serverTimestampToIsoString","data","ret","modifier","stream$","asObservable","replicationPrimitivesPush","pushFilter","rows","asyncFilter","newDocumentState","writeRowsById","docIds","docId","conflicts","getQuery","ids","documentId","docsInDbResult","getContentByIds","docsInDbById","docDataInDb","stripServerTimestampField","batch","writeBatch","hasWrite","entries","writeRow","docInDb","assumedMasterState","conflictHandler","realMasterState","isEqual","docRef","doc","writeDocData","flatClone","set","stripPrimaryKey","update","commit","replicationState","replicationIdentifier","startBefore","start","bind","cancelBefore","cance
l","lastChangeQuery","unsubscribe","onSnapshot","_querySnapshot","reSync","error","subjects","next","errorToPlainJson","startReplicationOnLeaderShip"],"sources":["../../../../src/plugins/replication-firestore/index.ts"],"sourcesContent":["import {\n appendToArray,\n asyncFilter,\n ensureNotFalsy,\n errorToPlainJson,\n flatClone,\n lastOfArray,\n toArray\n} from '../../plugins/utils/index.ts';\n\nimport {\n doc,\n query,\n where,\n orderBy,\n limit,\n getDocs,\n onSnapshot,\n runTransaction,\n writeBatch,\n serverTimestamp,\n QueryDocumentSnapshot,\n waitForPendingWrites,\n documentId\n} from 'firebase/firestore';\n\nimport { RxDBLeaderElectionPlugin } from '../leader-election/index.ts';\nimport type {\n RxCollection,\n ReplicationPullOptions,\n ReplicationPushOptions,\n RxReplicationWriteToMasterRow,\n RxReplicationPullStreamItem\n} from '../../types/index.d.ts';\nimport {\n RxReplicationState,\n startReplicationOnLeaderShip\n} from '../replication/index.ts';\nimport {\n addRxPlugin,\n ById,\n getSchemaByObjectPath,\n newRxError,\n WithDeleted\n} from '../../index.ts';\n\nimport type {\n FirestoreCheckpointType,\n FirestoreOptions,\n SyncOptionsFirestore\n} from './firestore-types.ts';\nimport { Subject } from 'rxjs';\nimport {\n firestoreRowToDocData,\n getContentByIds,\n isoStringToServerTimestamp,\n serverTimestampToIsoString,\n stripPrimaryKey,\n stripServerTimestampField\n} from './firestore-helper.ts';\n\nexport * from './firestore-helper.ts';\nexport * from './firestore-types.ts';\n\nexport class RxFirestoreReplicationState extends RxReplicationState {\n constructor(\n public readonly firestore: FirestoreOptions,\n public readonly replicationIdentifierHash: string,\n public readonly collection: RxCollection,\n public readonly pull?: ReplicationPullOptions,\n public readonly push?: ReplicationPushOptions,\n public readonly live: boolean = true,\n public retryTime: number = 1000 * 5,\n public autoStart: boolean = true\n ) {\n super(\n 
replicationIdentifierHash,\n collection,\n '_deleted',\n pull,\n push,\n live,\n retryTime,\n autoStart\n );\n }\n}\n\nexport function replicateFirestore(\n options: SyncOptionsFirestore\n): RxFirestoreReplicationState {\n const collection = options.collection;\n addRxPlugin(RxDBLeaderElectionPlugin);\n const pullStream$: Subject> = new Subject();\n let replicationPrimitivesPull: ReplicationPullOptions | undefined;\n options.live = typeof options.live === 'undefined' ? true : options.live;\n options.waitForLeadership = typeof options.waitForLeadership === 'undefined' ? true : options.waitForLeadership;\n const serverTimestampField = typeof options.serverTimestampField === 'undefined' ? 'serverTimestamp' : options.serverTimestampField;\n options.serverTimestampField = serverTimestampField;\n const primaryPath = collection.schema.primaryPath;\n\n /**\n * The serverTimestampField MUST NOT be part of the collections RxJsonSchema.\n */\n const schemaPart = getSchemaByObjectPath(collection.schema.jsonSchema, serverTimestampField);\n if (\n schemaPart ||\n // also must not be nested.\n serverTimestampField.includes('.')\n ) {\n throw newRxError('RC6', {\n field: serverTimestampField,\n schema: collection.schema.jsonSchema\n });\n }\n\n const pullFilters = options.pull?.filter !== undefined\n ? 
toArray(options.pull.filter)\n : [];\n\n const pullQuery = query(options.firestore.collection, ...pullFilters);\n\n if (options.pull) {\n replicationPrimitivesPull = {\n async handler(\n lastPulledCheckpoint: FirestoreCheckpointType | undefined,\n batchSize: number\n ) {\n let newerQuery: ReturnType;\n let sameTimeQuery: ReturnType | undefined;\n\n if (lastPulledCheckpoint) {\n const lastServerTimestamp = isoStringToServerTimestamp(lastPulledCheckpoint.serverTimestamp);\n newerQuery = query(pullQuery,\n where(serverTimestampField, '>', lastServerTimestamp),\n orderBy(serverTimestampField, 'asc'),\n limit(batchSize)\n );\n sameTimeQuery = query(pullQuery,\n where(serverTimestampField, '==', lastServerTimestamp),\n where(primaryPath, '>', lastPulledCheckpoint.id),\n orderBy(primaryPath, 'asc'),\n limit(batchSize)\n );\n } else {\n newerQuery = query(pullQuery,\n orderBy(serverTimestampField, 'asc'),\n limit(batchSize)\n );\n }\n\n let mustsReRun = true;\n let useDocs: QueryDocumentSnapshot[] = [];\n while (mustsReRun) {\n /**\n * Local writes that have not been persisted to the server\n * are in pending state and do not have a correct serverTimestamp set.\n * We have to ensure we only use document states that are in sync with the server.\n * @link https://medium.com/firebase-developers/the-secrets-of-firestore-fieldvalue-servertimestamp-revealed-29dd7a38a82b\n */\n await waitForPendingWrites(options.firestore.database);\n await runTransaction(options.firestore.database, async (_tx) => {\n useDocs = [];\n const [\n newerQueryResult,\n sameTimeQueryResult\n ] = await Promise.all([\n getDocs(newerQuery),\n sameTimeQuery ? 
getDocs(sameTimeQuery) : undefined\n ]);\n\n if (\n newerQueryResult.metadata.hasPendingWrites ||\n (sameTimeQuery && ensureNotFalsy(sameTimeQueryResult).metadata.hasPendingWrites)\n ) {\n return;\n } else {\n mustsReRun = false;\n\n if (sameTimeQuery) {\n useDocs = ensureNotFalsy(sameTimeQueryResult).docs as any;\n }\n const missingAmount = batchSize - useDocs.length;\n if (missingAmount > 0) {\n const additionalDocs = newerQueryResult.docs.slice(0, missingAmount).filter(x => !!x);\n appendToArray(useDocs, additionalDocs);\n }\n }\n });\n }\n\n if (useDocs.length === 0) {\n return {\n checkpoint: lastPulledCheckpoint ?? null,\n documents: []\n };\n }\n const lastDoc = ensureNotFalsy(lastOfArray(useDocs));\n const documents: WithDeleted[] = useDocs\n .map(row => firestoreRowToDocData(\n serverTimestampField,\n primaryPath,\n row\n ));\n const newCheckpoint: FirestoreCheckpointType = {\n id: lastDoc.id,\n serverTimestamp: serverTimestampToIsoString(serverTimestampField, lastDoc.data())\n };\n const ret = {\n documents: documents,\n checkpoint: newCheckpoint\n };\n return ret;\n },\n batchSize: ensureNotFalsy(options.pull).batchSize,\n modifier: ensureNotFalsy(options.pull).modifier,\n stream$: pullStream$.asObservable()\n };\n }\n\n let replicationPrimitivesPush: ReplicationPushOptions | undefined;\n if (options.push) {\n const pushFilter = options.push?.filter;\n replicationPrimitivesPush = {\n async handler(\n rows: RxReplicationWriteToMasterRow[]\n ) {\n if (pushFilter !== undefined) {\n rows = await asyncFilter(rows, (row) => pushFilter(row.newDocumentState));\n }\n\n const writeRowsById: ById> = {};\n const docIds: string[] = rows.map(row => {\n const docId = (row.newDocumentState as any)[primaryPath];\n writeRowsById[docId] = row;\n return docId;\n });\n await waitForPendingWrites(options.firestore.database);\n let conflicts: WithDeleted[] = [];\n\n /**\n * Everything must run INSIDE of the transaction\n * because on tx-errors, firebase will re-run the 
transaction on some cases.\n * @link https://firebase.google.com/docs/firestore/manage-data/transactions#transaction_failure\n * @link https://firebase.google.com/docs/firestore/manage-data/transactions\n */\n await runTransaction(options.firestore.database, async (_tx) => {\n conflicts = []; // reset in case the tx has re-run.\n /**\n * @link https://stackoverflow.com/a/48423626/3443137\n */\n\n const getQuery = (ids: string[]) => {\n return getDocs(\n query(\n options.firestore.collection,\n where(documentId(), 'in', ids)\n )\n );\n };\n\n const docsInDbResult = await getContentByIds(docIds, getQuery);\n\n const docsInDbById: ById = {};\n docsInDbResult.forEach(row => {\n const docDataInDb = stripServerTimestampField(serverTimestampField, row.data());\n const docId = row.id;\n (docDataInDb as any)[primaryPath] = docId;\n docsInDbById[docId] = docDataInDb;\n });\n\n /**\n * @link https://firebase.google.com/docs/firestore/manage-data/transactions#batched-writes\n */\n const batch = writeBatch(options.firestore.database);\n let hasWrite = false;\n await Promise.all(\n Object.entries(writeRowsById).map(async ([docId, writeRow]) => {\n const docInDb: RxDocType | undefined = docsInDbById[docId];\n\n if (\n docInDb &&\n (\n !writeRow.assumedMasterState ||\n (await collection.conflictHandler({\n newDocumentState: docInDb as any,\n realMasterState: writeRow.assumedMasterState\n }, 'replication-firestore-push')).isEqual === false\n )\n ) {\n // conflict\n conflicts.push(docInDb as any);\n } else {\n // no conflict\n hasWrite = true;\n const docRef = doc(options.firestore.collection, docId);\n const writeDocData = flatClone(writeRow.newDocumentState);\n (writeDocData as any)[serverTimestampField] = serverTimestamp();\n if (!docInDb) {\n // insert\n batch.set(docRef, stripPrimaryKey(primaryPath, writeDocData));\n } else {\n // update\n batch.update(docRef, stripPrimaryKey(primaryPath, writeDocData));\n }\n }\n })\n );\n\n if (hasWrite) {\n await batch.commit();\n }\n });\n 
await waitForPendingWrites(options.firestore.database);\n return conflicts;\n },\n batchSize: options.push.batchSize,\n modifier: options.push.modifier\n };\n }\n\n\n const replicationState = new RxFirestoreReplicationState(\n options.firestore,\n options.replicationIdentifier,\n collection,\n replicationPrimitivesPull,\n replicationPrimitivesPush,\n options.live,\n options.retryTime,\n options.autoStart\n );\n\n /**\n * Use long polling to get live changes for the pull.stream$\n */\n if (options.live && options.pull) {\n const startBefore = replicationState.start.bind(replicationState);\n const cancelBefore = replicationState.cancel.bind(replicationState);\n replicationState.start = () => {\n const lastChangeQuery = query(\n pullQuery,\n orderBy(serverTimestampField, 'desc'),\n limit(1)\n );\n const unsubscribe = onSnapshot(\n lastChangeQuery,\n (_querySnapshot) => {\n /**\n * There is no good way to observe the event stream in firestore.\n * So instead we listen to any write to the collection\n * and then emit a 'RESYNC' flag.\n */\n replicationState.reSync();\n },\n (error) => {\n replicationState.subjects.error.next(\n newRxError('RC_STREAM', { error: errorToPlainJson(error) })\n );\n }\n );\n replicationState.cancel = () => {\n unsubscribe();\n return cancelBefore();\n };\n return startBefore();\n };\n }\n\n startReplicationOnLeaderShip(options.waitForLeadership, replicationState);\n\n return 
replicationState;\n}\n"],"mappings":";;;;;;;;;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AAUA,IAAAC,UAAA,GAAAD,OAAA;AAgBA,IAAAE,OAAA,GAAAF,OAAA;AAQA,IAAAG,OAAA,GAAAH,OAAA;AAIA,IAAAI,OAAA,GAAAJ,OAAA;AAaA,IAAAK,KAAA,GAAAL,OAAA;AACA,IAAAM,gBAAA,GAAAN,OAAA;AASAO,MAAA,CAAAC,IAAA,CAAAF,gBAAA,EAAAG,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAJ,gBAAA,CAAAI,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAZ,gBAAA,CAAAI,GAAA;IAAA;EAAA;AAAA;AACA,IAAAS,eAAA,GAAAnB,OAAA;AAAAO,MAAA,CAAAC,IAAA,CAAAW,eAAA,EAAAV,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAS,eAAA,CAAAT,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAC,eAAA,CAAAT,GAAA;IAAA;EAAA;AAAA;AAAqC,IAExBU,2BAA2B,GAAAL,OAAA,CAAAK,2BAAA,0BAAAC,mBAAA;EACpC,SAAAD,4BACoBE,SAAsC,EACtCC,yBAAiC,EACjCC,UAAmC,EACnCC,IAAiE,EACjEC,IAAwC,EACxCC,IAAa,GAAG,IAAI,EAC7BC,SAAiB,GAAG,IAAI,GAAG,CAAC,EAC5BC,SAAkB,GAAG,IAAI,EAClC;IAAA,IAAAC,KAAA;IACEA,KAAA,GAAAT,mBAAA,CAAAR,IAAA,OACIU,yBAAyB,EACzBC,UAAU,EACV,UAAU,EACVC,IAAI,EACJC,IAAI,EACJC,IAAI,EACJC,SAAS,EACTC,SACJ,CAAC;IAACC,KAAA,CAlBcR,SAAsC,GAAtCA,SAAsC;IAAAQ,KAAA,CACtCP,yBAAiC,GAAjCA,yBAAiC;IAAAO,KAAA,CACjCN,UAAmC,GAAnCA,UAAmC;IAAAM,KAAA,CACnCL,IAAiE,GAAjEA,IAAiE;IAAAK,KAAA,CACjEJ,IAAwC,GAAxCA,IAAwC;IAAAI,KAAA,CACxCH,IAAa,GAAbA,IAAa;IAAAG,KAAA,CACtBF,SAAiB,GAAjBA,SAAiB;IAAAE,KAAA,CACjBD,SAAkB,GAAlBA,SAAkB;IAAA,OAAAC,KAAA;EAY7B;EAAC,IAAAC,eAAA,CAAAC,OAAA,EAAAZ,2BAAA,EAAAC,mBAAA;EAAA,OAAAD,2BAAA;AAAA,EArBuDa,0BAAkB;AAwBvE,SAASC,kBAAkBA,CAC9BC,OAAwC,EACF;EACtC,IAAMX,UAAU,GAAGW,OAAO,CAACX,UAAU;EACrC,IAAAY,mBAAW,EAACC,gCAAwB,CAAC;EACrC,IAAMC,WAAqF,GAAG,IAAIC,aAAO,CAAC,CAAC;EAC3G,IAAIC,yBAAiG;EACrGL,OAAO,CAACR,IAAI,GAAG,OAAOQ,OAAO,CAACR,IAAI,KAAK,WAAW,GAAG,IAAI,GAAGQ,OAAO,CAACR,IAAI;EA
CxEQ,OAAO,CAACM,iBAAiB,GAAG,OAAON,OAAO,CAACM,iBAAiB,KAAK,WAAW,GAAG,IAAI,GAAGN,OAAO,CAACM,iBAAiB;EAC/G,IAAMC,oBAAoB,GAAG,OAAOP,OAAO,CAACO,oBAAoB,KAAK,WAAW,GAAG,iBAAiB,GAAGP,OAAO,CAACO,oBAAoB;EACnIP,OAAO,CAACO,oBAAoB,GAAGA,oBAAoB;EACnD,IAAMC,WAAW,GAAGnB,UAAU,CAACoB,MAAM,CAACD,WAAW;;EAEjD;AACJ;AACA;EACI,IAAME,UAAU,GAAG,IAAAC,6BAAqB,EAACtB,UAAU,CAACoB,MAAM,CAACG,UAAU,EAAEL,oBAAoB,CAAC;EAC5F,IACIG,UAAU;EACV;EACAH,oBAAoB,CAACM,QAAQ,CAAC,GAAG,CAAC,EACpC;IACE,MAAM,IAAAC,kBAAU,EAAC,KAAK,EAAE;MACpBC,KAAK,EAAER,oBAAoB;MAC3BE,MAAM,EAAEpB,UAAU,CAACoB,MAAM,CAACG;IAC9B,CAAC,CAAC;EACN;EAEA,IAAMI,WAAW,GAAGhB,OAAO,CAACV,IAAI,EAAE2B,MAAM,KAAKC,SAAS,GAChD,IAAAC,cAAO,EAACnB,OAAO,CAACV,IAAI,CAAC2B,MAAM,CAAC,GAC5B,EAAE;EAER,IAAMG,SAAS,GAAG,IAAAC,gBAAK,EAACrB,OAAO,CAACb,SAAS,CAACE,UAAU,EAAE,GAAG2B,WAAW,CAAC;EAErE,IAAIhB,OAAO,CAACV,IAAI,EAAE;IACde,yBAAyB,GAAG;MACxB,MAAMiB,OAAOA,CACTC,oBAAyD,EACzDC,SAAiB,EACnB;QACE,IAAIC,UAAoC;QACxC,IAAIC,aAAmD;QAEvD,IAAIH,oBAAoB,EAAE;UACtB,IAAMI,mBAAmB,GAAG,IAAAC,2CAA0B,EAACL,oBAAoB,CAACM,eAAe,CAAC;UAC5FJ,UAAU,GAAG,IAAAJ,gBAAK,EAACD,SAAS,EACxB,IAAAU,gBAAK,EAACvB,oBAAoB,EAAE,GAAG,EAAEoB,mBAAmB,CAAC,EACrD,IAAAI,kBAAO,EAACxB,oBAAoB,EAAE,KAAK,CAAC,EACpC,IAAAyB,gBAAK,EAACR,SAAS,CACnB,CAAC;UACDE,aAAa,GAAG,IAAAL,gBAAK,EAACD,SAAS,EAC3B,IAAAU,gBAAK,EAACvB,oBAAoB,EAAE,IAAI,EAAEoB,mBAAmB,CAAC,EACtD,IAAAG,gBAAK,EAACtB,WAAW,EAAE,GAAG,EAAEe,oBAAoB,CAACU,EAAE,CAAC,EAChD,IAAAF,kBAAO,EAACvB,WAAW,EAAE,KAAK,CAAC,EAC3B,IAAAwB,gBAAK,EAACR,SAAS,CACnB,CAAC;QACL,CAAC,MAAM;UACHC,UAAU,GAAG,IAAAJ,gBAAK,EAACD,SAAS,EACxB,IAAAW,kBAAO,EAACxB,oBAAoB,EAAE,KAAK,CAAC,EACpC,IAAAyB,gBAAK,EAACR,SAAS,CACnB,CAAC;QACL;QAEA,IAAIU,UAAU,GAAG,IAAI;QACrB,IAAIC,OAA2C,GAAG,EAAE;QACpD,OAAOD,UAAU,EAAE;UACf;AACpB;AACA;AACA;AACA;AACA;UACoB,MAAM,IAAAE,+BAAoB,EAACpC,OAAO,CAACb,SAAS,CAACkD,QAAQ,CAAC;UACtD,MAAM,IAAAC,yBAAc,EAACtC,OAAO,CAACb,SAAS,CAACkD,QAAQ,EAAE,MAAOE,GAAG,IAAK;YAC5DJ,OAAO,GAAG,EAAE;YACZ,IAAM,CACFK,gBAAgB,EAChBC,mBAAmB,CACtB,GAAG,MAAMC,OAAO,CAACC,GAAG,CAAC,CAClB,IAAAC,kBAAO,EAACnB,UAAU,CAAC,EACn
BC,aAAa,GAAG,IAAAkB,kBAAO,EAAClB,aAAa,CAAC,GAAGR,SAAS,CACrD,CAAC;YAEF,IACIsB,gBAAgB,CAACK,QAAQ,CAACC,gBAAgB,IACzCpB,aAAa,IAAI,IAAAqB,qBAAc,EAACN,mBAAmB,CAAC,CAACI,QAAQ,CAACC,gBAAiB,EAClF;cACE;YACJ,CAAC,MAAM;cACHZ,UAAU,GAAG,KAAK;cAElB,IAAIR,aAAa,EAAE;gBACfS,OAAO,GAAG,IAAAY,qBAAc,EAACN,mBAAmB,CAAC,CAACO,IAAW;cAC7D;cACA,IAAMC,aAAa,GAAGzB,SAAS,GAAGW,OAAO,CAACe,MAAM;cAChD,IAAID,aAAa,GAAG,CAAC,EAAE;gBACnB,IAAME,cAAc,GAAGX,gBAAgB,CAACQ,IAAI,CAACI,KAAK,CAAC,CAAC,EAAEH,aAAa,CAAC,CAAChC,MAAM,CAACoC,CAAC,IAAI,CAAC,CAACA,CAAC,CAAC;gBACrF,IAAAC,oBAAa,EAACnB,OAAO,EAAEgB,cAAc,CAAC;cAC1C;YACJ;UACJ,CAAC,CAAC;QACN;QAEA,IAAIhB,OAAO,CAACe,MAAM,KAAK,CAAC,EAAE;UACtB,OAAO;YACHK,UAAU,EAAEhC,oBAAoB,IAAI,IAAI;YACxCiC,SAAS,EAAE;UACf,CAAC;QACL;QACA,IAAMC,OAAO,GAAG,IAAAV,qBAAc,EAAC,IAAAW,kBAAW,EAACvB,OAAO,CAAC,CAAC;QACpD,IAAMqB,SAAmC,GAAGrB,OAAO,CAC9CwB,GAAG,CAACC,GAAG,IAAI,IAAAC,sCAAqB,EAC7BtD,oBAAoB,EACpBC,WAAW,EACXoD,GACJ,CAAC,CAAC;QACN,IAAME,aAAsC,GAAG;UAC3C7B,EAAE,EAAEwB,OAAO,CAACxB,EAAE;UACdJ,eAAe,EAAE,IAAAkC,2CAA0B,EAACxD,oBAAoB,EAAEkD,OAAO,CAACO,IAAI,CAAC,CAAC;QACpF,CAAC;QACD,IAAMC,GAAG,GAAG;UACRT,SAAS,EAAEA,SAAS;UACpBD,UAAU,EAAEO;QAChB,CAAC;QACD,OAAOG,GAAG;MACd,CAAC;MACDzC,SAAS,EAAE,IAAAuB,qBAAc,EAAC/C,OAAO,CAACV,IAAI,CAAC,CAACkC,SAAS;MACjD0C,QAAQ,EAAE,IAAAnB,qBAAc,EAAC/C,OAAO,CAACV,IAAI,CAAC,CAAC4E,QAAQ;MAC/CC,OAAO,EAAEhE,WAAW,CAACiE,YAAY,CAAC;IACtC,CAAC;EACL;EAEA,IAAIC,yBAAwE;EAC5E,IAAIrE,OAAO,CAACT,IAAI,EAAE;IACd,IAAM+E,UAAU,GAAGtE,OAAO,CAACT,IAAI,EAAE0B,MAAM;IACvCoD,yBAAyB,GAAG;MACxB,MAAM/C,OAAOA,CACTiD,IAAgD,EAClD;QACE,IAAID,UAAU,KAAKpD,SAAS,EAAE;UAC1BqD,IAAI,GAAG,MAAM,IAAAC,kBAAW,EAACD,IAAI,EAAGX,GAAG,IAAKU,UAAU,CAACV,GAAG,CAACa,gBAAgB,CAAC,CAAC;QAC7E;QAEA,IAAMC,aAA6D,GAAG,CAAC,CAAC;QACxE,IAAMC,MAAgB,GAAGJ,IAAI,CAACZ,GAAG,CAACC,GAAG,IAAI;UACrC,IAAMgB,KAAK,GAAIhB,GAAG,CAACa,gBAAgB,CAASjE,WAAW,CAAC;UACxDkE,aAAa,CAACE,KAAK,CAAC,GAAGhB,GAAG;UAC1B,OAAOgB,KAAK;QAChB,CAAC,CAAC;QACF,MAAM,IAAAxC,+BAAoB,EAACpC,OAAO,CAACb,SAAS,CAACkD,QAAQ,CAAC;QACtD,IAAIwC,SAAmC,GAAG,EAAE;;QAE5C;AAChB;AACA;
AACA;AACA;AACA;QACgB,MAAM,IAAAvC,yBAAc,EAACtC,OAAO,CAACb,SAAS,CAACkD,QAAQ,EAAE,MAAOE,GAAG,IAAK;UAC5DsC,SAAS,GAAG,EAAE,CAAC,CAAC;UAChB;AACpB;AACA;;UAEoB,IAAMC,QAAQ,GAAIC,GAAa,IAAK;YAChC,OAAO,IAAAnC,kBAAO,EACV,IAAAvB,gBAAK,EACDrB,OAAO,CAACb,SAAS,CAACE,UAAU,EAC5B,IAAAyC,gBAAK,EAAC,IAAAkD,qBAAU,EAAC,CAAC,EAAE,IAAI,EAAED,GAAG,CACjC,CACJ,CAAC;UACL,CAAC;UAED,IAAME,cAAc,GAAG,MAAM,IAAAC,gCAAe,EAAYP,MAAM,EAAEG,QAAQ,CAAC;UAEzE,IAAMK,YAA6B,GAAG,CAAC,CAAC;UACxCF,cAAc,CAAC3G,OAAO,CAACsF,GAAG,IAAI;YAC1B,IAAMwB,WAAW,GAAG,IAAAC,0CAAyB,EAAC9E,oBAAoB,EAAEqD,GAAG,CAACI,IAAI,CAAC,CAAC,CAAC;YAC/E,IAAMY,KAAK,GAAGhB,GAAG,CAAC3B,EAAE;YACnBmD,WAAW,CAAS5E,WAAW,CAAC,GAAGoE,KAAK;YACzCO,YAAY,CAACP,KAAK,CAAC,GAAGQ,WAAW;UACrC,CAAC,CAAC;;UAEF;AACpB;AACA;UACoB,IAAME,KAAK,GAAG,IAAAC,qBAAU,EAACvF,OAAO,CAACb,SAAS,CAACkD,QAAQ,CAAC;UACpD,IAAImD,QAAQ,GAAG,KAAK;UACpB,MAAM9C,OAAO,CAACC,GAAG,CACbvE,MAAM,CAACqH,OAAO,CAACf,aAAa,CAAC,CAACf,GAAG,CAAC,OAAO,CAACiB,KAAK,EAAEc,QAAQ,CAAC,KAAK;YAC3D,IAAMC,OAA8B,GAAGR,YAAY,CAACP,KAAK,CAAC;YAE1D,IACIe,OAAO,KAEH,CAACD,QAAQ,CAACE,kBAAkB,IAC5B,CAAC,MAAMvG,UAAU,CAACwG,eAAe,CAAC;cAC9BpB,gBAAgB,EAAEkB,OAAc;cAChCG,eAAe,EAAEJ,QAAQ,CAACE;YAC9B,CAAC,EAAE,4BAA4B,CAAC,EAAEG,OAAO,KAAK,KAAK,CACtD,EACH;cACE;cACAlB,SAAS,CAACtF,IAAI,CAACoG,OAAc,CAAC;YAClC,CAAC,MAAM;cACH;cACAH,QAAQ,GAAG,IAAI;cACf,IAAMQ,MAAM,GAAG,IAAAC,cAAG,EAACjG,OAAO,CAACb,SAAS,CAACE,UAAU,EAAEuF,KAAK,CAAC;cACvD,IAAMsB,YAAY,GAAG,IAAAC,gBAAS,EAACT,QAAQ,CAACjB,gBAAgB,CAAC;cACxDyB,YAAY,CAAS3F,oBAAoB,CAAC,GAAG,IAAAsB,0BAAe,EAAC,CAAC;cAC/D,IAAI,CAAC8D,OAAO,EAAE;gBACV;gBACAL,KAAK,CAACc,GAAG,CAACJ,MAAM,EAAE,IAAAK,gCAAe,EAAC7F,WAAW,EAAE0F,YAAY,CAAC,CAAC;cACjE,CAAC,MAAM;gBACH;gBACAZ,KAAK,CAACgB,MAAM,CAACN,MAAM,EAAE,IAAAK,gCAAe,EAAC7F,WAAW,EAAE0F,YAAY,CAAC,CAAC;cACpE;YACJ;UACJ,CAAC,CACL,CAAC;UAED,IAAIV,QAAQ,EAAE;YACV,MAAMF,KAAK,CAACiB,MAAM,CAAC,CAAC;UACxB;QACJ,CAAC,CAAC;QACF,MAAM,IAAAnE,+BAAoB,EAACpC,OAAO,CAACb,SAAS,CAACkD,QAAQ,CAAC;QACtD,OAAOwC,SAAS;MACpB,CAAC;MACDrD,SAAS,EAAExB,OAAO,CAACT,IAAI,CAACiC,SAAS;MACjC0C,QAAQ,EAAElE,OAA
O,CAACT,IAAI,CAAC2E;IAC3B,CAAC;EACL;EAGA,IAAMsC,gBAAgB,GAAG,IAAIvH,2BAA2B,CACpDe,OAAO,CAACb,SAAS,EACjBa,OAAO,CAACyG,qBAAqB,EAC7BpH,UAAU,EACVgB,yBAAyB,EACzBgE,yBAAyB,EACzBrE,OAAO,CAACR,IAAI,EACZQ,OAAO,CAACP,SAAS,EACjBO,OAAO,CAACN,SACZ,CAAC;;EAED;AACJ;AACA;EACI,IAAIM,OAAO,CAACR,IAAI,IAAIQ,OAAO,CAACV,IAAI,EAAE;IAC9B,IAAMoH,WAAW,GAAGF,gBAAgB,CAACG,KAAK,CAACC,IAAI,CAACJ,gBAAgB,CAAC;IACjE,IAAMK,YAAY,GAAGL,gBAAgB,CAACM,MAAM,CAACF,IAAI,CAACJ,gBAAgB,CAAC;IACnEA,gBAAgB,CAACG,KAAK,GAAG,MAAM;MAC3B,IAAMI,eAAe,GAAG,IAAA1F,gBAAK,EACzBD,SAAS,EACT,IAAAW,kBAAO,EAACxB,oBAAoB,EAAE,MAAM,CAAC,EACrC,IAAAyB,gBAAK,EAAC,CAAC,CACX,CAAC;MACD,IAAMgF,WAAW,GAAG,IAAAC,qBAAU,EAC1BF,eAAe,EACdG,cAAc,IAAK;QAChB;AACpB;AACA;AACA;AACA;QACoBV,gBAAgB,CAACW,MAAM,CAAC,CAAC;MAC7B,CAAC,EACAC,KAAK,IAAK;QACPZ,gBAAgB,CAACa,QAAQ,CAACD,KAAK,CAACE,IAAI,CAChC,IAAAxG,kBAAU,EAAC,WAAW,EAAE;UAAEsG,KAAK,EAAE,IAAAG,uBAAgB,EAACH,KAAK;QAAE,CAAC,CAC9D,CAAC;MACL,CACJ,CAAC;MACDZ,gBAAgB,CAACM,MAAM,GAAG,MAAM;QAC5BE,WAAW,CAAC,CAAC;QACb,OAAOH,YAAY,CAAC,CAAC;MACzB,CAAC;MACD,OAAOH,WAAW,CAAC,CAAC;IACxB,CAAC;EACL;EAEA,IAAAc,oCAA4B,EAACxH,OAAO,CAACM,iBAAiB,EAAEkG,gBAAgB,CAAC;EAEzE,OAAOA,gBAAgB;AAC3B","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-graphql/graphql-schema-from-rx-schema.js b/dist/cjs/plugins/replication-graphql/graphql-schema-from-rx-schema.js deleted file mode 100644 index c656896b151..00000000000 --- a/dist/cjs/plugins/replication-graphql/graphql-schema-from-rx-schema.js +++ /dev/null @@ -1,235 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.SPACING = void 0; -exports.fillUpOptionals = fillUpOptionals; -exports.graphQLSchemaFromRxSchema = graphQLSchemaFromRxSchema; -var _getGraphqlFromJsonschema = require("get-graphql-from-jsonschema"); -var _rxSchemaHelper = require("../../rx-schema-helper.js"); -var _index = require("../../plugins/utils/index.js"); -/** - * just type some common types - * to have better IDE autocomplete, - * 
all strings are allowed - */ - -// we use two spaces because get-graphql-from-jsonschema does also -var SPACING = exports.SPACING = ' '; - -/** - * Create a GraphQL schema from a given RxJsonSchema - */ -function graphQLSchemaFromRxSchema(input) { - var ret = { - asString: '', - queries: [], - mutations: [], - subscriptions: [], - inputs: [], - types: [] - }; - Object.entries(input).forEach(([collectionName, collectionSettings]) => { - collectionSettings = fillUpOptionals(collectionSettings); - var schema = collectionSettings.schema; - var prefixes = (0, _index.ensureNotFalsy)(collectionSettings.prefixes); - var ucCollectionName = (0, _index.ucfirst)(collectionName); - var collectionNameInput = (0, _index.ucfirst)(collectionName) + 'Input'; - - // input - var inputSchema = stripKeysFromSchema(schema, (0, _index.ensureNotFalsy)(collectionSettings.ignoreInputKeys)); - var inputGraphQL = (0, _getGraphqlFromJsonschema.getGraphqlSchemaFromJsonSchema)({ - rootName: collectionNameInput, - schema: inputSchema, - direction: 'input' - }); - var pushRowGraphQL = (0, _getGraphqlFromJsonschema.getGraphqlSchemaFromJsonSchema)({ - rootName: collectionNameInput + prefixes.pushRow, - schema: { - type: 'object', - properties: { - assumedMasterState: inputSchema, - newDocumentState: inputSchema - }, - required: ['newDocumentState'], - additionalProperties: false - }, - direction: 'input' - }); - var checkpointSchema = { - type: 'object', - properties: {}, - required: [], - additionalProperties: false - }; - collectionSettings.checkpointFields.forEach(key => { - var subSchema = schema.properties[key]; - checkpointSchema.properties[key] = subSchema; - checkpointSchema.required.push(key); - }); - var checkpointInputGraphQL = (0, _getGraphqlFromJsonschema.getGraphqlSchemaFromJsonSchema)({ - rootName: collectionNameInput + prefixes.checkpoint, - schema: checkpointSchema, - direction: 'input' - }); - ret.inputs = ret.inputs.concat(inputGraphQL.typeDefinitions.map(str => 
replaceTopLevelTypeName(str, collectionNameInput))).concat(pushRowGraphQL.typeDefinitions.map(str => replaceTopLevelTypeName(str, collectionNameInput + prefixes.pushRow))).concat(checkpointInputGraphQL.typeDefinitions.map(str => replaceTopLevelTypeName(str, collectionNameInput + prefixes.checkpoint))); - var headersSchema = { - type: 'object', - additionalProperties: false, - properties: {}, - required: [] - }; - (0, _index.ensureNotFalsy)(collectionSettings.headerFields).forEach(headerField => { - headersSchema.properties[headerField] = { - type: 'string' - }; - headersSchema.required.push(headerField); - }); - var headersInputName = collectionNameInput + prefixes.headers; - var headersInputGraphQL = (0, _getGraphqlFromJsonschema.getGraphqlSchemaFromJsonSchema)({ - rootName: headersInputName, - schema: headersSchema, - direction: 'input' - }); - if ((0, _index.ensureNotFalsy)(collectionSettings.headerFields).length > 0) { - ret.inputs = ret.inputs.concat(headersInputGraphQL.typeDefinitions.map(str => replaceTopLevelTypeName(str, headersInputName))); - } - - // output - var outputSchema = stripKeysFromSchema(schema, (0, _index.ensureNotFalsy)(collectionSettings.ignoreOutputKeys)); - var outputGraphQL = (0, _getGraphqlFromJsonschema.getGraphqlSchemaFromJsonSchema)({ - rootName: collectionName, - schema: outputSchema, - direction: 'output' - }); - var checkpointOutputGraphQL = (0, _getGraphqlFromJsonschema.getGraphqlSchemaFromJsonSchema)({ - rootName: ucCollectionName + prefixes.checkpoint, - schema: checkpointSchema, - direction: 'output' - }); - var pullBulkOutputGraphQL = (0, _getGraphqlFromJsonschema.getGraphqlSchemaFromJsonSchema)({ - rootName: ucCollectionName + prefixes.pullBulk, - schema: { - type: 'object', - properties: { - documents: { - type: 'array', - items: inputSchema - }, - checkpoint: checkpointSchema - }, - required: ['documents', 'checkpoint'], - additionalProperties: false - }, - direction: 'output' - }); - ret.types = 
ret.types.concat(outputGraphQL.typeDefinitions.map(str => replaceTopLevelTypeName(str, ucCollectionName))).concat(checkpointOutputGraphQL.typeDefinitions.map(str => replaceTopLevelTypeName(str, ucCollectionName + prefixes.checkpoint))).concat(pullBulkOutputGraphQL.typeDefinitions.map(str => replaceTopLevelTypeName(str, ucCollectionName + prefixes.pullBulk))); - - // query - var queryName = prefixes.pull + ucCollectionName; - var queryKeys = ['checkpoint: ' + collectionNameInput + prefixes.checkpoint, 'limit: Int!']; - var queryString = queryName + '(' + queryKeys.join(', ') + '): ' + ucCollectionName + prefixes.pullBulk + '!'; - ret.queries.push(SPACING + queryString); - - // mutation - var mutationName = prefixes.push + ucCollectionName; - var mutationString = mutationName + '(' + collectionName + prefixes.pushRow + ': [' + collectionNameInput + prefixes.pushRow + ']): [' + ucCollectionName + '!]!'; - ret.mutations.push(SPACING + mutationString); - - // subscription - var subscriptionHeaderInputString = ''; - if (collectionSettings.headerFields && collectionSettings.headerFields.length > 0) { - subscriptionHeaderInputString = '(headers: ' + headersInputName + ')'; - } - var subscriptionName = prefixes.stream + ucCollectionName; - var subscriptionString = subscriptionName + subscriptionHeaderInputString + ': ' + ucCollectionName + prefixes.pullBulk + '!'; - ret.subscriptions.push(SPACING + subscriptionString); - }); - - // build full string - var fullQueryString = 'type Query {\n' + ret.queries.join('\n') + '\n}\n'; - var fullMutationString = 'type Mutation {\n' + ret.mutations.join('\n') + '\n}\n'; - var fullSubscriptionString = 'type Subscription {\n' + ret.subscriptions.join('\n') + '\n}\n'; - var fullTypeString = ret.types.join('\n'); - var fullInputString = ret.inputs.join('\n'); - var fullSchemaString = '' + 'schema {\n' + SPACING + 'query: Query\n' + SPACING + 'mutation: Mutation\n' + SPACING + 'subscription: Subscription\n' + '}\n'; - ret.asString = '' + 
fullQueryString + '\n' + fullMutationString + '\n' + fullSubscriptionString + '\n' + fullTypeString + '\n' + fullInputString + '\n' + fullSchemaString; - return ret; -} -function fillUpOptionals(input) { - input = (0, _index.flatClone)(input); - var schema = (0, _rxSchemaHelper.fillWithDefaultSettings)(input.schema); - // strip internal attributes - Object.keys(schema.properties).forEach(key => { - if (key.startsWith('_')) { - delete schema.properties[key]; - } - }); - input.schema = schema; - - // add deleted field to schema - if (!input.deletedField) { - input.deletedField = '_deleted'; - } - schema.properties[input.deletedField] = { - type: 'boolean' - }; - schema.required.push(input.deletedField); - - // fill up prefixes - if (!input.prefixes) { - input.prefixes = {}; - } - var prefixes = input.prefixes; - if (!prefixes.push) { - prefixes.push = 'push'; - } - if (!prefixes.pushRow) { - prefixes.pushRow = 'PushRow'; - } - if (!prefixes.checkpoint) { - prefixes.checkpoint = 'Checkpoint'; - } - if (!prefixes.pull) { - prefixes.pull = 'pull'; - } - if (!prefixes.pullBulk) { - prefixes.pullBulk = 'PullBulk'; - } - if (!prefixes.stream) { - prefixes.stream = 'stream'; - } - if (!prefixes.headers) { - prefixes.headers = 'Headers'; - } - if (!input.headerFields) { - input.headerFields = []; - } - if (!input.withRevisions) { - input.withRevisions = false; - } - if (!input.ignoreInputKeys) { - input.ignoreInputKeys = []; - } - if (!input.ignoreOutputKeys) { - input.ignoreOutputKeys = []; - } - return input; -} -function stripKeysFromSchema(schema, strip) { - var cloned = (0, _index.clone)(schema); - strip.forEach(key => { - delete cloned.properties[key]; - }); - return cloned; -} - -/** - * get-graphql-from-jsonschema add a T0-suffix - * that we do not want for the top level type - */ -function replaceTopLevelTypeName(str, ucCollectionName) { - return str.replace(' ' + ucCollectionName + 'T0 ', ' ' + ucCollectionName + ' '); -} -//# 
sourceMappingURL=graphql-schema-from-rx-schema.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-graphql/graphql-schema-from-rx-schema.js.map b/dist/cjs/plugins/replication-graphql/graphql-schema-from-rx-schema.js.map deleted file mode 100644 index 82608ff145e..00000000000 --- a/dist/cjs/plugins/replication-graphql/graphql-schema-from-rx-schema.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"graphql-schema-from-rx-schema.js","names":["_getGraphqlFromJsonschema","require","_rxSchemaHelper","_index","SPACING","exports","graphQLSchemaFromRxSchema","input","ret","asString","queries","mutations","subscriptions","inputs","types","Object","entries","forEach","collectionName","collectionSettings","fillUpOptionals","schema","prefixes","ensureNotFalsy","ucCollectionName","ucfirst","collectionNameInput","inputSchema","stripKeysFromSchema","ignoreInputKeys","inputGraphQL","getGraphqlSchemaFromJsonSchema","rootName","direction","pushRowGraphQL","pushRow","type","properties","assumedMasterState","newDocumentState","required","additionalProperties","checkpointSchema","checkpointFields","key","subSchema","push","checkpointInputGraphQL","checkpoint","concat","typeDefinitions","map","str","replaceTopLevelTypeName","headersSchema","headerFields","headerField","headersInputName","headers","headersInputGraphQL","length","outputSchema","ignoreOutputKeys","outputGraphQL","checkpointOutputGraphQL","pullBulkOutputGraphQL","pullBulk","documents","items","queryName","pull","queryKeys","queryString","join","mutationName","mutationString","subscriptionHeaderInputString","subscriptionName","stream","subscriptionString","fullQueryString","fullMutationString","fullSubscriptionString","fullTypeString","fullInputString","fullSchemaString","flatClone","fillWithDefaultSettings","keys","startsWith","deletedField","withRevisions","strip","cloned","clone","replace"],"sources":["../../../../src/plugins/replication-graphql/graphql-schema-from-rx-schema.ts"],"sourcesConte
nt":["import { getGraphqlSchemaFromJsonSchema } from 'get-graphql-from-jsonschema';\n\nimport { fillWithDefaultSettings } from '../../rx-schema-helper.ts';\n\nimport type { RxJsonSchema } from '../../types/index.d.ts';\nimport { clone, ensureNotFalsy, flatClone, ucfirst } from '../../plugins/utils/index.ts';\n\nexport type Prefixes = {\n push?: string;\n pushRow?: string;\n checkpoint?: string;\n pull?: string;\n pullBulk?: string;\n stream?: string;\n headers?: string;\n};\n\n/**\n * just type some common types\n * to have better IDE autocomplete,\n * all strings are allowed\n */\nexport type GraphQLParamType = 'ID' | 'ID!' |\n 'String' | 'String!' |\n 'Int' | 'Int!' |\n 'Float' | 'Float!' |\n string;\n\nexport type GraphQLSchemaFromRxSchemaInputSingleCollection = {\n schema: RxJsonSchema;\n /**\n * These fields of the document data\n * will be used for the checkpoint.\n */\n checkpointFields: string[];\n ignoreInputKeys?: string[];\n ignoreOutputKeys?: string[];\n withRevisions?: boolean;\n prefixes?: Prefixes;\n headerFields?: string[];\n /**\n * Name of the boolean field that marks deleted documents.\n * [default='_deleted']\n */\n deletedField?: string;\n};\n\nexport type GraphQLSchemaFromRxSchemaInput = {\n [collectionName: string]: GraphQLSchemaFromRxSchemaInputSingleCollection;\n};\nexport type GraphQLSchemaFromRxSchemaOutput = {\n asString: string;\n queries: string[];\n mutations: string[];\n subscriptions: string[];\n inputs: string[];\n types: string[];\n};\n\n// we use two spaces because get-graphql-from-jsonschema does also\nexport const SPACING = ' ';\n\n/**\n * Create a GraphQL schema from a given RxJsonSchema\n */\nexport function graphQLSchemaFromRxSchema(\n input: GraphQLSchemaFromRxSchemaInput\n): GraphQLSchemaFromRxSchemaOutput {\n const ret: GraphQLSchemaFromRxSchemaOutput = {\n asString: '',\n queries: [],\n mutations: [],\n subscriptions: [],\n inputs: [],\n types: []\n };\n\n Object.entries(input).forEach(([collectionName, 
collectionSettings]) => {\n collectionSettings = fillUpOptionals(collectionSettings);\n\n const schema = collectionSettings.schema;\n const prefixes: Prefixes = ensureNotFalsy(collectionSettings.prefixes);\n const ucCollectionName = ucfirst(collectionName);\n const collectionNameInput = ucfirst(collectionName) + 'Input';\n\n // input\n const inputSchema = stripKeysFromSchema(schema, ensureNotFalsy(collectionSettings.ignoreInputKeys));\n\n const inputGraphQL = getGraphqlSchemaFromJsonSchema({\n rootName: collectionNameInput,\n schema: inputSchema as any,\n direction: 'input'\n });\n const pushRowGraphQL = getGraphqlSchemaFromJsonSchema({\n rootName: collectionNameInput + prefixes.pushRow,\n schema: {\n type: 'object',\n properties: {\n assumedMasterState: inputSchema as any,\n newDocumentState: inputSchema as any\n },\n required: ['newDocumentState'],\n additionalProperties: false\n },\n direction: 'input'\n });\n\n const checkpointSchema = {\n type: 'object',\n properties: {},\n required: [],\n additionalProperties: false\n } as any;\n collectionSettings.checkpointFields.forEach(key => {\n const subSchema: any = schema.properties[key];\n checkpointSchema.properties[key] = subSchema;\n checkpointSchema.required.push(key);\n });\n const checkpointInputGraphQL = getGraphqlSchemaFromJsonSchema({\n rootName: collectionNameInput + prefixes.checkpoint,\n schema: checkpointSchema as any,\n direction: 'input'\n });\n\n ret.inputs = ret.inputs.concat(\n inputGraphQL\n .typeDefinitions\n .map(str => replaceTopLevelTypeName(str, collectionNameInput))\n ).concat(\n pushRowGraphQL\n .typeDefinitions\n .map(str => replaceTopLevelTypeName(str, collectionNameInput + prefixes.pushRow))\n ).concat(\n checkpointInputGraphQL\n .typeDefinitions\n .map(str => replaceTopLevelTypeName(str, collectionNameInput + prefixes.checkpoint))\n );\n\n const headersSchema: any = {\n type: 'object',\n additionalProperties: false,\n properties: {},\n required: []\n };\n 
ensureNotFalsy(collectionSettings.headerFields).forEach(headerField => {\n headersSchema.properties[headerField] = {\n type: 'string'\n };\n headersSchema.required.push(headerField);\n });\n const headersInputName = collectionNameInput + prefixes.headers;\n const headersInputGraphQL = getGraphqlSchemaFromJsonSchema({\n rootName: headersInputName,\n schema: headersSchema,\n direction: 'input'\n });\n if (ensureNotFalsy(collectionSettings.headerFields).length > 0) {\n ret.inputs = ret.inputs.concat(\n headersInputGraphQL\n .typeDefinitions\n .map(str => replaceTopLevelTypeName(str, headersInputName))\n );\n }\n\n // output\n const outputSchema = stripKeysFromSchema(schema, ensureNotFalsy(collectionSettings.ignoreOutputKeys));\n const outputGraphQL = getGraphqlSchemaFromJsonSchema({\n rootName: collectionName,\n schema: outputSchema as any,\n direction: 'output'\n });\n const checkpointOutputGraphQL = getGraphqlSchemaFromJsonSchema({\n rootName: ucCollectionName + prefixes.checkpoint,\n schema: checkpointSchema as any,\n direction: 'output'\n });\n const pullBulkOutputGraphQL = getGraphqlSchemaFromJsonSchema({\n rootName: ucCollectionName + prefixes.pullBulk,\n schema: {\n type: 'object',\n properties: {\n documents: {\n type: 'array',\n items: inputSchema as any\n },\n checkpoint: checkpointSchema as any\n },\n required: ['documents', 'checkpoint'],\n additionalProperties: false\n },\n direction: 'output'\n });\n ret.types = ret.types.concat(\n outputGraphQL.typeDefinitions\n .map(str => replaceTopLevelTypeName(str, ucCollectionName))\n ).concat(\n checkpointOutputGraphQL.typeDefinitions\n .map(str => replaceTopLevelTypeName(str, ucCollectionName + prefixes.checkpoint))\n ).concat(\n pullBulkOutputGraphQL.typeDefinitions\n .map(str => replaceTopLevelTypeName(str, ucCollectionName + prefixes.pullBulk))\n );\n\n // query\n const queryName = prefixes.pull + ucCollectionName;\n const queryKeys = [\n 'checkpoint: ' + collectionNameInput + prefixes.checkpoint,\n 'limit: 
Int!'\n ];\n const queryString = queryName + '(' + queryKeys.join(', ') + '): ' + ucCollectionName + prefixes.pullBulk + '!';\n ret.queries.push(SPACING + queryString);\n\n // mutation\n const mutationName = prefixes.push + ucCollectionName;\n const mutationString = mutationName + '(' + collectionName + prefixes.pushRow + ': [' + collectionNameInput + prefixes.pushRow + ']): [' + ucCollectionName + '!]!';\n ret.mutations.push(SPACING + mutationString);\n\n // subscription\n let subscriptionHeaderInputString = '';\n if (collectionSettings.headerFields && collectionSettings.headerFields.length > 0) {\n subscriptionHeaderInputString = '(headers: ' + headersInputName + ')';\n }\n const subscriptionName = prefixes.stream + ucCollectionName;\n const subscriptionString = subscriptionName + subscriptionHeaderInputString + ': ' + ucCollectionName + prefixes.pullBulk + '!';\n ret.subscriptions.push(SPACING + subscriptionString);\n });\n\n // build full string\n const fullQueryString = 'type Query {\\n' + ret.queries.join('\\n') + '\\n}\\n';\n const fullMutationString = 'type Mutation {\\n' + ret.mutations.join('\\n') + '\\n}\\n';\n const fullSubscriptionString = 'type Subscription {\\n' + ret.subscriptions.join('\\n') + '\\n}\\n';\n\n const fullTypeString = ret.types.join('\\n');\n const fullInputString = ret.inputs.join('\\n');\n\n const fullSchemaString = '' +\n 'schema {\\n' +\n SPACING + 'query: Query\\n' +\n SPACING + 'mutation: Mutation\\n' +\n SPACING + 'subscription: Subscription\\n' +\n '}\\n';\n\n ret.asString = '' +\n fullQueryString + '\\n' +\n fullMutationString + '\\n' +\n fullSubscriptionString + '\\n' +\n fullTypeString + '\\n' +\n fullInputString + '\\n' +\n fullSchemaString;\n\n return ret;\n}\n\n\nexport function fillUpOptionals(\n input: GraphQLSchemaFromRxSchemaInputSingleCollection\n): GraphQLSchemaFromRxSchemaInputSingleCollection {\n input = flatClone(input);\n\n const schema = fillWithDefaultSettings(input.schema);\n // strip internal attributes\n 
Object.keys(schema.properties).forEach(key => {\n if (key.startsWith('_')) {\n delete schema.properties[key];\n }\n });\n input.schema = schema;\n\n // add deleted field to schema\n if (!input.deletedField) {\n input.deletedField = '_deleted';\n }\n schema.properties[input.deletedField] = {\n type: 'boolean'\n };\n (schema.required as string[]).push(input.deletedField);\n\n // fill up prefixes\n if (!input.prefixes) {\n input.prefixes = {} as any;\n }\n const prefixes: Prefixes = input.prefixes as any;\n if (!prefixes.push) {\n prefixes.push = 'push';\n }\n if (!prefixes.pushRow) {\n prefixes.pushRow = 'PushRow';\n }\n if (!prefixes.checkpoint) {\n prefixes.checkpoint = 'Checkpoint';\n }\n if (!prefixes.pull) {\n prefixes.pull = 'pull';\n }\n if (!prefixes.pullBulk) {\n prefixes.pullBulk = 'PullBulk';\n }\n if (!prefixes.stream) {\n prefixes.stream = 'stream';\n }\n if (!prefixes.headers) {\n prefixes.headers = 'Headers';\n }\n if (!input.headerFields) {\n input.headerFields = [];\n }\n\n\n if (!input.withRevisions) {\n input.withRevisions = false;\n }\n\n if (!input.ignoreInputKeys) {\n input.ignoreInputKeys = [];\n }\n if (!input.ignoreOutputKeys) {\n input.ignoreOutputKeys = [];\n }\n\n return input;\n}\n\nfunction stripKeysFromSchema(schema: RxJsonSchema, strip: string[]): RxJsonSchema> {\n const cloned: any = clone(schema);\n strip.forEach(key => {\n delete cloned.properties[key];\n });\n return cloned;\n}\n\n/**\n * get-graphql-from-jsonschema add a T0-suffix\n * that we do not want for the top level type\n */\nfunction replaceTopLevelTypeName(str: string, ucCollectionName: string): string {\n return str.replace(' ' + ucCollectionName + 'T0 ', ' ' + ucCollectionName + ' 
');\n}\n"],"mappings":";;;;;;;;AAAA,IAAAA,yBAAA,GAAAC,OAAA;AAEA,IAAAC,eAAA,GAAAD,OAAA;AAGA,IAAAE,MAAA,GAAAF,OAAA;AAYA;AACA;AACA;AACA;AACA;;AAsCA;AACO,IAAMG,OAAO,GAAAC,OAAA,CAAAD,OAAA,GAAG,IAAI;;AAE3B;AACA;AACA;AACO,SAASE,yBAAyBA,CACrCC,KAAqC,EACN;EAC/B,IAAMC,GAAoC,GAAG;IACzCC,QAAQ,EAAE,EAAE;IACZC,OAAO,EAAE,EAAE;IACXC,SAAS,EAAE,EAAE;IACbC,aAAa,EAAE,EAAE;IACjBC,MAAM,EAAE,EAAE;IACVC,KAAK,EAAE;EACX,CAAC;EAEDC,MAAM,CAACC,OAAO,CAACT,KAAK,CAAC,CAACU,OAAO,CAAC,CAAC,CAACC,cAAc,EAAEC,kBAAkB,CAAC,KAAK;IACpEA,kBAAkB,GAAGC,eAAe,CAACD,kBAAkB,CAAC;IAExD,IAAME,MAAM,GAAGF,kBAAkB,CAACE,MAAM;IACxC,IAAMC,QAAkB,GAAG,IAAAC,qBAAc,EAACJ,kBAAkB,CAACG,QAAQ,CAAC;IACtE,IAAME,gBAAgB,GAAG,IAAAC,cAAO,EAACP,cAAc,CAAC;IAChD,IAAMQ,mBAAmB,GAAG,IAAAD,cAAO,EAACP,cAAc,CAAC,GAAG,OAAO;;IAE7D;IACA,IAAMS,WAAW,GAAGC,mBAAmB,CAACP,MAAM,EAAE,IAAAE,qBAAc,EAACJ,kBAAkB,CAACU,eAAe,CAAC,CAAC;IAEnG,IAAMC,YAAY,GAAG,IAAAC,wDAA8B,EAAC;MAChDC,QAAQ,EAAEN,mBAAmB;MAC7BL,MAAM,EAAEM,WAAkB;MAC1BM,SAAS,EAAE;IACf,CAAC,CAAC;IACF,IAAMC,cAAc,GAAG,IAAAH,wDAA8B,EAAC;MAClDC,QAAQ,EAAEN,mBAAmB,GAAGJ,QAAQ,CAACa,OAAO;MAChDd,MAAM,EAAE;QACJe,IAAI,EAAE,QAAQ;QACdC,UAAU,EAAE;UACRC,kBAAkB,EAAEX,WAAkB;UACtCY,gBAAgB,EAAEZ;QACtB,CAAC;QACDa,QAAQ,EAAE,CAAC,kBAAkB,CAAC;QAC9BC,oBAAoB,EAAE;MAC1B,CAAC;MACDR,SAAS,EAAE;IACf,CAAC,CAAC;IAEF,IAAMS,gBAAgB,GAAG;MACrBN,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE,CAAC,CAAC;MACdG,QAAQ,EAAE,EAAE;MACZC,oBAAoB,EAAE;IAC1B,CAAQ;IACRtB,kBAAkB,CAACwB,gBAAgB,CAAC1B,OAAO,CAAC2B,GAAG,IAAI;MAC/C,IAAMC,SAAc,GAAGxB,MAAM,CAACgB,UAAU,CAACO,GAAG,CAAC;MAC7CF,gBAAgB,CAACL,UAAU,CAACO,GAAG,CAAC,GAAGC,SAAS;MAC5CH,gBAAgB,CAACF,QAAQ,CAACM,IAAI,CAACF,GAAG,CAAC;IACvC,CAAC,CAAC;IACF,IAAMG,sBAAsB,GAAG,IAAAhB,wDAA8B,EAAC;MAC1DC,QAAQ,EAAEN,mBAAmB,GAAGJ,QAAQ,CAAC0B,UAAU;MACnD3B,MAAM,EAAEqB,gBAAuB;MAC/BT,SAAS,EAAE;IACf,CAAC,CAAC;IAEFzB,GAAG,CAACK,MAAM,GAAGL,GAAG,CAACK,MAAM,CAACoC,MAAM,CAC1BnB,YAAY,CACPoB,eAAe,CACfC,GAAG,CAACC,GAAG,IAAIC,uBAAuB,CAACD,GAAG,EAAE1B,mBAAmB,CAAC,CACrE,CAAC,CAACuB,MAAM,CACJf,cAAc,CACTgB,eAAe,CACfC,GAAG,CAACC,GAAG,IAAIC,uBAAuB,CAA
CD,GAAG,EAAE1B,mBAAmB,GAAGJ,QAAQ,CAACa,OAAO,CAAC,CACxF,CAAC,CAACc,MAAM,CACJF,sBAAsB,CACjBG,eAAe,CACfC,GAAG,CAACC,GAAG,IAAIC,uBAAuB,CAACD,GAAG,EAAE1B,mBAAmB,GAAGJ,QAAQ,CAAC0B,UAAU,CAAC,CAC3F,CAAC;IAED,IAAMM,aAAkB,GAAG;MACvBlB,IAAI,EAAE,QAAQ;MACdK,oBAAoB,EAAE,KAAK;MAC3BJ,UAAU,EAAE,CAAC,CAAC;MACdG,QAAQ,EAAE;IACd,CAAC;IACD,IAAAjB,qBAAc,EAACJ,kBAAkB,CAACoC,YAAY,CAAC,CAACtC,OAAO,CAACuC,WAAW,IAAI;MACnEF,aAAa,CAACjB,UAAU,CAACmB,WAAW,CAAC,GAAG;QACpCpB,IAAI,EAAE;MACV,CAAC;MACDkB,aAAa,CAACd,QAAQ,CAACM,IAAI,CAACU,WAAW,CAAC;IAC5C,CAAC,CAAC;IACF,IAAMC,gBAAgB,GAAG/B,mBAAmB,GAAGJ,QAAQ,CAACoC,OAAO;IAC/D,IAAMC,mBAAmB,GAAG,IAAA5B,wDAA8B,EAAC;MACvDC,QAAQ,EAAEyB,gBAAgB;MAC1BpC,MAAM,EAAEiC,aAAa;MACrBrB,SAAS,EAAE;IACf,CAAC,CAAC;IACF,IAAI,IAAAV,qBAAc,EAACJ,kBAAkB,CAACoC,YAAY,CAAC,CAACK,MAAM,GAAG,CAAC,EAAE;MAC5DpD,GAAG,CAACK,MAAM,GAAGL,GAAG,CAACK,MAAM,CAACoC,MAAM,CAC1BU,mBAAmB,CACdT,eAAe,CACfC,GAAG,CAACC,GAAG,IAAIC,uBAAuB,CAACD,GAAG,EAAEK,gBAAgB,CAAC,CAClE,CAAC;IACL;;IAEA;IACA,IAAMI,YAAY,GAAGjC,mBAAmB,CAACP,MAAM,EAAE,IAAAE,qBAAc,EAACJ,kBAAkB,CAAC2C,gBAAgB,CAAC,CAAC;IACrG,IAAMC,aAAa,GAAG,IAAAhC,wDAA8B,EAAC;MACjDC,QAAQ,EAAEd,cAAc;MACxBG,MAAM,EAAEwC,YAAmB;MAC3B5B,SAAS,EAAE;IACf,CAAC,CAAC;IACF,IAAM+B,uBAAuB,GAAG,IAAAjC,wDAA8B,EAAC;MAC3DC,QAAQ,EAAER,gBAAgB,GAAGF,QAAQ,CAAC0B,UAAU;MAChD3B,MAAM,EAAEqB,gBAAuB;MAC/BT,SAAS,EAAE;IACf,CAAC,CAAC;IACF,IAAMgC,qBAAqB,GAAG,IAAAlC,wDAA8B,EAAC;MACzDC,QAAQ,EAAER,gBAAgB,GAAGF,QAAQ,CAAC4C,QAAQ;MAC9C7C,MAAM,EAAE;QACJe,IAAI,EAAE,QAAQ;QACdC,UAAU,EAAE;UACR8B,SAAS,EAAE;YACP/B,IAAI,EAAE,OAAO;YACbgC,KAAK,EAAEzC;UACX,CAAC;UACDqB,UAAU,EAAEN;QAChB,CAAC;QACDF,QAAQ,EAAE,CAAC,WAAW,EAAE,YAAY,CAAC;QACrCC,oBAAoB,EAAE;MAC1B,CAAC;MACDR,SAAS,EAAE;IACf,CAAC,CAAC;IACFzB,GAAG,CAACM,KAAK,GAAGN,GAAG,CAACM,KAAK,CAACmC,MAAM,CACxBc,aAAa,CAACb,eAAe,CACxBC,GAAG,CAACC,GAAG,IAAIC,uBAAuB,CAACD,GAAG,EAAE5B,gBAAgB,CAAC,CAClE,CAAC,CAACyB,MAAM,CACJe,uBAAuB,CAACd,eAAe,CAClCC,GAAG,CAACC,GAAG,IAAIC,uBAAuB,CAACD,GAAG,EAAE5B,gBAAgB,GAAGF,QAAQ,CAAC0B,UAAU,CAAC,CACxF,CAAC,CAACC,MAAM,CACJgB,qBAAqB,CAACf,eA
Ae,CAChCC,GAAG,CAACC,GAAG,IAAIC,uBAAuB,CAACD,GAAG,EAAE5B,gBAAgB,GAAGF,QAAQ,CAAC4C,QAAQ,CAAC,CACtF,CAAC;;IAED;IACA,IAAMG,SAAS,GAAG/C,QAAQ,CAACgD,IAAI,GAAG9C,gBAAgB;IAClD,IAAM+C,SAAS,GAAG,CACd,cAAc,GAAG7C,mBAAmB,GAAGJ,QAAQ,CAAC0B,UAAU,EAC1D,aAAa,CAChB;IACD,IAAMwB,WAAW,GAAGH,SAAS,GAAG,GAAG,GAAGE,SAAS,CAACE,IAAI,CAAC,IAAI,CAAC,GAAG,KAAK,GAAGjD,gBAAgB,GAAGF,QAAQ,CAAC4C,QAAQ,GAAG,GAAG;IAC/G1D,GAAG,CAACE,OAAO,CAACoC,IAAI,CAAC1C,OAAO,GAAGoE,WAAW,CAAC;;IAEvC;IACA,IAAME,YAAY,GAAGpD,QAAQ,CAACwB,IAAI,GAAGtB,gBAAgB;IACrD,IAAMmD,cAAc,GAAGD,YAAY,GAAG,GAAG,GAAGxD,cAAc,GAAGI,QAAQ,CAACa,OAAO,GAAG,KAAK,GAAGT,mBAAmB,GAAGJ,QAAQ,CAACa,OAAO,GAAG,OAAO,GAAGX,gBAAgB,GAAG,KAAK;IACnKhB,GAAG,CAACG,SAAS,CAACmC,IAAI,CAAC1C,OAAO,GAAGuE,cAAc,CAAC;;IAE5C;IACA,IAAIC,6BAA6B,GAAG,EAAE;IACtC,IAAIzD,kBAAkB,CAACoC,YAAY,IAAIpC,kBAAkB,CAACoC,YAAY,CAACK,MAAM,GAAG,CAAC,EAAE;MAC/EgB,6BAA6B,GAAG,YAAY,GAAGnB,gBAAgB,GAAG,GAAG;IACzE;IACA,IAAMoB,gBAAgB,GAAGvD,QAAQ,CAACwD,MAAM,GAAGtD,gBAAgB;IAC3D,IAAMuD,kBAAkB,GAAGF,gBAAgB,GAAGD,6BAA6B,GAAG,IAAI,GAAGpD,gBAAgB,GAAGF,QAAQ,CAAC4C,QAAQ,GAAG,GAAG;IAC/H1D,GAAG,CAACI,aAAa,CAACkC,IAAI,CAAC1C,OAAO,GAAG2E,kBAAkB,CAAC;EACxD,CAAC,CAAC;;EAEF;EACA,IAAMC,eAAe,GAAG,gBAAgB,GAAGxE,GAAG,CAACE,OAAO,CAAC+D,IAAI,CAAC,IAAI,CAAC,GAAG,OAAO;EAC3E,IAAMQ,kBAAkB,GAAG,mBAAmB,GAAGzE,GAAG,CAACG,SAAS,CAAC8D,IAAI,CAAC,IAAI,CAAC,GAAG,OAAO;EACnF,IAAMS,sBAAsB,GAAG,uBAAuB,GAAG1E,GAAG,CAACI,aAAa,CAAC6D,IAAI,CAAC,IAAI,CAAC,GAAG,OAAO;EAE/F,IAAMU,cAAc,GAAG3E,GAAG,CAACM,KAAK,CAAC2D,IAAI,CAAC,IAAI,CAAC;EAC3C,IAAMW,eAAe,GAAG5E,GAAG,CAACK,MAAM,CAAC4D,IAAI,CAAC,IAAI,CAAC;EAE7C,IAAMY,gBAAgB,GAAG,EAAE,GACvB,YAAY,GACZjF,OAAO,GAAG,gBAAgB,GAC1BA,OAAO,GAAG,sBAAsB,GAChCA,OAAO,GAAG,8BAA8B,GACxC,KAAK;EAETI,GAAG,CAACC,QAAQ,GAAG,EAAE,GACbuE,eAAe,GAAG,IAAI,GACtBC,kBAAkB,GAAG,IAAI,GACzBC,sBAAsB,GAAG,IAAI,GAC7BC,cAAc,GAAG,IAAI,GACrBC,eAAe,GAAG,IAAI,GACtBC,gBAAgB;EAEpB,OAAO7E,GAAG;AACd;AAGO,SAASY,eAAeA,CAC3Bb,KAAqD,EACP;EAC9CA,KAAK,GAAG,IAAA+E,gBAAS,EAAC/E,KAAK,CAAC;EAExB,IAAMc,MAAM,GAAG,IAAAkE,uCAAuB,EAAChF,KAAK,CAACc,MAAM,CA
AC;EACpD;EACAN,MAAM,CAACyE,IAAI,CAACnE,MAAM,CAACgB,UAAU,CAAC,CAACpB,OAAO,CAAC2B,GAAG,IAAI;IAC1C,IAAIA,GAAG,CAAC6C,UAAU,CAAC,GAAG,CAAC,EAAE;MACrB,OAAOpE,MAAM,CAACgB,UAAU,CAACO,GAAG,CAAC;IACjC;EACJ,CAAC,CAAC;EACFrC,KAAK,CAACc,MAAM,GAAGA,MAAM;;EAErB;EACA,IAAI,CAACd,KAAK,CAACmF,YAAY,EAAE;IACrBnF,KAAK,CAACmF,YAAY,GAAG,UAAU;EACnC;EACArE,MAAM,CAACgB,UAAU,CAAC9B,KAAK,CAACmF,YAAY,CAAC,GAAG;IACpCtD,IAAI,EAAE;EACV,CAAC;EACAf,MAAM,CAACmB,QAAQ,CAAcM,IAAI,CAACvC,KAAK,CAACmF,YAAY,CAAC;;EAEtD;EACA,IAAI,CAACnF,KAAK,CAACe,QAAQ,EAAE;IACjBf,KAAK,CAACe,QAAQ,GAAG,CAAC,CAAQ;EAC9B;EACA,IAAMA,QAAkB,GAAGf,KAAK,CAACe,QAAe;EAChD,IAAI,CAACA,QAAQ,CAACwB,IAAI,EAAE;IAChBxB,QAAQ,CAACwB,IAAI,GAAG,MAAM;EAC1B;EACA,IAAI,CAACxB,QAAQ,CAACa,OAAO,EAAE;IACnBb,QAAQ,CAACa,OAAO,GAAG,SAAS;EAChC;EACA,IAAI,CAACb,QAAQ,CAAC0B,UAAU,EAAE;IACtB1B,QAAQ,CAAC0B,UAAU,GAAG,YAAY;EACtC;EACA,IAAI,CAAC1B,QAAQ,CAACgD,IAAI,EAAE;IAChBhD,QAAQ,CAACgD,IAAI,GAAG,MAAM;EAC1B;EACA,IAAI,CAAChD,QAAQ,CAAC4C,QAAQ,EAAE;IACpB5C,QAAQ,CAAC4C,QAAQ,GAAG,UAAU;EAClC;EACA,IAAI,CAAC5C,QAAQ,CAACwD,MAAM,EAAE;IAClBxD,QAAQ,CAACwD,MAAM,GAAG,QAAQ;EAC9B;EACA,IAAI,CAACxD,QAAQ,CAACoC,OAAO,EAAE;IACnBpC,QAAQ,CAACoC,OAAO,GAAG,SAAS;EAChC;EACA,IAAI,CAACnD,KAAK,CAACgD,YAAY,EAAE;IACrBhD,KAAK,CAACgD,YAAY,GAAG,EAAE;EAC3B;EAGA,IAAI,CAAChD,KAAK,CAACoF,aAAa,EAAE;IACtBpF,KAAK,CAACoF,aAAa,GAAG,KAAK;EAC/B;EAEA,IAAI,CAACpF,KAAK,CAACsB,eAAe,EAAE;IACxBtB,KAAK,CAACsB,eAAe,GAAG,EAAE;EAC9B;EACA,IAAI,CAACtB,KAAK,CAACuD,gBAAgB,EAAE;IACzBvD,KAAK,CAACuD,gBAAgB,GAAG,EAAE;EAC/B;EAEA,OAAOvD,KAAK;AAChB;AAEA,SAASqB,mBAAmBA,CAAIP,MAAuB,EAAEuE,KAAe,EAA4B;EAChG,IAAMC,MAAW,GAAG,IAAAC,YAAK,EAACzE,MAAM,CAAC;EACjCuE,KAAK,CAAC3E,OAAO,CAAC2B,GAAG,IAAI;IACjB,OAAOiD,MAAM,CAACxD,UAAU,CAACO,GAAG,CAAC;EACjC,CAAC,CAAC;EACF,OAAOiD,MAAM;AACjB;;AAEA;AACA;AACA;AACA;AACA,SAASxC,uBAAuBA,CAACD,GAAW,EAAE5B,gBAAwB,EAAU;EAC5E,OAAO4B,GAAG,CAAC2C,OAAO,CAAC,GAAG,GAAGvE,gBAAgB,GAAG,KAAK,EAAE,GAAG,GAAGA,gBAAgB,GAAG,GAAG,CAAC;AACpF","ignoreList":[]} \ No newline at end of file diff --git 
a/dist/cjs/plugins/replication-graphql/graphql-websocket.js b/dist/cjs/plugins/replication-graphql/graphql-websocket.js deleted file mode 100644 index a575df1efe5..00000000000 --- a/dist/cjs/plugins/replication-graphql/graphql-websocket.js +++ /dev/null @@ -1,45 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.GRAPHQL_WEBSOCKET_BY_URL = void 0; -exports.getGraphQLWebSocket = getGraphQLWebSocket; -exports.removeGraphQLWebSocketRef = removeGraphQLWebSocketRef; -var _graphqlWs = require("graphql-ws"); -var _index = require("../../plugins/utils/index.js"); -var _isomorphicWs = _interopRequireDefault(require("isomorphic-ws")); -var { - WebSocket: IsomorphicWebSocket -} = _isomorphicWs.default; -var GRAPHQL_WEBSOCKET_BY_URL = exports.GRAPHQL_WEBSOCKET_BY_URL = new Map(); -function getGraphQLWebSocket(url, headers) { - var has = (0, _index.getFromMapOrCreate)(GRAPHQL_WEBSOCKET_BY_URL, url, () => { - var wsClient = (0, _graphqlWs.createClient)({ - url, - shouldRetry: () => true, - webSocketImpl: IsomorphicWebSocket, - connectionParams: headers ? 
{ - headers - } : undefined - }); - return { - url, - socket: wsClient, - refCount: 1 - }; - }, value => { - value.refCount = value.refCount + 1; - }); - return has.socket; -} -function removeGraphQLWebSocketRef(url) { - var obj = (0, _index.getFromMapOrThrow)(GRAPHQL_WEBSOCKET_BY_URL, url); - obj.refCount = obj.refCount - 1; - if (obj.refCount === 0) { - GRAPHQL_WEBSOCKET_BY_URL.delete(url); - obj.socket.dispose(); - } -} -//# sourceMappingURL=graphql-websocket.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-graphql/graphql-websocket.js.map b/dist/cjs/plugins/replication-graphql/graphql-websocket.js.map deleted file mode 100644 index 93cde0a5923..00000000000 --- a/dist/cjs/plugins/replication-graphql/graphql-websocket.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"graphql-websocket.js","names":["_graphqlWs","require","_index","_isomorphicWs","_interopRequireDefault","WebSocket","IsomorphicWebSocket","ws","GRAPHQL_WEBSOCKET_BY_URL","exports","Map","getGraphQLWebSocket","url","headers","has","getFromMapOrCreate","wsClient","createClient","shouldRetry","webSocketImpl","connectionParams","undefined","socket","refCount","value","removeGraphQLWebSocketRef","obj","getFromMapOrThrow","delete","dispose"],"sources":["../../../../src/plugins/replication-graphql/graphql-websocket.ts"],"sourcesContent":["import { Client, createClient } from 'graphql-ws';\nimport { getFromMapOrCreate, getFromMapOrThrow } from '../../plugins/utils/index.ts';\nimport ws from 'isomorphic-ws';\n\nconst { WebSocket: IsomorphicWebSocket } = ws;\n\nexport type WebsocketWithRefCount = {\n url: string;\n socket: Client;\n refCount: number;\n};\n\nexport const GRAPHQL_WEBSOCKET_BY_URL: Map = new Map();\n\n\nexport function getGraphQLWebSocket(\n url: string,\n headers?: { [k: string]: string; }\n): Client {\n\n const has = getFromMapOrCreate(\n GRAPHQL_WEBSOCKET_BY_URL,\n url,\n () => {\n const wsClient = createClient({\n url,\n shouldRetry: () => true,\n 
webSocketImpl: IsomorphicWebSocket,\n connectionParams: headers ? { headers } : undefined,\n });\n return {\n url,\n socket: wsClient,\n refCount: 1\n };\n },\n (value) => {\n value.refCount = value.refCount + 1;\n }\n );\n return has.socket;\n}\n\n\nexport function removeGraphQLWebSocketRef(\n url: string\n) {\n const obj = getFromMapOrThrow(GRAPHQL_WEBSOCKET_BY_URL, url);\n obj.refCount = obj.refCount - 1;\n if (obj.refCount === 0) {\n GRAPHQL_WEBSOCKET_BY_URL.delete(url);\n obj.socket.dispose();\n }\n}\n"],"mappings":";;;;;;;;;AAAA,IAAAA,UAAA,GAAAC,OAAA;AACA,IAAAC,MAAA,GAAAD,OAAA;AACA,IAAAE,aAAA,GAAAC,sBAAA,CAAAH,OAAA;AAEA,IAAM;EAAEI,SAAS,EAAEC;AAAoB,CAAC,GAAGC,qBAAE;AAQtC,IAAMC,wBAA4D,GAAAC,OAAA,CAAAD,wBAAA,GAAG,IAAIE,GAAG,CAAC,CAAC;AAG9E,SAASC,mBAAmBA,CAC/BC,GAAW,EACXC,OAAkC,EAC5B;EAEN,IAAMC,GAAG,GAAG,IAAAC,yBAAkB,EAC1BP,wBAAwB,EACxBI,GAAG,EACH,MAAM;IACF,IAAMI,QAAQ,GAAG,IAAAC,uBAAY,EAAC;MAC1BL,GAAG;MACHM,WAAW,EAAEA,CAAA,KAAM,IAAI;MACvBC,aAAa,EAAEb,mBAAmB;MAClCc,gBAAgB,EAAEP,OAAO,GAAG;QAAEA;MAAQ,CAAC,GAAGQ;IAC9C,CAAC,CAAC;IACF,OAAO;MACHT,GAAG;MACHU,MAAM,EAAEN,QAAQ;MAChBO,QAAQ,EAAE;IACd,CAAC;EACL,CAAC,EACAC,KAAK,IAAK;IACPA,KAAK,CAACD,QAAQ,GAAGC,KAAK,CAACD,QAAQ,GAAG,CAAC;EACvC,CACJ,CAAC;EACD,OAAOT,GAAG,CAACQ,MAAM;AACrB;AAGO,SAASG,yBAAyBA,CACrCb,GAAW,EACb;EACE,IAAMc,GAAG,GAAG,IAAAC,wBAAiB,EAACnB,wBAAwB,EAAEI,GAAG,CAAC;EAC5Dc,GAAG,CAACH,QAAQ,GAAGG,GAAG,CAACH,QAAQ,GAAG,CAAC;EAC/B,IAAIG,GAAG,CAACH,QAAQ,KAAK,CAAC,EAAE;IACpBf,wBAAwB,CAACoB,MAAM,CAAChB,GAAG,CAAC;IACpCc,GAAG,CAACJ,MAAM,CAACO,OAAO,CAAC,CAAC;EACxB;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-graphql/helper.js b/dist/cjs/plugins/replication-graphql/helper.js deleted file mode 100644 index 4deb1587601..00000000000 --- a/dist/cjs/plugins/replication-graphql/helper.js +++ /dev/null @@ -1,23 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.GRAPHQL_REPLICATION_PLUGIN_IDENTITY_PREFIX = void 0; 
-exports.graphQLRequest = graphQLRequest; -var _index = require("../../plugins/utils/index.js"); -var GRAPHQL_REPLICATION_PLUGIN_IDENTITY_PREFIX = exports.GRAPHQL_REPLICATION_PLUGIN_IDENTITY_PREFIX = 'graphql'; -function graphQLRequest(fetchRequest, httpUrl, clientState, queryParams) { - var headers = new Headers(clientState.headers || {}); - headers.append('Content-Type', 'application/json'); - var req = new Request((0, _index.ensureNotFalsy)(httpUrl), { - method: 'POST', - body: JSON.stringify(queryParams), - headers, - credentials: clientState.credentials - }); - return fetchRequest(req).then(res => res.json()).then(body => { - return body; - }); -} -//# sourceMappingURL=helper.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-graphql/helper.js.map b/dist/cjs/plugins/replication-graphql/helper.js.map deleted file mode 100644 index 2d635a24983..00000000000 --- a/dist/cjs/plugins/replication-graphql/helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"helper.js","names":["_index","require","GRAPHQL_REPLICATION_PLUGIN_IDENTITY_PREFIX","exports","graphQLRequest","fetchRequest","httpUrl","clientState","queryParams","headers","Headers","append","req","Request","ensureNotFalsy","method","body","JSON","stringify","credentials","then","res","json"],"sources":["../../../../src/plugins/replication-graphql/helper.ts"],"sourcesContent":["import type { RxGraphQLReplicationClientState, RxGraphQLReplicationQueryBuilderResponseObject } from '../../types/index.d.ts';\nimport { ensureNotFalsy } from '../../plugins/utils/index.ts';\n\nexport const GRAPHQL_REPLICATION_PLUGIN_IDENTITY_PREFIX = 'graphql';\n\nexport interface GraphQLError {\n message: string;\n locations: Array<{\n line: number;\n column: number;\n }>;\n path: string[];\n}\nexport type GraphQLErrors = Array;\n\n\n\nexport function graphQLRequest(\n fetchRequest: WindowOrWorkerGlobalScope['fetch'],\n httpUrl: string,\n clientState: RxGraphQLReplicationClientState,\n queryParams: 
RxGraphQLReplicationQueryBuilderResponseObject\n) {\n\n const headers = new Headers(clientState.headers || {});\n headers.append('Content-Type', 'application/json');\n\n const req = new Request(\n ensureNotFalsy(httpUrl),\n {\n method: 'POST',\n body: JSON.stringify(queryParams),\n headers,\n credentials: clientState.credentials,\n }\n );\n \n return fetchRequest(req)\n .then((res) => res.json())\n .then((body) => {\n return body;\n });\n}\n"],"mappings":";;;;;;;AACA,IAAAA,MAAA,GAAAC,OAAA;AAEO,IAAMC,0CAA0C,GAAAC,OAAA,CAAAD,0CAAA,GAAG,SAAS;AAc5D,SAASE,cAAcA,CAC1BC,YAAgD,EAChDC,OAAe,EACfC,WAA4C,EAC5CC,WAA2D,EAC7D;EAEE,IAAMC,OAAO,GAAG,IAAIC,OAAO,CAACH,WAAW,CAACE,OAAO,IAAI,CAAC,CAAC,CAAC;EACtDA,OAAO,CAACE,MAAM,CAAC,cAAc,EAAE,kBAAkB,CAAC;EAElD,IAAMC,GAAG,GAAG,IAAIC,OAAO,CACnB,IAAAC,qBAAc,EAACR,OAAO,CAAC,EACvB;IACIS,MAAM,EAAE,MAAM;IACdC,IAAI,EAAEC,IAAI,CAACC,SAAS,CAACV,WAAW,CAAC;IACjCC,OAAO;IACPU,WAAW,EAAEZ,WAAW,CAACY;EAC7B,CACJ,CAAC;EAED,OAAOd,YAAY,CAACO,GAAG,CAAC,CACnBQ,IAAI,CAAEC,GAAG,IAAKA,GAAG,CAACC,IAAI,CAAC,CAAC,CAAC,CACzBF,IAAI,CAAEJ,IAAI,IAAK;IACZ,OAAOA,IAAI;EACf,CAAC,CAAC;AACV","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-graphql/index.js b/dist/cjs/plugins/replication-graphql/index.js deleted file mode 100644 index 5b09ec77bff..00000000000 --- a/dist/cjs/plugins/replication-graphql/index.js +++ /dev/null @@ -1,217 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _exportNames = { - RxGraphQLReplicationState: true, - replicateGraphQL: true -}; -exports.RxGraphQLReplicationState = void 0; -exports.replicateGraphQL = replicateGraphQL; -var _inheritsLoose2 = _interopRequireDefault(require("@babel/runtime/helpers/inheritsLoose")); -var _index = require("../../plugins/utils/index.js"); -var _helper = require("./helper.js"); -Object.keys(_helper).forEach(function (key) { - if (key === "default" 
|| key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _helper[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _helper[key]; - } - }); -}); -var _index2 = require("../leader-election/index.js"); -var _index3 = require("../replication/index.js"); -var _index4 = require("../../index.js"); -var _graphqlWebsocket = require("./graphql-websocket.js"); -Object.keys(_graphqlWebsocket).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _graphqlWebsocket[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _graphqlWebsocket[key]; - } - }); -}); -var _rxjs = require("rxjs"); -var _graphqlSchemaFromRxSchema = require("./graphql-schema-from-rx-schema.js"); -Object.keys(_graphqlSchemaFromRxSchema).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _graphqlSchemaFromRxSchema[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _graphqlSchemaFromRxSchema[key]; - } - }); -}); -var _queryBuilderFromRxSchema = require("./query-builder-from-rx-schema.js"); -Object.keys(_queryBuilderFromRxSchema).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _queryBuilderFromRxSchema[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _queryBuilderFromRxSchema[key]; - } - }); -}); -/** - * this plugin adds the RxCollection.syncGraphQl()-function to rxdb - * you can use it to sync 
collections with a remote graphql endpoint. - */ -var RxGraphQLReplicationState = exports.RxGraphQLReplicationState = /*#__PURE__*/function (_RxReplicationState) { - function RxGraphQLReplicationState(url, clientState, replicationIdentifier, collection, deletedField, pull, push, live, retryTime, autoStart, customFetch) { - var _this; - _this = _RxReplicationState.call(this, replicationIdentifier, collection, deletedField, pull, push, live, retryTime, autoStart) || this; - _this.url = url; - _this.clientState = clientState; - _this.replicationIdentifier = replicationIdentifier; - _this.collection = collection; - _this.deletedField = deletedField; - _this.pull = pull; - _this.push = push; - _this.live = live; - _this.retryTime = retryTime; - _this.autoStart = autoStart; - _this.customFetch = customFetch; - return _this; - } - (0, _inheritsLoose2.default)(RxGraphQLReplicationState, _RxReplicationState); - var _proto = RxGraphQLReplicationState.prototype; - _proto.setHeaders = function setHeaders(headers) { - this.clientState.headers = (0, _index.flatClone)(headers); - }; - _proto.setCredentials = function setCredentials(credentials) { - this.clientState.credentials = credentials; - }; - _proto.graphQLRequest = function graphQLRequest(queryParams) { - return (0, _helper.graphQLRequest)(this.customFetch ?? fetch, (0, _index.ensureNotFalsy)(this.url.http), this.clientState, queryParams); - }; - return RxGraphQLReplicationState; -}(_index3.RxReplicationState); -function replicateGraphQL({ - collection, - url, - headers = {}, - credentials, - deletedField = '_deleted', - waitForLeadership = true, - pull, - push, - live = true, - fetch: customFetch, - retryTime = 1000 * 5, - // in ms - autoStart = true, - replicationIdentifier -}) { - (0, _index4.addRxPlugin)(_index2.RxDBLeaderElectionPlugin); - /** - * We use this object to store the GraphQL client - * so we can later swap out the client inside of the replication handlers. 
- */ - var mutateableClientState = { - headers, - credentials - }; - var pullStream$ = new _rxjs.Subject(); - var replicationPrimitivesPull; - if (pull) { - var pullBatchSize = pull.batchSize ? pull.batchSize : 20; - replicationPrimitivesPull = { - async handler(lastPulledCheckpoint) { - var pullGraphQL = await pull.queryBuilder(lastPulledCheckpoint, pullBatchSize); - var result = await graphqlReplicationState.graphQLRequest(pullGraphQL); - if (result.errors) { - throw result.errors; - } - var dataPath = pull.dataPath || ['data', Object.keys(result.data)[0]]; - var data = (0, _index.getProperty)(result, dataPath); - if (pull.responseModifier) { - data = await pull.responseModifier(data, 'handler', lastPulledCheckpoint); - } - var docsData = data.documents; - var newCheckpoint = data.checkpoint; - return { - documents: docsData, - checkpoint: newCheckpoint - }; - }, - batchSize: pull.batchSize, - modifier: pull.modifier, - stream$: pullStream$.asObservable() - }; - } - var replicationPrimitivesPush; - if (push) { - replicationPrimitivesPush = { - async handler(rows) { - var pushObj = await push.queryBuilder(rows); - var result = await graphqlReplicationState.graphQLRequest(pushObj); - if (result.errors) { - throw result.errors; - } - var dataPath = push.dataPath || Object.keys(result.data)[0]; - var data = (0, _index.getProperty)(result.data, dataPath); - if (push.responseModifier) { - data = await push.responseModifier(data); - } - return data; - }, - batchSize: push.batchSize, - modifier: push.modifier - }; - } - var graphqlReplicationState = new RxGraphQLReplicationState(url, mutateableClientState, replicationIdentifier, collection, deletedField, replicationPrimitivesPull, replicationPrimitivesPush, live, retryTime, autoStart, customFetch); - var mustUseSocket = url.ws && pull && pull.streamQueryBuilder && live; - var startBefore = graphqlReplicationState.start.bind(graphqlReplicationState); - graphqlReplicationState.start = () => { - if (mustUseSocket) { - var 
httpHeaders = pull.includeWsHeaders ? mutateableClientState.headers : undefined; - var wsClient = (0, _graphqlWebsocket.getGraphQLWebSocket)((0, _index.ensureNotFalsy)(url.ws), httpHeaders); - wsClient.on('connected', () => { - pullStream$.next('RESYNC'); - }); - var query = (0, _index.ensureNotFalsy)(pull.streamQueryBuilder)(mutateableClientState.headers); - wsClient.subscribe(query, { - next: async streamResponse => { - var firstField = Object.keys(streamResponse.data)[0]; - var data = streamResponse.data[firstField]; - if (pull.responseModifier) { - data = await pull.responseModifier(data, 'stream'); - } - pullStream$.next(data); - }, - error: error => { - pullStream$.error(error); - }, - complete: () => { - pullStream$.complete(); - } - }); - } - return startBefore(); - }; - var cancelBefore = graphqlReplicationState.cancel.bind(graphqlReplicationState); - graphqlReplicationState.cancel = () => { - if (!graphqlReplicationState.isStopped()) { - pullStream$.complete(); - if (mustUseSocket) { - (0, _graphqlWebsocket.removeGraphQLWebSocketRef)((0, _index.ensureNotFalsy)(url.ws)); - } - } - return cancelBefore(); - }; - (0, _index3.startReplicationOnLeaderShip)(waitForLeadership, graphqlReplicationState); - return graphqlReplicationState; -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-graphql/index.js.map b/dist/cjs/plugins/replication-graphql/index.js.map deleted file mode 100644 index e459d430754..00000000000 --- a/dist/cjs/plugins/replication-graphql/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","names":["_index","require","_helper","Object","keys","forEach","key","prototype","hasOwnProperty","call","_exportNames","exports","defineProperty","enumerable","get","_index2","_index3","_index4","_graphqlWebsocket","_rxjs","_graphqlSchemaFromRxSchema","_queryBuilderFromRxSchema","RxGraphQLReplicationState","_RxReplicationState","url","clientState","replicationIdentifier","collection","deletedField","pull","push","live","retryTime","autoStart","customFetch","_this","_inheritsLoose2","default","_proto","setHeaders","headers","flatClone","setCredentials","credentials","graphQLRequest","queryParams","fetch","ensureNotFalsy","http","RxReplicationState","replicateGraphQL","waitForLeadership","addRxPlugin","RxDBLeaderElectionPlugin","mutateableClientState","pullStream$","Subject","replicationPrimitivesPull","pullBatchSize","batchSize","handler","lastPulledCheckpoint","pullGraphQL","queryBuilder","result","graphqlReplicationState","errors","dataPath","data","getProperty","responseModifier","docsData","documents","newCheckpoint","checkpoint","modifier","stream$","asObservable","replicationPrimitivesPush","rows","pushObj","mustUseSocket","ws","streamQueryBuilder","startBefore","start","bind","httpHeaders","includeWsHeaders","undefined","wsClient","getGraphQLWebSocket","on","next","query","subscribe","streamResponse","firstField","error","complete","cancelBefore","cancel","isStopped","removeGraphQLWebSocketRef","startReplicationOnLeaderShip"],"sources":["../../../../src/plugins/replication-graphql/index.ts"],"sourcesContent":["/**\n * this plugin adds the RxCollection.syncGraphQl()-function to rxdb\n * you can use it to sync collections with a remote graphql endpoint.\n */\nimport {\n ensureNotFalsy,\n flatClone,\n getProperty\n} from '../../plugins/utils/index.ts';\n\nimport {\n graphQLRequest\n} from './helper.ts';\n\nimport { RxDBLeaderElectionPlugin } from '../leader-election/index.ts';\nimport type {\n RxCollection,\n 
ReplicationPullOptions,\n ReplicationPushOptions,\n RxReplicationWriteToMasterRow,\n GraphQLServerUrl,\n RxReplicationPullStreamItem,\n RxGraphQLReplicationQueryBuilderResponseObject,\n RxGraphQLReplicationClientState,\n ById\n} from '../../types/index.d.ts';\nimport {\n RxReplicationState,\n startReplicationOnLeaderShip\n} from '../replication/index.ts';\nimport {\n addRxPlugin,\n SyncOptionsGraphQL,\n WithDeleted\n} from '../../index.ts';\n\nimport {\n removeGraphQLWebSocketRef,\n getGraphQLWebSocket\n} from './graphql-websocket.ts';\nimport { Subject } from 'rxjs';\n\n\n\n\nexport class RxGraphQLReplicationState extends RxReplicationState {\n constructor(\n public readonly url: GraphQLServerUrl,\n public readonly clientState: RxGraphQLReplicationClientState,\n public readonly replicationIdentifier: string,\n public readonly collection: RxCollection,\n public readonly deletedField: string,\n public readonly pull?: ReplicationPullOptions,\n public readonly push?: ReplicationPushOptions,\n public readonly live?: boolean,\n public retryTime?: number,\n public autoStart?: boolean,\n public readonly customFetch?: WindowOrWorkerGlobalScope['fetch']\n ) {\n super(\n replicationIdentifier,\n collection,\n deletedField,\n pull,\n push,\n live,\n retryTime,\n autoStart\n );\n }\n\n setHeaders(headers: ById): void {\n this.clientState.headers = flatClone(headers);\n }\n\n setCredentials(credentials: RequestCredentials | undefined) {\n this.clientState.credentials = credentials;\n }\n\n graphQLRequest(\n queryParams: RxGraphQLReplicationQueryBuilderResponseObject\n ) {\n return graphQLRequest(\n this.customFetch ?? 
fetch,\n ensureNotFalsy(this.url.http),\n this.clientState,\n queryParams\n );\n }\n}\n\nexport function replicateGraphQL(\n {\n collection,\n url,\n headers = {},\n credentials,\n deletedField = '_deleted',\n waitForLeadership = true,\n pull,\n push,\n live = true,\n fetch: customFetch,\n retryTime = 1000 * 5, // in ms\n autoStart = true,\n replicationIdentifier\n }: SyncOptionsGraphQL\n): RxGraphQLReplicationState {\n addRxPlugin(RxDBLeaderElectionPlugin);\n /**\n * We use this object to store the GraphQL client\n * so we can later swap out the client inside of the replication handlers.\n */\n const mutateableClientState = {\n headers,\n credentials\n };\n\n\n const pullStream$: Subject> = new Subject();\n\n let replicationPrimitivesPull: ReplicationPullOptions | undefined;\n if (pull) {\n const pullBatchSize = pull.batchSize ? pull.batchSize : 20;\n replicationPrimitivesPull = {\n async handler(\n lastPulledCheckpoint: CheckpointType | undefined\n ) {\n const pullGraphQL = await pull.queryBuilder(lastPulledCheckpoint, pullBatchSize);\n const result = await graphqlReplicationState.graphQLRequest(pullGraphQL);\n if (result.errors) {\n throw result.errors;\n }\n const dataPath = pull.dataPath || ['data', Object.keys(result.data)[0]];\n let data: any = getProperty(result, dataPath);\n if (pull.responseModifier) {\n data = await pull.responseModifier(\n data,\n 'handler',\n lastPulledCheckpoint\n );\n }\n\n const docsData: WithDeleted[] = data.documents;\n const newCheckpoint = data.checkpoint;\n\n return {\n documents: docsData,\n checkpoint: newCheckpoint\n };\n },\n batchSize: pull.batchSize,\n modifier: pull.modifier,\n stream$: pullStream$.asObservable()\n };\n }\n let replicationPrimitivesPush: ReplicationPushOptions | undefined;\n if (push) {\n replicationPrimitivesPush = {\n async handler(\n rows: RxReplicationWriteToMasterRow[]\n ) {\n const pushObj = await push.queryBuilder(rows);\n const result = await graphqlReplicationState.graphQLRequest(pushObj);\n\n 
if (result.errors) {\n throw result.errors;\n }\n const dataPath = push.dataPath || Object.keys(result.data)[0];\n let data: any = getProperty(result.data, dataPath);\n\n if (push.responseModifier) {\n data = await push.responseModifier(\n data,\n );\n }\n\n return data;\n },\n batchSize: push.batchSize,\n modifier: push.modifier\n };\n }\n\n const graphqlReplicationState = new RxGraphQLReplicationState(\n url,\n mutateableClientState,\n replicationIdentifier,\n collection,\n deletedField,\n replicationPrimitivesPull,\n replicationPrimitivesPush,\n live,\n retryTime,\n autoStart,\n customFetch\n );\n\n const mustUseSocket = url.ws &&\n pull &&\n pull.streamQueryBuilder &&\n live;\n\n const startBefore = graphqlReplicationState.start.bind(graphqlReplicationState);\n graphqlReplicationState.start = () => {\n if (mustUseSocket) {\n const httpHeaders = pull.includeWsHeaders ? mutateableClientState.headers : undefined;\n const wsClient = getGraphQLWebSocket(ensureNotFalsy(url.ws), httpHeaders);\n\n wsClient.on('connected', () => {\n pullStream$.next('RESYNC');\n });\n\n const query: any = ensureNotFalsy(pull.streamQueryBuilder)(mutateableClientState.headers);\n\n wsClient.subscribe(\n query,\n {\n next: async (streamResponse: any) => {\n const firstField = Object.keys(streamResponse.data)[0];\n let data = streamResponse.data[firstField];\n if (pull.responseModifier) {\n data = await pull.responseModifier(\n data,\n 'stream'\n );\n }\n pullStream$.next(data);\n },\n error: (error: any) => {\n pullStream$.error(error);\n },\n complete: () => {\n pullStream$.complete();\n }\n });\n }\n return startBefore();\n };\n\n const cancelBefore = graphqlReplicationState.cancel.bind(graphqlReplicationState);\n graphqlReplicationState.cancel = () => {\n if (!graphqlReplicationState.isStopped()) {\n pullStream$.complete();\n if (mustUseSocket) {\n removeGraphQLWebSocketRef(ensureNotFalsy(url.ws));\n }\n }\n return cancelBefore();\n };\n\n startReplicationOnLeaderShip(waitForLeadership, 
graphqlReplicationState);\n return graphqlReplicationState;\n}\n\nexport * from './helper.ts';\nexport * from './graphql-schema-from-rx-schema.ts';\nexport * from './query-builder-from-rx-schema.ts';\nexport * from './graphql-websocket.ts';\n"],"mappings":";;;;;;;;;;;;;AAIA,IAAAA,MAAA,GAAAC,OAAA;AAMA,IAAAC,OAAA,GAAAD,OAAA;AAqPAE,MAAA,CAAAC,IAAA,CAAAF,OAAA,EAAAG,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAJ,OAAA,CAAAI,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAZ,OAAA,CAAAI,GAAA;IAAA;EAAA;AAAA;AAjPA,IAAAS,OAAA,GAAAd,OAAA;AAYA,IAAAe,OAAA,GAAAf,OAAA;AAIA,IAAAgB,OAAA,GAAAhB,OAAA;AAMA,IAAAiB,iBAAA,GAAAjB,OAAA;AA8NAE,MAAA,CAAAC,IAAA,CAAAc,iBAAA,EAAAb,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAY,iBAAA,CAAAZ,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAI,iBAAA,CAAAZ,GAAA;IAAA;EAAA;AAAA;AA1NA,IAAAa,KAAA,GAAAlB,OAAA;AAwNA,IAAAmB,0BAAA,GAAAnB,OAAA;AAAAE,MAAA,CAAAC,IAAA,CAAAgB,0BAAA,EAAAf,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAc,0BAAA,CAAAd,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAM,0BAAA,CAAAd,GAAA;IAAA;EAAA;AAAA;AACA,IAAAe,yBAAA,GAAApB,OAAA;AAAAE,MAAA,CAAAC,IAAA,CAAAiB,yBAAA,EAAAhB,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAe,yBAAA,CAAAf,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAO,yBAAA,CAAAf,GAAA;IAAA;EAAA;AAAA;AAjQA;AACA;AACA;AACA;AAHA,IA6CagB,yBAAyB,GAAAX,OAAA,CAAAW,yBAAA,0BAAAC,mBAAA;EAClC,
SAAAD,0BACoBE,GAAqB,EACrBC,WAA4C,EAC5CC,qBAA6B,EAC7BC,UAAmC,EACnCC,YAAoB,EACpBC,IAAwD,EACxDC,IAAwC,EACxCC,IAAc,EACvBC,SAAkB,EAClBC,SAAmB,EACVC,WAAgD,EAClE;IAAA,IAAAC,KAAA;IACEA,KAAA,GAAAZ,mBAAA,CAAAd,IAAA,OACIiB,qBAAqB,EACrBC,UAAU,EACVC,YAAY,EACZC,IAAI,EACJC,IAAI,EACJC,IAAI,EACJC,SAAS,EACTC,SACJ,CAAC;IAACE,KAAA,CArBcX,GAAqB,GAArBA,GAAqB;IAAAW,KAAA,CACrBV,WAA4C,GAA5CA,WAA4C;IAAAU,KAAA,CAC5CT,qBAA6B,GAA7BA,qBAA6B;IAAAS,KAAA,CAC7BR,UAAmC,GAAnCA,UAAmC;IAAAQ,KAAA,CACnCP,YAAoB,GAApBA,YAAoB;IAAAO,KAAA,CACpBN,IAAwD,GAAxDA,IAAwD;IAAAM,KAAA,CACxDL,IAAwC,GAAxCA,IAAwC;IAAAK,KAAA,CACxCJ,IAAc,GAAdA,IAAc;IAAAI,KAAA,CACvBH,SAAkB,GAAlBA,SAAkB;IAAAG,KAAA,CAClBF,SAAmB,GAAnBA,SAAmB;IAAAE,KAAA,CACVD,WAAgD,GAAhDA,WAAgD;IAAA,OAAAC,KAAA;EAYpE;EAAC,IAAAC,eAAA,CAAAC,OAAA,EAAAf,yBAAA,EAAAC,mBAAA;EAAA,IAAAe,MAAA,GAAAhB,yBAAA,CAAAf,SAAA;EAAA+B,MAAA,CAEDC,UAAU,GAAV,SAAAA,WAAWC,OAAqB,EAAQ;IACpC,IAAI,CAACf,WAAW,CAACe,OAAO,GAAG,IAAAC,gBAAS,EAACD,OAAO,CAAC;EACjD,CAAC;EAAAF,MAAA,CAEDI,cAAc,GAAd,SAAAA,eAAeC,WAA2C,EAAE;IACxD,IAAI,CAAClB,WAAW,CAACkB,WAAW,GAAGA,WAAW;EAC9C,CAAC;EAAAL,MAAA,CAEDM,cAAc,GAAd,SAAAA,eACIC,WAA2D,EAC7D;IACE,OAAO,IAAAD,sBAAc,EACjB,IAAI,CAACV,WAAW,IAAIY,KAAK,EACzB,IAAAC,qBAAc,EAAC,IAAI,CAACvB,GAAG,CAACwB,IAAI,CAAC,EAC7B,IAAI,CAACvB,WAAW,EAChBoB,WACJ,CAAC;EACL,CAAC;EAAA,OAAAvB,yBAAA;AAAA,EA3CqE2B,0BAAkB;AA8CrF,SAASC,gBAAgBA,CAC5B;EACIvB,UAAU;EACVH,GAAG;EACHgB,OAAO,GAAG,CAAC,CAAC;EACZG,WAAW;EACXf,YAAY,GAAG,UAAU;EACzBuB,iBAAiB,GAAG,IAAI;EACxBtB,IAAI;EACJC,IAAI;EACJC,IAAI,GAAG,IAAI;EACXe,KAAK,EAAEZ,WAAW;EAClBF,SAAS,GAAG,IAAI,GAAG,CAAC;EAAE;EACtBC,SAAS,GAAG,IAAI;EAChBP;AAC2C,CAAC,EACI;EACpD,IAAA0B,mBAAW,EAACC,gCAAwB,CAAC;EACrC;AACJ;AACA;AACA;EACI,IAAMC,qBAAqB,GAAG;IAC1Bd,OAAO;IACPG;EACJ,CAAC;EAGD,IAAMY,WAA4E,GAAG,IAAIC,aAAO,CAAC,CAAC;EAElG,IAAIC,yBAAwF;EAC5F,IAAI5B,IAAI,EAAE;IACN,IAAM6B,aAAa,GAAG7B,IAAI,CAAC8B,SAAS,GAAG9B,IAAI,CAAC8B,SAAS,GAAG,EAAE;IAC1DF,yBAAyB,GAAG;MACxB,MAAMG,OAAOA,CACTC,oBAAgD,EAClD;QACE,IAAMC,WAAW,GAAG,MAAMjC,IAAI,CAACkC,YAAY,CAACF,oBAAoB,EAAEH,aAAa,CAAC;QAChF,IAAM
M,MAAM,GAAG,MAAMC,uBAAuB,CAACrB,cAAc,CAACkB,WAAW,CAAC;QACxE,IAAIE,MAAM,CAACE,MAAM,EAAE;UACf,MAAMF,MAAM,CAACE,MAAM;QACvB;QACA,IAAMC,QAAQ,GAAGtC,IAAI,CAACsC,QAAQ,IAAI,CAAC,MAAM,EAAEhE,MAAM,CAACC,IAAI,CAAC4D,MAAM,CAACI,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;QACvE,IAAIA,IAAS,GAAG,IAAAC,kBAAW,EAACL,MAAM,EAAEG,QAAQ,CAAC;QAC7C,IAAItC,IAAI,CAACyC,gBAAgB,EAAE;UACvBF,IAAI,GAAG,MAAMvC,IAAI,CAACyC,gBAAgB,CAC9BF,IAAI,EACJ,SAAS,EACTP,oBACJ,CAAC;QACL;QAEA,IAAMU,QAAkC,GAAGH,IAAI,CAACI,SAAS;QACzD,IAAMC,aAAa,GAAGL,IAAI,CAACM,UAAU;QAErC,OAAO;UACHF,SAAS,EAAED,QAAQ;UACnBG,UAAU,EAAED;QAChB,CAAC;MACL,CAAC;MACDd,SAAS,EAAE9B,IAAI,CAAC8B,SAAS;MACzBgB,QAAQ,EAAE9C,IAAI,CAAC8C,QAAQ;MACvBC,OAAO,EAAErB,WAAW,CAACsB,YAAY,CAAC;IACtC,CAAC;EACL;EACA,IAAIC,yBAAwE;EAC5E,IAAIhD,IAAI,EAAE;IACNgD,yBAAyB,GAAG;MACxB,MAAMlB,OAAOA,CACTmB,IAAgD,EAClD;QACE,IAAMC,OAAO,GAAG,MAAMlD,IAAI,CAACiC,YAAY,CAACgB,IAAI,CAAC;QAC7C,IAAMf,MAAM,GAAG,MAAMC,uBAAuB,CAACrB,cAAc,CAACoC,OAAO,CAAC;QAEpE,IAAIhB,MAAM,CAACE,MAAM,EAAE;UACf,MAAMF,MAAM,CAACE,MAAM;QACvB;QACA,IAAMC,QAAQ,GAAGrC,IAAI,CAACqC,QAAQ,IAAIhE,MAAM,CAACC,IAAI,CAAC4D,MAAM,CAACI,IAAI,CAAC,CAAC,CAAC,CAAC;QAC7D,IAAIA,IAAS,GAAG,IAAAC,kBAAW,EAACL,MAAM,CAACI,IAAI,EAAED,QAAQ,CAAC;QAElD,IAAIrC,IAAI,CAACwC,gBAAgB,EAAE;UACvBF,IAAI,GAAG,MAAMtC,IAAI,CAACwC,gBAAgB,CAC9BF,IACJ,CAAC;QACL;QAEA,OAAOA,IAAI;MACf,CAAC;MACDT,SAAS,EAAE7B,IAAI,CAAC6B,SAAS;MACzBgB,QAAQ,EAAE7C,IAAI,CAAC6C;IACnB,CAAC;EACL;EAEA,IAAMV,uBAAuB,GAAG,IAAI3C,yBAAyB,CACzDE,GAAG,EACH8B,qBAAqB,EACrB5B,qBAAqB,EACrBC,UAAU,EACVC,YAAY,EACZ6B,yBAAyB,EACzBqB,yBAAyB,EACzB/C,IAAI,EACJC,SAAS,EACTC,SAAS,EACTC,WACJ,CAAC;EAED,IAAM+C,aAAa,GAAGzD,GAAG,CAAC0D,EAAE,IACxBrD,IAAI,IACJA,IAAI,CAACsD,kBAAkB,IACvBpD,IAAI;EAER,IAAMqD,WAAW,GAAGnB,uBAAuB,CAACoB,KAAK,CAACC,IAAI,CAACrB,uBAAuB,CAAC;EAC/EA,uBAAuB,CAACoB,KAAK,GAAG,MAAM;IAClC,IAAIJ,aAAa,EAAE;MACf,IAAMM,WAAW,GAAG1D,IAAI,CAAC2D,gBAAgB,GAAGlC,qBAAqB,CAACd,OAAO,GAAGiD,SAAS;MACrF,IAAMC,QAAQ,GAAG,IAAAC,qCAAmB,EAAC,IAAA5C,qBAAc,EAACvB,GAAG,CAAC0D,EAAE,CAAC,EAAEK,WAAW,CAAC;MAEzEG,QAAQ,CAACE,EAAE,CAAC,W
AAW,EAAE,MAAM;QAC3BrC,WAAW,CAACsC,IAAI,CAAC,QAAQ,CAAC;MAC9B,CAAC,CAAC;MAEF,IAAMC,KAAU,GAAG,IAAA/C,qBAAc,EAAClB,IAAI,CAACsD,kBAAkB,CAAC,CAAC7B,qBAAqB,CAACd,OAAO,CAAC;MAEzFkD,QAAQ,CAACK,SAAS,CACdD,KAAK,EACL;QACID,IAAI,EAAE,MAAOG,cAAmB,IAAK;UACjC,IAAMC,UAAU,GAAG9F,MAAM,CAACC,IAAI,CAAC4F,cAAc,CAAC5B,IAAI,CAAC,CAAC,CAAC,CAAC;UACtD,IAAIA,IAAI,GAAG4B,cAAc,CAAC5B,IAAI,CAAC6B,UAAU,CAAC;UAC1C,IAAIpE,IAAI,CAACyC,gBAAgB,EAAE;YACvBF,IAAI,GAAG,MAAMvC,IAAI,CAACyC,gBAAgB,CAC9BF,IAAI,EACJ,QACJ,CAAC;UACL;UACAb,WAAW,CAACsC,IAAI,CAACzB,IAAI,CAAC;QAC1B,CAAC;QACD8B,KAAK,EAAGA,KAAU,IAAK;UACnB3C,WAAW,CAAC2C,KAAK,CAACA,KAAK,CAAC;QAC5B,CAAC;QACDC,QAAQ,EAAEA,CAAA,KAAM;UACZ5C,WAAW,CAAC4C,QAAQ,CAAC,CAAC;QAC1B;MACJ,CAAC,CAAC;IACV;IACA,OAAOf,WAAW,CAAC,CAAC;EACxB,CAAC;EAED,IAAMgB,YAAY,GAAGnC,uBAAuB,CAACoC,MAAM,CAACf,IAAI,CAACrB,uBAAuB,CAAC;EACjFA,uBAAuB,CAACoC,MAAM,GAAG,MAAM;IACnC,IAAI,CAACpC,uBAAuB,CAACqC,SAAS,CAAC,CAAC,EAAE;MACtC/C,WAAW,CAAC4C,QAAQ,CAAC,CAAC;MACtB,IAAIlB,aAAa,EAAE;QACf,IAAAsB,2CAAyB,EAAC,IAAAxD,qBAAc,EAACvB,GAAG,CAAC0D,EAAE,CAAC,CAAC;MACrD;IACJ;IACA,OAAOkB,YAAY,CAAC,CAAC;EACzB,CAAC;EAED,IAAAI,oCAA4B,EAACrD,iBAAiB,EAAEc,uBAAuB,CAAC;EACxE,OAAOA,uBAAuB;AAClC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-graphql/query-builder-from-rx-schema.js b/dist/cjs/plugins/replication-graphql/query-builder-from-rx-schema.js deleted file mode 100644 index 42155d570b1..00000000000 --- a/dist/cjs/plugins/replication-graphql/query-builder-from-rx-schema.js +++ /dev/null @@ -1,134 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.pullQueryBuilderFromRxSchema = pullQueryBuilderFromRxSchema; -exports.pullStreamBuilderFromRxSchema = pullStreamBuilderFromRxSchema; -exports.pushQueryBuilderFromRxSchema = pushQueryBuilderFromRxSchema; -var _graphqlSchemaFromRxSchema = require("./graphql-schema-from-rx-schema.js"); -var _index = require("../../plugins/utils/index.js"); -function 
pullQueryBuilderFromRxSchema(collectionName, input) { - input = (0, _graphqlSchemaFromRxSchema.fillUpOptionals)(input); - var schema = input.schema; - var prefixes = input.prefixes; - var ucCollectionName = (0, _index.ucfirst)(collectionName); - var queryName = prefixes.pull + ucCollectionName; - var operationName = (0, _index.ucfirst)(queryName); - var outputFields = generateGQLOutputFields({ - schema, - ignoreOutputKeys: input.ignoreOutputKeys - }); - // outputFields.push(input.deletedField); - - var checkpointInputName = ucCollectionName + 'Input' + prefixes.checkpoint; - var builder = (checkpoint, limit) => { - var query = 'query ' + operationName + '($checkpoint: ' + checkpointInputName + ', $limit: Int!) {\n' + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + queryName + '(checkpoint: $checkpoint, limit: $limit) {\n' + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + 'documents {\n' + outputFields + '\n' + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + '}\n' + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + 'checkpoint {\n' + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + input.checkpointFields.join('\n' + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING) + '\n' + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + '}\n' + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + '}\n' + '}'; - return { - query, - operationName, - variables: { - checkpoint, - limit - } - }; - }; - return builder; -} -function pullStreamBuilderFromRxSchema(collectionName, input) { - input = (0, 
_graphqlSchemaFromRxSchema.fillUpOptionals)(input); - var schema = input.schema; - var prefixes = input.prefixes; - var ucCollectionName = (0, _index.ucfirst)(collectionName); - var queryName = prefixes.stream + ucCollectionName; - var outputFields = generateGQLOutputFields({ - schema, - ignoreOutputKeys: input.ignoreOutputKeys - }); - var headersName = ucCollectionName + 'Input' + prefixes.headers; - var query = 'subscription on' + (0, _index.ucfirst)((0, _index.ensureNotFalsy)(prefixes.stream)) + '($headers: ' + headersName + ') {\n' + _graphqlSchemaFromRxSchema.SPACING + queryName + '(headers: $headers) {\n' + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + 'documents {\n' + outputFields + '\n' + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + '}\n' + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + 'checkpoint {\n' + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + input.checkpointFields.join('\n' + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING) + '\n' + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + _graphqlSchemaFromRxSchema.SPACING + '}\n' + _graphqlSchemaFromRxSchema.SPACING + '}' + '}'; - var builder = headers => { - return { - query, - variables: { - headers - } - }; - }; - return builder; -} -function pushQueryBuilderFromRxSchema(collectionName, input) { - input = (0, _graphqlSchemaFromRxSchema.fillUpOptionals)(input); - var prefixes = input.prefixes; - var ucCollectionName = (0, _index.ucfirst)(collectionName); - var queryName = prefixes.push + ucCollectionName; - var operationName = (0, _index.ucfirst)(queryName); - var variableName = 
collectionName + prefixes.pushRow; - var returnFields = generateGQLOutputFields({ - schema: input.schema, - spaceCount: 2 - }); - var builder = pushRows => { - var query = '' + 'mutation ' + operationName + '($' + variableName + ': [' + ucCollectionName + 'Input' + prefixes.pushRow + '!]) {\n' + _graphqlSchemaFromRxSchema.SPACING + queryName + '(' + variableName + ': $' + variableName + ') {\n' + returnFields + '\n' + _graphqlSchemaFromRxSchema.SPACING + '}\n' + '}'; - var sendRows = []; - function transformPushDoc(doc) { - var sendDoc = {}; - Object.entries(doc).forEach(([k, v]) => { - if ( - // skip if in ignoreInputKeys list - !input.ignoreInputKeys.includes(k) && - // only use properties that are in the schema - input.schema.properties[k]) { - sendDoc[k] = v; - } - }); - return sendDoc; - } - pushRows.forEach(pushRow => { - var newRow = { - newDocumentState: transformPushDoc(pushRow.newDocumentState), - assumedMasterState: pushRow.assumedMasterState ? transformPushDoc(pushRow.assumedMasterState) : undefined - }; - sendRows.push(newRow); - }); - var variables = { - [variableName]: sendRows - }; - return { - query, - operationName, - variables - }; - }; - return builder; -} -function generateGQLOutputFields(options) { - var { - schema, - spaceCount = 4, - depth = 0, - ignoreOutputKeys = [] - } = options; - var outputFields = []; - var properties = schema.properties; - var NESTED_SPACING = _graphqlSchemaFromRxSchema.SPACING.repeat(depth); - var LINE_SPACING = _graphqlSchemaFromRxSchema.SPACING.repeat(spaceCount); - for (var key in properties) { - //only skipping top level keys that are in ignoreOutputKeys list - if (ignoreOutputKeys.includes(key)) { - continue; - } - var value = properties[key]; - if (value.type === "object") { - outputFields.push(LINE_SPACING + NESTED_SPACING + key + " {", generateGQLOutputFields({ - schema: value, - spaceCount, - depth: depth + 1 - }), LINE_SPACING + NESTED_SPACING + "}"); - } else { - outputFields.push(LINE_SPACING + 
NESTED_SPACING + key); - } - } - return outputFields.join('\n'); -} -//# sourceMappingURL=query-builder-from-rx-schema.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-graphql/query-builder-from-rx-schema.js.map b/dist/cjs/plugins/replication-graphql/query-builder-from-rx-schema.js.map deleted file mode 100644 index 09277953694..00000000000 --- a/dist/cjs/plugins/replication-graphql/query-builder-from-rx-schema.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"query-builder-from-rx-schema.js","names":["_graphqlSchemaFromRxSchema","require","_index","pullQueryBuilderFromRxSchema","collectionName","input","fillUpOptionals","schema","prefixes","ucCollectionName","ucfirst","queryName","pull","operationName","outputFields","generateGQLOutputFields","ignoreOutputKeys","checkpointInputName","checkpoint","builder","limit","query","SPACING","checkpointFields","join","variables","pullStreamBuilderFromRxSchema","stream","headersName","headers","ensureNotFalsy","pushQueryBuilderFromRxSchema","push","variableName","pushRow","returnFields","spaceCount","pushRows","sendRows","transformPushDoc","doc","sendDoc","Object","entries","forEach","k","v","ignoreInputKeys","includes","properties","newRow","newDocumentState","assumedMasterState","undefined","options","depth","NESTED_SPACING","repeat","LINE_SPACING","key","value","type"],"sources":["../../../../src/plugins/replication-graphql/query-builder-from-rx-schema.ts"],"sourcesContent":["import {\n GraphQLSchemaFromRxSchemaInputSingleCollection,\n fillUpOptionals,\n Prefixes,\n SPACING\n} from './graphql-schema-from-rx-schema.ts';\nimport { ensureNotFalsy, ucfirst } from '../../plugins/utils/index.ts';\nimport type {\n RxGraphQLReplicationPullQueryBuilder,\n RxGraphQLReplicationPullStreamQueryBuilder,\n RxGraphQLReplicationPushQueryBuilder,\n RxJsonSchema,\n TopLevelProperty,\n WithDeleted\n} from '../../types/index.d.ts';\n\nexport function pullQueryBuilderFromRxSchema(\n collectionName: string,\n 
input: GraphQLSchemaFromRxSchemaInputSingleCollection,\n): RxGraphQLReplicationPullQueryBuilder {\n input = fillUpOptionals(input);\n const schema = input.schema\n const prefixes: Prefixes = input.prefixes as any;\n\n const ucCollectionName = ucfirst(collectionName);\n const queryName = prefixes.pull + ucCollectionName;\n const operationName = ucfirst(queryName);\n\n const outputFields = generateGQLOutputFields({ schema, ignoreOutputKeys: input.ignoreOutputKeys })\n // outputFields.push(input.deletedField); \n \n const checkpointInputName = ucCollectionName + 'Input' + prefixes.checkpoint;\n const builder: RxGraphQLReplicationPullQueryBuilder = (checkpoint: any, limit: number) => {\n const query = 'query ' + operationName + '($checkpoint: ' + checkpointInputName + ', $limit: Int!) {\\n' +\n SPACING + SPACING + queryName + '(checkpoint: $checkpoint, limit: $limit) {\\n' +\n SPACING + SPACING + SPACING + 'documents {\\n' + \n outputFields + '\\n' +\n SPACING + SPACING + SPACING + '}\\n' +\n SPACING + SPACING + SPACING + 'checkpoint {\\n' +\n SPACING + SPACING + SPACING + SPACING + input.checkpointFields.join('\\n' + SPACING + SPACING + SPACING + SPACING) + '\\n' +\n SPACING + SPACING + SPACING + '}\\n' +\n SPACING + SPACING + '}\\n' +\n '}';\n return {\n query,\n operationName,\n variables: {\n checkpoint,\n limit\n }\n };\n };\n\n return builder;\n}\n\nexport function pullStreamBuilderFromRxSchema(\n collectionName: string,\n input: GraphQLSchemaFromRxSchemaInputSingleCollection,\n) {\n input = fillUpOptionals(input);\n const schema = input.schema;\n const prefixes: Prefixes = input.prefixes as any;\n\n const ucCollectionName = ucfirst(collectionName);\n const queryName = prefixes.stream + ucCollectionName;\n const outputFields = generateGQLOutputFields({ schema, ignoreOutputKeys: input.ignoreOutputKeys })\n\n const headersName = ucCollectionName + 'Input' + prefixes.headers;\n\n const query = 'subscription on' + ucfirst(ensureNotFalsy(prefixes.stream)) + 
'($headers: ' + headersName + ') {\\n' +\n SPACING + queryName + '(headers: $headers) {\\n' +\n SPACING + SPACING + SPACING + 'documents {\\n' +\n outputFields + '\\n' +\n SPACING + SPACING + SPACING + '}\\n' +\n SPACING + SPACING + SPACING + 'checkpoint {\\n' +\n SPACING + SPACING + SPACING + SPACING + input.checkpointFields.join('\\n' + SPACING + SPACING + SPACING + SPACING) + '\\n' +\n SPACING + SPACING + SPACING + '}\\n' +\n SPACING + '}' +\n '}';\n\n const builder: RxGraphQLReplicationPullStreamQueryBuilder = (headers: any) => {\n return {\n query,\n variables: {\n headers\n }\n };\n };\n return builder;\n}\n\n\nexport function pushQueryBuilderFromRxSchema(\n collectionName: string,\n input: GraphQLSchemaFromRxSchemaInputSingleCollection\n): RxGraphQLReplicationPushQueryBuilder {\n input = fillUpOptionals(input);\n const prefixes: Prefixes = input.prefixes as any;\n\n const ucCollectionName = ucfirst(collectionName);\n const queryName = prefixes.push + ucCollectionName;\n const operationName = ucfirst(queryName);\n\n const variableName = collectionName + prefixes.pushRow;\n const returnFields = generateGQLOutputFields({ schema: input.schema, spaceCount: 2 })\n \n const builder: RxGraphQLReplicationPushQueryBuilder = (pushRows) => {\n const query = '' +\n 'mutation ' + operationName + '($' + variableName + ': [' + ucCollectionName + 'Input' + prefixes.pushRow + '!]) {\\n' +\n SPACING + queryName + '(' + variableName + ': $' + variableName + ') {\\n' +\n returnFields + '\\n' +\n SPACING + '}\\n' +\n '}';\n\n const sendRows: typeof pushRows = [];\n function transformPushDoc(doc: WithDeleted) {\n const sendDoc: any = {};\n Object.entries(doc).forEach(([k, v]) => {\n if (\n // skip if in ignoreInputKeys list\n !(input.ignoreInputKeys as string[]).includes(k) &&\n // only use properties that are in the schema\n input.schema.properties[k]\n ) {\n sendDoc[k] = v;\n }\n });\n return sendDoc;\n }\n pushRows.forEach(pushRow => {\n const newRow: typeof pushRow = {\n 
newDocumentState: transformPushDoc(pushRow.newDocumentState),\n assumedMasterState: pushRow.assumedMasterState ? transformPushDoc(pushRow.assumedMasterState) : undefined\n };\n sendRows.push(newRow);\n });\n const variables = {\n [variableName]: sendRows\n };\n return {\n query,\n operationName,\n variables\n };\n };\n\n return builder;\n}\n\ntype GenerateGQLOutputFieldsOptions = {\n schema: RxJsonSchema | TopLevelProperty,\n spaceCount?: number,\n depth?: number\n ignoreOutputKeys?: string[]\n}\n\nfunction generateGQLOutputFields(options: GenerateGQLOutputFieldsOptions) {\n const { schema, spaceCount = 4, depth = 0, ignoreOutputKeys = [] } = options;\n\n const outputFields: string[] = [];\n const properties = schema.properties \n const NESTED_SPACING = SPACING.repeat(depth);\n const LINE_SPACING = SPACING.repeat(spaceCount);\n \n for (const key in properties) {\n //only skipping top level keys that are in ignoreOutputKeys list\n if (ignoreOutputKeys.includes(key)) {\n continue;\n }\n\n const value = properties[key];\n if (value.type === \"object\") {\n outputFields.push(\n LINE_SPACING + NESTED_SPACING + key + \" {\",\n generateGQLOutputFields({ schema: value, spaceCount, depth: depth + 1 }),\n LINE_SPACING + NESTED_SPACING + \"}\"\n );\n } else {\n outputFields.push(LINE_SPACING + NESTED_SPACING + key);\n }\n }\n \n return 
outputFields.join('\\n');\n}\n\n\n\n"],"mappings":";;;;;;;;AAAA,IAAAA,0BAAA,GAAAC,OAAA;AAMA,IAAAC,MAAA,GAAAD,OAAA;AAUO,SAASE,4BAA4BA,CACxCC,cAAsB,EACtBC,KAAqD,EACZ;EACzCA,KAAK,GAAG,IAAAC,0CAAe,EAACD,KAAK,CAAC;EAC9B,IAAME,MAAM,GAAGF,KAAK,CAACE,MAAM;EAC3B,IAAMC,QAAkB,GAAGH,KAAK,CAACG,QAAe;EAEhD,IAAMC,gBAAgB,GAAG,IAAAC,cAAO,EAACN,cAAc,CAAC;EAChD,IAAMO,SAAS,GAAGH,QAAQ,CAACI,IAAI,GAAGH,gBAAgB;EAClD,IAAMI,aAAa,GAAG,IAAAH,cAAO,EAACC,SAAS,CAAC;EAExC,IAAMG,YAAY,GAAGC,uBAAuB,CAAC;IAAER,MAAM;IAAES,gBAAgB,EAAEX,KAAK,CAACW;EAAiB,CAAC,CAAC;EAClG;;EAEA,IAAMC,mBAAmB,GAAGR,gBAAgB,GAAG,OAAO,GAAGD,QAAQ,CAACU,UAAU;EAC5E,IAAMC,OAAkD,GAAGA,CAACD,UAAe,EAAEE,KAAa,KAAK;IAC3F,IAAMC,KAAK,GAAG,QAAQ,GAAGR,aAAa,GAAG,gBAAgB,GAAGI,mBAAmB,GAAG,qBAAqB,GACnGK,kCAAO,GAAGA,kCAAO,GAAGX,SAAS,GAAG,8CAA8C,GAC9EW,kCAAO,GAAGA,kCAAO,GAAGA,kCAAO,GAAG,eAAe,GAC7CR,YAAY,GAAI,IAAI,GACpBQ,kCAAO,GAAGA,kCAAO,GAAGA,kCAAO,GAAG,KAAK,GACnCA,kCAAO,GAAGA,kCAAO,GAAGA,kCAAO,GAAG,gBAAgB,GAC9CA,kCAAO,GAAGA,kCAAO,GAAGA,kCAAO,GAAGA,kCAAO,GAAGjB,KAAK,CAACkB,gBAAgB,CAACC,IAAI,CAAC,IAAI,GAAGF,kCAAO,GAAGA,kCAAO,GAAGA,kCAAO,GAAGA,kCAAO,CAAC,GAAG,IAAI,GACxHA,kCAAO,GAAGA,kCAAO,GAAGA,kCAAO,GAAG,KAAK,GACnCA,kCAAO,GAAGA,kCAAO,GAAG,KAAK,GACzB,GAAG;IACP,OAAO;MACHD,KAAK;MACLR,aAAa;MACbY,SAAS,EAAE;QACPP,UAAU;QACVE;MACJ;IACJ,CAAC;EACL,CAAC;EAED,OAAOD,OAAO;AAClB;AAEO,SAASO,6BAA6BA,CACzCtB,cAAsB,EACtBC,KAAqD,EACvD;EACEA,KAAK,GAAG,IAAAC,0CAAe,EAACD,KAAK,CAAC;EAC9B,IAAME,MAAM,GAAGF,KAAK,CAACE,MAAM;EAC3B,IAAMC,QAAkB,GAAGH,KAAK,CAACG,QAAe;EAEhD,IAAMC,gBAAgB,GAAG,IAAAC,cAAO,EAACN,cAAc,CAAC;EAChD,IAAMO,SAAS,GAAGH,QAAQ,CAACmB,MAAM,GAAGlB,gBAAgB;EACpD,IAAMK,YAAY,GAAGC,uBAAuB,CAAC;IAAER,MAAM;IAAES,gBAAgB,EAAEX,KAAK,CAACW;EAAiB,CAAC,CAAC;EAElG,IAAMY,WAAW,GAAGnB,gBAAgB,GAAG,OAAO,GAAGD,QAAQ,CAACqB,OAAO;EAEjE,IAAMR,KAAK,GAAG,iBAAiB,GAAG,IAAAX,cAAO,EAAC,IAAAoB,qBAAc,EAACtB,QAAQ,CAACmB,MAAM,CAAC,CAAC,GAAG,aAAa,GAAGC,WAAW,GAAG,OAAO,GAC9GN,kCAAO,GAAGX,SAAS,GAAG,yBAAyB,GAC/CW,kCAAO,GAAGA,kCAAO,GAAGA,kCAAO,GAAG,eAAe,GAC7CR,YAAY,GAAI,IAAI,GACpBQ,kCAAO,GAAGA,kCAAO,G
AAGA,kCAAO,GAAG,KAAK,GACnCA,kCAAO,GAAGA,kCAAO,GAAGA,kCAAO,GAAG,gBAAgB,GAC9CA,kCAAO,GAAGA,kCAAO,GAAGA,kCAAO,GAAGA,kCAAO,GAAGjB,KAAK,CAACkB,gBAAgB,CAACC,IAAI,CAAC,IAAI,GAAGF,kCAAO,GAAGA,kCAAO,GAAGA,kCAAO,GAAGA,kCAAO,CAAC,GAAG,IAAI,GACxHA,kCAAO,GAAGA,kCAAO,GAAGA,kCAAO,GAAG,KAAK,GACnCA,kCAAO,GAAG,GAAG,GACb,GAAG;EAEP,IAAMH,OAAmD,GAAIU,OAAY,IAAK;IAC1E,OAAO;MACHR,KAAK;MACLI,SAAS,EAAE;QACPI;MACJ;IACJ,CAAC;EACL,CAAC;EACD,OAAOV,OAAO;AAClB;AAGO,SAASY,4BAA4BA,CACxC3B,cAAsB,EACtBC,KAAqD,EACjB;EACpCA,KAAK,GAAG,IAAAC,0CAAe,EAACD,KAAK,CAAC;EAC9B,IAAMG,QAAkB,GAAGH,KAAK,CAACG,QAAe;EAEhD,IAAMC,gBAAgB,GAAG,IAAAC,cAAO,EAACN,cAAc,CAAC;EAChD,IAAMO,SAAS,GAAGH,QAAQ,CAACwB,IAAI,GAAGvB,gBAAgB;EAClD,IAAMI,aAAa,GAAG,IAAAH,cAAO,EAACC,SAAS,CAAC;EAExC,IAAMsB,YAAY,GAAG7B,cAAc,GAAGI,QAAQ,CAAC0B,OAAO;EACtD,IAAMC,YAAY,GAAGpB,uBAAuB,CAAC;IAAER,MAAM,EAAEF,KAAK,CAACE,MAAM;IAAE6B,UAAU,EAAE;EAAE,CAAC,CAAC;EAErF,IAAMjB,OAA6C,GAAIkB,QAAQ,IAAK;IAChE,IAAMhB,KAAK,GAAG,EAAE,GACZ,WAAW,GAAGR,aAAa,GAAG,IAAI,GAAGoB,YAAY,GAAG,KAAK,GAAGxB,gBAAgB,GAAG,OAAO,GAAGD,QAAQ,CAAC0B,OAAO,GAAG,SAAS,GACrHZ,kCAAO,GAAGX,SAAS,GAAG,GAAG,GAAGsB,YAAY,GAAG,KAAK,GAAGA,YAAY,GAAG,OAAO,GACzEE,YAAY,GAAI,IAAI,GACpBb,kCAAO,GAAG,KAAK,GACf,GAAG;IAEP,IAAMgB,QAAyB,GAAG,EAAE;IACpC,SAASC,gBAAgBA,CAACC,GAAqB,EAAE;MAC7C,IAAMC,OAAY,GAAG,CAAC,CAAC;MACvBC,MAAM,CAACC,OAAO,CAACH,GAAG,CAAC,CAACI,OAAO,CAAC,CAAC,CAACC,CAAC,EAAEC,CAAC,CAAC,KAAK;QACpC;QACI;QACA,CAAEzC,KAAK,CAAC0C,eAAe,CAAcC,QAAQ,CAACH,CAAC,CAAC;QAChD;QACAxC,KAAK,CAACE,MAAM,CAAC0C,UAAU,CAACJ,CAAC,CAAC,EAC5B;UACEJ,OAAO,CAACI,CAAC,CAAC,GAAGC,CAAC;QAClB;MACJ,CAAC,CAAC;MACF,OAAOL,OAAO;IAClB;IACAJ,QAAQ,CAACO,OAAO,CAACV,OAAO,IAAI;MACxB,IAAMgB,MAAsB,GAAG;QAC3BC,gBAAgB,EAAEZ,gBAAgB,CAACL,OAAO,CAACiB,gBAAgB,CAAC;QAC5DC,kBAAkB,EAAElB,OAAO,CAACkB,kBAAkB,GAAGb,gBAAgB,CAACL,OAAO,CAACkB,kBAAkB,CAAC,GAAGC;MACpG,CAAC;MACDf,QAAQ,CAACN,IAAI,CAACkB,MAAM,CAAC;IACzB,CAAC,CAAC;IACF,IAAMzB,SAAS,GAAG;MACd,CAACQ,YAAY,GAAGK;IACpB,CAAC;IACD,OAAO;MACHjB,KAAK;MACLR,aAAa;MACbY;IACJ,CAAC;EACL,CAAC;EAED,OAAON,OAAO;AAClB;AASA,S
AASJ,uBAAuBA,CAACuC,OAAuC,EAAE;EACtE,IAAM;IAAE/C,MAAM;IAAE6B,UAAU,GAAG,CAAC;IAAEmB,KAAK,GAAG,CAAC;IAAEvC,gBAAgB,GAAG;EAAG,CAAC,GAAGsC,OAAO;EAE5E,IAAMxC,YAAsB,GAAG,EAAE;EACjC,IAAMmC,UAAU,GAAG1C,MAAM,CAAC0C,UAAU;EACpC,IAAMO,cAAc,GAAGlC,kCAAO,CAACmC,MAAM,CAACF,KAAK,CAAC;EAC5C,IAAMG,YAAY,GAAGpC,kCAAO,CAACmC,MAAM,CAACrB,UAAU,CAAC;EAE/C,KAAK,IAAMuB,GAAG,IAAIV,UAAU,EAAE;IAC1B;IACA,IAAIjC,gBAAgB,CAACgC,QAAQ,CAACW,GAAG,CAAC,EAAE;MAChC;IACJ;IAEA,IAAMC,KAAK,GAAGX,UAAU,CAACU,GAAG,CAAC;IAC7B,IAAIC,KAAK,CAACC,IAAI,KAAK,QAAQ,EAAE;MAC3B/C,YAAY,CAACkB,IAAI,CACf0B,YAAY,GAAGF,cAAc,GAAGG,GAAG,GAAG,IAAI,EAC1C5C,uBAAuB,CAAC;QAAER,MAAM,EAAEqD,KAAK;QAAExB,UAAU;QAAEmB,KAAK,EAAEA,KAAK,GAAG;MAAE,CAAC,CAAC,EACxEG,YAAY,GAAGF,cAAc,GAAG,GAClC,CAAC;IACH,CAAC,MAAM;MACH1C,YAAY,CAACkB,IAAI,CAAC0B,YAAY,GAAGF,cAAc,GAAGG,GAAG,CAAC;IAC1D;EACJ;EAEA,OAAO7C,YAAY,CAACU,IAAI,CAAC,IAAI,CAAC;AAClC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-nats/index.js b/dist/cjs/plugins/replication-nats/index.js deleted file mode 100644 index 75fc5bd48c6..00000000000 --- a/dist/cjs/plugins/replication-nats/index.js +++ /dev/null @@ -1,235 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _exportNames = { - RxNatsReplicationState: true, - replicateNats: true -}; -exports.RxNatsReplicationState = void 0; -exports.replicateNats = replicateNats; -var _inheritsLoose2 = _interopRequireDefault(require("@babel/runtime/helpers/inheritsLoose")); -var _index = require("../../plugins/utils/index.js"); -var _index2 = require("../leader-election/index.js"); -var _index3 = require("../replication/index.js"); -var _index4 = require("../../index.js"); -var _rxjs = require("rxjs"); -var _nats = require("nats"); -var _natsHelper = require("./nats-helper.js"); -Object.keys(_natsHelper).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - 
if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _natsHelper[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _natsHelper[key]; - } - }); -}); -var _replicationHelper = require("../replication/replication-helper.js"); -var _natsTypes = require("./nats-types.js"); -Object.keys(_natsTypes).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _natsTypes[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _natsTypes[key]; - } - }); -}); -var RxNatsReplicationState = exports.RxNatsReplicationState = /*#__PURE__*/function (_RxReplicationState) { - function RxNatsReplicationState(replicationIdentifier, collection, pull, push, live = true, retryTime = 1000 * 5, autoStart = true) { - var _this; - _this = _RxReplicationState.call(this, replicationIdentifier, collection, '_deleted', pull, push, live, retryTime, autoStart) || this; - _this.replicationIdentifier = replicationIdentifier; - _this.collection = collection; - _this.pull = pull; - _this.push = push; - _this.live = live; - _this.retryTime = retryTime; - _this.autoStart = autoStart; - return _this; - } - (0, _inheritsLoose2.default)(RxNatsReplicationState, _RxReplicationState); - return RxNatsReplicationState; -}(_index3.RxReplicationState); -function replicateNats(options) { - options.live = typeof options.live === 'undefined' ? true : options.live; - options.waitForLeadership = typeof options.waitForLeadership === 'undefined' ? 
true : options.waitForLeadership; - var collection = options.collection; - var primaryPath = collection.schema.primaryPath; - (0, _index4.addRxPlugin)(_index2.RxDBLeaderElectionPlugin); - var jc = (0, _nats.JSONCodec)(); - var connectionStatePromise = (async () => { - var nc = await (0, _nats.connect)(options.connection); - var jetstreamClient = nc.jetstream(); - var jsm = await nc.jetstreamManager(); - await jsm.streams.add({ - name: options.streamName, - subjects: [options.subjectPrefix + '.*'] - }); - var natsStream = await jetstreamClient.streams.get(options.streamName); - return { - nc, - jetstreamClient, - jsm, - natsStream - }; - })(); - var pullStream$ = new _rxjs.Subject(); - var replicationPrimitivesPull; - if (options.pull) { - replicationPrimitivesPull = { - async handler(lastPulledCheckpoint, batchSize) { - var cn = await connectionStatePromise; - var newCheckpoint = { - sequence: lastPulledCheckpoint ? lastPulledCheckpoint.sequence : 0 - }; - var consumer = await cn.natsStream.getConsumer({ - opt_start_seq: lastPulledCheckpoint ? 
lastPulledCheckpoint.sequence : 0, - deliver_policy: _nats.DeliverPolicy.LastPerSubject, - replay_policy: _nats.ReplayPolicy.Instant - }); - var fetchedMessages = await consumer.fetch({ - max_messages: batchSize - }); - await fetchedMessages.signal; - await fetchedMessages.close(); - var useMessages = []; - for await (var m of fetchedMessages) { - useMessages.push(m.json()); - newCheckpoint.sequence = m.seq; - m.ack(); - } - return { - documents: useMessages, - checkpoint: newCheckpoint - }; - }, - batchSize: (0, _index.ensureNotFalsy)(options.pull).batchSize, - modifier: (0, _index.ensureNotFalsy)(options.pull).modifier, - stream$: pullStream$.asObservable() - }; - } - var replicationPrimitivesPush; - if (options.push) { - replicationPrimitivesPush = { - async handler(rows) { - var cn = await connectionStatePromise; - var conflicts = []; - await Promise.all(rows.map(async writeRow => { - var docId = writeRow.newDocumentState[primaryPath]; - - /** - * first get the current state of the documents from the server - * so that we have the sequence number for conflict detection. - */ - var remoteDocState; - try { - remoteDocState = await (0, _natsHelper.getNatsServerDocumentState)(cn.natsStream, options.subjectPrefix, docId); - } catch (err) { - if (!err.message.includes('no message found')) { - throw err; - } - } - if (remoteDocState && (!writeRow.assumedMasterState || (await collection.conflictHandler({ - newDocumentState: remoteDocState.json(), - realMasterState: writeRow.assumedMasterState - }, 'replication-firestore-push')).isEqual === false)) { - // conflict - conflicts.push(remoteDocState.json()); - } else { - // no conflict (yet) - var pushDone = false; - while (!pushDone) { - try { - await cn.jetstreamClient.publish(options.subjectPrefix + '.' + docId, jc.encode(writeRow.newDocumentState), { - expect: remoteDocState ? 
{ - streamName: options.streamName, - lastSubjectSequence: remoteDocState.seq - } : undefined - }); - pushDone = true; - } catch (err) { - if (err.message.includes('wrong last sequence')) { - // A write happened while we are doing our write -> handle conflict - var newServerState = await (0, _natsHelper.getNatsServerDocumentState)(cn.natsStream, options.subjectPrefix, docId); - conflicts.push((0, _index.ensureNotFalsy)(newServerState).json()); - pushDone = true; - } else { - replicationState.subjects.error.next((0, _index4.newRxError)('RC_STREAM', { - document: writeRow.newDocumentState, - error: (0, _index.errorToPlainJson)(err) - })); - - // -> retry after wait - await (0, _replicationHelper.awaitRetry)(collection, replicationState.retryTime); - } - } - } - } - })); - return conflicts; - }, - batchSize: options.push.batchSize, - modifier: options.push.modifier - }; - } - var replicationState = new RxNatsReplicationState(options.replicationIdentifier, collection, replicationPrimitivesPull, replicationPrimitivesPush, options.live, options.retryTime, options.autoStart); - - /** - * Use long polling to get live changes for the pull.stream$ - */ - if (options.live && options.pull) { - var startBefore = replicationState.start.bind(replicationState); - var cancelBefore = replicationState.cancel.bind(replicationState); - replicationState.start = async () => { - var cn = await connectionStatePromise; - - /** - * First get the last sequence so that we can - * laster only fetch 'newer' messages. 
- */ - var lastSeq = 0; - try { - var lastDocState = await cn.natsStream.getMessage({ - last_by_subj: options.subjectPrefix + '.*' - }); - lastSeq = lastDocState.seq; - } catch (err) { - if (!err.message.includes('no message found')) { - throw err; - } - } - var consumer = await cn.natsStream.getConsumer({ - opt_start_seq: lastSeq - }); - var newMessages = await consumer.consume(); - (async () => { - for await (var m of newMessages) { - var docData = m.json(); - pullStream$.next({ - documents: [docData], - checkpoint: { - sequence: m.seq - } - }); - m.ack(); - } - })(); - replicationState.cancel = () => { - newMessages.close(); - return cancelBefore(); - }; - return startBefore(); - }; - } - (0, _index3.startReplicationOnLeaderShip)(options.waitForLeadership, replicationState); - return replicationState; -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-nats/index.js.map b/dist/cjs/plugins/replication-nats/index.js.map deleted file mode 100644 index 6cd3e1b19a6..00000000000 --- a/dist/cjs/plugins/replication-nats/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","names":["_index","require","_index2","_index3","_index4","_rxjs","_nats","_natsHelper","Object","keys","forEach","key","prototype","hasOwnProperty","call","_exportNames","exports","defineProperty","enumerable","get","_replicationHelper","_natsTypes","RxNatsReplicationState","_RxReplicationState","replicationIdentifier","collection","pull","push","live","retryTime","autoStart","_this","_inheritsLoose2","default","RxReplicationState","replicateNats","options","waitForLeadership","primaryPath","schema","addRxPlugin","RxDBLeaderElectionPlugin","jc","JSONCodec","connectionStatePromise","nc","connect","connection","jetstreamClient","jetstream","jsm","jetstreamManager","streams","add","name","streamName","subjects","subjectPrefix","natsStream","pullStream$","Subject","replicationPrimitivesPull","handler","lastPulledCheckpoint","batchSize","cn","newCheckpoint","sequence","consumer","getConsumer","opt_start_seq","deliver_policy","DeliverPolicy","LastPerSubject","replay_policy","ReplayPolicy","Instant","fetchedMessages","fetch","max_messages","signal","close","useMessages","m","json","seq","ack","documents","checkpoint","ensureNotFalsy","modifier","stream$","asObservable","replicationPrimitivesPush","rows","conflicts","Promise","all","map","writeRow","docId","newDocumentState","remoteDocState","getNatsServerDocumentState","err","message","includes","assumedMasterState","conflictHandler","realMasterState","isEqual","pushDone","publish","encode","expect","lastSubjectSequence","undefined","newServerState","replicationState","error","next","newRxError","document","errorToPlainJson","awaitRetry","startBefore","start","bind","cancelBefore","cancel","lastSeq","lastDocState","getMessage","last_by_subj","newMessages","consume","docData","startReplicationOnLeaderShip"],"sources":["../../../../src/plugins/replication-nats/index.ts"],"sourcesContent":["import {\n ensureNotFalsy,\n errorToPlainJson\n} from '../../plugins/utils/index.ts';\n\n\nimport { 
RxDBLeaderElectionPlugin } from '../leader-election/index.ts';\nimport type {\n RxCollection,\n ReplicationPullOptions,\n ReplicationPushOptions,\n RxReplicationWriteToMasterRow,\n RxReplicationPullStreamItem\n} from '../../types/index.d.ts';\nimport {\n RxReplicationState,\n startReplicationOnLeaderShip\n} from '../replication/index.ts';\nimport {\n addRxPlugin,\n newRxError,\n WithDeleted\n} from '../../index.ts';\n\nimport { Subject } from 'rxjs';\nimport type {\n NatsCheckpointType,\n NatsSyncOptions\n} from './nats-types.ts';\nimport { connect, DeliverPolicy, JSONCodec, ReplayPolicy } from 'nats';\nimport { getNatsServerDocumentState } from './nats-helper.ts';\nimport { awaitRetry } from '../replication/replication-helper.ts';\n\nexport * from './nats-types.ts';\nexport * from './nats-helper.ts';\n\n\nexport class RxNatsReplicationState extends RxReplicationState {\n constructor(\n public readonly replicationIdentifier: string,\n public readonly collection: RxCollection,\n public readonly pull?: ReplicationPullOptions,\n public readonly push?: ReplicationPushOptions,\n public readonly live: boolean = true,\n public retryTime: number = 1000 * 5,\n public autoStart: boolean = true\n ) {\n super(\n replicationIdentifier,\n collection,\n '_deleted',\n pull,\n push,\n live,\n retryTime,\n autoStart\n );\n }\n}\n\n\n\nexport function replicateNats(\n options: NatsSyncOptions\n): RxNatsReplicationState {\n options.live = typeof options.live === 'undefined' ? true : options.live;\n options.waitForLeadership = typeof options.waitForLeadership === 'undefined' ? 
true : options.waitForLeadership;\n\n const collection = options.collection;\n const primaryPath = collection.schema.primaryPath;\n addRxPlugin(RxDBLeaderElectionPlugin);\n\n const jc = JSONCodec();\n\n\n const connectionStatePromise = (async () => {\n const nc = await connect(options.connection);\n const jetstreamClient = nc.jetstream();\n const jsm = await nc.jetstreamManager();\n await jsm.streams.add({\n name: options.streamName, subjects: [\n options.subjectPrefix + '.*'\n ]\n });\n const natsStream = await jetstreamClient.streams.get(options.streamName);\n return {\n nc,\n jetstreamClient,\n jsm,\n natsStream\n };\n })();\n const pullStream$: Subject> = new Subject();\n\n let replicationPrimitivesPull: ReplicationPullOptions | undefined;\n if (options.pull) {\n replicationPrimitivesPull = {\n async handler(\n lastPulledCheckpoint: NatsCheckpointType | undefined,\n batchSize: number\n ) {\n const cn = await connectionStatePromise;\n const newCheckpoint: NatsCheckpointType = {\n sequence: lastPulledCheckpoint ? lastPulledCheckpoint.sequence : 0\n };\n const consumer = await cn.natsStream.getConsumer({\n opt_start_seq: lastPulledCheckpoint ? 
lastPulledCheckpoint.sequence : 0,\n deliver_policy: DeliverPolicy.LastPerSubject,\n replay_policy: ReplayPolicy.Instant\n });\n\n const fetchedMessages = await consumer.fetch({\n max_messages: batchSize\n });\n await (fetchedMessages as any).signal;\n await fetchedMessages.close();\n\n const useMessages: WithDeleted[] = [];\n for await (const m of fetchedMessages) {\n useMessages.push(m.json());\n newCheckpoint.sequence = m.seq;\n m.ack();\n }\n return {\n documents: useMessages,\n checkpoint: newCheckpoint\n };\n },\n batchSize: ensureNotFalsy(options.pull).batchSize,\n modifier: ensureNotFalsy(options.pull).modifier,\n stream$: pullStream$.asObservable()\n };\n }\n\n\n let replicationPrimitivesPush: ReplicationPushOptions | undefined;\n if (options.push) {\n replicationPrimitivesPush = {\n async handler(\n rows: RxReplicationWriteToMasterRow[]\n ) {\n const cn = await connectionStatePromise;\n const conflicts: WithDeleted[] = [];\n await Promise.all(\n rows.map(async (writeRow) => {\n const docId = (writeRow.newDocumentState as any)[primaryPath];\n\n /**\n * first get the current state of the documents from the server\n * so that we have the sequence number for conflict detection.\n */\n let remoteDocState;\n try {\n remoteDocState = await getNatsServerDocumentState(\n cn.natsStream,\n options.subjectPrefix,\n docId\n );\n } catch (err: Error | any) {\n if (!err.message.includes('no message found')) {\n throw err;\n }\n }\n\n if (\n remoteDocState &&\n (\n !writeRow.assumedMasterState ||\n (await collection.conflictHandler({\n newDocumentState: remoteDocState.json(),\n realMasterState: writeRow.assumedMasterState\n }, 'replication-firestore-push')).isEqual === false\n )\n ) {\n // conflict\n conflicts.push(remoteDocState.json());\n } else {\n // no conflict (yet)\n let pushDone = false;\n while (!pushDone) {\n try {\n await cn.jetstreamClient.publish(\n options.subjectPrefix + '.' + docId,\n jc.encode(writeRow.newDocumentState),\n {\n expect: remoteDocState ? 
{\n streamName: options.streamName,\n lastSubjectSequence: remoteDocState.seq\n } : undefined\n }\n );\n pushDone = true;\n } catch (err: Error | any) {\n if (err.message.includes('wrong last sequence')) {\n // A write happened while we are doing our write -> handle conflict\n const newServerState = await getNatsServerDocumentState(\n cn.natsStream,\n options.subjectPrefix,\n docId\n );\n conflicts.push(ensureNotFalsy(newServerState).json());\n pushDone = true;\n } else {\n replicationState.subjects.error.next(\n newRxError('RC_STREAM', {\n document: writeRow.newDocumentState,\n error: errorToPlainJson(err)\n })\n );\n\n // -> retry after wait\n await awaitRetry(\n collection,\n replicationState.retryTime\n );\n }\n }\n }\n }\n })\n );\n return conflicts;\n },\n batchSize: options.push.batchSize,\n modifier: options.push.modifier\n };\n }\n\n\n const replicationState = new RxNatsReplicationState(\n options.replicationIdentifier,\n collection,\n replicationPrimitivesPull,\n replicationPrimitivesPush,\n options.live,\n options.retryTime,\n options.autoStart\n );\n\n /**\n * Use long polling to get live changes for the pull.stream$\n */\n if (options.live && options.pull) {\n const startBefore = replicationState.start.bind(replicationState);\n const cancelBefore = replicationState.cancel.bind(replicationState);\n replicationState.start = async () => {\n const cn = await connectionStatePromise;\n\n /**\n * First get the last sequence so that we can\n * laster only fetch 'newer' messages.\n */\n let lastSeq = 0;\n try {\n const lastDocState = await cn.natsStream.getMessage({\n last_by_subj: options.subjectPrefix + '.*'\n });\n lastSeq = lastDocState.seq;\n } catch (err: any | Error) {\n if (!err.message.includes('no message found')) {\n throw err;\n }\n }\n\n const consumer = await cn.natsStream.getConsumer({\n opt_start_seq: lastSeq\n });\n const newMessages = await consumer.consume();\n (async () => {\n for await (const m of newMessages) {\n const docData: WithDeleted 
= m.json();\n pullStream$.next({\n documents: [docData],\n checkpoint: {\n sequence: m.seq\n }\n });\n m.ack();\n }\n })();\n replicationState.cancel = () => {\n newMessages.close();\n return cancelBefore();\n };\n return startBefore();\n };\n }\n\n startReplicationOnLeaderShip(options.waitForLeadership, replicationState);\n\n return replicationState;\n}\n"],"mappings":";;;;;;;;;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AAMA,IAAAC,OAAA,GAAAD,OAAA;AAQA,IAAAE,OAAA,GAAAF,OAAA;AAIA,IAAAG,OAAA,GAAAH,OAAA;AAMA,IAAAI,KAAA,GAAAJ,OAAA;AAKA,IAAAK,KAAA,GAAAL,OAAA;AACA,IAAAM,WAAA,GAAAN,OAAA;AAIAO,MAAA,CAAAC,IAAA,CAAAF,WAAA,EAAAG,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAJ,WAAA,CAAAI,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAZ,WAAA,CAAAI,GAAA;IAAA;EAAA;AAAA;AAHA,IAAAS,kBAAA,GAAAnB,OAAA;AAEA,IAAAoB,UAAA,GAAApB,OAAA;AAAAO,MAAA,CAAAC,IAAA,CAAAY,UAAA,EAAAX,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAU,UAAA,CAAAV,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAE,UAAA,CAAAV,GAAA;IAAA;EAAA;AAAA;AAAgC,IAInBW,sBAAsB,GAAAN,OAAA,CAAAM,sBAAA,0BAAAC,mBAAA;EAC/B,SAAAD,uBACoBE,qBAA6B,EAC7BC,UAAmC,EACnCC,IAA4D,EAC5DC,IAAwC,EACxCC,IAAa,GAAG,IAAI,EAC7BC,SAAiB,GAAG,IAAI,GAAG,CAAC,EAC5BC,SAAkB,GAAG,IAAI,EAClC;IAAA,IAAAC,KAAA;IACEA,KAAA,GAAAR,mBAAA,CAAAT,IAAA,OACIU,qBAAqB,EACrBC,UAAU,EACV,UAAU,EACVC,IAAI,EACJC,IAAI,EACJC,IAAI,EACJC,SAAS,EACTC,SACJ,CAAC;IAACC,KAAA,CAjBcP,qBAA6B,GAA7BA,qBAA6B;IAAAO,KAAA,CAC7BN,UAAmC,GAAnCA,UAAmC;IAAAM,KAAA,CACnCL,IAA4D,GAA5DA,IAA4D;IAAAK,KAAA,CAC5DJ,IAAwC,GAAxCA,IAAwC;IAAAI,KAAA,CACxCH,IAAa,GAAbA,IAAa;IAAAG,KAAA,CACtBF,SAAiB,GAAjBA,SAAiB;IAAAE,KAAA,CACjBD,SAAkB,GAAlBA,SAAkB;IAAA,OAAAC,KAAA;EAY7B;EAAC,IAAAC,eAAA,CAAAC,OAAA,EAAAX,sBAAA,EAAAC,mBA
AA;EAAA,OAAAD,sBAAA;AAAA,EApBkDY,0BAAkB;AAyBlE,SAASC,aAAaA,CACzBC,OAAmC,EACF;EACjCA,OAAO,CAACR,IAAI,GAAG,OAAOQ,OAAO,CAACR,IAAI,KAAK,WAAW,GAAG,IAAI,GAAGQ,OAAO,CAACR,IAAI;EACxEQ,OAAO,CAACC,iBAAiB,GAAG,OAAOD,OAAO,CAACC,iBAAiB,KAAK,WAAW,GAAG,IAAI,GAAGD,OAAO,CAACC,iBAAiB;EAE/G,IAAMZ,UAAU,GAAGW,OAAO,CAACX,UAAU;EACrC,IAAMa,WAAW,GAAGb,UAAU,CAACc,MAAM,CAACD,WAAW;EACjD,IAAAE,mBAAW,EAACC,gCAAwB,CAAC;EAErC,IAAMC,EAAE,GAAG,IAAAC,eAAS,EAAC,CAAC;EAGtB,IAAMC,sBAAsB,GAAG,CAAC,YAAY;IACxC,IAAMC,EAAE,GAAG,MAAM,IAAAC,aAAO,EAACV,OAAO,CAACW,UAAU,CAAC;IAC5C,IAAMC,eAAe,GAAGH,EAAE,CAACI,SAAS,CAAC,CAAC;IACtC,IAAMC,GAAG,GAAG,MAAML,EAAE,CAACM,gBAAgB,CAAC,CAAC;IACvC,MAAMD,GAAG,CAACE,OAAO,CAACC,GAAG,CAAC;MAClBC,IAAI,EAAElB,OAAO,CAACmB,UAAU;MAAEC,QAAQ,EAAE,CAChCpB,OAAO,CAACqB,aAAa,GAAG,IAAI;IAEpC,CAAC,CAAC;IACF,IAAMC,UAAU,GAAG,MAAMV,eAAe,CAACI,OAAO,CAACjC,GAAG,CAACiB,OAAO,CAACmB,UAAU,CAAC;IACxE,OAAO;MACHV,EAAE;MACFG,eAAe;MACfE,GAAG;MACHQ;IACJ,CAAC;EACL,CAAC,EAAE,CAAC;EACJ,IAAMC,WAAgF,GAAG,IAAIC,aAAO,CAAC,CAAC;EAEtG,IAAIC,yBAA4F;EAChG,IAAIzB,OAAO,CAACV,IAAI,EAAE;IACdmC,yBAAyB,GAAG;MACxB,MAAMC,OAAOA,CACTC,oBAAoD,EACpDC,SAAiB,EACnB;QACE,IAAMC,EAAE,GAAG,MAAMrB,sBAAsB;QACvC,IAAMsB,aAAiC,GAAG;UACtCC,QAAQ,EAAEJ,oBAAoB,GAAGA,oBAAoB,CAACI,QAAQ,GAAG;QACrE,CAAC;QACD,IAAMC,QAAQ,GAAG,MAAMH,EAAE,CAACP,UAAU,CAACW,WAAW,CAAC;UAC7CC,aAAa,EAAEP,oBAAoB,GAAGA,oBAAoB,CAACI,QAAQ,GAAG,CAAC;UACvEI,cAAc,EAAEC,mBAAa,CAACC,cAAc;UAC5CC,aAAa,EAAEC,kBAAY,CAACC;QAChC,CAAC,CAAC;QAEF,IAAMC,eAAe,GAAG,MAAMT,QAAQ,CAACU,KAAK,CAAC;UACzCC,YAAY,EAAEf;QAClB,CAAC,CAAC;QACF,MAAOa,eAAe,CAASG,MAAM;QACrC,MAAMH,eAAe,CAACI,KAAK,CAAC,CAAC;QAE7B,IAAMC,WAAqC,GAAG,EAAE;QAChD,WAAW,IAAMC,CAAC,IAAIN,eAAe,EAAE;UACnCK,WAAW,CAACvD,IAAI,CAACwD,CAAC,CAACC,IAAI,CAAC,CAAC,CAAC;UAC1BlB,aAAa,CAACC,QAAQ,GAAGgB,CAAC,CAACE,GAAG;UAC9BF,CAAC,CAACG,GAAG,CAAC,CAAC;QACX;QACA,OAAO;UACHC,SAAS,EAAEL,WAAW;UACtBM,UAAU,EAAEtB;QAChB,CAAC;MACL,CAAC;MACDF,SAAS,EAAE,IAAAyB,qBAAc,EAACrD,OAAO,CAACV,IAAI,CAAC,CAACsC,SAAS;MACjD0B,QAAQ,EAAE,IAAAD,qBAAc,EAACrD,OAAO,CAACV,IAAI,CAAC,CAA
CgE,QAAQ;MAC/CC,OAAO,EAAEhC,WAAW,CAACiC,YAAY,CAAC;IACtC,CAAC;EACL;EAGA,IAAIC,yBAAwE;EAC5E,IAAIzD,OAAO,CAACT,IAAI,EAAE;IACdkE,yBAAyB,GAAG;MACxB,MAAM/B,OAAOA,CACTgC,IAAgD,EAClD;QACE,IAAM7B,EAAE,GAAG,MAAMrB,sBAAsB;QACvC,IAAMmD,SAAmC,GAAG,EAAE;QAC9C,MAAMC,OAAO,CAACC,GAAG,CACbH,IAAI,CAACI,GAAG,CAAC,MAAOC,QAAQ,IAAK;UACzB,IAAMC,KAAK,GAAID,QAAQ,CAACE,gBAAgB,CAAS/D,WAAW,CAAC;;UAE7D;AACxB;AACA;AACA;UACwB,IAAIgE,cAAc;UAClB,IAAI;YACAA,cAAc,GAAG,MAAM,IAAAC,sCAA0B,EAC7CtC,EAAE,CAACP,UAAU,EACbtB,OAAO,CAACqB,aAAa,EACrB2C,KACJ,CAAC;UACL,CAAC,CAAC,OAAOI,GAAgB,EAAE;YACvB,IAAI,CAACA,GAAG,CAACC,OAAO,CAACC,QAAQ,CAAC,kBAAkB,CAAC,EAAE;cAC3C,MAAMF,GAAG;YACb;UACJ;UAEA,IACIF,cAAc,KAEV,CAACH,QAAQ,CAACQ,kBAAkB,IAC5B,CAAC,MAAMlF,UAAU,CAACmF,eAAe,CAAC;YAC9BP,gBAAgB,EAAEC,cAAc,CAAClB,IAAI,CAAC,CAAC;YACvCyB,eAAe,EAAEV,QAAQ,CAACQ;UAC9B,CAAC,EAAE,4BAA4B,CAAC,EAAEG,OAAO,KAAK,KAAK,CACtD,EACH;YACE;YACAf,SAAS,CAACpE,IAAI,CAAC2E,cAAc,CAAClB,IAAI,CAAC,CAAC,CAAC;UACzC,CAAC,MAAM;YACH;YACA,IAAI2B,QAAQ,GAAG,KAAK;YACpB,OAAO,CAACA,QAAQ,EAAE;cACd,IAAI;gBACA,MAAM9C,EAAE,CAACjB,eAAe,CAACgE,OAAO,CAC5B5E,OAAO,CAACqB,aAAa,GAAG,GAAG,GAAG2C,KAAK,EACnC1D,EAAE,CAACuE,MAAM,CAACd,QAAQ,CAACE,gBAAgB,CAAC,EACpC;kBACIa,MAAM,EAAEZ,cAAc,GAAG;oBACrB/C,UAAU,EAAEnB,OAAO,CAACmB,UAAU;oBAC9B4D,mBAAmB,EAAEb,cAAc,CAACjB;kBACxC,CAAC,GAAG+B;gBACR,CACJ,CAAC;gBACDL,QAAQ,GAAG,IAAI;cACnB,CAAC,CAAC,OAAOP,GAAgB,EAAE;gBACvB,IAAIA,GAAG,CAACC,OAAO,CAACC,QAAQ,CAAC,qBAAqB,CAAC,EAAE;kBAC7C;kBACA,IAAMW,cAAc,GAAG,MAAM,IAAAd,sCAA0B,EACnDtC,EAAE,CAACP,UAAU,EACbtB,OAAO,CAACqB,aAAa,EACrB2C,KACJ,CAAC;kBACDL,SAAS,CAACpE,IAAI,CAAC,IAAA8D,qBAAc,EAAC4B,cAAc,CAAC,CAACjC,IAAI,CAAC,CAAC,CAAC;kBACrD2B,QAAQ,GAAG,IAAI;gBACnB,CAAC,MAAM;kBACHO,gBAAgB,CAAC9D,QAAQ,CAAC+D,KAAK,CAACC,IAAI,CAChC,IAAAC,kBAAU,EAAC,WAAW,EAAE;oBACpBC,QAAQ,EAAEvB,QAAQ,CAACE,gBAAgB;oBACnCkB,KAAK,EAAE,IAAAI,uBAAgB,EAACnB,GAAG;kBAC/B,CAAC,CACL,CAAC;;kBAED;kBACA,MAAM,IAAAoB,6BAAU,EACZnG,UAAU,EACV6F,gBAAgB,CAACzF,SACrB,CAAC;gBACL;cACJ;YACJ;UACJ;QACJ,CAAC,CACL,CAAC;QACD,OAAOkE,SAAS;MACpB,CAAC;MACD/B,SA
AS,EAAE5B,OAAO,CAACT,IAAI,CAACqC,SAAS;MACjC0B,QAAQ,EAAEtD,OAAO,CAACT,IAAI,CAAC+D;IAC3B,CAAC;EACL;EAGA,IAAM4B,gBAAgB,GAAG,IAAIhG,sBAAsB,CAC/Cc,OAAO,CAACZ,qBAAqB,EAC7BC,UAAU,EACVoC,yBAAyB,EACzBgC,yBAAyB,EACzBzD,OAAO,CAACR,IAAI,EACZQ,OAAO,CAACP,SAAS,EACjBO,OAAO,CAACN,SACZ,CAAC;;EAED;AACJ;AACA;EACI,IAAIM,OAAO,CAACR,IAAI,IAAIQ,OAAO,CAACV,IAAI,EAAE;IAC9B,IAAMmG,WAAW,GAAGP,gBAAgB,CAACQ,KAAK,CAACC,IAAI,CAACT,gBAAgB,CAAC;IACjE,IAAMU,YAAY,GAAGV,gBAAgB,CAACW,MAAM,CAACF,IAAI,CAACT,gBAAgB,CAAC;IACnEA,gBAAgB,CAACQ,KAAK,GAAG,YAAY;MACjC,IAAM7D,EAAE,GAAG,MAAMrB,sBAAsB;;MAEvC;AACZ;AACA;AACA;MACY,IAAIsF,OAAO,GAAG,CAAC;MACf,IAAI;QACA,IAAMC,YAAY,GAAG,MAAMlE,EAAE,CAACP,UAAU,CAAC0E,UAAU,CAAC;UAChDC,YAAY,EAAEjG,OAAO,CAACqB,aAAa,GAAG;QAC1C,CAAC,CAAC;QACFyE,OAAO,GAAGC,YAAY,CAAC9C,GAAG;MAC9B,CAAC,CAAC,OAAOmB,GAAgB,EAAE;QACvB,IAAI,CAACA,GAAG,CAACC,OAAO,CAACC,QAAQ,CAAC,kBAAkB,CAAC,EAAE;UAC3C,MAAMF,GAAG;QACb;MACJ;MAEA,IAAMpC,QAAQ,GAAG,MAAMH,EAAE,CAACP,UAAU,CAACW,WAAW,CAAC;QAC7CC,aAAa,EAAE4D;MACnB,CAAC,CAAC;MACF,IAAMI,WAAW,GAAG,MAAMlE,QAAQ,CAACmE,OAAO,CAAC,CAAC;MAC5C,CAAC,YAAY;QACT,WAAW,IAAMpD,CAAC,IAAImD,WAAW,EAAE;UAC/B,IAAME,OAA+B,GAAGrD,CAAC,CAACC,IAAI,CAAC,CAAC;UAChDzB,WAAW,CAAC6D,IAAI,CAAC;YACbjC,SAAS,EAAE,CAACiD,OAAO,CAAC;YACpBhD,UAAU,EAAE;cACRrB,QAAQ,EAAEgB,CAAC,CAACE;YAChB;UACJ,CAAC,CAAC;UACFF,CAAC,CAACG,GAAG,CAAC,CAAC;QACX;MACJ,CAAC,EAAE,CAAC;MACJgC,gBAAgB,CAACW,MAAM,GAAG,MAAM;QAC5BK,WAAW,CAACrD,KAAK,CAAC,CAAC;QACnB,OAAO+C,YAAY,CAAC,CAAC;MACzB,CAAC;MACD,OAAOH,WAAW,CAAC,CAAC;IACxB,CAAC;EACL;EAEA,IAAAY,oCAA4B,EAACrG,OAAO,CAACC,iBAAiB,EAAEiF,gBAAgB,CAAC;EAEzE,OAAOA,gBAAgB;AAC3B","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-nats/nats-helper.js b/dist/cjs/plugins/replication-nats/nats-helper.js deleted file mode 100644 index d8101d5bb5c..00000000000 --- a/dist/cjs/plugins/replication-nats/nats-helper.js +++ /dev/null @@ -1,13 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); 
-exports.getNatsServerDocumentState = getNatsServerDocumentState; -async function getNatsServerDocumentState(natsStream, subjectPrefix, docId) { - var remoteDocState = await natsStream.getMessage({ - last_by_subj: subjectPrefix + '.' + docId - }); - return remoteDocState; -} -//# sourceMappingURL=nats-helper.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-nats/nats-helper.js.map b/dist/cjs/plugins/replication-nats/nats-helper.js.map deleted file mode 100644 index 97d5a416722..00000000000 --- a/dist/cjs/plugins/replication-nats/nats-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"nats-helper.js","names":["getNatsServerDocumentState","natsStream","subjectPrefix","docId","remoteDocState","getMessage","last_by_subj"],"sources":["../../../../src/plugins/replication-nats/nats-helper.ts"],"sourcesContent":["import type {\n StoredMsg,\n Stream\n} from 'nats';\n\nexport async function getNatsServerDocumentState(\n natsStream: Stream,\n subjectPrefix: string,\n docId: string\n): Promise {\n const remoteDocState = await natsStream.getMessage({\n last_by_subj: subjectPrefix + '.' 
+ docId\n });\n return remoteDocState;\n}\n"],"mappings":";;;;;;AAKO,eAAeA,0BAA0BA,CAC5CC,UAAkB,EAClBC,aAAqB,EACrBC,KAAa,EACiB;EAC9B,IAAMC,cAAc,GAAG,MAAMH,UAAU,CAACI,UAAU,CAAC;IAC/CC,YAAY,EAAEJ,aAAa,GAAG,GAAG,GAAGC;EACxC,CAAC,CAAC;EACF,OAAOC,cAAc;AACzB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-nats/nats-types.js b/dist/cjs/plugins/replication-nats/nats-types.js deleted file mode 100644 index c890b9a0892..00000000000 --- a/dist/cjs/plugins/replication-nats/nats-types.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=nats-types.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-nats/nats-types.js.map b/dist/cjs/plugins/replication-nats/nats-types.js.map deleted file mode 100644 index f94a79adae4..00000000000 --- a/dist/cjs/plugins/replication-nats/nats-types.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"nats-types.js","names":[],"sources":["../../../../src/plugins/replication-nats/nats-types.ts"],"sourcesContent":["import type {\n ReplicationOptions,\n ReplicationPullOptions,\n ReplicationPushOptions\n} from '../../types/index.d.ts';\n\n\nimport {\n ConnectionOptions\n} from 'nats';\n\n\nexport type NatsCheckpointType = {\n sequence: number;\n};\n\nexport type NatsSyncPullOptions =\n Omit, 'handler' | 'stream$'>\n & {\n };\n\nexport type NatsSyncPushOptions = Omit, 'handler'>\n & {\n};\n\nexport type NatsSyncOptions = Omit<\n ReplicationOptions,\n 'pull' | 'push'\n> & {\n\n connection: ConnectionOptions;\n streamName: string;\n /**\n * NATS subject prefix like 'foo.bar'\n * which means a message for a document would have the subject\n * 'foo.bar.myDoc' where the last part 'myDoc' would be the primaryKey in\n * the RxDB document.\n * @link https://docs.nats.io/nats-concepts/subjects\n */\n subjectPrefix: string;\n pull?: NatsSyncPullOptions;\n push?: NatsSyncPushOptions;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git 
a/dist/cjs/plugins/replication-webrtc/connection-handler-p2pcf.js b/dist/cjs/plugins/replication-webrtc/connection-handler-p2pcf.js deleted file mode 100644 index c49815ce7ec..00000000000 --- a/dist/cjs/plugins/replication-webrtc/connection-handler-p2pcf.js +++ /dev/null @@ -1,79 +0,0 @@ -// import { Subject } from 'rxjs'; -// import { PROMISE_RESOLVE_VOID, randomCouchString } from '../../util'; -// import type { -// P2PConnectionHandler, -// P2PConnectionHandlerCreator, -// P2PMessage, -// P2PPeer, -// PeerWithMessage, -// PeerWithResponse -// } from './p2p-types'; - -// import P2PCF from 'p2pcf'; - -// /** -// * Returns a connection handler that uses the Cloudflare worker signaling server -// * @link https://github.com/gfodor/p2pcf -// */ -// export function getConnectionHandlerP2PCF( -// p2pCFOptions: { -// workerUrl?: string -// } = {} -// ): P2PConnectionHandlerCreator { -// // const P2PCF = require('p2pcf'); - -// const creator: P2PConnectionHandlerCreator = (options) => { -// const clientId = randomCouchString(10); -// const p2p2 = new P2PCF(clientId, options.topic, p2pCFOptions); - -// const connect$ = new Subject(); -// p2p2.on('peerconnect', (peer) => connect$.next(peer as any)); - -// const disconnect$ = new Subject(); -// p2p2.on('peerclose', (peer) => disconnect$.next(peer as any)); - -// const message$ = new Subject(); -// const response$ = new Subject(); -// p2p2.on('msg', (peer, messageOrResponse) => { -// if (messageOrResponse.result) { -// response$.next({ -// peer: peer as any, -// response: messageOrResponse -// }); -// } else { -// message$.next({ -// peer: peer as any, -// message: messageOrResponse -// }); -// } - -// }); - -// const handler: P2PConnectionHandler = { -// connect$, -// disconnect$, -// message$, -// response$, -// async send(peer: P2PPeer, message: P2PMessage) { -// const [responsePeer, response] = await p2p2.send(peer as any, message); -// return { -// peer: responsePeer, -// response -// } as any; -// }, -// destroy() { -// 
p2p2.destroy(); -// connect$.complete(); -// disconnect$.complete(); -// message$.complete(); -// response$.complete(); -// return PROMISE_RESOLVE_VOID; -// } -// } -// p2p2.start(); -// return handler; -// }; -// return creator; -// } -"use strict"; -//# sourceMappingURL=connection-handler-p2pcf.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-webrtc/connection-handler-p2pcf.js.map b/dist/cjs/plugins/replication-webrtc/connection-handler-p2pcf.js.map deleted file mode 100644 index 37221a8b777..00000000000 --- a/dist/cjs/plugins/replication-webrtc/connection-handler-p2pcf.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"connection-handler-p2pcf.js","names":[],"sources":["../../../../src/plugins/replication-webrtc/connection-handler-p2pcf.ts"],"sourcesContent":["// import { Subject } from 'rxjs';\n// import { PROMISE_RESOLVE_VOID, randomCouchString } from '../../util';\n// import type {\n// P2PConnectionHandler,\n// P2PConnectionHandlerCreator,\n// P2PMessage,\n// P2PPeer,\n// PeerWithMessage,\n// PeerWithResponse\n// } from './p2p-types';\n\n// import P2PCF from 'p2pcf';\n\n// /**\n// * Returns a connection handler that uses the Cloudflare worker signaling server\n// * @link https://github.com/gfodor/p2pcf\n// */\n// export function getConnectionHandlerP2PCF(\n// p2pCFOptions: {\n// workerUrl?: string\n// } = {}\n// ): P2PConnectionHandlerCreator {\n// // const P2PCF = require('p2pcf');\n\n// const creator: P2PConnectionHandlerCreator = (options) => {\n// const clientId = randomCouchString(10);\n// const p2p2 = new P2PCF(clientId, options.topic, p2pCFOptions);\n\n// const connect$ = new Subject();\n// p2p2.on('peerconnect', (peer) => connect$.next(peer as any));\n\n// const disconnect$ = new Subject();\n// p2p2.on('peerclose', (peer) => disconnect$.next(peer as any));\n\n// const message$ = new Subject();\n// const response$ = new Subject();\n// p2p2.on('msg', (peer, messageOrResponse) => {\n// if (messageOrResponse.result) {\n// 
response$.next({\n// peer: peer as any,\n// response: messageOrResponse\n// });\n// } else {\n// message$.next({\n// peer: peer as any,\n// message: messageOrResponse\n// });\n// }\n\n// });\n\n// const handler: P2PConnectionHandler = {\n// connect$,\n// disconnect$,\n// message$,\n// response$,\n// async send(peer: P2PPeer, message: P2PMessage) {\n// const [responsePeer, response] = await p2p2.send(peer as any, message);\n// return {\n// peer: responsePeer,\n// response\n// } as any;\n// },\n// destroy() {\n// p2p2.destroy();\n// connect$.complete();\n// disconnect$.complete();\n// message$.complete();\n// response$.complete();\n// return PROMISE_RESOLVE_VOID;\n// }\n// }\n// p2p2.start();\n// return handler;\n// };\n// return creator;\n// }\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-webrtc/connection-handler-simple-peer.js b/dist/cjs/plugins/replication-webrtc/connection-handler-simple-peer.js deleted file mode 100644 index 5fde24c560f..00000000000 --- a/dist/cjs/plugins/replication-webrtc/connection-handler-simple-peer.js +++ /dev/null @@ -1,201 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.SIMPLE_PEER_PING_INTERVAL = exports.DEFAULT_SIGNALING_SERVER = void 0; -exports.ensureProcessNextTickIsSet = ensureProcessNextTickIsSet; -exports.getConnectionHandlerSimplePeer = getConnectionHandlerSimplePeer; -var _rxjs = require("rxjs"); -var _index = require("../../plugins/utils/index.js"); -var _simplepeerMin = 
_interopRequireDefault(require("simple-peer/simplepeer.min.js")); -var _rxError = require("../../rx-error.js"); -var Peer = _simplepeerMin.default; -function sendMessage(ws, msg) { - ws.send(JSON.stringify(msg)); -} -var DEFAULT_SIGNALING_SERVER_HOSTNAME = 'signaling.rxdb.info'; -var DEFAULT_SIGNALING_SERVER = exports.DEFAULT_SIGNALING_SERVER = 'wss://' + DEFAULT_SIGNALING_SERVER_HOSTNAME + '/'; -var defaultServerWarningShown = false; -var SIMPLE_PEER_PING_INTERVAL = exports.SIMPLE_PEER_PING_INTERVAL = 1000 * 60 * 2; - -/** - * Returns a connection handler that uses simple-peer and the signaling server. - */ -function getConnectionHandlerSimplePeer({ - signalingServerUrl, - wrtc, - config, - webSocketConstructor -}) { - ensureProcessNextTickIsSet(); - signalingServerUrl = signalingServerUrl ? signalingServerUrl : DEFAULT_SIGNALING_SERVER; - webSocketConstructor = webSocketConstructor ? webSocketConstructor : WebSocket; - if (signalingServerUrl.includes(DEFAULT_SIGNALING_SERVER_HOSTNAME) && !defaultServerWarningShown) { - defaultServerWarningShown = true; - console.warn(['RxDB Warning: You are using the RxDB WebRTC replication plugin', 'but you did not specify your own signaling server url.', 'By default it will use a signaling server provided by RxDB at ' + DEFAULT_SIGNALING_SERVER, 'This server is made for demonstration purposes and tryouts. 
It is not reliable and might be offline at any time.', 'In production you must always use your own signaling server instead.', 'Learn how to run your own server at https://rxdb.info/replication-webrtc.html', 'Also leave a ⭐ at the RxDB github repo 🙏 https://github.com/pubkey/rxdb 🙏'].join(' ')); - } - var creator = async options => { - var connect$ = new _rxjs.Subject(); - var disconnect$ = new _rxjs.Subject(); - var message$ = new _rxjs.Subject(); - var response$ = new _rxjs.Subject(); - var error$ = new _rxjs.Subject(); - var peers = new Map(); - var closed = false; - var ownPeerId; - var socket = undefined; - createSocket(); - - /** - * Send ping signals to the server. - */ - (async () => { - while (true) { - await (0, _index.promiseWait)(SIMPLE_PEER_PING_INTERVAL / 2); - if (closed) { - break; - } - if (socket) { - sendMessage(socket, { - type: 'ping' - }); - } - } - })(); - - /** - * @recursive calls it self on socket disconnects - * so that when the user goes offline and online - * again, it will recreate the WebSocket connection. - */ - function createSocket() { - if (closed) { - return; - } - socket = new webSocketConstructor(signalingServerUrl); - socket.onclose = () => createSocket(); - socket.onopen = () => { - (0, _index.ensureNotFalsy)(socket).onmessage = msgEvent => { - var msg = JSON.parse(msgEvent.data); - switch (msg.type) { - case 'init': - ownPeerId = msg.yourPeerId; - sendMessage((0, _index.ensureNotFalsy)(socket), { - type: 'join', - room: options.topic - }); - break; - case 'joined': - /** - * PeerId is created by the signaling server - * to prevent spoofing it. 
- */ - var createPeerConnection = function (remotePeerId) { - var disconnected = false; - var newSimplePeer = new Peer({ - initiator: remotePeerId > ownPeerId, - wrtc, - config, - trickle: true - }); - newSimplePeer.id = (0, _index.randomCouchString)(10); - peers.set(remotePeerId, newSimplePeer); - newSimplePeer.on('signal', signal => { - sendMessage((0, _index.ensureNotFalsy)(socket), { - type: 'signal', - senderPeerId: ownPeerId, - receiverPeerId: remotePeerId, - room: options.topic, - data: signal - }); - }); - newSimplePeer.on('data', messageOrResponse => { - messageOrResponse = JSON.parse(messageOrResponse.toString()); - if (messageOrResponse.result) { - response$.next({ - peer: newSimplePeer, - response: messageOrResponse - }); - } else { - message$.next({ - peer: newSimplePeer, - message: messageOrResponse - }); - } - }); - newSimplePeer.on('error', error => { - error$.next((0, _rxError.newRxError)('RC_WEBRTC_PEER', { - error - })); - newSimplePeer.destroy(); - if (!disconnected) { - disconnected = true; - disconnect$.next(newSimplePeer); - } - }); - newSimplePeer.on('connect', () => { - connect$.next(newSimplePeer); - }); - newSimplePeer.on('close', () => { - if (!disconnected) { - disconnected = true; - disconnect$.next(newSimplePeer); - } - createPeerConnection(remotePeerId); - }); - }; - msg.otherPeerIds.forEach(remotePeerId => { - if (remotePeerId === ownPeerId || peers.has(remotePeerId)) { - return; - } else { - createPeerConnection(remotePeerId); - } - }); - break; - case 'signal': - var peer = (0, _index.getFromMapOrThrow)(peers, msg.senderPeerId); - peer.signal(msg.data); - break; - } - }; - }; - } - ; - var handler = { - error$, - connect$, - disconnect$, - message$, - response$, - async send(peer, message) { - await peer.send(JSON.stringify(message)); - }, - destroy() { - closed = true; - (0, _index.ensureNotFalsy)(socket).close(); - error$.complete(); - connect$.complete(); - disconnect$.complete(); - message$.complete(); - response$.complete(); 
- return _index.PROMISE_RESOLVE_VOID; - } - }; - return handler; - }; - return creator; -} - -/** - * Multiple people had problems because it requires to have - * the nextTick() method in the runtime. So we check here and - * throw a helpful error. - */ -function ensureProcessNextTickIsSet() { - if (typeof process === 'undefined' || typeof process.nextTick !== 'function') { - throw (0, _rxError.newRxError)('RC7'); - } -} -//# sourceMappingURL=connection-handler-simple-peer.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-webrtc/connection-handler-simple-peer.js.map b/dist/cjs/plugins/replication-webrtc/connection-handler-simple-peer.js.map deleted file mode 100644 index 0756f118eda..00000000000 --- a/dist/cjs/plugins/replication-webrtc/connection-handler-simple-peer.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"connection-handler-simple-peer.js","names":["_rxjs","require","_index","_simplepeerMin","_interopRequireDefault","_rxError","Peer","_Peer","sendMessage","ws","msg","send","JSON","stringify","DEFAULT_SIGNALING_SERVER_HOSTNAME","DEFAULT_SIGNALING_SERVER","exports","defaultServerWarningShown","SIMPLE_PEER_PING_INTERVAL","getConnectionHandlerSimplePeer","signalingServerUrl","wrtc","config","webSocketConstructor","ensureProcessNextTickIsSet","WebSocket","includes","console","warn","join","creator","options","connect$","Subject","disconnect$","message$","response$","error$","peers","Map","closed","ownPeerId","socket","undefined","createSocket","promiseWait","type","onclose","onopen","ensureNotFalsy","onmessage","msgEvent","parse","data","yourPeerId","room","topic","createPeerConnection","remotePeerId","disconnected","newSimplePeer","initiator","trickle","id","randomCouchString","set","on","signal","senderPeerId","receiverPeerId","messageOrResponse","toString","result","next","peer","response","message","error","newRxError","destroy","otherPeerIds","forEach","has","getFromMapOrThrow","handler","close","complete","PROMISE_RESOLV
E_VOID","process","nextTick"],"sources":["../../../../src/plugins/replication-webrtc/connection-handler-simple-peer.ts"],"sourcesContent":["import { Subject } from 'rxjs';\nimport {\n ensureNotFalsy,\n getFromMapOrThrow,\n PROMISE_RESOLVE_VOID,\n promiseWait,\n randomCouchString\n} from '../../plugins/utils/index.ts';\nimport type {\n WebRTCConnectionHandler,\n WebRTCConnectionHandlerCreator,\n WebRTCMessage,\n PeerWithMessage,\n PeerWithResponse,\n SyncOptionsWebRTC\n} from './webrtc-types.ts';\n\nimport type { \n SimplePeer as Peer, \n Instance as SimplePeerInstance, \n Options as SimplePeerOptions \n} from 'simple-peer';\nimport {\n default as _Peer\n // @ts-ignore\n} from 'simple-peer/simplepeer.min.js';\n\nconst Peer = _Peer as Peer\n\nimport type { RxError, RxTypeError } from '../../types/index.d.ts';\nimport { newRxError } from '../../rx-error.ts';\n\nexport type SimplePeer = SimplePeerInstance & {\n // add id to make debugging easier\n id: string;\n};\n\nexport type SimplePeerInitMessage = {\n type: 'init';\n yourPeerId: string;\n};\nexport type SimplePeerJoinMessage = {\n type: 'join';\n room: string;\n};\nexport type SimplePeerJoinedMessage = {\n type: 'joined';\n otherPeerIds: string[];\n};\nexport type SimplePeerSignalMessage = {\n type: 'signal';\n room: string;\n senderPeerId: string;\n receiverPeerId: string;\n data: string;\n};\nexport type SimplePeerPingMessage = {\n type: 'ping';\n};\n\nexport type PeerMessage =\n SimplePeerInitMessage |\n SimplePeerJoinMessage |\n SimplePeerJoinedMessage |\n SimplePeerSignalMessage |\n SimplePeerPingMessage;\n\n\nfunction sendMessage(ws: WebSocket, msg: PeerMessage) {\n ws.send(JSON.stringify(msg));\n}\n\nconst DEFAULT_SIGNALING_SERVER_HOSTNAME = 'signaling.rxdb.info';\nexport const DEFAULT_SIGNALING_SERVER = 'wss://' + DEFAULT_SIGNALING_SERVER_HOSTNAME + '/';\nlet defaultServerWarningShown = false;\n\nexport type SimplePeerWrtc = SimplePeerOptions['wrtc'];\nexport type SimplePeerConfig = 
SimplePeerOptions['config'];\n\nexport type SimplePeerConnectionHandlerOptions = {\n /**\n * If no server is specified, the default signaling server\n * from signaling.rxdb.info is used.\n * This server is not reliable and you should use\n * your own signaling server instead.\n */\n signalingServerUrl?: string;\n wrtc?: SimplePeerWrtc;\n config?: SimplePeerConfig;\n webSocketConstructor?: WebSocket;\n};\n\nexport const SIMPLE_PEER_PING_INTERVAL = 1000 * 60 * 2;\n\n/**\n * Returns a connection handler that uses simple-peer and the signaling server.\n */\nexport function getConnectionHandlerSimplePeer({\n signalingServerUrl,\n wrtc,\n config,\n webSocketConstructor\n}: SimplePeerConnectionHandlerOptions): WebRTCConnectionHandlerCreator {\n ensureProcessNextTickIsSet();\n\n signalingServerUrl = signalingServerUrl ? signalingServerUrl : DEFAULT_SIGNALING_SERVER;\n webSocketConstructor = webSocketConstructor ? webSocketConstructor as any : WebSocket;\n\n if (\n signalingServerUrl.includes(DEFAULT_SIGNALING_SERVER_HOSTNAME) &&\n !defaultServerWarningShown\n ) {\n defaultServerWarningShown = true;\n console.warn(\n [\n 'RxDB Warning: You are using the RxDB WebRTC replication plugin',\n 'but you did not specify your own signaling server url.',\n 'By default it will use a signaling server provided by RxDB at ' + DEFAULT_SIGNALING_SERVER,\n 'This server is made for demonstration purposes and tryouts. 
It is not reliable and might be offline at any time.',\n 'In production you must always use your own signaling server instead.',\n 'Learn how to run your own server at https://rxdb.info/replication-webrtc.html',\n 'Also leave a ⭐ at the RxDB github repo 🙏 https://github.com/pubkey/rxdb 🙏'\n ].join(' ')\n );\n }\n\n const creator: WebRTCConnectionHandlerCreator = async (options: SyncOptionsWebRTC) => {\n\n const connect$ = new Subject();\n const disconnect$ = new Subject();\n const message$ = new Subject>();\n const response$ = new Subject>();\n const error$ = new Subject();\n\n const peers = new Map();\n let closed = false;\n let ownPeerId: string;\n let socket: WebSocket | undefined = undefined;\n createSocket();\n\n\n /**\n * Send ping signals to the server.\n */\n (async () => {\n while (true) {\n await promiseWait(SIMPLE_PEER_PING_INTERVAL / 2);\n if (closed) {\n break;\n }\n if (socket) {\n sendMessage(socket, { type: 'ping' });\n }\n }\n })();\n\n\n /**\n * @recursive calls it self on socket disconnects\n * so that when the user goes offline and online\n * again, it will recreate the WebSocket connection.\n */\n function createSocket() {\n if (closed) {\n return;\n }\n socket = new (webSocketConstructor as any)(signalingServerUrl) as WebSocket;\n socket.onclose = () => createSocket();\n socket.onopen = () => {\n ensureNotFalsy(socket).onmessage = (msgEvent: any) => {\n const msg: PeerMessage = JSON.parse(msgEvent.data as any);\n switch (msg.type) {\n case 'init':\n ownPeerId = msg.yourPeerId;\n sendMessage(ensureNotFalsy(socket), {\n type: 'join',\n room: options.topic\n });\n break;\n case 'joined':\n /**\n * PeerId is created by the signaling server\n * to prevent spoofing it.\n */\n function createPeerConnection(remotePeerId: string) {\n let disconnected = false;\n const newSimplePeer: SimplePeer = new Peer({\n initiator: remotePeerId > ownPeerId,\n wrtc,\n config,\n trickle: true\n }) as any;\n newSimplePeer.id = randomCouchString(10);\n 
peers.set(remotePeerId, newSimplePeer);\n\n\n newSimplePeer.on('signal', (signal: any) => {\n sendMessage(ensureNotFalsy(socket), {\n type: 'signal',\n senderPeerId: ownPeerId,\n receiverPeerId: remotePeerId,\n room: options.topic,\n data: signal\n });\n });\n\n newSimplePeer.on('data', (messageOrResponse: any) => {\n messageOrResponse = JSON.parse(messageOrResponse.toString());\n if (messageOrResponse.result) {\n response$.next({\n peer: newSimplePeer,\n response: messageOrResponse\n });\n } else {\n message$.next({\n peer: newSimplePeer,\n message: messageOrResponse\n });\n }\n });\n\n newSimplePeer.on('error', (error) => {\n error$.next(newRxError('RC_WEBRTC_PEER', {\n error\n }));\n newSimplePeer.destroy();\n if (!disconnected) {\n disconnected = true;\n disconnect$.next(newSimplePeer);\n }\n });\n\n newSimplePeer.on('connect', () => {\n connect$.next(newSimplePeer);\n });\n\n newSimplePeer.on('close', () => {\n if (!disconnected) {\n disconnected = true;\n disconnect$.next(newSimplePeer);\n }\n createPeerConnection(remotePeerId);\n });\n }\n msg.otherPeerIds.forEach(remotePeerId => {\n if (\n remotePeerId === ownPeerId ||\n peers.has(remotePeerId)\n ) {\n return;\n } else {\n createPeerConnection(remotePeerId);\n }\n\n });\n break;\n case 'signal':\n const peer = getFromMapOrThrow(peers, msg.senderPeerId);\n peer.signal(msg.data);\n break;\n }\n }\n }\n };\n\n const handler: WebRTCConnectionHandler = {\n error$,\n connect$,\n disconnect$,\n message$,\n response$,\n async send(peer: SimplePeer, message: WebRTCMessage) {\n await peer.send(JSON.stringify(message));\n },\n destroy() {\n closed = true;\n ensureNotFalsy(socket).close();\n error$.complete();\n connect$.complete();\n disconnect$.complete();\n message$.complete();\n response$.complete();\n return PROMISE_RESOLVE_VOID;\n }\n };\n return handler;\n };\n return creator;\n}\n\n\n/**\n * Multiple people had problems because it requires to have\n * the nextTick() method in the runtime. 
So we check here and\n * throw a helpful error.\n */\nexport function ensureProcessNextTickIsSet() {\n if (\n typeof process === 'undefined' ||\n typeof process.nextTick !== 'function'\n ) {\n throw newRxError('RC7');\n }\n}\n"],"mappings":";;;;;;;;;AAAA,IAAAA,KAAA,GAAAC,OAAA;AACA,IAAAC,MAAA,GAAAD,OAAA;AAqBA,IAAAE,cAAA,GAAAC,sBAAA,CAAAH,OAAA;AAQA,IAAAI,QAAA,GAAAJ,OAAA;AAHA,IAAMK,IAAI,GAAGC,sBAAa;AAyC1B,SAASC,WAAWA,CAACC,EAAa,EAAEC,GAAgB,EAAE;EAClDD,EAAE,CAACE,IAAI,CAACC,IAAI,CAACC,SAAS,CAACH,GAAG,CAAC,CAAC;AAChC;AAEA,IAAMI,iCAAiC,GAAG,qBAAqB;AACxD,IAAMC,wBAAwB,GAAAC,OAAA,CAAAD,wBAAA,GAAG,QAAQ,GAAGD,iCAAiC,GAAG,GAAG;AAC1F,IAAIG,yBAAyB,GAAG,KAAK;AAkB9B,IAAMC,yBAAyB,GAAAF,OAAA,CAAAE,yBAAA,GAAG,IAAI,GAAG,EAAE,GAAG,CAAC;;AAEtD;AACA;AACA;AACO,SAASC,8BAA8BA,CAAC;EAC3CC,kBAAkB;EAClBC,IAAI;EACJC,MAAM;EACNC;AACgC,CAAC,EAA8C;EAC/EC,0BAA0B,CAAC,CAAC;EAE5BJ,kBAAkB,GAAGA,kBAAkB,GAAGA,kBAAkB,GAAGL,wBAAwB;EACvFQ,oBAAoB,GAAGA,oBAAoB,GAAGA,oBAAoB,GAAUE,SAAS;EAErF,IACIL,kBAAkB,CAACM,QAAQ,CAACZ,iCAAiC,CAAC,IAC9D,CAACG,yBAAyB,EAC5B;IACEA,yBAAyB,GAAG,IAAI;IAChCU,OAAO,CAACC,IAAI,CACR,CACI,gEAAgE,EAChE,wDAAwD,EACxD,gEAAgE,GAAGb,wBAAwB,EAC3F,kHAAkH,EAClH,sEAAsE,EACtE,+EAA+E,EAC/E,6EAA6E,CAChF,CAACc,IAAI,CAAC,GAAG,CACd,CAAC;EACL;EAEA,IAAMC,OAAmD,GAAG,MAAOC,OAA2C,IAAK;IAE/G,IAAMC,QAAQ,GAAG,IAAIC,aAAO,CAAa,CAAC;IAC1C,IAAMC,WAAW,GAAG,IAAID,aAAO,CAAa,CAAC;IAC7C,IAAME,QAAQ,GAAG,IAAIF,aAAO,CAA8B,CAAC;IAC3D,IAAMG,SAAS,GAAG,IAAIH,aAAO,CAA+B,CAAC;IAC7D,IAAMI,MAAM,GAAG,IAAIJ,aAAO,CAAwB,CAAC;IAEnD,IAAMK,KAAK,GAAG,IAAIC,GAAG,CAAqB,CAAC;IAC3C,IAAIC,MAAM,GAAG,KAAK;IAClB,IAAIC,SAAiB;IACrB,IAAIC,MAA6B,GAAGC,SAAS;IAC7CC,YAAY,CAAC,CAAC;;IAGd;AACR;AACA;IACQ,CAAC,YAAY;MACT,OAAO,IAAI,EAAE;QACT,MAAM,IAAAC,kBAAW,EAAC3B,yBAAyB,GAAG,CAAC,CAAC;QAChD,IAAIsB,MAAM,EAAE;UACR;QACJ;QACA,IAAIE,MAAM,EAAE;UACRlC,WAAW,CAACkC,MAAM,EAAE;YAAEI,IAAI,EAAE;UAAO,CAAC,CAAC;QACzC;MACJ;IACJ,CAAC,EAAE,CAAC;;IAGJ;AACR;AACA;AACA;AACA;IACQ,SAASF,YAAYA,CAAA,EAAG;MACpB,IAAIJ,MAAM,EAAE;QACR;MACJ;MACAE,MAAM,GAAG,IAAKnB,oBAAoB,CAASH,kBAAkB,CAAc;
MAC3EsB,MAAM,CAACK,OAAO,GAAG,MAAMH,YAAY,CAAC,CAAC;MACrCF,MAAM,CAACM,MAAM,GAAG,MAAM;QAClB,IAAAC,qBAAc,EAACP,MAAM,CAAC,CAACQ,SAAS,GAAIC,QAAa,IAAK;UAClD,IAAMzC,GAAgB,GAAGE,IAAI,CAACwC,KAAK,CAACD,QAAQ,CAACE,IAAW,CAAC;UACzD,QAAQ3C,GAAG,CAACoC,IAAI;YACZ,KAAK,MAAM;cACPL,SAAS,GAAG/B,GAAG,CAAC4C,UAAU;cAC1B9C,WAAW,CAAC,IAAAyC,qBAAc,EAACP,MAAM,CAAC,EAAE;gBAChCI,IAAI,EAAE,MAAM;gBACZS,IAAI,EAAExB,OAAO,CAACyB;cAClB,CAAC,CAAC;cACF;YACJ,KAAK,QAAQ;cACT;AAC5B;AACA;AACA;cAH4B,IAISC,oBAAoB,GAA7B,SAAAA,CAA8BC,YAAoB,EAAE;gBAChD,IAAIC,YAAY,GAAG,KAAK;gBACxB,IAAMC,aAAyB,GAAG,IAAItD,IAAI,CAAC;kBACvCuD,SAAS,EAAEH,YAAY,GAAGjB,SAAS;kBACnCpB,IAAI;kBACJC,MAAM;kBACNwC,OAAO,EAAE;gBACb,CAAC,CAAQ;gBACTF,aAAa,CAACG,EAAE,GAAG,IAAAC,wBAAiB,EAAC,EAAE,CAAC;gBACxC1B,KAAK,CAAC2B,GAAG,CAACP,YAAY,EAAEE,aAAa,CAAC;gBAGtCA,aAAa,CAACM,EAAE,CAAC,QAAQ,EAAGC,MAAW,IAAK;kBACxC3D,WAAW,CAAC,IAAAyC,qBAAc,EAACP,MAAM,CAAC,EAAE;oBAChCI,IAAI,EAAE,QAAQ;oBACdsB,YAAY,EAAE3B,SAAS;oBACvB4B,cAAc,EAAEX,YAAY;oBAC5BH,IAAI,EAAExB,OAAO,CAACyB,KAAK;oBACnBH,IAAI,EAAEc;kBACV,CAAC,CAAC;gBACN,CAAC,CAAC;gBAEFP,aAAa,CAACM,EAAE,CAAC,MAAM,EAAGI,iBAAsB,IAAK;kBACjDA,iBAAiB,GAAG1D,IAAI,CAACwC,KAAK,CAACkB,iBAAiB,CAACC,QAAQ,CAAC,CAAC,CAAC;kBAC5D,IAAID,iBAAiB,CAACE,MAAM,EAAE;oBAC1BpC,SAAS,CAACqC,IAAI,CAAC;sBACXC,IAAI,EAAEd,aAAa;sBACnBe,QAAQ,EAAEL;oBACd,CAAC,CAAC;kBACN,CAAC,MAAM;oBACHnC,QAAQ,CAACsC,IAAI,CAAC;sBACVC,IAAI,EAAEd,aAAa;sBACnBgB,OAAO,EAAEN;oBACb,CAAC,CAAC;kBACN;gBACJ,CAAC,CAAC;gBAEFV,aAAa,CAACM,EAAE,CAAC,OAAO,EAAGW,KAAK,IAAK;kBACjCxC,MAAM,CAACoC,IAAI,CAAC,IAAAK,mBAAU,EAAC,gBAAgB,EAAE;oBACrCD;kBACJ,CAAC,CAAC,CAAC;kBACHjB,aAAa,CAACmB,OAAO,CAAC,CAAC;kBACvB,IAAI,CAACpB,YAAY,EAAE;oBACfA,YAAY,GAAG,IAAI;oBACnBzB,WAAW,CAACuC,IAAI,CAACb,aAAa,CAAC;kBACnC;gBACJ,CAAC,CAAC;gBAEFA,aAAa,CAACM,EAAE,CAAC,SAAS,EAAE,MAAM;kBAC9BlC,QAAQ,CAACyC,IAAI,CAACb,aAAa,CAAC;gBAChC,CAAC,CAAC;gBAEFA,aAAa,CAACM,EAAE,CAAC,OAAO,EAAE,MAAM;kBAC5B,IAAI,CAACP,YAAY,EAAE;oBACfA,YAAY,GAAG,IAAI;oBACnBzB,WAAW,CAACuC,IAAI,CAACb,aAAa,CAAC;kBACnC;kBACAH,oBAAoB,CAACC,YAAY,CAAC;gBACtC,CAAC
,CAAC;cACN,CAAC;cACDhD,GAAG,CAACsE,YAAY,CAACC,OAAO,CAACvB,YAAY,IAAI;gBACrC,IACIA,YAAY,KAAKjB,SAAS,IAC1BH,KAAK,CAAC4C,GAAG,CAACxB,YAAY,CAAC,EACzB;kBACE;gBACJ,CAAC,MAAM;kBACHD,oBAAoB,CAACC,YAAY,CAAC;gBACtC;cAEJ,CAAC,CAAC;cACF;YACJ,KAAK,QAAQ;cACT,IAAMgB,IAAI,GAAG,IAAAS,wBAAiB,EAAC7C,KAAK,EAAE5B,GAAG,CAAC0D,YAAY,CAAC;cACvDM,IAAI,CAACP,MAAM,CAACzD,GAAG,CAAC2C,IAAI,CAAC;cACrB;UACR;QACJ,CAAC;MACL,CAAC;IACL;IAAC;IAED,IAAM+B,OAA4C,GAAG;MACjD/C,MAAM;MACNL,QAAQ;MACRE,WAAW;MACXC,QAAQ;MACRC,SAAS;MACT,MAAMzB,IAAIA,CAAC+D,IAAgB,EAAEE,OAAsB,EAAE;QACjD,MAAMF,IAAI,CAAC/D,IAAI,CAACC,IAAI,CAACC,SAAS,CAAC+D,OAAO,CAAC,CAAC;MAC5C,CAAC;MACDG,OAAOA,CAAA,EAAG;QACNvC,MAAM,GAAG,IAAI;QACb,IAAAS,qBAAc,EAACP,MAAM,CAAC,CAAC2C,KAAK,CAAC,CAAC;QAC9BhD,MAAM,CAACiD,QAAQ,CAAC,CAAC;QACjBtD,QAAQ,CAACsD,QAAQ,CAAC,CAAC;QACnBpD,WAAW,CAACoD,QAAQ,CAAC,CAAC;QACtBnD,QAAQ,CAACmD,QAAQ,CAAC,CAAC;QACnBlD,SAAS,CAACkD,QAAQ,CAAC,CAAC;QACpB,OAAOC,2BAAoB;MAC/B;IACJ,CAAC;IACD,OAAOH,OAAO;EAClB,CAAC;EACD,OAAOtD,OAAO;AAClB;;AAGA;AACA;AACA;AACA;AACA;AACO,SAASN,0BAA0BA,CAAA,EAAG;EACzC,IACI,OAAOgE,OAAO,KAAK,WAAW,IAC9B,OAAOA,OAAO,CAACC,QAAQ,KAAK,UAAU,EACxC;IACE,MAAM,IAAAX,mBAAU,EAAC,KAAK,CAAC;EAC3B;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-webrtc/connection-handler-webtorrent.js b/dist/cjs/plugins/replication-webrtc/connection-handler-webtorrent.js deleted file mode 100644 index 2d28b032d56..00000000000 --- a/dist/cjs/plugins/replication-webrtc/connection-handler-webtorrent.js +++ /dev/null @@ -1,137 +0,0 @@ -// /** -// * Uses the Webtorrent servers as signaling server, works similar to p2pt. -// * We could not use p2pt directly because it has so many bugs and behaves wrong in -// * cases with more then 2 peers. 
-// * @link https://github.com/subins2000/p2pt/blob/master/p2pt.js -// */ - -// import { Subject } from 'rxjs'; -// import { PROMISE_RESOLVE_VOID, randomCouchString } from '../../util'; -// import { P2PConnectionHandler, P2PConnectionHandlerCreator, P2PMessage, P2PPeer, PeerWithMessage, PeerWithResponse } from './p2p-types'; -// const wrtc = require('wrtc'); - -// const WebSocketTracker = require('bittorrent-tracker/lib/client/websocket-tracker'); -// const Client = require('bittorrent-tracker'); -// const randombytes = require('randombytes'); -// const EventEmitter = require('events'); -// const sha1 = require('simple-sha1'); -// const debug = require('debug')('p2pt'); - -// export const P2PT_DEFAULT_TRACKERS = [ -// 'wss://tracker.files.fm:7073/announce', -// 'wss://tracker.btorrent.xyz', -// 'wss://spacetradersapi-chatbox.herokuapp.com:443/announce', -// 'wss://qot.abiir.top:443/announce' -// ]; - -// export function getConnectionHandlerWebtorrent( -// trackers: string[] = P2PT_DEFAULT_TRACKERS, -// /** -// * Port is only required in Node.js, -// * not on browsers. -// */ -// torrentClientPort = 18669 -// ): P2PConnectionHandlerCreator { -// const creator: P2PConnectionHandlerCreator = (options) => { -// /** -// * @link https://github.com/webtorrent/bittorrent-tracker#client -// */ -// const requiredOpts = { -// infoHash: sha1.sync(options.topic).toLowerCase(), -// peerId: randombytes(20), -// announce: trackers, -// port: torrentClientPort, -// wrtc -// } -// const client = new Client(requiredOpts); - -// const connect$ = new Subject(); -// const disconnect$ = new Subject(); -// const message$ = new Subject(); -// const response$ = new Subject(); - -// client.on('error', function (err) { -// console.error('fatal client error! ' + requiredOpts.peerId.toString('hex')); -// console.log(err.message) -// }) - -// client.on('warning', function (err) { -// // a tracker was unavailable or sent bad data to the client. 
you can probably ignore it -// console.log(err.message) -// }) - -// client.on('update', function (data) { -// console.log('got an announce response from tracker: ' + data.announce) -// console.log('number of seeders in the swarm: ' + data.complete) -// console.log('number of leechers in the swarm: ' + data.incomplete) -// }); - -// const knownPeers = new Set(); -// client.on('peer', function (peer: P2PPeer) { -// console.log('found a peer: ' + peer.id + ' ' + requiredOpts.peerId.toString('hex')) // 85.10.239.191:48623 -// if (knownPeers.has(peer.id)) { -// return; -// } -// knownPeers.add(peer.id); -// peer.once('connect', () => { -// connect$.next(peer); -// }); -// peer.on('data', (data: Buffer) => { -// console.log('# GOT DATA FROM PEER:'); -// const messageOrResponse = JSON.parse(data as any); -// console.dir(messageOrResponse); -// if (messageOrResponse.result) { -// response$.next({ -// peer: peer as any, -// response: messageOrResponse -// }); -// } else { -// message$.next({ -// peer, -// message: JSON.parse(data) -// }); -// } -// }); -// peer.on('signal', (signal) => { -// console.log('GOT SIGNAL: ' + requiredOpts.peerId.toString('hex')); -// console.dir(signal); -// client.signal(signal); -// client.update(); -// client.scrape(); -// }); -// }); - -// client.on('scrape', function (data) { -// console.log('number of leechers in the swarm: ' + data.incomplete) -// }) - -// const handler: P2PConnectionHandler = { -// connect$, -// disconnect$, -// message$, -// response$, -// async send(peer: P2PPeer, message: P2PMessage) { -// await peer.send(JSON.stringify(message)); -// }, -// destroy() { -// client.destroy(); -// connect$.complete(); -// disconnect$.complete(); -// message$.complete(); -// response$.complete(); -// return PROMISE_RESOLVE_VOID; -// } -// } -// client.start(); -// client.update(); -// client.scrape(); -// setInterval(() => { -// // client.update(); -// }, 10000); -// return handler; -// }; - -// return creator; -// } -"use strict"; -//# 
sourceMappingURL=connection-handler-webtorrent.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-webrtc/connection-handler-webtorrent.js.map b/dist/cjs/plugins/replication-webrtc/connection-handler-webtorrent.js.map deleted file mode 100644 index 8969848e381..00000000000 --- a/dist/cjs/plugins/replication-webrtc/connection-handler-webtorrent.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"connection-handler-webtorrent.js","names":[],"sources":["../../../../src/plugins/replication-webrtc/connection-handler-webtorrent.ts"],"sourcesContent":["// /**\n// * Uses the Webtorrent servers as signaling server, works similar to p2pt.\n// * We could not use p2pt directly because it has so many bugs and behaves wrong in\n// * cases with more then 2 peers.\n// * @link https://github.com/subins2000/p2pt/blob/master/p2pt.js\n// */\n\n// import { Subject } from 'rxjs';\n// import { PROMISE_RESOLVE_VOID, randomCouchString } from '../../util';\n// import { P2PConnectionHandler, P2PConnectionHandlerCreator, P2PMessage, P2PPeer, PeerWithMessage, PeerWithResponse } from './p2p-types';\n// const wrtc = require('wrtc');\n\n// const WebSocketTracker = require('bittorrent-tracker/lib/client/websocket-tracker');\n// const Client = require('bittorrent-tracker');\n// const randombytes = require('randombytes');\n// const EventEmitter = require('events');\n// const sha1 = require('simple-sha1');\n// const debug = require('debug')('p2pt');\n\n\n// export const P2PT_DEFAULT_TRACKERS = [\n// 'wss://tracker.files.fm:7073/announce',\n// 'wss://tracker.btorrent.xyz',\n// 'wss://spacetradersapi-chatbox.herokuapp.com:443/announce',\n// 'wss://qot.abiir.top:443/announce'\n// ];\n\n// export function getConnectionHandlerWebtorrent(\n// trackers: string[] = P2PT_DEFAULT_TRACKERS,\n// /**\n// * Port is only required in Node.js,\n// * not on browsers.\n// */\n// torrentClientPort = 18669\n// ): P2PConnectionHandlerCreator {\n// const creator: P2PConnectionHandlerCreator = 
(options) => {\n// /**\n// * @link https://github.com/webtorrent/bittorrent-tracker#client\n// */\n// const requiredOpts = {\n// infoHash: sha1.sync(options.topic).toLowerCase(),\n// peerId: randombytes(20),\n// announce: trackers,\n// port: torrentClientPort,\n// wrtc\n// }\n// const client = new Client(requiredOpts);\n\n// const connect$ = new Subject();\n// const disconnect$ = new Subject();\n// const message$ = new Subject();\n// const response$ = new Subject();\n\n\n// client.on('error', function (err) {\n// console.error('fatal client error! ' + requiredOpts.peerId.toString('hex'));\n// console.log(err.message)\n// })\n\n// client.on('warning', function (err) {\n// // a tracker was unavailable or sent bad data to the client. you can probably ignore it\n// console.log(err.message)\n// })\n\n// client.on('update', function (data) {\n// console.log('got an announce response from tracker: ' + data.announce)\n// console.log('number of seeders in the swarm: ' + data.complete)\n// console.log('number of leechers in the swarm: ' + data.incomplete)\n// });\n\n// const knownPeers = new Set();\n// client.on('peer', function (peer: P2PPeer) {\n// console.log('found a peer: ' + peer.id + ' ' + requiredOpts.peerId.toString('hex')) // 85.10.239.191:48623\n// if (knownPeers.has(peer.id)) {\n// return;\n// }\n// knownPeers.add(peer.id);\n// peer.once('connect', () => {\n// connect$.next(peer);\n// });\n// peer.on('data', (data: Buffer) => {\n// console.log('# GOT DATA FROM PEER:');\n// const messageOrResponse = JSON.parse(data as any);\n// console.dir(messageOrResponse);\n// if (messageOrResponse.result) {\n// response$.next({\n// peer: peer as any,\n// response: messageOrResponse\n// });\n// } else {\n// message$.next({\n// peer,\n// message: JSON.parse(data)\n// });\n// }\n// });\n// peer.on('signal', (signal) => {\n// console.log('GOT SIGNAL: ' + requiredOpts.peerId.toString('hex'));\n// console.dir(signal);\n// client.signal(signal);\n// client.update();\n// 
client.scrape();\n// });\n// });\n\n// client.on('scrape', function (data) {\n// console.log('number of leechers in the swarm: ' + data.incomplete)\n// })\n\n// const handler: P2PConnectionHandler = {\n// connect$,\n// disconnect$,\n// message$,\n// response$,\n// async send(peer: P2PPeer, message: P2PMessage) {\n// await peer.send(JSON.stringify(message));\n// },\n// destroy() {\n// client.destroy();\n// connect$.complete();\n// disconnect$.complete();\n// message$.complete();\n// response$.complete();\n// return PROMISE_RESOLVE_VOID;\n// }\n// }\n// client.start();\n// client.update();\n// client.scrape();\n// setInterval(() => {\n// // client.update();\n// }, 10000);\n// return handler;\n// };\n\n// return creator;\n// }\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAGA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAGA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AAAA","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-webrtc/index.js b/dist/cjs/plugins/replication-webrtc/index.js deleted file mode 100644 index 9855e25b69e..00000000000 --- a/dist/cjs/plugins/replication-webrtc/index.js +++ /dev/null @@ -1,260 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _exportNames = { - replicateWebRTC: true, - RxWebRTCReplicationPool: true -}; -exports.RxWebRTCReplicationPool = void 0; -exports.replicateWebRTC = replicateWebRTC; -var _rxjs = require("rxjs"); -var _plugin = require("../../plugin.js"); 
-var _index = require("../../replication-protocol/index.js"); -var _index2 = require("../../plugins/utils/index.js"); -var _index3 = require("../leader-election/index.js"); -var _index4 = require("../replication/index.js"); -var _webrtcHelper = require("./webrtc-helper.js"); -Object.keys(_webrtcHelper).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _webrtcHelper[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _webrtcHelper[key]; - } - }); -}); -var _rxError = require("../../rx-error.js"); -var _signalingServer = require("./signaling-server.js"); -Object.keys(_signalingServer).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _signalingServer[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _signalingServer[key]; - } - }); -}); -var _webrtcTypes = require("./webrtc-types.js"); -Object.keys(_webrtcTypes).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _webrtcTypes[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _webrtcTypes[key]; - } - }); -}); -var _connectionHandlerSimplePeer = require("./connection-handler-simple-peer.js"); -Object.keys(_connectionHandlerSimplePeer).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _connectionHandlerSimplePeer[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: 
function () { - return _connectionHandlerSimplePeer[key]; - } - }); -}); -async function replicateWebRTC(options) { - var collection = options.collection; - (0, _plugin.addRxPlugin)(_index3.RxDBLeaderElectionPlugin); - - // fill defaults - if (options.pull) { - if (!options.pull.batchSize) { - options.pull.batchSize = 20; - } - } - if (options.push) { - if (!options.push.batchSize) { - options.push.batchSize = 20; - } - } - if (collection.database.multiInstance) { - await collection.database.waitForLeadership(); - } - - // used to easier debug stuff - var requestCounter = 0; - var requestFlag = (0, _index2.randomCouchString)(10); - function getRequestId() { - var count = requestCounter++; - return collection.database.token + '|' + requestFlag + '|' + count; - } - var storageToken = await collection.database.storageToken; - var pool = new RxWebRTCReplicationPool(collection, options, await options.connectionHandlerCreator(options)); - pool.subs.push(pool.connectionHandler.error$.subscribe(err => pool.error$.next(err)), pool.connectionHandler.disconnect$.subscribe(peer => pool.removePeer(peer))); - - /** - * Answer if someone requests our storage token - */ - pool.subs.push(pool.connectionHandler.message$.pipe((0, _rxjs.filter)(data => data.message.method === 'token')).subscribe(data => { - pool.connectionHandler.send(data.peer, { - id: data.message.id, - result: storageToken - }); - })); - var connectSub = pool.connectionHandler.connect$.pipe((0, _rxjs.filter)(() => !pool.canceled)).subscribe(async peer => { - if (options.isPeerValid) { - var isValid = await options.isPeerValid(peer); - if (!isValid) { - return; - } - } - var peerToken; - try { - var tokenResponse = await (0, _webrtcHelper.sendMessageAndAwaitAnswer)(pool.connectionHandler, peer, { - id: getRequestId(), - method: 'token', - params: [] - }); - peerToken = tokenResponse.result; - } catch (error) { - /** - * If could not get the tokenResponse, - * just ignore that peer. 
- */ - pool.error$.next((0, _rxError.newRxError)('RC_WEBRTC_PEER', { - error - })); - return; - } - var isMaster = await (0, _webrtcHelper.isMasterInWebRTCReplication)(collection.database.hashFunction, storageToken, peerToken); - var replicationState; - if (isMaster) { - var masterHandler = pool.masterReplicationHandler; - var masterChangeStreamSub = masterHandler.masterChangeStream$.subscribe(ev => { - var streamResponse = { - id: 'masterChangeStream$', - result: ev - }; - pool.connectionHandler.send(peer, streamResponse); - }); - - // clean up the subscription - pool.subs.push(masterChangeStreamSub, pool.connectionHandler.disconnect$.pipe((0, _rxjs.filter)(p => p === peer)).subscribe(() => masterChangeStreamSub.unsubscribe())); - var messageSub = pool.connectionHandler.message$.pipe((0, _rxjs.filter)(data => data.peer === peer), (0, _rxjs.filter)(data => data.message.method !== 'token')).subscribe(async data => { - var { - peer: msgPeer, - message - } = data; - /** - * If it is not a function, - * it means that the client requested the masterChangeStream$ - */ - var method = masterHandler[message.method].bind(masterHandler); - var result = await method(...message.params); - var response = { - id: message.id, - result - }; - pool.connectionHandler.send(msgPeer, response); - }); - pool.subs.push(messageSub); - } else { - replicationState = (0, _index4.replicateRxCollection)({ - replicationIdentifier: [collection.name, options.topic, peerToken].join('||'), - collection: collection, - autoStart: true, - deletedField: '_deleted', - live: true, - retryTime: options.retryTime, - waitForLeadership: false, - pull: options.pull ? 
Object.assign({}, options.pull, { - async handler(lastPulledCheckpoint) { - var answer = await (0, _webrtcHelper.sendMessageAndAwaitAnswer)(pool.connectionHandler, peer, { - method: 'masterChangesSince', - params: [lastPulledCheckpoint, (0, _index2.ensureNotFalsy)(options.pull).batchSize], - id: getRequestId() - }); - return answer.result; - }, - stream$: pool.connectionHandler.response$.pipe((0, _rxjs.filter)(m => m.response.id === 'masterChangeStream$'), (0, _rxjs.map)(m => m.response.result)) - }) : undefined, - push: options.push ? Object.assign({}, options.push, { - async handler(docs) { - var answer = await (0, _webrtcHelper.sendMessageAndAwaitAnswer)(pool.connectionHandler, peer, { - method: 'masterWrite', - params: [docs], - id: getRequestId() - }); - return answer.result; - } - }) : undefined - }); - } - pool.addPeer(peer, replicationState); - }); - pool.subs.push(connectSub); - return pool; -} - -/** - * Because the WebRTC replication runs between many instances, - * we use a Pool instead of returning a single replication state. 
- */ -var RxWebRTCReplicationPool = exports.RxWebRTCReplicationPool = /*#__PURE__*/function () { - function RxWebRTCReplicationPool(collection, options, connectionHandler) { - this.peerStates$ = new _rxjs.BehaviorSubject(new Map()); - this.canceled = false; - this.subs = []; - this.error$ = new _rxjs.Subject(); - this.collection = collection; - this.options = options; - this.connectionHandler = connectionHandler; - this.collection.onDestroy.push(() => this.cancel()); - this.masterReplicationHandler = (0, _index.rxStorageInstanceToReplicationHandler)(collection.storageInstance, collection.conflictHandler, collection.database.token); - } - var _proto = RxWebRTCReplicationPool.prototype; - _proto.addPeer = function addPeer(peer, - // only if isMaster=false it has a replicationState - replicationState) { - var peerState = { - peer, - replicationState, - subs: [] - }; - this.peerStates$.next(this.peerStates$.getValue().set(peer, peerState)); - if (replicationState) { - peerState.subs.push(replicationState.error$.subscribe(ev => this.error$.next(ev))); - } - }; - _proto.removePeer = function removePeer(peer) { - var peerState = (0, _index2.getFromMapOrThrow)(this.peerStates$.getValue(), peer); - this.peerStates$.getValue().delete(peer); - this.peerStates$.next(this.peerStates$.getValue()); - peerState.subs.forEach(sub => sub.unsubscribe()); - if (peerState.replicationState) { - peerState.replicationState.cancel(); - } - } - - // often used in unit tests - ; - _proto.awaitFirstPeer = function awaitFirstPeer() { - return (0, _rxjs.firstValueFrom)(this.peerStates$.pipe((0, _rxjs.filter)(peerStates => peerStates.size > 0))); - }; - _proto.cancel = async function cancel() { - if (this.canceled) { - return; - } - this.canceled = true; - this.subs.forEach(sub => sub.unsubscribe()); - Array.from(this.peerStates$.getValue().keys()).forEach(peer => { - this.removePeer(peer); - }); - await this.connectionHandler.destroy(); - }; - return RxWebRTCReplicationPool; -}(); // export * 
from './connection-handler-webtorrent'; -// export * from './connection-handler-p2pcf'; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-webrtc/index.js.map b/dist/cjs/plugins/replication-webrtc/index.js.map deleted file mode 100644 index 6b71d4ddca4..00000000000 --- a/dist/cjs/plugins/replication-webrtc/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_rxjs","require","_plugin","_index","_index2","_index3","_index4","_webrtcHelper","Object","keys","forEach","key","prototype","hasOwnProperty","call","_exportNames","exports","defineProperty","enumerable","get","_rxError","_signalingServer","_webrtcTypes","_connectionHandlerSimplePeer","replicateWebRTC","options","collection","addRxPlugin","RxDBLeaderElectionPlugin","pull","batchSize","push","database","multiInstance","waitForLeadership","requestCounter","requestFlag","randomCouchString","getRequestId","count","token","storageToken","pool","RxWebRTCReplicationPool","connectionHandlerCreator","subs","connectionHandler","error$","subscribe","err","next","disconnect$","peer","removePeer","message$","pipe","filter","data","message","method","send","id","result","connectSub","connect$","canceled","isPeerValid","isValid","peerToken","tokenResponse","sendMessageAndAwaitAnswer","params","error","newRxError","isMaster","isMasterInWebRTCReplication","hashFunction","replicationState","masterHandler","masterReplicationHandler","masterChangeStreamSub","masterChangeStream$","ev","streamResponse","p","unsubscribe","messageSub","msgPeer","bind","response","replicateRxCollection","replicationIdentifier","name","topic","join","autoStart","deletedField","live","retryTime","assign","handler","lastPulledCheckpoint","answer","ensureNotFalsy","stream$","response$","m","map","undefined","docs","addPeer","peerStates$","BehaviorSubject","Map","Subject","onDestroy","cancel","rxStorageInstanceToReplicationHandler","storageInstance","conflictHandler","_proto
","peerState","getValue","set","getFromMapOrThrow","delete","sub","awaitFirstPeer","firstValueFrom","peerStates","size","Array","from","destroy"],"sources":["../../../../src/plugins/replication-webrtc/index.ts"],"sourcesContent":["import {\n BehaviorSubject,\n filter,\n firstValueFrom,\n map,\n Subject,\n Subscription\n} from 'rxjs';\nimport { addRxPlugin } from '../../plugin.ts';\nimport { rxStorageInstanceToReplicationHandler } from '../../replication-protocol/index.ts';\nimport type {\n RxCollection,\n RxError,\n RxReplicationHandler,\n RxReplicationWriteToMasterRow,\n RxTypeError\n} from '../../types/index.d.ts';\nimport {\n ensureNotFalsy,\n getFromMapOrThrow,\n randomCouchString\n} from '../../plugins/utils/index.ts';\nimport { RxDBLeaderElectionPlugin } from '../leader-election/index.ts';\nimport { replicateRxCollection } from '../replication/index.ts';\nimport {\n isMasterInWebRTCReplication,\n sendMessageAndAwaitAnswer\n} from './webrtc-helper.ts';\nimport type {\n WebRTCConnectionHandler,\n WebRTCPeerState,\n WebRTCReplicationCheckpoint,\n WebRTCResponse,\n RxWebRTCReplicationState,\n SyncOptionsWebRTC\n} from './webrtc-types.ts';\nimport { newRxError } from '../../rx-error.ts';\n\n\nexport async function replicateWebRTC(\n options: SyncOptionsWebRTC\n): Promise> {\n const collection = options.collection;\n addRxPlugin(RxDBLeaderElectionPlugin);\n\n // fill defaults\n if (options.pull) {\n if (!options.pull.batchSize) {\n options.pull.batchSize = 20;\n }\n }\n if (options.push) {\n if (!options.push.batchSize) {\n options.push.batchSize = 20;\n }\n }\n\n if (collection.database.multiInstance) {\n await collection.database.waitForLeadership();\n }\n\n // used to easier debug stuff\n let requestCounter = 0;\n const requestFlag = randomCouchString(10);\n function getRequestId() {\n const count = requestCounter++;\n return collection.database.token + '|' + requestFlag + '|' + count;\n }\n\n const storageToken = await collection.database.storageToken;\n const 
pool = new RxWebRTCReplicationPool(\n collection,\n options,\n await options.connectionHandlerCreator(options)\n );\n\n\n pool.subs.push(\n pool.connectionHandler.error$.subscribe(err => pool.error$.next(err)),\n pool.connectionHandler.disconnect$.subscribe(peer => pool.removePeer(peer))\n );\n\n /**\n * Answer if someone requests our storage token\n */\n pool.subs.push(\n pool.connectionHandler.message$.pipe(\n filter(data => data.message.method === 'token')\n ).subscribe(data => {\n pool.connectionHandler.send(data.peer, {\n id: data.message.id,\n result: storageToken\n });\n })\n );\n\n const connectSub = pool.connectionHandler.connect$\n .pipe(\n filter(() => !pool.canceled)\n )\n .subscribe(async (peer) => {\n if (options.isPeerValid) {\n const isValid = await options.isPeerValid(peer);\n if (!isValid) {\n return;\n }\n }\n\n let peerToken: string;\n try {\n const tokenResponse = await sendMessageAndAwaitAnswer(\n pool.connectionHandler,\n peer,\n {\n id: getRequestId(),\n method: 'token',\n params: []\n }\n );\n peerToken = tokenResponse.result;\n } catch (error: any) {\n /**\n * If could not get the tokenResponse,\n * just ignore that peer.\n */\n pool.error$.next(newRxError('RC_WEBRTC_PEER', {\n error\n }));\n return;\n }\n const isMaster = await isMasterInWebRTCReplication(collection.database.hashFunction, storageToken, peerToken);\n\n let replicationState: RxWebRTCReplicationState | undefined;\n if (isMaster) {\n const masterHandler = pool.masterReplicationHandler;\n const masterChangeStreamSub = masterHandler.masterChangeStream$.subscribe(ev => {\n const streamResponse: WebRTCResponse = {\n id: 'masterChangeStream$',\n result: ev\n };\n pool.connectionHandler.send(peer, streamResponse);\n });\n\n // clean up the subscription\n pool.subs.push(\n masterChangeStreamSub,\n pool.connectionHandler.disconnect$.pipe(\n filter(p => p === peer)\n ).subscribe(() => masterChangeStreamSub.unsubscribe())\n );\n\n const messageSub = pool.connectionHandler.message$\n 
.pipe(\n filter(data => data.peer === peer),\n filter(data => data.message.method !== 'token')\n )\n .subscribe(async (data) => {\n const { peer: msgPeer, message } = data;\n /**\n * If it is not a function,\n * it means that the client requested the masterChangeStream$\n */\n const method = (masterHandler as any)[message.method].bind(masterHandler);\n const result = await (method as any)(...message.params);\n const response: WebRTCResponse = {\n id: message.id,\n result\n };\n pool.connectionHandler.send(msgPeer, response);\n });\n pool.subs.push(messageSub);\n } else {\n replicationState = replicateRxCollection({\n replicationIdentifier: [collection.name, options.topic, peerToken].join('||'),\n collection: collection,\n autoStart: true,\n deletedField: '_deleted',\n live: true,\n retryTime: options.retryTime,\n waitForLeadership: false,\n pull: options.pull ? Object.assign({}, options.pull, {\n async handler(lastPulledCheckpoint: WebRTCReplicationCheckpoint | undefined) {\n const answer = await sendMessageAndAwaitAnswer(\n pool.connectionHandler,\n peer,\n {\n method: 'masterChangesSince',\n params: [\n lastPulledCheckpoint,\n ensureNotFalsy(options.pull).batchSize\n ],\n id: getRequestId()\n }\n );\n return answer.result;\n },\n stream$: pool.connectionHandler.response$.pipe(\n filter(m => m.response.id === 'masterChangeStream$'),\n map(m => m.response.result)\n )\n\n }) : undefined,\n push: options.push ? 
Object.assign({}, options.push, {\n async handler(docs: RxReplicationWriteToMasterRow[]) {\n const answer = await sendMessageAndAwaitAnswer(\n pool.connectionHandler,\n peer,\n {\n method: 'masterWrite',\n params: [docs],\n id: getRequestId()\n }\n );\n return answer.result;\n }\n }) : undefined\n });\n }\n pool.addPeer(peer, replicationState);\n });\n pool.subs.push(connectSub);\n return pool;\n}\n\n\n/**\n * Because the WebRTC replication runs between many instances,\n * we use a Pool instead of returning a single replication state.\n */\nexport class RxWebRTCReplicationPool {\n peerStates$: BehaviorSubject>> = new BehaviorSubject(new Map());\n canceled: boolean = false;\n masterReplicationHandler: RxReplicationHandler;\n subs: Subscription[] = [];\n\n public error$ = new Subject();\n\n constructor(\n public readonly collection: RxCollection,\n public readonly options: SyncOptionsWebRTC,\n public readonly connectionHandler: WebRTCConnectionHandler\n ) {\n this.collection.onDestroy.push(() => this.cancel());\n this.masterReplicationHandler = rxStorageInstanceToReplicationHandler(\n collection.storageInstance,\n collection.conflictHandler,\n collection.database.token,\n );\n }\n\n addPeer(\n peer: PeerType,\n // only if isMaster=false it has a replicationState\n replicationState?: RxWebRTCReplicationState\n ) {\n const peerState: WebRTCPeerState = {\n peer,\n replicationState,\n subs: []\n };\n this.peerStates$.next(this.peerStates$.getValue().set(peer, peerState));\n if (replicationState) {\n peerState.subs.push(\n replicationState.error$.subscribe(ev => this.error$.next(ev))\n );\n }\n }\n removePeer(peer: PeerType) {\n const peerState = getFromMapOrThrow(this.peerStates$.getValue(), peer);\n this.peerStates$.getValue().delete(peer);\n this.peerStates$.next(this.peerStates$.getValue());\n peerState.subs.forEach(sub => sub.unsubscribe());\n if (peerState.replicationState) {\n peerState.replicationState.cancel();\n }\n }\n\n // often used in unit tests\n 
awaitFirstPeer() {\n return firstValueFrom(\n this.peerStates$.pipe(\n filter(peerStates => peerStates.size > 0)\n )\n );\n }\n\n public async cancel() {\n if (this.canceled) {\n return;\n }\n this.canceled = true;\n this.subs.forEach(sub => sub.unsubscribe());\n Array.from(this.peerStates$.getValue().keys()).forEach(peer => {\n this.removePeer(peer);\n });\n await this.connectionHandler.destroy();\n }\n}\n\nexport * from './webrtc-helper.ts';\nexport * from './signaling-server.ts';\nexport * from './webrtc-types.ts';\n// export * from './connection-handler-webtorrent';\n// export * from './connection-handler-p2pcf';\nexport * from './connection-handler-simple-peer.ts';\n"],"mappings":";;;;;;;;;;;AAAA,IAAAA,KAAA,GAAAC,OAAA;AAQA,IAAAC,OAAA,GAAAD,OAAA;AACA,IAAAE,MAAA,GAAAF,OAAA;AAQA,IAAAG,OAAA,GAAAH,OAAA;AAKA,IAAAI,OAAA,GAAAJ,OAAA;AACA,IAAAK,OAAA,GAAAL,OAAA;AACA,IAAAM,aAAA,GAAAN,OAAA;AAmRAO,MAAA,CAAAC,IAAA,CAAAF,aAAA,EAAAG,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAJ,aAAA,CAAAI,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAZ,aAAA,CAAAI,GAAA;IAAA;EAAA;AAAA;AAvQA,IAAAS,QAAA,GAAAnB,OAAA;AAwQA,IAAAoB,gBAAA,GAAApB,OAAA;AAAAO,MAAA,CAAAC,IAAA,CAAAY,gBAAA,EAAAX,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAU,gBAAA,CAAAV,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAE,gBAAA,CAAAV,GAAA;IAAA;EAAA;AAAA;AACA,IAAAW,YAAA,GAAArB,OAAA;AAAAO,MAAA,CAAAC,IAAA,CAAAa,YAAA,EAAAZ,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAW,YAAA,CAAAX,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAG,YAAA,CAAAX,G
AAA;IAAA;EAAA;AAAA;AAGA,IAAAY,4BAAA,GAAAtB,OAAA;AAAAO,MAAA,CAAAC,IAAA,CAAAc,4BAAA,EAAAb,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAY,4BAAA,CAAAZ,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAI,4BAAA,CAAAZ,GAAA;IAAA;EAAA;AAAA;AAzQO,eAAea,eAAeA,CACjCC,OAA+C,EACM;EACrD,IAAMC,UAAU,GAAGD,OAAO,CAACC,UAAU;EACrC,IAAAC,mBAAW,EAACC,gCAAwB,CAAC;;EAErC;EACA,IAAIH,OAAO,CAACI,IAAI,EAAE;IACd,IAAI,CAACJ,OAAO,CAACI,IAAI,CAACC,SAAS,EAAE;MACzBL,OAAO,CAACI,IAAI,CAACC,SAAS,GAAG,EAAE;IAC/B;EACJ;EACA,IAAIL,OAAO,CAACM,IAAI,EAAE;IACd,IAAI,CAACN,OAAO,CAACM,IAAI,CAACD,SAAS,EAAE;MACzBL,OAAO,CAACM,IAAI,CAACD,SAAS,GAAG,EAAE;IAC/B;EACJ;EAEA,IAAIJ,UAAU,CAACM,QAAQ,CAACC,aAAa,EAAE;IACnC,MAAMP,UAAU,CAACM,QAAQ,CAACE,iBAAiB,CAAC,CAAC;EACjD;;EAEA;EACA,IAAIC,cAAc,GAAG,CAAC;EACtB,IAAMC,WAAW,GAAG,IAAAC,yBAAiB,EAAC,EAAE,CAAC;EACzC,SAASC,YAAYA,CAAA,EAAG;IACpB,IAAMC,KAAK,GAAGJ,cAAc,EAAE;IAC9B,OAAOT,UAAU,CAACM,QAAQ,CAACQ,KAAK,GAAG,GAAG,GAAGJ,WAAW,GAAG,GAAG,GAAGG,KAAK;EACtE;EAEA,IAAME,YAAY,GAAG,MAAMf,UAAU,CAACM,QAAQ,CAACS,YAAY;EAC3D,IAAMC,IAAI,GAAG,IAAIC,uBAAuB,CACpCjB,UAAU,EACVD,OAAO,EACP,MAAMA,OAAO,CAACmB,wBAAwB,CAACnB,OAAO,CAClD,CAAC;EAGDiB,IAAI,CAACG,IAAI,CAACd,IAAI,CACVW,IAAI,CAACI,iBAAiB,CAACC,MAAM,CAACC,SAAS,CAACC,GAAG,IAAIP,IAAI,CAACK,MAAM,CAACG,IAAI,CAACD,GAAG,CAAC,CAAC,EACrEP,IAAI,CAACI,iBAAiB,CAACK,WAAW,CAACH,SAAS,CAACI,IAAI,IAAIV,IAAI,CAACW,UAAU,CAACD,IAAI,CAAC,CAC9E,CAAC;;EAED;AACJ;AACA;EACIV,IAAI,CAACG,IAAI,CAACd,IAAI,CACVW,IAAI,CAACI,iBAAiB,CAACQ,QAAQ,CAACC,IAAI,CAChC,IAAAC,YAAM,EAACC,IAAI,IAAIA,IAAI,CAACC,OAAO,CAACC,MAAM,KAAK,OAAO,CAClD,CAAC,CAACX,SAAS,CAACS,IAAI,IAAI;IAChBf,IAAI,CAACI,iBAAiB,CAACc,IAAI,CAACH,IAAI,CAACL,IAAI,EAAE;MACnCS,EAAE,EAAEJ,IAAI,CAACC,OAAO,CAACG,EAAE;MACnBC,MAAM,EAAErB;IACZ,CAAC,CAAC;EACN,CAAC,CACL,CAAC;EAED,IAAMsB,UAAU,GAAGrB,IAAI,CAACI,iBAAiB,CAACkB,QAAQ,CAC7CT,IAAI,CACD,IAAAC,YAAM,EAAC,MAAM,CAACd,IAAI,
CAACuB,QAAQ,CAC/B,CAAC,CACAjB,SAAS,CAAC,MAAOI,IAAI,IAAK;IACvB,IAAI3B,OAAO,CAACyC,WAAW,EAAE;MACrB,IAAMC,OAAO,GAAG,MAAM1C,OAAO,CAACyC,WAAW,CAACd,IAAI,CAAC;MAC/C,IAAI,CAACe,OAAO,EAAE;QACV;MACJ;IACJ;IAEA,IAAIC,SAAiB;IACrB,IAAI;MACA,IAAMC,aAAa,GAAG,MAAM,IAAAC,uCAAyB,EACjD5B,IAAI,CAACI,iBAAiB,EACtBM,IAAI,EACJ;QACIS,EAAE,EAAEvB,YAAY,CAAC,CAAC;QAClBqB,MAAM,EAAE,OAAO;QACfY,MAAM,EAAE;MACZ,CACJ,CAAC;MACDH,SAAS,GAAGC,aAAa,CAACP,MAAM;IACpC,CAAC,CAAC,OAAOU,KAAU,EAAE;MACjB;AAChB;AACA;AACA;MACgB9B,IAAI,CAACK,MAAM,CAACG,IAAI,CAAC,IAAAuB,mBAAU,EAAC,gBAAgB,EAAE;QAC1CD;MACJ,CAAC,CAAC,CAAC;MACH;IACJ;IACA,IAAME,QAAQ,GAAG,MAAM,IAAAC,yCAA2B,EAACjD,UAAU,CAACM,QAAQ,CAAC4C,YAAY,EAAEnC,YAAY,EAAE2B,SAAS,CAAC;IAE7G,IAAIS,gBAAiE;IACrE,IAAIH,QAAQ,EAAE;MACV,IAAMI,aAAa,GAAGpC,IAAI,CAACqC,wBAAwB;MACnD,IAAMC,qBAAqB,GAAGF,aAAa,CAACG,mBAAmB,CAACjC,SAAS,CAACkC,EAAE,IAAI;QAC5E,IAAMC,cAA8B,GAAG;UACnCtB,EAAE,EAAE,qBAAqB;UACzBC,MAAM,EAAEoB;QACZ,CAAC;QACDxC,IAAI,CAACI,iBAAiB,CAACc,IAAI,CAACR,IAAI,EAAE+B,cAAc,CAAC;MACrD,CAAC,CAAC;;MAEF;MACAzC,IAAI,CAACG,IAAI,CAACd,IAAI,CACViD,qBAAqB,EACrBtC,IAAI,CAACI,iBAAiB,CAACK,WAAW,CAACI,IAAI,CACnC,IAAAC,YAAM,EAAC4B,CAAC,IAAIA,CAAC,KAAKhC,IAAI,CAC1B,CAAC,CAACJ,SAAS,CAAC,MAAMgC,qBAAqB,CAACK,WAAW,CAAC,CAAC,CACzD,CAAC;MAED,IAAMC,UAAU,GAAG5C,IAAI,CAACI,iBAAiB,CAACQ,QAAQ,CAC7CC,IAAI,CACD,IAAAC,YAAM,EAACC,IAAI,IAAIA,IAAI,CAACL,IAAI,KAAKA,IAAI,CAAC,EAClC,IAAAI,YAAM,EAACC,IAAI,IAAIA,IAAI,CAACC,OAAO,CAACC,MAAM,KAAK,OAAO,CAClD,CAAC,CACAX,SAAS,CAAC,MAAOS,IAAI,IAAK;QACvB,IAAM;UAAEL,IAAI,EAAEmC,OAAO;UAAE7B;QAAQ,CAAC,GAAGD,IAAI;QACvC;AACxB;AACA;AACA;QACwB,IAAME,MAAM,GAAImB,aAAa,CAASpB,OAAO,CAACC,MAAM,CAAC,CAAC6B,IAAI,CAACV,aAAa,CAAC;QACzE,IAAMhB,MAAM,GAAG,MAAOH,MAAM,CAAS,GAAGD,OAAO,CAACa,MAAM,CAAC;QACvD,IAAMkB,QAAwB,GAAG;UAC7B5B,EAAE,EAAEH,OAAO,CAACG,EAAE;UACdC;QACJ,CAAC;QACDpB,IAAI,CAACI,iBAAiB,CAACc,IAAI,CAAC2B,OAAO,EAAEE,QAAQ,CAAC;MAClD,CAAC,CAAC;MACN/C,IAAI,CAACG,IAAI,CAACd,IAAI,CAACuD,UAAU,CAAC;IAC9B,CAAC,MAAM;MACHT,gBAAgB,GAAG,IAAAa,6BAAqB,EAAC;QACrCC,qBAAqB,EAAE,CAACjE,UAAU,CAACk
E,IAAI,EAAEnE,OAAO,CAACoE,KAAK,EAAEzB,SAAS,CAAC,CAAC0B,IAAI,CAAC,IAAI,CAAC;QAC7EpE,UAAU,EAAEA,UAAU;QACtBqE,SAAS,EAAE,IAAI;QACfC,YAAY,EAAE,UAAU;QACxBC,IAAI,EAAE,IAAI;QACVC,SAAS,EAAEzE,OAAO,CAACyE,SAAS;QAC5BhE,iBAAiB,EAAE,KAAK;QACxBL,IAAI,EAAEJ,OAAO,CAACI,IAAI,GAAGrB,MAAM,CAAC2F,MAAM,CAAC,CAAC,CAAC,EAAE1E,OAAO,CAACI,IAAI,EAAE;UACjD,MAAMuE,OAAOA,CAACC,oBAA6D,EAAE;YACzE,IAAMC,MAAM,GAAG,MAAM,IAAAhC,uCAAyB,EAC1C5B,IAAI,CAACI,iBAAiB,EACtBM,IAAI,EACJ;cACIO,MAAM,EAAE,oBAAoB;cAC5BY,MAAM,EAAE,CACJ8B,oBAAoB,EACpB,IAAAE,sBAAc,EAAC9E,OAAO,CAACI,IAAI,CAAC,CAACC,SAAS,CACzC;cACD+B,EAAE,EAAEvB,YAAY,CAAC;YACrB,CACJ,CAAC;YACD,OAAOgE,MAAM,CAACxC,MAAM;UACxB,CAAC;UACD0C,OAAO,EAAE9D,IAAI,CAACI,iBAAiB,CAAC2D,SAAS,CAAClD,IAAI,CAC1C,IAAAC,YAAM,EAACkD,CAAC,IAAIA,CAAC,CAACjB,QAAQ,CAAC5B,EAAE,KAAK,qBAAqB,CAAC,EACpD,IAAA8C,SAAG,EAACD,CAAC,IAAIA,CAAC,CAACjB,QAAQ,CAAC3B,MAAM,CAC9B;QAEJ,CAAC,CAAC,GAAG8C,SAAS;QACd7E,IAAI,EAAEN,OAAO,CAACM,IAAI,GAAGvB,MAAM,CAAC2F,MAAM,CAAC,CAAC,CAAC,EAAE1E,OAAO,CAACM,IAAI,EAAE;UACjD,MAAMqE,OAAOA,CAACS,IAAgD,EAAE;YAC5D,IAAMP,MAAM,GAAG,MAAM,IAAAhC,uCAAyB,EAC1C5B,IAAI,CAACI,iBAAiB,EACtBM,IAAI,EACJ;cACIO,MAAM,EAAE,aAAa;cACrBY,MAAM,EAAE,CAACsC,IAAI,CAAC;cACdhD,EAAE,EAAEvB,YAAY,CAAC;YACrB,CACJ,CAAC;YACD,OAAOgE,MAAM,CAACxC,MAAM;UACxB;QACJ,CAAC,CAAC,GAAG8C;MACT,CAAC,CAAC;IACN;IACAlE,IAAI,CAACoE,OAAO,CAAC1D,IAAI,EAAEyB,gBAAgB,CAAC;EACxC,CAAC,CAAC;EACNnC,IAAI,CAACG,IAAI,CAACd,IAAI,CAACgC,UAAU,CAAC;EAC1B,OAAOrB,IAAI;AACf;;AAGA;AACA;AACA;AACA;AAHA,IAIaC,uBAAuB,GAAA3B,OAAA,CAAA2B,uBAAA;EAQhC,SAAAA,wBACoBjB,UAAmC,EACnCD,OAA+C,EAC/CqB,iBAAoD,EACtE;IAAA,KAXFiE,WAAW,GAAyE,IAAIC,qBAAe,CAAC,IAAIC,GAAG,CAAC,CAAC,CAAC;IAAA,KAClHhD,QAAQ,GAAY,KAAK;IAAA,KAEzBpB,IAAI,GAAmB,EAAE;IAAA,KAElBE,MAAM,GAAG,IAAImE,aAAO,CAAwB,CAAC;IAAA,KAGhCxF,UAAmC,GAAnCA,UAAmC;IAAA,KACnCD,OAA+C,GAA/CA,OAA+C;IAAA,KAC/CqB,iBAAoD,GAApDA,iBAAoD;IAEpE,IAAI,CAACpB,UAAU,CAACyF,SAAS,CAACpF,IAAI,CAAC,MAAM,IAAI,CAACqF,MAAM,CAAC,CAAC,CAAC;IACnD,IAAI,CAACrC,wBAAwB,GAAG,IAAAsC,4CAAqC,EACjE3F,UAAU,CAAC4F,eAAe,EAC1B5F,UAAU,CAAC6F,e
AAe,EAC1B7F,UAAU,CAACM,QAAQ,CAACQ,KACxB,CAAC;EACL;EAAC,IAAAgF,MAAA,GAAA7E,uBAAA,CAAA/B,SAAA;EAAA4G,MAAA,CAEDV,OAAO,GAAP,SAAAA,QACI1D,IAAc;EACd;EACAyB,gBAAsD,EACxD;IACE,IAAM4C,SAA+C,GAAG;MACpDrE,IAAI;MACJyB,gBAAgB;MAChBhC,IAAI,EAAE;IACV,CAAC;IACD,IAAI,CAACkE,WAAW,CAAC7D,IAAI,CAAC,IAAI,CAAC6D,WAAW,CAACW,QAAQ,CAAC,CAAC,CAACC,GAAG,CAACvE,IAAI,EAAEqE,SAAS,CAAC,CAAC;IACvE,IAAI5C,gBAAgB,EAAE;MAClB4C,SAAS,CAAC5E,IAAI,CAACd,IAAI,CACf8C,gBAAgB,CAAC9B,MAAM,CAACC,SAAS,CAACkC,EAAE,IAAI,IAAI,CAACnC,MAAM,CAACG,IAAI,CAACgC,EAAE,CAAC,CAChE,CAAC;IACL;EACJ,CAAC;EAAAsC,MAAA,CACDnE,UAAU,GAAV,SAAAA,WAAWD,IAAc,EAAE;IACvB,IAAMqE,SAAS,GAAG,IAAAG,yBAAiB,EAAC,IAAI,CAACb,WAAW,CAACW,QAAQ,CAAC,CAAC,EAAEtE,IAAI,CAAC;IACtE,IAAI,CAAC2D,WAAW,CAACW,QAAQ,CAAC,CAAC,CAACG,MAAM,CAACzE,IAAI,CAAC;IACxC,IAAI,CAAC2D,WAAW,CAAC7D,IAAI,CAAC,IAAI,CAAC6D,WAAW,CAACW,QAAQ,CAAC,CAAC,CAAC;IAClDD,SAAS,CAAC5E,IAAI,CAACnC,OAAO,CAACoH,GAAG,IAAIA,GAAG,CAACzC,WAAW,CAAC,CAAC,CAAC;IAChD,IAAIoC,SAAS,CAAC5C,gBAAgB,EAAE;MAC5B4C,SAAS,CAAC5C,gBAAgB,CAACuC,MAAM,CAAC,CAAC;IACvC;EACJ;;EAEA;EAAA;EAAAI,MAAA,CACAO,cAAc,GAAd,SAAAA,eAAA,EAAiB;IACb,OAAO,IAAAC,oBAAc,EACjB,IAAI,CAACjB,WAAW,CAACxD,IAAI,CACjB,IAAAC,YAAM,EAACyE,UAAU,IAAIA,UAAU,CAACC,IAAI,GAAG,CAAC,CAC5C,CACJ,CAAC;EACL,CAAC;EAAAV,MAAA,CAEYJ,MAAM,GAAnB,eAAAA,OAAA,EAAsB;IAClB,IAAI,IAAI,CAACnD,QAAQ,EAAE;MACf;IACJ;IACA,IAAI,CAACA,QAAQ,GAAG,IAAI;IACpB,IAAI,CAACpB,IAAI,CAACnC,OAAO,CAACoH,GAAG,IAAIA,GAAG,CAACzC,WAAW,CAAC,CAAC,CAAC;IAC3C8C,KAAK,CAACC,IAAI,CAAC,IAAI,CAACrB,WAAW,CAACW,QAAQ,CAAC,CAAC,CAACjH,IAAI,CAAC,CAAC,CAAC,CAACC,OAAO,CAAC0C,IAAI,IAAI;MAC3D,IAAI,CAACC,UAAU,CAACD,IAAI,CAAC;IACzB,CAAC,CAAC;IACF,MAAM,IAAI,CAACN,iBAAiB,CAACuF,OAAO,CAAC,CAAC;EAC1C,CAAC;EAAA,OAAA1F,uBAAA;AAAA,KAML;AACA","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-webrtc/signaling-server.js b/dist/cjs/plugins/replication-webrtc/signaling-server.js deleted file mode 100644 index f310af3e87b..00000000000 --- a/dist/cjs/plugins/replication-webrtc/signaling-server.js 
+++ /dev/null @@ -1,151 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.PEER_ID_LENGTH = void 0; -exports.startSignalingServerSimplePeer = startSignalingServerSimplePeer; -var _index = require("../utils/index.js"); -var _connectionHandlerSimplePeer = require("./connection-handler-simple-peer.js"); -var PEER_ID_LENGTH = exports.PEER_ID_LENGTH = 12; -/** - * Starts a WebRTC signaling server - * that can be used in tests. -*/ -async function startSignalingServerSimplePeer(serverOptions) { - var { - WebSocketServer - } = await import('ws'); - var wss = new WebSocketServer(serverOptions); - var peerById = new Map(); - var peersByRoom = new Map(); - var serverClosed = false; - wss.on('close', () => { - serverClosed = true; - peerById.clear(); - peersByRoom.clear(); - }); - - /** - * Clients can disconnect without telling that to the - * server. Therefore we have to automatically disconnect clients that - * have not send a ping message in the last 2 minutes. - */ - (async () => { - var _loop = async function () { - await (0, _index.promiseWait)(1000 * 5); - var minTime = Date.now() - _connectionHandlerSimplePeer.SIMPLE_PEER_PING_INTERVAL; - Array.from(peerById.values()).forEach(peer => { - if (peer.lastPing < minTime) { - disconnectSocket(peer.id, 'no ping for 2 minutes'); - } - }); - }; - while (!serverClosed) { - await _loop(); - } - })(); - function disconnectSocket(peerId, reason) { - console.log('# disconnect peer ' + peerId + ' reason: ' + reason); - var peer = peerById.get(peerId); - if (peer) { - peer.socket.close && peer.socket.close(undefined, reason); - peer.rooms.forEach(roomId => { - var room = peersByRoom.get(roomId); - room?.delete(peerId); - if (room && room.size === 0) { - peersByRoom.delete(roomId); - } - }); - } - peerById.delete(peerId); - } - wss.on('connection', function (ws) { - /** - * PeerID is created by the server to prevent malicious - * actors from falsy claiming other peoples ids. 
- */ - var peerId = (0, _index.randomCouchString)(PEER_ID_LENGTH); - var peer = { - id: peerId, - socket: ws, - rooms: new Set(), - lastPing: Date.now() - }; - peerById.set(peerId, peer); - sendMessage(ws, { - type: 'init', - yourPeerId: peerId - }); - ws.on('error', err => { - console.error('SERVER ERROR:'); - console.dir(err); - disconnectSocket(peerId, 'socket errored'); - }); - ws.on('close', () => { - disconnectSocket(peerId, 'socket disconnected'); - }); - ws.on('message', msgEvent => { - peer.lastPing = Date.now(); - var message = JSON.parse(msgEvent.toString()); - var type = message.type; - switch (type) { - case 'join': - var roomId = message.room; - if (!validateIdString(roomId) || !validateIdString(peerId)) { - disconnectSocket(peerId, 'invalid ids'); - return; - } - if (peer.rooms.has(peerId)) { - return; - } - peer.rooms.add(roomId); - var room = (0, _index.getFromMapOrCreate)(peersByRoom, message.room, () => new Set()); - room.add(peerId); - - // tell everyone about new room state - room.forEach(otherPeerId => { - var otherPeer = peerById.get(otherPeerId); - if (otherPeer) { - sendMessage(otherPeer.socket, { - type: 'joined', - otherPeerIds: Array.from(room) - }); - } - }); - break; - case 'signal': - if (message.senderPeerId !== peerId) { - disconnectSocket(peerId, 'spoofed sender'); - return; - } - var receiver = peerById.get(message.receiverPeerId); - if (receiver) { - sendMessage(receiver.socket, message); - } - break; - case 'ping': - break; - default: - disconnectSocket(peerId, 'unknown message type ' + type); - } - }); - }); - return { - port: serverOptions.port, - server: wss, - localUrl: 'ws://localhost:' + serverOptions.port - }; -} -function sendMessage(ws, message) { - var msgString = JSON.stringify(message); - ws.send(msgString); -} -function validateIdString(roomId) { - if (typeof roomId === 'string' && roomId.length > 5 && roomId.length < 100) { - return true; - } else { - return false; - } -} -//# 
sourceMappingURL=signaling-server.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-webrtc/signaling-server.js.map b/dist/cjs/plugins/replication-webrtc/signaling-server.js.map deleted file mode 100644 index 7982917a463..00000000000 --- a/dist/cjs/plugins/replication-webrtc/signaling-server.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"signaling-server.js","names":["_index","require","_connectionHandlerSimplePeer","PEER_ID_LENGTH","exports","startSignalingServerSimplePeer","serverOptions","WebSocketServer","wss","peerById","Map","peersByRoom","serverClosed","on","clear","_loop","promiseWait","minTime","Date","now","SIMPLE_PEER_PING_INTERVAL","Array","from","values","forEach","peer","lastPing","disconnectSocket","id","peerId","reason","console","log","get","socket","close","undefined","rooms","roomId","room","delete","size","ws","randomCouchString","Set","set","sendMessage","type","yourPeerId","err","error","dir","msgEvent","message","JSON","parse","toString","validateIdString","has","add","getFromMapOrCreate","otherPeerId","otherPeer","otherPeerIds","senderPeerId","receiver","receiverPeerId","port","server","localUrl","msgString","stringify","send","length"],"sources":["../../../../src/plugins/replication-webrtc/signaling-server.ts"],"sourcesContent":["import {\n getFromMapOrCreate,\n promiseWait,\n randomCouchString\n} from '../utils/index.ts';\nimport {\n SIMPLE_PEER_PING_INTERVAL,\n type PeerMessage\n} from './connection-handler-simple-peer.ts';\nimport type {\n WebSocket,\n ServerOptions\n} from 'ws';\n\nexport const PEER_ID_LENGTH = 12;\nexport type ServerPeer = {\n id: string;\n socket: WebSocket;\n rooms: Set;\n lastPing: number;\n};\n\n\n/**\n * Starts a WebRTC signaling server\n * that can be used in tests.\n*/\nexport async function startSignalingServerSimplePeer(\n serverOptions: ServerOptions\n) {\n const { WebSocketServer } = await import('ws');\n const wss = new WebSocketServer(serverOptions);\n\n const peerById = 
new Map();\n const peersByRoom = new Map>();\n\n let serverClosed = false;\n wss.on('close', () => {\n serverClosed = true\n peerById.clear();\n peersByRoom.clear();\n });\n\n /**\n * Clients can disconnect without telling that to the\n * server. Therefore we have to automatically disconnect clients that\n * have not send a ping message in the last 2 minutes.\n */\n (async () => {\n while (!serverClosed) {\n await promiseWait(1000 * 5);\n const minTime = Date.now() - SIMPLE_PEER_PING_INTERVAL;\n Array.from(peerById.values()).forEach(peer => {\n if (peer.lastPing < minTime) {\n disconnectSocket(peer.id, 'no ping for 2 minutes');\n }\n });\n }\n })();\n\n function disconnectSocket(peerId: string, reason: string) {\n console.log('# disconnect peer ' + peerId + ' reason: ' + reason);\n const peer = peerById.get(peerId);\n if (peer) {\n peer.socket.close && peer.socket.close(undefined, reason);\n peer.rooms.forEach(roomId => {\n const room = peersByRoom.get(roomId);\n room?.delete(peerId);\n if (room && room.size === 0) {\n peersByRoom.delete(roomId);\n }\n });\n }\n peerById.delete(peerId);\n }\n\n wss.on('connection', function (ws) {\n /**\n * PeerID is created by the server to prevent malicious\n * actors from falsy claiming other peoples ids.\n */\n const peerId = randomCouchString(PEER_ID_LENGTH);\n const peer: ServerPeer = {\n id: peerId,\n socket: ws,\n rooms: new Set(),\n lastPing: Date.now()\n };\n peerById.set(peerId, peer);\n\n sendMessage(ws, { type: 'init', yourPeerId: peerId });\n\n\n ws.on('error', err => {\n console.error('SERVER ERROR:');\n console.dir(err);\n disconnectSocket(peerId, 'socket errored');\n });\n ws.on('close', () => {\n disconnectSocket(peerId, 'socket disconnected');\n });\n\n ws.on('message', msgEvent => {\n peer.lastPing = Date.now();\n const message = JSON.parse(msgEvent.toString());\n const type = message.type;\n switch (type) {\n case 'join':\n const roomId = message.room;\n if (\n !validateIdString(roomId) ||\n 
!validateIdString(peerId)\n ) {\n disconnectSocket(peerId, 'invalid ids');\n return;\n }\n\n if (peer.rooms.has(peerId)) {\n return;\n }\n peer.rooms.add(roomId);\n\n\n const room = getFromMapOrCreate(\n peersByRoom,\n message.room,\n () => new Set()\n );\n\n room.add(peerId);\n\n // tell everyone about new room state\n room.forEach(otherPeerId => {\n const otherPeer = peerById.get(otherPeerId);\n if (otherPeer) {\n sendMessage(\n otherPeer.socket,\n {\n type: 'joined',\n otherPeerIds: Array.from(room)\n }\n );\n }\n });\n break;\n case 'signal':\n if (\n message.senderPeerId !== peerId\n ) {\n disconnectSocket(peerId, 'spoofed sender');\n return;\n }\n const receiver = peerById.get(message.receiverPeerId);\n if (receiver) {\n sendMessage(\n receiver.socket,\n message\n );\n }\n break;\n case 'ping':\n break;\n default:\n disconnectSocket(peerId, 'unknown message type ' + type);\n }\n\n });\n });\n\n return {\n port: serverOptions.port,\n server: wss,\n localUrl: 'ws://localhost:' + serverOptions.port\n };\n}\n\n\nfunction sendMessage(ws: WebSocket, message: PeerMessage) {\n const msgString = JSON.stringify(message);\n ws.send(msgString);\n}\n\nfunction validateIdString(roomId: string): boolean {\n if (\n typeof roomId === 'string' &&\n roomId.length > 5 &&\n roomId.length < 100\n ) {\n return true;\n } else {\n return false;\n 
}\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AAKA,IAAAC,4BAAA,GAAAD,OAAA;AASO,IAAME,cAAc,GAAAC,OAAA,CAAAD,cAAA,GAAG,EAAE;AAShC;AACA;AACA;AACA;AACO,eAAeE,8BAA8BA,CAChDC,aAA4B,EAC9B;EACE,IAAM;IAAEC;EAAgB,CAAC,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC;EAC9C,IAAMC,GAAG,GAAG,IAAID,eAAe,CAACD,aAAa,CAAC;EAE9C,IAAMG,QAAQ,GAAG,IAAIC,GAAG,CAAqB,CAAC;EAC9C,IAAMC,WAAW,GAAG,IAAID,GAAG,CAAsB,CAAC;EAElD,IAAIE,YAAY,GAAG,KAAK;EACxBJ,GAAG,CAACK,EAAE,CAAC,OAAO,EAAE,MAAM;IAClBD,YAAY,GAAG,IAAI;IACnBH,QAAQ,CAACK,KAAK,CAAC,CAAC;IAChBH,WAAW,CAACG,KAAK,CAAC,CAAC;EACvB,CAAC,CAAC;;EAEF;AACJ;AACA;AACA;AACA;EACI,CAAC,YAAY;IAAA,IAAAC,KAAA,kBAAAA,CAAA,EACa;MAClB,MAAM,IAAAC,kBAAW,EAAC,IAAI,GAAG,CAAC,CAAC;MAC3B,IAAMC,OAAO,GAAGC,IAAI,CAACC,GAAG,CAAC,CAAC,GAAGC,sDAAyB;MACtDC,KAAK,CAACC,IAAI,CAACb,QAAQ,CAACc,MAAM,CAAC,CAAC,CAAC,CAACC,OAAO,CAACC,IAAI,IAAI;QAC1C,IAAIA,IAAI,CAACC,QAAQ,GAAGT,OAAO,EAAE;UACzBU,gBAAgB,CAACF,IAAI,CAACG,EAAE,EAAE,uBAAuB,CAAC;QACtD;MACJ,CAAC,CAAC;IACN,CAAC;IARD,OAAO,CAAChB,YAAY;MAAA,MAAAG,KAAA;IAAA;EASxB,CAAC,EAAE,CAAC;EAEJ,SAASY,gBAAgBA,CAACE,MAAc,EAAEC,MAAc,EAAE;IACtDC,OAAO,CAACC,GAAG,CAAC,oBAAoB,GAAGH,MAAM,GAAG,WAAW,GAAGC,MAAM,CAAC;IACjE,IAAML,IAAI,GAAGhB,QAAQ,CAACwB,GAAG,CAACJ,MAAM,CAAC;IACjC,IAAIJ,IAAI,EAAE;MACNA,IAAI,CAACS,MAAM,CAACC,KAAK,IAAIV,IAAI,CAACS,MAAM,CAACC,KAAK,CAACC,SAAS,EAAEN,MAAM,CAAC;MACzDL,IAAI,CAACY,KAAK,CAACb,OAAO,CAACc,MAAM,IAAI;QACzB,IAAMC,IAAI,GAAG5B,WAAW,CAACsB,GAAG,CAACK,MAAM,CAAC;QACpCC,IAAI,EAAEC,MAAM,CAACX,MAAM,CAAC;QACpB,IAAIU,IAAI,IAAIA,IAAI,CAACE,IAAI,KAAK,CAAC,EAAE;UACzB9B,WAAW,CAAC6B,MAAM,CAACF,MAAM,CAAC;QAC9B;MACJ,CAAC,CAAC;IACN;IACA7B,QAAQ,CAAC+B,MAAM,CAACX,MAAM,CAAC;EAC3B;EAEArB,GAAG,CAACK,EAAE,CAAC,YAAY,EAAE,UAAU6B,EAAE,EAAE;IAC/B;AACR;AACA;AACA;IACQ,IAAMb,MAAM,GAAG,IAAAc,wBAAiB,EAACxC,cAAc,CAAC;IAChD,IAAMsB,IAAgB,GAAG;MACrBG,EAAE,EAAEC,MAAM;MACVK,MAAM,EAAEQ,EAAE;MACVL,KAAK,EAAE,IAAIO,GAAG,CAAC,CAAC;MAChBlB,QAAQ,EAAER,IAAI,CAACC,GAAG,CAAC;IACvB,CAAC;IACDV,QAAQ,CAACoC,GAAG,CAAChB,MAAM,EAAEJ,IAAI,CAAC;IAE1BqB,WAAW,CAACJ,EAAE,EAAE;MAAEK,IAAI,E
AAE,MAAM;MAAEC,UAAU,EAAEnB;IAAO,CAAC,CAAC;IAGrDa,EAAE,CAAC7B,EAAE,CAAC,OAAO,EAAEoC,GAAG,IAAI;MAClBlB,OAAO,CAACmB,KAAK,CAAC,eAAe,CAAC;MAC9BnB,OAAO,CAACoB,GAAG,CAACF,GAAG,CAAC;MAChBtB,gBAAgB,CAACE,MAAM,EAAE,gBAAgB,CAAC;IAC9C,CAAC,CAAC;IACFa,EAAE,CAAC7B,EAAE,CAAC,OAAO,EAAE,MAAM;MACjBc,gBAAgB,CAACE,MAAM,EAAE,qBAAqB,CAAC;IACnD,CAAC,CAAC;IAEFa,EAAE,CAAC7B,EAAE,CAAC,SAAS,EAAEuC,QAAQ,IAAI;MACzB3B,IAAI,CAACC,QAAQ,GAAGR,IAAI,CAACC,GAAG,CAAC,CAAC;MAC1B,IAAMkC,OAAO,GAAGC,IAAI,CAACC,KAAK,CAACH,QAAQ,CAACI,QAAQ,CAAC,CAAC,CAAC;MAC/C,IAAMT,IAAI,GAAGM,OAAO,CAACN,IAAI;MACzB,QAAQA,IAAI;QACR,KAAK,MAAM;UACP,IAAMT,MAAM,GAAGe,OAAO,CAACd,IAAI;UAC3B,IACI,CAACkB,gBAAgB,CAACnB,MAAM,CAAC,IACzB,CAACmB,gBAAgB,CAAC5B,MAAM,CAAC,EAC3B;YACEF,gBAAgB,CAACE,MAAM,EAAE,aAAa,CAAC;YACvC;UACJ;UAEA,IAAIJ,IAAI,CAACY,KAAK,CAACqB,GAAG,CAAC7B,MAAM,CAAC,EAAE;YACxB;UACJ;UACAJ,IAAI,CAACY,KAAK,CAACsB,GAAG,CAACrB,MAAM,CAAC;UAGtB,IAAMC,IAAI,GAAG,IAAAqB,yBAAkB,EAC3BjD,WAAW,EACX0C,OAAO,CAACd,IAAI,EACZ,MAAM,IAAIK,GAAG,CAAC,CAClB,CAAC;UAEDL,IAAI,CAACoB,GAAG,CAAC9B,MAAM,CAAC;;UAEhB;UACAU,IAAI,CAACf,OAAO,CAACqC,WAAW,IAAI;YACxB,IAAMC,SAAS,GAAGrD,QAAQ,CAACwB,GAAG,CAAC4B,WAAW,CAAC;YAC3C,IAAIC,SAAS,EAAE;cACXhB,WAAW,CACPgB,SAAS,CAAC5B,MAAM,EAChB;gBACIa,IAAI,EAAE,QAAQ;gBACdgB,YAAY,EAAE1C,KAAK,CAACC,IAAI,CAACiB,IAAI;cACjC,CACJ,CAAC;YACL;UACJ,CAAC,CAAC;UACF;QACJ,KAAK,QAAQ;UACT,IACIc,OAAO,CAACW,YAAY,KAAKnC,MAAM,EACjC;YACEF,gBAAgB,CAACE,MAAM,EAAE,gBAAgB,CAAC;YAC1C;UACJ;UACA,IAAMoC,QAAQ,GAAGxD,QAAQ,CAACwB,GAAG,CAACoB,OAAO,CAACa,cAAc,CAAC;UACrD,IAAID,QAAQ,EAAE;YACVnB,WAAW,CACPmB,QAAQ,CAAC/B,MAAM,EACfmB,OACJ,CAAC;UACL;UACA;QACJ,KAAK,MAAM;UACP;QACJ;UACI1B,gBAAgB,CAACE,MAAM,EAAE,uBAAuB,GAAGkB,IAAI,CAAC;MAChE;IAEJ,CAAC,CAAC;EACN,CAAC,CAAC;EAEF,OAAO;IACHoB,IAAI,EAAE7D,aAAa,CAAC6D,IAAI;IACxBC,MAAM,EAAE5D,GAAG;IACX6D,QAAQ,EAAE,iBAAiB,GAAG/D,aAAa,CAAC6D;EAChD,CAAC;AACL;AAGA,SAASrB,WAAWA,CAACJ,EAAa,EAAEW,OAAoB,EAAE;EACtD,IAAMiB,SAAS,GAAGhB,IAAI,CAACiB,SAAS,CAAClB,OAAO,CAAC;EACzCX,EAAE,CAAC8B,IAAI,CAACF,SAAS,CAAC;AACtB;AAEA,SAASb,gBAAgBA,CAACnB,M
AAc,EAAW;EAC/C,IACI,OAAOA,MAAM,KAAK,QAAQ,IAC1BA,MAAM,CAACmC,MAAM,GAAG,CAAC,IACjBnC,MAAM,CAACmC,MAAM,GAAG,GAAG,EACrB;IACE,OAAO,IAAI;EACf,CAAC,MAAM;IACH,OAAO,KAAK;EAChB;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-webrtc/webrtc-helper.js b/dist/cjs/plugins/replication-webrtc/webrtc-helper.js deleted file mode 100644 index e7962bece69..00000000000 --- a/dist/cjs/plugins/replication-webrtc/webrtc-helper.js +++ /dev/null @@ -1,32 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.isMasterInWebRTCReplication = isMasterInWebRTCReplication; -exports.sendMessageAndAwaitAnswer = sendMessageAndAwaitAnswer; -var _rxjs = require("rxjs"); -/** - * To deterministically define which peer is master and - * which peer is fork, we compare the storage tokens. - * But we have to hash them before, to ensure that - * a storageToken like 'aaaaaa' is not always the master - * for all peers. - */ -async function isMasterInWebRTCReplication(hashFunction, ownStorageToken, otherStorageToken) { - var isMaster = (await hashFunction([ownStorageToken, otherStorageToken].join('|'))) > (await hashFunction([otherStorageToken, ownStorageToken].join('|'))); - return isMaster; -} - -/** - * Send a message to the peer and await the answer. - * @throws with an EmptyErrorImpl if the peer connection - * was closed before an answer was received. 
- */ -function sendMessageAndAwaitAnswer(handler, peer, message) { - var requestId = message.id; - var answerPromise = (0, _rxjs.firstValueFrom)(handler.response$.pipe((0, _rxjs.filter)(d => d.peer === peer), (0, _rxjs.filter)(d => d.response.id === requestId), (0, _rxjs.map)(d => d.response))); - handler.send(peer, message); - return answerPromise; -} -//# sourceMappingURL=webrtc-helper.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-webrtc/webrtc-helper.js.map b/dist/cjs/plugins/replication-webrtc/webrtc-helper.js.map deleted file mode 100644 index fc45b277d6c..00000000000 --- a/dist/cjs/plugins/replication-webrtc/webrtc-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"webrtc-helper.js","names":["_rxjs","require","isMasterInWebRTCReplication","hashFunction","ownStorageToken","otherStorageToken","isMaster","join","sendMessageAndAwaitAnswer","handler","peer","message","requestId","id","answerPromise","firstValueFrom","response$","pipe","filter","d","response","map","send"],"sources":["../../../../src/plugins/replication-webrtc/webrtc-helper.ts"],"sourcesContent":["import type {\n HashFunction\n} from '../../types/index.d.ts';\nimport type {\n WebRTCConnectionHandler,\n WebRTCMessage,\n WebRTCResponse\n} from './webrtc-types.ts';\nimport { filter, firstValueFrom, map } from 'rxjs';\n\n\n\n/**\n * To deterministically define which peer is master and\n * which peer is fork, we compare the storage tokens.\n * But we have to hash them before, to ensure that\n * a storageToken like 'aaaaaa' is not always the master\n * for all peers.\n */\nexport async function isMasterInWebRTCReplication(\n hashFunction: HashFunction,\n ownStorageToken: string,\n otherStorageToken: string\n): Promise {\n const isMaster =\n await hashFunction([ownStorageToken, otherStorageToken].join('|'))\n >\n await hashFunction([otherStorageToken, ownStorageToken].join('|'));\n return isMaster;\n}\n\n/**\n * Send a message to the peer and await the answer.\n 
* @throws with an EmptyErrorImpl if the peer connection\n * was closed before an answer was received.\n */\nexport function sendMessageAndAwaitAnswer(\n handler: WebRTCConnectionHandler,\n peer: PeerType,\n message: WebRTCMessage\n): Promise {\n\n\n const requestId = message.id;\n const answerPromise = firstValueFrom(\n handler.response$.pipe(\n filter(d => d.peer === peer),\n filter(d => d.response.id === requestId),\n map(d => d.response)\n )\n );\n handler.send(peer, message);\n return answerPromise;\n}\n"],"mappings":";;;;;;;AAQA,IAAAA,KAAA,GAAAC,OAAA;AAIA;AACA;AACA;AACA;AACA;AACA;AACA;AACO,eAAeC,2BAA2BA,CAC7CC,YAA0B,EAC1BC,eAAuB,EACvBC,iBAAyB,EACT;EAChB,IAAMC,QAAQ,GACV,OAAMH,YAAY,CAAC,CAACC,eAAe,EAAEC,iBAAiB,CAAC,CAACE,IAAI,CAAC,GAAG,CAAC,CAAC,KAElE,MAAMJ,YAAY,CAAC,CAACE,iBAAiB,EAAED,eAAe,CAAC,CAACG,IAAI,CAAC,GAAG,CAAC,CAAC;EACtE,OAAOD,QAAQ;AACnB;;AAEA;AACA;AACA;AACA;AACA;AACO,SAASE,yBAAyBA,CACrCC,OAA0C,EAC1CC,IAAc,EACdC,OAAsB,EACC;EAGvB,IAAMC,SAAS,GAAGD,OAAO,CAACE,EAAE;EAC5B,IAAMC,aAAa,GAAG,IAAAC,oBAAc,EAChCN,OAAO,CAACO,SAAS,CAACC,IAAI,CAClB,IAAAC,YAAM,EAACC,CAAC,IAAIA,CAAC,CAACT,IAAI,KAAKA,IAAI,CAAC,EAC5B,IAAAQ,YAAM,EAACC,CAAC,IAAIA,CAAC,CAACC,QAAQ,CAACP,EAAE,KAAKD,SAAS,CAAC,EACxC,IAAAS,SAAG,EAACF,CAAC,IAAIA,CAAC,CAACC,QAAQ,CACvB,CACJ,CAAC;EACDX,OAAO,CAACa,IAAI,CAACZ,IAAI,EAAEC,OAAO,CAAC;EAC3B,OAAOG,aAAa;AACxB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-webrtc/webrtc-types.js b/dist/cjs/plugins/replication-webrtc/webrtc-types.js deleted file mode 100644 index 3b2f66d78fc..00000000000 --- a/dist/cjs/plugins/replication-webrtc/webrtc-types.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=webrtc-types.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-webrtc/webrtc-types.js.map b/dist/cjs/plugins/replication-webrtc/webrtc-types.js.map deleted file mode 100644 index 5d19fb332c8..00000000000 --- a/dist/cjs/plugins/replication-webrtc/webrtc-types.js.map +++ /dev/null @@ -1 
+0,0 @@ -{"version":3,"file":"webrtc-types.js","names":[],"sources":["../../../../src/plugins/replication-webrtc/webrtc-types.ts"],"sourcesContent":["import { Observable, Subscription } from 'rxjs';\nimport type {\n MaybePromise,\n ReplicationOptions,\n ReplicationPullOptions,\n ReplicationPushOptions,\n RxError,\n RxReplicationHandler,\n RxStorageDefaultCheckpoint,\n RxTypeError,\n StringKeys\n} from '../../types/index.d.ts';\nimport { RxReplicationState } from '../replication/index.ts';\nimport { WebsocketMessageResponseType, WebsocketMessageType } from '../replication-websocket/index.ts';\n\nexport type WebRTCReplicationCheckpoint = RxStorageDefaultCheckpoint;\n\n\nexport type WebRTCMessage = Omit & {\n method: StringKeys> | 'token';\n};\nexport type WebRTCResponse = Omit;\nexport type PeerWithMessage = {\n peer: PeerType;\n message: WebRTCMessage;\n};\nexport type PeerWithResponse = {\n peer: PeerType;\n response: WebRTCResponse;\n};\n\nexport type WebRTCConnectionHandler = {\n connect$: Observable;\n disconnect$: Observable;\n message$: Observable>;\n response$: Observable>;\n error$: Observable;\n send(peer: PeerType, message: WebRTCMessage | WebRTCResponse): Promise;\n destroy(): Promise;\n};\n\nexport type WebRTCConnectionHandlerCreator = (\n opts: SyncOptionsWebRTC\n) => Promise>;\n\nexport type WebRTCSyncPushOptions = Omit<\n ReplicationPushOptions,\n 'handler'\n> & {};\n\nexport type WebRTCSyncPullOptions = Omit<\n ReplicationPullOptions,\n 'handler' | 'stream$'\n> & {};\n\nexport type SyncOptionsWebRTC = Omit<\n ReplicationOptions,\n 'pull' |\n 'push' |\n 'replicationIdentifier' |\n 'deletedField' |\n 'live' |\n 'autostart' |\n 'waitForLeadership'\n> & {\n /**\n * It will only replicate with other instances\n * that use the same topic.\n */\n topic: string;\n connectionHandlerCreator: WebRTCConnectionHandlerCreator;\n /**\n * Run on new peers so that bad peers can be blocked.\n * If returns true, the peer is valid and it will replicate.\n * If returns 
false, it will drop the peer.\n */\n isPeerValid?: (peer: PeerType) => MaybePromise;\n pull?: WebRTCSyncPullOptions;\n push?: WebRTCSyncPushOptions;\n};\n\nexport type RxWebRTCReplicationState = RxReplicationState;\n\n\nexport type WebRTCPeerState = {\n peer: PeerType;\n // only exists when the peer was picked as master and the own client was picked as fork.\n replicationState?: RxWebRTCReplicationState;\n // clean this up when removing the peer\n subs: Subscription[];\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-websocket/index.js b/dist/cjs/plugins/replication-websocket/index.js deleted file mode 100644 index e0139cd969c..00000000000 --- a/dist/cjs/plugins/replication-websocket/index.js +++ /dev/null @@ -1,39 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _websocketClient = require("./websocket-client.js"); -Object.keys(_websocketClient).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _websocketClient[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _websocketClient[key]; - } - }); -}); -var _websocketServer = require("./websocket-server.js"); -Object.keys(_websocketServer).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _websocketServer[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _websocketServer[key]; - } - }); -}); -var _websocketTypes = require("./websocket-types.js"); -Object.keys(_websocketTypes).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _websocketTypes[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _websocketTypes[key]; - } - }); -}); -//# 
sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-websocket/index.js.map b/dist/cjs/plugins/replication-websocket/index.js.map deleted file mode 100644 index d21b8e905a6..00000000000 --- a/dist/cjs/plugins/replication-websocket/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_websocketClient","require","Object","keys","forEach","key","exports","defineProperty","enumerable","get","_websocketServer","_websocketTypes"],"sources":["../../../../src/plugins/replication-websocket/index.ts"],"sourcesContent":["export * from './websocket-client.ts';\nexport * from './websocket-server.ts';\nexport * from './websocket-types.ts';\n"],"mappings":";;;;;AAAA,IAAAA,gBAAA,GAAAC,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAH,gBAAA,EAAAI,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAL,gBAAA,CAAAK,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAT,gBAAA,CAAAK,GAAA;IAAA;EAAA;AAAA;AACA,IAAAK,gBAAA,GAAAT,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAO,gBAAA,EAAAN,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAK,gBAAA,CAAAL,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAC,gBAAA,CAAAL,GAAA;IAAA;EAAA;AAAA;AACA,IAAAM,eAAA,GAAAV,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAQ,eAAA,EAAAP,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAM,eAAA,CAAAN,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAE,eAAA,CAAAN,GAAA;IAAA;EAAA;AAAA","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-websocket/websocket-client.js b/dist/cjs/plugins/replication-websocket/websocket-client.js deleted file mode 100644 index 325996754ab..00000000000 --- a/dist/cjs/plugins/replication-websocket/websocket-client.js +++ /dev/null @@ -1,148 +0,0 @@ -"use strict"; - -var 
_interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.createWebSocketClient = createWebSocketClient; -exports.ensureIsWebsocket = ensureIsWebsocket; -exports.replicateWithWebsocketServer = replicateWithWebsocketServer; -var _index = require("../replication/index.js"); -var _reconnectingWebsocket = _interopRequireDefault(require("reconnecting-websocket")); -var _isomorphicWs = _interopRequireDefault(require("isomorphic-ws")); -var _index2 = require("../../plugins/utils/index.js"); -var _rxjs = require("rxjs"); -var _rxError = require("../../rx-error.js"); -/** - * Copied and adapted from the 'reconnecting-websocket' npm module. - * Some bundlers have problems with bundling the isomorphic-ws plugin - * so we directly check the correctness in RxDB to ensure that we can - * throw a helpful error. - */ -function ensureIsWebsocket(w) { - var is = typeof w !== 'undefined' && !!w && w.CLOSING === 2; - if (!is) { - console.dir(w); - throw new Error('websocket not valid'); - } -} -async function createWebSocketClient(options) { - ensureIsWebsocket(_isomorphicWs.default); - var wsClient = new _reconnectingWebsocket.default(options.url, [], { - WebSocket: _isomorphicWs.default - }); - var connected$ = new _rxjs.BehaviorSubject(false); - var message$ = new _rxjs.Subject(); - var error$ = new _rxjs.Subject(); - wsClient.onerror = err => { - console.log('--- WAS CLIENT GOT ERROR:'); - console.log(err.error.message); - var emitError = (0, _rxError.newRxError)('RC_STREAM', { - errors: (0, _index2.toArray)(err).map(er => (0, _index2.errorToPlainJson)(er)), - direction: 'pull' - }); - error$.next(emitError); - }; - await new Promise(res => { - wsClient.onopen = () => { - if (options.headers) { - var authMessage = { - collection: options.collection.name, - id: (0, _index2.randomCouchString)(10), - params: [options.headers], - method: 'auth' - }; - 
wsClient.send(JSON.stringify(authMessage)); - } - connected$.next(true); - res(); - }; - }); - wsClient.onclose = () => { - connected$.next(false); - }; - wsClient.onmessage = messageObj => { - var message = JSON.parse(messageObj.data); - message$.next(message); - }; - return { - url: options.url, - socket: wsClient, - connected$, - message$, - error$ - }; -} -async function replicateWithWebsocketServer(options) { - var websocketClient = await createWebSocketClient(options); - var wsClient = websocketClient.socket; - var messages$ = websocketClient.message$; - var requestCounter = 0; - var requestFlag = (0, _index2.randomCouchString)(10); - function getRequestId() { - var count = requestCounter++; - return options.collection.database.token + '|' + requestFlag + '|' + count; - } - var replicationState = (0, _index.replicateRxCollection)({ - collection: options.collection, - replicationIdentifier: options.replicationIdentifier, - live: options.live, - pull: { - batchSize: options.batchSize, - stream$: messages$.pipe((0, _rxjs.filter)(msg => msg.id === 'stream' && msg.collection === options.collection.name), (0, _rxjs.map)(msg => msg.result)), - async handler(lastPulledCheckpoint, batchSize) { - var requestId = getRequestId(); - var request = { - id: requestId, - collection: options.collection.name, - method: 'masterChangesSince', - params: [lastPulledCheckpoint, batchSize] - }; - wsClient.send(JSON.stringify(request)); - var result = await (0, _rxjs.firstValueFrom)(messages$.pipe((0, _rxjs.filter)(msg => msg.id === requestId), (0, _rxjs.map)(msg => msg.result))); - return result; - } - }, - push: { - batchSize: options.batchSize, - handler(docs) { - var requestId = getRequestId(); - var request = { - id: requestId, - collection: options.collection.name, - method: 'masterWrite', - params: [docs] - }; - wsClient.send(JSON.stringify(request)); - return (0, _rxjs.firstValueFrom)(messages$.pipe((0, _rxjs.filter)(msg => msg.id === requestId), (0, _rxjs.map)(msg => 
msg.result))); - } - } - }); - websocketClient.error$.subscribe(err => replicationState.subjects.error.next(err)); - websocketClient.connected$.subscribe(isConnected => { - if (isConnected) { - /** - * When the client goes offline and online again, - * we have to send a 'RESYNC' signal because the client - * might have missed out events while being offline. - */ - replicationState.reSync(); - - /** - * Because reconnecting creates a new websocket-instance, - * we have to start the changestream from the remote again - * each time. - */ - var streamRequest = { - id: 'stream', - collection: options.collection.name, - method: 'masterChangeStream$', - params: [] - }; - wsClient.send(JSON.stringify(streamRequest)); - } - }); - options.collection.onDestroy.push(() => websocketClient.socket.close()); - return replicationState; -} -//# sourceMappingURL=websocket-client.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-websocket/websocket-client.js.map b/dist/cjs/plugins/replication-websocket/websocket-client.js.map deleted file mode 100644 index c85bb03c872..00000000000 --- a/dist/cjs/plugins/replication-websocket/websocket-client.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"websocket-client.js","names":["_index","require","_reconnectingWebsocket","_interopRequireDefault","_isomorphicWs","_index2","_rxjs","_rxError","ensureIsWebsocket","w","is","CLOSING","console","dir","Error","createWebSocketClient","options","IsomorphicWebSocket","wsClient","ReconnectingWebSocket","url","WebSocket","connected$","BehaviorSubject","message$","Subject","error$","onerror","err","log","error","message","emitError","newRxError","errors","toArray","map","er","errorToPlainJson","direction","next","Promise","res","onopen","headers","authMessage","collection","name","id","randomCouchString","params","method","send","JSON","stringify","onclose","onmessage","messageObj","parse","data","socket","replicateWithWebsocketServer","websocketClient","messages$","requestCounter","requestFlag","getRequestId","count","database","token","replicationState","replicateRxCollection","replicationIdentifier","live","pull","batchSize","stream$","pipe","filter","msg","result","handler","lastPulledCheckpoint","requestId","request","firstValueFrom","push","docs","subscribe","subjects","isConnected","reSync","streamRequest","onDestroy","close"],"sources":["../../../../src/plugins/replication-websocket/websocket-client.ts"],"sourcesContent":["import {\n replicateRxCollection,\n RxReplicationState\n} from '../replication/index.ts';\nimport {\n WebsocketClientOptions,\n WebsocketMessageType\n} from './websocket-types.ts';\n\nimport ReconnectingWebSocket from 'reconnecting-websocket';\n\nimport IsomorphicWebSocket from 'isomorphic-ws';\nimport {\n errorToPlainJson,\n randomCouchString,\n toArray\n} from '../../plugins/utils/index.ts';\nimport {\n filter,\n map,\n Subject,\n firstValueFrom,\n BehaviorSubject\n} from 'rxjs';\nimport type {\n RxError,\n RxReplicationWriteToMasterRow\n} from '../../types/index.d.ts';\nimport { newRxError } from '../../rx-error.ts';\n\nexport type WebsocketClient = {\n url: string;\n socket: any;\n connected$: BehaviorSubject;\n 
message$: Subject;\n error$: Subject;\n};\n\n\n/**\n * Copied and adapted from the 'reconnecting-websocket' npm module.\n * Some bundlers have problems with bundling the isomorphic-ws plugin\n * so we directly check the correctness in RxDB to ensure that we can\n * throw a helpful error.\n */\nexport function ensureIsWebsocket(w: typeof IsomorphicWebSocket) {\n const is = typeof w !== 'undefined' && !!w && w.CLOSING === 2;\n if (!is) {\n console.dir(w);\n throw new Error('websocket not valid');\n }\n}\n\n\nexport async function createWebSocketClient(options: WebsocketClientOptions): Promise {\n ensureIsWebsocket(IsomorphicWebSocket);\n const wsClient = new ReconnectingWebSocket(\n options.url,\n [],\n {\n WebSocket: IsomorphicWebSocket\n }\n );\n const connected$ = new BehaviorSubject(false);\n const message$ = new Subject();\n const error$ = new Subject();\n wsClient.onerror = (err) => {\n\n console.log('--- WAS CLIENT GOT ERROR:');\n console.log(err.error.message);\n\n const emitError = newRxError('RC_STREAM', {\n errors: toArray(err).map((er: any) => errorToPlainJson(er)),\n direction: 'pull'\n });\n error$.next(emitError);\n };\n await new Promise(res => {\n wsClient.onopen = () => {\n\n if (options.headers) {\n const authMessage: WebsocketMessageType = {\n collection: options.collection.name,\n id: randomCouchString(10),\n params: [options.headers],\n method: 'auth'\n };\n wsClient.send(JSON.stringify(authMessage));\n }\n\n connected$.next(true);\n res();\n };\n });\n wsClient.onclose = () => {\n connected$.next(false);\n };\n\n wsClient.onmessage = (messageObj) => {\n const message = JSON.parse(messageObj.data);\n message$.next(message);\n };\n\n return {\n url: options.url,\n socket: wsClient,\n connected$,\n message$,\n error$\n };\n\n}\n\nexport async function replicateWithWebsocketServer(\n options: WebsocketClientOptions\n): Promise> {\n const websocketClient = await createWebSocketClient(options);\n const wsClient = websocketClient.socket;\n const 
messages$ = websocketClient.message$;\n\n let requestCounter = 0;\n const requestFlag = randomCouchString(10);\n function getRequestId() {\n const count = requestCounter++;\n return options.collection.database.token + '|' + requestFlag + '|' + count;\n }\n const replicationState = replicateRxCollection({\n collection: options.collection,\n replicationIdentifier: options.replicationIdentifier,\n live: options.live,\n pull: {\n batchSize: options.batchSize,\n stream$: messages$.pipe(\n filter(msg => msg.id === 'stream' && msg.collection === options.collection.name),\n map(msg => msg.result)\n ),\n async handler(lastPulledCheckpoint: CheckpointType | undefined, batchSize: number) {\n const requestId = getRequestId();\n const request: WebsocketMessageType = {\n id: requestId,\n collection: options.collection.name,\n method: 'masterChangesSince',\n params: [lastPulledCheckpoint, batchSize]\n };\n wsClient.send(JSON.stringify(request));\n const result = await firstValueFrom(\n messages$.pipe(\n filter(msg => msg.id === requestId),\n map(msg => msg.result)\n )\n );\n return result;\n }\n },\n push: {\n batchSize: options.batchSize,\n handler(docs: RxReplicationWriteToMasterRow[]) {\n const requestId = getRequestId();\n const request: WebsocketMessageType = {\n id: requestId,\n collection: options.collection.name,\n method: 'masterWrite',\n params: [docs]\n };\n wsClient.send(JSON.stringify(request));\n return firstValueFrom(\n messages$.pipe(\n filter(msg => msg.id === requestId),\n map(msg => msg.result)\n )\n );\n }\n }\n });\n\n websocketClient.error$.subscribe(err => replicationState.subjects.error.next(err));\n\n websocketClient.connected$.subscribe(isConnected => {\n if (isConnected) {\n /**\n * When the client goes offline and online again,\n * we have to send a 'RESYNC' signal because the client\n * might have missed out events while being offline.\n */\n replicationState.reSync();\n\n /**\n * Because reconnecting creates a new websocket-instance,\n * we have to 
start the changestream from the remote again\n * each time.\n */\n const streamRequest: WebsocketMessageType = {\n id: 'stream',\n collection: options.collection.name,\n method: 'masterChangeStream$',\n params: []\n };\n wsClient.send(JSON.stringify(streamRequest));\n }\n });\n\n options.collection.onDestroy.push(() => websocketClient.socket.close());\n return replicationState;\n}\n"],"mappings":";;;;;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AASA,IAAAC,sBAAA,GAAAC,sBAAA,CAAAF,OAAA;AAEA,IAAAG,aAAA,GAAAD,sBAAA,CAAAF,OAAA;AACA,IAAAI,OAAA,GAAAJ,OAAA;AAKA,IAAAK,KAAA,GAAAL,OAAA;AAWA,IAAAM,QAAA,GAAAN,OAAA;AAWA;AACA;AACA;AACA;AACA;AACA;AACO,SAASO,iBAAiBA,CAACC,CAA6B,EAAE;EAC7D,IAAMC,EAAE,GAAG,OAAOD,CAAC,KAAK,WAAW,IAAI,CAAC,CAACA,CAAC,IAAIA,CAAC,CAACE,OAAO,KAAK,CAAC;EAC7D,IAAI,CAACD,EAAE,EAAE;IACLE,OAAO,CAACC,GAAG,CAACJ,CAAC,CAAC;IACd,MAAM,IAAIK,KAAK,CAAC,qBAAqB,CAAC;EAC1C;AACJ;AAGO,eAAeC,qBAAqBA,CAAYC,OAA0C,EAA4B;EACzHR,iBAAiB,CAACS,qBAAmB,CAAC;EACtC,IAAMC,QAAQ,GAAG,IAAIC,8BAAqB,CACtCH,OAAO,CAACI,GAAG,EACX,EAAE,EACF;IACIC,SAAS,EAAEJ;EACf,CACJ,CAAC;EACD,IAAMK,UAAU,GAAG,IAAIC,qBAAe,CAAU,KAAK,CAAC;EACtD,IAAMC,QAAQ,GAAG,IAAIC,aAAO,CAAM,CAAC;EACnC,IAAMC,MAAM,GAAG,IAAID,aAAO,CAAM,CAAC;EACjCP,QAAQ,CAACS,OAAO,GAAIC,GAAG,IAAK;IAExBhB,OAAO,CAACiB,GAAG,CAAC,2BAA2B,CAAC;IACxCjB,OAAO,CAACiB,GAAG,CAACD,GAAG,CAACE,KAAK,CAACC,OAAO,CAAC;IAE9B,IAAMC,SAAS,GAAG,IAAAC,mBAAU,EAAC,WAAW,EAAE;MACtCC,MAAM,EAAE,IAAAC,eAAO,EAACP,GAAG,CAAC,CAACQ,GAAG,CAAEC,EAAO,IAAK,IAAAC,wBAAgB,EAACD,EAAE,CAAC,CAAC;MAC3DE,SAAS,EAAE;IACf,CAAC,CAAC;IACFb,MAAM,CAACc,IAAI,CAACR,SAAS,CAAC;EAC1B,CAAC;EACD,MAAM,IAAIS,OAAO,CAAOC,GAAG,IAAI;IAC3BxB,QAAQ,CAACyB,MAAM,GAAG,MAAM;MAEpB,IAAI3B,OAAO,CAAC4B,OAAO,EAAE;QACjB,IAAMC,WAAiC,GAAG;UACtCC,UAAU,EAAE9B,OAAO,CAAC8B,UAAU,CAACC,IAAI;UACnCC,EAAE,EAAE,IAAAC,yBAAiB,EAAC,EAAE,CAAC;UACzBC,MAAM,EAAE,CAAClC,OAAO,CAAC4B,OAAO,CAAC;UACzBO,MAAM,EAAE;QACZ,CAAC;QACDjC,QAAQ,CAACkC,IAAI,CAACC,IAAI,CAACC,SAAS,CAACT,WAAW,CAAC,CAAC;MAC9C;MAEAvB,UAAU,CAACkB,IAAI,CAAC,IAAI,CAAC;MACrBE,GAAG,CAAC,CAAC;IACT,CAAC;EA
CL,CAAC,CAAC;EACFxB,QAAQ,CAACqC,OAAO,GAAG,MAAM;IACrBjC,UAAU,CAACkB,IAAI,CAAC,KAAK,CAAC;EAC1B,CAAC;EAEDtB,QAAQ,CAACsC,SAAS,GAAIC,UAAU,IAAK;IACjC,IAAM1B,OAAO,GAAGsB,IAAI,CAACK,KAAK,CAACD,UAAU,CAACE,IAAI,CAAC;IAC3CnC,QAAQ,CAACgB,IAAI,CAACT,OAAO,CAAC;EAC1B,CAAC;EAED,OAAO;IACHX,GAAG,EAAEJ,OAAO,CAACI,GAAG;IAChBwC,MAAM,EAAE1C,QAAQ;IAChBI,UAAU;IACVE,QAAQ;IACRE;EACJ,CAAC;AAEL;AAEO,eAAemC,4BAA4BA,CAC9C7C,OAA0C,EACY;EACtD,IAAM8C,eAAe,GAAG,MAAM/C,qBAAqB,CAACC,OAAO,CAAC;EAC5D,IAAME,QAAQ,GAAG4C,eAAe,CAACF,MAAM;EACvC,IAAMG,SAAS,GAAGD,eAAe,CAACtC,QAAQ;EAE1C,IAAIwC,cAAc,GAAG,CAAC;EACtB,IAAMC,WAAW,GAAG,IAAAhB,yBAAiB,EAAC,EAAE,CAAC;EACzC,SAASiB,YAAYA,CAAA,EAAG;IACpB,IAAMC,KAAK,GAAGH,cAAc,EAAE;IAC9B,OAAOhD,OAAO,CAAC8B,UAAU,CAACsB,QAAQ,CAACC,KAAK,GAAG,GAAG,GAAGJ,WAAW,GAAG,GAAG,GAAGE,KAAK;EAC9E;EACA,IAAMG,gBAAgB,GAAG,IAAAC,4BAAqB,EAA4B;IACtEzB,UAAU,EAAE9B,OAAO,CAAC8B,UAAU;IAC9B0B,qBAAqB,EAAExD,OAAO,CAACwD,qBAAqB;IACpDC,IAAI,EAAEzD,OAAO,CAACyD,IAAI;IAClBC,IAAI,EAAE;MACFC,SAAS,EAAE3D,OAAO,CAAC2D,SAAS;MAC5BC,OAAO,EAAEb,SAAS,CAACc,IAAI,CACnB,IAAAC,YAAM,EAACC,GAAG,IAAIA,GAAG,CAAC/B,EAAE,KAAK,QAAQ,IAAI+B,GAAG,CAACjC,UAAU,KAAK9B,OAAO,CAAC8B,UAAU,CAACC,IAAI,CAAC,EAChF,IAAAX,SAAG,EAAC2C,GAAG,IAAIA,GAAG,CAACC,MAAM,CACzB,CAAC;MACD,MAAMC,OAAOA,CAACC,oBAAgD,EAAEP,SAAiB,EAAE;QAC/E,IAAMQ,SAAS,GAAGjB,YAAY,CAAC,CAAC;QAChC,IAAMkB,OAA6B,GAAG;UAClCpC,EAAE,EAAEmC,SAAS;UACbrC,UAAU,EAAE9B,OAAO,CAAC8B,UAAU,CAACC,IAAI;UACnCI,MAAM,EAAE,oBAAoB;UAC5BD,MAAM,EAAE,CAACgC,oBAAoB,EAAEP,SAAS;QAC5C,CAAC;QACDzD,QAAQ,CAACkC,IAAI,CAACC,IAAI,CAACC,SAAS,CAAC8B,OAAO,CAAC,CAAC;QACtC,IAAMJ,MAAM,GAAG,MAAM,IAAAK,oBAAc,EAC/BtB,SAAS,CAACc,IAAI,CACV,IAAAC,YAAM,EAACC,GAAG,IAAIA,GAAG,CAAC/B,EAAE,KAAKmC,SAAS,CAAC,EACnC,IAAA/C,SAAG,EAAC2C,GAAG,IAAIA,GAAG,CAACC,MAAM,CACzB,CACJ,CAAC;QACD,OAAOA,MAAM;MACjB;IACJ,CAAC;IACDM,IAAI,EAAE;MACFX,SAAS,EAAE3D,OAAO,CAAC2D,SAAS;MAC5BM,OAAOA,CAACM,IAAgD,EAAE;QACtD,IAAMJ,SAAS,GAAGjB,YAAY,CAAC,CAAC;QAChC,IAAMkB,OAA6B,GAAG;UAClCpC,EAAE,EAAEmC,SAAS;UACbrC,UAAU,EAAE9B,OAAO,CAAC8B,UAAU,CAACC,IAAI;UACnCI,MAAM,EA
AE,aAAa;UACrBD,MAAM,EAAE,CAACqC,IAAI;QACjB,CAAC;QACDrE,QAAQ,CAACkC,IAAI,CAACC,IAAI,CAACC,SAAS,CAAC8B,OAAO,CAAC,CAAC;QACtC,OAAO,IAAAC,oBAAc,EACjBtB,SAAS,CAACc,IAAI,CACV,IAAAC,YAAM,EAACC,GAAG,IAAIA,GAAG,CAAC/B,EAAE,KAAKmC,SAAS,CAAC,EACnC,IAAA/C,SAAG,EAAC2C,GAAG,IAAIA,GAAG,CAACC,MAAM,CACzB,CACJ,CAAC;MACL;IACJ;EACJ,CAAC,CAAC;EAEFlB,eAAe,CAACpC,MAAM,CAAC8D,SAAS,CAAC5D,GAAG,IAAI0C,gBAAgB,CAACmB,QAAQ,CAAC3D,KAAK,CAACU,IAAI,CAACZ,GAAG,CAAC,CAAC;EAElFkC,eAAe,CAACxC,UAAU,CAACkE,SAAS,CAACE,WAAW,IAAI;IAChD,IAAIA,WAAW,EAAE;MACb;AACZ;AACA;AACA;AACA;MACYpB,gBAAgB,CAACqB,MAAM,CAAC,CAAC;;MAEzB;AACZ;AACA;AACA;AACA;MACY,IAAMC,aAAmC,GAAG;QACxC5C,EAAE,EAAE,QAAQ;QACZF,UAAU,EAAE9B,OAAO,CAAC8B,UAAU,CAACC,IAAI;QACnCI,MAAM,EAAE,qBAAqB;QAC7BD,MAAM,EAAE;MACZ,CAAC;MACDhC,QAAQ,CAACkC,IAAI,CAACC,IAAI,CAACC,SAAS,CAACsC,aAAa,CAAC,CAAC;IAChD;EACJ,CAAC,CAAC;EAEF5E,OAAO,CAAC8B,UAAU,CAAC+C,SAAS,CAACP,IAAI,CAAC,MAAMxB,eAAe,CAACF,MAAM,CAACkC,KAAK,CAAC,CAAC,CAAC;EACvE,OAAOxB,gBAAgB;AAC3B","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-websocket/websocket-server.js b/dist/cjs/plugins/replication-websocket/websocket-server.js deleted file mode 100644 index 317971e896e..00000000000 --- a/dist/cjs/plugins/replication-websocket/websocket-server.js +++ /dev/null @@ -1,112 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.getReplicationHandlerByCollection = getReplicationHandlerByCollection; -exports.startSocketServer = startSocketServer; -exports.startWebsocketServer = startWebsocketServer; -var _isomorphicWs = _interopRequireDefault(require("isomorphic-ws")); -var _index = require("../../replication-protocol/index.js"); -var _index2 = require("../../plugins/utils/index.js"); -var _rxjs = require("rxjs"); -var { - WebSocketServer -} = _isomorphicWs.default; -function startSocketServer(options) { - var wss = new 
WebSocketServer(options); - var closed = false; - function closeServer() { - if (closed) { - return _index2.PROMISE_RESOLVE_VOID; - } - closed = true; - onConnection$.complete(); - return new Promise((res, rej) => { - /** - * We have to close all client connections, - * otherwise wss.close() will never call the callback. - * @link https://github.com/websockets/ws/issues/1288#issuecomment-360594458 - */ - for (var ws of wss.clients) { - ws.close(); - } - wss.close(err => { - if (err) { - rej(err); - } else { - res(); - } - }); - }); - } - var onConnection$ = new _rxjs.Subject(); - wss.on('connection', ws => onConnection$.next(ws)); - return { - server: wss, - close: closeServer, - onConnection$: onConnection$.asObservable() - }; -} -var REPLICATION_HANDLER_BY_COLLECTION = new Map(); -function getReplicationHandlerByCollection(database, collectionName) { - if (!database.collections[collectionName]) { - throw new Error('collection ' + collectionName + ' does not exist'); - } - var collection = database.collections[collectionName]; - var handler = (0, _index2.getFromMapOrCreate)(REPLICATION_HANDLER_BY_COLLECTION, collection, () => { - return (0, _index.rxStorageInstanceToReplicationHandler)(collection.storageInstance, collection.conflictHandler, database.token); - }); - return handler; -} -function startWebsocketServer(options) { - var { - database, - ...wsOptions - } = options; - var serverState = startSocketServer(wsOptions); - - // auto close when the database gets destroyed - database.onDestroy.push(() => serverState.close()); - serverState.onConnection$.subscribe(ws => { - var onCloseHandlers = []; - ws.onclose = () => { - onCloseHandlers.map(fn => fn()); - }; - ws.on('message', async messageString => { - var message = JSON.parse(messageString); - var handler = getReplicationHandlerByCollection(database, message.collection); - if (message.method === 'auth') { - return; - } - var method = handler[message.method]; - - /** - * If it is not a function, - * it means 
that the client requested the masterChangeStream$ - */ - if (typeof method !== 'function') { - var changeStreamSub = handler.masterChangeStream$.subscribe(ev => { - var streamResponse = { - id: 'stream', - collection: message.collection, - result: ev - }; - ws.send(JSON.stringify(streamResponse)); - }); - onCloseHandlers.push(() => changeStreamSub.unsubscribe()); - return; - } - var result = await method(...message.params); - var response = { - id: message.id, - collection: message.collection, - result - }; - ws.send(JSON.stringify(response)); - }); - }); - return serverState; -} -//# sourceMappingURL=websocket-server.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-websocket/websocket-server.js.map b/dist/cjs/plugins/replication-websocket/websocket-server.js.map deleted file mode 100644 index a786ba79bc2..00000000000 --- a/dist/cjs/plugins/replication-websocket/websocket-server.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"websocket-server.js","names":["_isomorphicWs","_interopRequireDefault","require","_index","_index2","_rxjs","WebSocketServer","pkg","startSocketServer","options","wss","closed","closeServer","PROMISE_RESOLVE_VOID","onConnection$","complete","Promise","res","rej","ws","clients","close","err","Subject","on","next","server","asObservable","REPLICATION_HANDLER_BY_COLLECTION","Map","getReplicationHandlerByCollection","database","collectionName","collections","Error","collection","handler","getFromMapOrCreate","rxStorageInstanceToReplicationHandler","storageInstance","conflictHandler","token","startWebsocketServer","wsOptions","serverState","onDestroy","push","subscribe","onCloseHandlers","onclose","map","fn","messageString","message","JSON","parse","method","changeStreamSub","masterChangeStream$","ev","streamResponse","id","result","send","stringify","unsubscribe","params","response"],"sources":["../../../../src/plugins/replication-websocket/websocket-server.ts"],"sourcesContent":["import type {\n 
RxCollection,\n RxDatabase,\n RxReplicationHandler\n} from '../../types/index.d.ts';\n\nimport type {\n WebSocket,\n ServerOptions\n} from 'isomorphic-ws';\nimport pkg from 'isomorphic-ws';\nconst { WebSocketServer } = pkg;\n\nimport type {\n WebsocketMessageResponseType,\n WebsocketMessageType,\n WebsocketServerOptions,\n WebsocketServerState\n} from './websocket-types.ts';\nimport { rxStorageInstanceToReplicationHandler } from '../../replication-protocol/index.ts';\nimport {\n PROMISE_RESOLVE_VOID, getFromMapOrCreate\n} from '../../plugins/utils/index.ts';\nimport { Subject } from 'rxjs';\n\nexport function startSocketServer(options: ServerOptions): WebsocketServerState {\n const wss = new WebSocketServer(options);\n let closed = false;\n function closeServer() {\n if (closed) {\n return PROMISE_RESOLVE_VOID;\n }\n closed = true;\n onConnection$.complete();\n return new Promise((res, rej) => {\n /**\n * We have to close all client connections,\n * otherwise wss.close() will never call the callback.\n * @link https://github.com/websockets/ws/issues/1288#issuecomment-360594458\n */\n for (const ws of wss.clients) {\n ws.close();\n }\n wss.close((err: any) => {\n if (err) {\n rej(err);\n } else {\n res();\n }\n });\n });\n }\n\n const onConnection$ = new Subject();\n wss.on('connection', (ws: any) => onConnection$.next(ws));\n\n return {\n server: wss,\n close: closeServer,\n onConnection$: onConnection$.asObservable()\n };\n}\n\nconst REPLICATION_HANDLER_BY_COLLECTION: WeakMap> = new Map();\nexport function getReplicationHandlerByCollection(\n database: RxDatabase,\n collectionName: string\n): RxReplicationHandler {\n if (!database.collections[collectionName]) {\n throw new Error('collection ' + collectionName + ' does not exist');\n }\n\n const collection = database.collections[collectionName];\n const handler = getFromMapOrCreate>(\n REPLICATION_HANDLER_BY_COLLECTION,\n collection,\n () => {\n return rxStorageInstanceToReplicationHandler(\n 
collection.storageInstance,\n collection.conflictHandler,\n database.token\n );\n }\n );\n return handler;\n}\n\nexport function startWebsocketServer(options: WebsocketServerOptions): WebsocketServerState {\n const { database, ...wsOptions } = options;\n const serverState = startSocketServer(wsOptions);\n\n // auto close when the database gets destroyed\n database.onDestroy.push(() => serverState.close());\n\n serverState.onConnection$.subscribe(ws => {\n const onCloseHandlers: Function[] = [];\n ws.onclose = () => {\n onCloseHandlers.map(fn => fn());\n };\n ws.on('message', async (messageString: string) => {\n const message: WebsocketMessageType = JSON.parse(messageString);\n const handler = getReplicationHandlerByCollection(database, message.collection);\n if (message.method === 'auth') {\n return;\n }\n const method = handler[message.method];\n\n /**\n * If it is not a function,\n * it means that the client requested the masterChangeStream$\n */\n if (typeof method !== 'function') {\n const changeStreamSub = handler.masterChangeStream$.subscribe(ev => {\n const streamResponse: WebsocketMessageResponseType = {\n id: 'stream',\n collection: message.collection,\n result: ev\n };\n ws.send(JSON.stringify(streamResponse));\n });\n onCloseHandlers.push(() => changeStreamSub.unsubscribe());\n return;\n }\n const result = await (method as any)(...message.params);\n const response: WebsocketMessageResponseType = {\n id: message.id,\n collection: message.collection,\n result\n };\n ws.send(JSON.stringify(response));\n });\n });\n\n\n return 
serverState;\n}\n"],"mappings":";;;;;;;;;AAUA,IAAAA,aAAA,GAAAC,sBAAA,CAAAC,OAAA;AASA,IAAAC,MAAA,GAAAD,OAAA;AACA,IAAAE,OAAA,GAAAF,OAAA;AAGA,IAAAG,KAAA,GAAAH,OAAA;AAZA,IAAM;EAAEI;AAAgB,CAAC,GAAGC,qBAAG;AAcxB,SAASC,iBAAiBA,CAACC,OAAsB,EAAwB;EAC5E,IAAMC,GAAG,GAAG,IAAIJ,eAAe,CAACG,OAAO,CAAC;EACxC,IAAIE,MAAM,GAAG,KAAK;EAClB,SAASC,WAAWA,CAAA,EAAG;IACnB,IAAID,MAAM,EAAE;MACR,OAAOE,4BAAoB;IAC/B;IACAF,MAAM,GAAG,IAAI;IACbG,aAAa,CAACC,QAAQ,CAAC,CAAC;IACxB,OAAO,IAAIC,OAAO,CAAO,CAACC,GAAG,EAAEC,GAAG,KAAK;MACnC;AACZ;AACA;AACA;AACA;MACY,KAAK,IAAMC,EAAE,IAAIT,GAAG,CAACU,OAAO,EAAE;QAC1BD,EAAE,CAACE,KAAK,CAAC,CAAC;MACd;MACAX,GAAG,CAACW,KAAK,CAAEC,GAAQ,IAAK;QACpB,IAAIA,GAAG,EAAE;UACLJ,GAAG,CAACI,GAAG,CAAC;QACZ,CAAC,MAAM;UACHL,GAAG,CAAC,CAAC;QACT;MACJ,CAAC,CAAC;IACN,CAAC,CAAC;EACN;EAEA,IAAMH,aAAa,GAAG,IAAIS,aAAO,CAAY,CAAC;EAC9Cb,GAAG,CAACc,EAAE,CAAC,YAAY,EAAGL,EAAO,IAAKL,aAAa,CAACW,IAAI,CAACN,EAAE,CAAC,CAAC;EAEzD,OAAO;IACHO,MAAM,EAAEhB,GAAG;IACXW,KAAK,EAAET,WAAW;IAClBE,aAAa,EAAEA,aAAa,CAACa,YAAY,CAAC;EAC9C,CAAC;AACL;AAEA,IAAMC,iCAAwF,GAAG,IAAIC,GAAG,CAAC,CAAC;AACnG,SAASC,iCAAiCA,CAC7CC,QAAyB,EACzBC,cAAsB,EACc;EACpC,IAAI,CAACD,QAAQ,CAACE,WAAW,CAACD,cAAc,CAAC,EAAE;IACvC,MAAM,IAAIE,KAAK,CAAC,aAAa,GAAGF,cAAc,GAAG,iBAAiB,CAAC;EACvE;EAEA,IAAMG,UAAU,GAAGJ,QAAQ,CAACE,WAAW,CAACD,cAAc,CAAC;EACvD,IAAMI,OAAO,GAAG,IAAAC,0BAAkB,EAC9BT,iCAAiC,EACjCO,UAAU,EACV,MAAM;IACF,OAAO,IAAAG,4CAAqC,EACxCH,UAAU,CAACI,eAAe,EAC1BJ,UAAU,CAACK,eAAe,EAC1BT,QAAQ,CAACU,KACb,CAAC;EACL,CACJ,CAAC;EACD,OAAOL,OAAO;AAClB;AAEO,SAASM,oBAAoBA,CAACjC,OAA+B,EAAwB;EACxF,IAAM;IAAEsB,QAAQ;IAAE,GAAGY;EAAU,CAAC,GAAGlC,OAAO;EAC1C,IAAMmC,WAAW,GAAGpC,iBAAiB,CAACmC,SAAS,CAAC;;EAEhD;EACAZ,QAAQ,CAACc,SAAS,CAACC,IAAI,CAAC,MAAMF,WAAW,CAACvB,KAAK,CAAC,CAAC,CAAC;EAElDuB,WAAW,CAAC9B,aAAa,CAACiC,SAAS,CAAC5B,EAAE,IAAI;IACtC,IAAM6B,eAA2B,GAAG,EAAE;IACtC7B,EAAE,CAAC8B,OAAO,GAAG,MAAM;MACfD,eAAe,CAACE,GAAG,CAACC,EAAE,IAAIA,EAAE,CAAC,CAAC,CAAC;IACnC,CAAC;IACDhC,EAAE,CAACK,EAAE,CAAC,SAAS,EAAE,MAAO4B,aAAqB,IAAK;MAC9C,IAAMC,OAA6B,GAAGC,IAAI,CAACC,KAAK,CAACH,
aAAa,CAAC;MAC/D,IAAMhB,OAAO,GAAGN,iCAAiC,CAACC,QAAQ,EAAEsB,OAAO,CAAClB,UAAU,CAAC;MAC/E,IAAIkB,OAAO,CAACG,MAAM,KAAK,MAAM,EAAE;QAC3B;MACJ;MACA,IAAMA,MAAM,GAAGpB,OAAO,CAACiB,OAAO,CAACG,MAAM,CAAC;;MAEtC;AACZ;AACA;AACA;MACY,IAAI,OAAOA,MAAM,KAAK,UAAU,EAAE;QAC9B,IAAMC,eAAe,GAAGrB,OAAO,CAACsB,mBAAmB,CAACX,SAAS,CAACY,EAAE,IAAI;UAChE,IAAMC,cAA4C,GAAG;YACjDC,EAAE,EAAE,QAAQ;YACZ1B,UAAU,EAAEkB,OAAO,CAAClB,UAAU;YAC9B2B,MAAM,EAAEH;UACZ,CAAC;UACDxC,EAAE,CAAC4C,IAAI,CAACT,IAAI,CAACU,SAAS,CAACJ,cAAc,CAAC,CAAC;QAC3C,CAAC,CAAC;QACFZ,eAAe,CAACF,IAAI,CAAC,MAAMW,eAAe,CAACQ,WAAW,CAAC,CAAC,CAAC;QACzD;MACJ;MACA,IAAMH,MAAM,GAAG,MAAON,MAAM,CAAS,GAAGH,OAAO,CAACa,MAAM,CAAC;MACvD,IAAMC,QAAsC,GAAG;QAC3CN,EAAE,EAAER,OAAO,CAACQ,EAAE;QACd1B,UAAU,EAAEkB,OAAO,CAAClB,UAAU;QAC9B2B;MACJ,CAAC;MACD3C,EAAE,CAAC4C,IAAI,CAACT,IAAI,CAACU,SAAS,CAACG,QAAQ,CAAC,CAAC;IACrC,CAAC,CAAC;EACN,CAAC,CAAC;EAGF,OAAOvB,WAAW;AACtB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication-websocket/websocket-types.js b/dist/cjs/plugins/replication-websocket/websocket-types.js deleted file mode 100644 index cd5213017f2..00000000000 --- a/dist/cjs/plugins/replication-websocket/websocket-types.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=websocket-types.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication-websocket/websocket-types.js.map b/dist/cjs/plugins/replication-websocket/websocket-types.js.map deleted file mode 100644 index f15c7a897a0..00000000000 --- a/dist/cjs/plugins/replication-websocket/websocket-types.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"websocket-types.js","names":[],"sources":["../../../../src/plugins/replication-websocket/websocket-types.ts"],"sourcesContent":["import type {\n Observable,\n} from 'rxjs';\nimport type {\n ServerOptions,\n ClientOptions,\n WebSocketServer,\n WebSocket\n} from 'ws';\nimport type {\n RxCollection,\n RxDatabase,\n RxReplicationHandler,\n StringKeys\n} from 
'../../types/index.d.ts';\n\nexport type WebsocketServerOptions = {\n database: RxDatabase;\n} & ServerOptions;\n\nexport type WebsocketServerState = {\n server: WebSocketServer;\n close: () => Promise;\n onConnection$: Observable;\n};\n\nexport type WebsocketClientOptions = {\n replicationIdentifier: string;\n collection: RxCollection;\n url: string;\n batchSize?: number;\n live?: boolean;\n headers?: { [k: string]: string; };\n} & ClientOptions;\n\nexport type WebsocketMessageType = {\n id: string;\n collection: string;\n method: StringKeys> | 'auth';\n params: any[];\n};\n\nexport type WebsocketMessageResponseType = {\n id: string;\n collection: string;\n result: any;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication/index.js b/dist/cjs/plugins/replication/index.js deleted file mode 100644 index cd75231a113..00000000000 --- a/dist/cjs/plugins/replication/index.js +++ /dev/null @@ -1,407 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxReplicationState = exports.REPLICATION_STATE_BY_COLLECTION = void 0; -exports.replicateRxCollection = replicateRxCollection; -exports.startReplicationOnLeaderShip = startReplicationOnLeaderShip; -var _rxjs = require("rxjs"); -var _index = require("../leader-election/index.js"); -var _index2 = require("../../plugins/utils/index.js"); -var _index3 = require("../../replication-protocol/index.js"); -var _rxError = require("../../rx-error.js"); -var _replicationHelper = require("./replication-helper.js"); -var _rxDatabaseInternalStore = require("../../rx-database-internal-store.js"); -var _plugin = require("../../plugin.js"); -var _rxStorageHelper = require("../../rx-storage-helper.js"); -var _overwritable = require("../../overwritable.js"); -var _hooks = require("../../hooks.js"); -/** - * This plugin contains the primitives to create - * a RxDB client-server replication. 
- * It is used in the other replication plugins - * but also can be used as standalone with a custom replication handler. - */ - -var REPLICATION_STATE_BY_COLLECTION = exports.REPLICATION_STATE_BY_COLLECTION = new WeakMap(); -var RxReplicationState = exports.RxReplicationState = /*#__PURE__*/function () { - function RxReplicationState( - /** - * The identifier, used to flag revisions - * and to identify which documents state came from the remote. - */ - replicationIdentifier, collection, deletedField, pull, push, live, retryTime, autoStart) { - this.subs = []; - this.subjects = { - received: new _rxjs.Subject(), - // all documents that are received from the endpoint - sent: new _rxjs.Subject(), - // all documents that are send to the endpoint - error: new _rxjs.Subject(), - // all errors that are received from the endpoint, emits new Error() objects - canceled: new _rxjs.BehaviorSubject(false), - // true when the replication was canceled - active: new _rxjs.BehaviorSubject(false) // true when something is running, false when not - }; - this.received$ = this.subjects.received.asObservable(); - this.sent$ = this.subjects.sent.asObservable(); - this.error$ = this.subjects.error.asObservable(); - this.canceled$ = this.subjects.canceled.asObservable(); - this.active$ = this.subjects.active.asObservable(); - this.onCancel = []; - this.callOnStart = undefined; - this.remoteEvents$ = new _rxjs.Subject(); - this.replicationIdentifier = replicationIdentifier; - this.collection = collection; - this.deletedField = deletedField; - this.pull = pull; - this.push = push; - this.live = live; - this.retryTime = retryTime; - this.autoStart = autoStart; - this.metaInfoPromise = (async () => { - var metaInstanceCollectionName = 'rx-replication-meta-' + (await collection.database.hashFunction([this.collection.name, this.replicationIdentifier].join('-'))); - var metaInstanceSchema = (0, _index3.getRxReplicationMetaInstanceSchema)(this.collection.schema.jsonSchema, (0, 
_rxStorageHelper.hasEncryption)(this.collection.schema.jsonSchema)); - return { - collectionName: metaInstanceCollectionName, - schema: metaInstanceSchema - }; - })(); - var replicationStates = (0, _index2.getFromMapOrCreate)(REPLICATION_STATE_BY_COLLECTION, collection, () => []); - replicationStates.push(this); - - // stop the replication when the collection gets destroyed - this.collection.onDestroy.push(() => this.cancel()); - - // create getters for the observables - Object.keys(this.subjects).forEach(key => { - Object.defineProperty(this, key + '$', { - get: function () { - return this.subjects[key].asObservable(); - } - }); - }); - var startPromise = new Promise(res => { - this.callOnStart = res; - }); - this.startPromise = startPromise; - } - var _proto = RxReplicationState.prototype; - _proto.start = async function start() { - if (this.isStopped()) { - return; - } - - // fill in defaults for pull & push - var pullModifier = this.pull && this.pull.modifier ? this.pull.modifier : _replicationHelper.DEFAULT_MODIFIER; - var pushModifier = this.push && this.push.modifier ? this.push.modifier : _replicationHelper.DEFAULT_MODIFIER; - var database = this.collection.database; - var metaInfo = await this.metaInfoPromise; - var [metaInstance] = await Promise.all([this.collection.database.storage.createStorageInstance({ - databaseName: database.name, - collectionName: metaInfo.collectionName, - databaseInstanceToken: database.token, - multiInstance: database.multiInstance, - // TODO is this always false? - options: {}, - schema: metaInfo.schema, - password: database.password, - devMode: _overwritable.overwritable.isDevMode() - }), (0, _rxDatabaseInternalStore.addConnectedStorageToCollection)(this.collection, metaInfo.collectionName, metaInfo.schema)]); - this.metaInstance = metaInstance; - this.internalReplicationState = (0, _index3.replicateRxStorageInstance)({ - pushBatchSize: this.push && this.push.batchSize ? 
this.push.batchSize : 100, - pullBatchSize: this.pull && this.pull.batchSize ? this.pull.batchSize : 100, - initialCheckpoint: { - upstream: this.push ? this.push.initialCheckpoint : undefined, - downstream: this.pull ? this.pull.initialCheckpoint : undefined - }, - forkInstance: this.collection.storageInstance, - metaInstance: this.metaInstance, - hashFunction: database.hashFunction, - identifier: 'rxdbreplication' + this.replicationIdentifier, - conflictHandler: this.collection.conflictHandler, - replicationHandler: { - masterChangeStream$: this.remoteEvents$.asObservable().pipe((0, _rxjs.filter)(_v => !!this.pull), (0, _rxjs.mergeMap)(async ev => { - if (ev === 'RESYNC') { - return ev; - } - var useEv = (0, _index2.flatClone)(ev); - useEv.documents = (0, _replicationHelper.handlePulledDocuments)(this.collection, this.deletedField, useEv.documents); - useEv.documents = await Promise.all(useEv.documents.map(d => pullModifier(d))); - return useEv; - })), - masterChangesSince: async (checkpoint, batchSize) => { - if (!this.pull) { - return { - checkpoint: null, - documents: [] - }; - } - /** - * Retries must be done here in the replication primitives plugin, - * because the replication protocol itself has no - * error handling. 
- */ - var done = false; - var result = {}; - while (!done && !this.isStopped()) { - try { - result = await this.pull.handler(checkpoint, batchSize); - done = true; - } catch (err) { - var emitError = (0, _rxError.newRxError)('RC_PULL', { - checkpoint, - errors: (0, _index2.toArray)(err).map(er => (0, _index2.errorToPlainJson)(er)), - direction: 'pull' - }); - this.subjects.error.next(emitError); - await (0, _replicationHelper.awaitRetry)(this.collection, (0, _index2.ensureNotFalsy)(this.retryTime)); - } - } - if (this.isStopped()) { - return { - checkpoint: null, - documents: [] - }; - } - var useResult = (0, _index2.flatClone)(result); - useResult.documents = (0, _replicationHelper.handlePulledDocuments)(this.collection, this.deletedField, useResult.documents); - useResult.documents = await Promise.all(useResult.documents.map(d => pullModifier(d))); - return useResult; - }, - masterWrite: async rows => { - if (!this.push) { - return []; - } - var done = false; - await (0, _hooks.runAsyncPluginHooks)('preReplicationMasterWrite', { - rows, - collection: this.collection - }); - var useRowsOrNull = await Promise.all(rows.map(async row => { - row.newDocumentState = await pushModifier(row.newDocumentState); - if (row.newDocumentState === null) { - return null; - } - if (row.assumedMasterState) { - row.assumedMasterState = await pushModifier(row.assumedMasterState); - } - if (this.deletedField !== '_deleted') { - row.newDocumentState = (0, _replicationHelper.swapDefaultDeletedTodeletedField)(this.deletedField, row.newDocumentState); - if (row.assumedMasterState) { - row.assumedMasterState = (0, _replicationHelper.swapDefaultDeletedTodeletedField)(this.deletedField, row.assumedMasterState); - } - } - return row; - })); - var useRows = useRowsOrNull.filter(_index2.arrayFilterNotEmpty); - var result = null; - - // In case all the rows have been filtered and nothing has to be sent - if (useRows.length === 0) { - done = true; - result = []; - } - while (!done && 
!this.isStopped()) { - try { - result = await this.push.handler(useRows); - /** - * It is a common problem that people have wrongly behaving backend - * that do not return an array with the conflicts on push requests. - * So we run this check here to make it easier to debug. - * @link https://github.com/pubkey/rxdb/issues/4103 - */ - if (!Array.isArray(result)) { - throw (0, _rxError.newRxError)('RC_PUSH_NO_AR', { - pushRows: rows, - direction: 'push', - args: { - result - } - }); - } - done = true; - } catch (err) { - var emitError = err.rxdb ? err : (0, _rxError.newRxError)('RC_PUSH', { - pushRows: rows, - errors: (0, _index2.toArray)(err).map(er => (0, _index2.errorToPlainJson)(er)), - direction: 'push' - }); - this.subjects.error.next(emitError); - await (0, _replicationHelper.awaitRetry)(this.collection, (0, _index2.ensureNotFalsy)(this.retryTime)); - } - } - if (this.isStopped()) { - return []; - } - await (0, _hooks.runAsyncPluginHooks)('preReplicationMasterWriteDocumentsHandle', { - result, - collection: this.collection - }); - var conflicts = (0, _replicationHelper.handlePulledDocuments)(this.collection, this.deletedField, (0, _index2.ensureNotFalsy)(result)); - return conflicts; - } - } - }); - this.subs.push(this.internalReplicationState.events.error.subscribe(err => { - this.subjects.error.next(err); - }), this.internalReplicationState.events.processed.down.subscribe(row => this.subjects.received.next(row.document)), this.internalReplicationState.events.processed.up.subscribe(writeToMasterRow => { - this.subjects.sent.next(writeToMasterRow.newDocumentState); - }), (0, _rxjs.combineLatest)([this.internalReplicationState.events.active.down, this.internalReplicationState.events.active.up]).subscribe(([down, up]) => { - var isActive = down || up; - this.subjects.active.next(isActive); - })); - if (this.pull && this.pull.stream$ && this.live) { - this.subs.push(this.pull.stream$.subscribe({ - next: ev => { - this.remoteEvents$.next(ev); - }, - error: err => 
{ - this.subjects.error.next(err); - } - })); - } - - /** - * Non-live replications run once - * and then automatically get canceled. - */ - if (!this.live) { - await (0, _index3.awaitRxStorageReplicationFirstInSync)(this.internalReplicationState); - await (0, _index3.awaitRxStorageReplicationInSync)(this.internalReplicationState); - await this.cancel(); - } - this.callOnStart(); - }; - _proto.isStopped = function isStopped() { - if (this.subjects.canceled.getValue()) { - return true; - } - return false; - }; - _proto.awaitInitialReplication = async function awaitInitialReplication() { - await this.startPromise; - return (0, _index3.awaitRxStorageReplicationFirstInSync)((0, _index2.ensureNotFalsy)(this.internalReplicationState)); - } - - /** - * Returns a promise that resolves when: - * - All local data is replicated with the remote - * - No replication cycle is running or in retry-state - * - * WARNING: USing this function directly in a multi-tab browser application - * is dangerous because only the leading instance will ever be replicated, - * so this promise will not resolve in the other tabs. - * For multi-tab support you should set and observe a flag in a local document. - */; - _proto.awaitInSync = async function awaitInSync() { - await this.startPromise; - await (0, _index3.awaitRxStorageReplicationFirstInSync)((0, _index2.ensureNotFalsy)(this.internalReplicationState)); - - /** - * To reduce the amount of re-renders and make testing - * and to make the whole behavior more predictable, - * we await these things multiple times. - * For example the state might be in sync already and at the - * exact same time a pull.stream$ event comes in and we want to catch - * that in the same call to awaitInSync() instead of resolving - * while actually the state is not in sync. - */ - var t = 2; - while (t > 0) { - t--; - - /** - * Often awaitInSync() is called directly after a document write, - * like in the unit tests. 
- * So we first have to await the idleness to ensure that all RxChangeEvents - * are processed already. - */ - await this.collection.database.requestIdlePromise(); - await (0, _index3.awaitRxStorageReplicationInSync)((0, _index2.ensureNotFalsy)(this.internalReplicationState)); - } - return true; - }; - _proto.reSync = function reSync() { - this.remoteEvents$.next('RESYNC'); - }; - _proto.emitEvent = function emitEvent(ev) { - this.remoteEvents$.next(ev); - }; - _proto.cancel = async function cancel() { - if (this.isStopped()) { - return _index2.PROMISE_RESOLVE_FALSE; - } - var promises = this.onCancel.map(fn => (0, _index2.toPromise)(fn())); - if (this.internalReplicationState) { - await (0, _index3.cancelRxStorageReplication)(this.internalReplicationState); - } - if (this.metaInstance) { - promises.push((0, _index2.ensureNotFalsy)(this.internalReplicationState).checkpointQueue.then(() => (0, _index2.ensureNotFalsy)(this.metaInstance).close())); - } - this.subs.forEach(sub => sub.unsubscribe()); - this.subjects.canceled.next(true); - this.subjects.active.complete(); - this.subjects.canceled.complete(); - this.subjects.error.complete(); - this.subjects.received.complete(); - this.subjects.sent.complete(); - return Promise.all(promises); - }; - _proto.remove = async function remove() { - await (0, _index2.ensureNotFalsy)(this.metaInstance).remove(); - var metaInfo = await this.metaInfoPromise; - await this.cancel(); - await (0, _rxDatabaseInternalStore.removeConnectedStorageFromCollection)(this.collection, metaInfo.collectionName, metaInfo.schema); - }; - return RxReplicationState; -}(); -function replicateRxCollection({ - replicationIdentifier, - collection, - deletedField = '_deleted', - pull, - push, - live = true, - retryTime = 1000 * 5, - waitForLeadership = true, - autoStart = true -}) { - (0, _plugin.addRxPlugin)(_index.RxDBLeaderElectionPlugin); - - /** - * It is a common error to forget to add these config - * objects. 
So we check here because it makes no sense - * to start a replication with neither push nor pull. - */ - if (!pull && !push) { - throw (0, _rxError.newRxError)('UT3', { - collection: collection.name, - args: { - replicationIdentifier - } - }); - } - var replicationState = new RxReplicationState(replicationIdentifier, collection, deletedField, pull, push, live, retryTime, autoStart); - startReplicationOnLeaderShip(waitForLeadership, replicationState); - return replicationState; -} -function startReplicationOnLeaderShip(waitForLeadership, replicationState) { - /** - * Always await this Promise to ensure that the current instance - * is leader when waitForLeadership=true - */ - var mustWaitForLeadership = waitForLeadership && replicationState.collection.database.multiInstance; - var waitTillRun = mustWaitForLeadership ? replicationState.collection.database.waitForLeadership() : _index2.PROMISE_RESOLVE_TRUE; - return waitTillRun.then(() => { - if (replicationState.isStopped()) { - return; - } - if (replicationState.autoStart) { - replicationState.start(); - } - }); -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication/index.js.map b/dist/cjs/plugins/replication/index.js.map deleted file mode 100644 index 976c89d3a13..00000000000 --- a/dist/cjs/plugins/replication/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","names":["_rxjs","require","_index","_index2","_index3","_rxError","_replicationHelper","_rxDatabaseInternalStore","_plugin","_rxStorageHelper","_overwritable","_hooks","REPLICATION_STATE_BY_COLLECTION","exports","WeakMap","RxReplicationState","replicationIdentifier","collection","deletedField","pull","push","live","retryTime","autoStart","subs","subjects","received","Subject","sent","error","canceled","BehaviorSubject","active","received$","asObservable","sent$","error$","canceled$","active$","onCancel","callOnStart","undefined","remoteEvents$","metaInfoPromise","metaInstanceCollectionName","database","hashFunction","name","join","metaInstanceSchema","getRxReplicationMetaInstanceSchema","schema","jsonSchema","hasEncryption","collectionName","replicationStates","getFromMapOrCreate","onDestroy","cancel","Object","keys","forEach","key","defineProperty","get","startPromise","Promise","res","_proto","prototype","start","isStopped","pullModifier","modifier","DEFAULT_MODIFIER","pushModifier","metaInfo","metaInstance","all","storage","createStorageInstance","databaseName","databaseInstanceToken","token","multiInstance","options","password","devMode","overwritable","isDevMode","addConnectedStorageToCollection","internalReplicationState","replicateRxStorageInstance","pushBatchSize","batchSize","pullBatchSize","initialCheckpoint","upstream","downstream","forkInstance","storageInstance","identifier","conflictHandler","replicationHandler","masterChangeStream$","pipe","filter","_v","mergeMap","ev","useEv","flatClone","documents","handlePulledDocuments","map","d","masterChangesSince","checkpoint","done","result","handler","err","emitError","newRxError","errors","toArray","er","errorToPlainJson","direction","next","awaitRetry","ensureNotFalsy","useResult","masterWrite","rows","runAsyncPluginHooks","useRowsOrNull","row","newDocumentState","assumedMasterState","swapDefaultDeletedTodeletedField","useRows","arrayFilterNotEmpty","length","Array","isArray
","pushRows","args","rxdb","conflicts","events","subscribe","processed","down","document","up","writeToMasterRow","combineLatest","isActive","stream$","awaitRxStorageReplicationFirstInSync","awaitRxStorageReplicationInSync","getValue","awaitInitialReplication","awaitInSync","t","requestIdlePromise","reSync","emitEvent","PROMISE_RESOLVE_FALSE","promises","fn","toPromise","cancelRxStorageReplication","checkpointQueue","then","close","sub","unsubscribe","complete","remove","removeConnectedStorageFromCollection","replicateRxCollection","waitForLeadership","addRxPlugin","RxDBLeaderElectionPlugin","replicationState","startReplicationOnLeaderShip","mustWaitForLeadership","waitTillRun","PROMISE_RESOLVE_TRUE"],"sources":["../../../../src/plugins/replication/index.ts"],"sourcesContent":["/**\n * This plugin contains the primitives to create\n * a RxDB client-server replication.\n * It is used in the other replication plugins\n * but also can be used as standalone with a custom replication handler.\n */\n\nimport {\n BehaviorSubject,\n combineLatest,\n filter,\n mergeMap,\n Observable,\n Subject,\n Subscription\n} from 'rxjs';\nimport type {\n ReplicationOptions,\n ReplicationPullHandlerResult,\n ReplicationPullOptions,\n ReplicationPushOptions,\n RxCollection,\n RxDocumentData,\n RxError,\n RxJsonSchema,\n RxReplicationPullStreamItem,\n RxReplicationWriteToMasterRow,\n RxStorageInstance,\n RxStorageInstanceReplicationState,\n RxStorageReplicationMeta,\n RxTypeError,\n WithDeleted\n} from '../../types/index.d.ts';\nimport { RxDBLeaderElectionPlugin } from '../leader-election/index.ts';\nimport {\n arrayFilterNotEmpty,\n ensureNotFalsy,\n errorToPlainJson,\n flatClone,\n getFromMapOrCreate,\n PROMISE_RESOLVE_FALSE,\n PROMISE_RESOLVE_TRUE,\n toArray,\n toPromise\n} from '../../plugins/utils/index.ts';\nimport {\n awaitRxStorageReplicationFirstInSync,\n awaitRxStorageReplicationInSync,\n cancelRxStorageReplication,\n getRxReplicationMetaInstanceSchema,\n 
replicateRxStorageInstance\n} from '../../replication-protocol/index.ts';\nimport { newRxError } from '../../rx-error.ts';\nimport {\n awaitRetry,\n DEFAULT_MODIFIER,\n swapDefaultDeletedTodeletedField,\n handlePulledDocuments\n} from './replication-helper.ts';\nimport {\n addConnectedStorageToCollection, removeConnectedStorageFromCollection\n} from '../../rx-database-internal-store.ts';\nimport { addRxPlugin } from '../../plugin.ts';\nimport { hasEncryption } from '../../rx-storage-helper.ts';\nimport { overwritable } from '../../overwritable.ts';\nimport {\n runAsyncPluginHooks\n} from '../../hooks.ts';\n\n\nexport const REPLICATION_STATE_BY_COLLECTION: WeakMap[]> = new WeakMap();\n\nexport class RxReplicationState {\n public readonly subs: Subscription[] = [];\n public readonly subjects = {\n received: new Subject>(), // all documents that are received from the endpoint\n sent: new Subject>(), // all documents that are send to the endpoint\n error: new Subject(), // all errors that are received from the endpoint, emits new Error() objects\n canceled: new BehaviorSubject(false), // true when the replication was canceled\n active: new BehaviorSubject(false) // true when something is running, false when not\n };\n\n readonly received$: Observable> = this.subjects.received.asObservable();\n readonly sent$: Observable> = this.subjects.sent.asObservable();\n readonly error$: Observable = this.subjects.error.asObservable();\n readonly canceled$: Observable = this.subjects.canceled.asObservable();\n readonly active$: Observable = this.subjects.active.asObservable();\n\n readonly metaInfoPromise: Promise<{ collectionName: string, schema: RxJsonSchema>> }>;\n\n public startPromise: Promise;\n\n public onCancel: (() => void)[] = [];\n\n constructor(\n /**\n * The identifier, used to flag revisions\n * and to identify which documents state came from the remote.\n */\n public readonly replicationIdentifier: string,\n public readonly collection: RxCollection,\n public 
readonly deletedField: string,\n public readonly pull?: ReplicationPullOptions,\n public readonly push?: ReplicationPushOptions,\n public readonly live?: boolean,\n public retryTime?: number,\n public autoStart?: boolean,\n ) {\n this.metaInfoPromise = (async () => {\n const metaInstanceCollectionName = 'rx-replication-meta-' + await collection.database.hashFunction([\n this.collection.name,\n this.replicationIdentifier\n ].join('-'));\n const metaInstanceSchema = getRxReplicationMetaInstanceSchema(\n this.collection.schema.jsonSchema,\n hasEncryption(this.collection.schema.jsonSchema)\n );\n return {\n collectionName: metaInstanceCollectionName,\n schema: metaInstanceSchema\n };\n })();\n const replicationStates = getFromMapOrCreate(\n REPLICATION_STATE_BY_COLLECTION,\n collection,\n () => []\n );\n replicationStates.push(this);\n\n // stop the replication when the collection gets destroyed\n this.collection.onDestroy.push(() => this.cancel());\n\n // create getters for the observables\n Object.keys(this.subjects).forEach(key => {\n Object.defineProperty(this, key + '$', {\n get: function () {\n return this.subjects[key].asObservable();\n }\n });\n });\n const startPromise = new Promise(res => {\n this.callOnStart = res;\n });\n this.startPromise = startPromise;\n }\n\n private callOnStart: () => void = undefined as any;\n\n public internalReplicationState?: RxStorageInstanceReplicationState;\n public metaInstance?: RxStorageInstance, any, {}, any>;\n public remoteEvents$: Subject> = new Subject();\n\n public async start(): Promise {\n if (this.isStopped()) {\n return;\n }\n\n // fill in defaults for pull & push\n const pullModifier = this.pull && this.pull.modifier ? this.pull.modifier : DEFAULT_MODIFIER;\n const pushModifier = this.push && this.push.modifier ? 
this.push.modifier : DEFAULT_MODIFIER;\n\n const database = this.collection.database;\n\n const metaInfo = await this.metaInfoPromise;\n\n const [metaInstance] = await Promise.all([\n this.collection.database.storage.createStorageInstance>({\n databaseName: database.name,\n collectionName: metaInfo.collectionName,\n databaseInstanceToken: database.token,\n multiInstance: database.multiInstance, // TODO is this always false?\n options: {},\n schema: metaInfo.schema,\n password: database.password,\n devMode: overwritable.isDevMode()\n }),\n addConnectedStorageToCollection(\n this.collection,\n metaInfo.collectionName,\n metaInfo.schema\n )\n ]);\n this.metaInstance = metaInstance;\n\n this.internalReplicationState = replicateRxStorageInstance({\n pushBatchSize: this.push && this.push.batchSize ? this.push.batchSize : 100,\n pullBatchSize: this.pull && this.pull.batchSize ? this.pull.batchSize : 100,\n initialCheckpoint: {\n upstream: this.push ? this.push.initialCheckpoint : undefined,\n downstream: this.pull ? 
this.pull.initialCheckpoint : undefined\n },\n forkInstance: this.collection.storageInstance,\n metaInstance: this.metaInstance,\n hashFunction: database.hashFunction,\n identifier: 'rxdbreplication' + this.replicationIdentifier,\n conflictHandler: this.collection.conflictHandler,\n replicationHandler: {\n masterChangeStream$: this.remoteEvents$.asObservable().pipe(\n filter(_v => !!this.pull),\n mergeMap(async (ev) => {\n if (ev === 'RESYNC') {\n return ev;\n }\n const useEv = flatClone(ev);\n useEv.documents = handlePulledDocuments(this.collection, this.deletedField, useEv.documents);\n useEv.documents = await Promise.all(\n useEv.documents.map(d => pullModifier(d))\n );\n return useEv;\n })\n ),\n masterChangesSince: async (\n checkpoint: CheckpointType | undefined,\n batchSize: number\n ) => {\n if (!this.pull) {\n return {\n checkpoint: null,\n documents: []\n };\n }\n /**\n * Retries must be done here in the replication primitives plugin,\n * because the replication protocol itself has no\n * error handling.\n */\n let done = false;\n let result: ReplicationPullHandlerResult = {} as any;\n while (!done && !this.isStopped()) {\n try {\n result = await this.pull.handler(\n checkpoint,\n batchSize\n );\n done = true;\n } catch (err: any | Error | Error[]) {\n const emitError = newRxError('RC_PULL', {\n checkpoint,\n errors: toArray(err).map(er => errorToPlainJson(er)),\n direction: 'pull'\n });\n this.subjects.error.next(emitError);\n await awaitRetry(this.collection, ensureNotFalsy(this.retryTime));\n }\n }\n\n if (this.isStopped()) {\n return {\n checkpoint: null,\n documents: []\n };\n }\n\n const useResult = flatClone(result);\n useResult.documents = handlePulledDocuments(this.collection, this.deletedField, useResult.documents);\n useResult.documents = await Promise.all(\n useResult.documents.map(d => pullModifier(d))\n );\n return useResult;\n },\n masterWrite: async (\n rows: RxReplicationWriteToMasterRow[]\n ) => {\n if (!this.push) {\n return [];\n }\n 
let done = false;\n\n await runAsyncPluginHooks('preReplicationMasterWrite', {\n rows,\n collection: this.collection\n });\n\n const useRowsOrNull = await Promise.all(\n rows.map(async (row) => {\n row.newDocumentState = await pushModifier(row.newDocumentState);\n if (row.newDocumentState === null) {\n return null;\n }\n if (row.assumedMasterState) {\n row.assumedMasterState = await pushModifier(row.assumedMasterState);\n }\n if (this.deletedField !== '_deleted') {\n row.newDocumentState = swapDefaultDeletedTodeletedField(this.deletedField, row.newDocumentState) as any;\n if (row.assumedMasterState) {\n row.assumedMasterState = swapDefaultDeletedTodeletedField(this.deletedField, row.assumedMasterState) as any;\n }\n }\n return row;\n })\n );\n const useRows: RxReplicationWriteToMasterRow[] = useRowsOrNull.filter(arrayFilterNotEmpty);\n\n let result: WithDeleted[] = null as any;\n\n // In case all the rows have been filtered and nothing has to be sent\n if (useRows.length === 0) {\n done = true;\n result = [];\n }\n\n while (!done && !this.isStopped()) {\n try {\n result = await this.push.handler(useRows);\n /**\n * It is a common problem that people have wrongly behaving backend\n * that do not return an array with the conflicts on push requests.\n * So we run this check here to make it easier to debug.\n * @link https://github.com/pubkey/rxdb/issues/4103\n */\n if (!Array.isArray(result)) {\n throw newRxError(\n 'RC_PUSH_NO_AR',\n {\n pushRows: rows,\n direction: 'push',\n args: { result }\n }\n );\n }\n done = true;\n } catch (err: any | Error | Error[] | RxError) {\n const emitError = (err as RxError).rxdb ? 
err : newRxError('RC_PUSH', {\n pushRows: rows,\n errors: toArray(err).map(er => errorToPlainJson(er)),\n direction: 'push'\n });\n this.subjects.error.next(emitError);\n await awaitRetry(this.collection, ensureNotFalsy(this.retryTime));\n }\n }\n if (this.isStopped()) {\n return [];\n }\n\n await runAsyncPluginHooks('preReplicationMasterWriteDocumentsHandle', {\n result,\n collection: this.collection\n });\n\n const conflicts = handlePulledDocuments(this.collection, this.deletedField, ensureNotFalsy(result));\n return conflicts;\n }\n }\n });\n this.subs.push(\n this.internalReplicationState.events.error.subscribe(err => {\n this.subjects.error.next(err);\n }),\n this.internalReplicationState.events.processed.down\n .subscribe(row => this.subjects.received.next(row.document as any)),\n this.internalReplicationState.events.processed.up\n .subscribe(writeToMasterRow => {\n this.subjects.sent.next(writeToMasterRow.newDocumentState);\n }),\n combineLatest([\n this.internalReplicationState.events.active.down,\n this.internalReplicationState.events.active.up\n ]).subscribe(([down, up]) => {\n const isActive = down || up;\n this.subjects.active.next(isActive);\n })\n );\n\n if (\n this.pull &&\n this.pull.stream$ &&\n this.live\n ) {\n this.subs.push(\n this.pull.stream$.subscribe({\n next: ev => {\n this.remoteEvents$.next(ev);\n },\n error: err => {\n this.subjects.error.next(err);\n }\n })\n );\n }\n\n /**\n * Non-live replications run once\n * and then automatically get canceled.\n */\n if (!this.live) {\n await awaitRxStorageReplicationFirstInSync(this.internalReplicationState);\n await awaitRxStorageReplicationInSync(this.internalReplicationState);\n await this.cancel();\n }\n this.callOnStart();\n }\n\n isStopped(): boolean {\n if (this.subjects.canceled.getValue()) {\n return true;\n }\n return false;\n }\n\n async awaitInitialReplication(): Promise {\n await this.startPromise;\n return awaitRxStorageReplicationFirstInSync(\n 
ensureNotFalsy(this.internalReplicationState)\n );\n }\n\n /**\n * Returns a promise that resolves when:\n * - All local data is replicated with the remote\n * - No replication cycle is running or in retry-state\n *\n * WARNING: USing this function directly in a multi-tab browser application\n * is dangerous because only the leading instance will ever be replicated,\n * so this promise will not resolve in the other tabs.\n * For multi-tab support you should set and observe a flag in a local document.\n */\n async awaitInSync(): Promise {\n await this.startPromise;\n await awaitRxStorageReplicationFirstInSync(ensureNotFalsy(this.internalReplicationState));\n\n /**\n * To reduce the amount of re-renders and make testing\n * and to make the whole behavior more predictable,\n * we await these things multiple times.\n * For example the state might be in sync already and at the\n * exact same time a pull.stream$ event comes in and we want to catch\n * that in the same call to awaitInSync() instead of resolving\n * while actually the state is not in sync.\n */\n let t = 2;\n while (t > 0) {\n t--;\n\n /**\n * Often awaitInSync() is called directly after a document write,\n * like in the unit tests.\n * So we first have to await the idleness to ensure that all RxChangeEvents\n * are processed already.\n */\n await this.collection.database.requestIdlePromise();\n await awaitRxStorageReplicationInSync(ensureNotFalsy(this.internalReplicationState));\n }\n\n return true;\n }\n\n reSync() {\n this.remoteEvents$.next('RESYNC');\n }\n emitEvent(ev: RxReplicationPullStreamItem) {\n this.remoteEvents$.next(ev);\n }\n\n async cancel(): Promise {\n if (this.isStopped()) {\n return PROMISE_RESOLVE_FALSE;\n }\n\n const promises: Promise[] = this.onCancel.map(fn => toPromise(fn()));\n\n if (this.internalReplicationState) {\n await cancelRxStorageReplication(this.internalReplicationState);\n }\n if (this.metaInstance) {\n promises.push(\n 
ensureNotFalsy(this.internalReplicationState).checkpointQueue\n .then(() => ensureNotFalsy(this.metaInstance).close())\n );\n }\n\n this.subs.forEach(sub => sub.unsubscribe());\n this.subjects.canceled.next(true);\n\n this.subjects.active.complete();\n this.subjects.canceled.complete();\n this.subjects.error.complete();\n this.subjects.received.complete();\n this.subjects.sent.complete();\n\n return Promise.all(promises);\n }\n\n async remove() {\n await ensureNotFalsy(this.metaInstance).remove();\n const metaInfo = await this.metaInfoPromise;\n await this.cancel();\n await removeConnectedStorageFromCollection(\n this.collection,\n metaInfo.collectionName,\n metaInfo.schema\n );\n }\n}\n\n\nexport function replicateRxCollection(\n {\n replicationIdentifier,\n collection,\n deletedField = '_deleted',\n pull,\n push,\n live = true,\n retryTime = 1000 * 5,\n waitForLeadership = true,\n autoStart = true,\n }: ReplicationOptions\n): RxReplicationState {\n addRxPlugin(RxDBLeaderElectionPlugin);\n\n /**\n * It is a common error to forget to add these config\n * objects. So we check here because it makes no sense\n * to start a replication with neither push nor pull.\n */\n if (!pull && !push) {\n throw newRxError('UT3', {\n collection: collection.name,\n args: {\n replicationIdentifier\n }\n });\n }\n\n const replicationState = new RxReplicationState(\n replicationIdentifier,\n collection,\n deletedField,\n pull,\n push,\n live,\n retryTime,\n autoStart\n );\n\n\n startReplicationOnLeaderShip(waitForLeadership, replicationState);\n return replicationState as any;\n}\n\n\nexport function startReplicationOnLeaderShip(\n waitForLeadership: boolean,\n replicationState: RxReplicationState\n) {\n /**\n * Always await this Promise to ensure that the current instance\n * is leader when waitForLeadership=true\n */\n const mustWaitForLeadership = waitForLeadership && replicationState.collection.database.multiInstance;\n const waitTillRun: Promise = mustWaitForLeadership ? 
replicationState.collection.database.waitForLeadership() : PROMISE_RESOLVE_TRUE;\n return waitTillRun.then(() => {\n if (replicationState.isStopped()) {\n return;\n }\n if (replicationState.autoStart) {\n replicationState.start();\n }\n });\n}\n"],"mappings":";;;;;;;;AAOA,IAAAA,KAAA,GAAAC,OAAA;AA0BA,IAAAC,MAAA,GAAAD,OAAA;AACA,IAAAE,OAAA,GAAAF,OAAA;AAWA,IAAAG,OAAA,GAAAH,OAAA;AAOA,IAAAI,QAAA,GAAAJ,OAAA;AACA,IAAAK,kBAAA,GAAAL,OAAA;AAMA,IAAAM,wBAAA,GAAAN,OAAA;AAGA,IAAAO,OAAA,GAAAP,OAAA;AACA,IAAAQ,gBAAA,GAAAR,OAAA;AACA,IAAAS,aAAA,GAAAT,OAAA;AACA,IAAAU,MAAA,GAAAV,OAAA;AAjEA;AACA;AACA;AACA;AACA;AACA;;AAiEO,IAAMW,+BAAsF,GAAAC,OAAA,CAAAD,+BAAA,GAAG,IAAIE,OAAO,CAAC,CAAC;AAAC,IAEvGC,kBAAkB,GAAAF,OAAA,CAAAE,kBAAA;EAsB3B,SAAAA;EACI;AACR;AACA;AACA;EACwBC,qBAA6B,EAC7BC,UAAmC,EACnCC,YAAoB,EACpBC,IAAwD,EACxDC,IAAwC,EACxCC,IAAc,EACvBC,SAAkB,EAClBC,SAAmB,EAC5B;IAAA,KAlCcC,IAAI,GAAmB,EAAE;IAAA,KACzBC,QAAQ,GAAG;MACvBC,QAAQ,EAAE,IAAIC,aAAO,CAA4B,CAAC;MAAE;MACpDC,IAAI,EAAE,IAAID,aAAO,CAAyB,CAAC;MAAE;MAC7CE,KAAK,EAAE,IAAIF,aAAO,CAAwB,CAAC;MAAE;MAC7CG,QAAQ,EAAE,IAAIC,qBAAe,CAAU,KAAK,CAAC;MAAE;MAC/CC,MAAM,EAAE,IAAID,qBAAe,CAAU,KAAK,CAAC,CAAC;IAChD,CAAC;IAAA,KAEQE,SAAS,GAA0C,IAAI,CAACR,QAAQ,CAACC,QAAQ,CAACQ,YAAY,CAAC,CAAC;IAAA,KACxFC,KAAK,GAAuC,IAAI,CAACV,QAAQ,CAACG,IAAI,CAACM,YAAY,CAAC,CAAC;IAAA,KAC7EE,MAAM,GAAsC,IAAI,CAACX,QAAQ,CAACI,KAAK,CAACK,YAAY,CAAC,CAAC;IAAA,KAC9EG,SAAS,GAAoB,IAAI,CAACZ,QAAQ,CAACK,QAAQ,CAACI,YAAY,CAAC,CAAC;IAAA,KAClEI,OAAO,GAAwB,IAAI,CAACb,QAAQ,CAACO,MAAM,CAACE,YAAY,CAAC,CAAC;IAAA,KAMpEK,QAAQ,GAAmB,EAAE;IAAA,KAsD5BC,WAAW,GAAeC,SAAS;IAAA,KAIpCC,aAAa,GAAoE,IAAIf,aAAO,CAAC,CAAC;IAAA,KAnDjFX,qBAA6B,GAA7BA,qBAA6B;IAAA,KAC7BC,UAAmC,GAAnCA,UAAmC;IAAA,KACnCC,YAAoB,GAApBA,YAAoB;IAAA,KACpBC,IAAwD,GAAxDA,IAAwD;IAAA,KACxDC,IAAwC,GAAxCA,IAAwC;IAAA,KACxCC,IAAc,GAAdA,IAAc;IAAA,KACvBC,SAAkB,GAAlBA,SAAkB;IAAA,KAClBC,SAAmB,GAAnBA,SAAmB;IAE1B,IAAI,CAACoB,eAAe,GAAG,CAAC,YAAY;MAChC,IAAMC,0BAA0B,GAAG,sBAAsB,IAAG,MAAM3B,UAAU,CAAC4B,QAAQ,CAACC,YAAY,CAAC,CAC/F,IAAI,CAAC7B,UAAU,CAAC8B,IAAI,EAC
pB,IAAI,CAAC/B,qBAAqB,CAC7B,CAACgC,IAAI,CAAC,GAAG,CAAC,CAAC;MACZ,IAAMC,kBAAkB,GAAG,IAAAC,0CAAkC,EACzD,IAAI,CAACjC,UAAU,CAACkC,MAAM,CAACC,UAAU,EACjC,IAAAC,8BAAa,EAAC,IAAI,CAACpC,UAAU,CAACkC,MAAM,CAACC,UAAU,CACnD,CAAC;MACD,OAAO;QACHE,cAAc,EAAEV,0BAA0B;QAC1CO,MAAM,EAAEF;MACZ,CAAC;IACL,CAAC,EAAE,CAAC;IACJ,IAAMM,iBAAiB,GAAG,IAAAC,0BAAkB,EACxC5C,+BAA+B,EAC/BK,UAAU,EACV,MAAM,EACV,CAAC;IACDsC,iBAAiB,CAACnC,IAAI,CAAC,IAAI,CAAC;;IAE5B;IACA,IAAI,CAACH,UAAU,CAACwC,SAAS,CAACrC,IAAI,CAAC,MAAM,IAAI,CAACsC,MAAM,CAAC,CAAC,CAAC;;IAEnD;IACAC,MAAM,CAACC,IAAI,CAAC,IAAI,CAACnC,QAAQ,CAAC,CAACoC,OAAO,CAACC,GAAG,IAAI;MACtCH,MAAM,CAACI,cAAc,CAAC,IAAI,EAAED,GAAG,GAAG,GAAG,EAAE;QACnCE,GAAG,EAAE,SAAAA,CAAA,EAAY;UACb,OAAO,IAAI,CAACvC,QAAQ,CAACqC,GAAG,CAAC,CAAC5B,YAAY,CAAC,CAAC;QAC5C;MACJ,CAAC,CAAC;IACN,CAAC,CAAC;IACF,IAAM+B,YAAY,GAAG,IAAIC,OAAO,CAAOC,GAAG,IAAI;MAC1C,IAAI,CAAC3B,WAAW,GAAG2B,GAAG;IAC1B,CAAC,CAAC;IACF,IAAI,CAACF,YAAY,GAAGA,YAAY;EACpC;EAAC,IAAAG,MAAA,GAAArD,kBAAA,CAAAsD,SAAA;EAAAD,MAAA,CAQYE,KAAK,GAAlB,eAAAA,MAAA,EAAoC;IAChC,IAAI,IAAI,CAACC,SAAS,CAAC,CAAC,EAAE;MAClB;IACJ;;IAEA;IACA,IAAMC,YAAY,GAAG,IAAI,CAACrD,IAAI,IAAI,IAAI,CAACA,IAAI,CAACsD,QAAQ,GAAG,IAAI,CAACtD,IAAI,CAACsD,QAAQ,GAAGC,mCAAgB;IAC5F,IAAMC,YAAY,GAAG,IAAI,CAACvD,IAAI,IAAI,IAAI,CAACA,IAAI,CAACqD,QAAQ,GAAG,IAAI,CAACrD,IAAI,CAACqD,QAAQ,GAAGC,mCAAgB;IAE5F,IAAM7B,QAAQ,GAAG,IAAI,CAAC5B,UAAU,CAAC4B,QAAQ;IAEzC,IAAM+B,QAAQ,GAAG,MAAM,IAAI,CAACjC,eAAe;IAE3C,IAAM,CAACkC,YAAY,CAAC,GAAG,MAAMX,OAAO,CAACY,GAAG,CAAC,CACrC,IAAI,CAAC7D,UAAU,CAAC4B,QAAQ,CAACkC,OAAO,CAACC,qBAAqB,CAAsD;MACxGC,YAAY,EAAEpC,QAAQ,CAACE,IAAI;MAC3BO,cAAc,EAAEsB,QAAQ,CAACtB,cAAc;MACvC4B,qBAAqB,EAAErC,QAAQ,CAACsC,KAAK;MACrCC,aAAa,EAAEvC,QAAQ,CAACuC,aAAa;MAAE;MACvCC,OAAO,EAAE,CAAC,CAAC;MACXlC,MAAM,EAAEyB,QAAQ,CAACzB,MAAM;MACvBmC,QAAQ,EAAEzC,QAAQ,CAACyC,QAAQ;MAC3BC,OAAO,EAAEC,0BAAY,CAACC,SAAS,CAAC;IACpC,CAAC,CAAC,EACF,IAAAC,wDAA+B,EAC3B,IAAI,CAACzE,UAAU,EACf2D,QAAQ,CAACtB,cAAc,EACvBsB,QAAQ,CAACzB,MACb,CAAC,CACJ,CAAC;IACF,IAAI,CAAC0B,YAAY,GAAGA,YAAY;IAEhC,IAAI,CAACc,w
BAAwB,GAAG,IAAAC,kCAA0B,EAAC;MACvDC,aAAa,EAAE,IAAI,CAACzE,IAAI,IAAI,IAAI,CAACA,IAAI,CAAC0E,SAAS,GAAG,IAAI,CAAC1E,IAAI,CAAC0E,SAAS,GAAG,GAAG;MAC3EC,aAAa,EAAE,IAAI,CAAC5E,IAAI,IAAI,IAAI,CAACA,IAAI,CAAC2E,SAAS,GAAG,IAAI,CAAC3E,IAAI,CAAC2E,SAAS,GAAG,GAAG;MAC3EE,iBAAiB,EAAE;QACfC,QAAQ,EAAE,IAAI,CAAC7E,IAAI,GAAG,IAAI,CAACA,IAAI,CAAC4E,iBAAiB,GAAGvD,SAAS;QAC7DyD,UAAU,EAAE,IAAI,CAAC/E,IAAI,GAAG,IAAI,CAACA,IAAI,CAAC6E,iBAAiB,GAAGvD;MAC1D,CAAC;MACD0D,YAAY,EAAE,IAAI,CAAClF,UAAU,CAACmF,eAAe;MAC7CvB,YAAY,EAAE,IAAI,CAACA,YAAY;MAC/B/B,YAAY,EAAED,QAAQ,CAACC,YAAY;MACnCuD,UAAU,EAAE,iBAAiB,GAAG,IAAI,CAACrF,qBAAqB;MAC1DsF,eAAe,EAAE,IAAI,CAACrF,UAAU,CAACqF,eAAe;MAChDC,kBAAkB,EAAE;QAChBC,mBAAmB,EAAE,IAAI,CAAC9D,aAAa,CAACR,YAAY,CAAC,CAAC,CAACuE,IAAI,CACvD,IAAAC,YAAM,EAACC,EAAE,IAAI,CAAC,CAAC,IAAI,CAACxF,IAAI,CAAC,EACzB,IAAAyF,cAAQ,EAAC,MAAOC,EAAE,IAAK;UACnB,IAAIA,EAAE,KAAK,QAAQ,EAAE;YACjB,OAAOA,EAAE;UACb;UACA,IAAMC,KAAK,GAAG,IAAAC,iBAAS,EAACF,EAAE,CAAC;UAC3BC,KAAK,CAACE,SAAS,GAAG,IAAAC,wCAAqB,EAAC,IAAI,CAAChG,UAAU,EAAE,IAAI,CAACC,YAAY,EAAE4F,KAAK,CAACE,SAAS,CAAC;UAC5FF,KAAK,CAACE,SAAS,GAAG,MAAM9C,OAAO,CAACY,GAAG,CAC/BgC,KAAK,CAACE,SAAS,CAACE,GAAG,CAACC,CAAC,IAAI3C,YAAY,CAAC2C,CAAC,CAAC,CAC5C,CAAC;UACD,OAAOL,KAAK;QAChB,CAAC,CACL,CAAC;QACDM,kBAAkB,EAAE,MAAAA,CAChBC,UAAsC,EACtCvB,SAAiB,KAChB;UACD,IAAI,CAAC,IAAI,CAAC3E,IAAI,EAAE;YACZ,OAAO;cACHkG,UAAU,EAAE,IAAI;cAChBL,SAAS,EAAE;YACf,CAAC;UACL;UACA;AACpB;AACA;AACA;AACA;UACoB,IAAIM,IAAI,GAAG,KAAK;UAChB,IAAIC,MAA+D,GAAG,CAAC,CAAQ;UAC/E,OAAO,CAACD,IAAI,IAAI,CAAC,IAAI,CAAC/C,SAAS,CAAC,CAAC,EAAE;YAC/B,IAAI;cACAgD,MAAM,GAAG,MAAM,IAAI,CAACpG,IAAI,CAACqG,OAAO,CAC5BH,UAAU,EACVvB,SACJ,CAAC;cACDwB,IAAI,GAAG,IAAI;YACf,CAAC,CAAC,OAAOG,GAA0B,EAAE;cACjC,IAAMC,SAAS,GAAG,IAAAC,mBAAU,EAAC,SAAS,EAAE;gBACpCN,UAAU;gBACVO,MAAM,EAAE,IAAAC,eAAO,EAACJ,GAAG,CAAC,CAACP,GAAG,CAACY,EAAE,IAAI,IAAAC,wBAAgB,EAACD,EAAE,CAAC,CAAC;gBACpDE,SAAS,EAAE;cACf,CAAC,CAAC;cACF,IAAI,CAACvG,QAAQ,CAACI,KAAK,CAACoG,IAAI,CAACP,SAAS,CAAC;cACnC,MAAM,IAAAQ,6BAAU,EAAC,IAAI,CAACjH,UAAU,EAAE,IAAAkH
,sBAAc,EAAC,IAAI,CAAC7G,SAAS,CAAC,CAAC;YACrE;UACJ;UAEA,IAAI,IAAI,CAACiD,SAAS,CAAC,CAAC,EAAE;YAClB,OAAO;cACH8C,UAAU,EAAE,IAAI;cAChBL,SAAS,EAAE;YACf,CAAC;UACL;UAEA,IAAMoB,SAAS,GAAG,IAAArB,iBAAS,EAACQ,MAAM,CAAC;UACnCa,SAAS,CAACpB,SAAS,GAAG,IAAAC,wCAAqB,EAAC,IAAI,CAAChG,UAAU,EAAE,IAAI,CAACC,YAAY,EAAEkH,SAAS,CAACpB,SAAS,CAAC;UACpGoB,SAAS,CAACpB,SAAS,GAAG,MAAM9C,OAAO,CAACY,GAAG,CACnCsD,SAAS,CAACpB,SAAS,CAACE,GAAG,CAACC,CAAC,IAAI3C,YAAY,CAAC2C,CAAC,CAAC,CAChD,CAAC;UACD,OAAOiB,SAAS;QACpB,CAAC;QACDC,WAAW,EAAE,MACTC,IAAgD,IAC/C;UACD,IAAI,CAAC,IAAI,CAAClH,IAAI,EAAE;YACZ,OAAO,EAAE;UACb;UACA,IAAIkG,IAAI,GAAG,KAAK;UAEhB,MAAM,IAAAiB,0BAAmB,EAAC,2BAA2B,EAAE;YACnDD,IAAI;YACJrH,UAAU,EAAE,IAAI,CAACA;UACrB,CAAC,CAAC;UAEF,IAAMuH,aAAa,GAAG,MAAMtE,OAAO,CAACY,GAAG,CACnCwD,IAAI,CAACpB,GAAG,CAAC,MAAOuB,GAAG,IAAK;YACpBA,GAAG,CAACC,gBAAgB,GAAG,MAAM/D,YAAY,CAAC8D,GAAG,CAACC,gBAAgB,CAAC;YAC/D,IAAID,GAAG,CAACC,gBAAgB,KAAK,IAAI,EAAE;cAC/B,OAAO,IAAI;YACf;YACA,IAAID,GAAG,CAACE,kBAAkB,EAAE;cACxBF,GAAG,CAACE,kBAAkB,GAAG,MAAMhE,YAAY,CAAC8D,GAAG,CAACE,kBAAkB,CAAC;YACvE;YACA,IAAI,IAAI,CAACzH,YAAY,KAAK,UAAU,EAAE;cAClCuH,GAAG,CAACC,gBAAgB,GAAG,IAAAE,mDAAgC,EAAC,IAAI,CAAC1H,YAAY,EAAEuH,GAAG,CAACC,gBAAgB,CAAQ;cACvG,IAAID,GAAG,CAACE,kBAAkB,EAAE;gBACxBF,GAAG,CAACE,kBAAkB,GAAG,IAAAC,mDAAgC,EAAC,IAAI,CAAC1H,YAAY,EAAEuH,GAAG,CAACE,kBAAkB,CAAQ;cAC/G;YACJ;YACA,OAAOF,GAAG;UACd,CAAC,CACL,CAAC;UACD,IAAMI,OAAmD,GAAGL,aAAa,CAAC9B,MAAM,CAACoC,2BAAmB,CAAC;UAErG,IAAIvB,MAAgC,GAAG,IAAW;;UAElD;UACA,IAAIsB,OAAO,CAACE,MAAM,KAAK,CAAC,EAAE;YACtBzB,IAAI,GAAG,IAAI;YACXC,MAAM,GAAG,EAAE;UACf;UAEA,OAAO,CAACD,IAAI,IAAI,CAAC,IAAI,CAAC/C,SAAS,CAAC,CAAC,EAAE;YAC/B,IAAI;cACAgD,MAAM,GAAG,MAAM,IAAI,CAACnG,IAAI,CAACoG,OAAO,CAACqB,OAAO,CAAC;cACzC;AAC5B;AACA;AACA;AACA;AACA;cAC4B,IAAI,CAACG,KAAK,CAACC,OAAO,CAAC1B,MAAM,CAAC,EAAE;gBACxB,MAAM,IAAAI,mBAAU,EACZ,eAAe,EACf;kBACIuB,QAAQ,EAAEZ,IAAI;kBACdN,SAAS,EAAE,MAAM;kBACjBmB,IAAI,EAAE;oBAAE5B;kBAAO;gBACnB,CACJ,CAAC;cACL;cACAD,IAAI,GAAG,IAAI;YACf,CAAC,CAAC,OAAOG,GAAoC,EAAE;cAC3C,IAAMC,SAAS,GAAID,GAAG
,CAAa2B,IAAI,GAAG3B,GAAG,GAAG,IAAAE,mBAAU,EAAC,SAAS,EAAE;gBAClEuB,QAAQ,EAAEZ,IAAI;gBACdV,MAAM,EAAE,IAAAC,eAAO,EAACJ,GAAG,CAAC,CAACP,GAAG,CAACY,EAAE,IAAI,IAAAC,wBAAgB,EAACD,EAAE,CAAC,CAAC;gBACpDE,SAAS,EAAE;cACf,CAAC,CAAC;cACF,IAAI,CAACvG,QAAQ,CAACI,KAAK,CAACoG,IAAI,CAACP,SAAS,CAAC;cACnC,MAAM,IAAAQ,6BAAU,EAAC,IAAI,CAACjH,UAAU,EAAE,IAAAkH,sBAAc,EAAC,IAAI,CAAC7G,SAAS,CAAC,CAAC;YACrE;UACJ;UACA,IAAI,IAAI,CAACiD,SAAS,CAAC,CAAC,EAAE;YAClB,OAAO,EAAE;UACb;UAEA,MAAM,IAAAgE,0BAAmB,EAAC,0CAA0C,EAAE;YAClEhB,MAAM;YACNtG,UAAU,EAAE,IAAI,CAACA;UACrB,CAAC,CAAC;UAEF,IAAMoI,SAAS,GAAG,IAAApC,wCAAqB,EAAC,IAAI,CAAChG,UAAU,EAAE,IAAI,CAACC,YAAY,EAAE,IAAAiH,sBAAc,EAACZ,MAAM,CAAC,CAAC;UACnG,OAAO8B,SAAS;QACpB;MACJ;IACJ,CAAC,CAAC;IACF,IAAI,CAAC7H,IAAI,CAACJ,IAAI,CACV,IAAI,CAACuE,wBAAwB,CAAC2D,MAAM,CAACzH,KAAK,CAAC0H,SAAS,CAAC9B,GAAG,IAAI;MACxD,IAAI,CAAChG,QAAQ,CAACI,KAAK,CAACoG,IAAI,CAACR,GAAG,CAAC;IACjC,CAAC,CAAC,EACF,IAAI,CAAC9B,wBAAwB,CAAC2D,MAAM,CAACE,SAAS,CAACC,IAAI,CAC9CF,SAAS,CAACd,GAAG,IAAI,IAAI,CAAChH,QAAQ,CAACC,QAAQ,CAACuG,IAAI,CAACQ,GAAG,CAACiB,QAAe,CAAC,CAAC,EACvE,IAAI,CAAC/D,wBAAwB,CAAC2D,MAAM,CAACE,SAAS,CAACG,EAAE,CAC5CJ,SAAS,CAACK,gBAAgB,IAAI;MAC3B,IAAI,CAACnI,QAAQ,CAACG,IAAI,CAACqG,IAAI,CAAC2B,gBAAgB,CAAClB,gBAAgB,CAAC;IAC9D,CAAC,CAAC,EACN,IAAAmB,mBAAa,EAAC,CACV,IAAI,CAAClE,wBAAwB,CAAC2D,MAAM,CAACtH,MAAM,CAACyH,IAAI,EAChD,IAAI,CAAC9D,wBAAwB,CAAC2D,MAAM,CAACtH,MAAM,CAAC2H,EAAE,CACjD,CAAC,CAACJ,SAAS,CAAC,CAAC,CAACE,IAAI,EAAEE,EAAE,CAAC,KAAK;MACzB,IAAMG,QAAQ,GAAGL,IAAI,IAAIE,EAAE;MAC3B,IAAI,CAAClI,QAAQ,CAACO,MAAM,CAACiG,IAAI,CAAC6B,QAAQ,CAAC;IACvC,CAAC,CACL,CAAC;IAED,IACI,IAAI,CAAC3I,IAAI,IACT,IAAI,CAACA,IAAI,CAAC4I,OAAO,IACjB,IAAI,CAAC1I,IAAI,EACX;MACE,IAAI,CAACG,IAAI,CAACJ,IAAI,CACV,IAAI,CAACD,IAAI,CAAC4I,OAAO,CAACR,SAAS,CAAC;QACxBtB,IAAI,EAAEpB,EAAE,IAAI;UACR,IAAI,CAACnE,aAAa,CAACuF,IAAI,CAACpB,EAAE,CAAC;QAC/B,CAAC;QACDhF,KAAK,EAAE4F,GAAG,IAAI;UACV,IAAI,CAAChG,QAAQ,CAACI,KAAK,CAACoG,IAAI,CAACR,GAAG,CAAC;QACjC;MACJ,CAAC,CACL,CAAC;IACL;;IAEA;AACR;AACA;AACA;IACQ,IAAI,CAAC,IAAI,CAACp
G,IAAI,EAAE;MACZ,MAAM,IAAA2I,4CAAoC,EAAC,IAAI,CAACrE,wBAAwB,CAAC;MACzE,MAAM,IAAAsE,uCAA+B,EAAC,IAAI,CAACtE,wBAAwB,CAAC;MACpE,MAAM,IAAI,CAACjC,MAAM,CAAC,CAAC;IACvB;IACA,IAAI,CAAClB,WAAW,CAAC,CAAC;EACtB,CAAC;EAAA4B,MAAA,CAEDG,SAAS,GAAT,SAAAA,UAAA,EAAqB;IACjB,IAAI,IAAI,CAAC9C,QAAQ,CAACK,QAAQ,CAACoI,QAAQ,CAAC,CAAC,EAAE;MACnC,OAAO,IAAI;IACf;IACA,OAAO,KAAK;EAChB,CAAC;EAAA9F,MAAA,CAEK+F,uBAAuB,GAA7B,eAAAA,wBAAA,EAA+C;IAC3C,MAAM,IAAI,CAAClG,YAAY;IACvB,OAAO,IAAA+F,4CAAoC,EACvC,IAAA7B,sBAAc,EAAC,IAAI,CAACxC,wBAAwB,CAChD,CAAC;EACL;;EAEA;AACJ;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,KATI;EAAAvB,MAAA,CAUMgG,WAAW,GAAjB,eAAAA,YAAA,EAAmC;IAC/B,MAAM,IAAI,CAACnG,YAAY;IACvB,MAAM,IAAA+F,4CAAoC,EAAC,IAAA7B,sBAAc,EAAC,IAAI,CAACxC,wBAAwB,CAAC,CAAC;;IAEzF;AACR;AACA;AACA;AACA;AACA;AACA;AACA;AACA;IACQ,IAAI0E,CAAC,GAAG,CAAC;IACT,OAAOA,CAAC,GAAG,CAAC,EAAE;MACVA,CAAC,EAAE;;MAEH;AACZ;AACA;AACA;AACA;AACA;MACY,MAAM,IAAI,CAACpJ,UAAU,CAAC4B,QAAQ,CAACyH,kBAAkB,CAAC,CAAC;MACnD,MAAM,IAAAL,uCAA+B,EAAC,IAAA9B,sBAAc,EAAC,IAAI,CAACxC,wBAAwB,CAAC,CAAC;IACxF;IAEA,OAAO,IAAI;EACf,CAAC;EAAAvB,MAAA,CAEDmG,MAAM,GAAN,SAAAA,OAAA,EAAS;IACL,IAAI,CAAC7H,aAAa,CAACuF,IAAI,CAAC,QAAQ,CAAC;EACrC,CAAC;EAAA7D,MAAA,CACDoG,SAAS,GAAT,SAAAA,UAAU3D,EAA0D,EAAE;IAClE,IAAI,CAACnE,aAAa,CAACuF,IAAI,CAACpB,EAAE,CAAC;EAC/B,CAAC;EAAAzC,MAAA,CAEKV,MAAM,GAAZ,eAAAA,OAAA,EAA6B;IACzB,IAAI,IAAI,CAACa,SAAS,CAAC,CAAC,EAAE;MAClB,OAAOkG,6BAAqB;IAChC;IAEA,IAAMC,QAAwB,GAAG,IAAI,CAACnI,QAAQ,CAAC2E,GAAG,CAACyD,EAAE,IAAI,IAAAC,iBAAS,EAACD,EAAE,CAAC,CAAC,CAAC,CAAC;IAEzE,IAAI,IAAI,CAAChF,wBAAwB,EAAE;MAC/B,MAAM,IAAAkF,kCAA0B,EAAC,IAAI,CAAClF,wBAAwB,CAAC;IACnE;IACA,IAAI,IAAI,CAACd,YAAY,EAAE;MACnB6F,QAAQ,CAACtJ,IAAI,CACT,IAAA+G,sBAAc,EAAC,IAAI,CAACxC,wBAAwB,CAAC,CAACmF,eAAe,CACxDC,IAAI,CAAC,MAAM,IAAA5C,sBAAc,EAAC,IAAI,CAACtD,YAAY,CAAC,CAACmG,KAAK,CAAC,CAAC,CAC7D,CAAC;IACL;IAEA,IAAI,CAACxJ,IAAI,CAACqC,OAAO,CAACoH,GAAG,IAAIA,GAAG,CAACC,WAAW,CAAC,CAAC,CAAC;IAC3C,IAAI,CAACzJ,QAAQ,CAACK,QAAQ,CAACmG,IAAI,CAAC,IAAI,CAAC;IAEjC,IAAI,CAACxG,QAAQ,CAACO,MAAM,CAACmJ,QAAQ
,CAAC,CAAC;IAC/B,IAAI,CAAC1J,QAAQ,CAACK,QAAQ,CAACqJ,QAAQ,CAAC,CAAC;IACjC,IAAI,CAAC1J,QAAQ,CAACI,KAAK,CAACsJ,QAAQ,CAAC,CAAC;IAC9B,IAAI,CAAC1J,QAAQ,CAACC,QAAQ,CAACyJ,QAAQ,CAAC,CAAC;IACjC,IAAI,CAAC1J,QAAQ,CAACG,IAAI,CAACuJ,QAAQ,CAAC,CAAC;IAE7B,OAAOjH,OAAO,CAACY,GAAG,CAAC4F,QAAQ,CAAC;EAChC,CAAC;EAAAtG,MAAA,CAEKgH,MAAM,GAAZ,eAAAA,OAAA,EAAe;IACX,MAAM,IAAAjD,sBAAc,EAAC,IAAI,CAACtD,YAAY,CAAC,CAACuG,MAAM,CAAC,CAAC;IAChD,IAAMxG,QAAQ,GAAG,MAAM,IAAI,CAACjC,eAAe;IAC3C,MAAM,IAAI,CAACe,MAAM,CAAC,CAAC;IACnB,MAAM,IAAA2H,6DAAoC,EACtC,IAAI,CAACpK,UAAU,EACf2D,QAAQ,CAACtB,cAAc,EACvBsB,QAAQ,CAACzB,MACb,CAAC;EACL,CAAC;EAAA,OAAApC,kBAAA;AAAA;AAIE,SAASuK,qBAAqBA,CACjC;EACItK,qBAAqB;EACrBC,UAAU;EACVC,YAAY,GAAG,UAAU;EACzBC,IAAI;EACJC,IAAI;EACJC,IAAI,GAAG,IAAI;EACXC,SAAS,GAAG,IAAI,GAAG,CAAC;EACpBiK,iBAAiB,GAAG,IAAI;EACxBhK,SAAS,GAAG;AAC+B,CAAC,EACH;EAC7C,IAAAiK,mBAAW,EAACC,+BAAwB,CAAC;;EAErC;AACJ;AACA;AACA;AACA;EACI,IAAI,CAACtK,IAAI,IAAI,CAACC,IAAI,EAAE;IAChB,MAAM,IAAAuG,mBAAU,EAAC,KAAK,EAAE;MACpB1G,UAAU,EAAEA,UAAU,CAAC8B,IAAI;MAC3BoG,IAAI,EAAE;QACFnI;MACJ;IACJ,CAAC,CAAC;EACN;EAEA,IAAM0K,gBAAgB,GAAG,IAAI3K,kBAAkB,CAC3CC,qBAAqB,EACrBC,UAAU,EACVC,YAAY,EACZC,IAAI,EACJC,IAAI,EACJC,IAAI,EACJC,SAAS,EACTC,SACJ,CAAC;EAGDoK,4BAA4B,CAACJ,iBAAiB,EAAEG,gBAAgB,CAAC;EACjE,OAAOA,gBAAgB;AAC3B;AAGO,SAASC,4BAA4BA,CACxCJ,iBAA0B,EAC1BG,gBAA8C,EAChD;EACE;AACJ;AACA;AACA;EACI,IAAME,qBAAqB,GAAGL,iBAAiB,IAAIG,gBAAgB,CAACzK,UAAU,CAAC4B,QAAQ,CAACuC,aAAa;EACrG,IAAMyG,WAAyB,GAAGD,qBAAqB,GAAGF,gBAAgB,CAACzK,UAAU,CAAC4B,QAAQ,CAAC0I,iBAAiB,CAAC,CAAC,GAAGO,4BAAoB;EACzI,OAAOD,WAAW,CAACd,IAAI,CAAC,MAAM;IAC1B,IAAIW,gBAAgB,CAACnH,SAAS,CAAC,CAAC,EAAE;MAC9B;IACJ;IACA,IAAImH,gBAAgB,CAACnK,SAAS,EAAE;MAC5BmK,gBAAgB,CAACpH,KAAK,CAAC,CAAC;IAC5B;EACJ,CAAC,CAAC;AACN","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/replication/replication-helper.js b/dist/cjs/plugins/replication/replication-helper.js deleted file mode 100644 index e32d38f9738..00000000000 --- a/dist/cjs/plugins/replication/replication-helper.js 
+++ /dev/null @@ -1,77 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.DEFAULT_MODIFIER = void 0; -exports.awaitRetry = awaitRetry; -exports.handlePulledDocuments = handlePulledDocuments; -exports.swapDefaultDeletedTodeletedField = swapDefaultDeletedTodeletedField; -var _index = require("../../plugins/utils/index.js"); -var _rxSchemaHelper = require("../../rx-schema-helper.js"); -// does nothing -var DEFAULT_MODIFIER = d => Promise.resolve(d); -exports.DEFAULT_MODIFIER = DEFAULT_MODIFIER; -function swapDefaultDeletedTodeletedField(deletedField, doc) { - if (deletedField === '_deleted') { - return doc; - } else { - doc = (0, _index.flatClone)(doc); - var isDeleted = !!doc._deleted; - doc[deletedField] = isDeleted; - delete doc._deleted; - return doc; - } -} - -/** - * Must be run over all plain document data - * that was pulled from the remote. - * Used to fill up fields or modify the deleted field etc. - */ -function handlePulledDocuments(collection, deletedField, docs) { - return docs.map(doc => { - var useDoc = (0, _index.flatClone)(doc); - - /** - * Swap out the deleted field - */ - if (deletedField !== '_deleted') { - var isDeleted = !!useDoc[deletedField]; - useDoc._deleted = isDeleted; - delete useDoc[deletedField]; - } else { - // ensure we have a boolean. - useDoc._deleted = !!useDoc._deleted; - } - - /** - * Fill up composed primary - */ - var primaryPath = collection.schema.primaryPath; - useDoc[primaryPath] = (0, _rxSchemaHelper.getComposedPrimaryKeyOfDocumentData)(collection.schema.jsonSchema, useDoc); - return useDoc; - }); -} - -/** - * Like normal promiseWait() - * but will skip the wait time if the online-state changes. 
- */ -function awaitRetry(collection, retryTime) { - if (typeof window === 'undefined' || typeof window !== 'object' || typeof window.addEventListener === 'undefined' || navigator.onLine) { - return collection.promiseWait(retryTime); - } - var listener; - var onlineAgain = new Promise(res => { - listener = () => { - window.removeEventListener('online', listener); - res(); - }; - window.addEventListener('online', listener); - }); - return Promise.race([onlineAgain, collection.promiseWait(retryTime)]).then(() => { - window.removeEventListener('online', listener); - }); -} -//# sourceMappingURL=replication-helper.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/replication/replication-helper.js.map b/dist/cjs/plugins/replication/replication-helper.js.map deleted file mode 100644 index 7dc0df0a0fa..00000000000 --- a/dist/cjs/plugins/replication/replication-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"replication-helper.js","names":["_index","require","_rxSchemaHelper","DEFAULT_MODIFIER","d","Promise","resolve","exports","swapDefaultDeletedTodeletedField","deletedField","doc","flatClone","isDeleted","_deleted","handlePulledDocuments","collection","docs","map","useDoc","primaryPath","schema","getComposedPrimaryKeyOfDocumentData","jsonSchema","awaitRetry","retryTime","window","addEventListener","navigator","onLine","promiseWait","listener","onlineAgain","res","removeEventListener","race","then"],"sources":["../../../../src/plugins/replication/replication-helper.ts"],"sourcesContent":["import type {\n RxCollection,\n WithDeleted\n} from '../../types/index.d.ts';\nimport { flatClone } from '../../plugins/utils/index.ts';\nimport { getComposedPrimaryKeyOfDocumentData } from '../../rx-schema-helper.ts';\n\n// does nothing\nexport const DEFAULT_MODIFIER = (d: any) => Promise.resolve(d);\n\n\nexport function swapDefaultDeletedTodeletedField(\n deletedField: string,\n doc: WithDeleted\n): RxDocType {\n if (deletedField === '_deleted') {\n 
return doc;\n } else {\n doc = flatClone(doc);\n const isDeleted = !!doc._deleted;\n (doc as any)[deletedField] = isDeleted;\n delete (doc as any)._deleted;\n return doc;\n }\n}\n\n/**\n * Must be run over all plain document data\n * that was pulled from the remote.\n * Used to fill up fields or modify the deleted field etc.\n */\nexport function handlePulledDocuments(\n collection: RxCollection,\n deletedField: string,\n docs: RxDocType[]\n): WithDeleted[] {\n return docs.map(doc => {\n const useDoc: WithDeleted = flatClone(doc) as any;\n\n /**\n * Swap out the deleted field\n */\n if (deletedField !== '_deleted') {\n const isDeleted = !!(useDoc as any)[deletedField];\n (useDoc as any)._deleted = isDeleted;\n delete (useDoc as any)[deletedField];\n } else {\n // ensure we have a boolean.\n useDoc._deleted = !!useDoc._deleted;\n }\n\n /**\n * Fill up composed primary\n */\n const primaryPath = collection.schema.primaryPath;\n (useDoc as any)[primaryPath] = getComposedPrimaryKeyOfDocumentData(\n collection.schema.jsonSchema,\n useDoc\n );\n return useDoc as any;\n });\n}\n\n\n/**\n * Like normal promiseWait()\n * but will skip the wait time if the online-state changes.\n */\nexport function awaitRetry(\n collection: RxCollection,\n retryTime: number\n) {\n if (\n typeof window === 'undefined' ||\n typeof window !== 'object' ||\n typeof window.addEventListener === 'undefined' ||\n navigator.onLine\n ) {\n return collection.promiseWait(retryTime);\n }\n\n let listener: any;\n const onlineAgain = new Promise(res => {\n listener = () => {\n window.removeEventListener('online', listener);\n res();\n };\n window.addEventListener('online', listener);\n });\n\n return Promise.race([\n onlineAgain,\n collection.promiseWait(retryTime)\n ]).then(() => {\n window.removeEventListener('online', listener);\n 
});\n}\n"],"mappings":";;;;;;;;;AAIA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,eAAA,GAAAD,OAAA;AAEA;AACO,IAAME,gBAAgB,GAAIC,CAAM,IAAKC,OAAO,CAACC,OAAO,CAACF,CAAC,CAAC;AAACG,OAAA,CAAAJ,gBAAA,GAAAA,gBAAA;AAGxD,SAASK,gCAAgCA,CAC5CC,YAAoB,EACpBC,GAA2B,EAClB;EACT,IAAID,YAAY,KAAK,UAAU,EAAE;IAC7B,OAAOC,GAAG;EACd,CAAC,MAAM;IACHA,GAAG,GAAG,IAAAC,gBAAS,EAACD,GAAG,CAAC;IACpB,IAAME,SAAS,GAAG,CAAC,CAACF,GAAG,CAACG,QAAQ;IAC/BH,GAAG,CAASD,YAAY,CAAC,GAAGG,SAAS;IACtC,OAAQF,GAAG,CAASG,QAAQ;IAC5B,OAAOH,GAAG;EACd;AACJ;;AAEA;AACA;AACA;AACA;AACA;AACO,SAASI,qBAAqBA,CACjCC,UAAmC,EACnCN,YAAoB,EACpBO,IAAiB,EACO;EACxB,OAAOA,IAAI,CAACC,GAAG,CAACP,GAAG,IAAI;IACnB,IAAMQ,MAA8B,GAAG,IAAAP,gBAAS,EAACD,GAAG,CAAQ;;IAE5D;AACR;AACA;IACQ,IAAID,YAAY,KAAK,UAAU,EAAE;MAC7B,IAAMG,SAAS,GAAG,CAAC,CAAEM,MAAM,CAAST,YAAY,CAAC;MAChDS,MAAM,CAASL,QAAQ,GAAGD,SAAS;MACpC,OAAQM,MAAM,CAAST,YAAY,CAAC;IACxC,CAAC,MAAM;MACH;MACAS,MAAM,CAACL,QAAQ,GAAG,CAAC,CAACK,MAAM,CAACL,QAAQ;IACvC;;IAEA;AACR;AACA;IACQ,IAAMM,WAAW,GAAGJ,UAAU,CAACK,MAAM,CAACD,WAAW;IAChDD,MAAM,CAASC,WAAW,CAAC,GAAG,IAAAE,mDAAmC,EAC9DN,UAAU,CAACK,MAAM,CAACE,UAAU,EAC5BJ,MACJ,CAAC;IACD,OAAOA,MAAM;EACjB,CAAC,CAAC;AACN;;AAGA;AACA;AACA;AACA;AACO,SAASK,UAAUA,CACtBR,UAAwB,EACxBS,SAAiB,EACnB;EACE,IACI,OAAOC,MAAM,KAAK,WAAW,IAC7B,OAAOA,MAAM,KAAK,QAAQ,IAC1B,OAAOA,MAAM,CAACC,gBAAgB,KAAK,WAAW,IAC9CC,SAAS,CAACC,MAAM,EAClB;IACE,OAAOb,UAAU,CAACc,WAAW,CAACL,SAAS,CAAC;EAC5C;EAEA,IAAIM,QAAa;EACjB,IAAMC,WAAW,GAAG,IAAI1B,OAAO,CAAO2B,GAAG,IAAI;IACzCF,QAAQ,GAAGA,CAAA,KAAM;MACbL,MAAM,CAACQ,mBAAmB,CAAC,QAAQ,EAAEH,QAAQ,CAAC;MAC9CE,GAAG,CAAC,CAAC;IACT,CAAC;IACDP,MAAM,CAACC,gBAAgB,CAAC,QAAQ,EAAEI,QAAQ,CAAC;EAC/C,CAAC,CAAC;EAEF,OAAOzB,OAAO,CAAC6B,IAAI,CAAC,CAChBH,WAAW,EACXhB,UAAU,CAACc,WAAW,CAACL,SAAS,CAAC,CACpC,CAAC,CAACW,IAAI,CAAC,MAAM;IACVV,MAAM,CAACQ,mBAAmB,CAAC,QAAQ,EAAEH,QAAQ,CAAC;EAClD,CAAC,CAAC;AACN","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/state/helpers.js b/dist/cjs/plugins/state/helpers.js deleted file mode 100644 index 6d28daa250c..00000000000 --- 
a/dist/cjs/plugins/state/helpers.js +++ /dev/null @@ -1,61 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RX_STATE_SCHEMA_TITLE = exports.RX_STATE_ID_LENGTH = exports.RX_STATE_COLLECTION_SCHEMA = void 0; -exports.nextRxStateId = nextRxStateId; -var RX_STATE_SCHEMA_TITLE = exports.RX_STATE_SCHEMA_TITLE = 'RxStateCollection'; -var RX_STATE_ID_LENGTH = exports.RX_STATE_ID_LENGTH = 14; -var RX_STATE_COLLECTION_SCHEMA = exports.RX_STATE_COLLECTION_SCHEMA = { - title: RX_STATE_SCHEMA_TITLE, - primaryKey: 'id', - version: 0, - type: 'object', - properties: { - id: { - type: 'string', - /** - * We store numbers in string format like '0001' - * with a left-pad. - * TODO instead we should transform the number to a string - * with the same sort-position to improve performance. - */ - maxLength: RX_STATE_ID_LENGTH, - minLength: RX_STATE_ID_LENGTH, - pattern: '[0-9]+' - }, - sId: { - type: 'string', - maxLength: 10, - minLength: 10 - }, - ops: { - type: 'array', - minItems: 1, - items: { - type: 'object', - properties: { - k: { - type: 'string' - }, - v: { - type: 'object' - } - }, - required: ['key', 'value'] - } - } - }, - required: ['id', 'sId', 'ops'] -}; -function nextRxStateId(lastId) { - if (!lastId) { - return ''.padStart(RX_STATE_ID_LENGTH, '0'); - } - var parsed = parseInt(lastId, 10); - var next = parsed + 1; - var nextString = next.toString(); - return nextString.padStart(RX_STATE_ID_LENGTH, '0'); -} -//# sourceMappingURL=helpers.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/state/helpers.js.map b/dist/cjs/plugins/state/helpers.js.map deleted file mode 100644 index 5d216b9c7e9..00000000000 --- a/dist/cjs/plugins/state/helpers.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"helpers.js","names":["RX_STATE_SCHEMA_TITLE","exports","RX_STATE_ID_LENGTH","RX_STATE_COLLECTION_SCHEMA","title","primaryKey","version","type","properties","id","maxLength","minLength","pattern","sId","ops","minItems","items","k","v","required","nextRxStateId","lastId","padStart","parsed","parseInt","next","nextString","toString"],"sources":["../../../../src/plugins/state/helpers.ts"],"sourcesContent":["import type { DeepReadonly, RxJsonSchema } from '../../types';\nimport type { RxStateDocument } from './types';\n\nexport const RX_STATE_SCHEMA_TITLE = 'RxStateCollection';\nexport const RX_STATE_ID_LENGTH = 14;\nexport const RX_STATE_COLLECTION_SCHEMA: DeepReadonly> = {\n title: RX_STATE_SCHEMA_TITLE,\n primaryKey: 'id',\n version: 0,\n type: 'object',\n properties: {\n id: {\n type: 'string',\n /**\n * We store numbers in string format like '0001'\n * with a left-pad.\n * TODO instead we should transform the number to a string\n * with the same sort-position to improve performance.\n */\n maxLength: RX_STATE_ID_LENGTH,\n minLength: RX_STATE_ID_LENGTH,\n pattern: '[0-9]+'\n },\n sId: {\n type: 'string',\n maxLength: 10,\n minLength: 10\n },\n ops: {\n type: 'array',\n minItems: 1,\n items: {\n type: 'object',\n properties: {\n k: {\n type: 'string'\n },\n v: {\n type: 'object'\n }\n },\n required: [\n 'key',\n 'value'\n ]\n }\n }\n },\n required: [\n 'id',\n 'sId',\n 'ops'\n ]\n} as const;\n\n\nexport function nextRxStateId(lastId?: string): string {\n if (!lastId) {\n return ''.padStart(RX_STATE_ID_LENGTH, '0');\n }\n const parsed = parseInt(lastId, 10);\n const next = parsed + 1;\n const nextString = next.toString();\n return nextString.padStart(RX_STATE_ID_LENGTH, 
'0');\n}\n"],"mappings":";;;;;;;AAGO,IAAMA,qBAAqB,GAAAC,OAAA,CAAAD,qBAAA,GAAG,mBAAmB;AACjD,IAAME,kBAAkB,GAAAD,OAAA,CAAAC,kBAAA,GAAG,EAAE;AAC7B,IAAMC,0BAAuE,GAAAF,OAAA,CAAAE,0BAAA,GAAG;EACnFC,KAAK,EAAEJ,qBAAqB;EAC5BK,UAAU,EAAE,IAAI;EAChBC,OAAO,EAAE,CAAC;EACVC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,EAAE,EAAE;MACAF,IAAI,EAAE,QAAQ;MACd;AACZ;AACA;AACA;AACA;AACA;MACYG,SAAS,EAAER,kBAAkB;MAC7BS,SAAS,EAAET,kBAAkB;MAC7BU,OAAO,EAAE;IACb,CAAC;IACDC,GAAG,EAAE;MACDN,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE,EAAE;MACbC,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDP,IAAI,EAAE,OAAO;MACbQ,QAAQ,EAAE,CAAC;MACXC,KAAK,EAAE;QACHT,IAAI,EAAE,QAAQ;QACdC,UAAU,EAAE;UACRS,CAAC,EAAE;YACCV,IAAI,EAAE;UACV,CAAC;UACDW,CAAC,EAAE;YACCX,IAAI,EAAE;UACV;QACJ,CAAC;QACDY,QAAQ,EAAE,CACN,KAAK,EACL,OAAO;MAEf;IACJ;EACJ,CAAC;EACDA,QAAQ,EAAE,CACN,IAAI,EACJ,KAAK,EACL,KAAK;AAEb,CAAU;AAGH,SAASC,aAAaA,CAACC,MAAe,EAAU;EACnD,IAAI,CAACA,MAAM,EAAE;IACT,OAAO,EAAE,CAACC,QAAQ,CAACpB,kBAAkB,EAAE,GAAG,CAAC;EAC/C;EACA,IAAMqB,MAAM,GAAGC,QAAQ,CAACH,MAAM,EAAE,EAAE,CAAC;EACnC,IAAMI,IAAI,GAAGF,MAAM,GAAG,CAAC;EACvB,IAAMG,UAAU,GAAGD,IAAI,CAACE,QAAQ,CAAC,CAAC;EAClC,OAAOD,UAAU,CAACJ,QAAQ,CAACpB,kBAAkB,EAAE,GAAG,CAAC;AACvD","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/state/index.js b/dist/cjs/plugins/state/index.js deleted file mode 100644 index 41ae26de553..00000000000 --- a/dist/cjs/plugins/state/index.js +++ /dev/null @@ -1,42 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _exportNames = { - addState: true, - RxDBStatePlugin: true -}; -exports.RxDBStatePlugin = void 0; -exports.addState = addState; -var _utilsMap = require("../utils/utils-map.js"); -var _rxState = require("./rx-state.js"); -var _helpers = require("./helpers.js"); -Object.keys(_helpers).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _helpers[key]) return; - 
Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _helpers[key]; - } - }); -}); -var STATE_BY_DATABASE = new WeakMap(); -async function addState(namespace = '') { - var stateCache = (0, _utilsMap.getFromMapOrCreate)(STATE_BY_DATABASE, this, () => new Map()); - var state = await (0, _utilsMap.getFromMapOrCreate)(stateCache, namespace, () => (0, _rxState.createRxState)(this, namespace)); - this.states[namespace] = state; - return state; -} -var RxDBStatePlugin = exports.RxDBStatePlugin = { - name: 'state', - rxdb: true, - prototypes: { - RxDatabase(proto) { - proto.addState = addState; - } - } -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/state/index.js.map b/dist/cjs/plugins/state/index.js.map deleted file mode 100644 index e173ba6d5b8..00000000000 --- a/dist/cjs/plugins/state/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_utilsMap","require","_rxState","_helpers","Object","keys","forEach","key","prototype","hasOwnProperty","call","_exportNames","exports","defineProperty","enumerable","get","STATE_BY_DATABASE","WeakMap","addState","namespace","stateCache","getFromMapOrCreate","Map","state","createRxState","states","RxDBStatePlugin","name","rxdb","prototypes","RxDatabase","proto"],"sources":["../../../../src/plugins/state/index.ts"],"sourcesContent":["import type {\n RxDatabase,\n RxPlugin,\n RxState\n} from '../../types/index.d.ts';\nimport { getFromMapOrCreate } from '../utils/utils-map.ts';\nimport { RxStateBase, createRxState } from './rx-state.ts';\n\nexport * from './helpers.ts';\n\ntype StateByPrefix = Map>>;\nconst STATE_BY_DATABASE = new WeakMap();\n\nexport async function addState(\n this: RxDatabase,\n namespace: string = ''\n): Promise> {\n const stateCache = getFromMapOrCreate(\n STATE_BY_DATABASE,\n this,\n () => new Map()\n );\n const state = await getFromMapOrCreate(\n stateCache,\n namespace,\n () => createRxState(this, 
namespace)\n );\n this.states[namespace] = state;\n return state as any;\n}\n\nexport const RxDBStatePlugin: RxPlugin = {\n name: 'state',\n rxdb: true,\n prototypes: {\n RxDatabase(proto: any) {\n proto.addState = addState;\n }\n }\n};\n"],"mappings":";;;;;;;;;;;AAKA,IAAAA,SAAA,GAAAC,OAAA;AACA,IAAAC,QAAA,GAAAD,OAAA;AAEA,IAAAE,QAAA,GAAAF,OAAA;AAAAG,MAAA,CAAAC,IAAA,CAAAF,QAAA,EAAAG,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAJ,QAAA,CAAAI,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAZ,QAAA,CAAAI,GAAA;IAAA;EAAA;AAAA;AAGA,IAAMS,iBAAiB,GAAG,IAAIC,OAAO,CAA4B,CAAC;AAE3D,eAAeC,QAAQA,CAE1BC,SAAiB,GAAG,EAAE,EACH;EACnB,IAAMC,UAAU,GAAG,IAAAC,4BAAkB,EACjCL,iBAAiB,EACjB,IAAI,EACJ,MAAM,IAAIM,GAAG,CAAC,CAClB,CAAC;EACD,IAAMC,KAAK,GAAG,MAAM,IAAAF,4BAAkB,EAClCD,UAAU,EACVD,SAAS,EACT,MAAM,IAAAK,sBAAa,EAAI,IAAI,EAAEL,SAAS,CAC1C,CAAC;EACD,IAAI,CAACM,MAAM,CAACN,SAAS,CAAC,GAAGI,KAAK;EAC9B,OAAOA,KAAK;AAChB;AAEO,IAAMG,eAAyB,GAAAd,OAAA,CAAAc,eAAA,GAAG;EACrCC,IAAI,EAAE,OAAO;EACbC,IAAI,EAAE,IAAI;EACVC,UAAU,EAAE;IACRC,UAAUA,CAACC,KAAU,EAAE;MACnBA,KAAK,CAACb,QAAQ,GAAGA,QAAQ;IAC7B;EACJ;AACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/state/rx-state.js b/dist/cjs/plugins/state/rx-state.js deleted file mode 100644 index 6e0ddfe9b5c..00000000000 --- a/dist/cjs/plugins/state/rx-state.js +++ /dev/null @@ -1,248 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxStateBase = void 0; -exports.createRxState = createRxState; -exports.mergeOperationsIntoState = mergeOperationsIntoState; -var _rxjs = require("rxjs"); -var _overwritable = require("../../overwritable.js"); -var _rxStorageHelper = require("../../rx-storage-helper.js"); -var _index = require("../utils/index.js"); -var _helpers = require("./helpers.js"); -var _rxError = 
require("../../rx-error.js"); -var _hooks = require("../../hooks.js"); -var debugId = 0; - -/** - * RxDB internally used properties are - * prefixed with lodash _ to make them less - * likely to clash with actual state properties - * from the user. - */ -var RxStateBase = exports.RxStateBase = /*#__PURE__*/function () { - // used for debugging - - function RxStateBase(prefix, collection) { - this._id = debugId++; - this._state = {}; - this._nonPersisted = []; - this._writeQueue = _index.PROMISE_RESOLVE_VOID; - this._initDone = false; - this._instanceId = (0, _index.randomCouchString)(_helpers.RX_STATE_COLLECTION_SCHEMA.properties.sId.maxLength); - this._ownEmits$ = new _rxjs.Subject(); - this.prefix = prefix; - this.collection = collection; - this.collection.onDestroy.push(() => this._writeQueue); - this._lastIdQuery = this.collection.findOne({ - sort: [{ - id: 'desc' - }] - }); - // make it "hot" for better write performance - this._lastIdQuery.$.subscribe(); - this.$ = (0, _rxjs.merge)(this._ownEmits$, this.collection.$.pipe((0, _rxjs.tap)(event => { - if (this._initDone && event.operation === 'INSERT' && event.documentData.sId !== this._instanceId) { - mergeOperationsIntoState(this._state, event.documentData.ops); - } - }))).pipe((0, _rxjs.shareReplay)(_index.RXJS_SHARE_REPLAY_DEFAULTS), (0, _rxjs.map)(() => this._state)); - // directly subscribe because of the tap() side effect - this.$.subscribe(); - } - var _proto = RxStateBase.prototype; - _proto.set = async function set(path, modifier) { - this._nonPersisted.push({ - path, - modifier - }); - return this._triggerWrite(); - } - - /** - * To have deterministic writes, - * and to ensure that multiple js realms do not overwrite - * each other, the write happens with incremental ids - * that would throw conflict errors and trigger a retry. 
- */; - _proto._triggerWrite = function _triggerWrite() { - this._writeQueue = this._writeQueue.then(async () => { - if (this._nonPersisted.length === 0) { - return; - } - var useWrites = []; - var done = false; - while (!done) { - var lastIdDoc = await this._lastIdQuery.exec(); - (0, _index.appendToArray)(useWrites, this._nonPersisted); - this._nonPersisted = []; - var nextId = (0, _helpers.nextRxStateId)(lastIdDoc ? lastIdDoc.id : undefined); - try { - /** - * TODO instead of a deep-clone we should - * only clone the parts where we know that they - * will be changed. This would improve performance. - */ - var newState = (0, _index.clone)(this._state); - var ops = []; - for (var index = 0; index < useWrites.length; index++) { - var writeRow = useWrites[index]; - var value = (0, _index.getProperty)(newState, writeRow.path); - var newValue = writeRow.modifier(value); - (0, _index.setProperty)(newState, writeRow.path, newValue); - ops.push({ - k: writeRow.path, - /** - * Here we have to clone the value because - * some storages like the memory storage - * make input data deep-frozen in dev-mode. 
- */ - v: (0, _index.clone)(newValue) - }); - } - await this.collection.insert({ - id: nextId, - sId: this._instanceId, - ops - }); - this._state = newState; - this._ownEmits$.next(this._state); - done = true; - } catch (err) { - if (err.code !== 'CONFLICT') { - throw err; - } - } - } - }).catch(error => { - throw (0, _rxError.newRxError)('SNH', { - name: 'RxState WRITE QUEUE ERROR', - error - }); - }); - return this._writeQueue; - }; - _proto.get = function get(path) { - if (!path) { - return _overwritable.overwritable.deepFreezeWhenDevMode(this._state); - } - return _overwritable.overwritable.deepFreezeWhenDevMode((0, _index.getProperty)(this._state, path)); - }; - _proto.get$ = function get$(path) { - return this.$.pipe((0, _rxjs.map)(() => this.get(path)), (0, _rxjs.startWith)(this.get(path)), (0, _rxjs.distinctUntilChanged)(_index.deepEqual), (0, _rxjs.shareReplay)(_index.RXJS_SHARE_REPLAY_DEFAULTS)); - }; - _proto.get$$ = function get$$(path) { - var obs = this.get$(path); - var reactivity = this.collection.database.getReactivityFactory(); - return reactivity.fromObservable(obs, this.get(path), this.collection.database); - } - - /** - * Merges the state operations into a single write row - * to store space and make recreating the state from - * disc faster. 
- */; - _proto._cleanup = async function _cleanup() { - var firstWrite = await this.collection.findOne({ - sort: [{ - id: 'asc' - }] - }).exec(); - var lastWrite = await this._lastIdQuery.exec(); - if (!firstWrite || !lastWrite) { - return; - } - var firstNr = parseInt(firstWrite.id, 10); - var lastNr = parseInt(lastWrite.id, 10); - if (lastNr - 5 < firstNr) { - // only run if more then 5 write rows - return; - } - - // update whole state object - await this._writeQueue; - await this.set('', () => this._state); - - // delete old ones - await this.collection.find({ - selector: { - id: { - $lte: lastWrite.id - } - } - }).remove(); - }; - return RxStateBase; -}(); -async function createRxState(database, prefix) { - var collectionName = 'rx-state-' + prefix; - await database.addCollections({ - [collectionName]: { - schema: _helpers.RX_STATE_COLLECTION_SCHEMA - } - }); - var collection = database.collections[collectionName]; - var rxState = new RxStateBase(prefix, collection); - - /** - * Directly get the state and put it into memory. - * This ensures we can do non-async accesses to the - * correct state. 
- */ - var done = false; - var checkpoint = undefined; - while (!done) { - var result = await (0, _rxStorageHelper.getChangedDocumentsSince)(collection.storageInstance, 1000, checkpoint); - checkpoint = result.checkpoint; - var documents = result.documents; - if (documents.length === 0) { - done = true; - } else { - for (var index = 0; index < documents.length; index++) { - var document = documents[index]; - mergeOperationsIntoState(rxState._state, document.ops); - } - } - } - rxState._initDone = true; - var proxy = new Proxy(rxState, { - get(target, property) { - if (typeof property !== 'string') { - return target[property]; - } - if (rxState[property]) { - var ret = rxState[property]; - if (typeof ret === 'function') { - return ret.bind(rxState); - } else { - return ret; - } - } - var lastChar = property.charAt(property.length - 1); - if (property.endsWith('$$')) { - var key = property.slice(0, -2); - return rxState.get$$(key); - } else if (lastChar === '$') { - var _key = property.slice(0, -1); - return rxState.get$(_key); - } else { - return rxState.get(property); - } - }, - set(target, newValue, receiver) { - throw new Error('Do not write to RxState'); - } - }); - (0, _hooks.runPluginHooks)('createRxState', { - collection, - state: proxy - }); - return proxy; -} -function mergeOperationsIntoState(state, operations) { - for (var index = 0; index < operations.length; index++) { - var operation = operations[index]; - (0, _index.setProperty)(state, operation.k, (0, _index.clone)(operation.v)); - } -} -//# sourceMappingURL=rx-state.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/state/rx-state.js.map b/dist/cjs/plugins/state/rx-state.js.map deleted file mode 100644 index 9181fcf1917..00000000000 --- a/dist/cjs/plugins/state/rx-state.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"rx-state.js","names":["_rxjs","require","_overwritable","_rxStorageHelper","_index","_helpers","_rxError","_hooks","debugId","RxStateBase","exports","prefix","collection","_id","_state","_nonPersisted","_writeQueue","PROMISE_RESOLVE_VOID","_initDone","_instanceId","randomCouchString","RX_STATE_COLLECTION_SCHEMA","properties","sId","maxLength","_ownEmits$","Subject","onDestroy","push","_lastIdQuery","findOne","sort","id","$","subscribe","merge","pipe","tap","event","operation","documentData","mergeOperationsIntoState","ops","shareReplay","RXJS_SHARE_REPLAY_DEFAULTS","map","_proto","prototype","set","path","modifier","_triggerWrite","then","length","useWrites","done","lastIdDoc","exec","appendToArray","nextId","nextRxStateId","undefined","newState","clone","index","writeRow","value","getProperty","newValue","setProperty","k","v","insert","next","err","code","catch","error","newRxError","name","get","overwritable","deepFreezeWhenDevMode","get$","startWith","distinctUntilChanged","deepEqual","get$$","obs","reactivity","database","getReactivityFactory","fromObservable","_cleanup","firstWrite","lastWrite","firstNr","parseInt","lastNr","find","selector","$lte","remove","createRxState","collectionName","addCollections","schema","collections","rxState","checkpoint","result","getChangedDocumentsSince","storageInstance","documents","document","proxy","Proxy","target","property","ret","bind","lastChar","charAt","endsWith","key","slice","receiver","Error","runPluginHooks","state","operations"],"sources":["../../../../src/plugins/state/rx-state.ts"],"sourcesContent":["import {\n Observable,\n Subject,\n distinctUntilChanged,\n map,\n merge,\n shareReplay,\n startWith,\n tap\n} from 'rxjs';\nimport { overwritable } from '../../overwritable.ts';\nimport { getChangedDocumentsSince } from '../../rx-storage-helper.ts';\nimport type {\n RxCollection,\n RxDatabase,\n RxQuery,\n RxDocument,\n RxError,\n Paths\n} from '../../types';\nimport {\n 
RXJS_SHARE_REPLAY_DEFAULTS,\n getProperty,\n setProperty,\n PROMISE_RESOLVE_VOID,\n appendToArray,\n clone,\n randomCouchString,\n deepEqual\n} from '../utils/index.ts';\nimport {\n RX_STATE_COLLECTION_SCHEMA,\n nextRxStateId\n} from './helpers.ts';\nimport {\n RxStateDocument,\n RxStateOperation,\n RxStateModifier\n} from './types.ts';\nimport { newRxError } from '../../rx-error.ts';\nimport { runPluginHooks } from '../../hooks.ts';\n\n\nlet debugId = 0;\n\n\n/**\n * RxDB internally used properties are\n * prefixed with lodash _ to make them less\n * likely to clash with actual state properties\n * from the user.\n */\nexport class RxStateBase {\n // used for debugging\n public _id: number = debugId++;\n public _state: T | any = {};\n public $: Observable;\n public _lastIdQuery: RxQuery | null>;\n public _nonPersisted: {\n path: string;\n modifier: RxStateModifier;\n }[] = [];\n public _writeQueue = PROMISE_RESOLVE_VOID;\n public _initDone = false;\n public _instanceId = randomCouchString(RX_STATE_COLLECTION_SCHEMA.properties.sId.maxLength);\n public _ownEmits$ = new Subject();\n\n constructor(\n public readonly prefix: string,\n public readonly collection: RxCollection\n ) {\n this.collection.onDestroy.push(() => this._writeQueue);\n this._lastIdQuery = this.collection.findOne({\n sort: [\n { id: 'desc' }\n ]\n });\n // make it \"hot\" for better write performance\n this._lastIdQuery.$.subscribe();\n\n this.$ = merge(\n this._ownEmits$,\n this.collection.$.pipe(\n tap(event => {\n if (\n this._initDone &&\n event.operation === 'INSERT' &&\n event.documentData.sId !== this._instanceId\n ) {\n mergeOperationsIntoState(this._state, event.documentData.ops);\n }\n })\n )\n ).pipe(\n shareReplay(RXJS_SHARE_REPLAY_DEFAULTS),\n map(() => this._state)\n );\n // directly subscribe because of the tap() side effect\n this.$.subscribe();\n }\n\n async set(\n path: Paths | '',\n modifier: RxStateModifier\n ) {\n this._nonPersisted.push({\n path,\n modifier\n });\n return 
this._triggerWrite();\n }\n\n /**\n * To have deterministic writes,\n * and to ensure that multiple js realms do not overwrite\n * each other, the write happens with incremental ids\n * that would throw conflict errors and trigger a retry.\n */\n _triggerWrite() {\n this._writeQueue = this._writeQueue.then(async () => {\n if (this._nonPersisted.length === 0) {\n return;\n }\n let useWrites: typeof this._nonPersisted = [];\n let done = false;\n while (!done) {\n const lastIdDoc = await this._lastIdQuery.exec();\n appendToArray(useWrites, this._nonPersisted);\n this._nonPersisted = [];\n const nextId = nextRxStateId(lastIdDoc ? lastIdDoc.id : undefined);\n try {\n /**\n * TODO instead of a deep-clone we should\n * only clone the parts where we know that they\n * will be changed. This would improve performance.\n */\n const newState = clone(this._state);\n const ops: RxStateOperation[] = [];\n for (let index = 0; index < useWrites.length; index++) {\n const writeRow = useWrites[index];\n const value = getProperty(newState, writeRow.path);\n const newValue = writeRow.modifier(value);\n setProperty(newState, writeRow.path, newValue);\n ops.push({\n k: writeRow.path,\n /**\n * Here we have to clone the value because\n * some storages like the memory storage\n * make input data deep-frozen in dev-mode.\n */\n v: clone(newValue)\n });\n }\n await this.collection.insert({\n id: nextId,\n sId: this._instanceId,\n ops\n });\n this._state = newState;\n this._ownEmits$.next(this._state);\n done = true;\n } catch (err) {\n if ((err as RxError).code !== 'CONFLICT') {\n throw err;\n }\n }\n }\n }).catch(error => {\n throw newRxError('SNH', {\n name: 'RxState WRITE QUEUE ERROR',\n error\n });\n });\n return this._writeQueue;\n }\n\n get(path?: Paths) {\n if (!path) {\n return overwritable.deepFreezeWhenDevMode(this._state);\n }\n return overwritable.deepFreezeWhenDevMode(\n getProperty(this._state, path)\n );\n }\n get$(path?: Paths): Observable {\n return this.$.pipe(\n map(() => 
this.get(path)),\n startWith(this.get(path)),\n distinctUntilChanged(deepEqual),\n shareReplay(RXJS_SHARE_REPLAY_DEFAULTS),\n );\n }\n get$$(path?: Paths): Reactivity {\n const obs = this.get$(path);\n const reactivity = this.collection.database.getReactivityFactory();\n return reactivity.fromObservable(\n obs,\n this.get(path),\n this.collection.database\n );\n }\n\n /**\n * Merges the state operations into a single write row\n * to store space and make recreating the state from\n * disc faster.\n */\n async _cleanup() {\n const firstWrite = await this.collection.findOne({\n sort: [{ id: 'asc' }]\n }).exec();\n const lastWrite = await this._lastIdQuery.exec();\n\n if (!firstWrite || !lastWrite) {\n return;\n }\n\n const firstNr = parseInt(firstWrite.id, 10);\n const lastNr = parseInt(lastWrite.id, 10);\n if ((lastNr - 5) < firstNr) {\n // only run if more then 5 write rows\n return;\n }\n\n // update whole state object\n await this._writeQueue;\n await this.set('', () => this._state);\n\n // delete old ones\n await this.collection.find({\n selector: {\n id: {\n $lte: lastWrite.id\n }\n }\n }).remove();\n }\n}\n\n\nexport async function createRxState(\n database: RxDatabase,\n prefix: string\n): Promise> {\n const collectionName = 'rx-state-' + prefix;\n await database.addCollections({\n [collectionName]: {\n schema: RX_STATE_COLLECTION_SCHEMA as any\n }\n });\n const collection: RxCollection = database.collections[collectionName];\n\n const rxState = new RxStateBase(\n prefix,\n collection\n );\n\n\n /**\n * Directly get the state and put it into memory.\n * This ensures we can do non-async accesses to the\n * correct state.\n */\n let done = false;\n let checkpoint: any = undefined;\n while (!done) {\n const result = await getChangedDocumentsSince(\n collection.storageInstance,\n 1000,\n checkpoint\n );\n checkpoint = result.checkpoint;\n const documents = result.documents;\n if (documents.length === 0) {\n done = true;\n } else {\n for (let index = 0; index < 
documents.length; index++) {\n const document = documents[index];\n mergeOperationsIntoState(rxState._state, document.ops);\n }\n }\n }\n rxState._initDone = true;\n\n const proxy = new Proxy(\n rxState as any,\n {\n get(target, property: any) {\n if (typeof property !== 'string') {\n return target[property];\n }\n if ((rxState as any)[property]) {\n const ret = (rxState as any)[property];\n if (typeof ret === 'function') {\n return ret.bind(rxState);\n } else {\n return ret;\n }\n }\n const lastChar = property.charAt(property.length - 1);\n if (property.endsWith('$$')) {\n const key = property.slice(0, -2);\n return rxState.get$$(key as any);\n } else if (lastChar === '$') {\n const key = property.slice(0, -1);\n return rxState.get$(key as any);\n } else {\n return rxState.get(property as any);\n }\n },\n set(target, newValue, receiver) {\n throw new Error('Do not write to RxState');\n }\n }\n );\n\n runPluginHooks('createRxState', {\n collection,\n state: proxy\n });\n\n return proxy;\n}\n\n\nexport function mergeOperationsIntoState(\n state: T,\n operations: RxStateOperation[]\n) {\n for (let index = 0; index < operations.length; index++) {\n const operation = operations[index];\n setProperty(state, operation.k, clone(operation.v));\n 
}\n}\n"],"mappings":";;;;;;;;AAAA,IAAAA,KAAA,GAAAC,OAAA;AAUA,IAAAC,aAAA,GAAAD,OAAA;AACA,IAAAE,gBAAA,GAAAF,OAAA;AASA,IAAAG,MAAA,GAAAH,OAAA;AAUA,IAAAI,QAAA,GAAAJ,OAAA;AASA,IAAAK,QAAA,GAAAL,OAAA;AACA,IAAAM,MAAA,GAAAN,OAAA;AAGA,IAAIO,OAAO,GAAG,CAAC;;AAGf;AACA;AACA;AACA;AACA;AACA;AALA,IAMaC,WAAW,GAAAC,OAAA,CAAAD,WAAA;EACpB;;EAcA,SAAAA,YACoBE,MAAc,EACdC,UAAyC,EAC3D;IAAA,KAhBKC,GAAG,GAAWL,OAAO,EAAE;IAAA,KACvBM,MAAM,GAAY,CAAC,CAAC;IAAA,KAGpBC,aAAa,GAGd,EAAE;IAAA,KACDC,WAAW,GAAGC,2BAAoB;IAAA,KAClCC,SAAS,GAAG,KAAK;IAAA,KACjBC,WAAW,GAAG,IAAAC,wBAAiB,EAACC,mCAA0B,CAACC,UAAU,CAACC,GAAG,CAACC,SAAS,CAAC;IAAA,KACpFC,UAAU,GAAG,IAAIC,aAAO,CAAI,CAAC;IAAA,KAGhBf,MAAc,GAAdA,MAAc;IAAA,KACdC,UAAyC,GAAzCA,UAAyC;IAEzD,IAAI,CAACA,UAAU,CAACe,SAAS,CAACC,IAAI,CAAC,MAAM,IAAI,CAACZ,WAAW,CAAC;IACtD,IAAI,CAACa,YAAY,GAAG,IAAI,CAACjB,UAAU,CAACkB,OAAO,CAAC;MACxCC,IAAI,EAAE,CACF;QAAEC,EAAE,EAAE;MAAO,CAAC;IAEtB,CAAC,CAAC;IACF;IACA,IAAI,CAACH,YAAY,CAACI,CAAC,CAACC,SAAS,CAAC,CAAC;IAE/B,IAAI,CAACD,CAAC,GAAG,IAAAE,WAAK,EACV,IAAI,CAACV,UAAU,EACf,IAAI,CAACb,UAAU,CAACqB,CAAC,CAACG,IAAI,CAClB,IAAAC,SAAG,EAACC,KAAK,IAAI;MACT,IACI,IAAI,CAACpB,SAAS,IACdoB,KAAK,CAACC,SAAS,KAAK,QAAQ,IAC5BD,KAAK,CAACE,YAAY,CAACjB,GAAG,KAAK,IAAI,CAACJ,WAAW,EAC7C;QACEsB,wBAAwB,CAAC,IAAI,CAAC3B,MAAM,EAAEwB,KAAK,CAACE,YAAY,CAACE,GAAG,CAAC;MACjE;IACJ,CAAC,CACL,CACJ,CAAC,CAACN,IAAI,CACF,IAAAO,iBAAW,EAACC,iCAA0B,CAAC,EACvC,IAAAC,SAAG,EAAC,MAAM,IAAI,CAAC/B,MAAM,CACzB,CAAC;IACD;IACA,IAAI,CAACmB,CAAC,CAACC,SAAS,CAAC,CAAC;EACtB;EAAC,IAAAY,MAAA,GAAArC,WAAA,CAAAsC,SAAA;EAAAD,MAAA,CAEKE,GAAG,GAAT,eAAAA,IACIC,IAAmB,EACnBC,QAAyB,EAC3B;IACE,IAAI,CAACnC,aAAa,CAACa,IAAI,CAAC;MACpBqB,IAAI;MACJC;IACJ,CAAC,CAAC;IACF,OAAO,IAAI,CAACC,aAAa,CAAC,CAAC;EAC/B;;EAEA;AACJ;AACA;AACA;AACA;AACA,KALI;EAAAL,MAAA,CAMAK,aAAa,GAAb,SAAAA,cAAA,EAAgB;IACZ,IAAI,CAACnC,WAAW,GAAG,IAAI,CAACA,WAAW,CAACoC,IAAI,CAAC,YAAY;MACjD,IAAI,IAAI,CAACrC,aAAa,CAACsC,MAAM,KAAK,CAAC,EAAE;QACjC;MACJ;MACA,IAAIC,SAAoC,GAAG,EAAE;MAC7C,IAAIC,IAAI,GAAG,KAAK;MAChB,OAAO,CAACA,IAAI,EAAE;QACV,IAAMC,SAAS,
GAAG,MAAM,IAAI,CAAC3B,YAAY,CAAC4B,IAAI,CAAC,CAAC;QAChD,IAAAC,oBAAa,EAACJ,SAAS,EAAE,IAAI,CAACvC,aAAa,CAAC;QAC5C,IAAI,CAACA,aAAa,GAAG,EAAE;QACvB,IAAM4C,MAAM,GAAG,IAAAC,sBAAa,EAACJ,SAAS,GAAGA,SAAS,CAACxB,EAAE,GAAG6B,SAAS,CAAC;QAClE,IAAI;UACA;AACpB;AACA;AACA;AACA;UACoB,IAAMC,QAAQ,GAAG,IAAAC,YAAK,EAAC,IAAI,CAACjD,MAAM,CAAC;UACnC,IAAM4B,GAAuB,GAAG,EAAE;UAClC,KAAK,IAAIsB,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGV,SAAS,CAACD,MAAM,EAAEW,KAAK,EAAE,EAAE;YACnD,IAAMC,QAAQ,GAAGX,SAAS,CAACU,KAAK,CAAC;YACjC,IAAME,KAAK,GAAG,IAAAC,kBAAW,EAACL,QAAQ,EAAEG,QAAQ,CAAChB,IAAI,CAAC;YAClD,IAAMmB,QAAQ,GAAGH,QAAQ,CAACf,QAAQ,CAACgB,KAAK,CAAC;YACzC,IAAAG,kBAAW,EAACP,QAAQ,EAAEG,QAAQ,CAAChB,IAAI,EAAEmB,QAAQ,CAAC;YAC9C1B,GAAG,CAACd,IAAI,CAAC;cACL0C,CAAC,EAAEL,QAAQ,CAAChB,IAAI;cAChB;AAC5B;AACA;AACA;AACA;cAC4BsB,CAAC,EAAE,IAAAR,YAAK,EAACK,QAAQ;YACrB,CAAC,CAAC;UACN;UACA,MAAM,IAAI,CAACxD,UAAU,CAAC4D,MAAM,CAAC;YACzBxC,EAAE,EAAE2B,MAAM;YACVpC,GAAG,EAAE,IAAI,CAACJ,WAAW;YACrBuB;UACJ,CAAC,CAAC;UACF,IAAI,CAAC5B,MAAM,GAAGgD,QAAQ;UACtB,IAAI,CAACrC,UAAU,CAACgD,IAAI,CAAC,IAAI,CAAC3D,MAAM,CAAC;UACjCyC,IAAI,GAAG,IAAI;QACf,CAAC,CAAC,OAAOmB,GAAG,EAAE;UACV,IAAKA,GAAG,CAAaC,IAAI,KAAK,UAAU,EAAE;YACtC,MAAMD,GAAG;UACb;QACJ;MACJ;IACJ,CAAC,CAAC,CAACE,KAAK,CAACC,KAAK,IAAI;MACd,MAAM,IAAAC,mBAAU,EAAC,KAAK,EAAE;QACpBC,IAAI,EAAE,2BAA2B;QACjCF;MACJ,CAAC,CAAC;IACN,CAAC,CAAC;IACF,OAAO,IAAI,CAAC7D,WAAW;EAC3B,CAAC;EAAA8B,MAAA,CAEDkC,GAAG,GAAH,SAAAA,IAAI/B,IAAe,EAAE;IACjB,IAAI,CAACA,IAAI,EAAE;MACP,OAAOgC,0BAAY,CAACC,qBAAqB,CAAC,IAAI,CAACpE,MAAM,CAAC;IAC1D;IACA,OAAOmE,0BAAY,CAACC,qBAAqB,CACrC,IAAAf,kBAAW,EAAC,IAAI,CAACrD,MAAM,EAAEmC,IAAI,CACjC,CAAC;EACL,CAAC;EAAAH,MAAA,CACDqC,IAAI,GAAJ,SAAAA,KAAKlC,IAAe,EAAmB;IACnC,OAAO,IAAI,CAAChB,CAAC,CAACG,IAAI,CACd,IAAAS,SAAG,EAAC,MAAM,IAAI,CAACmC,GAAG,CAAC/B,IAAI,CAAC,CAAC,EACzB,IAAAmC,eAAS,EAAC,IAAI,CAACJ,GAAG,CAAC/B,IAAI,CAAC,CAAC,EACzB,IAAAoC,0BAAoB,EAACC,gBAAS,CAAC,EAC/B,IAAA3C,iBAAW,EAACC,iCAA0B,CAC1C,CAAC;EACL,CAAC;EAAAE,MAAA,CACDyC,KAAK,GAAL,SAAAA,MAAMtC,IAAe,EAAc;IAC/B,IAAMuC,GAAG,GAAG,IAAI,CAACL,IA
AI,CAAClC,IAAI,CAAC;IAC3B,IAAMwC,UAAU,GAAG,IAAI,CAAC7E,UAAU,CAAC8E,QAAQ,CAACC,oBAAoB,CAAC,CAAC;IAClE,OAAOF,UAAU,CAACG,cAAc,CAC5BJ,GAAG,EACH,IAAI,CAACR,GAAG,CAAC/B,IAAI,CAAC,EACd,IAAI,CAACrC,UAAU,CAAC8E,QACpB,CAAC;EACL;;EAEA;AACJ;AACA;AACA;AACA,KAJI;EAAA5C,MAAA,CAKM+C,QAAQ,GAAd,eAAAA,SAAA,EAAiB;IACb,IAAMC,UAAU,GAAG,MAAM,IAAI,CAAClF,UAAU,CAACkB,OAAO,CAAC;MAC7CC,IAAI,EAAE,CAAC;QAAEC,EAAE,EAAE;MAAM,CAAC;IACxB,CAAC,CAAC,CAACyB,IAAI,CAAC,CAAC;IACT,IAAMsC,SAAS,GAAG,MAAM,IAAI,CAAClE,YAAY,CAAC4B,IAAI,CAAC,CAAC;IAEhD,IAAI,CAACqC,UAAU,IAAI,CAACC,SAAS,EAAE;MAC3B;IACJ;IAEA,IAAMC,OAAO,GAAGC,QAAQ,CAACH,UAAU,CAAC9D,EAAE,EAAE,EAAE,CAAC;IAC3C,IAAMkE,MAAM,GAAGD,QAAQ,CAACF,SAAS,CAAC/D,EAAE,EAAE,EAAE,CAAC;IACzC,IAAKkE,MAAM,GAAG,CAAC,GAAIF,OAAO,EAAE;MACxB;MACA;IACJ;;IAEA;IACA,MAAM,IAAI,CAAChF,WAAW;IACtB,MAAM,IAAI,CAACgC,GAAG,CAAC,EAAE,EAAE,MAAM,IAAI,CAAClC,MAAM,CAAC;;IAErC;IACA,MAAM,IAAI,CAACF,UAAU,CAACuF,IAAI,CAAC;MACvBC,QAAQ,EAAE;QACNpE,EAAE,EAAE;UACAqE,IAAI,EAAEN,SAAS,CAAC/D;QACpB;MACJ;IACJ,CAAC,CAAC,CAACsE,MAAM,CAAC,CAAC;EACf,CAAC;EAAA,OAAA7F,WAAA;AAAA;AAIE,eAAe8F,aAAaA,CAC/Bb,QAAoB,EACpB/E,MAAc,EACS;EACvB,IAAM6F,cAAc,GAAG,WAAW,GAAG7F,MAAM;EAC3C,MAAM+E,QAAQ,CAACe,cAAc,CAAC;IAC1B,CAACD,cAAc,GAAG;MACdE,MAAM,EAAErF;IACZ;EACJ,CAAC,CAAC;EACF,IAAMT,UAAyC,GAAG8E,QAAQ,CAACiB,WAAW,CAACH,cAAc,CAAC;EAEtF,IAAMI,OAAO,GAAG,IAAInG,WAAW,CAC3BE,MAAM,EACNC,UACJ,CAAC;;EAGD;AACJ;AACA;AACA;AACA;EACI,IAAI2C,IAAI,GAAG,KAAK;EAChB,IAAIsD,UAAe,GAAGhD,SAAS;EAC/B,OAAO,CAACN,IAAI,EAAE;IACV,IAAMuD,MAAM,GAAG,MAAM,IAAAC,yCAAwB,EACzCnG,UAAU,CAACoG,eAAe,EAC1B,IAAI,EACJH,UACJ,CAAC;IACDA,UAAU,GAAGC,MAAM,CAACD,UAAU;IAC9B,IAAMI,SAAS,GAAGH,MAAM,CAACG,SAAS;IAClC,IAAIA,SAAS,CAAC5D,MAAM,KAAK,CAAC,EAAE;MACxBE,IAAI,GAAG,IAAI;IACf,CAAC,MAAM;MACH,KAAK,IAAIS,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGiD,SAAS,CAAC5D,MAAM,EAAEW,KAAK,EAAE,EAAE;QACnD,IAAMkD,QAAQ,GAAGD,SAAS,CAACjD,KAAK,CAAC;QACjCvB,wBAAwB,CAACmE,OAAO,CAAC9F,MAAM,EAAEoG,QAAQ,CAACxE,GAAG,CAAC;MAC1D;IACJ;EACJ;EACAkE,OAAO,CAAC1F,SAAS,GAAG,IAAI;EAExB,IAAMiG,KAAK,GAAG,IAAIC,KAAK,CACnBR,
OAAO,EACP;IACI5B,GAAGA,CAACqC,MAAM,EAAEC,QAAa,EAAE;MACvB,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;QAC9B,OAAOD,MAAM,CAACC,QAAQ,CAAC;MAC3B;MACA,IAAKV,OAAO,CAASU,QAAQ,CAAC,EAAE;QAC5B,IAAMC,GAAG,GAAIX,OAAO,CAASU,QAAQ,CAAC;QACtC,IAAI,OAAOC,GAAG,KAAK,UAAU,EAAE;UAC3B,OAAOA,GAAG,CAACC,IAAI,CAACZ,OAAO,CAAC;QAC5B,CAAC,MAAM;UACH,OAAOW,GAAG;QACd;MACJ;MACA,IAAME,QAAQ,GAAGH,QAAQ,CAACI,MAAM,CAACJ,QAAQ,CAACjE,MAAM,GAAG,CAAC,CAAC;MACrD,IAAIiE,QAAQ,CAACK,QAAQ,CAAC,IAAI,CAAC,EAAE;QACzB,IAAMC,GAAG,GAAGN,QAAQ,CAACO,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QACjC,OAAOjB,OAAO,CAACrB,KAAK,CAACqC,GAAU,CAAC;MACpC,CAAC,MAAM,IAAIH,QAAQ,KAAK,GAAG,EAAE;QACzB,IAAMG,IAAG,GAAGN,QAAQ,CAACO,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QACjC,OAAOjB,OAAO,CAACzB,IAAI,CAACyC,IAAU,CAAC;MACnC,CAAC,MAAM;QACH,OAAOhB,OAAO,CAAC5B,GAAG,CAACsC,QAAe,CAAC;MACvC;IACJ,CAAC;IACDtE,GAAGA,CAACqE,MAAM,EAAEjD,QAAQ,EAAE0D,QAAQ,EAAE;MAC5B,MAAM,IAAIC,KAAK,CAAC,yBAAyB,CAAC;IAC9C;EACJ,CACJ,CAAC;EAED,IAAAC,qBAAc,EAAC,eAAe,EAAE;IAC5BpH,UAAU;IACVqH,KAAK,EAAEd;EACX,CAAC,CAAC;EAEF,OAAOA,KAAK;AAChB;AAGO,SAAS1E,wBAAwBA,CACpCwF,KAAQ,EACRC,UAA8B,EAChC;EACE,KAAK,IAAIlE,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGkE,UAAU,CAAC7E,MAAM,EAAEW,KAAK,EAAE,EAAE;IACpD,IAAMzB,SAAS,GAAG2F,UAAU,CAAClE,KAAK,CAAC;IACnC,IAAAK,kBAAW,EAAC4D,KAAK,EAAE1F,SAAS,CAAC+B,CAAC,EAAE,IAAAP,YAAK,EAACxB,SAAS,CAACgC,CAAC,CAAC,CAAC;EACvD;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/state/types.js b/dist/cjs/plugins/state/types.js deleted file mode 100644 index 2f0e4146c53..00000000000 --- a/dist/cjs/plugins/state/types.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/state/types.js.map b/dist/cjs/plugins/state/types.js.map deleted file mode 100644 index b7684b38e0f..00000000000 --- a/dist/cjs/plugins/state/types.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"types.js","names":[],"sources":["../../../../src/plugins/state/types.ts"],"sourcesContent":["import { DeepReadonly 
} from '../../types';\n\n/**\n * \n */\nexport type RxStateDocument = {\n /**\n * Ensures that when multiple\n * javascript realms write at the same time,\n * we do not overwrite each other but instead\n * one write must conflict-error and retry.\n * The clock value is also the primary key.\n * The clock value contains incremental numbers\n * in a string format like '0001', '0123'...\n */\n id: string;\n /**\n * Id of the JavaScript Instance of RxState\n * that did the write. Used to optimise performance\n * by not running these modifiers twice.\n */\n sId: string;\n ops: RxStateOperation[]\n};\n\n\nexport type RxStateOperation = {\n k: string;\n v: any;\n};\n\nexport type RxStateModifier = (preValue: DeepReadonly) => any;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-denokv/denokv-helper.js b/dist/cjs/plugins/storage-denokv/denokv-helper.js deleted file mode 100644 index 983ce889680..00000000000 --- a/dist/cjs/plugins/storage-denokv/denokv-helper.js +++ /dev/null @@ -1,30 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RX_STORAGE_NAME_DENOKV = exports.DENOKV_DOCUMENT_ROOT_PATH = exports.CLEANUP_INDEX = void 0; -exports.getDenoGlobal = getDenoGlobal; -exports.getDenoKVIndexName = getDenoKVIndexName; -var RX_STORAGE_NAME_DENOKV = exports.RX_STORAGE_NAME_DENOKV = 'denokv'; -function getDenoKVIndexName(index) { - return index.join('|'); -} - -/** - * Used for non-index rows that contain the document data, - * not just a documentId - */ -var DENOKV_DOCUMENT_ROOT_PATH = exports.DENOKV_DOCUMENT_ROOT_PATH = '||'; -var CLEANUP_INDEX = exports.CLEANUP_INDEX = ['_deleted', '_meta.lwt']; - -/** - * Get the global Deno variable from globalThis.Deno - * so that compiling with plain typescript does not fail. - * TODO download the deno typings from somewhere - * and use them. 
- */ -function getDenoGlobal() { - return globalThis.Deno; -} -//# sourceMappingURL=denokv-helper.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-denokv/denokv-helper.js.map b/dist/cjs/plugins/storage-denokv/denokv-helper.js.map deleted file mode 100644 index 32ba66297b9..00000000000 --- a/dist/cjs/plugins/storage-denokv/denokv-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"denokv-helper.js","names":["RX_STORAGE_NAME_DENOKV","exports","getDenoKVIndexName","index","join","DENOKV_DOCUMENT_ROOT_PATH","CLEANUP_INDEX","getDenoGlobal","globalThis","Deno"],"sources":["../../../../src/plugins/storage-denokv/denokv-helper.ts"],"sourcesContent":["export const RX_STORAGE_NAME_DENOKV = 'denokv';\n\nexport function getDenoKVIndexName(index: string[]): string {\n return index.join('|');\n}\n\n/**\n * Used for non-index rows that contain the document data,\n * not just a documentId\n */\nexport const DENOKV_DOCUMENT_ROOT_PATH = '||';\n\nexport const CLEANUP_INDEX: string[] = ['_deleted', '_meta.lwt'];\n\n\n/**\n * Get the global Deno variable from globalThis.Deno\n * so that compiling with plain typescript does not fail.\n * TODO download the deno typings from somewhere\n * and use them.\n */\nexport function getDenoGlobal(): any {\n return (globalThis as any).Deno;\n}\n"],"mappings":";;;;;;;;AAAO,IAAMA,sBAAsB,GAAAC,OAAA,CAAAD,sBAAA,GAAG,QAAQ;AAEvC,SAASE,kBAAkBA,CAACC,KAAe,EAAU;EACxD,OAAOA,KAAK,CAACC,IAAI,CAAC,GAAG,CAAC;AAC1B;;AAEA;AACA;AACA;AACA;AACO,IAAMC,yBAAyB,GAAAJ,OAAA,CAAAI,yBAAA,GAAG,IAAI;AAEtC,IAAMC,aAAuB,GAAAL,OAAA,CAAAK,aAAA,GAAG,CAAC,UAAU,EAAE,WAAW,CAAC;;AAGhE;AACA;AACA;AACA;AACA;AACA;AACO,SAASC,aAAaA,CAAA,EAAQ;EACjC,OAAQC,UAAU,CAASC,IAAI;AACnC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-denokv/denokv-query.js b/dist/cjs/plugins/storage-denokv/denokv-query.js deleted file mode 100644 index 4c9ba2c9040..00000000000 --- a/dist/cjs/plugins/storage-denokv/denokv-query.js +++ /dev/null @@ 
-1,91 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.queryDenoKV = queryDenoKV; -var _customIndex = require("../../custom-index.js"); -var _index = require("../../plugins/utils/index.js"); -var _rxQueryHelper = require("../../rx-query-helper.js"); -var _denokvHelper = require("./denokv-helper.js"); -async function queryDenoKV(instance, preparedQuery) { - var queryPlan = preparedQuery.queryPlan; - var query = preparedQuery.query; - var skip = query.skip ? query.skip : 0; - var limit = query.limit ? query.limit : Infinity; - var skipPlusLimit = skip + limit; - var queryPlanFields = queryPlan.index; - var mustManuallyResort = !queryPlan.sortSatisfiedByIndex; - var queryMatcher = false; - if (!queryPlan.selectorSatisfiedByIndex) { - queryMatcher = (0, _rxQueryHelper.getQueryMatcher)(instance.schema, preparedQuery.query); - } - var kv = await instance.kvPromise; - var indexForName = queryPlanFields.slice(0); - var indexName = (0, _denokvHelper.getDenoKVIndexName)(indexForName); - var indexMeta = (0, _index.ensureNotFalsy)(instance.internals.indexes[indexName]); - var lowerBound = queryPlan.startKeys; - var lowerBoundString = (0, _customIndex.getStartIndexStringFromLowerBound)(instance.schema, indexForName, lowerBound); - if (!queryPlan.inclusiveStart) { - lowerBoundString = (0, _customIndex.changeIndexableStringByOneQuantum)(lowerBoundString, 1); - } - var upperBound = queryPlan.endKeys; - var upperBoundString = (0, _customIndex.getStartIndexStringFromUpperBound)(instance.schema, indexForName, upperBound); - if (queryPlan.inclusiveEnd) { - upperBoundString = (0, _customIndex.changeIndexableStringByOneQuantum)(upperBoundString, +1); - } - var result = []; - - /** - * TODO for whatever reason the keySelectors like firstGreaterThan etc. - * do not work properly. So we have to hack here to find the correct - * document in case lowerBoundString===upperBoundString. - * This likely must be fixed in the foundationdb library. 
- * When it is fixed, we do not need this if-case and instead - * can rely on .getRangeBatch() in all cases. - */ - if (lowerBoundString === upperBoundString) { - var singleDocResult = await kv.get([instance.keySpace, indexMeta.indexId, lowerBoundString], instance.kvOptions); - if (singleDocResult.value) { - var docId = singleDocResult.value; - var docDataResult = await kv.get([instance.keySpace, _denokvHelper.DENOKV_DOCUMENT_ROOT_PATH, docId], instance.kvOptions); - var docData = (0, _index.ensureNotFalsy)(docDataResult.value); - if (!queryMatcher || queryMatcher(docData)) { - result.push(docData); - } - } - return { - documents: result - }; - } - var range = kv.list({ - start: [instance.keySpace, indexMeta.indexId, lowerBoundString], - end: [instance.keySpace, indexMeta.indexId, upperBoundString] - }, { - consistency: instance.settings.consistencyLevel, - limit: !mustManuallyResort && queryPlan.selectorSatisfiedByIndex ? skipPlusLimit : undefined, - batchSize: instance.settings.batchSize - }); - for await (var indexDocEntry of range) { - var _docId = indexDocEntry.value; - var _docDataResult = await kv.get([instance.keySpace, _denokvHelper.DENOKV_DOCUMENT_ROOT_PATH, _docId], instance.kvOptions); - var _docData = (0, _index.ensureNotFalsy)(_docDataResult.value); - if (!queryMatcher || queryMatcher(_docData)) { - result.push(_docData); - } - if (!mustManuallyResort && result.length === skipPlusLimit) { - break; - } - } - if (mustManuallyResort) { - var sortComparator = (0, _rxQueryHelper.getSortComparator)(instance.schema, preparedQuery.query); - result = result.sort(sortComparator); - } - - // apply skip and limit boundaries. 
- result = result.slice(skip, skipPlusLimit); - return { - documents: result - }; -} -//# sourceMappingURL=denokv-query.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-denokv/denokv-query.js.map b/dist/cjs/plugins/storage-denokv/denokv-query.js.map deleted file mode 100644 index 0f3d28be9d4..00000000000 --- a/dist/cjs/plugins/storage-denokv/denokv-query.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"denokv-query.js","names":["_customIndex","require","_index","_rxQueryHelper","_denokvHelper","queryDenoKV","instance","preparedQuery","queryPlan","query","skip","limit","Infinity","skipPlusLimit","queryPlanFields","index","mustManuallyResort","sortSatisfiedByIndex","queryMatcher","selectorSatisfiedByIndex","getQueryMatcher","schema","kv","kvPromise","indexForName","slice","indexName","getDenoKVIndexName","indexMeta","ensureNotFalsy","internals","indexes","lowerBound","startKeys","lowerBoundString","getStartIndexStringFromLowerBound","inclusiveStart","changeIndexableStringByOneQuantum","upperBound","endKeys","upperBoundString","getStartIndexStringFromUpperBound","inclusiveEnd","result","singleDocResult","get","keySpace","indexId","kvOptions","value","docId","docDataResult","DENOKV_DOCUMENT_ROOT_PATH","docData","push","documents","range","list","start","end","consistency","settings","consistencyLevel","undefined","batchSize","indexDocEntry","length","sortComparator","getSortComparator","sort"],"sources":["../../../../src/plugins/storage-denokv/denokv-query.ts"],"sourcesContent":["import {\n changeIndexableStringByOneQuantum,\n getStartIndexStringFromLowerBound,\n getStartIndexStringFromUpperBound\n} from '../../custom-index.ts';\nimport type {\n PreparedQuery,\n QueryMatcher,\n RxDocumentData,\n RxStorageQueryResult\n} from '../../types/index.d.ts';\nimport { ensureNotFalsy } from '../../plugins/utils/index.ts';\nimport { getQueryMatcher, getSortComparator } from '../../rx-query-helper.ts';\nimport { RxStorageInstanceDenoKV } from 
\"./rx-storage-instance-denokv.ts\";\nimport { DENOKV_DOCUMENT_ROOT_PATH, getDenoKVIndexName } from \"./denokv-helper.ts\";\n\nexport async function queryDenoKV(\n instance: RxStorageInstanceDenoKV,\n preparedQuery: PreparedQuery\n): Promise> {\n const queryPlan = preparedQuery.queryPlan;\n const query = preparedQuery.query;\n const skip = query.skip ? query.skip : 0;\n const limit = query.limit ? query.limit : Infinity;\n const skipPlusLimit = skip + limit;\n const queryPlanFields: string[] = queryPlan.index;\n const mustManuallyResort = !queryPlan.sortSatisfiedByIndex;\n\n\n let queryMatcher: QueryMatcher> | false = false;\n if (!queryPlan.selectorSatisfiedByIndex) {\n queryMatcher = getQueryMatcher(\n instance.schema,\n preparedQuery.query\n );\n }\n\n const kv = await instance.kvPromise;\n\n const indexForName = queryPlanFields.slice(0);\n const indexName = getDenoKVIndexName(indexForName);\n const indexMeta = ensureNotFalsy(instance.internals.indexes[indexName]);\n\n let lowerBound: any[] = queryPlan.startKeys;\n let lowerBoundString = getStartIndexStringFromLowerBound(\n instance.schema,\n indexForName,\n lowerBound\n );\n if (!queryPlan.inclusiveStart) {\n lowerBoundString = changeIndexableStringByOneQuantum(lowerBoundString, 1);\n }\n\n let upperBound: any[] = queryPlan.endKeys;\n let upperBoundString = getStartIndexStringFromUpperBound(\n instance.schema,\n indexForName,\n upperBound\n );\n\n if (queryPlan.inclusiveEnd) {\n upperBoundString = changeIndexableStringByOneQuantum(upperBoundString, +1);\n }\n\n\n let result: RxDocumentData[] = [];\n\n\n /**\n * TODO for whatever reason the keySelectors like firstGreaterThan etc.\n * do not work properly. 
So we have to hack here to find the correct\n * document in case lowerBoundString===upperBoundString.\n * This likely must be fixed in the foundationdb library.\n * When it is fixed, we do not need this if-case and instead\n * can rely on .getRangeBatch() in all cases.\n */\n if (lowerBoundString === upperBoundString) {\n const singleDocResult = await kv.get([instance.keySpace, indexMeta.indexId, lowerBoundString], instance.kvOptions);\n if (singleDocResult.value) {\n const docId: string = singleDocResult.value;\n const docDataResult = await kv.get([instance.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId], instance.kvOptions);\n const docData = ensureNotFalsy(docDataResult.value);\n if (!queryMatcher || queryMatcher(docData)) {\n result.push(docData);\n }\n }\n return {\n documents: result\n };\n }\n\n const range = kv.list({\n start: [instance.keySpace, indexMeta.indexId, lowerBoundString],\n end: [instance.keySpace, indexMeta.indexId, upperBoundString]\n }, {\n consistency: instance.settings.consistencyLevel,\n limit: (!mustManuallyResort && queryPlan.selectorSatisfiedByIndex) ? 
skipPlusLimit : undefined,\n batchSize: instance.settings.batchSize\n });\n\n for await (const indexDocEntry of range) {\n const docId = indexDocEntry.value;\n const docDataResult = await kv.get([instance.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId], instance.kvOptions);\n const docData = ensureNotFalsy(docDataResult.value);\n if (!queryMatcher || queryMatcher(docData)) {\n result.push(docData);\n }\n if (\n !mustManuallyResort &&\n result.length === skipPlusLimit\n ) {\n break;\n }\n }\n\n if (mustManuallyResort) {\n const sortComparator = getSortComparator(instance.schema, preparedQuery.query);\n result = result.sort(sortComparator);\n }\n\n // apply skip and limit boundaries.\n result = result.slice(skip, skipPlusLimit);\n\n return {\n documents: result\n };\n}\n"],"mappings":";;;;;;AAAA,IAAAA,YAAA,GAAAC,OAAA;AAWA,IAAAC,MAAA,GAAAD,OAAA;AACA,IAAAE,cAAA,GAAAF,OAAA;AAEA,IAAAG,aAAA,GAAAH,OAAA;AAEO,eAAeI,WAAWA,CAC7BC,QAA4C,EAC5CC,aAAuC,EACC;EACxC,IAAMC,SAAS,GAAGD,aAAa,CAACC,SAAS;EACzC,IAAMC,KAAK,GAAGF,aAAa,CAACE,KAAK;EACjC,IAAMC,IAAI,GAAGD,KAAK,CAACC,IAAI,GAAGD,KAAK,CAACC,IAAI,GAAG,CAAC;EACxC,IAAMC,KAAK,GAAGF,KAAK,CAACE,KAAK,GAAGF,KAAK,CAACE,KAAK,GAAGC,QAAQ;EAClD,IAAMC,aAAa,GAAGH,IAAI,GAAGC,KAAK;EAClC,IAAMG,eAAyB,GAAGN,SAAS,CAACO,KAAK;EACjD,IAAMC,kBAAkB,GAAG,CAACR,SAAS,CAACS,oBAAoB;EAG1D,IAAIC,YAA6D,GAAG,KAAK;EACzE,IAAI,CAACV,SAAS,CAACW,wBAAwB,EAAE;IACrCD,YAAY,GAAG,IAAAE,8BAAe,EAC1Bd,QAAQ,CAACe,MAAM,EACfd,aAAa,CAACE,KAClB,CAAC;EACL;EAEA,IAAMa,EAAE,GAAG,MAAMhB,QAAQ,CAACiB,SAAS;EAEnC,IAAMC,YAAY,GAAGV,eAAe,CAACW,KAAK,CAAC,CAAC,CAAC;EAC7C,IAAMC,SAAS,GAAG,IAAAC,gCAAkB,EAACH,YAAY,CAAC;EAClD,IAAMI,SAAS,GAAG,IAAAC,qBAAc,EAACvB,QAAQ,CAACwB,SAAS,CAACC,OAAO,CAACL,SAAS,CAAC,CAAC;EAEvE,IAAIM,UAAiB,GAAGxB,SAAS,CAACyB,SAAS;EAC3C,IAAIC,gBAAgB,GAAG,IAAAC,8CAAiC,EACpD7B,QAAQ,CAACe,MAAM,EACfG,YAAY,EACZQ,UACJ,CAAC;EACD,IAAI,CAACxB,SAAS,CAAC4B,cAAc,EAAE;IAC3BF,gBAAgB,GAAG,IAAAG,8CAAiC,EAACH,gBAAgB,EAAE,CAAC,CAAC;EAC7E;EAEA,IAAII,UAAiB,GAAG9B,SAAS,CAAC+B,OAAO;EACzC,IAAIC,gBAAgB,GAAG,IAAAC,8
CAAiC,EACpDnC,QAAQ,CAACe,MAAM,EACfG,YAAY,EACZc,UACJ,CAAC;EAED,IAAI9B,SAAS,CAACkC,YAAY,EAAE;IACxBF,gBAAgB,GAAG,IAAAH,8CAAiC,EAACG,gBAAgB,EAAE,CAAC,CAAC,CAAC;EAC9E;EAGA,IAAIG,MAAmC,GAAG,EAAE;;EAG5C;AACJ;AACA;AACA;AACA;AACA;AACA;AACA;EACI,IAAIT,gBAAgB,KAAKM,gBAAgB,EAAE;IACvC,IAAMI,eAAe,GAAG,MAAMtB,EAAE,CAACuB,GAAG,CAAC,CAACvC,QAAQ,CAACwC,QAAQ,EAAElB,SAAS,CAACmB,OAAO,EAAEb,gBAAgB,CAAC,EAAE5B,QAAQ,CAAC0C,SAAS,CAAC;IAClH,IAAIJ,eAAe,CAACK,KAAK,EAAE;MACvB,IAAMC,KAAa,GAAGN,eAAe,CAACK,KAAK;MAC3C,IAAME,aAAa,GAAG,MAAM7B,EAAE,CAACuB,GAAG,CAAC,CAACvC,QAAQ,CAACwC,QAAQ,EAAEM,uCAAyB,EAAEF,KAAK,CAAC,EAAE5C,QAAQ,CAAC0C,SAAS,CAAC;MAC7G,IAAMK,OAAO,GAAG,IAAAxB,qBAAc,EAACsB,aAAa,CAACF,KAAK,CAAC;MACnD,IAAI,CAAC/B,YAAY,IAAIA,YAAY,CAACmC,OAAO,CAAC,EAAE;QACxCV,MAAM,CAACW,IAAI,CAACD,OAAO,CAAC;MACxB;IACJ;IACA,OAAO;MACHE,SAAS,EAAEZ;IACf,CAAC;EACL;EAEA,IAAMa,KAAK,GAAGlC,EAAE,CAACmC,IAAI,CAAC;IAClBC,KAAK,EAAE,CAACpD,QAAQ,CAACwC,QAAQ,EAAElB,SAAS,CAACmB,OAAO,EAAEb,gBAAgB,CAAC;IAC/DyB,GAAG,EAAE,CAACrD,QAAQ,CAACwC,QAAQ,EAAElB,SAAS,CAACmB,OAAO,EAAEP,gBAAgB;EAChE,CAAC,EAAE;IACCoB,WAAW,EAAEtD,QAAQ,CAACuD,QAAQ,CAACC,gBAAgB;IAC/CnD,KAAK,EAAG,CAACK,kBAAkB,IAAIR,SAAS,CAACW,wBAAwB,GAAIN,aAAa,GAAGkD,SAAS;IAC9FC,SAAS,EAAE1D,QAAQ,CAACuD,QAAQ,CAACG;EACjC,CAAC,CAAC;EAEF,WAAW,IAAMC,aAAa,IAAIT,KAAK,EAAE;IACrC,IAAMN,MAAK,GAAGe,aAAa,CAAChB,KAAK;IACjC,IAAME,cAAa,GAAG,MAAM7B,EAAE,CAACuB,GAAG,CAAC,CAACvC,QAAQ,CAACwC,QAAQ,EAAEM,uCAAyB,EAAEF,MAAK,CAAC,EAAE5C,QAAQ,CAAC0C,SAAS,CAAC;IAC7G,IAAMK,QAAO,GAAG,IAAAxB,qBAAc,EAACsB,cAAa,CAACF,KAAK,CAAC;IACnD,IAAI,CAAC/B,YAAY,IAAIA,YAAY,CAACmC,QAAO,CAAC,EAAE;MACxCV,MAAM,CAACW,IAAI,CAACD,QAAO,CAAC;IACxB;IACA,IACI,CAACrC,kBAAkB,IACnB2B,MAAM,CAACuB,MAAM,KAAKrD,aAAa,EACjC;MACE;IACJ;EACJ;EAEA,IAAIG,kBAAkB,EAAE;IACpB,IAAMmD,cAAc,GAAG,IAAAC,gCAAiB,EAAC9D,QAAQ,CAACe,MAAM,EAAEd,aAAa,CAACE,KAAK,CAAC;IAC9EkC,MAAM,GAAGA,MAAM,CAAC0B,IAAI,CAACF,cAAc,CAAC;EACxC;;EAEA;EACAxB,MAAM,GAAGA,MAAM,CAAClB,KAAK,CAACf,IAAI,EAAEG,aAAa,CAAC;EAE1C,OAAO;IACH0C,SAAS,EAAEZ;EACf,CAAC;AACL","ignoreList":[]} \ No 
newline at end of file diff --git a/dist/cjs/plugins/storage-denokv/denokv-types.js b/dist/cjs/plugins/storage-denokv/denokv-types.js deleted file mode 100644 index 49fa0a6ac92..00000000000 --- a/dist/cjs/plugins/storage-denokv/denokv-types.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=denokv-types.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-denokv/denokv-types.js.map b/dist/cjs/plugins/storage-denokv/denokv-types.js.map deleted file mode 100644 index 4f247a383aa..00000000000 --- a/dist/cjs/plugins/storage-denokv/denokv-types.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"denokv-types.js","names":[],"sources":["../../../../src/plugins/storage-denokv/denokv-types.ts"],"sourcesContent":["import type { RxDocumentData } from \"../../types/index.d.ts\";\n\nexport type DenoKVSettings = {\n consistencyLevel: \"strong\" | \"eventual\";\n openKvPath?: string;\n batchSize?: number;\n};\nexport type DenoKVStorageInternals = {\n indexes: {\n [indexName: string]: DenoKVIndexMeta;\n };\n};\n\nexport type DenoKVIndexMeta = {\n indexId: string;\n indexName: string;\n index: string[];\n getIndexableString: (doc: RxDocumentData) => string;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-denokv/index.js b/dist/cjs/plugins/storage-denokv/index.js deleted file mode 100644 index 93f160fd3fb..00000000000 --- a/dist/cjs/plugins/storage-denokv/index.js +++ /dev/null @@ -1,31 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxStorageDenoKV = void 0; -exports.getRxStorageDenoKV = getRxStorageDenoKV; -var _rxStorageHelper = require("../../rx-storage-helper.js"); -var _denokvHelper = require("./denokv-helper.js"); -var _rxStorageInstanceDenokv = require("./rx-storage-instance-denokv.js"); -var _utilsRxdbVersion = require("../utils/utils-rxdb-version.js"); -var RxStorageDenoKV = exports.RxStorageDenoKV = /*#__PURE__*/function 
() { - function RxStorageDenoKV(settings) { - this.name = _denokvHelper.RX_STORAGE_NAME_DENOKV; - this.rxdbVersion = _utilsRxdbVersion.RXDB_VERSION; - this.settings = settings; - } - var _proto = RxStorageDenoKV.prototype; - _proto.createStorageInstance = function createStorageInstance(params) { - (0, _rxStorageHelper.ensureRxStorageInstanceParamsAreCorrect)(params); - return (0, _rxStorageInstanceDenokv.createDenoKVStorageInstance)(this, params, this.settings); - }; - return RxStorageDenoKV; -}(); -function getRxStorageDenoKV(settings = { - consistencyLevel: 'strong' -}) { - var storage = new RxStorageDenoKV(settings); - return storage; -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-denokv/index.js.map b/dist/cjs/plugins/storage-denokv/index.js.map deleted file mode 100644 index 67b617d650d..00000000000 --- a/dist/cjs/plugins/storage-denokv/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_rxStorageHelper","require","_denokvHelper","_rxStorageInstanceDenokv","_utilsRxdbVersion","RxStorageDenoKV","exports","settings","name","RX_STORAGE_NAME_DENOKV","rxdbVersion","RXDB_VERSION","_proto","prototype","createStorageInstance","params","ensureRxStorageInstanceParamsAreCorrect","createDenoKVStorageInstance","getRxStorageDenoKV","consistencyLevel","storage"],"sources":["../../../../src/plugins/storage-denokv/index.ts"],"sourcesContent":["import type {\n RxStorage,\n RxStorageInstanceCreationParams\n} from '../../types/index.d.ts';\nimport { ensureRxStorageInstanceParamsAreCorrect } from '../../rx-storage-helper.ts';\nimport type { DenoKVSettings, DenoKVStorageInternals } from './denokv-types.ts';\nimport { RX_STORAGE_NAME_DENOKV } from \"./denokv-helper.ts\";\nimport { RxStorageInstanceDenoKV, createDenoKVStorageInstance } from \"./rx-storage-instance-denokv.ts\";\nimport { RXDB_VERSION } from '../utils/utils-rxdb-version.ts';\n\n\n\nexport class RxStorageDenoKV implements RxStorage, 
DenoKVSettings> {\n public name = RX_STORAGE_NAME_DENOKV;\n public readonly rxdbVersion = RXDB_VERSION;\n\n constructor(\n public settings: DenoKVSettings\n ) { }\n\n public createStorageInstance(\n params: RxStorageInstanceCreationParams\n ): Promise> {\n ensureRxStorageInstanceParamsAreCorrect(params);\n return createDenoKVStorageInstance(this, params, this.settings);\n }\n}\n\n\nexport function getRxStorageDenoKV(\n settings: DenoKVSettings = {\n consistencyLevel: 'strong'\n }\n): RxStorageDenoKV {\n const storage = new RxStorageDenoKV(settings);\n return storage;\n}\n"],"mappings":";;;;;;;AAIA,IAAAA,gBAAA,GAAAC,OAAA;AAEA,IAAAC,aAAA,GAAAD,OAAA;AACA,IAAAE,wBAAA,GAAAF,OAAA;AACA,IAAAG,iBAAA,GAAAH,OAAA;AAA8D,IAIjDI,eAAe,GAAAC,OAAA,CAAAD,eAAA;EAIxB,SAAAA,gBACWE,QAAwB,EACjC;IAAA,KALKC,IAAI,GAAGC,oCAAsB;IAAA,KACpBC,WAAW,GAAGC,8BAAY;IAAA,KAG/BJ,QAAwB,GAAxBA,QAAwB;EAC/B;EAAC,IAAAK,MAAA,GAAAP,eAAA,CAAAQ,SAAA;EAAAD,MAAA,CAEEE,qBAAqB,GAA5B,SAAAA,sBACIC,MAAkE,EACvB;IAC3C,IAAAC,wDAAuC,EAACD,MAAM,CAAC;IAC/C,OAAO,IAAAE,oDAA2B,EAAC,IAAI,EAAEF,MAAM,EAAE,IAAI,CAACR,QAAQ,CAAC;EACnE,CAAC;EAAA,OAAAF,eAAA;AAAA;AAIE,SAASa,kBAAkBA,CAC9BX,QAAwB,GAAG;EACvBY,gBAAgB,EAAE;AACtB,CAAC,EACc;EACf,IAAMC,OAAO,GAAG,IAAIf,eAAe,CAACE,QAAQ,CAAC;EAC7C,OAAOa,OAAO;AAClB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-denokv/rx-storage-instance-denokv.js b/dist/cjs/plugins/storage-denokv/rx-storage-instance-denokv.js deleted file mode 100644 index e0523d6f8be..00000000000 --- a/dist/cjs/plugins/storage-denokv/rx-storage-instance-denokv.js +++ /dev/null @@ -1,315 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxStorageInstanceDenoKV = void 0; -exports.createDenoKVStorageInstance = createDenoKVStorageInstance; -var _rxjs = require("rxjs"); -var _rxSchemaHelper = require("../../rx-schema-helper.js"); -var _rxStorageMultiinstance = require("../../rx-storage-multiinstance.js"); -var _denokvHelper = 
require("./denokv-helper.js"); -var _customIndex = require("../../custom-index.js"); -var _utilsArray = require("../utils/utils-array.js"); -var _utilsOther = require("../utils/utils-other.js"); -var _rxStorageHelper = require("../../rx-storage-helper.js"); -var _utilsTime = require("../utils/utils-time.js"); -var _denokvQuery = require("./denokv-query.js"); -var _queryPlanner = require("../../query-planner.js"); -var _utilsPromise = require("../utils/utils-promise.js"); -var _utilsObject = require("../utils/utils-object.js"); -var RxStorageInstanceDenoKV = exports.RxStorageInstanceDenoKV = /*#__PURE__*/function () { - function RxStorageInstanceDenoKV(storage, databaseName, collectionName, schema, internals, options, settings, keySpace = ['rxdb', databaseName, collectionName, schema.version].join('|'), kvOptions = { - consistency: settings.consistencyLevel - }) { - this.changes$ = new _rxjs.Subject(); - this.storage = storage; - this.databaseName = databaseName; - this.collectionName = collectionName; - this.schema = schema; - this.internals = internals; - this.options = options; - this.settings = settings; - this.keySpace = keySpace; - this.kvOptions = kvOptions; - this.primaryPath = (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(this.schema.primaryKey); - this.kvPromise = (0, _denokvHelper.getDenoGlobal)().openKv(settings.openKvPath).then(async kv => { - // insert writeBlockKey - await kv.set([this.keySpace], 1); - return kv; - }); - } - - /** - * DenoKV has no transactions - * so we have to ensure that there is no write in between our queries - * which would confuse RxDB and return wrong query results. - */ - var _proto = RxStorageInstanceDenoKV.prototype; - _proto.retryUntilNoWriteInBetween = async function retryUntilNoWriteInBetween(fn) { - var kv = await this.kvPromise; - while (true) { - var writeBlockKeyBefore = await kv.get([this.keySpace], this.kvOptions); - var writeBlockValueBefore = writeBlockKeyBefore ? 
writeBlockKeyBefore.value : -1; - var result = await fn(); - var writeBlockKeyAfter = await kv.get([this.keySpace], this.kvOptions); - var writeBlockValueAfter = writeBlockKeyAfter ? writeBlockKeyAfter.value : -1; - if (writeBlockValueBefore === writeBlockValueAfter) { - return result; - } - } - }; - _proto.bulkWrite = async function bulkWrite(documentWrites, context) { - var _this = this; - var kv = await this.kvPromise; - var primaryPath = this.primaryPath; - var ret = { - success: [], - error: [] - }; - var batches = (0, _utilsArray.batchArray)(documentWrites, (0, _utilsOther.ensureNotFalsy)(this.settings.batchSize)); - - /** - * DenoKV does not have transactions - * so we use a special writeBlock row to ensure - * atomic writes (per document) - * and so that we can do bulkWrites - */ - for (var writeBatch of batches) { - var _loop = async function () { - var writeBlockKey = await kv.get([_this.keySpace], _this.kvOptions); - var docsInDB = new Map(); - - /** - * TODO the max amount for .getMany() is 10 which is defined by deno itself. - * How can this be increased? 
- */ - var readManyBatches = (0, _utilsArray.batchArray)(writeBatch, 10); - await Promise.all(readManyBatches.map(async readManyBatch => { - var docsResult = await kv.getMany(readManyBatch.map(writeRow => { - var docId = writeRow.document[primaryPath]; - return [_this.keySpace, _denokvHelper.DENOKV_DOCUMENT_ROOT_PATH, docId]; - })); - docsResult.map(row => { - var docData = row.value; - if (!docData) { - return; - } - var docId = docData[primaryPath]; - docsInDB.set(docId, docData); - }); - })); - var categorized = (0, _rxStorageHelper.categorizeBulkWriteRows)(_this, _this.primaryPath, docsInDB, writeBatch, context); - var tx = kv.atomic(); - tx = tx.set([_this.keySpace], (0, _utilsOther.ensureNotFalsy)(writeBlockKey.value) + 1); - tx = tx.check(writeBlockKey); - - // INSERTS - categorized.bulkInsertDocs.forEach(writeRow => { - var docId = writeRow.document[_this.primaryPath]; - ret.success.push(writeRow.document); - - // insert document data - tx = tx.set([_this.keySpace, _denokvHelper.DENOKV_DOCUMENT_ROOT_PATH, docId], writeRow.document); - - // insert secondary indexes - Object.values(_this.internals.indexes).forEach(indexMeta => { - var indexString = indexMeta.getIndexableString(writeRow.document); - tx = tx.set([_this.keySpace, indexMeta.indexId, indexString], docId); - }); - }); - // UPDATES - categorized.bulkUpdateDocs.forEach(writeRow => { - var docId = writeRow.document[_this.primaryPath]; - - // insert document data - tx = tx.set([_this.keySpace, _denokvHelper.DENOKV_DOCUMENT_ROOT_PATH, docId], writeRow.document); - - // insert secondary indexes - Object.values(_this.internals.indexes).forEach(indexMeta => { - var oldIndexString = indexMeta.getIndexableString((0, _utilsOther.ensureNotFalsy)(writeRow.previous)); - var newIndexString = indexMeta.getIndexableString(writeRow.document); - if (oldIndexString !== newIndexString) { - tx = tx.delete([_this.keySpace, indexMeta.indexId, oldIndexString]); - tx = tx.set([_this.keySpace, indexMeta.indexId, 
newIndexString], docId); - } - }); - ret.success.push(writeRow.document); - }); - var txResult = await tx.commit(); - if (txResult.ok) { - (0, _utilsArray.appendToArray)(ret.error, categorized.errors); - if (categorized.eventBulk.events.length > 0) { - var lastState = (0, _utilsOther.ensureNotFalsy)(categorized.newestRow).document; - categorized.eventBulk.checkpoint = { - id: lastState[primaryPath], - lwt: lastState._meta.lwt - }; - categorized.eventBulk.endTime = (0, _utilsTime.now)(); - _this.changes$.next(categorized.eventBulk); - } - return 1; // break - } - }; - while (true) { - if (await _loop()) break; - } - } - return ret; - }; - _proto.findDocumentsById = async function findDocumentsById(ids, withDeleted) { - var kv = await this.kvPromise; - var ret = []; - await Promise.all(ids.map(async docId => { - var kvKey = [this.keySpace, _denokvHelper.DENOKV_DOCUMENT_ROOT_PATH, docId]; - var findSingleResult = await kv.get(kvKey, this.kvOptions); - var docInDb = findSingleResult.value; - if (docInDb && (!docInDb._deleted || withDeleted)) { - ret.push(docInDb); - } - })); - return ret; - }; - _proto.query = function query(preparedQuery) { - return this.retryUntilNoWriteInBetween(() => (0, _denokvQuery.queryDenoKV)(this, preparedQuery)); - }; - _proto.count = async function count(preparedQuery) { - /** - * At this point in time (end 2023), DenoKV does not support - * range counts. So we have to run a normal query and use the result set length. 
- * @link https://github.com/denoland/deno/issues/18965 - */ - var result = await this.retryUntilNoWriteInBetween(() => this.query(preparedQuery)); - return { - count: result.documents.length, - mode: 'fast' - }; - }; - _proto.getAttachmentData = function getAttachmentData(documentId, attachmentId, digest) { - throw new Error("Method not implemented."); - }; - _proto.changeStream = function changeStream() { - return this.changes$.asObservable(); - }; - _proto.cleanup = async function cleanup(minimumDeletedTime) { - var _this2 = this; - var maxDeletionTime = (0, _utilsTime.now)() - minimumDeletedTime; - var kv = await this.kvPromise; - var index = _denokvHelper.CLEANUP_INDEX; - var indexName = (0, _denokvHelper.getDenoKVIndexName)(index); - var indexMeta = this.internals.indexes[indexName]; - var lowerBoundString = (0, _customIndex.getStartIndexStringFromLowerBound)(this.schema, index, [true, - /** - * Do not use 0 here, - * because 1 is the minimum value for _meta.lwt - */ - 1]); - var upperBoundString = (0, _customIndex.getStartIndexStringFromLowerBound)(this.schema, index, [true, maxDeletionTime]); - var noMoreUndeleted = true; - var range = kv.list({ - start: [this.keySpace, indexMeta.indexId, lowerBoundString], - end: [this.keySpace, indexMeta.indexId, upperBoundString] - }, { - consistency: this.settings.consistencyLevel, - batchSize: this.settings.batchSize, - limit: this.settings.batchSize - }); - var rangeCount = 0; - var _loop2 = async function () { - rangeCount = rangeCount + 1; - var docId = row.value; - var docDataResult = await kv.get([_this2.keySpace, _denokvHelper.DENOKV_DOCUMENT_ROOT_PATH, docId], _this2.kvOptions); - if (!docDataResult.value) { - return 0; // continue - } - var docData = (0, _utilsOther.ensureNotFalsy)(docDataResult.value); - if (!docData._deleted || docData._meta.lwt > maxDeletionTime) { - return 0; // continue - } - var tx = kv.atomic(); - tx = tx.check(docDataResult); - tx = tx.delete([_this2.keySpace, 
_denokvHelper.DENOKV_DOCUMENT_ROOT_PATH, docId]); - Object.values(_this2.internals.indexes).forEach(indexMetaInner => { - tx = tx.delete([_this2.keySpace, indexMetaInner.indexId, docId]); - }); - await tx.commit(); - }, - _ret; - for await (var row of range) { - _ret = await _loop2(); - if (_ret === 0) continue; - } - return noMoreUndeleted; - }; - _proto.close = async function close() { - if (this.closed) { - return this.closed; - } - this.closed = (async () => { - this.changes$.complete(); - var kv = await this.kvPromise; - await kv.close(); - })(); - return this.closed; - }; - _proto.remove = async function remove() { - ensureNotClosed(this); - var kv = await this.kvPromise; - var range = kv.list({ - start: [this.keySpace], - end: [this.keySpace, _queryPlanner.INDEX_MAX] - }, { - consistency: this.settings.consistencyLevel, - batchSize: this.settings.batchSize - }); - var promises = []; - for await (var row of range) { - promises.push(kv.delete(row.key)); - } - await Promise.all(promises); - return this.close(); - }; - _proto.conflictResultionTasks = function conflictResultionTasks() { - return new _rxjs.Subject().asObservable(); - }; - _proto.resolveConflictResultionTask = function resolveConflictResultionTask(_taskSolution) { - return _utilsPromise.PROMISE_RESOLVE_VOID; - }; - return RxStorageInstanceDenoKV; -}(); -async function createDenoKVStorageInstance(storage, params, settings) { - settings = (0, _utilsObject.flatClone)(settings); - if (!settings.batchSize) { - settings.batchSize = 100; - } - var primaryPath = (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(params.schema.primaryKey); - var indexDBs = {}; - var useIndexes = params.schema.indexes ? 
params.schema.indexes.slice(0) : []; - useIndexes.push([primaryPath]); - var useIndexesFinal = useIndexes.map(index => { - var indexAr = (0, _utilsArray.toArray)(index); - return indexAr; - }); - useIndexesFinal.push(_denokvHelper.CLEANUP_INDEX); - useIndexesFinal.forEach((indexAr, indexId) => { - var indexName = (0, _denokvHelper.getDenoKVIndexName)(indexAr); - indexDBs[indexName] = { - indexId: '|' + indexId + '|', - indexName, - getIndexableString: (0, _customIndex.getIndexableStringMonad)(params.schema, indexAr), - index: indexAr - }; - }); - var internals = { - indexes: indexDBs - }; - var instance = new RxStorageInstanceDenoKV(storage, params.databaseName, params.collectionName, params.schema, internals, params.options, settings); - await (0, _rxStorageMultiinstance.addRxStorageMultiInstanceSupport)(_denokvHelper.RX_STORAGE_NAME_DENOKV, params, instance); - return Promise.resolve(instance); -} -function ensureNotClosed(instance) { - if (instance.closed) { - throw new Error('RxStorageInstanceDenoKV is closed ' + instance.databaseName + '-' + instance.collectionName); - } -} -//# sourceMappingURL=rx-storage-instance-denokv.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-denokv/rx-storage-instance-denokv.js.map b/dist/cjs/plugins/storage-denokv/rx-storage-instance-denokv.js.map deleted file mode 100644 index 646174d7fd4..00000000000 --- a/dist/cjs/plugins/storage-denokv/rx-storage-instance-denokv.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"rx-storage-instance-denokv.js","names":["_rxjs","require","_rxSchemaHelper","_rxStorageMultiinstance","_denokvHelper","_customIndex","_utilsArray","_utilsOther","_rxStorageHelper","_utilsTime","_denokvQuery","_queryPlanner","_utilsPromise","_utilsObject","RxStorageInstanceDenoKV","exports","storage","databaseName","collectionName","schema","internals","options","settings","keySpace","version","join","kvOptions","consistency","consistencyLevel","changes$","Subject","primaryPath","getPrimaryFieldOfPrimaryKey","primaryKey","kvPromise","getDenoGlobal","openKv","openKvPath","then","kv","set","_proto","prototype","retryUntilNoWriteInBetween","fn","writeBlockKeyBefore","get","writeBlockValueBefore","value","result","writeBlockKeyAfter","writeBlockValueAfter","bulkWrite","documentWrites","context","_this","ret","success","error","batches","batchArray","ensureNotFalsy","batchSize","writeBatch","_loop","writeBlockKey","docsInDB","Map","readManyBatches","Promise","all","map","readManyBatch","docsResult","getMany","writeRow","docId","document","DENOKV_DOCUMENT_ROOT_PATH","row","docData","categorized","categorizeBulkWriteRows","tx","atomic","check","bulkInsertDocs","forEach","push","Object","values","indexes","indexMeta","indexString","getIndexableString","indexId","bulkUpdateDocs","oldIndexString","previous","newIndexString","delete","txResult","commit","ok","appendToArray","errors","eventBulk","events","length","lastState","newestRow","checkpoint","id","lwt","_meta","endTime","now","next","findDocumentsById","ids","withDeleted","kvKey","findSingleResult","docInDb","_deleted","query","preparedQuery","queryDenoKV","count","documents","mode","getAttachmentData","documentId","attachmentId","digest","Error","changeStream","asObservable","cleanup","minimumDeletedTime","_this2","maxDeletionTime","index","CLEANUP_INDEX","indexName","getDenoKVIndexName","lowerBoundString","getStartIndexStringFromLowerBound","upperBoundString","noMoreUndeleted","range","list","star
t","end","limit","rangeCount","_loop2","docDataResult","indexMetaInner","_ret","close","closed","complete","remove","ensureNotClosed","INDEX_MAX","promises","key","conflictResultionTasks","resolveConflictResultionTask","_taskSolution","PROMISE_RESOLVE_VOID","createDenoKVStorageInstance","params","flatClone","indexDBs","useIndexes","slice","useIndexesFinal","indexAr","toArray","getIndexableStringMonad","instance","addRxStorageMultiInstanceSupport","RX_STORAGE_NAME_DENOKV","resolve"],"sources":["../../../../src/plugins/storage-denokv/rx-storage-instance-denokv.ts"],"sourcesContent":["\nimport {\n Subject,\n Observable\n} from 'rxjs';\nimport type {\n RxStorageInstance,\n RxStorageChangeEvent,\n RxDocumentData,\n BulkWriteRow,\n RxStorageBulkWriteResponse,\n RxStorageQueryResult,\n RxJsonSchema,\n RxStorageInstanceCreationParams,\n EventBulk,\n StringKeys,\n RxConflictResultionTaskSolution,\n RxStorageDefaultCheckpoint,\n RxStorageCountResult,\n RxConflictResultionTask,\n PreparedQuery\n} from '../../types/index.d.ts';\nimport { getPrimaryFieldOfPrimaryKey } from '../../rx-schema-helper.ts';\nimport { addRxStorageMultiInstanceSupport } from '../../rx-storage-multiinstance.ts';\nimport type { DenoKVIndexMeta, DenoKVSettings, DenoKVStorageInternals } from './denokv-types.ts';\nimport { RxStorageDenoKV } from './index.ts';\nimport { CLEANUP_INDEX, DENOKV_DOCUMENT_ROOT_PATH, RX_STORAGE_NAME_DENOKV, getDenoGlobal, getDenoKVIndexName } from \"./denokv-helper.ts\";\nimport { getIndexableStringMonad, getStartIndexStringFromLowerBound } from \"../../custom-index.ts\";\nimport { appendToArray, batchArray, lastOfArray, toArray } from \"../utils/utils-array.ts\";\nimport { ensureNotFalsy } from \"../utils/utils-other.ts\";\nimport { categorizeBulkWriteRows } from \"../../rx-storage-helper.ts\";\nimport { now } from \"../utils/utils-time.ts\";\nimport { queryDenoKV } from \"./denokv-query.ts\";\nimport { INDEX_MAX } from \"../../query-planner.ts\";\nimport { PROMISE_RESOLVE_VOID } 
from \"../utils/utils-promise.ts\";\nimport { flatClone } from \"../utils/utils-object.ts\";\n\n\n\nexport class RxStorageInstanceDenoKV implements RxStorageInstance<\n RxDocType,\n DenoKVStorageInternals,\n DenoKVSettings,\n RxStorageDefaultCheckpoint\n> {\n public readonly primaryPath: StringKeys>;\n private changes$: Subject>, RxStorageDefaultCheckpoint>> = new Subject();\n public closed?: Promise;\n public readonly kvPromise: Promise;\n\n constructor(\n public readonly storage: RxStorageDenoKV,\n public readonly databaseName: string,\n public readonly collectionName: string,\n public readonly schema: Readonly>>,\n public readonly internals: DenoKVStorageInternals,\n public readonly options: Readonly,\n public readonly settings: DenoKVSettings,\n public readonly keySpace = ['rxdb', databaseName, collectionName, schema.version].join('|'),\n public readonly kvOptions = { consistency: settings.consistencyLevel }\n ) {\n this.primaryPath = getPrimaryFieldOfPrimaryKey(this.schema.primaryKey);\n this.kvPromise = getDenoGlobal().openKv(settings.openKvPath).then(async (kv: any) => {\n // insert writeBlockKey\n await kv.set([this.keySpace], 1);\n return kv;\n });\n }\n\n /**\n * DenoKV has no transactions\n * so we have to ensure that there is no write in between our queries\n * which would confuse RxDB and return wrong query results.\n */\n async retryUntilNoWriteInBetween(\n fn: () => Promise\n ): Promise {\n const kv = await this.kvPromise;\n while (true) {\n const writeBlockKeyBefore = await kv.get([this.keySpace], this.kvOptions);\n const writeBlockValueBefore = writeBlockKeyBefore ? writeBlockKeyBefore.value : -1;\n const result = await fn();\n const writeBlockKeyAfter = await kv.get([this.keySpace], this.kvOptions);\n const writeBlockValueAfter = writeBlockKeyAfter ? 
writeBlockKeyAfter.value : -1;\n\n if (writeBlockValueBefore === writeBlockValueAfter) {\n return result;\n }\n }\n }\n\n async bulkWrite(documentWrites: BulkWriteRow[], context: string): Promise> {\n const kv = await this.kvPromise;\n const primaryPath = this.primaryPath;\n const ret: RxStorageBulkWriteResponse = {\n success: [],\n error: []\n };\n\n const batches = batchArray(documentWrites, ensureNotFalsy(this.settings.batchSize));\n\n /**\n * DenoKV does not have transactions\n * so we use a special writeBlock row to ensure\n * atomic writes (per document)\n * and so that we can do bulkWrites\n */\n for (const writeBatch of batches) {\n while (true) {\n const writeBlockKey = await kv.get([this.keySpace], this.kvOptions);\n const docsInDB = new Map>();\n\n /**\n * TODO the max amount for .getMany() is 10 which is defined by deno itself.\n * How can this be increased?\n */\n const readManyBatches = batchArray(writeBatch, 10);\n await Promise.all(\n readManyBatches.map(async (readManyBatch) => {\n const docsResult = await kv.getMany(\n readManyBatch.map(writeRow => {\n const docId: string = writeRow.document[primaryPath] as any;\n return [this.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId];\n })\n );\n docsResult.map((row: any) => {\n const docData = row.value;\n if (!docData) {\n return;\n }\n const docId: string = docData[primaryPath] as any;\n docsInDB.set(docId, docData);\n });\n })\n );\n const categorized = categorizeBulkWriteRows(\n this,\n this.primaryPath as any,\n docsInDB,\n writeBatch,\n context\n );\n\n let tx = kv.atomic();\n tx = tx.set([this.keySpace], ensureNotFalsy(writeBlockKey.value) + 1);\n tx = tx.check(writeBlockKey);\n\n // INSERTS\n categorized.bulkInsertDocs.forEach(writeRow => {\n const docId: string = writeRow.document[this.primaryPath] as any;\n ret.success.push(writeRow.document);\n\n // insert document data\n tx = tx.set([this.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId], writeRow.document);\n\n // insert secondary indexes\n 
Object.values(this.internals.indexes).forEach(indexMeta => {\n const indexString = indexMeta.getIndexableString(writeRow.document as any);\n tx = tx.set([this.keySpace, indexMeta.indexId, indexString], docId);\n });\n });\n // UPDATES\n categorized.bulkUpdateDocs.forEach((writeRow: BulkWriteRow) => {\n const docId: string = writeRow.document[this.primaryPath] as any;\n\n // insert document data\n tx = tx.set([this.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId], writeRow.document);\n\n // insert secondary indexes\n Object.values(this.internals.indexes).forEach(indexMeta => {\n const oldIndexString = indexMeta.getIndexableString(ensureNotFalsy(writeRow.previous));\n const newIndexString = indexMeta.getIndexableString(writeRow.document as any);\n if (oldIndexString !== newIndexString) {\n tx = tx.delete([this.keySpace, indexMeta.indexId, oldIndexString]);\n tx = tx.set([this.keySpace, indexMeta.indexId, newIndexString], docId);\n }\n });\n ret.success.push(writeRow.document as any);\n });\n\n const txResult = await tx.commit();\n if (txResult.ok) {\n appendToArray(ret.error, categorized.errors);\n if (categorized.eventBulk.events.length > 0) {\n const lastState = ensureNotFalsy(categorized.newestRow).document;\n categorized.eventBulk.checkpoint = {\n id: lastState[primaryPath],\n lwt: lastState._meta.lwt\n };\n categorized.eventBulk.endTime = now();\n this.changes$.next(categorized.eventBulk);\n }\n break;\n }\n }\n }\n return ret;\n }\n async findDocumentsById(ids: string[], withDeleted: boolean): Promise[]> {\n const kv = await this.kvPromise;\n const ret: RxDocumentData[] = [];\n await Promise.all(\n ids.map(async (docId) => {\n const kvKey = [this.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId];\n const findSingleResult = await kv.get(kvKey, this.kvOptions);\n const docInDb = findSingleResult.value;\n if (\n docInDb &&\n (\n !docInDb._deleted ||\n withDeleted\n )\n ) {\n ret.push(docInDb);\n }\n })\n );\n return ret;\n }\n query(preparedQuery: PreparedQuery): Promise> 
{\n return this.retryUntilNoWriteInBetween(\n () => queryDenoKV(this, preparedQuery)\n );\n }\n async count(preparedQuery: PreparedQuery): Promise {\n /**\n * At this point in time (end 2023), DenoKV does not support\n * range counts. So we have to run a normal query and use the result set length.\n * @link https://github.com/denoland/deno/issues/18965\n */\n const result = await this.retryUntilNoWriteInBetween(\n () => this.query(preparedQuery)\n );\n return {\n count: result.documents.length,\n mode: 'fast'\n };\n }\n getAttachmentData(documentId: string, attachmentId: string, digest: string): Promise {\n throw new Error(\"Method not implemented.\");\n }\n changeStream() {\n return this.changes$.asObservable();\n }\n async cleanup(minimumDeletedTime: number): Promise {\n const maxDeletionTime = now() - minimumDeletedTime;\n const kv = await this.kvPromise;\n const index = CLEANUP_INDEX;\n const indexName = getDenoKVIndexName(index);\n const indexMeta = this.internals.indexes[indexName];\n const lowerBoundString = getStartIndexStringFromLowerBound(\n this.schema,\n index,\n [\n true,\n /**\n * Do not use 0 here,\n * because 1 is the minimum value for _meta.lwt\n */\n 1\n ]\n );\n const upperBoundString = getStartIndexStringFromLowerBound(\n this.schema,\n index,\n [\n true,\n maxDeletionTime\n ]\n );\n let noMoreUndeleted: boolean = true;\n\n const range = kv.list({\n start: [this.keySpace, indexMeta.indexId, lowerBoundString],\n end: [this.keySpace, indexMeta.indexId, upperBoundString]\n }, {\n consistency: this.settings.consistencyLevel,\n batchSize: this.settings.batchSize,\n limit: this.settings.batchSize\n });\n\n let rangeCount = 0;\n for await (const row of range) {\n rangeCount = rangeCount + 1;\n const docId = row.value;\n const docDataResult = await kv.get([this.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId], this.kvOptions);\n if (!docDataResult.value) {\n continue;\n }\n const docData = ensureNotFalsy(docDataResult.value);\n if (\n !docData._deleted ||\n 
docData._meta.lwt > maxDeletionTime\n ) {\n continue;\n }\n\n\n let tx = kv.atomic();\n tx = tx.check(docDataResult);\n tx = tx.delete([this.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId]);\n Object\n .values(this.internals.indexes)\n .forEach(indexMetaInner => {\n tx = tx.delete([this.keySpace, indexMetaInner.indexId, docId]);\n });\n await tx.commit();\n }\n return noMoreUndeleted;\n }\n async close(): Promise {\n if (this.closed) {\n return this.closed;\n }\n this.closed = (async () => {\n this.changes$.complete();\n const kv = await this.kvPromise;\n await kv.close();\n })();\n return this.closed;\n }\n async remove(): Promise {\n ensureNotClosed(this);\n const kv = await this.kvPromise;\n const range = kv.list({\n start: [this.keySpace],\n end: [this.keySpace, INDEX_MAX]\n }, {\n consistency: this.settings.consistencyLevel,\n batchSize: this.settings.batchSize\n });\n let promises: Promise[] = [];\n for await (const row of range) {\n promises.push(kv.delete(row.key));\n }\n\n await Promise.all(promises);\n return this.close();\n }\n conflictResultionTasks(): Observable> {\n return new Subject().asObservable();\n }\n resolveConflictResultionTask(_taskSolution: RxConflictResultionTaskSolution): Promise {\n return PROMISE_RESOLVE_VOID;\n }\n}\n\n\n\nexport async function createDenoKVStorageInstance(\n storage: RxStorageDenoKV,\n params: RxStorageInstanceCreationParams,\n settings: DenoKVSettings\n): Promise> {\n settings = flatClone(settings);\n if (!settings.batchSize) {\n settings.batchSize = 100;\n }\n\n const primaryPath = getPrimaryFieldOfPrimaryKey(params.schema.primaryKey);\n\n const indexDBs: { [indexName: string]: DenoKVIndexMeta; } = {};\n const useIndexes = params.schema.indexes ? 
params.schema.indexes.slice(0) : [];\n useIndexes.push([primaryPath]);\n const useIndexesFinal = useIndexes.map(index => {\n const indexAr = toArray(index);\n return indexAr;\n });\n useIndexesFinal.push(CLEANUP_INDEX);\n useIndexesFinal.forEach((indexAr, indexId) => {\n const indexName = getDenoKVIndexName(indexAr);\n indexDBs[indexName] = {\n indexId: '|' + indexId + '|',\n indexName,\n getIndexableString: getIndexableStringMonad(params.schema, indexAr),\n index: indexAr\n };\n });\n\n const internals = {\n indexes: indexDBs\n };\n const instance = new RxStorageInstanceDenoKV(\n storage,\n params.databaseName,\n params.collectionName,\n params.schema,\n internals,\n params.options,\n settings\n );\n\n await addRxStorageMultiInstanceSupport(\n RX_STORAGE_NAME_DENOKV,\n params,\n instance\n );\n\n return Promise.resolve(instance);\n}\n\n\n\nfunction ensureNotClosed(\n instance: RxStorageInstanceDenoKV\n) {\n if (instance.closed) {\n throw new Error('RxStorageInstanceDenoKV is closed ' + instance.databaseName + '-' + instance.collectionName);\n 
}\n}\n"],"mappings":";;;;;;;AACA,IAAAA,KAAA,GAAAC,OAAA;AAqBA,IAAAC,eAAA,GAAAD,OAAA;AACA,IAAAE,uBAAA,GAAAF,OAAA;AAGA,IAAAG,aAAA,GAAAH,OAAA;AACA,IAAAI,YAAA,GAAAJ,OAAA;AACA,IAAAK,WAAA,GAAAL,OAAA;AACA,IAAAM,WAAA,GAAAN,OAAA;AACA,IAAAO,gBAAA,GAAAP,OAAA;AACA,IAAAQ,UAAA,GAAAR,OAAA;AACA,IAAAS,YAAA,GAAAT,OAAA;AACA,IAAAU,aAAA,GAAAV,OAAA;AACA,IAAAW,aAAA,GAAAX,OAAA;AACA,IAAAY,YAAA,GAAAZ,OAAA;AAAqD,IAIxCa,uBAAuB,GAAAC,OAAA,CAAAD,uBAAA;EAWhC,SAAAA,wBACoBE,OAAwB,EACxBC,YAAoB,EACpBC,cAAsB,EACtBC,MAAyD,EACzDC,SAA4C,EAC5CC,OAAiC,EACjCC,QAAwB,EACxBC,QAAQ,GAAG,CAAC,MAAM,EAAEN,YAAY,EAAEC,cAAc,EAAEC,MAAM,CAACK,OAAO,CAAC,CAACC,IAAI,CAAC,GAAG,CAAC,EAC3EC,SAAS,GAAG;IAAEC,WAAW,EAAEL,QAAQ,CAACM;EAAiB,CAAC,EACxE;IAAA,KAdMC,QAAQ,GAAoG,IAAIC,aAAO,CAAC,CAAC;IAAA,KAK7Gd,OAAwB,GAAxBA,OAAwB;IAAA,KACxBC,YAAoB,GAApBA,YAAoB;IAAA,KACpBC,cAAsB,GAAtBA,cAAsB;IAAA,KACtBC,MAAyD,GAAzDA,MAAyD;IAAA,KACzDC,SAA4C,GAA5CA,SAA4C;IAAA,KAC5CC,OAAiC,GAAjCA,OAAiC;IAAA,KACjCC,QAAwB,GAAxBA,QAAwB;IAAA,KACxBC,QAAQ,GAARA,QAAQ;IAAA,KACRG,SAAS,GAATA,SAAS;IAEzB,IAAI,CAACK,WAAW,GAAG,IAAAC,2CAA2B,EAAC,IAAI,CAACb,MAAM,CAACc,UAAU,CAAC;IACtE,IAAI,CAACC,SAAS,GAAG,IAAAC,2BAAa,EAAC,CAAC,CAACC,MAAM,CAACd,QAAQ,CAACe,UAAU,CAAC,CAACC,IAAI,CAAC,MAAOC,EAAO,IAAK;MACjF;MACA,MAAMA,EAAE,CAACC,GAAG,CAAC,CAAC,IAAI,CAACjB,QAAQ,CAAC,EAAE,CAAC,CAAC;MAChC,OAAOgB,EAAE;IACb,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA;AACA;EAJI,IAAAE,MAAA,GAAA3B,uBAAA,CAAA4B,SAAA;EAAAD,MAAA,CAKME,0BAA0B,GAAhC,eAAAA,2BACIC,EAAoB,EACV;IACV,IAAML,EAAE,GAAG,MAAM,IAAI,CAACL,SAAS;IAC/B,OAAO,IAAI,EAAE;MACT,IAAMW,mBAAmB,GAAG,MAAMN,EAAE,CAACO,GAAG,CAAC,CAAC,IAAI,CAACvB,QAAQ,CAAC,EAAE,IAAI,CAACG,SAAS,CAAC;MACzE,IAAMqB,qBAAqB,GAAGF,mBAAmB,GAAGA,mBAAmB,CAACG,KAAK,GAAG,CAAC,CAAC;MAClF,IAAMC,MAAM,GAAG,MAAML,EAAE,CAAC,CAAC;MACzB,IAAMM,kBAAkB,GAAG,MAAMX,EAAE,CAACO,GAAG,CAAC,CAAC,IAAI,CAACvB,QAAQ,CAAC,EAAE,IAAI,CAACG,SAAS,CAAC;MACxE,IAAMyB,oBAAoB,GAAGD,kBAAkB,GAAGA,kBAAkB,CAACF,KAAK,GAAG,CAAC,CAAC;MAE/E,IAAID,qBAAqB,KAAKI,oBAAoB,EAAE;QAChD,OAAOF,MAAM;MACjB;IACJ;EACJ,CAAC;EAAAR,MAAA,CAEKW,SAAS,GAA
f,eAAAA,UAAgBC,cAAyC,EAAEC,OAAe,EAAkD;IAAA,IAAAC,KAAA;IACxH,IAAMhB,EAAE,GAAG,MAAM,IAAI,CAACL,SAAS;IAC/B,IAAMH,WAAW,GAAG,IAAI,CAACA,WAAW;IACpC,IAAMyB,GAA0C,GAAG;MAC/CC,OAAO,EAAE,EAAE;MACXC,KAAK,EAAE;IACX,CAAC;IAED,IAAMC,OAAO,GAAG,IAAAC,sBAAU,EAACP,cAAc,EAAE,IAAAQ,0BAAc,EAAC,IAAI,CAACvC,QAAQ,CAACwC,SAAS,CAAC,CAAC;;IAEnF;AACR;AACA;AACA;AACA;AACA;IACQ,KAAK,IAAMC,UAAU,IAAIJ,OAAO,EAAE;MAAA,IAAAK,KAAA,kBAAAA,CAAA,EACjB;QACT,IAAMC,aAAa,GAAG,MAAM1B,EAAE,CAACO,GAAG,CAAC,CAACS,KAAI,CAAChC,QAAQ,CAAC,EAAEgC,KAAI,CAAC7B,SAAS,CAAC;QACnE,IAAMwC,QAAQ,GAAG,IAAIC,GAAG,CAAoC,CAAC;;QAE7D;AAChB;AACA;AACA;QACgB,IAAMC,eAAe,GAAG,IAAAR,sBAAU,EAACG,UAAU,EAAE,EAAE,CAAC;QAClD,MAAMM,OAAO,CAACC,GAAG,CACbF,eAAe,CAACG,GAAG,CAAC,MAAOC,aAAa,IAAK;UACzC,IAAMC,UAAU,GAAG,MAAMlC,EAAE,CAACmC,OAAO,CAC/BF,aAAa,CAACD,GAAG,CAACI,QAAQ,IAAI;YAC1B,IAAMC,KAAa,GAAGD,QAAQ,CAACE,QAAQ,CAAC9C,WAAW,CAAQ;YAC3D,OAAO,CAACwB,KAAI,CAAChC,QAAQ,EAAEuD,uCAAyB,EAAEF,KAAK,CAAC;UAC5D,CAAC,CACL,CAAC;UACDH,UAAU,CAACF,GAAG,CAAEQ,GAAQ,IAAK;YACzB,IAAMC,OAAO,GAAGD,GAAG,CAAC/B,KAAK;YACzB,IAAI,CAACgC,OAAO,EAAE;cACV;YACJ;YACA,IAAMJ,KAAa,GAAGI,OAAO,CAACjD,WAAW,CAAQ;YACjDmC,QAAQ,CAAC1B,GAAG,CAACoC,KAAK,EAAEI,OAAO,CAAC;UAChC,CAAC,CAAC;QACN,CAAC,CACL,CAAC;QACD,IAAMC,WAAW,GAAG,IAAAC,wCAAuB,EACvC3B,KAAI,EACJA,KAAI,CAACxB,WAAW,EAChBmC,QAAQ,EACRH,UAAU,EACVT,OACJ,CAAC;QAED,IAAI6B,EAAE,GAAG5C,EAAE,CAAC6C,MAAM,CAAC,CAAC;QACpBD,EAAE,GAAGA,EAAE,CAAC3C,GAAG,CAAC,CAACe,KAAI,CAAChC,QAAQ,CAAC,EAAE,IAAAsC,0BAAc,EAACI,aAAa,CAACjB,KAAK,CAAC,GAAG,CAAC,CAAC;QACrEmC,EAAE,GAAGA,EAAE,CAACE,KAAK,CAACpB,aAAa,CAAC;;QAE5B;QACAgB,WAAW,CAACK,cAAc,CAACC,OAAO,CAACZ,QAAQ,IAAI;UAC3C,IAAMC,KAAa,GAAGD,QAAQ,CAACE,QAAQ,CAACtB,KAAI,CAACxB,WAAW,CAAQ;UAChEyB,GAAG,CAACC,OAAO,CAAC+B,IAAI,CAACb,QAAQ,CAACE,QAAQ,CAAC;;UAEnC;UACAM,EAAE,GAAGA,EAAE,CAAC3C,GAAG,CAAC,CAACe,KAAI,CAAChC,QAAQ,EAAEuD,uCAAyB,EAAEF,KAAK,CAAC,EAAED,QAAQ,CAACE,QAAQ,CAAC;;UAEjF;UACAY,MAAM,CAACC,MAAM,CAACnC,KAAI,CAACnC,SAAS,CAACuE,OAAO,CAAC,CAACJ,OAAO,CAACK,SAAS,IAAI;YACvD,IAAMC,WAAW,GAAGD,SAAS,CAACE,kBAAkB,CAACnB,QA
AQ,CAACE,QAAe,CAAC;YAC1EM,EAAE,GAAGA,EAAE,CAAC3C,GAAG,CAAC,CAACe,KAAI,CAAChC,QAAQ,EAAEqE,SAAS,CAACG,OAAO,EAAEF,WAAW,CAAC,EAAEjB,KAAK,CAAC;UACvE,CAAC,CAAC;QACN,CAAC,CAAC;QACF;QACAK,WAAW,CAACe,cAAc,CAACT,OAAO,CAAEZ,QAAiC,IAAK;UACtE,IAAMC,KAAa,GAAGD,QAAQ,CAACE,QAAQ,CAACtB,KAAI,CAACxB,WAAW,CAAQ;;UAEhE;UACAoD,EAAE,GAAGA,EAAE,CAAC3C,GAAG,CAAC,CAACe,KAAI,CAAChC,QAAQ,EAAEuD,uCAAyB,EAAEF,KAAK,CAAC,EAAED,QAAQ,CAACE,QAAQ,CAAC;;UAEjF;UACAY,MAAM,CAACC,MAAM,CAACnC,KAAI,CAACnC,SAAS,CAACuE,OAAO,CAAC,CAACJ,OAAO,CAACK,SAAS,IAAI;YACvD,IAAMK,cAAc,GAAGL,SAAS,CAACE,kBAAkB,CAAC,IAAAjC,0BAAc,EAACc,QAAQ,CAACuB,QAAQ,CAAC,CAAC;YACtF,IAAMC,cAAc,GAAGP,SAAS,CAACE,kBAAkB,CAACnB,QAAQ,CAACE,QAAe,CAAC;YAC7E,IAAIoB,cAAc,KAAKE,cAAc,EAAE;cACnChB,EAAE,GAAGA,EAAE,CAACiB,MAAM,CAAC,CAAC7C,KAAI,CAAChC,QAAQ,EAAEqE,SAAS,CAACG,OAAO,EAAEE,cAAc,CAAC,CAAC;cAClEd,EAAE,GAAGA,EAAE,CAAC3C,GAAG,CAAC,CAACe,KAAI,CAAChC,QAAQ,EAAEqE,SAAS,CAACG,OAAO,EAAEI,cAAc,CAAC,EAAEvB,KAAK,CAAC;YAC1E;UACJ,CAAC,CAAC;UACFpB,GAAG,CAACC,OAAO,CAAC+B,IAAI,CAACb,QAAQ,CAACE,QAAe,CAAC;QAC9C,CAAC,CAAC;QAEF,IAAMwB,QAAQ,GAAG,MAAMlB,EAAE,CAACmB,MAAM,CAAC,CAAC;QAClC,IAAID,QAAQ,CAACE,EAAE,EAAE;UACb,IAAAC,yBAAa,EAAChD,GAAG,CAACE,KAAK,EAAEuB,WAAW,CAACwB,MAAM,CAAC;UAC5C,IAAIxB,WAAW,CAACyB,SAAS,CAACC,MAAM,CAACC,MAAM,GAAG,CAAC,EAAE;YACzC,IAAMC,SAAS,GAAG,IAAAhD,0BAAc,EAACoB,WAAW,CAAC6B,SAAS,CAAC,CAACjC,QAAQ;YAChEI,WAAW,CAACyB,SAAS,CAACK,UAAU,GAAG;cAC/BC,EAAE,EAAEH,SAAS,CAAC9E,WAAW,CAAC;cAC1BkF,GAAG,EAAEJ,SAAS,CAACK,KAAK,CAACD;YACzB,CAAC;YACDhC,WAAW,CAACyB,SAAS,CAACS,OAAO,GAAG,IAAAC,cAAG,EAAC,CAAC;YACrC7D,KAAI,CAAC1B,QAAQ,CAACwF,IAAI,CAACpC,WAAW,CAACyB,SAAS,CAAC;UAC7C;UAAC;QAEL;MACJ,CAAC;MAtFD,OAAO,IAAI;QAAA,UAAA1C,KAAA,IAoFH;MAAM;IAGlB;IACA,OAAOR,GAAG;EACd,CAAC;EAAAf,MAAA,CACK6E,iBAAiB,GAAvB,eAAAA,kBAAwBC,GAAa,EAAEC,WAAoB,EAAwC;IAC/F,IAAMjF,EAAE,GAAG,MAAM,IAAI,CAACL,SAAS;IAC/B,IAAMsB,GAAgC,GAAG,EAAE;IAC3C,MAAMa,OAAO,CAACC,GAAG,CACbiD,GAAG,CAAChD,GAAG,CAAC,MAAOK,KAAK,IAAK;MACrB,IAAM6C,KAAK,GAAG,CAAC,IAAI,CAAClG,QAAQ,EAAEuD,uCAAyB,EAAEF,KAAK,CAAC;MAC/D,IAAM8
C,gBAAgB,GAAG,MAAMnF,EAAE,CAACO,GAAG,CAAC2E,KAAK,EAAE,IAAI,CAAC/F,SAAS,CAAC;MAC5D,IAAMiG,OAAO,GAAGD,gBAAgB,CAAC1E,KAAK;MACtC,IACI2E,OAAO,KAEH,CAACA,OAAO,CAACC,QAAQ,IACjBJ,WAAW,CACd,EACH;QACEhE,GAAG,CAACgC,IAAI,CAACmC,OAAO,CAAC;MACrB;IACJ,CAAC,CACL,CAAC;IACD,OAAOnE,GAAG;EACd,CAAC;EAAAf,MAAA,CACDoF,KAAK,GAAL,SAAAA,MAAMC,aAAuC,EAA4C;IACrF,OAAO,IAAI,CAACnF,0BAA0B,CAClC,MAAM,IAAAoF,wBAAW,EAAC,IAAI,EAAED,aAAa,CACzC,CAAC;EACL,CAAC;EAAArF,MAAA,CACKuF,KAAK,GAAX,eAAAA,MAAYF,aAAuC,EAAiC;IAChF;AACR;AACA;AACA;AACA;IACQ,IAAM7E,MAAM,GAAG,MAAM,IAAI,CAACN,0BAA0B,CAChD,MAAM,IAAI,CAACkF,KAAK,CAACC,aAAa,CAClC,CAAC;IACD,OAAO;MACHE,KAAK,EAAE/E,MAAM,CAACgF,SAAS,CAACrB,MAAM;MAC9BsB,IAAI,EAAE;IACV,CAAC;EACL,CAAC;EAAAzF,MAAA,CACD0F,iBAAiB,GAAjB,SAAAA,kBAAkBC,UAAkB,EAAEC,YAAoB,EAAEC,MAAc,EAAmB;IACzF,MAAM,IAAIC,KAAK,CAAC,yBAAyB,CAAC;EAC9C,CAAC;EAAA9F,MAAA,CACD+F,YAAY,GAAZ,SAAAA,aAAA,EAAe;IACX,OAAO,IAAI,CAAC3G,QAAQ,CAAC4G,YAAY,CAAC,CAAC;EACvC,CAAC;EAAAhG,MAAA,CACKiG,OAAO,GAAb,eAAAA,QAAcC,kBAA0B,EAAoB;IAAA,IAAAC,MAAA;IACxD,IAAMC,eAAe,GAAG,IAAAzB,cAAG,EAAC,CAAC,GAAGuB,kBAAkB;IAClD,IAAMpG,EAAE,GAAG,MAAM,IAAI,CAACL,SAAS;IAC/B,IAAM4G,KAAK,GAAGC,2BAAa;IAC3B,IAAMC,SAAS,GAAG,IAAAC,gCAAkB,EAACH,KAAK,CAAC;IAC3C,IAAMlD,SAAS,GAAG,IAAI,CAACxE,SAAS,CAACuE,OAAO,CAACqD,SAAS,CAAC;IACnD,IAAME,gBAAgB,GAAG,IAAAC,8CAAiC,EACtD,IAAI,CAAChI,MAAM,EACX2H,KAAK,EACL,CACI,IAAI;IACJ;AAChB;AACA;AACA;IACgB,CAAC,CAET,CAAC;IACD,IAAMM,gBAAgB,GAAG,IAAAD,8CAAiC,EACtD,IAAI,CAAChI,MAAM,EACX2H,KAAK,EACL,CACI,IAAI,EACJD,eAAe,CAEvB,CAAC;IACD,IAAIQ,eAAwB,GAAG,IAAI;IAEnC,IAAMC,KAAK,GAAG/G,EAAE,CAACgH,IAAI,CAAC;MAClBC,KAAK,EAAE,CAAC,IAAI,CAACjI,QAAQ,EAAEqE,SAAS,CAACG,OAAO,EAAEmD,gBAAgB,CAAC;MAC3DO,GAAG,EAAE,CAAC,IAAI,CAAClI,QAAQ,EAAEqE,SAAS,CAACG,OAAO,EAAEqD,gBAAgB;IAC5D,CAAC,EAAE;MACCzH,WAAW,EAAE,IAAI,CAACL,QAAQ,CAACM,gBAAgB;MAC3CkC,SAAS,EAAE,IAAI,CAACxC,QAAQ,CAACwC,SAAS;MAClC4F,KAAK,EAAE,IAAI,CAACpI,QAAQ,CAACwC;IACzB,CAAC,CAAC;IAEF,IAAI6F,UAAU,GAAG,CAAC;IAAC,IAAAC,MAAA,kBAAAA,CAAA,EACY;QAC3BD,UAAU,GAAGA,UAAU,GAAG,CAAC;QAC3B,IAAM/E,KAAK,
GAAGG,GAAG,CAAC/B,KAAK;QACvB,IAAM6G,aAAa,GAAG,MAAMtH,EAAE,CAACO,GAAG,CAAC,CAAC8F,MAAI,CAACrH,QAAQ,EAAEuD,uCAAyB,EAAEF,KAAK,CAAC,EAAEgE,MAAI,CAAClH,SAAS,CAAC;QACrG,IAAI,CAACmI,aAAa,CAAC7G,KAAK,EAAE;UAAA;QAE1B;QACA,IAAMgC,OAAO,GAAG,IAAAnB,0BAAc,EAACgG,aAAa,CAAC7G,KAAK,CAAC;QACnD,IACI,CAACgC,OAAO,CAAC4C,QAAQ,IACjB5C,OAAO,CAACkC,KAAK,CAACD,GAAG,GAAG4B,eAAe,EACrC;UAAA;QAEF;QAGA,IAAI1D,EAAE,GAAG5C,EAAE,CAAC6C,MAAM,CAAC,CAAC;QACpBD,EAAE,GAAGA,EAAE,CAACE,KAAK,CAACwE,aAAa,CAAC;QAC5B1E,EAAE,GAAGA,EAAE,CAACiB,MAAM,CAAC,CAACwC,MAAI,CAACrH,QAAQ,EAAEuD,uCAAyB,EAAEF,KAAK,CAAC,CAAC;QACjEa,MAAM,CACDC,MAAM,CAACkD,MAAI,CAACxH,SAAS,CAACuE,OAAO,CAAC,CAC9BJ,OAAO,CAACuE,cAAc,IAAI;UACvB3E,EAAE,GAAGA,EAAE,CAACiB,MAAM,CAAC,CAACwC,MAAI,CAACrH,QAAQ,EAAEuI,cAAc,CAAC/D,OAAO,EAAEnB,KAAK,CAAC,CAAC;QAClE,CAAC,CAAC;QACN,MAAMO,EAAE,CAACmB,MAAM,CAAC,CAAC;MACrB,CAAC;MAAAyD,IAAA;IAzBD,WAAW,IAAMhF,GAAG,IAAIuE,KAAK;MAAAS,IAAA,SAAAH,MAAA;MAAA,IAAAG,IAAA,QAKrB;IAAS;IAqBjB,OAAOV,eAAe;EAC1B,CAAC;EAAA5G,MAAA,CACKuH,KAAK,GAAX,eAAAA,MAAA,EAA6B;IACzB,IAAI,IAAI,CAACC,MAAM,EAAE;MACb,OAAO,IAAI,CAACA,MAAM;IACtB;IACA,IAAI,CAACA,MAAM,GAAG,CAAC,YAAY;MACvB,IAAI,CAACpI,QAAQ,CAACqI,QAAQ,CAAC,CAAC;MACxB,IAAM3H,EAAE,GAAG,MAAM,IAAI,CAACL,SAAS;MAC/B,MAAMK,EAAE,CAACyH,KAAK,CAAC,CAAC;IACpB,CAAC,EAAE,CAAC;IACJ,OAAO,IAAI,CAACC,MAAM;EACtB,CAAC;EAAAxH,MAAA,CACK0H,MAAM,GAAZ,eAAAA,OAAA,EAA8B;IAC1BC,eAAe,CAAC,IAAI,CAAC;IACrB,IAAM7H,EAAE,GAAG,MAAM,IAAI,CAACL,SAAS;IAC/B,IAAMoH,KAAK,GAAG/G,EAAE,CAACgH,IAAI,CAAC;MAClBC,KAAK,EAAE,CAAC,IAAI,CAACjI,QAAQ,CAAC;MACtBkI,GAAG,EAAE,CAAC,IAAI,CAAClI,QAAQ,EAAE8I,uBAAS;IAClC,CAAC,EAAE;MACC1I,WAAW,EAAE,IAAI,CAACL,QAAQ,CAACM,gBAAgB;MAC3CkC,SAAS,EAAE,IAAI,CAACxC,QAAQ,CAACwC;IAC7B,CAAC,CAAC;IACF,IAAIwG,QAAwB,GAAG,EAAE;IACjC,WAAW,IAAMvF,GAAG,IAAIuE,KAAK,EAAE;MAC3BgB,QAAQ,CAAC9E,IAAI,CAACjD,EAAE,CAAC6D,MAAM,CAACrB,GAAG,CAACwF,GAAG,CAAC,CAAC;IACrC;IAEA,MAAMlG,OAAO,CAACC,GAAG,CAACgG,QAAQ,CAAC;IAC3B,OAAO,IAAI,CAACN,KAAK,CAAC,CAAC;EACvB,CAAC;EAAAvH,MAAA,CACD+H,sBAAsB,GAAtB,SAAAA,uBAAA,EAAyE;IACrE,OAAO,IAAI1I,aA
AO,CAAM,CAAC,CAAC2G,YAAY,CAAC,CAAC;EAC5C,CAAC;EAAAhG,MAAA,CACDgI,4BAA4B,GAA5B,SAAAA,6BAA6BC,aAAyD,EAAiB;IACnG,OAAOC,kCAAoB;EAC/B,CAAC;EAAA,OAAA7J,uBAAA;AAAA;AAKE,eAAe8J,2BAA2BA,CAC7C5J,OAAwB,EACxB6J,MAAkE,EAClEvJ,QAAwB,EACmB;EAC3CA,QAAQ,GAAG,IAAAwJ,sBAAS,EAACxJ,QAAQ,CAAC;EAC9B,IAAI,CAACA,QAAQ,CAACwC,SAAS,EAAE;IACrBxC,QAAQ,CAACwC,SAAS,GAAG,GAAG;EAC5B;EAEA,IAAM/B,WAAW,GAAG,IAAAC,2CAA2B,EAAC6I,MAAM,CAAC1J,MAAM,CAACc,UAAU,CAAC;EAEzE,IAAM8I,QAA8D,GAAG,CAAC,CAAC;EACzE,IAAMC,UAAU,GAAGH,MAAM,CAAC1J,MAAM,CAACwE,OAAO,GAAGkF,MAAM,CAAC1J,MAAM,CAACwE,OAAO,CAACsF,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE;EAC9ED,UAAU,CAACxF,IAAI,CAAC,CAACzD,WAAW,CAAC,CAAC;EAC9B,IAAMmJ,eAAe,GAAGF,UAAU,CAACzG,GAAG,CAACuE,KAAK,IAAI;IAC5C,IAAMqC,OAAO,GAAG,IAAAC,mBAAO,EAACtC,KAAK,CAAC;IAC9B,OAAOqC,OAAO;EAClB,CAAC,CAAC;EACFD,eAAe,CAAC1F,IAAI,CAACuD,2BAAa,CAAC;EACnCmC,eAAe,CAAC3F,OAAO,CAAC,CAAC4F,OAAO,EAAEpF,OAAO,KAAK;IAC1C,IAAMiD,SAAS,GAAG,IAAAC,gCAAkB,EAACkC,OAAO,CAAC;IAC7CJ,QAAQ,CAAC/B,SAAS,CAAC,GAAG;MAClBjD,OAAO,EAAE,GAAG,GAAGA,OAAO,GAAG,GAAG;MAC5BiD,SAAS;MACTlD,kBAAkB,EAAE,IAAAuF,oCAAuB,EAACR,MAAM,CAAC1J,MAAM,EAAEgK,OAAO,CAAC;MACnErC,KAAK,EAAEqC;IACX,CAAC;EACL,CAAC,CAAC;EAEF,IAAM/J,SAAS,GAAG;IACduE,OAAO,EAAEoF;EACb,CAAC;EACD,IAAMO,QAAQ,GAAG,IAAIxK,uBAAuB,CACxCE,OAAO,EACP6J,MAAM,CAAC5J,YAAY,EACnB4J,MAAM,CAAC3J,cAAc,EACrB2J,MAAM,CAAC1J,MAAM,EACbC,SAAS,EACTyJ,MAAM,CAACxJ,OAAO,EACdC,QACJ,CAAC;EAED,MAAM,IAAAiK,wDAAgC,EAClCC,oCAAsB,EACtBX,MAAM,EACNS,QACJ,CAAC;EAED,OAAOjH,OAAO,CAACoH,OAAO,CAACH,QAAQ,CAAC;AACpC;AAIA,SAASlB,eAAeA,CACpBkB,QAAsC,EACxC;EACE,IAAIA,QAAQ,CAACrB,MAAM,EAAE;IACjB,MAAM,IAAI1B,KAAK,CAAC,oCAAoC,GAAG+C,QAAQ,CAACrK,YAAY,GAAG,GAAG,GAAGqK,QAAQ,CAACpK,cAAc,CAAC;EACjH;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-dexie/dexie-helper.js b/dist/cjs/plugins/storage-dexie/dexie-helper.js deleted file mode 100644 index 63b00810fd1..00000000000 --- a/dist/cjs/plugins/storage-dexie/dexie-helper.js +++ /dev/null @@ -1,254 +0,0 @@ -"use strict"; - 
-Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RX_STORAGE_NAME_DEXIE = exports.DEXIE_PIPE_SUBSTITUTE = exports.DEXIE_DOCS_TABLE_NAME = exports.DEXIE_CHANGES_TABLE_NAME = exports.DEXIE_ATTACHMENTS_TABLE_NAME = void 0; -exports.attachmentObjectId = attachmentObjectId; -exports.closeDexieDb = closeDexieDb; -exports.dexieReplaceIfStartsWithPipe = dexieReplaceIfStartsWithPipe; -exports.dexieReplaceIfStartsWithPipeRevert = dexieReplaceIfStartsWithPipeRevert; -exports.fromDexieToStorage = fromDexieToStorage; -exports.fromDexieToStorageField = fromDexieToStorageField; -exports.fromStorageToDexie = fromStorageToDexie; -exports.fromStorageToDexieField = fromStorageToDexieField; -exports.getBooleanIndexes = getBooleanIndexes; -exports.getDexieDbWithTables = getDexieDbWithTables; -exports.getDexieStoreSchema = getDexieStoreSchema; -exports.getDocsInDb = getDocsInDb; -var _dexie = require("dexie"); -var _index = require("../utils/index.js"); -var _rxSchemaHelper = require("../../rx-schema-helper.js"); -var DEXIE_DOCS_TABLE_NAME = exports.DEXIE_DOCS_TABLE_NAME = 'docs'; -var DEXIE_CHANGES_TABLE_NAME = exports.DEXIE_CHANGES_TABLE_NAME = 'changes'; -var DEXIE_ATTACHMENTS_TABLE_NAME = exports.DEXIE_ATTACHMENTS_TABLE_NAME = 'attachments'; -var RX_STORAGE_NAME_DEXIE = exports.RX_STORAGE_NAME_DEXIE = 'dexie'; -var DEXIE_STATE_DB_BY_NAME = new Map(); -var REF_COUNT_PER_DEXIE_DB = new Map(); -function getDexieDbWithTables(databaseName, collectionName, settings, schema) { - var dexieDbName = 'rxdb-dexie-' + databaseName + '--' + schema.version + '--' + collectionName; - var state = (0, _index.getFromMapOrCreate)(DEXIE_STATE_DB_BY_NAME, dexieDbName, () => { - var value = (async () => { - /** - * IndexedDB was not designed for dynamically adding tables on the fly, - * so we create one dexie database per RxDB storage instance. 
- * @link https://github.com/dexie/Dexie.js/issues/684#issuecomment-373224696 - */ - var useSettings = (0, _index.flatClone)(settings); - useSettings.autoOpen = false; - var dexieDb = new _dexie.Dexie(dexieDbName, useSettings); - var dexieStoresSettings = { - [DEXIE_DOCS_TABLE_NAME]: getDexieStoreSchema(schema), - [DEXIE_CHANGES_TABLE_NAME]: '++sequence, id', - [DEXIE_ATTACHMENTS_TABLE_NAME]: 'id' - }; - dexieDb.version(1).stores(dexieStoresSettings); - await dexieDb.open(); - return { - dexieDb, - dexieTable: dexieDb[DEXIE_DOCS_TABLE_NAME], - dexieAttachmentsTable: dexieDb[DEXIE_ATTACHMENTS_TABLE_NAME], - booleanIndexes: getBooleanIndexes(schema) - }; - })(); - DEXIE_STATE_DB_BY_NAME.set(dexieDbName, state); - REF_COUNT_PER_DEXIE_DB.set(state, 0); - return value; - }); - return state; -} -async function closeDexieDb(statePromise) { - var state = await statePromise; - var prevCount = REF_COUNT_PER_DEXIE_DB.get(statePromise); - var newCount = prevCount - 1; - if (newCount === 0) { - state.dexieDb.close(); - REF_COUNT_PER_DEXIE_DB.delete(statePromise); - } else { - REF_COUNT_PER_DEXIE_DB.set(statePromise, newCount); - } -} - -/** - * It is not possible to set non-javascript-variable-syntax - * keys as IndexedDB indexes. So we have to substitute the pipe-char - * which comes from the key-compression plugin. 
- */ -var DEXIE_PIPE_SUBSTITUTE = exports.DEXIE_PIPE_SUBSTITUTE = '__'; -function dexieReplaceIfStartsWithPipe(str) { - var split = str.split('.'); - if (split.length > 1) { - return split.map(part => dexieReplaceIfStartsWithPipe(part)).join('.'); - } - if (str.startsWith('|')) { - var withoutFirst = str.substring(1); - return DEXIE_PIPE_SUBSTITUTE + withoutFirst; - } else { - return str; - } -} -function dexieReplaceIfStartsWithPipeRevert(str) { - var split = str.split('.'); - if (split.length > 1) { - return split.map(part => dexieReplaceIfStartsWithPipeRevert(part)).join('.'); - } - if (str.startsWith(DEXIE_PIPE_SUBSTITUTE)) { - var withoutFirst = str.substring(DEXIE_PIPE_SUBSTITUTE.length); - return '|' + withoutFirst; - } else { - return str; - } -} - -/** - * IndexedDB does not support boolean indexing. - * So we have to replace true/false with '1'/'0' - * @param d - */ -function fromStorageToDexie(booleanIndexes, d) { - if (!d) { - return d; - } - d = (0, _index.flatClone)(d); - d = fromStorageToDexieField(d); - booleanIndexes.forEach(idx => { - var val = (0, _index.getProperty)(d, idx); - var newVal = val ? '1' : '0'; - (0, _index.setProperty)(d, idx, newVal); - }); - return d; -} -function fromDexieToStorage(booleanIndexes, d) { - if (!d) { - return d; - } - d = (0, _index.flatClone)(d); - d = fromDexieToStorageField(d); - booleanIndexes.forEach(idx => { - var val = (0, _index.getProperty)(d, idx); - var newVal = val === '1' ? 
true : false; - (0, _index.setProperty)(d, idx, newVal); - }); - return d; -} - -/** - * @recursive - */ -function fromStorageToDexieField(documentData) { - if (!documentData || typeof documentData === 'string' || typeof documentData === 'number' || typeof documentData === 'boolean') { - return documentData; - } else if (Array.isArray(documentData)) { - return documentData.map(row => fromStorageToDexieField(row)); - } else if (typeof documentData === 'object') { - var ret = {}; - Object.entries(documentData).forEach(([key, value]) => { - if (typeof value === 'object') { - value = fromStorageToDexieField(value); - } - ret[dexieReplaceIfStartsWithPipe(key)] = value; - }); - return ret; - } -} -function fromDexieToStorageField(documentData) { - if (!documentData || typeof documentData === 'string' || typeof documentData === 'number' || typeof documentData === 'boolean') { - return documentData; - } else if (Array.isArray(documentData)) { - return documentData.map(row => fromDexieToStorageField(row)); - } else if (typeof documentData === 'object') { - var ret = {}; - Object.entries(documentData).forEach(([key, value]) => { - if (typeof value === 'object' || Array.isArray(documentData)) { - value = fromDexieToStorageField(value); - } - ret[dexieReplaceIfStartsWithPipeRevert(key)] = value; - }); - return ret; - } -} - -/** - * Creates a string that can be used to create the dexie store. 
- * @link https://dexie.org/docs/API-Reference#quick-reference - */ -function getDexieStoreSchema(rxJsonSchema) { - var parts = []; - - /** - * First part must be the primary key - * @link https://github.com/dexie/Dexie.js/issues/1307#issuecomment-846590912 - */ - var primaryKey = (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(rxJsonSchema.primaryKey); - parts.push([primaryKey]); - parts.push(['_deleted', primaryKey]); - - // add other indexes - if (rxJsonSchema.indexes) { - rxJsonSchema.indexes.forEach(index => { - var arIndex = (0, _index.toArray)(index); - parts.push(arIndex); - }); - } - - // we also need the _meta.lwt+primaryKey index for the getChangedDocumentsSince() method. - parts.push(['_meta.lwt', primaryKey]); - - // and this one for the cleanup() - parts.push(['_meta.lwt']); - - /** - * It is not possible to set non-javascript-variable-syntax - * keys as IndexedDB indexes. So we have to substitute the pipe-char - * which comes from the key-compression plugin. - */ - parts = parts.map(part => { - return part.map(str => dexieReplaceIfStartsWithPipe(str)); - }); - var dexieSchemaRows = parts.map(part => { - if (part.length === 1) { - return part[0]; - } else { - return '[' + part.join('+') + ']'; - } - }); - dexieSchemaRows = dexieSchemaRows.filter((elem, pos, arr) => arr.indexOf(elem) === pos); // unique; - var dexieSchema = dexieSchemaRows.join(', '); - return dexieSchema; -} - -/** - * Returns all documents in the database. - * Non-deleted plus deleted ones. 
- */ -async function getDocsInDb(internals, docIds) { - var state = await internals; - var docsInDb = await state.dexieTable.bulkGet(docIds); - return docsInDb.map(d => fromDexieToStorage(state.booleanIndexes, d)); -} -function attachmentObjectId(documentId, attachmentId) { - return documentId + '||' + attachmentId; -} -function getBooleanIndexes(schema) { - var checkedFields = new Set(); - var ret = []; - if (!schema.indexes) { - return ret; - } - schema.indexes.forEach(index => { - var fields = (0, _index.toArray)(index); - fields.forEach(field => { - if (checkedFields.has(field)) { - return; - } - checkedFields.add(field); - var schemaObj = (0, _rxSchemaHelper.getSchemaByObjectPath)(schema, field); - if (schemaObj.type === 'boolean') { - ret.push(field); - } - }); - }); - ret.push('_deleted'); - return (0, _index.uniqueArray)(ret); -} -//# sourceMappingURL=dexie-helper.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-dexie/dexie-helper.js.map b/dist/cjs/plugins/storage-dexie/dexie-helper.js.map deleted file mode 100644 index c4b71d86b71..00000000000 --- a/dist/cjs/plugins/storage-dexie/dexie-helper.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"dexie-helper.js","names":["_dexie","require","_index","_rxSchemaHelper","DEXIE_DOCS_TABLE_NAME","exports","DEXIE_CHANGES_TABLE_NAME","DEXIE_ATTACHMENTS_TABLE_NAME","RX_STORAGE_NAME_DEXIE","DEXIE_STATE_DB_BY_NAME","Map","REF_COUNT_PER_DEXIE_DB","getDexieDbWithTables","databaseName","collectionName","settings","schema","dexieDbName","version","state","getFromMapOrCreate","value","useSettings","flatClone","autoOpen","dexieDb","Dexie","dexieStoresSettings","getDexieStoreSchema","stores","open","dexieTable","dexieAttachmentsTable","booleanIndexes","getBooleanIndexes","set","closeDexieDb","statePromise","prevCount","get","newCount","close","delete","DEXIE_PIPE_SUBSTITUTE","dexieReplaceIfStartsWithPipe","str","split","length","map","part","join","startsWith","withoutFirst","substring","dexieReplaceIfStartsWithPipeRevert","fromStorageToDexie","d","fromStorageToDexieField","forEach","idx","val","getProperty","newVal","setProperty","fromDexieToStorage","fromDexieToStorageField","documentData","Array","isArray","row","ret","Object","entries","key","rxJsonSchema","parts","primaryKey","getPrimaryFieldOfPrimaryKey","push","indexes","index","arIndex","toArray","dexieSchemaRows","filter","elem","pos","arr","indexOf","dexieSchema","getDocsInDb","internals","docIds","docsInDb","bulkGet","attachmentObjectId","documentId","attachmentId","checkedFields","Set","fields","field","has","add","schemaObj","getSchemaByObjectPath","type","uniqueArray"],"sources":["../../../../src/plugins/storage-dexie/dexie-helper.ts"],"sourcesContent":["import type {\n DexieStorageInternals,\n RxDocumentData,\n RxJsonSchema\n} from '../../types/index.d.ts';\nimport { Dexie } from 'dexie';\nimport type { DexieSettings } from '../../types/index.d.ts';\nimport { flatClone, getFromMapOrCreate, getProperty, setProperty, toArray, uniqueArray } from '../utils/index.ts';\nimport {\n getPrimaryFieldOfPrimaryKey,\n getSchemaByObjectPath\n} from '../../rx-schema-helper.ts';\n\nexport const 
DEXIE_DOCS_TABLE_NAME = 'docs';\nexport const DEXIE_CHANGES_TABLE_NAME = 'changes';\nexport const DEXIE_ATTACHMENTS_TABLE_NAME = 'attachments';\n\nexport const RX_STORAGE_NAME_DEXIE = 'dexie';\n\nconst DEXIE_STATE_DB_BY_NAME: Map = new Map();\nconst REF_COUNT_PER_DEXIE_DB: Map = new Map();\nexport function getDexieDbWithTables(\n databaseName: string,\n collectionName: string,\n settings: DexieSettings,\n schema: RxJsonSchema\n): DexieStorageInternals {\n const dexieDbName = 'rxdb-dexie-' + databaseName + '--' + schema.version + '--' + collectionName;\n\n const state = getFromMapOrCreate(\n DEXIE_STATE_DB_BY_NAME,\n dexieDbName,\n () => {\n const value = (async () => {\n /**\n * IndexedDB was not designed for dynamically adding tables on the fly,\n * so we create one dexie database per RxDB storage instance.\n * @link https://github.com/dexie/Dexie.js/issues/684#issuecomment-373224696\n */\n const useSettings = flatClone(settings);\n useSettings.autoOpen = false;\n const dexieDb = new Dexie(dexieDbName, useSettings);\n const dexieStoresSettings = {\n [DEXIE_DOCS_TABLE_NAME]: getDexieStoreSchema(schema),\n [DEXIE_CHANGES_TABLE_NAME]: '++sequence, id',\n [DEXIE_ATTACHMENTS_TABLE_NAME]: 'id'\n };\n\n dexieDb.version(1).stores(dexieStoresSettings);\n await dexieDb.open();\n\n return {\n dexieDb,\n dexieTable: (dexieDb as any)[DEXIE_DOCS_TABLE_NAME],\n dexieAttachmentsTable: (dexieDb as any)[DEXIE_ATTACHMENTS_TABLE_NAME],\n booleanIndexes: getBooleanIndexes(schema)\n };\n })();\n DEXIE_STATE_DB_BY_NAME.set(dexieDbName, state);\n REF_COUNT_PER_DEXIE_DB.set(state, 0);\n return value;\n }\n );\n return state;\n}\n\nexport async function closeDexieDb(statePromise: DexieStorageInternals) {\n const state = await statePromise;\n const prevCount = REF_COUNT_PER_DEXIE_DB.get(statePromise);\n const newCount = (prevCount as any) - 1;\n if (newCount === 0) {\n state.dexieDb.close();\n REF_COUNT_PER_DEXIE_DB.delete(statePromise);\n } else {\n REF_COUNT_PER_DEXIE_DB.set(statePromise, 
newCount);\n }\n}\n\n\n\n/**\n * It is not possible to set non-javascript-variable-syntax\n * keys as IndexedDB indexes. So we have to substitute the pipe-char\n * which comes from the key-compression plugin.\n */\nexport const DEXIE_PIPE_SUBSTITUTE = '__';\nexport function dexieReplaceIfStartsWithPipe(str: string): string {\n const split = str.split('.');\n if (split.length > 1) {\n return split.map(part => dexieReplaceIfStartsWithPipe(part)).join('.');\n }\n\n if (str.startsWith('|')) {\n const withoutFirst = str.substring(1);\n return DEXIE_PIPE_SUBSTITUTE + withoutFirst;\n } else {\n return str;\n }\n}\n\nexport function dexieReplaceIfStartsWithPipeRevert(str: string): string {\n const split = str.split('.');\n if (split.length > 1) {\n return split.map(part => dexieReplaceIfStartsWithPipeRevert(part)).join('.');\n }\n\n if (str.startsWith(DEXIE_PIPE_SUBSTITUTE)) {\n const withoutFirst = str.substring(DEXIE_PIPE_SUBSTITUTE.length);\n return '|' + withoutFirst;\n } else {\n return str;\n }\n}\n\n\n/**\n * IndexedDB does not support boolean indexing.\n * So we have to replace true/false with '1'/'0'\n * @param d \n */\nexport function fromStorageToDexie(\n booleanIndexes: string[],\n d: RxDocumentData\n): any {\n if (!d) {\n return d;\n }\n d = flatClone(d);\n d = fromStorageToDexieField(d);\n\n booleanIndexes.forEach(idx => {\n const val = getProperty(d, idx);\n const newVal = val ? '1' : '0';\n setProperty(d, idx, newVal);\n });\n\n return d;\n}\nexport function fromDexieToStorage(\n booleanIndexes: string[],\n d: any\n): RxDocumentData {\n if (!d) {\n return d;\n }\n\n d = flatClone(d);\n d = fromDexieToStorageField(d);\n\n booleanIndexes.forEach(idx => {\n const val = getProperty(d, idx);\n const newVal = val === '1' ? 
true : false;\n setProperty(d, idx, newVal);\n });\n\n return d;\n}\n\n/**\n * @recursive\n */\nexport function fromStorageToDexieField(documentData: RxDocumentData): any {\n if (\n !documentData ||\n typeof documentData === 'string' ||\n typeof documentData === 'number' ||\n typeof documentData === 'boolean'\n ) {\n return documentData;\n } else if (Array.isArray(documentData)) {\n return documentData.map(row => fromStorageToDexieField(row));\n } else if (typeof documentData === 'object') {\n const ret: any = {};\n Object.entries(documentData).forEach(([key, value]) => {\n if (typeof value === 'object') {\n value = fromStorageToDexieField(value);\n }\n ret[dexieReplaceIfStartsWithPipe(key)] = value;\n });\n return ret;\n }\n}\n\nexport function fromDexieToStorageField(documentData: any): RxDocumentData {\n if (!documentData || typeof documentData === 'string' || typeof documentData === 'number' || typeof documentData === 'boolean') {\n return documentData;\n } else if (Array.isArray(documentData)) {\n return documentData.map(row => fromDexieToStorageField(row));\n } else if (typeof documentData === 'object') {\n const ret: any = {};\n Object.entries(documentData).forEach(([key, value]) => {\n if (typeof value === 'object' || Array.isArray(documentData)) {\n value = fromDexieToStorageField(value);\n }\n ret[dexieReplaceIfStartsWithPipeRevert(key)] = value;\n });\n return ret;\n }\n}\n\n\n/**\n * Creates a string that can be used to create the dexie store.\n * @link https://dexie.org/docs/API-Reference#quick-reference\n */\nexport function getDexieStoreSchema(\n rxJsonSchema: RxJsonSchema\n): string {\n let parts: string[][] = [];\n\n /**\n * First part must be the primary key\n * @link https://github.com/dexie/Dexie.js/issues/1307#issuecomment-846590912\n */\n const primaryKey = getPrimaryFieldOfPrimaryKey(rxJsonSchema.primaryKey);\n parts.push([primaryKey]);\n parts.push(['_deleted', primaryKey]);\n\n // add other indexes\n if (rxJsonSchema.indexes) {\n 
rxJsonSchema.indexes.forEach(index => {\n const arIndex = toArray(index);\n parts.push(arIndex);\n });\n }\n\n // we also need the _meta.lwt+primaryKey index for the getChangedDocumentsSince() method.\n parts.push(['_meta.lwt', primaryKey]);\n\n // and this one for the cleanup()\n parts.push(['_meta.lwt']);\n\n /**\n * It is not possible to set non-javascript-variable-syntax\n * keys as IndexedDB indexes. So we have to substitute the pipe-char\n * which comes from the key-compression plugin.\n */\n parts = parts.map(part => {\n return part.map(str => dexieReplaceIfStartsWithPipe(str));\n });\n\n let dexieSchemaRows = parts.map(part => {\n if (part.length === 1) {\n return part[0];\n } else {\n return '[' + part.join('+') + ']';\n }\n });\n dexieSchemaRows = dexieSchemaRows.filter((elem: any, pos: any, arr: any) => arr.indexOf(elem) === pos); // unique;\n const dexieSchema = dexieSchemaRows.join(', ');\n\n return dexieSchema;\n}\n\n/**\n * Returns all documents in the database.\n * Non-deleted plus deleted ones.\n */\nexport async function getDocsInDb(\n internals: DexieStorageInternals,\n docIds: string[]\n): Promise[]> {\n const state = await internals;\n const docsInDb = await state.dexieTable.bulkGet(docIds);\n return docsInDb.map(d => fromDexieToStorage(state.booleanIndexes, d));\n}\n\n\nexport function attachmentObjectId(documentId: string, attachmentId: string): string {\n return documentId + '||' + attachmentId;\n}\n\n\nexport function getBooleanIndexes(schema: RxJsonSchema): string[] {\n const checkedFields = new Set();\n const ret: string[] = [];\n if (!schema.indexes) {\n return ret;\n }\n schema.indexes.forEach(index => {\n const fields = toArray(index);\n fields.forEach(field => {\n if (checkedFields.has(field)) {\n return;\n }\n checkedFields.add(field);\n const schemaObj = getSchemaByObjectPath(schema, field);\n if (schemaObj.type === 'boolean') {\n ret.push(field);\n }\n });\n });\n ret.push('_deleted');\n\n return 
uniqueArray(ret);\n}\n\n"],"mappings":";;;;;;;;;;;;;;;;;;AAKA,IAAAA,MAAA,GAAAC,OAAA;AAEA,IAAAC,MAAA,GAAAD,OAAA;AACA,IAAAE,eAAA,GAAAF,OAAA;AAKO,IAAMG,qBAAqB,GAAAC,OAAA,CAAAD,qBAAA,GAAG,MAAM;AACpC,IAAME,wBAAwB,GAAAD,OAAA,CAAAC,wBAAA,GAAG,SAAS;AAC1C,IAAMC,4BAA4B,GAAAF,OAAA,CAAAE,4BAAA,GAAG,aAAa;AAElD,IAAMC,qBAAqB,GAAAH,OAAA,CAAAG,qBAAA,GAAG,OAAO;AAE5C,IAAMC,sBAA0D,GAAG,IAAIC,GAAG,CAAC,CAAC;AAC5E,IAAMC,sBAA0D,GAAG,IAAID,GAAG,CAAC,CAAC;AACrE,SAASE,oBAAoBA,CAChCC,YAAoB,EACpBC,cAAsB,EACtBC,QAAuB,EACvBC,MAAyB,EACJ;EACrB,IAAMC,WAAW,GAAG,aAAa,GAAGJ,YAAY,GAAG,IAAI,GAAGG,MAAM,CAACE,OAAO,GAAG,IAAI,GAAGJ,cAAc;EAEhG,IAAMK,KAAK,GAAG,IAAAC,yBAAkB,EAC5BX,sBAAsB,EACtBQ,WAAW,EACX,MAAM;IACF,IAAMI,KAAK,GAAG,CAAC,YAAY;MACvB;AAChB;AACA;AACA;AACA;MACgB,IAAMC,WAAW,GAAG,IAAAC,gBAAS,EAACR,QAAQ,CAAC;MACvCO,WAAW,CAACE,QAAQ,GAAG,KAAK;MAC5B,IAAMC,OAAO,GAAG,IAAIC,YAAK,CAACT,WAAW,EAAEK,WAAW,CAAC;MACnD,IAAMK,mBAAmB,GAAG;QACxB,CAACvB,qBAAqB,GAAGwB,mBAAmB,CAACZ,MAAM,CAAC;QACpD,CAACV,wBAAwB,GAAG,gBAAgB;QAC5C,CAACC,4BAA4B,GAAG;MACpC,CAAC;MAEDkB,OAAO,CAACP,OAAO,CAAC,CAAC,CAAC,CAACW,MAAM,CAACF,mBAAmB,CAAC;MAC9C,MAAMF,OAAO,CAACK,IAAI,CAAC,CAAC;MAEpB,OAAO;QACHL,OAAO;QACPM,UAAU,EAAGN,OAAO,CAASrB,qBAAqB,CAAC;QACnD4B,qBAAqB,EAAGP,OAAO,CAASlB,4BAA4B,CAAC;QACrE0B,cAAc,EAAEC,iBAAiB,CAAClB,MAAM;MAC5C,CAAC;IACL,CAAC,EAAE,CAAC;IACJP,sBAAsB,CAAC0B,GAAG,CAAClB,WAAW,EAAEE,KAAK,CAAC;IAC9CR,sBAAsB,CAACwB,GAAG,CAAChB,KAAK,EAAE,CAAC,CAAC;IACpC,OAAOE,KAAK;EAChB,CACJ,CAAC;EACD,OAAOF,KAAK;AAChB;AAEO,eAAeiB,YAAYA,CAACC,YAAmC,EAAE;EACpE,IAAMlB,KAAK,GAAG,MAAMkB,YAAY;EAChC,IAAMC,SAAS,GAAG3B,sBAAsB,CAAC4B,GAAG,CAACF,YAAY,CAAC;EAC1D,IAAMG,QAAQ,GAAIF,SAAS,GAAW,CAAC;EACvC,IAAIE,QAAQ,KAAK,CAAC,EAAE;IAChBrB,KAAK,CAACM,OAAO,CAACgB,KAAK,CAAC,CAAC;IACrB9B,sBAAsB,CAAC+B,MAAM,CAACL,YAAY,CAAC;EAC/C,CAAC,MAAM;IACH1B,sBAAsB,CAACwB,GAAG,CAACE,YAAY,EAAEG,QAAQ,CAAC;EACtD;AACJ;;AAIA;AACA;AACA;AACA;AACA;AACO,IAAMG,qBAAqB,GAAAtC,OAAA,CAAAsC,qBAAA,GAAG,IAAI;AAClC,SAASC,4BAA4BA,CAACC,GAAW,EAAU;EAC9D,IAAMC,KAAK,GAAGD,GAAG,CAACC,KAAK,CAAC,GAAG,CAAC;EAC5B
,IAAIA,KAAK,CAACC,MAAM,GAAG,CAAC,EAAE;IAClB,OAAOD,KAAK,CAACE,GAAG,CAACC,IAAI,IAAIL,4BAA4B,CAACK,IAAI,CAAC,CAAC,CAACC,IAAI,CAAC,GAAG,CAAC;EAC1E;EAEA,IAAIL,GAAG,CAACM,UAAU,CAAC,GAAG,CAAC,EAAE;IACrB,IAAMC,YAAY,GAAGP,GAAG,CAACQ,SAAS,CAAC,CAAC,CAAC;IACrC,OAAOV,qBAAqB,GAAGS,YAAY;EAC/C,CAAC,MAAM;IACH,OAAOP,GAAG;EACd;AACJ;AAEO,SAASS,kCAAkCA,CAACT,GAAW,EAAU;EACpE,IAAMC,KAAK,GAAGD,GAAG,CAACC,KAAK,CAAC,GAAG,CAAC;EAC5B,IAAIA,KAAK,CAACC,MAAM,GAAG,CAAC,EAAE;IAClB,OAAOD,KAAK,CAACE,GAAG,CAACC,IAAI,IAAIK,kCAAkC,CAACL,IAAI,CAAC,CAAC,CAACC,IAAI,CAAC,GAAG,CAAC;EAChF;EAEA,IAAIL,GAAG,CAACM,UAAU,CAACR,qBAAqB,CAAC,EAAE;IACvC,IAAMS,YAAY,GAAGP,GAAG,CAACQ,SAAS,CAACV,qBAAqB,CAACI,MAAM,CAAC;IAChE,OAAO,GAAG,GAAGK,YAAY;EAC7B,CAAC,MAAM;IACH,OAAOP,GAAG;EACd;AACJ;;AAGA;AACA;AACA;AACA;AACA;AACO,SAASU,kBAAkBA,CAC9BtB,cAAwB,EACxBuB,CAA4B,EACzB;EACH,IAAI,CAACA,CAAC,EAAE;IACJ,OAAOA,CAAC;EACZ;EACAA,CAAC,GAAG,IAAAjC,gBAAS,EAACiC,CAAC,CAAC;EAChBA,CAAC,GAAGC,uBAAuB,CAACD,CAAC,CAAC;EAE9BvB,cAAc,CAACyB,OAAO,CAACC,GAAG,IAAI;IAC1B,IAAMC,GAAG,GAAG,IAAAC,kBAAW,EAACL,CAAC,EAAEG,GAAG,CAAC;IAC/B,IAAMG,MAAM,GAAGF,GAAG,GAAG,GAAG,GAAG,GAAG;IAC9B,IAAAG,kBAAW,EAACP,CAAC,EAAEG,GAAG,EAAEG,MAAM,CAAC;EAC/B,CAAC,CAAC;EAEF,OAAON,CAAC;AACZ;AACO,SAASQ,kBAAkBA,CAC9B/B,cAAwB,EACxBuB,CAAM,EACmB;EACzB,IAAI,CAACA,CAAC,EAAE;IACJ,OAAOA,CAAC;EACZ;EAEAA,CAAC,GAAG,IAAAjC,gBAAS,EAACiC,CAAC,CAAC;EAChBA,CAAC,GAAGS,uBAAuB,CAACT,CAAC,CAAC;EAE9BvB,cAAc,CAACyB,OAAO,CAACC,GAAG,IAAI;IAC1B,IAAMC,GAAG,GAAG,IAAAC,kBAAW,EAACL,CAAC,EAAEG,GAAG,CAAC;IAC/B,IAAMG,MAAM,GAAGF,GAAG,KAAK,GAAG,GAAG,IAAI,GAAG,KAAK;IACzC,IAAAG,kBAAW,EAACP,CAAC,EAAEG,GAAG,EAAEG,MAAM,CAAC;EAC/B,CAAC,CAAC;EAEF,OAAON,CAAC;AACZ;;AAEA;AACA;AACA;AACO,SAASC,uBAAuBA,CAACS,YAAiC,EAAO;EAC5E,IACI,CAACA,YAAY,IACb,OAAOA,YAAY,KAAK,QAAQ,IAChC,OAAOA,YAAY,KAAK,QAAQ,IAChC,OAAOA,YAAY,KAAK,SAAS,EACnC;IACE,OAAOA,YAAY;EACvB,CAAC,MAAM,IAAIC,KAAK,CAACC,OAAO,CAACF,YAAY,CAAC,EAAE;IACpC,OAAOA,YAAY,CAAClB,GAAG,CAACqB,GAAG,IAAIZ,uBAAuB,CAACY,GAAG,CAAC,CAAC;EAChE,CAAC,MAAM,IAAI,OAAOH,YAAY,KAAK,QAAQ,EAAE;IACzC,I
AAMI,GAAQ,GAAG,CAAC,CAAC;IACnBC,MAAM,CAACC,OAAO,CAACN,YAAY,CAAC,CAACR,OAAO,CAAC,CAAC,CAACe,GAAG,EAAEpD,KAAK,CAAC,KAAK;MACnD,IAAI,OAAOA,KAAK,KAAK,QAAQ,EAAE;QAC3BA,KAAK,GAAGoC,uBAAuB,CAACpC,KAAK,CAAC;MAC1C;MACAiD,GAAG,CAAC1B,4BAA4B,CAAC6B,GAAG,CAAC,CAAC,GAAGpD,KAAK;IAClD,CAAC,CAAC;IACF,OAAOiD,GAAG;EACd;AACJ;AAEO,SAASL,uBAAuBA,CAACC,YAAiB,EAAuB;EAC5E,IAAI,CAACA,YAAY,IAAI,OAAOA,YAAY,KAAK,QAAQ,IAAI,OAAOA,YAAY,KAAK,QAAQ,IAAI,OAAOA,YAAY,KAAK,SAAS,EAAE;IAC5H,OAAOA,YAAY;EACvB,CAAC,MAAM,IAAIC,KAAK,CAACC,OAAO,CAACF,YAAY,CAAC,EAAE;IACpC,OAAOA,YAAY,CAAClB,GAAG,CAACqB,GAAG,IAAIJ,uBAAuB,CAACI,GAAG,CAAC,CAAC;EAChE,CAAC,MAAM,IAAI,OAAOH,YAAY,KAAK,QAAQ,EAAE;IACzC,IAAMI,GAAQ,GAAG,CAAC,CAAC;IACnBC,MAAM,CAACC,OAAO,CAACN,YAAY,CAAC,CAACR,OAAO,CAAC,CAAC,CAACe,GAAG,EAAEpD,KAAK,CAAC,KAAK;MACnD,IAAI,OAAOA,KAAK,KAAK,QAAQ,IAAI8C,KAAK,CAACC,OAAO,CAACF,YAAY,CAAC,EAAE;QAC1D7C,KAAK,GAAG4C,uBAAuB,CAAC5C,KAAK,CAAC;MAC1C;MACAiD,GAAG,CAAChB,kCAAkC,CAACmB,GAAG,CAAC,CAAC,GAAGpD,KAAK;IACxD,CAAC,CAAC;IACF,OAAOiD,GAAG;EACd;AACJ;;AAGA;AACA;AACA;AACA;AACO,SAAS1C,mBAAmBA,CAC/B8C,YAA+B,EACzB;EACN,IAAIC,KAAiB,GAAG,EAAE;;EAE1B;AACJ;AACA;AACA;EACI,IAAMC,UAAU,GAAG,IAAAC,2CAA2B,EAACH,YAAY,CAACE,UAAU,CAAC;EACvED,KAAK,CAACG,IAAI,CAAC,CAACF,UAAU,CAAC,CAAC;EACxBD,KAAK,CAACG,IAAI,CAAC,CAAC,UAAU,EAAEF,UAAU,CAAC,CAAC;;EAEpC;EACA,IAAIF,YAAY,CAACK,OAAO,EAAE;IACtBL,YAAY,CAACK,OAAO,CAACrB,OAAO,CAACsB,KAAK,IAAI;MAClC,IAAMC,OAAO,GAAG,IAAAC,cAAO,EAACF,KAAK,CAAC;MAC9BL,KAAK,CAACG,IAAI,CAACG,OAAO,CAAC;IACvB,CAAC,CAAC;EACN;;EAEA;EACAN,KAAK,CAACG,IAAI,CAAC,CAAC,WAAW,EAAEF,UAAU,CAAC,CAAC;;EAErC;EACAD,KAAK,CAACG,IAAI,CAAC,CAAC,WAAW,CAAC,CAAC;;EAEzB;AACJ;AACA;AACA;AACA;EACIH,KAAK,GAAGA,KAAK,CAAC3B,GAAG,CAACC,IAAI,IAAI;IACtB,OAAOA,IAAI,CAACD,GAAG,CAACH,GAAG,IAAID,4BAA4B,CAACC,GAAG,CAAC,CAAC;EAC7D,CAAC,CAAC;EAEF,IAAIsC,eAAe,GAAGR,KAAK,CAAC3B,GAAG,CAACC,IAAI,IAAI;IACpC,IAAIA,IAAI,CAACF,MAAM,KAAK,CAAC,EAAE;MACnB,OAAOE,IAAI,CAAC,CAAC,CAAC;IAClB,CAAC,MAAM;MACH,OAAO,GAAG,GAAGA,IAAI,CAACC,IAAI,CAAC,GAAG,CAAC,GAAG,GAAG;IACrC;EACJ,CAAC,CAAC;EACFiC
,eAAe,GAAGA,eAAe,CAACC,MAAM,CAAC,CAACC,IAAS,EAAEC,GAAQ,EAAEC,GAAQ,KAAKA,GAAG,CAACC,OAAO,CAACH,IAAI,CAAC,KAAKC,GAAG,CAAC,CAAC,CAAC;EACxG,IAAMG,WAAW,GAAGN,eAAe,CAACjC,IAAI,CAAC,IAAI,CAAC;EAE9C,OAAOuC,WAAW;AACtB;;AAEA;AACA;AACA;AACA;AACO,eAAeC,WAAWA,CAC7BC,SAAgC,EAChCC,MAAgB,EACoB;EACpC,IAAMzE,KAAK,GAAG,MAAMwE,SAAS;EAC7B,IAAME,QAAQ,GAAG,MAAM1E,KAAK,CAACY,UAAU,CAAC+D,OAAO,CAACF,MAAM,CAAC;EACvD,OAAOC,QAAQ,CAAC7C,GAAG,CAACQ,CAAC,IAAIQ,kBAAkB,CAAC7C,KAAK,CAACc,cAAc,EAAEuB,CAAC,CAAC,CAAC;AACzE;AAGO,SAASuC,kBAAkBA,CAACC,UAAkB,EAAEC,YAAoB,EAAU;EACjF,OAAOD,UAAU,GAAG,IAAI,GAAGC,YAAY;AAC3C;AAGO,SAAS/D,iBAAiBA,CAAClB,MAAyB,EAAY;EACnE,IAAMkF,aAAa,GAAG,IAAIC,GAAG,CAAS,CAAC;EACvC,IAAM7B,GAAa,GAAG,EAAE;EACxB,IAAI,CAACtD,MAAM,CAAC+D,OAAO,EAAE;IACjB,OAAOT,GAAG;EACd;EACAtD,MAAM,CAAC+D,OAAO,CAACrB,OAAO,CAACsB,KAAK,IAAI;IAC5B,IAAMoB,MAAM,GAAG,IAAAlB,cAAO,EAACF,KAAK,CAAC;IAC7BoB,MAAM,CAAC1C,OAAO,CAAC2C,KAAK,IAAI;MACpB,IAAIH,aAAa,CAACI,GAAG,CAACD,KAAK,CAAC,EAAE;QAC1B;MACJ;MACAH,aAAa,CAACK,GAAG,CAACF,KAAK,CAAC;MACxB,IAAMG,SAAS,GAAG,IAAAC,qCAAqB,EAACzF,MAAM,EAAEqF,KAAK,CAAC;MACtD,IAAIG,SAAS,CAACE,IAAI,KAAK,SAAS,EAAE;QAC9BpC,GAAG,CAACQ,IAAI,CAACuB,KAAK,CAAC;MACnB;IACJ,CAAC,CAAC;EACN,CAAC,CAAC;EACF/B,GAAG,CAACQ,IAAI,CAAC,UAAU,CAAC;EAEpB,OAAO,IAAA6B,kBAAW,EAACrC,GAAG,CAAC;AAC3B","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-dexie/dexie-query.js b/dist/cjs/plugins/storage-dexie/dexie-query.js deleted file mode 100644 index 54f752d0d64..00000000000 --- a/dist/cjs/plugins/storage-dexie/dexie-query.js +++ /dev/null @@ -1,160 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.dexieCount = dexieCount; -exports.dexieQuery = dexieQuery; -exports.getKeyRangeByQueryPlan = getKeyRangeByQueryPlan; -exports.mapKeyForKeyRange = mapKeyForKeyRange; -var _queryPlanner = require("../../query-planner.js"); -var _rxQueryHelper = require("../../rx-query-helper.js"); -var _dexieHelper = require("./dexie-helper.js"); -function 
mapKeyForKeyRange(k) { - if (k === _queryPlanner.INDEX_MIN) { - return -Infinity; - } else { - return k; - } -} -function rangeFieldToBooleanSubstitute(booleanIndexes, fieldName, value) { - if (booleanIndexes.includes(fieldName)) { - var newValue = value === _queryPlanner.INDEX_MAX || value === true ? '1' : '0'; - return newValue; - } else { - return value; - } -} -function getKeyRangeByQueryPlan(booleanIndexes, queryPlan, IDBKeyRange) { - if (!IDBKeyRange) { - if (typeof window === 'undefined') { - throw new Error('IDBKeyRange missing'); - } else { - IDBKeyRange = window.IDBKeyRange; - } - } - var startKeys = queryPlan.startKeys.map((v, i) => { - var fieldName = queryPlan.index[i]; - return rangeFieldToBooleanSubstitute(booleanIndexes, fieldName, v); - }).map(mapKeyForKeyRange); - var endKeys = queryPlan.endKeys.map((v, i) => { - var fieldName = queryPlan.index[i]; - return rangeFieldToBooleanSubstitute(booleanIndexes, fieldName, v); - }).map(mapKeyForKeyRange); - var keyRange = IDBKeyRange.bound(startKeys, endKeys, !queryPlan.inclusiveStart, !queryPlan.inclusiveEnd); - return keyRange; -} - -/** - * Runs mango queries over the Dexie.js database. - */ -async function dexieQuery(instance, preparedQuery) { - var state = await instance.internals; - var query = preparedQuery.query; - var skip = query.skip ? query.skip : 0; - var limit = query.limit ? query.limit : Infinity; - var skipPlusLimit = skip + limit; - var queryPlan = preparedQuery.queryPlan; - var queryMatcher = false; - if (!queryPlan.selectorSatisfiedByIndex) { - queryMatcher = (0, _rxQueryHelper.getQueryMatcher)(instance.schema, preparedQuery.query); - } - var keyRange = getKeyRangeByQueryPlan(state.booleanIndexes, queryPlan, state.dexieDb._options.IDBKeyRange); - var queryPlanFields = queryPlan.index; - var rows = []; - await state.dexieDb.transaction('r', state.dexieTable, async dexieTx => { - /** - * TODO here we use the native IndexedDB transaction - * to get the cursor. 
- * Instead we should not leave Dexie.js API and find - * a way to create the cursor with Dexie.js. - */ - var tx = dexieTx.idbtrans; - - // const nativeIndexedDB = state.dexieDb.backendDB(); - // const trans = nativeIndexedDB.transaction([DEXIE_DOCS_TABLE_NAME], 'readonly'); - - var store = tx.objectStore(_dexieHelper.DEXIE_DOCS_TABLE_NAME); - var index; - var indexName; - indexName = '[' + queryPlanFields.map(field => (0, _dexieHelper.dexieReplaceIfStartsWithPipe)(field)).join('+') + ']'; - index = store.index(indexName); - var cursorReq = index.openCursor(keyRange); - await new Promise(res => { - cursorReq.onsuccess = function (e) { - var cursor = e.target.result; - if (cursor) { - // We have a record in cursor.value - var docData = (0, _dexieHelper.fromDexieToStorage)(state.booleanIndexes, cursor.value); - if (!queryMatcher || queryMatcher(docData)) { - rows.push(docData); - } - - /** - * If we do not have to manually sort - * and have enough documents, - * we can abort iterating over the cursor - * because we already have every relevant document. - */ - if (queryPlan.sortSatisfiedByIndex && rows.length === skipPlusLimit) { - res(); - } else { - cursor.continue(); - } - } else { - // Iteration complete - res(); - } - }; - }); - }); - if (!queryPlan.sortSatisfiedByIndex) { - var sortComparator = (0, _rxQueryHelper.getSortComparator)(instance.schema, preparedQuery.query); - rows = rows.sort(sortComparator); - } - - // apply skip and limit boundaries. - rows = rows.slice(skip, skipPlusLimit); - - /** - * Comment this in for debugging to check all fields in the database. 
- */ - // const docsInDb = await state.dexieTable.filter(queryMatcher).toArray(); - // let documents = docsInDb - // .map(docData => stripDexieKey(docData)) - // .sort(sortComparator); - // if (preparedQuery.skip) { - // documents = documents.slice(preparedQuery.skip); - // } - // if (preparedQuery.limit && documents.length > preparedQuery.limit) { - // documents = documents.slice(0, preparedQuery.limit); - // } - - return { - documents: rows - }; -} -async function dexieCount(instance, preparedQuery) { - var state = await instance.internals; - var queryPlan = preparedQuery.queryPlan; - var queryPlanFields = queryPlan.index; - var keyRange = getKeyRangeByQueryPlan(state.booleanIndexes, queryPlan, state.dexieDb._options.IDBKeyRange); - var count = -1; - await state.dexieDb.transaction('r', state.dexieTable, async dexieTx => { - var tx = dexieTx.idbtrans; - var store = tx.objectStore(_dexieHelper.DEXIE_DOCS_TABLE_NAME); - var index; - var indexName; - indexName = '[' + queryPlanFields.map(field => (0, _dexieHelper.dexieReplaceIfStartsWithPipe)(field)).join('+') + ']'; - index = store.index(indexName); - var request = index.count(keyRange); - count = await new Promise((res, rej) => { - request.onsuccess = function () { - res(request.result); - }; - request.onerror = err => rej(err); - }); - }); - return count; -} -//# sourceMappingURL=dexie-query.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-dexie/dexie-query.js.map b/dist/cjs/plugins/storage-dexie/dexie-query.js.map deleted file mode 100644 index 8636cdf1972..00000000000 --- a/dist/cjs/plugins/storage-dexie/dexie-query.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"dexie-query.js","names":["_queryPlanner","require","_rxQueryHelper","_dexieHelper","mapKeyForKeyRange","k","INDEX_MIN","Infinity","rangeFieldToBooleanSubstitute","booleanIndexes","fieldName","value","includes","newValue","INDEX_MAX","getKeyRangeByQueryPlan","queryPlan","IDBKeyRange","window","Error","startKeys","map","v","i","index","endKeys","keyRange","bound","inclusiveStart","inclusiveEnd","dexieQuery","instance","preparedQuery","state","internals","query","skip","limit","skipPlusLimit","queryMatcher","selectorSatisfiedByIndex","getQueryMatcher","schema","dexieDb","_options","queryPlanFields","rows","transaction","dexieTable","dexieTx","tx","idbtrans","store","objectStore","DEXIE_DOCS_TABLE_NAME","indexName","field","dexieReplaceIfStartsWithPipe","join","cursorReq","openCursor","Promise","res","onsuccess","e","cursor","target","result","docData","fromDexieToStorage","push","sortSatisfiedByIndex","length","continue","sortComparator","getSortComparator","sort","slice","documents","dexieCount","count","request","rej","onerror","err"],"sources":["../../../../src/plugins/storage-dexie/dexie-query.ts"],"sourcesContent":["import { INDEX_MAX, INDEX_MIN } from '../../query-planner.ts';\nimport { getQueryMatcher, getSortComparator } from '../../rx-query-helper.ts';\nimport type {\n PreparedQuery,\n QueryMatcher,\n RxDocumentData,\n RxQueryPlan,\n RxStorageQueryResult\n} from '../../types/index.d.ts';\nimport {\n dexieReplaceIfStartsWithPipe,\n DEXIE_DOCS_TABLE_NAME,\n fromDexieToStorage\n} from './dexie-helper.ts';\nimport type { RxStorageInstanceDexie } from './rx-storage-instance-dexie.ts';\n\nexport function mapKeyForKeyRange(k: any) {\n if (k === INDEX_MIN) {\n return -Infinity;\n } else {\n return k;\n }\n}\n\nfunction rangeFieldToBooleanSubstitute(\n booleanIndexes: string[],\n fieldName: string,\n value: any\n) {\n if (booleanIndexes.includes(fieldName)) {\n const newValue = value === INDEX_MAX || value === true ? 
'1' : '0';\n return newValue;\n } else {\n return value;\n }\n}\n\nexport function getKeyRangeByQueryPlan(\n booleanIndexes: string[],\n queryPlan: RxQueryPlan,\n IDBKeyRange?: any\n) {\n if (!IDBKeyRange) {\n if (typeof window === 'undefined') {\n throw new Error('IDBKeyRange missing');\n } else {\n IDBKeyRange = window.IDBKeyRange;\n }\n }\n\n\n const startKeys = queryPlan.startKeys\n .map((v, i) => {\n const fieldName = queryPlan.index[i];\n return rangeFieldToBooleanSubstitute(booleanIndexes, fieldName, v);\n })\n .map(mapKeyForKeyRange);\n const endKeys = queryPlan.endKeys\n .map((v, i) => {\n const fieldName = queryPlan.index[i];\n return rangeFieldToBooleanSubstitute(booleanIndexes, fieldName, v);\n })\n .map(mapKeyForKeyRange);\n\n const keyRange = IDBKeyRange.bound(\n startKeys,\n endKeys,\n !queryPlan.inclusiveStart,\n !queryPlan.inclusiveEnd\n );\n return keyRange;\n}\n\n\n/**\n * Runs mango queries over the Dexie.js database.\n */\nexport async function dexieQuery(\n instance: RxStorageInstanceDexie,\n preparedQuery: PreparedQuery\n): Promise> {\n const state = await instance.internals;\n const query = preparedQuery.query;\n\n const skip = query.skip ? query.skip : 0;\n const limit = query.limit ? 
query.limit : Infinity;\n const skipPlusLimit = skip + limit;\n const queryPlan = preparedQuery.queryPlan;\n\n let queryMatcher: QueryMatcher> | false = false;\n if (!queryPlan.selectorSatisfiedByIndex) {\n queryMatcher = getQueryMatcher(\n instance.schema,\n preparedQuery.query\n );\n }\n const keyRange = getKeyRangeByQueryPlan(\n state.booleanIndexes,\n queryPlan,\n (state.dexieDb as any)._options.IDBKeyRange\n );\n\n const queryPlanFields: string[] = queryPlan.index;\n\n let rows: any[] = [];\n await state.dexieDb.transaction(\n 'r',\n state.dexieTable,\n async (dexieTx) => {\n /**\n * TODO here we use the native IndexedDB transaction\n * to get the cursor.\n * Instead we should not leave Dexie.js API and find\n * a way to create the cursor with Dexie.js.\n */\n const tx = (dexieTx as any).idbtrans;\n\n // const nativeIndexedDB = state.dexieDb.backendDB();\n // const trans = nativeIndexedDB.transaction([DEXIE_DOCS_TABLE_NAME], 'readonly');\n\n const store = tx.objectStore(DEXIE_DOCS_TABLE_NAME);\n let index: any;\n let indexName: string;\n indexName = '[' +\n queryPlanFields\n .map(field => dexieReplaceIfStartsWithPipe(field))\n .join('+')\n + ']';\n index = store.index(indexName);\n\n\n const cursorReq = index.openCursor(keyRange);\n await new Promise(res => {\n cursorReq.onsuccess = function (e: any) {\n const cursor = e.target.result;\n if (cursor) {\n // We have a record in cursor.value\n const docData = fromDexieToStorage(state.booleanIndexes, cursor.value);\n if (!queryMatcher || queryMatcher(docData)) {\n rows.push(docData);\n }\n\n /**\n * If we do not have to manually sort\n * and have enough documents,\n * we can abort iterating over the cursor\n * because we already have every relevant document.\n */\n if (\n queryPlan.sortSatisfiedByIndex &&\n rows.length === skipPlusLimit\n ) {\n res();\n } else {\n cursor.continue();\n }\n } else {\n // Iteration complete\n res();\n }\n };\n });\n\n\n }\n );\n\n\n if (!queryPlan.sortSatisfiedByIndex) {\n const 
sortComparator = getSortComparator(instance.schema, preparedQuery.query);\n rows = rows.sort(sortComparator);\n }\n\n // apply skip and limit boundaries.\n rows = rows.slice(skip, skipPlusLimit);\n\n /**\n * Comment this in for debugging to check all fields in the database.\n */\n // const docsInDb = await state.dexieTable.filter(queryMatcher).toArray();\n // let documents = docsInDb\n // .map(docData => stripDexieKey(docData))\n // .sort(sortComparator);\n // if (preparedQuery.skip) {\n // documents = documents.slice(preparedQuery.skip);\n // }\n // if (preparedQuery.limit && documents.length > preparedQuery.limit) {\n // documents = documents.slice(0, preparedQuery.limit);\n // }\n\n\n\n return {\n documents: rows\n };\n}\n\n\nexport async function dexieCount(\n instance: RxStorageInstanceDexie,\n preparedQuery: PreparedQuery\n): Promise {\n const state = await instance.internals;\n const queryPlan = preparedQuery.queryPlan;\n const queryPlanFields: string[] = queryPlan.index;\n\n const keyRange = getKeyRangeByQueryPlan(\n state.booleanIndexes,\n queryPlan,\n (state.dexieDb as any)._options.IDBKeyRange\n );\n let count: number = -1;\n await state.dexieDb.transaction(\n 'r',\n state.dexieTable,\n async (dexieTx) => {\n const tx = (dexieTx as any).idbtrans;\n const store = tx.objectStore(DEXIE_DOCS_TABLE_NAME);\n let index: any;\n let indexName: string;\n indexName = '[' +\n queryPlanFields\n .map(field => dexieReplaceIfStartsWithPipe(field))\n .join('+')\n + ']';\n index = store.index(indexName);\n const request = index.count(keyRange);\n count = await new Promise((res, rej) => {\n request.onsuccess = function () {\n res(request.result);\n };\n request.onerror = (err: any) => rej(err);\n });\n }\n );\n return 
count;\n}\n"],"mappings":";;;;;;;;;AAAA,IAAAA,aAAA,GAAAC,OAAA;AACA,IAAAC,cAAA,GAAAD,OAAA;AAQA,IAAAE,YAAA,GAAAF,OAAA;AAOO,SAASG,iBAAiBA,CAACC,CAAM,EAAE;EACtC,IAAIA,CAAC,KAAKC,uBAAS,EAAE;IACjB,OAAO,CAACC,QAAQ;EACpB,CAAC,MAAM;IACH,OAAOF,CAAC;EACZ;AACJ;AAEA,SAASG,6BAA6BA,CAClCC,cAAwB,EACxBC,SAAiB,EACjBC,KAAU,EACZ;EACE,IAAIF,cAAc,CAACG,QAAQ,CAACF,SAAS,CAAC,EAAE;IACpC,IAAMG,QAAQ,GAAGF,KAAK,KAAKG,uBAAS,IAAIH,KAAK,KAAK,IAAI,GAAG,GAAG,GAAG,GAAG;IAClE,OAAOE,QAAQ;EACnB,CAAC,MAAM;IACH,OAAOF,KAAK;EAChB;AACJ;AAEO,SAASI,sBAAsBA,CAClCN,cAAwB,EACxBO,SAAsB,EACtBC,WAAiB,EACnB;EACE,IAAI,CAACA,WAAW,EAAE;IACd,IAAI,OAAOC,MAAM,KAAK,WAAW,EAAE;MAC/B,MAAM,IAAIC,KAAK,CAAC,qBAAqB,CAAC;IAC1C,CAAC,MAAM;MACHF,WAAW,GAAGC,MAAM,CAACD,WAAW;IACpC;EACJ;EAGA,IAAMG,SAAS,GAAGJ,SAAS,CAACI,SAAS,CAChCC,GAAG,CAAC,CAACC,CAAC,EAAEC,CAAC,KAAK;IACX,IAAMb,SAAS,GAAGM,SAAS,CAACQ,KAAK,CAACD,CAAC,CAAC;IACpC,OAAOf,6BAA6B,CAACC,cAAc,EAAEC,SAAS,EAAEY,CAAC,CAAC;EACtE,CAAC,CAAC,CACDD,GAAG,CAACjB,iBAAiB,CAAC;EAC3B,IAAMqB,OAAO,GAAGT,SAAS,CAACS,OAAO,CAC5BJ,GAAG,CAAC,CAACC,CAAC,EAAEC,CAAC,KAAK;IACX,IAAMb,SAAS,GAAGM,SAAS,CAACQ,KAAK,CAACD,CAAC,CAAC;IACpC,OAAOf,6BAA6B,CAACC,cAAc,EAAEC,SAAS,EAAEY,CAAC,CAAC;EACtE,CAAC,CAAC,CACDD,GAAG,CAACjB,iBAAiB,CAAC;EAE3B,IAAMsB,QAAQ,GAAGT,WAAW,CAACU,KAAK,CAC9BP,SAAS,EACTK,OAAO,EACP,CAACT,SAAS,CAACY,cAAc,EACzB,CAACZ,SAAS,CAACa,YACf,CAAC;EACD,OAAOH,QAAQ;AACnB;;AAGA;AACA;AACA;AACO,eAAeI,UAAUA,CAC5BC,QAA2C,EAC3CC,aAAuC,EACC;EACxC,IAAMC,KAAK,GAAG,MAAMF,QAAQ,CAACG,SAAS;EACtC,IAAMC,KAAK,GAAGH,aAAa,CAACG,KAAK;EAEjC,IAAMC,IAAI,GAAGD,KAAK,CAACC,IAAI,GAAGD,KAAK,CAACC,IAAI,GAAG,CAAC;EACxC,IAAMC,KAAK,GAAGF,KAAK,CAACE,KAAK,GAAGF,KAAK,CAACE,KAAK,GAAG9B,QAAQ;EAClD,IAAM+B,aAAa,GAAGF,IAAI,GAAGC,KAAK;EAClC,IAAMrB,SAAS,GAAGgB,aAAa,CAAChB,SAAS;EAEzC,IAAIuB,YAA6D,GAAG,KAAK;EACzE,IAAI,CAACvB,SAAS,CAACwB,wBAAwB,EAAE;IACrCD,YAAY,GAAG,IAAAE,8BAAe,EAC1BV,QAAQ,CAACW,MAAM,EACfV,aAAa,CAACG,KAClB,CAAC;EACL;EACA,IAAMT,QAAQ,GAAGX,sBAAsB,CACnCkB,KAAK,CAACxB,cAAc,EACpBO,SAAS,EACRiB,KAAK,CAACU,OAAO,CAASC,QAAQ,CAAC3B,WACpC,CAAC;EAE
D,IAAM4B,eAAyB,GAAG7B,SAAS,CAACQ,KAAK;EAEjD,IAAIsB,IAAW,GAAG,EAAE;EACpB,MAAMb,KAAK,CAACU,OAAO,CAACI,WAAW,CAC3B,GAAG,EACHd,KAAK,CAACe,UAAU,EAChB,MAAOC,OAAO,IAAK;IACf;AACZ;AACA;AACA;AACA;AACA;IACY,IAAMC,EAAE,GAAID,OAAO,CAASE,QAAQ;;IAEpC;IACA;;IAEA,IAAMC,KAAK,GAAGF,EAAE,CAACG,WAAW,CAACC,kCAAqB,CAAC;IACnD,IAAI9B,KAAU;IACd,IAAI+B,SAAiB;IACrBA,SAAS,GAAG,GAAG,GACXV,eAAe,CACVxB,GAAG,CAACmC,KAAK,IAAI,IAAAC,yCAA4B,EAACD,KAAK,CAAC,CAAC,CACjDE,IAAI,CAAC,GAAG,CAAC,GACZ,GAAG;IACTlC,KAAK,GAAG4B,KAAK,CAAC5B,KAAK,CAAC+B,SAAS,CAAC;IAG9B,IAAMI,SAAS,GAAGnC,KAAK,CAACoC,UAAU,CAAClC,QAAQ,CAAC;IAC5C,MAAM,IAAImC,OAAO,CAAOC,GAAG,IAAI;MAC3BH,SAAS,CAACI,SAAS,GAAG,UAAUC,CAAM,EAAE;QACpC,IAAMC,MAAM,GAAGD,CAAC,CAACE,MAAM,CAACC,MAAM;QAC9B,IAAIF,MAAM,EAAE;UACR;UACA,IAAMG,OAAO,GAAG,IAAAC,+BAAkB,EAAYpC,KAAK,CAACxB,cAAc,EAAEwD,MAAM,CAACtD,KAAK,CAAC;UACjF,IAAI,CAAC4B,YAAY,IAAIA,YAAY,CAAC6B,OAAO,CAAC,EAAE;YACxCtB,IAAI,CAACwB,IAAI,CAACF,OAAO,CAAC;UACtB;;UAEA;AACxB;AACA;AACA;AACA;AACA;UACwB,IACIpD,SAAS,CAACuD,oBAAoB,IAC9BzB,IAAI,CAAC0B,MAAM,KAAKlC,aAAa,EAC/B;YACEwB,GAAG,CAAC,CAAC;UACT,CAAC,MAAM;YACHG,MAAM,CAACQ,QAAQ,CAAC,CAAC;UACrB;QACJ,CAAC,MAAM;UACH;UACAX,GAAG,CAAC,CAAC;QACT;MACJ,CAAC;IACL,CAAC,CAAC;EAGN,CACJ,CAAC;EAGD,IAAI,CAAC9C,SAAS,CAACuD,oBAAoB,EAAE;IACjC,IAAMG,cAAc,GAAG,IAAAC,gCAAiB,EAAC5C,QAAQ,CAACW,MAAM,EAAEV,aAAa,CAACG,KAAK,CAAC;IAC9EW,IAAI,GAAGA,IAAI,CAAC8B,IAAI,CAACF,cAAc,CAAC;EACpC;;EAEA;EACA5B,IAAI,GAAGA,IAAI,CAAC+B,KAAK,CAACzC,IAAI,EAAEE,aAAa,CAAC;;EAEtC;AACJ;AACA;EACI;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;;EAIA,OAAO;IACHwC,SAAS,EAAEhC;EACf,CAAC;AACL;AAGO,eAAeiC,UAAUA,CAC5BhD,QAA2C,EAC3CC,aAAuC,EACxB;EACf,IAAMC,KAAK,GAAG,MAAMF,QAAQ,CAACG,SAAS;EACtC,IAAMlB,SAAS,GAAGgB,aAAa,CAAChB,SAAS;EACzC,IAAM6B,eAAyB,GAAG7B,SAAS,CAACQ,KAAK;EAEjD,IAAME,QAAQ,GAAGX,sBAAsB,CACnCkB,KAAK,CAACxB,cAAc,EACpBO,SAAS,EACRiB,KAAK,CAACU,OAAO,CAASC,QAAQ,CAAC3B,WACpC,CAAC;EACD,IAAI+D,KAAa,GAAG,CAAC,CAAC;EACtB,MAAM/C,KAAK,CAACU,OAAO,CAACI,WAAW,CAC3B,GAAG,EACHd,KAAK,CAACe,UAAU,EAChB,MAAOC,OAAO,IAAK;IACf,IAAMC,EAAE,G
AAID,OAAO,CAASE,QAAQ;IACpC,IAAMC,KAAK,GAAGF,EAAE,CAACG,WAAW,CAACC,kCAAqB,CAAC;IACnD,IAAI9B,KAAU;IACd,IAAI+B,SAAiB;IACrBA,SAAS,GAAG,GAAG,GACXV,eAAe,CACVxB,GAAG,CAACmC,KAAK,IAAI,IAAAC,yCAA4B,EAACD,KAAK,CAAC,CAAC,CACjDE,IAAI,CAAC,GAAG,CAAC,GACZ,GAAG;IACTlC,KAAK,GAAG4B,KAAK,CAAC5B,KAAK,CAAC+B,SAAS,CAAC;IAC9B,IAAM0B,OAAO,GAAGzD,KAAK,CAACwD,KAAK,CAACtD,QAAQ,CAAC;IACrCsD,KAAK,GAAG,MAAM,IAAInB,OAAO,CAAS,CAACC,GAAG,EAAEoB,GAAG,KAAK;MAC5CD,OAAO,CAAClB,SAAS,GAAG,YAAY;QAC5BD,GAAG,CAACmB,OAAO,CAACd,MAAM,CAAC;MACvB,CAAC;MACDc,OAAO,CAACE,OAAO,GAAIC,GAAQ,IAAKF,GAAG,CAACE,GAAG,CAAC;IAC5C,CAAC,CAAC;EACN,CACJ,CAAC;EACD,OAAOJ,KAAK;AAChB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-dexie/index.js b/dist/cjs/plugins/storage-dexie/index.js deleted file mode 100644 index dd68e34a430..00000000000 --- a/dist/cjs/plugins/storage-dexie/index.js +++ /dev/null @@ -1,50 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _rxStorageDexie = require("./rx-storage-dexie.js"); -Object.keys(_rxStorageDexie).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _rxStorageDexie[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxStorageDexie[key]; - } - }); -}); -var _rxStorageInstanceDexie = require("./rx-storage-instance-dexie.js"); -Object.keys(_rxStorageInstanceDexie).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _rxStorageInstanceDexie[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxStorageInstanceDexie[key]; - } - }); -}); -var _dexieHelper = require("./dexie-helper.js"); -Object.keys(_dexieHelper).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _dexieHelper[key]) return; - 
Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _dexieHelper[key]; - } - }); -}); -var _dexieQuery = require("./dexie-query.js"); -Object.keys(_dexieQuery).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _dexieQuery[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _dexieQuery[key]; - } - }); -}); -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-dexie/index.js.map b/dist/cjs/plugins/storage-dexie/index.js.map deleted file mode 100644 index 47c3ea1f35c..00000000000 --- a/dist/cjs/plugins/storage-dexie/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_rxStorageDexie","require","Object","keys","forEach","key","exports","defineProperty","enumerable","get","_rxStorageInstanceDexie","_dexieHelper","_dexieQuery"],"sources":["../../../../src/plugins/storage-dexie/index.ts"],"sourcesContent":["export * from './rx-storage-dexie.ts';\nexport * from './rx-storage-instance-dexie.ts';\nexport * from './dexie-helper.ts';\nexport * from 
'./dexie-query.ts';\n"],"mappings":";;;;;AAAA,IAAAA,eAAA,GAAAC,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAH,eAAA,EAAAI,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAL,eAAA,CAAAK,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAT,eAAA,CAAAK,GAAA;IAAA;EAAA;AAAA;AACA,IAAAK,uBAAA,GAAAT,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAO,uBAAA,EAAAN,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAK,uBAAA,CAAAL,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAC,uBAAA,CAAAL,GAAA;IAAA;EAAA;AAAA;AACA,IAAAM,YAAA,GAAAV,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAQ,YAAA,EAAAP,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAM,YAAA,CAAAN,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAE,YAAA,CAAAN,GAAA;IAAA;EAAA;AAAA;AACA,IAAAO,WAAA,GAAAX,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAS,WAAA,EAAAR,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAO,WAAA,CAAAP,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAG,WAAA,CAAAP,GAAA;IAAA;EAAA;AAAA","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-dexie/rx-storage-dexie.js b/dist/cjs/plugins/storage-dexie/rx-storage-dexie.js deleted file mode 100644 index bca662a5e69..00000000000 --- a/dist/cjs/plugins/storage-dexie/rx-storage-dexie.js +++ /dev/null @@ -1,29 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxStorageDexie = void 0; -exports.getRxStorageDexie = getRxStorageDexie; -var _dexieHelper = require("./dexie-helper.js"); -var _rxStorageInstanceDexie = require("./rx-storage-instance-dexie.js"); -var _rxStorageHelper = require("../../rx-storage-helper.js"); -var _utilsRxdbVersion = require("../utils/utils-rxdb-version.js"); -var RxStorageDexie = 
exports.RxStorageDexie = /*#__PURE__*/function () { - function RxStorageDexie(settings) { - this.name = _dexieHelper.RX_STORAGE_NAME_DEXIE; - this.rxdbVersion = _utilsRxdbVersion.RXDB_VERSION; - this.settings = settings; - } - var _proto = RxStorageDexie.prototype; - _proto.createStorageInstance = function createStorageInstance(params) { - (0, _rxStorageHelper.ensureRxStorageInstanceParamsAreCorrect)(params); - return (0, _rxStorageInstanceDexie.createDexieStorageInstance)(this, params, this.settings); - }; - return RxStorageDexie; -}(); -function getRxStorageDexie(settings = {}) { - var storage = new RxStorageDexie(settings); - return storage; -} -//# sourceMappingURL=rx-storage-dexie.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-dexie/rx-storage-dexie.js.map b/dist/cjs/plugins/storage-dexie/rx-storage-dexie.js.map deleted file mode 100644 index cc233d17de5..00000000000 --- a/dist/cjs/plugins/storage-dexie/rx-storage-dexie.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-storage-dexie.js","names":["_dexieHelper","require","_rxStorageInstanceDexie","_rxStorageHelper","_utilsRxdbVersion","RxStorageDexie","exports","settings","name","RX_STORAGE_NAME_DEXIE","rxdbVersion","RXDB_VERSION","_proto","prototype","createStorageInstance","params","ensureRxStorageInstanceParamsAreCorrect","createDexieStorageInstance","getRxStorageDexie","storage"],"sources":["../../../../src/plugins/storage-dexie/rx-storage-dexie.ts"],"sourcesContent":["import type {\n RxStorage,\n RxStorageInstanceCreationParams\n} from '../../types/index.d.ts';\nimport {\n RX_STORAGE_NAME_DEXIE\n} from './dexie-helper.ts';\nimport type {\n DexieSettings,\n DexieStorageInternals\n} from '../../types/plugins/dexie.d.ts';\nimport {\n createDexieStorageInstance,\n RxStorageInstanceDexie\n} from './rx-storage-instance-dexie.ts';\nimport { ensureRxStorageInstanceParamsAreCorrect } from '../../rx-storage-helper.ts';\nimport { RXDB_VERSION } from 
'../utils/utils-rxdb-version.ts';\n\n\n\nexport class RxStorageDexie implements RxStorage {\n public name = RX_STORAGE_NAME_DEXIE;\n public readonly rxdbVersion = RXDB_VERSION;\n constructor(\n public settings: DexieSettings\n ) { }\n\n public createStorageInstance(\n params: RxStorageInstanceCreationParams\n ): Promise> {\n ensureRxStorageInstanceParamsAreCorrect(params);\n return createDexieStorageInstance(this, params, this.settings);\n }\n}\n\n\nexport function getRxStorageDexie(\n settings: DexieSettings = {}\n): RxStorageDexie {\n const storage = new RxStorageDexie(settings);\n return storage;\n}\n"],"mappings":";;;;;;;AAIA,IAAAA,YAAA,GAAAC,OAAA;AAOA,IAAAC,uBAAA,GAAAD,OAAA;AAIA,IAAAE,gBAAA,GAAAF,OAAA;AACA,IAAAG,iBAAA,GAAAH,OAAA;AAA8D,IAIjDI,cAAc,GAAAC,OAAA,CAAAD,cAAA;EAGvB,SAAAA,eACWE,QAAuB,EAChC;IAAA,KAJKC,IAAI,GAAGC,kCAAqB;IAAA,KACnBC,WAAW,GAAGC,8BAAY;IAAA,KAE/BJ,QAAuB,GAAvBA,QAAuB;EAC9B;EAAC,IAAAK,MAAA,GAAAP,cAAA,CAAAQ,SAAA;EAAAD,MAAA,CAEEE,qBAAqB,GAA5B,SAAAA,sBACIC,MAAiE,EACvB;IAC1C,IAAAC,wDAAuC,EAACD,MAAM,CAAC;IAC/C,OAAO,IAAAE,kDAA0B,EAAC,IAAI,EAAEF,MAAM,EAAE,IAAI,CAACR,QAAQ,CAAC;EAClE,CAAC;EAAA,OAAAF,cAAA;AAAA;AAIE,SAASa,iBAAiBA,CAC7BX,QAAuB,GAAG,CAAC,CAAC,EACd;EACd,IAAMY,OAAO,GAAG,IAAId,cAAc,CAACE,QAAQ,CAAC;EAC5C,OAAOY,OAAO;AAClB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-dexie/rx-storage-instance-dexie.js b/dist/cjs/plugins/storage-dexie/rx-storage-instance-dexie.js deleted file mode 100644 index ecabce32aeb..00000000000 --- a/dist/cjs/plugins/storage-dexie/rx-storage-instance-dexie.js +++ /dev/null @@ -1,258 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxStorageInstanceDexie = exports.DEXIE_TEST_META_FIELD = void 0; -exports.createDexieStorageInstance = createDexieStorageInstance; -var _rxjs = require("rxjs"); -var _index = require("../utils/index.js"); -var _dexieHelper = require("./dexie-helper.js"); -var _dexieQuery = require("./dexie-query.js"); -var 
_rxSchemaHelper = require("../../rx-schema-helper.js"); -var _rxStorageHelper = require("../../rx-storage-helper.js"); -var _rxStorageMultiinstance = require("../../rx-storage-multiinstance.js"); -var _rxError = require("../../rx-error.js"); -var instanceId = (0, _index.now)(); -var DEXIE_TEST_META_FIELD = exports.DEXIE_TEST_META_FIELD = 'dexieTestMetaField'; -var shownNonPremiumLog = false; -var RxStorageInstanceDexie = exports.RxStorageInstanceDexie = /*#__PURE__*/function () { - function RxStorageInstanceDexie(storage, databaseName, collectionName, schema, internals, options, settings, devMode) { - this.changes$ = new _rxjs.Subject(); - this.instanceId = instanceId++; - this.storage = storage; - this.databaseName = databaseName; - this.collectionName = collectionName; - this.schema = schema; - this.internals = internals; - this.options = options; - this.settings = settings; - this.devMode = devMode; - this.primaryPath = (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(this.schema.primaryKey); - } - var _proto = RxStorageInstanceDexie.prototype; - _proto.bulkWrite = async function bulkWrite(documentWrites, context) { - ensureNotClosed(this); - if (!shownNonPremiumLog && (!_index.RXDB_UTILS_GLOBAL.premium || typeof _index.RXDB_UTILS_GLOBAL.premium !== 'string' || (await (0, _index.defaultHashSha256)(_index.RXDB_UTILS_GLOBAL.premium)) !== _index.PREMIUM_FLAG_HASH)) { - console.warn(['-------------- RxDB Open Core RxStorage -------------------------------', 'You are using the free Dexie.js based RxStorage implementation from RxDB https://rxdb.info/rx-storage-dexie.html?console=dexie ', 'While this is a great option, we want to let you know that there are faster storage solutions available in our premium plugins.', 'For professional users and production environments, we highly recommend considering these premium options to enhance performance and reliability.', ' https://rxdb.info/premium?console=dexie ', 'If you already purchased premium access you can disable this 
log by calling the setPremiumFlag() function from rxdb-premium/plugins/shared.', '---------------------------------------------------------------------'].join('\n')); - shownNonPremiumLog = true; - } else { - shownNonPremiumLog = true; - } - - /** - * Check some assumptions to ensure RxDB - * does not call the storage with an invalid write. - */ - documentWrites.forEach(row => { - // ensure revision is set - if (!row.document._rev || row.previous && !row.previous._rev) { - throw (0, _rxError.newRxError)('SNH', { - args: { - row - } - }); - } - - // ensure prev-data is set - if (this.devMode) { - if (row.previous && (!row.previous._meta[DEXIE_TEST_META_FIELD] || row.previous._meta[DEXIE_TEST_META_FIELD] !== row.previous._rev)) { - console.dir(row); - throw new Error('missing or wrong _meta.' + DEXIE_TEST_META_FIELD); - } - } - }); - var state = await this.internals; - var ret = { - success: [], - error: [] - }; - - /** - * Some storages might add any _meta fields - * internally. To ensure RxDB can work with that in the - * test suite, we add a random field here. 
- * To ensure - */ - if (this.devMode) { - documentWrites = documentWrites.map(row => { - var doc = (0, _rxStorageHelper.flatCloneDocWithMeta)(row.document); - doc._meta[DEXIE_TEST_META_FIELD] = doc._rev; - return { - previous: row.previous, - document: doc - }; - }); - } - var documentKeys = documentWrites.map(writeRow => writeRow.document[this.primaryPath]); - var categorized; - await state.dexieDb.transaction('rw', state.dexieTable, state.dexieAttachmentsTable, async () => { - var docsInDbMap = new Map(); - var docsInDbWithInternals = await (0, _dexieHelper.getDocsInDb)(this.internals, documentKeys); - docsInDbWithInternals.forEach(docWithDexieInternals => { - var doc = docWithDexieInternals; - if (doc) { - docsInDbMap.set(doc[this.primaryPath], doc); - } - return doc; - }); - categorized = (0, _rxStorageHelper.categorizeBulkWriteRows)(this, this.primaryPath, docsInDbMap, documentWrites, context); - ret.error = categorized.errors; - - /** - * Batch up the database operations - * so we can later run them in bulk. 
- */ - var bulkPutDocs = []; - categorized.bulkInsertDocs.forEach(row => { - ret.success.push(row.document); - bulkPutDocs.push(row.document); - }); - categorized.bulkUpdateDocs.forEach(row => { - ret.success.push(row.document); - bulkPutDocs.push(row.document); - }); - bulkPutDocs = bulkPutDocs.map(d => (0, _dexieHelper.fromStorageToDexie)(state.booleanIndexes, d)); - if (bulkPutDocs.length > 0) { - await state.dexieTable.bulkPut(bulkPutDocs); - } - - // handle attachments - var putAttachments = []; - categorized.attachmentsAdd.forEach(attachment => { - putAttachments.push({ - id: (0, _dexieHelper.attachmentObjectId)(attachment.documentId, attachment.attachmentId), - data: attachment.attachmentData.data - }); - }); - categorized.attachmentsUpdate.forEach(attachment => { - putAttachments.push({ - id: (0, _dexieHelper.attachmentObjectId)(attachment.documentId, attachment.attachmentId), - data: attachment.attachmentData.data - }); - }); - await state.dexieAttachmentsTable.bulkPut(putAttachments); - await state.dexieAttachmentsTable.bulkDelete(categorized.attachmentsRemove.map(attachment => (0, _dexieHelper.attachmentObjectId)(attachment.documentId, attachment.attachmentId))); - }); - categorized = (0, _index.ensureNotFalsy)(categorized); - if (categorized.eventBulk.events.length > 0) { - var lastState = (0, _index.ensureNotFalsy)(categorized.newestRow).document; - categorized.eventBulk.checkpoint = { - id: lastState[this.primaryPath], - lwt: lastState._meta.lwt - }; - categorized.eventBulk.endTime = (0, _index.now)(); - this.changes$.next(categorized.eventBulk); - } - return ret; - }; - _proto.findDocumentsById = async function findDocumentsById(ids, deleted) { - ensureNotClosed(this); - var state = await this.internals; - var ret = []; - await state.dexieDb.transaction('r', state.dexieTable, async () => { - var docsInDb = await (0, _dexieHelper.getDocsInDb)(this.internals, ids); - docsInDb.forEach(documentInDb => { - if (documentInDb && (!documentInDb._deleted || 
deleted)) { - ret.push(documentInDb); - } - }); - }); - return ret; - }; - _proto.query = function query(preparedQuery) { - ensureNotClosed(this); - return (0, _dexieQuery.dexieQuery)(this, preparedQuery); - }; - _proto.count = async function count(preparedQuery) { - if (preparedQuery.queryPlan.selectorSatisfiedByIndex) { - var result = await (0, _dexieQuery.dexieCount)(this, preparedQuery); - return { - count: result, - mode: 'fast' - }; - } else { - var _result = await (0, _dexieQuery.dexieQuery)(this, preparedQuery); - return { - count: _result.documents.length, - mode: 'slow' - }; - } - }; - _proto.changeStream = function changeStream() { - ensureNotClosed(this); - return this.changes$.asObservable(); - }; - _proto.cleanup = async function cleanup(minimumDeletedTime) { - ensureNotClosed(this); - var state = await this.internals; - await state.dexieDb.transaction('rw', state.dexieTable, async () => { - var maxDeletionTime = (0, _index.now)() - minimumDeletedTime; - /** - * TODO only fetch _deleted=true - */ - var toRemove = await state.dexieTable.where('_meta.lwt').below(maxDeletionTime).toArray(); - var removeIds = []; - toRemove.forEach(doc => { - if (doc._deleted === '1') { - removeIds.push(doc[this.primaryPath]); - } - }); - await state.dexieTable.bulkDelete(removeIds); - }); - - /** - * TODO instead of deleting all deleted docs at once, - * only clean up some of them and return false if there are more documents to clean up. - * This ensures that when many documents have to be purged, - * we do not block the more important tasks too long. 
- */ - return true; - }; - _proto.getAttachmentData = async function getAttachmentData(documentId, attachmentId, _digest) { - ensureNotClosed(this); - var state = await this.internals; - var id = (0, _dexieHelper.attachmentObjectId)(documentId, attachmentId); - return await state.dexieDb.transaction('r', state.dexieAttachmentsTable, async () => { - var attachment = await state.dexieAttachmentsTable.get(id); - if (attachment) { - return attachment.data; - } else { - throw new Error('attachment missing documentId: ' + documentId + ' attachmentId: ' + attachmentId); - } - }); - }; - _proto.remove = async function remove() { - ensureNotClosed(this); - var state = await this.internals; - await state.dexieTable.clear(); - return this.close(); - }; - _proto.close = function close() { - if (this.closed) { - return this.closed; - } - this.closed = (async () => { - this.changes$.complete(); - await (0, _dexieHelper.closeDexieDb)(this.internals); - })(); - return this.closed; - }; - _proto.conflictResultionTasks = function conflictResultionTasks() { - return new _rxjs.Subject(); - }; - _proto.resolveConflictResultionTask = async function resolveConflictResultionTask(_taskSolution) {}; - return RxStorageInstanceDexie; -}(); -async function createDexieStorageInstance(storage, params, settings) { - var internals = (0, _dexieHelper.getDexieDbWithTables)(params.databaseName, params.collectionName, settings, params.schema); - var instance = new RxStorageInstanceDexie(storage, params.databaseName, params.collectionName, params.schema, internals, params.options, settings, params.devMode); - await (0, _rxStorageMultiinstance.addRxStorageMultiInstanceSupport)(_dexieHelper.RX_STORAGE_NAME_DEXIE, params, instance); - return Promise.resolve(instance); -} -function ensureNotClosed(instance) { - if (instance.closed) { - throw new Error('RxStorageInstanceDexie is closed ' + instance.databaseName + '-' + instance.collectionName); - } -} -//# sourceMappingURL=rx-storage-instance-dexie.js.map \ 
No newline at end of file diff --git a/dist/cjs/plugins/storage-dexie/rx-storage-instance-dexie.js.map b/dist/cjs/plugins/storage-dexie/rx-storage-instance-dexie.js.map deleted file mode 100644 index 466cf0601d8..00000000000 --- a/dist/cjs/plugins/storage-dexie/rx-storage-instance-dexie.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-storage-instance-dexie.js","names":["_rxjs","require","_index","_dexieHelper","_dexieQuery","_rxSchemaHelper","_rxStorageHelper","_rxStorageMultiinstance","_rxError","instanceId","now","DEXIE_TEST_META_FIELD","exports","shownNonPremiumLog","RxStorageInstanceDexie","storage","databaseName","collectionName","schema","internals","options","settings","devMode","changes$","Subject","primaryPath","getPrimaryFieldOfPrimaryKey","primaryKey","_proto","prototype","bulkWrite","documentWrites","context","ensureNotClosed","RXDB_UTILS_GLOBAL","premium","defaultHashSha256","PREMIUM_FLAG_HASH","console","warn","join","forEach","row","document","_rev","previous","newRxError","args","_meta","dir","Error","state","ret","success","error","map","doc","flatCloneDocWithMeta","documentKeys","writeRow","categorized","dexieDb","transaction","dexieTable","dexieAttachmentsTable","docsInDbMap","Map","docsInDbWithInternals","getDocsInDb","docWithDexieInternals","set","categorizeBulkWriteRows","errors","bulkPutDocs","bulkInsertDocs","push","bulkUpdateDocs","d","fromStorageToDexie","booleanIndexes","length","bulkPut","putAttachments","attachmentsAdd","attachment","id","attachmentObjectId","documentId","attachmentId","data","attachmentData","attachmentsUpdate","bulkDelete","attachmentsRemove","ensureNotFalsy","eventBulk","events","lastState","newestRow","checkpoint","lwt","endTime","next","findDocumentsById","ids","deleted","docsInDb","documentInDb","_deleted","query","preparedQuery","dexieQuery","count","queryPlan","selectorSatisfiedByIndex","result","dexieCount","mode","documents","changeStream","asObservable","cleanup","minimumDeletedTime","maxDeletionTi
me","toRemove","where","below","toArray","removeIds","getAttachmentData","_digest","get","remove","clear","close","closed","complete","closeDexieDb","conflictResultionTasks","resolveConflictResultionTask","_taskSolution","createDexieStorageInstance","params","getDexieDbWithTables","instance","addRxStorageMultiInstanceSupport","RX_STORAGE_NAME_DEXIE","Promise","resolve"],"sources":["../../../../src/plugins/storage-dexie/rx-storage-instance-dexie.ts"],"sourcesContent":["import {\n Subject,\n Observable\n} from 'rxjs';\nimport {\n now,\n ensureNotFalsy,\n defaultHashSha256,\n RXDB_UTILS_GLOBAL,\n PREMIUM_FLAG_HASH\n} from '../utils/index.ts';\nimport type {\n RxStorageInstance,\n RxStorageChangeEvent,\n RxDocumentData,\n BulkWriteRow,\n RxStorageBulkWriteResponse,\n RxStorageQueryResult,\n RxJsonSchema,\n RxStorageInstanceCreationParams,\n EventBulk,\n StringKeys,\n RxConflictResultionTask,\n RxConflictResultionTaskSolution,\n RxStorageDefaultCheckpoint,\n CategorizeBulkWriteRowsOutput,\n RxStorageCountResult,\n PreparedQuery\n} from '../../types/index.d.ts';\nimport type {\n DexieSettings,\n DexieStorageInternals\n} from '../../types/plugins/dexie.d.ts';\nimport { RxStorageDexie } from './rx-storage-dexie.ts';\nimport {\n attachmentObjectId,\n closeDexieDb,\n fromStorageToDexie,\n getDexieDbWithTables,\n getDocsInDb,\n RX_STORAGE_NAME_DEXIE\n} from './dexie-helper.ts';\nimport { dexieCount, dexieQuery } from './dexie-query.ts';\nimport { getPrimaryFieldOfPrimaryKey } from '../../rx-schema-helper.ts';\nimport { categorizeBulkWriteRows, flatCloneDocWithMeta } from '../../rx-storage-helper.ts';\nimport { addRxStorageMultiInstanceSupport } from '../../rx-storage-multiinstance.ts';\nimport { newRxError } from '../../rx-error.ts';\n\nlet instanceId = now();\nexport const DEXIE_TEST_META_FIELD = 'dexieTestMetaField';\n\nlet shownNonPremiumLog = false;\n\n\nexport class RxStorageInstanceDexie implements RxStorageInstance<\n RxDocType,\n DexieStorageInternals,\n 
DexieSettings,\n RxStorageDefaultCheckpoint\n> {\n public readonly primaryPath: StringKeys>;\n private changes$: Subject>, RxStorageDefaultCheckpoint>> = new Subject();\n public readonly instanceId = instanceId++;\n public closed?: Promise;\n\n constructor(\n public readonly storage: RxStorageDexie,\n public readonly databaseName: string,\n public readonly collectionName: string,\n public readonly schema: Readonly>>,\n public readonly internals: DexieStorageInternals,\n public readonly options: Readonly,\n public readonly settings: DexieSettings,\n public readonly devMode: boolean\n ) {\n this.primaryPath = getPrimaryFieldOfPrimaryKey(this.schema.primaryKey);\n }\n\n async bulkWrite(\n documentWrites: BulkWriteRow[],\n context: string\n ): Promise> {\n ensureNotClosed(this);\n\n\n if (\n !shownNonPremiumLog &&\n (\n !RXDB_UTILS_GLOBAL.premium ||\n typeof RXDB_UTILS_GLOBAL.premium !== 'string' ||\n (await defaultHashSha256(RXDB_UTILS_GLOBAL.premium) !== PREMIUM_FLAG_HASH)\n )\n ) {\n console.warn(\n [\n '-------------- RxDB Open Core RxStorage -------------------------------',\n 'You are using the free Dexie.js based RxStorage implementation from RxDB https://rxdb.info/rx-storage-dexie.html?console=dexie ',\n 'While this is a great option, we want to let you know that there are faster storage solutions available in our premium plugins.',\n 'For professional users and production environments, we highly recommend considering these premium options to enhance performance and reliability.',\n ' https://rxdb.info/premium?console=dexie ',\n 'If you already purchased premium access you can disable this log by calling the setPremiumFlag() function from rxdb-premium/plugins/shared.',\n '---------------------------------------------------------------------'\n ].join('\\n')\n );\n shownNonPremiumLog = true;\n } else {\n shownNonPremiumLog = true;\n }\n\n\n /**\n * Check some assumptions to ensure RxDB\n * does not call the storage with an invalid write.\n */\n 
documentWrites.forEach(row => {\n // ensure revision is set\n if (\n !row.document._rev ||\n (\n row.previous &&\n !row.previous._rev\n )\n ) {\n throw newRxError('SNH', { args: { row } });\n }\n\n // ensure prev-data is set\n if (this.devMode) {\n if (\n row.previous &&\n (\n !row.previous._meta[DEXIE_TEST_META_FIELD] ||\n row.previous._meta[DEXIE_TEST_META_FIELD] !== row.previous._rev\n )\n ) {\n console.dir(row);\n throw new Error('missing or wrong _meta.' + DEXIE_TEST_META_FIELD);\n }\n }\n });\n\n const state = await this.internals;\n const ret: RxStorageBulkWriteResponse = {\n success: [],\n error: []\n };\n\n /**\n * Some storages might add any _meta fields\n * internally. To ensure RxDB can work with that in the\n * test suite, we add a random field here.\n * To ensure \n */\n if (this.devMode) {\n documentWrites = documentWrites.map(row => {\n const doc = flatCloneDocWithMeta(row.document);\n doc._meta[DEXIE_TEST_META_FIELD] = doc._rev;\n return {\n previous: row.previous,\n document: doc\n }\n })\n }\n\n\n const documentKeys: string[] = documentWrites.map(writeRow => writeRow.document[this.primaryPath] as any);\n let categorized: CategorizeBulkWriteRowsOutput | undefined;\n await state.dexieDb.transaction(\n 'rw',\n state.dexieTable,\n state.dexieAttachmentsTable,\n async () => {\n const docsInDbMap = new Map>();\n const docsInDbWithInternals = await getDocsInDb(this.internals, documentKeys);\n docsInDbWithInternals.forEach(docWithDexieInternals => {\n const doc = docWithDexieInternals;\n if (doc) {\n docsInDbMap.set((doc as any)[this.primaryPath], doc as any);\n }\n return doc;\n });\n\n categorized = categorizeBulkWriteRows(\n this,\n this.primaryPath as any,\n docsInDbMap,\n documentWrites,\n context\n );\n ret.error = categorized.errors;\n\n /**\n * Batch up the database operations\n * so we can later run them in bulk.\n */\n let bulkPutDocs: any[] = [];\n categorized.bulkInsertDocs.forEach(row => {\n ret.success.push(row.document);\n 
bulkPutDocs.push(row.document);\n });\n categorized.bulkUpdateDocs.forEach(row => {\n ret.success.push(row.document);\n bulkPutDocs.push(row.document);\n });\n bulkPutDocs = bulkPutDocs.map(d => fromStorageToDexie(state.booleanIndexes, d));\n if (bulkPutDocs.length > 0) {\n await state.dexieTable.bulkPut(bulkPutDocs);\n }\n\n // handle attachments\n const putAttachments: { id: string, data: string }[] = [];\n categorized.attachmentsAdd.forEach(attachment => {\n putAttachments.push({\n id: attachmentObjectId(attachment.documentId, attachment.attachmentId),\n data: attachment.attachmentData.data\n });\n });\n categorized.attachmentsUpdate.forEach(attachment => {\n putAttachments.push({\n id: attachmentObjectId(attachment.documentId, attachment.attachmentId),\n data: attachment.attachmentData.data\n });\n });\n await state.dexieAttachmentsTable.bulkPut(putAttachments);\n await state.dexieAttachmentsTable.bulkDelete(\n categorized.attachmentsRemove.map(attachment => attachmentObjectId(attachment.documentId, attachment.attachmentId))\n );\n\n });\n\n categorized = ensureNotFalsy(categorized);\n if (categorized.eventBulk.events.length > 0) {\n const lastState = ensureNotFalsy(categorized.newestRow).document;\n categorized.eventBulk.checkpoint = {\n id: lastState[this.primaryPath],\n lwt: lastState._meta.lwt\n };\n categorized.eventBulk.endTime = now();\n this.changes$.next(categorized.eventBulk);\n }\n\n return ret;\n }\n\n async findDocumentsById(\n ids: string[],\n deleted: boolean\n ): Promise[]> {\n ensureNotClosed(this);\n const state = await this.internals;\n const ret: RxDocumentData[] = [];\n\n await state.dexieDb.transaction(\n 'r',\n state.dexieTable,\n async () => {\n const docsInDb = await getDocsInDb(this.internals, ids);\n docsInDb.forEach(documentInDb => {\n if (\n documentInDb &&\n (!documentInDb._deleted || deleted)\n ) {\n ret.push(documentInDb);\n }\n });\n });\n return ret;\n }\n\n query(preparedQuery: PreparedQuery): Promise> {\n 
ensureNotClosed(this);\n return dexieQuery(\n this,\n preparedQuery\n );\n }\n async count(\n preparedQuery: PreparedQuery\n ): Promise {\n if (preparedQuery.queryPlan.selectorSatisfiedByIndex) {\n const result = await dexieCount(this, preparedQuery);\n return {\n count: result,\n mode: 'fast'\n };\n } else {\n const result = await dexieQuery(this, preparedQuery);\n return {\n count: result.documents.length,\n mode: 'slow'\n };\n }\n }\n\n changeStream(): Observable>, RxStorageDefaultCheckpoint>> {\n ensureNotClosed(this);\n return this.changes$.asObservable();\n }\n\n async cleanup(minimumDeletedTime: number): Promise {\n ensureNotClosed(this);\n const state = await this.internals;\n await state.dexieDb.transaction(\n 'rw',\n state.dexieTable,\n async () => {\n const maxDeletionTime = now() - minimumDeletedTime;\n /**\n * TODO only fetch _deleted=true\n */\n const toRemove = await state.dexieTable\n .where('_meta.lwt')\n .below(maxDeletionTime)\n .toArray();\n const removeIds: string[] = [];\n toRemove.forEach(doc => {\n if (doc._deleted === '1') {\n removeIds.push(doc[this.primaryPath]);\n }\n });\n await state.dexieTable.bulkDelete(removeIds);\n }\n );\n\n /**\n * TODO instead of deleting all deleted docs at once,\n * only clean up some of them and return false if there are more documents to clean up.\n * This ensures that when many documents have to be purged,\n * we do not block the more important tasks too long.\n */\n return true;\n }\n\n async getAttachmentData(documentId: string, attachmentId: string, _digest: string): Promise {\n ensureNotClosed(this);\n const state = await this.internals;\n const id = attachmentObjectId(documentId, attachmentId);\n return await state.dexieDb.transaction(\n 'r',\n state.dexieAttachmentsTable,\n async () => {\n\n const attachment = await state.dexieAttachmentsTable.get(id);\n if (attachment) {\n return attachment.data;\n } else {\n throw new Error('attachment missing documentId: ' + documentId + ' attachmentId: ' + 
attachmentId);\n }\n });\n }\n\n async remove(): Promise {\n ensureNotClosed(this);\n const state = await this.internals;\n await state.dexieTable.clear()\n return this.close();\n }\n\n\n close(): Promise {\n if (this.closed) {\n return this.closed;\n }\n this.closed = (async () => {\n this.changes$.complete();\n await closeDexieDb(this.internals);\n })();\n return this.closed;\n }\n\n conflictResultionTasks(): Observable> {\n return new Subject();\n }\n async resolveConflictResultionTask(_taskSolution: RxConflictResultionTaskSolution): Promise { }\n\n}\n\n\nexport async function createDexieStorageInstance(\n storage: RxStorageDexie,\n params: RxStorageInstanceCreationParams,\n settings: DexieSettings\n): Promise> {\n const internals = getDexieDbWithTables(\n params.databaseName,\n params.collectionName,\n settings,\n params.schema\n );\n\n const instance = new RxStorageInstanceDexie(\n storage,\n params.databaseName,\n params.collectionName,\n params.schema,\n internals,\n params.options,\n settings,\n params.devMode\n );\n\n await addRxStorageMultiInstanceSupport(\n RX_STORAGE_NAME_DEXIE,\n params,\n instance\n );\n\n return Promise.resolve(instance);\n}\n\n\n\nfunction ensureNotClosed(\n instance: RxStorageInstanceDexie\n) {\n if (instance.closed) {\n throw new Error('RxStorageInstanceDexie is closed ' + instance.databaseName + '-' + instance.collectionName);\n 
}\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,KAAA,GAAAC,OAAA;AAIA,IAAAC,MAAA,GAAAD,OAAA;AA8BA,IAAAE,YAAA,GAAAF,OAAA;AAQA,IAAAG,WAAA,GAAAH,OAAA;AACA,IAAAI,eAAA,GAAAJ,OAAA;AACA,IAAAK,gBAAA,GAAAL,OAAA;AACA,IAAAM,uBAAA,GAAAN,OAAA;AACA,IAAAO,QAAA,GAAAP,OAAA;AAEA,IAAIQ,UAAU,GAAG,IAAAC,UAAG,EAAC,CAAC;AACf,IAAMC,qBAAqB,GAAAC,OAAA,CAAAD,qBAAA,GAAG,oBAAoB;AAEzD,IAAIE,kBAAkB,GAAG,KAAK;AAAC,IAGlBC,sBAAsB,GAAAF,OAAA,CAAAE,sBAAA;EAW/B,SAAAA,uBACoBC,OAAuB,EACvBC,YAAoB,EACpBC,cAAsB,EACtBC,MAAyD,EACzDC,SAAgC,EAChCC,OAAgC,EAChCC,QAAuB,EACvBC,OAAgB,EAClC;IAAA,KAbMC,QAAQ,GAAoG,IAAIC,aAAO,CAAC,CAAC;IAAA,KACjHf,UAAU,GAAGA,UAAU,EAAE;IAAA,KAIrBM,OAAuB,GAAvBA,OAAuB;IAAA,KACvBC,YAAoB,GAApBA,YAAoB;IAAA,KACpBC,cAAsB,GAAtBA,cAAsB;IAAA,KACtBC,MAAyD,GAAzDA,MAAyD;IAAA,KACzDC,SAAgC,GAAhCA,SAAgC;IAAA,KAChCC,OAAgC,GAAhCA,OAAgC;IAAA,KAChCC,QAAuB,GAAvBA,QAAuB;IAAA,KACvBC,OAAgB,GAAhBA,OAAgB;IAEhC,IAAI,CAACG,WAAW,GAAG,IAAAC,2CAA2B,EAAC,IAAI,CAACR,MAAM,CAACS,UAAU,CAAC;EAC1E;EAAC,IAAAC,MAAA,GAAAd,sBAAA,CAAAe,SAAA;EAAAD,MAAA,CAEKE,SAAS,GAAf,eAAAA,UACIC,cAAyC,EACzCC,OAAe,EAC+B;IAC9CC,eAAe,CAAC,IAAI,CAAC;IAGrB,IACI,CAACpB,kBAAkB,KAEf,CAACqB,wBAAiB,CAACC,OAAO,IAC1B,OAAOD,wBAAiB,CAACC,OAAO,KAAK,QAAQ,IAC5C,OAAM,IAAAC,wBAAiB,EAACF,wBAAiB,CAACC,OAAO,CAAC,MAAKE,wBAAkB,CAC7E,EACH;MACEC,OAAO,CAACC,IAAI,CACR,CACI,yEAAyE,EACzE,iIAAiI,EACjI,iIAAiI,EACjI,mJAAmJ,EACnJ,2CAA2C,EAC3C,6IAA6I,EAC7I,uEAAuE,CAC1E,CAACC,IAAI,CAAC,IAAI,CACf,CAAC;MACD3B,kBAAkB,GAAG,IAAI;IAC7B,CAAC,MAAM;MACHA,kBAAkB,GAAG,IAAI;IAC7B;;IAGA;AACR;AACA;AACA;IACQkB,cAAc,CAACU,OAAO,CAACC,GAAG,IAAI;MAC1B;MACA,IACI,CAACA,GAAG,CAACC,QAAQ,CAACC,IAAI,IAEdF,GAAG,CAACG,QAAQ,IACZ,CAACH,GAAG,CAACG,QAAQ,CAACD,IACjB,EACH;QACE,MAAM,IAAAE,mBAAU,EAAC,KAAK,EAAE;UAAEC,IAAI,EAAE;YAAEL;UAAI;QAAE,CAAC,CAAC;MAC9C;;MAEA;MACA,IAAI,IAAI,CAACpB,OAAO,EAAE;QACd,IACIoB,GAAG,CAACG,QAAQ,KAER,CAACH,GAAG,CAACG,QAAQ,CAACG,KAAK,CAACrC,qBAAqB,CAAC,IAC1C+B,GAAG,CAACG,QAAQ,CAACG,KAAK,CAACrC,qBAAqB,CAAC,KAAK+B,GAAG,CAACG,QAAQ,CAACD,IAAI,CAClE,EACH;UACEN,OAAO,CAACW,GAAG,CAACP,GAAG,CAAC;UAChB,MAAM,IA
AIQ,KAAK,CAAC,yBAAyB,GAAGvC,qBAAqB,CAAC;QACtE;MACJ;IACJ,CAAC,CAAC;IAEF,IAAMwC,KAAK,GAAG,MAAM,IAAI,CAAChC,SAAS;IAClC,IAAMiC,GAA0C,GAAG;MAC/CC,OAAO,EAAE,EAAE;MACXC,KAAK,EAAE;IACX,CAAC;;IAED;AACR;AACA;AACA;AACA;AACA;IACQ,IAAI,IAAI,CAAChC,OAAO,EAAE;MACdS,cAAc,GAAGA,cAAc,CAACwB,GAAG,CAACb,GAAG,IAAI;QACvC,IAAMc,GAAG,GAAG,IAAAC,qCAAoB,EAACf,GAAG,CAACC,QAAQ,CAAC;QAC9Ca,GAAG,CAACR,KAAK,CAACrC,qBAAqB,CAAC,GAAG6C,GAAG,CAACZ,IAAI;QAC3C,OAAO;UACHC,QAAQ,EAAEH,GAAG,CAACG,QAAQ;UACtBF,QAAQ,EAAEa;QACd,CAAC;MACL,CAAC,CAAC;IACN;IAGA,IAAME,YAAsB,GAAG3B,cAAc,CAACwB,GAAG,CAACI,QAAQ,IAAIA,QAAQ,CAAChB,QAAQ,CAAC,IAAI,CAAClB,WAAW,CAAQ,CAAC;IACzG,IAAImC,WAAiE;IACrE,MAAMT,KAAK,CAACU,OAAO,CAACC,WAAW,CAC3B,IAAI,EACJX,KAAK,CAACY,UAAU,EAChBZ,KAAK,CAACa,qBAAqB,EAC3B,YAAY;MACR,IAAMC,WAAW,GAAG,IAAIC,GAAG,CAAoC,CAAC;MAChE,IAAMC,qBAAqB,GAAG,MAAM,IAAAC,wBAAW,EAAY,IAAI,CAACjD,SAAS,EAAEuC,YAAY,CAAC;MACxFS,qBAAqB,CAAC1B,OAAO,CAAC4B,qBAAqB,IAAI;QACnD,IAAMb,GAAG,GAAGa,qBAAqB;QACjC,IAAIb,GAAG,EAAE;UACLS,WAAW,CAACK,GAAG,CAAEd,GAAG,CAAS,IAAI,CAAC/B,WAAW,CAAC,EAAE+B,GAAU,CAAC;QAC/D;QACA,OAAOA,GAAG;MACd,CAAC,CAAC;MAEFI,WAAW,GAAG,IAAAW,wCAAuB,EACjC,IAAI,EACJ,IAAI,CAAC9C,WAAW,EAChBwC,WAAW,EACXlC,cAAc,EACdC,OACJ,CAAC;MACDoB,GAAG,CAACE,KAAK,GAAGM,WAAW,CAACY,MAAM;;MAE9B;AAChB;AACA;AACA;MACgB,IAAIC,WAAkB,GAAG,EAAE;MAC3Bb,WAAW,CAACc,cAAc,CAACjC,OAAO,CAACC,GAAG,IAAI;QACtCU,GAAG,CAACC,OAAO,CAACsB,IAAI,CAACjC,GAAG,CAACC,QAAQ,CAAC;QAC9B8B,WAAW,CAACE,IAAI,CAACjC,GAAG,CAACC,QAAQ,CAAC;MAClC,CAAC,CAAC;MACFiB,WAAW,CAACgB,cAAc,CAACnC,OAAO,CAACC,GAAG,IAAI;QACtCU,GAAG,CAACC,OAAO,CAACsB,IAAI,CAACjC,GAAG,CAACC,QAAQ,CAAC;QAC9B8B,WAAW,CAACE,IAAI,CAACjC,GAAG,CAACC,QAAQ,CAAC;MAClC,CAAC,CAAC;MACF8B,WAAW,GAAGA,WAAW,CAAClB,GAAG,CAACsB,CAAC,IAAI,IAAAC,+BAAkB,EAAC3B,KAAK,CAAC4B,cAAc,EAAEF,CAAC,CAAC,CAAC;MAC/E,IAAIJ,WAAW,CAACO,MAAM,GAAG,CAAC,EAAE;QACxB,MAAM7B,KAAK,CAACY,UAAU,CAACkB,OAAO,CAACR,WAAW,CAAC;MAC/C;;MAEA;MACA,IAAMS,cAA8C,GAAG,EAAE;MACzDtB,WAAW,CAACuB,cAAc,CAAC1C,OAAO,CAAC2C,UAAU,IAAI;QAC7CF,cAAc,CAACP,IAAI,CAAC;UAChBU,EAAE,EAAE,IAAAC,
+BAAkB,EAACF,UAAU,CAACG,UAAU,EAAEH,UAAU,CAACI,YAAY,CAAC;UACtEC,IAAI,EAAEL,UAAU,CAACM,cAAc,CAACD;QACpC,CAAC,CAAC;MACN,CAAC,CAAC;MACF7B,WAAW,CAAC+B,iBAAiB,CAAClD,OAAO,CAAC2C,UAAU,IAAI;QAChDF,cAAc,CAACP,IAAI,CAAC;UAChBU,EAAE,EAAE,IAAAC,+BAAkB,EAACF,UAAU,CAACG,UAAU,EAAEH,UAAU,CAACI,YAAY,CAAC;UACtEC,IAAI,EAAEL,UAAU,CAACM,cAAc,CAACD;QACpC,CAAC,CAAC;MACN,CAAC,CAAC;MACF,MAAMtC,KAAK,CAACa,qBAAqB,CAACiB,OAAO,CAACC,cAAc,CAAC;MACzD,MAAM/B,KAAK,CAACa,qBAAqB,CAAC4B,UAAU,CACxChC,WAAW,CAACiC,iBAAiB,CAACtC,GAAG,CAAC6B,UAAU,IAAI,IAAAE,+BAAkB,EAACF,UAAU,CAACG,UAAU,EAAEH,UAAU,CAACI,YAAY,CAAC,CACtH,CAAC;IAEL,CAAC,CAAC;IAEN5B,WAAW,GAAG,IAAAkC,qBAAc,EAAClC,WAAW,CAAC;IACzC,IAAIA,WAAW,CAACmC,SAAS,CAACC,MAAM,CAAChB,MAAM,GAAG,CAAC,EAAE;MACzC,IAAMiB,SAAS,GAAG,IAAAH,qBAAc,EAAClC,WAAW,CAACsC,SAAS,CAAC,CAACvD,QAAQ;MAChEiB,WAAW,CAACmC,SAAS,CAACI,UAAU,GAAG;QAC/Bd,EAAE,EAAEY,SAAS,CAAC,IAAI,CAACxE,WAAW,CAAC;QAC/B2E,GAAG,EAAEH,SAAS,CAACjD,KAAK,CAACoD;MACzB,CAAC;MACDxC,WAAW,CAACmC,SAAS,CAACM,OAAO,GAAG,IAAA3F,UAAG,EAAC,CAAC;MACrC,IAAI,CAACa,QAAQ,CAAC+E,IAAI,CAAC1C,WAAW,CAACmC,SAAS,CAAC;IAC7C;IAEA,OAAO3C,GAAG;EACd,CAAC;EAAAxB,MAAA,CAEK2E,iBAAiB,GAAvB,eAAAA,kBACIC,GAAa,EACbC,OAAgB,EACoB;IACpCxE,eAAe,CAAC,IAAI,CAAC;IACrB,IAAMkB,KAAK,GAAG,MAAM,IAAI,CAAChC,SAAS;IAClC,IAAMiC,GAAgC,GAAG,EAAE;IAE3C,MAAMD,KAAK,CAACU,OAAO,CAACC,WAAW,CAC3B,GAAG,EACHX,KAAK,CAACY,UAAU,EAChB,YAAY;MACR,IAAM2C,QAAQ,GAAG,MAAM,IAAAtC,wBAAW,EAAY,IAAI,CAACjD,SAAS,EAAEqF,GAAG,CAAC;MAClEE,QAAQ,CAACjE,OAAO,CAACkE,YAAY,IAAI;QAC7B,IACIA,YAAY,KACX,CAACA,YAAY,CAACC,QAAQ,IAAIH,OAAO,CAAC,EACrC;UACErD,GAAG,CAACuB,IAAI,CAACgC,YAAY,CAAC;QAC1B;MACJ,CAAC,CAAC;IACN,CAAC,CAAC;IACN,OAAOvD,GAAG;EACd,CAAC;EAAAxB,MAAA,CAEDiF,KAAK,GAAL,SAAAA,MAAMC,aAAuC,EAA4C;IACrF7E,eAAe,CAAC,IAAI,CAAC;IACrB,OAAO,IAAA8E,sBAAU,EACb,IAAI,EACJD,aACJ,CAAC;EACL,CAAC;EAAAlF,MAAA,CACKoF,KAAK,GAAX,eAAAA,MACIF,aAAuC,EACV;IAC7B,IAAIA,aAAa,CAACG,SAAS,CAACC,wBAAwB,EAAE;MAClD,IAAMC,MAAM,GAAG,MAAM,IAAAC,sBAAU,EAAC,IAAI,EAAEN,aAAa,CAAC;MACpD,OAAO;QACHE,KAAK,EAAEG,MAAM;QACbE,IAAI,EAAE;MACV,C
AAC;IACL,CAAC,MAAM;MACH,IAAMF,OAAM,GAAG,MAAM,IAAAJ,sBAAU,EAAC,IAAI,EAAED,aAAa,CAAC;MACpD,OAAO;QACHE,KAAK,EAAEG,OAAM,CAACG,SAAS,CAACtC,MAAM;QAC9BqC,IAAI,EAAE;MACV,CAAC;IACL;EACJ,CAAC;EAAAzF,MAAA,CAED2F,YAAY,GAAZ,SAAAA,aAAA,EAAmH;IAC/GtF,eAAe,CAAC,IAAI,CAAC;IACrB,OAAO,IAAI,CAACV,QAAQ,CAACiG,YAAY,CAAC,CAAC;EACvC,CAAC;EAAA5F,MAAA,CAEK6F,OAAO,GAAb,eAAAA,QAAcC,kBAA0B,EAAoB;IACxDzF,eAAe,CAAC,IAAI,CAAC;IACrB,IAAMkB,KAAK,GAAG,MAAM,IAAI,CAAChC,SAAS;IAClC,MAAMgC,KAAK,CAACU,OAAO,CAACC,WAAW,CAC3B,IAAI,EACJX,KAAK,CAACY,UAAU,EAChB,YAAY;MACR,IAAM4D,eAAe,GAAG,IAAAjH,UAAG,EAAC,CAAC,GAAGgH,kBAAkB;MAClD;AAChB;AACA;MACgB,IAAME,QAAQ,GAAG,MAAMzE,KAAK,CAACY,UAAU,CAClC8D,KAAK,CAAC,WAAW,CAAC,CAClBC,KAAK,CAACH,eAAe,CAAC,CACtBI,OAAO,CAAC,CAAC;MACd,IAAMC,SAAmB,GAAG,EAAE;MAC9BJ,QAAQ,CAACnF,OAAO,CAACe,GAAG,IAAI;QACpB,IAAIA,GAAG,CAACoD,QAAQ,KAAK,GAAG,EAAE;UACtBoB,SAAS,CAACrD,IAAI,CAACnB,GAAG,CAAC,IAAI,CAAC/B,WAAW,CAAC,CAAC;QACzC;MACJ,CAAC,CAAC;MACF,MAAM0B,KAAK,CAACY,UAAU,CAAC6B,UAAU,CAACoC,SAAS,CAAC;IAChD,CACJ,CAAC;;IAED;AACR;AACA;AACA;AACA;AACA;IACQ,OAAO,IAAI;EACf,CAAC;EAAApG,MAAA,CAEKqG,iBAAiB,GAAvB,eAAAA,kBAAwB1C,UAAkB,EAAEC,YAAoB,EAAE0C,OAAe,EAAmB;IAChGjG,eAAe,CAAC,IAAI,CAAC;IACrB,IAAMkB,KAAK,GAAG,MAAM,IAAI,CAAChC,SAAS;IAClC,IAAMkE,EAAE,GAAG,IAAAC,+BAAkB,EAACC,UAAU,EAAEC,YAAY,CAAC;IACvD,OAAO,MAAMrC,KAAK,CAACU,OAAO,CAACC,WAAW,CAClC,GAAG,EACHX,KAAK,CAACa,qBAAqB,EAC3B,YAAY;MAER,IAAMoB,UAAU,GAAG,MAAMjC,KAAK,CAACa,qBAAqB,CAACmE,GAAG,CAAC9C,EAAE,CAAC;MAC5D,IAAID,UAAU,EAAE;QACZ,OAAOA,UAAU,CAACK,IAAI;MAC1B,CAAC,MAAM;QACH,MAAM,IAAIvC,KAAK,CAAC,iCAAiC,GAAGqC,UAAU,GAAG,iBAAiB,GAAGC,YAAY,CAAC;MACtG;IACJ,CAAC,CAAC;EACV,CAAC;EAAA5D,MAAA,CAEKwG,MAAM,GAAZ,eAAAA,OAAA,EAA8B;IAC1BnG,eAAe,CAAC,IAAI,CAAC;IACrB,IAAMkB,KAAK,GAAG,MAAM,IAAI,CAAChC,SAAS;IAClC,MAAMgC,KAAK,CAACY,UAAU,CAACsE,KAAK,CAAC,CAAC;IAC9B,OAAO,IAAI,CAACC,KAAK,CAAC,CAAC;EACvB,CAAC;EAAA1G,MAAA,CAGD0G,KAAK,GAAL,SAAAA,MAAA,EAAuB;IACnB,IAAI,IAAI,CAACC,MAAM,EAAE;MACb,OAAO,IAAI,CAACA,MAAM;IACtB;IACA,IAAI,CAACA,MAAM,GAAG,CAAC,YAAY;MACvB,IAAI,CAAChH,QAAQ,CA
ACiH,QAAQ,CAAC,CAAC;MACxB,MAAM,IAAAC,yBAAY,EAAC,IAAI,CAACtH,SAAS,CAAC;IACtC,CAAC,EAAE,CAAC;IACJ,OAAO,IAAI,CAACoH,MAAM;EACtB,CAAC;EAAA3G,MAAA,CAED8G,sBAAsB,GAAtB,SAAAA,uBAAA,EAAyE;IACrE,OAAO,IAAIlH,aAAO,CAAC,CAAC;EACxB,CAAC;EAAAI,MAAA,CACK+G,4BAA4B,GAAlC,eAAAA,6BAAmCC,aAAyD,EAAiB,CAAE,CAAC;EAAA,OAAA9H,sBAAA;AAAA;AAK7G,eAAe+H,0BAA0BA,CAC5C9H,OAAuB,EACvB+H,MAAiE,EACjEzH,QAAuB,EACmB;EAC1C,IAAMF,SAAS,GAAG,IAAA4H,iCAAoB,EAClCD,MAAM,CAAC9H,YAAY,EACnB8H,MAAM,CAAC7H,cAAc,EACrBI,QAAQ,EACRyH,MAAM,CAAC5H,MACX,CAAC;EAED,IAAM8H,QAAQ,GAAG,IAAIlI,sBAAsB,CACvCC,OAAO,EACP+H,MAAM,CAAC9H,YAAY,EACnB8H,MAAM,CAAC7H,cAAc,EACrB6H,MAAM,CAAC5H,MAAM,EACbC,SAAS,EACT2H,MAAM,CAAC1H,OAAO,EACdC,QAAQ,EACRyH,MAAM,CAACxH,OACX,CAAC;EAED,MAAM,IAAA2H,wDAAgC,EAClCC,kCAAqB,EACrBJ,MAAM,EACNE,QACJ,CAAC;EAED,OAAOG,OAAO,CAACC,OAAO,CAACJ,QAAQ,CAAC;AACpC;AAIA,SAAS/G,eAAeA,CACpB+G,QAAqC,EACvC;EACE,IAAIA,QAAQ,CAACT,MAAM,EAAE;IACjB,MAAM,IAAIrF,KAAK,CAAC,mCAAmC,GAAG8F,QAAQ,CAAChI,YAAY,GAAG,GAAG,GAAGgI,QAAQ,CAAC/H,cAAc,CAAC;EAChH;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-foundationdb/foundationdb-helpers.js b/dist/cjs/plugins/storage-foundationdb/foundationdb-helpers.js deleted file mode 100644 index 2f9e8fd29b5..00000000000 --- a/dist/cjs/plugins/storage-foundationdb/foundationdb-helpers.js +++ /dev/null @@ -1,13 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.FOUNDATION_DB_WRITE_BATCH_SIZE = exports.CLEANUP_INDEX = void 0; -exports.getFoundationDBIndexName = getFoundationDBIndexName; -function getFoundationDBIndexName(index) { - return index.join('|'); -} -var CLEANUP_INDEX = exports.CLEANUP_INDEX = ['_deleted', '_meta.lwt']; -var FOUNDATION_DB_WRITE_BATCH_SIZE = exports.FOUNDATION_DB_WRITE_BATCH_SIZE = 2000; -//# sourceMappingURL=foundationdb-helpers.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-foundationdb/foundationdb-helpers.js.map 
b/dist/cjs/plugins/storage-foundationdb/foundationdb-helpers.js.map deleted file mode 100644 index 43c0e922b32..00000000000 --- a/dist/cjs/plugins/storage-foundationdb/foundationdb-helpers.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"foundationdb-helpers.js","names":["getFoundationDBIndexName","index","join","CLEANUP_INDEX","exports","FOUNDATION_DB_WRITE_BATCH_SIZE"],"sources":["../../../../src/plugins/storage-foundationdb/foundationdb-helpers.ts"],"sourcesContent":["export function getFoundationDBIndexName(index: string[]): string {\n return index.join('|');\n}\nexport const CLEANUP_INDEX: string[] = ['_deleted', '_meta.lwt'];\n\nexport const FOUNDATION_DB_WRITE_BATCH_SIZE = 2000;\n"],"mappings":";;;;;;;AAAO,SAASA,wBAAwBA,CAACC,KAAe,EAAU;EAC9D,OAAOA,KAAK,CAACC,IAAI,CAAC,GAAG,CAAC;AAC1B;AACO,IAAMC,aAAuB,GAAAC,OAAA,CAAAD,aAAA,GAAG,CAAC,UAAU,EAAE,WAAW,CAAC;AAEzD,IAAME,8BAA8B,GAAAD,OAAA,CAAAC,8BAAA,GAAG,IAAI","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-foundationdb/foundationdb-query.js b/dist/cjs/plugins/storage-foundationdb/foundationdb-query.js deleted file mode 100644 index 778253d7e97..00000000000 --- a/dist/cjs/plugins/storage-foundationdb/foundationdb-query.js +++ /dev/null @@ -1,115 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.queryFoundationDB = queryFoundationDB; -var _customIndex = require("../../custom-index.js"); -var _index = require("../../plugins/utils/index.js"); -var _foundationdbHelpers = require("./foundationdb-helpers.js"); -var _rxQueryHelper = require("../../rx-query-helper.js"); -async function queryFoundationDB(instance, preparedQuery) { - var queryPlan = preparedQuery.queryPlan; - var query = preparedQuery.query; - var skip = query.skip ? query.skip : 0; - var limit = query.limit ? 
query.limit : Infinity; - var skipPlusLimit = skip + limit; - var queryPlanFields = queryPlan.index; - var mustManuallyResort = !queryPlan.sortSatisfiedByIndex; - var queryMatcher = false; - if (!queryPlan.selectorSatisfiedByIndex) { - queryMatcher = (0, _rxQueryHelper.getQueryMatcher)(instance.schema, preparedQuery.query); - } - var dbs = await instance.internals.dbsPromise; - var indexForName = queryPlanFields.slice(0); - var indexName = (0, _foundationdbHelpers.getFoundationDBIndexName)(indexForName); - var indexDB = (0, _index.ensureNotFalsy)(dbs.indexes[indexName]).db; - var lowerBound = queryPlan.startKeys; - var lowerBoundString = (0, _customIndex.getStartIndexStringFromLowerBound)(instance.schema, indexForName, lowerBound); - var upperBound = queryPlan.endKeys; - var upperBoundString = (0, _customIndex.getStartIndexStringFromUpperBound)(instance.schema, indexForName, upperBound); - var result = await dbs.root.doTransaction(async tx => { - var innerResult = []; - var indexTx = tx.at(indexDB.subspace); - var mainTx = tx.at(dbs.main.subspace); - - /** - * TODO for whatever reason the keySelectors like firstGreaterThan etc. - * do not work properly. So we have to hack here to find the correct - * document in case lowerBoundString===upperBoundString. - * This likely must be fixed in the foundationdb library. - * When it is fixed, we do not need this if-case and instead - * can rely on .getRangeBatch() in all cases. 
- */ - if (lowerBoundString === upperBoundString) { - var docId = await indexTx.get(lowerBoundString); - if (docId) { - var docData = await mainTx.get(docId); - if (!queryMatcher || queryMatcher(docData)) { - innerResult.push(docData); - } - } - return innerResult; - } - if (!queryPlan.inclusiveStart) { - lowerBoundString = (0, _customIndex.changeIndexableStringByOneQuantum)(lowerBoundString, 1); - } - if (queryPlan.inclusiveEnd) { - upperBoundString = (0, _customIndex.changeIndexableStringByOneQuantum)(upperBoundString, +1); - } - var range = indexTx.getRangeBatch(lowerBoundString, upperBoundString, - // queryPlan.inclusiveStart ? keySelector.firstGreaterThan(lowerBoundString) : keySelector.firstGreaterOrEqual(lowerBoundString), - // queryPlan.inclusiveEnd ? keySelector.lastLessOrEqual(upperBoundString) : keySelector.lastLessThan(upperBoundString), - { - // TODO these options seem to be broken in the foundationdb node bindings - // limit: instance.settings.batchSize, - // streamingMode: StreamingMode.Exact - }); - var done = false; - while (!done) { - var next = await range.next(); - if (next.done) { - done = true; - break; - } - var rows = next.value; - if (!queryPlan.inclusiveStart) { - var firstRow = rows[0]; - if (firstRow && firstRow[0] === lowerBoundString) { - rows.shift(); - } - } - if (!queryPlan.inclusiveEnd) { - var lastRow = (0, _index.lastOfArray)(rows); - if (lastRow && lastRow[0] === upperBoundString) { - rows.pop(); - } - } - var docIds = rows.map(row => row[1]); - var docsData = await Promise.all(docIds.map(docId => mainTx.get(docId))); - docsData.forEach(docData => { - if (!done) { - if (!queryMatcher || queryMatcher(docData)) { - innerResult.push(docData); - } - } - if (!mustManuallyResort && innerResult.length === skipPlusLimit) { - done = true; - range.return(); - } - }); - } - return innerResult; - }); - if (mustManuallyResort) { - var sortComparator = (0, _rxQueryHelper.getSortComparator)(instance.schema, preparedQuery.query); - result = 
result.sort(sortComparator); - } - - // apply skip and limit boundaries. - result = result.slice(skip, skipPlusLimit); - return { - documents: result - }; -} -//# sourceMappingURL=foundationdb-query.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-foundationdb/foundationdb-query.js.map b/dist/cjs/plugins/storage-foundationdb/foundationdb-query.js.map deleted file mode 100644 index 61dea4c2c46..00000000000 --- a/dist/cjs/plugins/storage-foundationdb/foundationdb-query.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"foundationdb-query.js","names":["_customIndex","require","_index","_foundationdbHelpers","_rxQueryHelper","queryFoundationDB","instance","preparedQuery","queryPlan","query","skip","limit","Infinity","skipPlusLimit","queryPlanFields","index","mustManuallyResort","sortSatisfiedByIndex","queryMatcher","selectorSatisfiedByIndex","getQueryMatcher","schema","dbs","internals","dbsPromise","indexForName","slice","indexName","getFoundationDBIndexName","indexDB","ensureNotFalsy","indexes","db","lowerBound","startKeys","lowerBoundString","getStartIndexStringFromLowerBound","upperBound","endKeys","upperBoundString","getStartIndexStringFromUpperBound","result","root","doTransaction","tx","innerResult","indexTx","at","subspace","mainTx","main","docId","get","docData","push","inclusiveStart","changeIndexableStringByOneQuantum","inclusiveEnd","range","getRangeBatch","done","next","rows","value","firstRow","shift","lastRow","lastOfArray","pop","docIds","map","row","docsData","Promise","all","forEach","length","return","sortComparator","getSortComparator","sort","documents"],"sources":["../../../../src/plugins/storage-foundationdb/foundationdb-query.ts"],"sourcesContent":["import {\n changeIndexableStringByOneQuantum,\n getStartIndexStringFromLowerBound,\n getStartIndexStringFromUpperBound\n} from '../../custom-index.ts';\nimport type {\n PreparedQuery,\n QueryMatcher,\n RxDocumentData,\n RxStorageQueryResult\n} from 
'../../types/index.d.ts';\nimport { ensureNotFalsy, lastOfArray } from '../../plugins/utils/index.ts';\nimport { getFoundationDBIndexName } from './foundationdb-helpers.ts';\nimport { RxStorageInstanceFoundationDB } from './rx-storage-instance-foundationdb.ts';\nimport { getQueryMatcher, getSortComparator } from '../../rx-query-helper.ts';\n\nexport async function queryFoundationDB(\n instance: RxStorageInstanceFoundationDB,\n preparedQuery: PreparedQuery\n): Promise> {\n const queryPlan = preparedQuery.queryPlan;\n const query = preparedQuery.query;\n const skip = query.skip ? query.skip : 0;\n const limit = query.limit ? query.limit : Infinity;\n const skipPlusLimit = skip + limit;\n const queryPlanFields: string[] = queryPlan.index;\n const mustManuallyResort = !queryPlan.sortSatisfiedByIndex;\n\n\n let queryMatcher: QueryMatcher> | false = false;\n if (!queryPlan.selectorSatisfiedByIndex) {\n queryMatcher = getQueryMatcher(\n instance.schema,\n preparedQuery.query\n );\n }\n\n const dbs = await instance.internals.dbsPromise;\n\n\n const indexForName = queryPlanFields.slice(0);\n const indexName = getFoundationDBIndexName(indexForName);\n const indexDB = ensureNotFalsy(dbs.indexes[indexName]).db;\n\n let lowerBound: any[] = queryPlan.startKeys;\n let lowerBoundString = getStartIndexStringFromLowerBound(\n instance.schema,\n indexForName,\n lowerBound\n );\n\n let upperBound: any[] = queryPlan.endKeys;\n let upperBoundString = getStartIndexStringFromUpperBound(\n instance.schema,\n indexForName,\n upperBound\n );\n let result: RxDocumentData[] = await dbs.root.doTransaction(async (tx: any) => {\n const innerResult: RxDocumentData[] = [];\n const indexTx = tx.at(indexDB.subspace);\n const mainTx = tx.at(dbs.main.subspace);\n\n\n /**\n * TODO for whatever reason the keySelectors like firstGreaterThan etc.\n * do not work properly. 
So we have to hack here to find the correct\n * document in case lowerBoundString===upperBoundString.\n * This likely must be fixed in the foundationdb library.\n * When it is fixed, we do not need this if-case and instead\n * can rely on .getRangeBatch() in all cases.\n */\n if (lowerBoundString === upperBoundString) {\n const docId: string = await indexTx.get(lowerBoundString);\n if (docId) {\n const docData = await mainTx.get(docId);\n if (!queryMatcher || queryMatcher(docData)) {\n innerResult.push(docData);\n }\n }\n return innerResult;\n }\n\n if (!queryPlan.inclusiveStart) {\n lowerBoundString = changeIndexableStringByOneQuantum(lowerBoundString, 1);\n }\n if (queryPlan.inclusiveEnd) {\n upperBoundString = changeIndexableStringByOneQuantum(upperBoundString, +1);\n }\n\n const range = indexTx.getRangeBatch(\n lowerBoundString,\n upperBoundString,\n // queryPlan.inclusiveStart ? keySelector.firstGreaterThan(lowerBoundString) : keySelector.firstGreaterOrEqual(lowerBoundString),\n // queryPlan.inclusiveEnd ? 
keySelector.lastLessOrEqual(upperBoundString) : keySelector.lastLessThan(upperBoundString),\n {\n // TODO these options seem to be broken in the foundationdb node bindings\n // limit: instance.settings.batchSize,\n // streamingMode: StreamingMode.Exact\n }\n );\n let done = false;\n while (!done) {\n const next = await range.next();\n if (next.done) {\n done = true;\n break;\n }\n const rows: [string, string] = next.value;\n\n if (!queryPlan.inclusiveStart) {\n const firstRow = rows[0];\n if (\n firstRow &&\n firstRow[0] === lowerBoundString\n ) {\n rows.shift();\n }\n }\n if (!queryPlan.inclusiveEnd) {\n const lastRow = lastOfArray(rows);\n if (\n lastRow &&\n lastRow[0] === upperBoundString\n ) {\n rows.pop();\n }\n }\n\n const docIds = rows.map(row => row[1]);\n const docsData: RxDocumentData[] = await Promise.all(docIds.map((docId: string) => mainTx.get(docId)));\n\n docsData.forEach((docData) => {\n if (!done) {\n if (!queryMatcher || queryMatcher(docData)) {\n innerResult.push(docData);\n }\n }\n if (\n !mustManuallyResort &&\n innerResult.length === skipPlusLimit\n ) {\n done = true;\n range.return();\n }\n });\n }\n return innerResult;\n });\n if (mustManuallyResort) {\n const sortComparator = getSortComparator(instance.schema, preparedQuery.query);\n result = result.sort(sortComparator);\n }\n\n // apply skip and limit boundaries.\n result = result.slice(skip, skipPlusLimit);\n\n return {\n documents: result\n 
};\n}\n"],"mappings":";;;;;;AAAA,IAAAA,YAAA,GAAAC,OAAA;AAWA,IAAAC,MAAA,GAAAD,OAAA;AACA,IAAAE,oBAAA,GAAAF,OAAA;AAEA,IAAAG,cAAA,GAAAH,OAAA;AAEO,eAAeI,iBAAiBA,CACnCC,QAAkD,EAClDC,aAAuC,EACC;EACxC,IAAMC,SAAS,GAAGD,aAAa,CAACC,SAAS;EACzC,IAAMC,KAAK,GAAGF,aAAa,CAACE,KAAK;EACjC,IAAMC,IAAI,GAAGD,KAAK,CAACC,IAAI,GAAGD,KAAK,CAACC,IAAI,GAAG,CAAC;EACxC,IAAMC,KAAK,GAAGF,KAAK,CAACE,KAAK,GAAGF,KAAK,CAACE,KAAK,GAAGC,QAAQ;EAClD,IAAMC,aAAa,GAAGH,IAAI,GAAGC,KAAK;EAClC,IAAMG,eAAyB,GAAGN,SAAS,CAACO,KAAK;EACjD,IAAMC,kBAAkB,GAAG,CAACR,SAAS,CAACS,oBAAoB;EAG1D,IAAIC,YAA6D,GAAG,KAAK;EACzE,IAAI,CAACV,SAAS,CAACW,wBAAwB,EAAE;IACrCD,YAAY,GAAG,IAAAE,8BAAe,EAC1Bd,QAAQ,CAACe,MAAM,EACfd,aAAa,CAACE,KAClB,CAAC;EACL;EAEA,IAAMa,GAAG,GAAG,MAAMhB,QAAQ,CAACiB,SAAS,CAACC,UAAU;EAG/C,IAAMC,YAAY,GAAGX,eAAe,CAACY,KAAK,CAAC,CAAC,CAAC;EAC7C,IAAMC,SAAS,GAAG,IAAAC,6CAAwB,EAACH,YAAY,CAAC;EACxD,IAAMI,OAAO,GAAG,IAAAC,qBAAc,EAACR,GAAG,CAACS,OAAO,CAACJ,SAAS,CAAC,CAAC,CAACK,EAAE;EAEzD,IAAIC,UAAiB,GAAGzB,SAAS,CAAC0B,SAAS;EAC3C,IAAIC,gBAAgB,GAAG,IAAAC,8CAAiC,EACpD9B,QAAQ,CAACe,MAAM,EACfI,YAAY,EACZQ,UACJ,CAAC;EAED,IAAII,UAAiB,GAAG7B,SAAS,CAAC8B,OAAO;EACzC,IAAIC,gBAAgB,GAAG,IAAAC,8CAAiC,EACpDlC,QAAQ,CAACe,MAAM,EACfI,YAAY,EACZY,UACJ,CAAC;EACD,IAAII,MAAmC,GAAG,MAAMnB,GAAG,CAACoB,IAAI,CAACC,aAAa,CAAC,MAAOC,EAAO,IAAK;IACtF,IAAMC,WAAwC,GAAG,EAAE;IACnD,IAAMC,OAAO,GAAGF,EAAE,CAACG,EAAE,CAAClB,OAAO,CAACmB,QAAQ,CAAC;IACvC,IAAMC,MAAM,GAAGL,EAAE,CAACG,EAAE,CAACzB,GAAG,CAAC4B,IAAI,CAACF,QAAQ,CAAC;;IAGvC;AACR;AACA;AACA;AACA;AACA;AACA;AACA;IACQ,IAAIb,gBAAgB,KAAKI,gBAAgB,EAAE;MACvC,IAAMY,KAAa,GAAG,MAAML,OAAO,CAACM,GAAG,CAACjB,gBAAgB,CAAC;MACzD,IAAIgB,KAAK,EAAE;QACP,IAAME,OAAO,GAAG,MAAMJ,MAAM,CAACG,GAAG,CAACD,KAAK,CAAC;QACvC,IAAI,CAACjC,YAAY,IAAIA,YAAY,CAACmC,OAAO,CAAC,EAAE;UACxCR,WAAW,CAACS,IAAI,CAACD,OAAO,CAAC;QAC7B;MACJ;MACA,OAAOR,WAAW;IACtB;IAEA,IAAI,CAACrC,SAAS,CAAC+C,cAAc,EAAE;MAC3BpB,gBAAgB,GAAG,IAAAqB,8CAAiC,EAACrB,gBAAgB,EAAE,CAAC,CAAC;IAC7E;IACA,IAAI3B,SAAS,CAACiD,YAAY,EAAE;MACxBlB,gBAAgB,GAAG,IAAAiB,8CAAiC,EAACjB,gBAAgB,EAAE,CAAC,
CAAC,CAAC;IAC9E;IAEA,IAAMmB,KAAK,GAAGZ,OAAO,CAACa,aAAa,CAC/BxB,gBAAgB,EAChBI,gBAAgB;IAChB;IACA;IACA;MACI;MACA;MACA;IAAA,CAER,CAAC;IACD,IAAIqB,IAAI,GAAG,KAAK;IAChB,OAAO,CAACA,IAAI,EAAE;MACV,IAAMC,IAAI,GAAG,MAAMH,KAAK,CAACG,IAAI,CAAC,CAAC;MAC/B,IAAIA,IAAI,CAACD,IAAI,EAAE;QACXA,IAAI,GAAG,IAAI;QACX;MACJ;MACA,IAAME,IAAsB,GAAGD,IAAI,CAACE,KAAK;MAEzC,IAAI,CAACvD,SAAS,CAAC+C,cAAc,EAAE;QAC3B,IAAMS,QAAQ,GAAGF,IAAI,CAAC,CAAC,CAAC;QACxB,IACIE,QAAQ,IACRA,QAAQ,CAAC,CAAC,CAAC,KAAK7B,gBAAgB,EAClC;UACE2B,IAAI,CAACG,KAAK,CAAC,CAAC;QAChB;MACJ;MACA,IAAI,CAACzD,SAAS,CAACiD,YAAY,EAAE;QACzB,IAAMS,OAAO,GAAG,IAAAC,kBAAW,EAACL,IAAI,CAAC;QACjC,IACII,OAAO,IACPA,OAAO,CAAC,CAAC,CAAC,KAAK3B,gBAAgB,EACjC;UACEuB,IAAI,CAACM,GAAG,CAAC,CAAC;QACd;MACJ;MAEA,IAAMC,MAAM,GAAGP,IAAI,CAACQ,GAAG,CAACC,GAAG,IAAIA,GAAG,CAAC,CAAC,CAAC,CAAC;MACtC,IAAMC,QAAqC,GAAG,MAAMC,OAAO,CAACC,GAAG,CAACL,MAAM,CAACC,GAAG,CAAEnB,KAAa,IAAKF,MAAM,CAACG,GAAG,CAACD,KAAK,CAAC,CAAC,CAAC;MAEjHqB,QAAQ,CAACG,OAAO,CAAEtB,OAAO,IAAK;QAC1B,IAAI,CAACO,IAAI,EAAE;UACP,IAAI,CAAC1C,YAAY,IAAIA,YAAY,CAACmC,OAAO,CAAC,EAAE;YACxCR,WAAW,CAACS,IAAI,CAACD,OAAO,CAAC;UAC7B;QACJ;QACA,IACI,CAACrC,kBAAkB,IACnB6B,WAAW,CAAC+B,MAAM,KAAK/D,aAAa,EACtC;UACE+C,IAAI,GAAG,IAAI;UACXF,KAAK,CAACmB,MAAM,CAAC,CAAC;QAClB;MACJ,CAAC,CAAC;IACN;IACA,OAAOhC,WAAW;EACtB,CAAC,CAAC;EACF,IAAI7B,kBAAkB,EAAE;IACpB,IAAM8D,cAAc,GAAG,IAAAC,gCAAiB,EAACzE,QAAQ,CAACe,MAAM,EAAEd,aAAa,CAACE,KAAK,CAAC;IAC9EgC,MAAM,GAAGA,MAAM,CAACuC,IAAI,CAACF,cAAc,CAAC;EACxC;;EAEA;EACArC,MAAM,GAAGA,MAAM,CAACf,KAAK,CAAChB,IAAI,EAAEG,aAAa,CAAC;EAE1C,OAAO;IACHoE,SAAS,EAAExC;EACf,CAAC;AACL","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-foundationdb/foundationdb-types.js b/dist/cjs/plugins/storage-foundationdb/foundationdb-types.js deleted file mode 100644 index e4026246631..00000000000 --- a/dist/cjs/plugins/storage-foundationdb/foundationdb-types.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=foundationdb-types.js.map \ No newline at end of file diff 
--git a/dist/cjs/plugins/storage-foundationdb/foundationdb-types.js.map b/dist/cjs/plugins/storage-foundationdb/foundationdb-types.js.map deleted file mode 100644 index 9f48cdef0bb..00000000000 --- a/dist/cjs/plugins/storage-foundationdb/foundationdb-types.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"foundationdb-types.js","names":[],"sources":["../../../../src/plugins/storage-foundationdb/foundationdb-types.ts"],"sourcesContent":["/* eslint-disable no-unused-vars */\n/* eslint-disable @typescript-eslint/no-unused-vars */\n\nimport type {\n EventBulk,\n RxAttachmentWriteData,\n RxDocumentData,\n RxStorage,\n RxStorageChangeEvent,\n RxStorageDefaultCheckpoint\n} from '../../types/index.d.ts';\nexport type RxStorageFoundationDBSettings = {\n /**\n * Version of the API of the foundationDB server.\n */\n apiVersion: number;\n /**\n * Path to the foundationDB cluster file\n * like '/path/to/fdb.cluster'\n * (optional)\n */\n clusterFile?: string;\n batchSize?: number;\n};\nexport type RxStorageFoundationDBInstanceCreationOptions = {\n // can be overwritten per instance\n batchSize?: number;\n};\n\n/**\n * TODO atm we cannot import types from 'foundationdb'\n * because 'foundationdb' is an optional peer dependency\n * this is NOT also in the devDependencies.\n * This is because it requires to install the foundationdb client cli\n * which would mean everyone that wants to develop RxDB must have this installed manually.\n */\n// import {\n// open as foundationDBOpen,\n// Database,\n// Transaction\n// } from 'foundationdb';\n\nexport type FoundationDBIndexMeta = {\n indexName: string;\n index: string[];\n getIndexableString: (doc: RxDocumentData) => string;\n db: FoundationDBDatabase;\n};\n\nexport type FoundationDBConnection = any; // ReturnType;\nexport type FoundationDBDatabase = any; // Database;\nexport type FoundationDBTransaction = any; // Transaction, any>;\nexport type FoundationDBStorageInternals = {\n connection: FoundationDBConnection;\n dbsPromise: 
Promise<{\n root: FoundationDBDatabase;\n main: FoundationDBDatabase;\n attachments: FoundationDBDatabase;\n events: FoundationDBDatabase>, RxStorageDefaultCheckpoint>>;\n indexes: {\n [indexName: string]: FoundationDBIndexMeta;\n };\n }>;\n};\nexport type RxStorageFoundationDB = RxStorage, RxStorageFoundationDBInstanceCreationOptions> & {};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-foundationdb/index.js b/dist/cjs/plugins/storage-foundationdb/index.js deleted file mode 100644 index 7835e87a898..00000000000 --- a/dist/cjs/plugins/storage-foundationdb/index.js +++ /dev/null @@ -1,62 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _exportNames = { - getRxStorageFoundationDB: true -}; -exports.getRxStorageFoundationDB = getRxStorageFoundationDB; -var _rxStorageHelper = require("../../rx-storage-helper.js"); -var _utilsRxdbVersion = require("../utils/utils-rxdb-version.js"); -var _rxStorageInstanceFoundationdb = require("./rx-storage-instance-foundationdb.js"); -var _foundationdbTypes = require("./foundationdb-types.js"); -Object.keys(_foundationdbTypes).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _foundationdbTypes[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _foundationdbTypes[key]; - } - }); -}); -var _foundationdbHelpers = require("./foundationdb-helpers.js"); -Object.keys(_foundationdbHelpers).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _foundationdbHelpers[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _foundationdbHelpers[key]; - } - }); -}); -var 
versionSet; -function getRxStorageFoundationDB(settings) { - if (versionSet && versionSet !== settings.apiVersion) { - throw new Error('foundationdb already initialized with api version ' + versionSet); - } else if (!versionSet) { - versionSet = settings.apiVersion; - var { - setAPIVersion - } = require('foundationdb'); - setAPIVersion(settings.apiVersion); - } - var storage = { - name: 'foundationdb', - rxdbVersion: _utilsRxdbVersion.RXDB_VERSION, - createStorageInstance(params) { - (0, _rxStorageHelper.ensureRxStorageInstanceParamsAreCorrect)(params); - var useSettings = Object.assign({}, settings, params.options); - if (!useSettings.batchSize) { - useSettings.batchSize = 50; - } - return (0, _rxStorageInstanceFoundationdb.createFoundationDBStorageInstance)(this, params, useSettings); - } - }; - return storage; -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-foundationdb/index.js.map b/dist/cjs/plugins/storage-foundationdb/index.js.map deleted file mode 100644 index d54c01d54e5..00000000000 --- a/dist/cjs/plugins/storage-foundationdb/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_rxStorageHelper","require","_utilsRxdbVersion","_rxStorageInstanceFoundationdb","_foundationdbTypes","Object","keys","forEach","key","prototype","hasOwnProperty","call","_exportNames","exports","defineProperty","enumerable","get","_foundationdbHelpers","versionSet","getRxStorageFoundationDB","settings","apiVersion","Error","setAPIVersion","storage","name","rxdbVersion","RXDB_VERSION","createStorageInstance","params","ensureRxStorageInstanceParamsAreCorrect","useSettings","assign","options","batchSize","createFoundationDBStorageInstance"],"sources":["../../../../src/plugins/storage-foundationdb/index.ts"],"sourcesContent":["import { ensureRxStorageInstanceParamsAreCorrect } from '../../rx-storage-helper.ts';\nimport type {\n RxStorageInstanceCreationParams\n} from '../../types/index.d.ts';\nimport 
{ RXDB_VERSION } from '../utils/utils-rxdb-version.ts';\nimport type {\n RxStorageFoundationDB,\n RxStorageFoundationDBInstanceCreationOptions,\n RxStorageFoundationDBSettings\n} from './foundationdb-types.ts';\n\nimport {\n createFoundationDBStorageInstance,\n RxStorageInstanceFoundationDB\n} from './rx-storage-instance-foundationdb.ts';\n\n\nlet versionSet: undefined | number;\n\nexport function getRxStorageFoundationDB(\n settings: RxStorageFoundationDBSettings\n): RxStorageFoundationDB {\n if (versionSet && versionSet !== settings.apiVersion) {\n throw new Error('foundationdb already initialized with api version ' + versionSet);\n } else if (!versionSet) {\n versionSet = settings.apiVersion;\n const { setAPIVersion } = require('foundationdb');\n setAPIVersion(settings.apiVersion);\n }\n\n\n const storage: RxStorageFoundationDB = {\n name: 'foundationdb',\n rxdbVersion: RXDB_VERSION,\n\n createStorageInstance(\n params: RxStorageInstanceCreationParams\n ): Promise> {\n ensureRxStorageInstanceParamsAreCorrect(params);\n const useSettings = Object.assign(\n {},\n settings,\n params.options\n );\n if (!useSettings.batchSize) {\n useSettings.batchSize = 50;\n }\n return createFoundationDBStorageInstance(this, params, useSettings);\n }\n };\n\n return storage;\n}\n\n\nexport * from './foundationdb-types.ts';\nexport * from 
'./foundationdb-helpers.ts';\n"],"mappings":";;;;;;;;;AAAA,IAAAA,gBAAA,GAAAC,OAAA;AAIA,IAAAC,iBAAA,GAAAD,OAAA;AAOA,IAAAE,8BAAA,GAAAF,OAAA;AA4CA,IAAAG,kBAAA,GAAAH,OAAA;AAAAI,MAAA,CAAAC,IAAA,CAAAF,kBAAA,EAAAG,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAJ,kBAAA,CAAAI,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAZ,kBAAA,CAAAI,GAAA;IAAA;EAAA;AAAA;AACA,IAAAS,oBAAA,GAAAhB,OAAA;AAAAI,MAAA,CAAAC,IAAA,CAAAW,oBAAA,EAAAV,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAS,oBAAA,CAAAT,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAC,oBAAA,CAAAT,GAAA;IAAA;EAAA;AAAA;AAvCA,IAAIU,UAA8B;AAE3B,SAASC,wBAAwBA,CACpCC,QAAuC,EAClB;EACrB,IAAIF,UAAU,IAAIA,UAAU,KAAKE,QAAQ,CAACC,UAAU,EAAE;IAClD,MAAM,IAAIC,KAAK,CAAC,oDAAoD,GAAGJ,UAAU,CAAC;EACtF,CAAC,MAAM,IAAI,CAACA,UAAU,EAAE;IACpBA,UAAU,GAAGE,QAAQ,CAACC,UAAU;IAChC,IAAM;MAAEE;IAAc,CAAC,GAAGtB,OAAO,CAAC,cAAc,CAAC;IACjDsB,aAAa,CAACH,QAAQ,CAACC,UAAU,CAAC;EACtC;EAGA,IAAMG,OAA8B,GAAG;IACnCC,IAAI,EAAE,cAAc;IACpBC,WAAW,EAAEC,8BAAY;IAEzBC,qBAAqBA,CACjBC,MAAgG,EAC/C;MACjD,IAAAC,wDAAuC,EAACD,MAAM,CAAC;MAC/C,IAAME,WAAW,GAAG1B,MAAM,CAAC2B,MAAM,CAC7B,CAAC,CAAC,EACFZ,QAAQ,EACRS,MAAM,CAACI,OACX,CAAC;MACD,IAAI,CAACF,WAAW,CAACG,SAAS,EAAE;QACxBH,WAAW,CAACG,SAAS,GAAG,EAAE;MAC9B;MACA,OAAO,IAAAC,gEAAiC,EAAC,IAAI,EAAEN,MAAM,EAAEE,WAAW,CAAC;IACvE;EACJ,CAAC;EAED,OAAOP,OAAO;AAClB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-foundationdb/rx-storage-instance-foundationdb.js b/dist/cjs/plugins/storage-foundationdb/rx-storage-instance-foundationdb.js deleted file mode 100644 index 5d4bef21cf2..00000000000 --- a/dist/cjs/plugins/storage-foundationdb/rx-storage-instance-foundationdb.js +++ /dev/null @@ -1,294 
+0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxStorageInstanceFoundationDB = void 0; -exports.createFoundationDBStorageInstance = createFoundationDBStorageInstance; -var _rxjs = require("rxjs"); -var _rxSchemaHelper = require("../../rx-schema-helper.js"); -var _rxStorageHelper = require("../../rx-storage-helper.js"); -var _foundationdbHelpers = require("./foundationdb-helpers.js"); -var _customIndex = require("../../custom-index.js"); -var _index = require("../../plugins/utils/index.js"); -var _foundationdbQuery = require("./foundationdb-query.js"); -var _queryPlanner = require("../../query-planner.js"); -var _index2 = require("../storage-memory/index.js"); -// import { -// open as foundationDBOpen, -// directory as foundationDBDirectory, -// encoders as foundationDBEncoders, -// keySelector as foundationDBKeySelector, -// StreamingMode as foundationDBStreamingMode -// } from 'foundationdb'; -var RxStorageInstanceFoundationDB = exports.RxStorageInstanceFoundationDB = /*#__PURE__*/function () { - function RxStorageInstanceFoundationDB(storage, databaseName, collectionName, schema, internals, options, settings) { - this.changes$ = new _rxjs.Subject(); - this.storage = storage; - this.databaseName = databaseName; - this.collectionName = collectionName; - this.schema = schema; - this.internals = internals; - this.options = options; - this.settings = settings; - this.primaryPath = (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(this.schema.primaryKey); - } - var _proto = RxStorageInstanceFoundationDB.prototype; - _proto.bulkWrite = async function bulkWrite(documentWrites, context) { - var dbs = await this.internals.dbsPromise; - var ret = { - success: [], - error: [] - }; - - /** - * Doing too many write in a single transaction - * will throw with a 'Transaction exceeds byte limit' - * so we have to batch up the writes. 
- */ - var writeBatches = (0, _index.batchArray)(documentWrites, _foundationdbHelpers.FOUNDATION_DB_WRITE_BATCH_SIZE); - await Promise.all(writeBatches.map(async writeBatch => { - var categorized = null; - await dbs.root.doTransaction(async tx => { - var ids = writeBatch.map(row => row.document[this.primaryPath]); - var mainTx = tx.at(dbs.main.subspace); - var attachmentTx = tx.at(dbs.attachments.subspace); - var docsInDB = new Map(); - /** - * TODO this might be faster if fdb - * any time adds a bulk-fetch-by-key method. - */ - await Promise.all(ids.map(async id => { - var doc = await mainTx.get(id); - docsInDB.set(id, doc); - })); - categorized = (0, _rxStorageHelper.categorizeBulkWriteRows)(this, this.primaryPath, docsInDB, writeBatch, context); - (0, _index.appendToArray)(ret.error, categorized.errors); - - // INSERTS - categorized.bulkInsertDocs.forEach(writeRow => { - var docId = writeRow.document[this.primaryPath]; - ret.success.push(writeRow.document); - - // insert document data - mainTx.set(docId, writeRow.document); - - // insert secondary indexes - Object.values(dbs.indexes).forEach(indexMeta => { - var indexString = indexMeta.getIndexableString(writeRow.document); - var indexTx = tx.at(indexMeta.db.subspace); - indexTx.set(indexString, docId); - }); - }); - // UPDATES - categorized.bulkUpdateDocs.forEach(writeRow => { - var docId = writeRow.document[this.primaryPath]; - - // overwrite document data - mainTx.set(docId, writeRow.document); - - // update secondary indexes - Object.values(dbs.indexes).forEach(indexMeta => { - var oldIndexString = indexMeta.getIndexableString((0, _index.ensureNotFalsy)(writeRow.previous)); - var newIndexString = indexMeta.getIndexableString(writeRow.document); - if (oldIndexString !== newIndexString) { - var indexTx = tx.at(indexMeta.db.subspace); - indexTx.delete(oldIndexString); - indexTx.set(newIndexString, docId); - } - }); - ret.success.push(writeRow.document); - }); - - // attachments - 
categorized.attachmentsAdd.forEach(attachment => { - attachmentTx.set((0, _index2.attachmentMapKey)(attachment.documentId, attachment.attachmentId), attachment.attachmentData); - }); - categorized.attachmentsUpdate.forEach(attachment => { - attachmentTx.set((0, _index2.attachmentMapKey)(attachment.documentId, attachment.attachmentId), attachment.attachmentData); - }); - categorized.attachmentsRemove.forEach(attachment => { - attachmentTx.delete((0, _index2.attachmentMapKey)(attachment.documentId, attachment.attachmentId)); - }); - }); - categorized = (0, _index.ensureNotFalsy)(categorized); - /** - * The events must be emitted AFTER the transaction - * has finished. - * Otherwise an observable changestream might cause a read - * to a document that does not already exist outside of the transaction. - */ - if (categorized.eventBulk.events.length > 0) { - var lastState = (0, _index.ensureNotFalsy)(categorized.newestRow).document; - categorized.eventBulk.checkpoint = { - id: lastState[this.primaryPath], - lwt: lastState._meta.lwt - }; - categorized.eventBulk.endTime = (0, _index.now)(); - this.changes$.next(categorized.eventBulk); - } - })); - return ret; - }; - _proto.findDocumentsById = async function findDocumentsById(ids, withDeleted) { - var dbs = await this.internals.dbsPromise; - return dbs.main.doTransaction(async tx => { - var ret = []; - await Promise.all(ids.map(async docId => { - var docInDb = await tx.get(docId); - if (docInDb && (!docInDb._deleted || withDeleted)) { - ret.push(docInDb); - } - })); - return ret; - }); - }; - _proto.query = function query(preparedQuery) { - return (0, _foundationdbQuery.queryFoundationDB)(this, preparedQuery); - }; - _proto.count = async function count(preparedQuery) { - /** - * At this point in time (end 2022), FoundationDB does not support - * range counts. So we have to run a normal query and use the result set length. 
- * @link https://github.com/apple/foundationdb/issues/5981 - */ - var result = await this.query(preparedQuery); - return { - count: result.documents.length, - mode: 'fast' - }; - }; - _proto.getAttachmentData = async function getAttachmentData(documentId, attachmentId, _digest) { - var dbs = await this.internals.dbsPromise; - var attachment = await dbs.attachments.get((0, _index2.attachmentMapKey)(documentId, attachmentId)); - return attachment.data; - }; - _proto.changeStream = function changeStream() { - return this.changes$.asObservable(); - }; - _proto.remove = async function remove() { - var dbs = await this.internals.dbsPromise; - await dbs.root.doTransaction(tx => { - tx.clearRange('', _queryPlanner.INDEX_MAX); - return _index.PROMISE_RESOLVE_VOID; - }); - return this.close(); - }; - _proto.cleanup = async function cleanup(minimumDeletedTime) { - var { - keySelector, - StreamingMode - } = require('foundationdb'); - var maxDeletionTime = (0, _index.now)() - minimumDeletedTime; - var dbs = await this.internals.dbsPromise; - var index = _foundationdbHelpers.CLEANUP_INDEX; - var indexName = (0, _foundationdbHelpers.getFoundationDBIndexName)(index); - var indexMeta = dbs.indexes[indexName]; - var lowerBoundString = (0, _customIndex.getStartIndexStringFromLowerBound)(this.schema, index, [true, - /** - * Do not use 0 here, - * because 1 is the minimum value for _meta.lwt - */ - 1]); - var upperBoundString = (0, _customIndex.getStartIndexStringFromUpperBound)(this.schema, index, [true, maxDeletionTime]); - var noMoreUndeleted = true; - await dbs.root.doTransaction(async tx => { - var batchSize = (0, _index.ensureNotFalsy)(this.settings.batchSize); - var indexTx = tx.at(indexMeta.db.subspace); - var mainTx = tx.at(dbs.main.subspace); - var range = await indexTx.getRangeAll(keySelector.firstGreaterThan(lowerBoundString), upperBoundString, { - limit: batchSize + 1, - // get one more extra to detect what to return from cleanup() - streamingMode: StreamingMode.Exact - 
}); - if (range.length > batchSize) { - noMoreUndeleted = false; - range.pop(); - } - var docIds = range.map(row => row[1]); - var docsData = await Promise.all(docIds.map(docId => mainTx.get(docId))); - Object.values(dbs.indexes).forEach(indexMetaInner => { - var subIndexDB = tx.at(indexMetaInner.db.subspace); - docsData.forEach(docData => { - var indexString = indexMetaInner.getIndexableString(docData); - subIndexDB.delete(indexString); - }); - }); - docIds.forEach(id => mainTx.delete(id)); - }); - return noMoreUndeleted; - }; - _proto.conflictResultionTasks = function conflictResultionTasks() { - return new _rxjs.Subject().asObservable(); - }; - _proto.resolveConflictResultionTask = function resolveConflictResultionTask(_taskSolution) { - return _index.PROMISE_RESOLVE_VOID; - }; - _proto.close = async function close() { - if (this.closed) { - return this.closed; - } - this.closed = (async () => { - this.changes$.complete(); - var dbs = await this.internals.dbsPromise; - await dbs.root.close(); - - // TODO shouldn't we close the index databases? 
- // Object.values(dbs.indexes).forEach(db => db.close()); - })(); - return this.closed; - }; - return RxStorageInstanceFoundationDB; -}(); -function createFoundationDBStorageInstance(storage, params, settings) { - var primaryPath = (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(params.schema.primaryKey); - var { - open, - directory, - encoders - } = require('foundationdb'); - var connection = open(settings.clusterFile); - var dbsPromise = (async () => { - var dir = await directory.createOrOpen(connection, 'rxdb'); - var root = connection.at(dir).at(params.databaseName + '.').at(params.collectionName + '.').at(params.schema.version + '.'); - var main = root.at('main.').withKeyEncoding(encoders.string) // automatically encode & decode keys using tuples - .withValueEncoding(encoders.json); // and values using JSON - - var events = root.at('events.').withKeyEncoding(encoders.string).withValueEncoding(encoders.json); - var attachments = root.at('attachments.').withKeyEncoding(encoders.string).withValueEncoding(encoders.json); - var indexDBs = {}; - var useIndexes = params.schema.indexes ? 
params.schema.indexes.slice(0) : []; - useIndexes.push([primaryPath]); - var useIndexesFinal = useIndexes.map(index => { - var indexAr = (0, _index.toArray)(index); - return indexAr; - }); - // used for `getChangedDocumentsSince()` - useIndexesFinal.push(['_meta.lwt', primaryPath]); - useIndexesFinal.push(_foundationdbHelpers.CLEANUP_INDEX); - useIndexesFinal.forEach(indexAr => { - var indexName = (0, _foundationdbHelpers.getFoundationDBIndexName)(indexAr); - var indexDB = root.at(indexName + '.').withKeyEncoding(encoders.string).withValueEncoding(encoders.string); - indexDBs[indexName] = { - indexName, - db: indexDB, - getIndexableString: (0, _customIndex.getIndexableStringMonad)(params.schema, indexAr), - index: indexAr - }; - }); - return { - root, - main, - events, - attachments, - indexes: indexDBs - }; - })(); - var internals = { - connection, - dbsPromise: dbsPromise - }; - var instance = new RxStorageInstanceFoundationDB(storage, params.databaseName, params.collectionName, params.schema, internals, params.options, settings); - return Promise.resolve(instance); -} -//# sourceMappingURL=rx-storage-instance-foundationdb.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-foundationdb/rx-storage-instance-foundationdb.js.map b/dist/cjs/plugins/storage-foundationdb/rx-storage-instance-foundationdb.js.map deleted file mode 100644 index cd800ca3235..00000000000 --- a/dist/cjs/plugins/storage-foundationdb/rx-storage-instance-foundationdb.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"rx-storage-instance-foundationdb.js","names":["_rxjs","require","_rxSchemaHelper","_rxStorageHelper","_foundationdbHelpers","_customIndex","_index","_foundationdbQuery","_queryPlanner","_index2","RxStorageInstanceFoundationDB","exports","storage","databaseName","collectionName","schema","internals","options","settings","changes$","Subject","primaryPath","getPrimaryFieldOfPrimaryKey","primaryKey","_proto","prototype","bulkWrite","documentWrites","context","dbs","dbsPromise","ret","success","error","writeBatches","batchArray","FOUNDATION_DB_WRITE_BATCH_SIZE","Promise","all","map","writeBatch","categorized","root","doTransaction","tx","ids","row","document","mainTx","at","main","subspace","attachmentTx","attachments","docsInDB","Map","id","doc","get","set","categorizeBulkWriteRows","appendToArray","errors","bulkInsertDocs","forEach","writeRow","docId","push","Object","values","indexes","indexMeta","indexString","getIndexableString","indexTx","db","bulkUpdateDocs","oldIndexString","ensureNotFalsy","previous","newIndexString","delete","attachmentsAdd","attachment","attachmentMapKey","documentId","attachmentId","attachmentData","attachmentsUpdate","attachmentsRemove","eventBulk","events","length","lastState","newestRow","checkpoint","lwt","_meta","endTime","now","next","findDocumentsById","withDeleted","docInDb","_deleted","query","preparedQuery","queryFoundationDB","count","result","documents","mode","getAttachmentData","_digest","data","changeStream","asObservable","remove","clearRange","INDEX_MAX","PROMISE_RESOLVE_VOID","close","cleanup","minimumDeletedTime","keySelector","StreamingMode","maxDeletionTime","index","CLEANUP_INDEX","indexName","getFoundationDBIndexName","lowerBoundString","getStartIndexStringFromLowerBound","upperBoundString","getStartIndexStringFromUpperBound","noMoreUndeleted","batchSize","range","getRangeAll","firstGreaterThan","limit","streamingMode","Exact","pop","docIds","docsData","indexMetaInner","subIndexDB","docData","confl
ictResultionTasks","resolveConflictResultionTask","_taskSolution","closed","complete","createFoundationDBStorageInstance","params","open","directory","encoders","connection","clusterFile","dir","createOrOpen","version","withKeyEncoding","string","withValueEncoding","json","indexDBs","useIndexes","slice","useIndexesFinal","indexAr","toArray","indexDB","getIndexableStringMonad","instance","resolve"],"sources":["../../../../src/plugins/storage-foundationdb/rx-storage-instance-foundationdb.ts"],"sourcesContent":["import { Observable, Subject } from 'rxjs';\nimport { getPrimaryFieldOfPrimaryKey } from '../../rx-schema-helper.ts';\nimport type {\n BulkWriteRow,\n CategorizeBulkWriteRowsOutput,\n EventBulk,\n PreparedQuery,\n RxAttachmentWriteData,\n RxConflictResultionTask,\n RxConflictResultionTaskSolution,\n RxDocumentData,\n RxJsonSchema,\n RxStorageBulkWriteResponse,\n RxStorageChangeEvent,\n RxStorageCountResult,\n RxStorageDefaultCheckpoint,\n RxStorageInstance,\n RxStorageInstanceCreationParams,\n RxStorageQueryResult,\n StringKeys\n} from '../../types/index.d.ts';\nimport type {\n FoundationDBDatabase,\n FoundationDBIndexMeta,\n FoundationDBStorageInternals,\n RxStorageFoundationDB,\n RxStorageFoundationDBInstanceCreationOptions,\n RxStorageFoundationDBSettings\n} from './foundationdb-types.ts';\n// import {\n// open as foundationDBOpen,\n// directory as foundationDBDirectory,\n// encoders as foundationDBEncoders,\n// keySelector as foundationDBKeySelector,\n// StreamingMode as foundationDBStreamingMode\n// } from 'foundationdb';\nimport {\n categorizeBulkWriteRows\n} from '../../rx-storage-helper.ts';\nimport {\n\n CLEANUP_INDEX,\n FOUNDATION_DB_WRITE_BATCH_SIZE,\n getFoundationDBIndexName\n} from './foundationdb-helpers.ts';\nimport {\n getIndexableStringMonad,\n getStartIndexStringFromLowerBound,\n getStartIndexStringFromUpperBound\n} from '../../custom-index.ts';\nimport {\n appendToArray,\n batchArray,\n ensureNotFalsy,\n lastOfArray,\n now,\n 
PROMISE_RESOLVE_VOID,\n toArray\n} from '../../plugins/utils/index.ts';\nimport { queryFoundationDB } from './foundationdb-query.ts';\nimport { INDEX_MAX } from '../../query-planner.ts';\nimport { attachmentMapKey } from '../storage-memory/index.ts';\n\nexport class RxStorageInstanceFoundationDB implements RxStorageInstance<\n RxDocType,\n FoundationDBStorageInternals,\n RxStorageFoundationDBInstanceCreationOptions,\n RxStorageDefaultCheckpoint\n> {\n public readonly primaryPath: StringKeys>;\n\n public closed?: Promise;\n private changes$: Subject>, RxStorageDefaultCheckpoint>> = new Subject();\n\n constructor(\n public readonly storage: RxStorageFoundationDB,\n public readonly databaseName: string,\n public readonly collectionName: string,\n public readonly schema: Readonly>>,\n public readonly internals: FoundationDBStorageInternals,\n public readonly options: Readonly,\n public readonly settings: RxStorageFoundationDBSettings\n ) {\n this.primaryPath = getPrimaryFieldOfPrimaryKey(this.schema.primaryKey);\n }\n\n async bulkWrite(\n documentWrites: BulkWriteRow[],\n context: string\n ): Promise> {\n const dbs = await this.internals.dbsPromise;\n const ret: RxStorageBulkWriteResponse = {\n success: [],\n error: []\n };\n\n /**\n * Doing too many write in a single transaction\n * will throw with a 'Transaction exceeds byte limit'\n * so we have to batch up the writes.\n */\n const writeBatches = batchArray(documentWrites, FOUNDATION_DB_WRITE_BATCH_SIZE);\n await Promise.all(\n writeBatches.map(async (writeBatch) => {\n let categorized: CategorizeBulkWriteRowsOutput | undefined = null as any;\n await dbs.root.doTransaction(async (tx: any) => {\n const ids = writeBatch.map(row => (row.document as any)[this.primaryPath]);\n const mainTx = tx.at(dbs.main.subspace);\n const attachmentTx = tx.at(dbs.attachments.subspace);\n const docsInDB = new Map>();\n /**\n * TODO this might be faster if fdb\n * any time adds a bulk-fetch-by-key method.\n */\n await Promise.all(\n 
ids.map(async (id) => {\n const doc = await mainTx.get(id);\n docsInDB.set(id, doc);\n })\n );\n categorized = categorizeBulkWriteRows(\n this,\n this.primaryPath as any,\n docsInDB,\n writeBatch,\n context\n );\n appendToArray(ret.error, categorized.errors);\n\n // INSERTS\n categorized.bulkInsertDocs.forEach(writeRow => {\n const docId: string = writeRow.document[this.primaryPath] as any;\n ret.success.push(writeRow.document);\n\n // insert document data\n mainTx.set(docId, writeRow.document);\n\n // insert secondary indexes\n Object.values(dbs.indexes).forEach(indexMeta => {\n const indexString = indexMeta.getIndexableString(writeRow.document as any);\n const indexTx = tx.at(indexMeta.db.subspace);\n indexTx.set(indexString, docId);\n });\n });\n // UPDATES\n categorized.bulkUpdateDocs.forEach((writeRow: BulkWriteRow) => {\n const docId: string = writeRow.document[this.primaryPath] as any;\n\n // overwrite document data\n mainTx.set(docId, writeRow.document);\n\n // update secondary indexes\n Object.values(dbs.indexes).forEach(indexMeta => {\n const oldIndexString = indexMeta.getIndexableString(ensureNotFalsy(writeRow.previous));\n const newIndexString = indexMeta.getIndexableString(writeRow.document as any);\n if (oldIndexString !== newIndexString) {\n const indexTx = tx.at(indexMeta.db.subspace);\n indexTx.delete(oldIndexString);\n indexTx.set(newIndexString, docId);\n }\n });\n ret.success.push(writeRow.document as any);\n });\n\n // attachments\n categorized.attachmentsAdd.forEach(attachment => {\n attachmentTx.set(\n attachmentMapKey(attachment.documentId, attachment.attachmentId),\n attachment.attachmentData\n );\n });\n categorized.attachmentsUpdate.forEach(attachment => {\n attachmentTx.set(\n attachmentMapKey(attachment.documentId, attachment.attachmentId),\n attachment.attachmentData\n );\n });\n categorized.attachmentsRemove.forEach(attachment => {\n attachmentTx.delete(\n attachmentMapKey(attachment.documentId, attachment.attachmentId)\n );\n });\n 
});\n categorized = ensureNotFalsy(categorized);\n /**\n * The events must be emitted AFTER the transaction\n * has finished.\n * Otherwise an observable changestream might cause a read\n * to a document that does not already exist outside of the transaction.\n */\n if (categorized.eventBulk.events.length > 0) {\n const lastState = ensureNotFalsy(categorized.newestRow).document;\n categorized.eventBulk.checkpoint = {\n id: lastState[this.primaryPath],\n lwt: lastState._meta.lwt\n };\n categorized.eventBulk.endTime = now();\n this.changes$.next(categorized.eventBulk);\n }\n })\n );\n\n\n return ret;\n }\n\n async findDocumentsById(ids: string[], withDeleted: boolean): Promise[]> {\n const dbs = await this.internals.dbsPromise;\n return dbs.main.doTransaction(async (tx: any) => {\n const ret: RxDocumentData[] = [];\n await Promise.all(\n ids.map(async (docId) => {\n const docInDb = await tx.get(docId);\n if (\n docInDb &&\n (\n !docInDb._deleted ||\n withDeleted\n )\n ) {\n ret.push(docInDb);\n }\n })\n );\n return ret;\n });\n }\n query(preparedQuery: PreparedQuery): Promise> {\n return queryFoundationDB(this, preparedQuery);\n }\n async count(\n preparedQuery: PreparedQuery\n ): Promise {\n /**\n * At this point in time (end 2022), FoundationDB does not support\n * range counts. 
So we have to run a normal query and use the result set length.\n * @link https://github.com/apple/foundationdb/issues/5981\n */\n const result = await this.query(preparedQuery);\n return {\n count: result.documents.length,\n mode: 'fast'\n };\n }\n\n async getAttachmentData(documentId: string, attachmentId: string, _digest: string): Promise {\n const dbs = await this.internals.dbsPromise;\n const attachment = await dbs.attachments.get(attachmentMapKey(documentId, attachmentId));\n return attachment.data;\n }\n changeStream(): Observable, RxStorageDefaultCheckpoint>> {\n return this.changes$.asObservable();\n }\n\n async remove(): Promise {\n const dbs = await this.internals.dbsPromise;\n await dbs.root.doTransaction((tx: any) => {\n tx.clearRange('', INDEX_MAX);\n return PROMISE_RESOLVE_VOID;\n });\n return this.close();\n }\n async cleanup(minimumDeletedTime: number): Promise {\n const {\n keySelector,\n StreamingMode\n } = require('foundationdb');\n const maxDeletionTime = now() - minimumDeletedTime;\n const dbs = await this.internals.dbsPromise;\n const index = CLEANUP_INDEX;\n const indexName = getFoundationDBIndexName(index);\n const indexMeta = dbs.indexes[indexName];\n const lowerBoundString = getStartIndexStringFromLowerBound(\n this.schema,\n index,\n [\n true,\n /**\n * Do not use 0 here,\n * because 1 is the minimum value for _meta.lwt\n */\n 1\n ]\n );\n const upperBoundString = getStartIndexStringFromUpperBound(\n this.schema,\n index,\n [\n true,\n maxDeletionTime\n ]\n );\n let noMoreUndeleted: boolean = true;\n await dbs.root.doTransaction(async (tx: any) => {\n const batchSize = ensureNotFalsy(this.settings.batchSize);\n const indexTx = tx.at(indexMeta.db.subspace);\n const mainTx = tx.at(dbs.main.subspace);\n const range = await indexTx.getRangeAll(\n keySelector.firstGreaterThan(lowerBoundString),\n upperBoundString,\n {\n limit: batchSize + 1, // get one more extra to detect what to return from cleanup()\n streamingMode: StreamingMode.Exact\n 
}\n );\n if (range.length > batchSize) {\n noMoreUndeleted = false;\n range.pop();\n }\n const docIds = range.map((row: string[]) => row[1]);\n const docsData: RxDocumentData[] = await Promise.all(docIds.map((docId: string) => mainTx.get(docId)));\n\n Object\n .values(dbs.indexes)\n .forEach(indexMetaInner => {\n const subIndexDB = tx.at(indexMetaInner.db.subspace);\n docsData.forEach(docData => {\n const indexString = indexMetaInner.getIndexableString(docData);\n subIndexDB.delete(indexString);\n });\n });\n docIds.forEach((id: string) => mainTx.delete(id));\n });\n\n return noMoreUndeleted;\n }\n\n conflictResultionTasks(): Observable> {\n return new Subject().asObservable();\n }\n resolveConflictResultionTask(_taskSolution: RxConflictResultionTaskSolution): Promise {\n return PROMISE_RESOLVE_VOID;\n }\n\n async close() {\n if (this.closed) {\n return this.closed;\n }\n this.closed = (async () => {\n this.changes$.complete();\n const dbs = await this.internals.dbsPromise;\n await dbs.root.close();\n\n // TODO shouldn't we close the index databases?\n // Object.values(dbs.indexes).forEach(db => db.close());\n })();\n return this.closed;\n }\n}\n\n\nexport function createFoundationDBStorageInstance(\n storage: RxStorageFoundationDB,\n params: RxStorageInstanceCreationParams,\n settings: RxStorageFoundationDBSettings\n): Promise> {\n const primaryPath = getPrimaryFieldOfPrimaryKey(params.schema.primaryKey);\n\n const {\n open,\n directory,\n encoders\n } = require('foundationdb');\n\n const connection = open(settings.clusterFile);\n const dbsPromise = (async () => {\n const dir = await directory.createOrOpen(connection, 'rxdb');\n\n const root = connection\n .at(dir)\n .at(params.databaseName + '.')\n .at(params.collectionName + '.')\n .at(params.schema.version + '.');\n const main: FoundationDBDatabase = root\n .at('main.')\n .withKeyEncoding(encoders.string) // automatically encode & decode keys using tuples\n .withValueEncoding(encoders.json) as any; // and 
values using JSON\n\n\n const events: FoundationDBDatabase>, RxStorageDefaultCheckpoint>> = root\n .at('events.')\n .withKeyEncoding(encoders.string)\n .withValueEncoding(encoders.json) as any;\n\n const attachments: FoundationDBDatabase = root\n .at('attachments.')\n .withKeyEncoding(encoders.string)\n .withValueEncoding(encoders.json) as any;\n\n\n const indexDBs: { [indexName: string]: FoundationDBIndexMeta; } = {};\n const useIndexes = params.schema.indexes ? params.schema.indexes.slice(0) : [];\n useIndexes.push([primaryPath]);\n const useIndexesFinal = useIndexes.map(index => {\n const indexAr = toArray(index);\n return indexAr;\n });\n // used for `getChangedDocumentsSince()`\n useIndexesFinal.push([\n '_meta.lwt',\n primaryPath\n ]);\n useIndexesFinal.push(CLEANUP_INDEX);\n useIndexesFinal.forEach(indexAr => {\n const indexName = getFoundationDBIndexName(indexAr);\n const indexDB = root.at(indexName + '.')\n .withKeyEncoding(encoders.string)\n .withValueEncoding(encoders.string);\n indexDBs[indexName] = {\n indexName,\n db: indexDB,\n getIndexableString: getIndexableStringMonad(params.schema, indexAr),\n index: indexAr\n };\n });\n\n return {\n root,\n main,\n events,\n attachments,\n indexes: indexDBs\n };\n })();\n\n\n const internals: FoundationDBStorageInternals = {\n connection,\n dbsPromise: dbsPromise\n };\n\n const instance = new RxStorageInstanceFoundationDB(\n storage,\n params.databaseName,\n params.collectionName,\n params.schema,\n internals,\n params.options,\n settings\n );\n return 
Promise.resolve(instance);\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,KAAA,GAAAC,OAAA;AACA,IAAAC,eAAA,GAAAD,OAAA;AAmCA,IAAAE,gBAAA,GAAAF,OAAA;AAGA,IAAAG,oBAAA,GAAAH,OAAA;AAMA,IAAAI,YAAA,GAAAJ,OAAA;AAKA,IAAAK,MAAA,GAAAL,OAAA;AASA,IAAAM,kBAAA,GAAAN,OAAA;AACA,IAAAO,aAAA,GAAAP,OAAA;AACA,IAAAQ,OAAA,GAAAR,OAAA;AAhCA;AACA;AACA;AACA;AACA;AACA;AACA;AAAA,IA4BaS,6BAA6B,GAAAC,OAAA,CAAAD,6BAAA;EAWtC,SAAAA,8BACoBE,OAA8B,EAC9BC,YAAoB,EACpBC,cAAsB,EACtBC,MAAyD,EACzDC,SAAkD,EAClDC,OAA+D,EAC/DC,QAAuC,EACzD;IAAA,KAVMC,QAAQ,GAAoG,IAAIC,aAAO,CAAC,CAAC;IAAA,KAG7GR,OAA8B,GAA9BA,OAA8B;IAAA,KAC9BC,YAAoB,GAApBA,YAAoB;IAAA,KACpBC,cAAsB,GAAtBA,cAAsB;IAAA,KACtBC,MAAyD,GAAzDA,MAAyD;IAAA,KACzDC,SAAkD,GAAlDA,SAAkD;IAAA,KAClDC,OAA+D,GAA/DA,OAA+D;IAAA,KAC/DC,QAAuC,GAAvCA,QAAuC;IAEvD,IAAI,CAACG,WAAW,GAAG,IAAAC,2CAA2B,EAAC,IAAI,CAACP,MAAM,CAACQ,UAAU,CAAC;EAC1E;EAAC,IAAAC,MAAA,GAAAd,6BAAA,CAAAe,SAAA;EAAAD,MAAA,CAEKE,SAAS,GAAf,eAAAA,UACIC,cAAyC,EACzCC,OAAe,EAC+B;IAC9C,IAAMC,GAAG,GAAG,MAAM,IAAI,CAACb,SAAS,CAACc,UAAU;IAC3C,IAAMC,GAA0C,GAAG;MAC/CC,OAAO,EAAE,EAAE;MACXC,KAAK,EAAE;IACX,CAAC;;IAED;AACR;AACA;AACA;AACA;IACQ,IAAMC,YAAY,GAAG,IAAAC,iBAAU,EAACR,cAAc,EAAES,mDAA8B,CAAC;IAC/E,MAAMC,OAAO,CAACC,GAAG,CACbJ,YAAY,CAACK,GAAG,CAAC,MAAOC,UAAU,IAAK;MACnC,IAAIC,WAAiE,GAAG,IAAW;MACnF,MAAMZ,GAAG,CAACa,IAAI,CAACC,aAAa,CAAC,MAAOC,EAAO,IAAK;QAC5C,IAAMC,GAAG,GAAGL,UAAU,CAACD,GAAG,CAACO,GAAG,IAAKA,GAAG,CAACC,QAAQ,CAAS,IAAI,CAAC1B,WAAW,CAAC,CAAC;QAC1E,IAAM2B,MAAM,GAAGJ,EAAE,CAACK,EAAE,CAACpB,GAAG,CAACqB,IAAI,CAACC,QAAQ,CAAC;QACvC,IAAMC,YAAY,GAAGR,EAAE,CAACK,EAAE,CAACpB,GAAG,CAACwB,WAAW,CAACF,QAAQ,CAAC;QACpD,IAAMG,QAAQ,GAAG,IAAIC,GAAG,CAAoC,CAAC;QAC7D;AACpB;AACA;AACA;QACoB,MAAMlB,OAAO,CAACC,GAAG,CACbO,GAAG,CAACN,GAAG,CAAC,MAAOiB,EAAE,IAAK;UAClB,IAAMC,GAAG,GAAG,MAAMT,MAAM,CAACU,GAAG,CAACF,EAAE,CAAC;UAChCF,QAAQ,CAACK,GAAG,CAACH,EAAE,EAAEC,GAAG,CAAC;QACzB,CAAC,CACL,CAAC;QACDhB,WAAW,GAAG,IAAAmB,wCAAuB,EACjC,IAAI,EACJ,IAAI,CAACvC,WAAW,EAChBiC,QAAQ,EACRd,UAAU,EACVZ,OACJ,CAAC;QACD,IAAAiC,oBAAa,EAAC9B,GAAG,CAACE,KAAK,EAAEQ,WAAW,CAACqB
,MAAM,CAAC;;QAE5C;QACArB,WAAW,CAACsB,cAAc,CAACC,OAAO,CAACC,QAAQ,IAAI;UAC3C,IAAMC,KAAa,GAAGD,QAAQ,CAAClB,QAAQ,CAAC,IAAI,CAAC1B,WAAW,CAAQ;UAChEU,GAAG,CAACC,OAAO,CAACmC,IAAI,CAACF,QAAQ,CAAClB,QAAQ,CAAC;;UAEnC;UACAC,MAAM,CAACW,GAAG,CAACO,KAAK,EAAED,QAAQ,CAAClB,QAAQ,CAAC;;UAEpC;UACAqB,MAAM,CAACC,MAAM,CAACxC,GAAG,CAACyC,OAAO,CAAC,CAACN,OAAO,CAACO,SAAS,IAAI;YAC5C,IAAMC,WAAW,GAAGD,SAAS,CAACE,kBAAkB,CAACR,QAAQ,CAAClB,QAAe,CAAC;YAC1E,IAAM2B,OAAO,GAAG9B,EAAE,CAACK,EAAE,CAACsB,SAAS,CAACI,EAAE,CAACxB,QAAQ,CAAC;YAC5CuB,OAAO,CAACf,GAAG,CAACa,WAAW,EAAEN,KAAK,CAAC;UACnC,CAAC,CAAC;QACN,CAAC,CAAC;QACF;QACAzB,WAAW,CAACmC,cAAc,CAACZ,OAAO,CAAEC,QAAiC,IAAK;UACtE,IAAMC,KAAa,GAAGD,QAAQ,CAAClB,QAAQ,CAAC,IAAI,CAAC1B,WAAW,CAAQ;;UAEhE;UACA2B,MAAM,CAACW,GAAG,CAACO,KAAK,EAAED,QAAQ,CAAClB,QAAQ,CAAC;;UAEpC;UACAqB,MAAM,CAACC,MAAM,CAACxC,GAAG,CAACyC,OAAO,CAAC,CAACN,OAAO,CAACO,SAAS,IAAI;YAC5C,IAAMM,cAAc,GAAGN,SAAS,CAACE,kBAAkB,CAAC,IAAAK,qBAAc,EAACb,QAAQ,CAACc,QAAQ,CAAC,CAAC;YACtF,IAAMC,cAAc,GAAGT,SAAS,CAACE,kBAAkB,CAACR,QAAQ,CAAClB,QAAe,CAAC;YAC7E,IAAI8B,cAAc,KAAKG,cAAc,EAAE;cACnC,IAAMN,OAAO,GAAG9B,EAAE,CAACK,EAAE,CAACsB,SAAS,CAACI,EAAE,CAACxB,QAAQ,CAAC;cAC5CuB,OAAO,CAACO,MAAM,CAACJ,cAAc,CAAC;cAC9BH,OAAO,CAACf,GAAG,CAACqB,cAAc,EAAEd,KAAK,CAAC;YACtC;UACJ,CAAC,CAAC;UACFnC,GAAG,CAACC,OAAO,CAACmC,IAAI,CAACF,QAAQ,CAAClB,QAAe,CAAC;QAC9C,CAAC,CAAC;;QAEF;QACAN,WAAW,CAACyC,cAAc,CAAClB,OAAO,CAACmB,UAAU,IAAI;UAC7C/B,YAAY,CAACO,GAAG,CACZ,IAAAyB,wBAAgB,EAACD,UAAU,CAACE,UAAU,EAAEF,UAAU,CAACG,YAAY,CAAC,EAChEH,UAAU,CAACI,cACf,CAAC;QACL,CAAC,CAAC;QACF9C,WAAW,CAAC+C,iBAAiB,CAACxB,OAAO,CAACmB,UAAU,IAAI;UAChD/B,YAAY,CAACO,GAAG,CACZ,IAAAyB,wBAAgB,EAACD,UAAU,CAACE,UAAU,EAAEF,UAAU,CAACG,YAAY,CAAC,EAChEH,UAAU,CAACI,cACf,CAAC;QACL,CAAC,CAAC;QACF9C,WAAW,CAACgD,iBAAiB,CAACzB,OAAO,CAACmB,UAAU,IAAI;UAChD/B,YAAY,CAAC6B,MAAM,CACf,IAAAG,wBAAgB,EAACD,UAAU,CAACE,UAAU,EAAEF,UAAU,CAACG,YAAY,CACnE,CAAC;QACL,CAAC,CAAC;MACN,CAAC,CAAC;MACF7C,WAAW,GAAG,IAAAqC,qBAAc,EAACrC,WAAW,CAAC;MACzC;AAChB;AACA;AACA;AACA;AACA;MACgB,IAAIA,WAAW,CAACiD,SAA
S,CAACC,MAAM,CAACC,MAAM,GAAG,CAAC,EAAE;QACzC,IAAMC,SAAS,GAAG,IAAAf,qBAAc,EAACrC,WAAW,CAACqD,SAAS,CAAC,CAAC/C,QAAQ;QAChEN,WAAW,CAACiD,SAAS,CAACK,UAAU,GAAG;UAC/BvC,EAAE,EAAEqC,SAAS,CAAC,IAAI,CAACxE,WAAW,CAAC;UAC/B2E,GAAG,EAAEH,SAAS,CAACI,KAAK,CAACD;QACzB,CAAC;QACDvD,WAAW,CAACiD,SAAS,CAACQ,OAAO,GAAG,IAAAC,UAAG,EAAC,CAAC;QACrC,IAAI,CAAChF,QAAQ,CAACiF,IAAI,CAAC3D,WAAW,CAACiD,SAAS,CAAC;MAC7C;IACJ,CAAC,CACL,CAAC;IAGD,OAAO3D,GAAG;EACd,CAAC;EAAAP,MAAA,CAEK6E,iBAAiB,GAAvB,eAAAA,kBAAwBxD,GAAa,EAAEyD,WAAoB,EAAwC;IAC/F,IAAMzE,GAAG,GAAG,MAAM,IAAI,CAACb,SAAS,CAACc,UAAU;IAC3C,OAAOD,GAAG,CAACqB,IAAI,CAACP,aAAa,CAAC,MAAOC,EAAO,IAAK;MAC7C,IAAMb,GAAgC,GAAG,EAAE;MAC3C,MAAMM,OAAO,CAACC,GAAG,CACbO,GAAG,CAACN,GAAG,CAAC,MAAO2B,KAAK,IAAK;QACrB,IAAMqC,OAAO,GAAG,MAAM3D,EAAE,CAACc,GAAG,CAACQ,KAAK,CAAC;QACnC,IACIqC,OAAO,KAEH,CAACA,OAAO,CAACC,QAAQ,IACjBF,WAAW,CACd,EACH;UACEvE,GAAG,CAACoC,IAAI,CAACoC,OAAO,CAAC;QACrB;MACJ,CAAC,CACL,CAAC;MACD,OAAOxE,GAAG;IACd,CAAC,CAAC;EACN,CAAC;EAAAP,MAAA,CACDiF,KAAK,GAAL,SAAAA,MAAMC,aAAuC,EAA4C;IACrF,OAAO,IAAAC,oCAAiB,EAAC,IAAI,EAAED,aAAa,CAAC;EACjD,CAAC;EAAAlF,MAAA,CACKoF,KAAK,GAAX,eAAAA,MACIF,aAAuC,EACV;IAC7B;AACR;AACA;AACA;AACA;IACQ,IAAMG,MAAM,GAAG,MAAM,IAAI,CAACJ,KAAK,CAACC,aAAa,CAAC;IAC9C,OAAO;MACHE,KAAK,EAAEC,MAAM,CAACC,SAAS,CAAClB,MAAM;MAC9BmB,IAAI,EAAE;IACV,CAAC;EACL,CAAC;EAAAvF,MAAA,CAEKwF,iBAAiB,GAAvB,eAAAA,kBAAwB3B,UAAkB,EAAEC,YAAoB,EAAE2B,OAAe,EAAmB;IAChG,IAAMpF,GAAG,GAAG,MAAM,IAAI,CAACb,SAAS,CAACc,UAAU;IAC3C,IAAMqD,UAAU,GAAG,MAAMtD,GAAG,CAACwB,WAAW,CAACK,GAAG,CAAC,IAAA0B,wBAAgB,EAACC,UAAU,EAAEC,YAAY,CAAC,CAAC;IACxF,OAAOH,UAAU,CAAC+B,IAAI;EAC1B,CAAC;EAAA1F,MAAA,CACD2F,YAAY,GAAZ,SAAAA,aAAA,EAAmG;IAC/F,OAAO,IAAI,CAAChG,QAAQ,CAACiG,YAAY,CAAC,CAAC;EACvC,CAAC;EAAA5F,MAAA,CAEK6F,MAAM,GAAZ,eAAAA,OAAA,EAA8B;IAC1B,IAAMxF,GAAG,GAAG,MAAM,IAAI,CAACb,SAAS,CAACc,UAAU;IAC3C,MAAMD,GAAG,CAACa,IAAI,CAACC,aAAa,CAAEC,EAAO,IAAK;MACtCA,EAAE,CAAC0E,UAAU,CAAC,EAAE,EAAEC,uBAAS,CAAC;MAC5B,OAAOC,2BAAoB;IAC/B,CAAC,CAAC;IACF,OAAO,IAAI,CAACC,KAAK,CAAC,CAAC;EACvB,CAAC;EAAAjG,MAAA,CA
CKkG,OAAO,GAAb,eAAAA,QAAcC,kBAA0B,EAAoB;IACxD,IAAM;MACFC,WAAW;MACXC;IACJ,CAAC,GAAG5H,OAAO,CAAC,cAAc,CAAC;IAC3B,IAAM6H,eAAe,GAAG,IAAA3B,UAAG,EAAC,CAAC,GAAGwB,kBAAkB;IAClD,IAAM9F,GAAG,GAAG,MAAM,IAAI,CAACb,SAAS,CAACc,UAAU;IAC3C,IAAMiG,KAAK,GAAGC,kCAAa;IAC3B,IAAMC,SAAS,GAAG,IAAAC,6CAAwB,EAACH,KAAK,CAAC;IACjD,IAAMxD,SAAS,GAAG1C,GAAG,CAACyC,OAAO,CAAC2D,SAAS,CAAC;IACxC,IAAME,gBAAgB,GAAG,IAAAC,8CAAiC,EACtD,IAAI,CAACrH,MAAM,EACXgH,KAAK,EACL,CACI,IAAI;IACJ;AAChB;AACA;AACA;IACgB,CAAC,CAET,CAAC;IACD,IAAMM,gBAAgB,GAAG,IAAAC,8CAAiC,EACtD,IAAI,CAACvH,MAAM,EACXgH,KAAK,EACL,CACI,IAAI,EACJD,eAAe,CAEvB,CAAC;IACD,IAAIS,eAAwB,GAAG,IAAI;IACnC,MAAM1G,GAAG,CAACa,IAAI,CAACC,aAAa,CAAC,MAAOC,EAAO,IAAK;MAC5C,IAAM4F,SAAS,GAAG,IAAA1D,qBAAc,EAAC,IAAI,CAAC5D,QAAQ,CAACsH,SAAS,CAAC;MACzD,IAAM9D,OAAO,GAAG9B,EAAE,CAACK,EAAE,CAACsB,SAAS,CAACI,EAAE,CAACxB,QAAQ,CAAC;MAC5C,IAAMH,MAAM,GAAGJ,EAAE,CAACK,EAAE,CAACpB,GAAG,CAACqB,IAAI,CAACC,QAAQ,CAAC;MACvC,IAAMsF,KAAK,GAAG,MAAM/D,OAAO,CAACgE,WAAW,CACnCd,WAAW,CAACe,gBAAgB,CAACR,gBAAgB,CAAC,EAC9CE,gBAAgB,EAChB;QACIO,KAAK,EAAEJ,SAAS,GAAG,CAAC;QAAE;QACtBK,aAAa,EAAEhB,aAAa,CAACiB;MACjC,CACJ,CAAC;MACD,IAAIL,KAAK,CAAC7C,MAAM,GAAG4C,SAAS,EAAE;QAC1BD,eAAe,GAAG,KAAK;QACvBE,KAAK,CAACM,GAAG,CAAC,CAAC;MACf;MACA,IAAMC,MAAM,GAAGP,KAAK,CAAClG,GAAG,CAAEO,GAAa,IAAKA,GAAG,CAAC,CAAC,CAAC,CAAC;MACnD,IAAMmG,QAAqC,GAAG,MAAM5G,OAAO,CAACC,GAAG,CAAC0G,MAAM,CAACzG,GAAG,CAAE2B,KAAa,IAAKlB,MAAM,CAACU,GAAG,CAACQ,KAAK,CAAC,CAAC,CAAC;MAEjHE,MAAM,CACDC,MAAM,CAACxC,GAAG,CAACyC,OAAO,CAAC,CACnBN,OAAO,CAACkF,cAAc,IAAI;QACvB,IAAMC,UAAU,GAAGvG,EAAE,CAACK,EAAE,CAACiG,cAAc,CAACvE,EAAE,CAACxB,QAAQ,CAAC;QACpD8F,QAAQ,CAACjF,OAAO,CAACoF,OAAO,IAAI;UACxB,IAAM5E,WAAW,GAAG0E,cAAc,CAACzE,kBAAkB,CAAC2E,OAAO,CAAC;UAC9DD,UAAU,CAAClE,MAAM,CAACT,WAAW,CAAC;QAClC,CAAC,CAAC;MACN,CAAC,CAAC;MACNwE,MAAM,CAAChF,OAAO,CAAER,EAAU,IAAKR,MAAM,CAACiC,MAAM,CAACzB,EAAE,CAAC,CAAC;IACrD,CAAC,CAAC;IAEF,OAAO+E,eAAe;EAC1B,CAAC;EAAA/G,MAAA,CAED6H,sBAAsB,GAAtB,SAAAA,uBAAA,EAAyE;IACrE,OAAO,IAAIjI,aAAO,CAAM,CAAC,CAACgG,YAAY,CAAC,CAAC;EAC5
C,CAAC;EAAA5F,MAAA,CACD8H,4BAA4B,GAA5B,SAAAA,6BAA6BC,aAAyD,EAAiB;IACnG,OAAO/B,2BAAoB;EAC/B,CAAC;EAAAhG,MAAA,CAEKiG,KAAK,GAAX,eAAAA,MAAA,EAAc;IACV,IAAI,IAAI,CAAC+B,MAAM,EAAE;MACb,OAAO,IAAI,CAACA,MAAM;IACtB;IACA,IAAI,CAACA,MAAM,GAAG,CAAC,YAAY;MACvB,IAAI,CAACrI,QAAQ,CAACsI,QAAQ,CAAC,CAAC;MACxB,IAAM5H,GAAG,GAAG,MAAM,IAAI,CAACb,SAAS,CAACc,UAAU;MAC3C,MAAMD,GAAG,CAACa,IAAI,CAAC+E,KAAK,CAAC,CAAC;;MAEtB;MACA;IACJ,CAAC,EAAE,CAAC;IACJ,OAAO,IAAI,CAAC+B,MAAM;EACtB,CAAC;EAAA,OAAA9I,6BAAA;AAAA;AAIE,SAASgJ,iCAAiCA,CAC7C9I,OAA8B,EAC9B+I,MAAgG,EAChGzI,QAAuC,EACU;EACjD,IAAMG,WAAW,GAAG,IAAAC,2CAA2B,EAACqI,MAAM,CAAC5I,MAAM,CAACQ,UAAU,CAAC;EAEzE,IAAM;IACFqI,IAAI;IACJC,SAAS;IACTC;EACJ,CAAC,GAAG7J,OAAO,CAAC,cAAc,CAAC;EAE3B,IAAM8J,UAAU,GAAGH,IAAI,CAAC1I,QAAQ,CAAC8I,WAAW,CAAC;EAC7C,IAAMlI,UAAU,GAAG,CAAC,YAAY;IAC5B,IAAMmI,GAAG,GAAG,MAAMJ,SAAS,CAACK,YAAY,CAACH,UAAU,EAAE,MAAM,CAAC;IAE5D,IAAMrH,IAAI,GAAGqH,UAAU,CAClB9G,EAAE,CAACgH,GAAG,CAAC,CACPhH,EAAE,CAAC0G,MAAM,CAAC9I,YAAY,GAAG,GAAG,CAAC,CAC7BoC,EAAE,CAAC0G,MAAM,CAAC7I,cAAc,GAAG,GAAG,CAAC,CAC/BmC,EAAE,CAAC0G,MAAM,CAAC5I,MAAM,CAACoJ,OAAO,GAAG,GAAG,CAAC;IACpC,IAAMjH,IAAqC,GAAGR,IAAI,CAC7CO,EAAE,CAAC,OAAO,CAAC,CACXmH,eAAe,CAACN,QAAQ,CAACO,MAAM,CAAC,CAAC;IAAA,CACjCC,iBAAiB,CAACR,QAAQ,CAACS,IAAI,CAAQ,CAAC,CAAC;;IAG9C,IAAM5E,MAAoH,GAAGjD,IAAI,CAC5HO,EAAE,CAAC,SAAS,CAAC,CACbmH,eAAe,CAACN,QAAQ,CAACO,MAAM,CAAC,CAChCC,iBAAiB,CAACR,QAAQ,CAACS,IAAI,CAAQ;IAE5C,IAAMlH,WAAwD,GAAGX,IAAI,CAChEO,EAAE,CAAC,cAAc,CAAC,CAClBmH,eAAe,CAACN,QAAQ,CAACO,MAAM,CAAC,CAChCC,iBAAiB,CAACR,QAAQ,CAACS,IAAI,CAAQ;IAG5C,IAAMC,QAAoE,GAAG,CAAC,CAAC;IAC/E,IAAMC,UAAU,GAAGd,MAAM,CAAC5I,MAAM,CAACuD,OAAO,GAAGqF,MAAM,CAAC5I,MAAM,CAACuD,OAAO,CAACoG,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE;IAC9ED,UAAU,CAACtG,IAAI,CAAC,CAAC9C,WAAW,CAAC,CAAC;IAC9B,IAAMsJ,eAAe,GAAGF,UAAU,CAAClI,GAAG,CAACwF,KAAK,IAAI;MAC5C,IAAM6C,OAAO,GAAG,IAAAC,cAAO,EAAC9C,KAAK,CAAC;MAC9B,OAAO6C,OAAO;IAClB,CAAC,CAAC;IACF;IACAD,eAAe,CAACxG,IAAI,CAAC,CACjB,WAAW,EACX9C,WAAW,CACd,CAAC;IACFsJ,eAAe,CAACxG,IAAI,CAAC6D,kCAAa,CAAC;IACnC2C,eAAe,C
AAC3G,OAAO,CAAC4G,OAAO,IAAI;MAC/B,IAAM3C,SAAS,GAAG,IAAAC,6CAAwB,EAAC0C,OAAO,CAAC;MACnD,IAAME,OAAO,GAAGpI,IAAI,CAACO,EAAE,CAACgF,SAAS,GAAG,GAAG,CAAC,CACnCmC,eAAe,CAACN,QAAQ,CAACO,MAAM,CAAC,CAChCC,iBAAiB,CAACR,QAAQ,CAACO,MAAM,CAAC;MACvCG,QAAQ,CAACvC,SAAS,CAAC,GAAG;QAClBA,SAAS;QACTtD,EAAE,EAAEmG,OAAO;QACXrG,kBAAkB,EAAE,IAAAsG,oCAAuB,EAACpB,MAAM,CAAC5I,MAAM,EAAE6J,OAAO,CAAC;QACnE7C,KAAK,EAAE6C;MACX,CAAC;IACL,CAAC,CAAC;IAEF,OAAO;MACHlI,IAAI;MACJQ,IAAI;MACJyC,MAAM;MACNtC,WAAW;MACXiB,OAAO,EAAEkG;IACb,CAAC;EACL,CAAC,EAAE,CAAC;EAGJ,IAAMxJ,SAAkD,GAAG;IACvD+I,UAAU;IACVjI,UAAU,EAAEA;EAChB,CAAC;EAED,IAAMkJ,QAAQ,GAAG,IAAItK,6BAA6B,CAC9CE,OAAO,EACP+I,MAAM,CAAC9I,YAAY,EACnB8I,MAAM,CAAC7I,cAAc,EACrB6I,MAAM,CAAC5I,MAAM,EACbC,SAAS,EACT2I,MAAM,CAAC1I,OAAO,EACdC,QACJ,CAAC;EACD,OAAOmB,OAAO,CAAC4I,OAAO,CAACD,QAAQ,CAAC;AACpC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-lokijs/index.js b/dist/cjs/plugins/storage-lokijs/index.js deleted file mode 100644 index 7a7c8373e62..00000000000 --- a/dist/cjs/plugins/storage-lokijs/index.js +++ /dev/null @@ -1,39 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _rxStorageLokijs = require("./rx-storage-lokijs.js"); -Object.keys(_rxStorageLokijs).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _rxStorageLokijs[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxStorageLokijs[key]; - } - }); -}); -var _lokijsHelper = require("./lokijs-helper.js"); -Object.keys(_lokijsHelper).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _lokijsHelper[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _lokijsHelper[key]; - } - }); -}); -var _rxStorageInstanceLoki = require("./rx-storage-instance-loki.js"); 
-Object.keys(_rxStorageInstanceLoki).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _rxStorageInstanceLoki[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxStorageInstanceLoki[key]; - } - }); -}); -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-lokijs/index.js.map b/dist/cjs/plugins/storage-lokijs/index.js.map deleted file mode 100644 index e23c0d87e6c..00000000000 --- a/dist/cjs/plugins/storage-lokijs/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_rxStorageLokijs","require","Object","keys","forEach","key","exports","defineProperty","enumerable","get","_lokijsHelper","_rxStorageInstanceLoki"],"sources":["../../../../src/plugins/storage-lokijs/index.ts"],"sourcesContent":["export * from './rx-storage-lokijs.ts';\nexport * from './lokijs-helper.ts';\nexport * from './rx-storage-instance-loki.ts';\n"],"mappings":";;;;;AAAA,IAAAA,gBAAA,GAAAC,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAH,gBAAA,EAAAI,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAL,gBAAA,CAAAK,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAT,gBAAA,CAAAK,GAAA;IAAA;EAAA;AAAA;AACA,IAAAK,aAAA,GAAAT,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAO,aAAA,EAAAN,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAK,aAAA,CAAAL,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAC,aAAA,CAAAL,GAAA;IAAA;EAAA;AAAA;AACA,IAAAM,sBAAA,GAAAV,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAQ,sBAAA,EAAAP,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAM,sBAAA,CAAAN,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAE,sBAAA,CAAAN,GAAA;IAAA;EAAA;AAAA","ignoreList":[]} \ No newline at end of 
file diff --git a/dist/cjs/plugins/storage-lokijs/loki-save-queue.js b/dist/cjs/plugins/storage-lokijs/loki-save-queue.js deleted file mode 100644 index 6302a054b4c..00000000000 --- a/dist/cjs/plugins/storage-lokijs/loki-save-queue.js +++ /dev/null @@ -1,89 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.LokiSaveQueue = void 0; -var _index = require("../utils/index.js"); -/** - * The autosave feature of lokijs has strange behaviors - * and often runs a save in critical moments when other - * more important tasks are running. - * So instead we use a custom save queue that ensures we - * only run loki.saveDatabase() when nothing else is running. - */ -var LokiSaveQueue = exports.LokiSaveQueue = /*#__PURE__*/function () { - /** - * Ensures that we do not run multiple saves - * in parallel - */ - - // track amount of non-finished save calls in the queue. - - function LokiSaveQueue(lokiDatabase, databaseSettings) { - this.writesSinceLastRun = 0; - this.saveQueue = _index.PROMISE_RESOLVE_VOID; - this.saveQueueC = 0; - this.lokiDatabase = lokiDatabase; - this.databaseSettings = databaseSettings; - } - var _proto = LokiSaveQueue.prototype; - _proto.addWrite = function addWrite() { - this.writesSinceLastRun = this.writesSinceLastRun + 1; - this.run(); - }; - _proto.run = function run() { - if ( - // no persistence adapter given, so we do not need to save - !this.databaseSettings.adapter || - // do not add more then two pending calls to the queue. - this.saveQueueC > 2) { - return this.saveQueue; - } - this.saveQueueC = this.saveQueueC + 1; - this.saveQueue = this.saveQueue.then(async () => { - /** - * Always wait until the JavaScript process is idle. - * This ensures that CPU blocking writes are finished - * before we proceed. 
- */ - await (0, _index.requestIdlePromise)(); - - // no write happened since the last save call - if (this.writesSinceLastRun === 0) { - return; - } - - /** - * Because LokiJS is a in-memory database, - * we can just wait until the JavaScript process is idle - * via requestIdlePromise(). Then we know that nothing important - * is running at the moment. - */ - await (0, _index.requestIdlePromise)().then(() => (0, _index.requestIdlePromise)()); - if (this.writesSinceLastRun === 0) { - return; - } - var writeAmount = this.writesSinceLastRun; - this.writesSinceLastRun = 0; - return new Promise((res, rej) => { - this.lokiDatabase.saveDatabase(err => { - if (err) { - this.writesSinceLastRun = this.writesSinceLastRun + writeAmount; - rej(err); - } else { - if (this.databaseSettings.autosaveCallback) { - this.databaseSettings.autosaveCallback(); - } - res(); - } - }); - }); - }).catch(() => {}).then(() => { - this.saveQueueC = this.saveQueueC - 1; - }); - return this.saveQueue; - }; - return LokiSaveQueue; -}(); -//# sourceMappingURL=loki-save-queue.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-lokijs/loki-save-queue.js.map b/dist/cjs/plugins/storage-lokijs/loki-save-queue.js.map deleted file mode 100644 index f402f4f8ac8..00000000000 --- a/dist/cjs/plugins/storage-lokijs/loki-save-queue.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"loki-save-queue.js","names":["_index","require","LokiSaveQueue","exports","lokiDatabase","databaseSettings","writesSinceLastRun","saveQueue","PROMISE_RESOLVE_VOID","saveQueueC","_proto","prototype","addWrite","run","adapter","then","requestIdlePromise","writeAmount","Promise","res","rej","saveDatabase","err","autosaveCallback","catch"],"sources":["../../../../src/plugins/storage-lokijs/loki-save-queue.ts"],"sourcesContent":["import type { LokiDatabaseSettings } from '../../types/index.d.ts';\nimport {\n PROMISE_RESOLVE_VOID,\n requestIdlePromise\n} from '../utils/index.ts';\n\n/**\n * The autosave feature 
of lokijs has strange behaviors\n * and often runs a save in critical moments when other\n * more important tasks are running.\n * So instead we use a custom save queue that ensures we\n * only run loki.saveDatabase() when nothing else is running.\n */\nexport class LokiSaveQueue {\n public writesSinceLastRun: number = 0;\n\n /**\n * Ensures that we do not run multiple saves\n * in parallel\n */\n public saveQueue: Promise = PROMISE_RESOLVE_VOID;\n // track amount of non-finished save calls in the queue.\n public saveQueueC = 0;\n\n constructor(\n public readonly lokiDatabase: any,\n public readonly databaseSettings: LokiDatabaseSettings\n ) {\n\n }\n\n public addWrite() {\n this.writesSinceLastRun = this.writesSinceLastRun + 1;\n this.run();\n }\n\n public run() {\n if (\n // no persistence adapter given, so we do not need to save\n !this.databaseSettings.adapter ||\n // do not add more then two pending calls to the queue.\n this.saveQueueC > 2\n\n ) {\n return this.saveQueue;\n }\n\n this.saveQueueC = this.saveQueueC + 1;\n this.saveQueue = this.saveQueue\n .then(async () => {\n /**\n * Always wait until the JavaScript process is idle.\n * This ensures that CPU blocking writes are finished\n * before we proceed.\n */\n await requestIdlePromise();\n\n // no write happened since the last save call\n if (this.writesSinceLastRun === 0) {\n return;\n }\n\n /**\n * Because LokiJS is a in-memory database,\n * we can just wait until the JavaScript process is idle\n * via requestIdlePromise(). 
Then we know that nothing important\n * is running at the moment.\n */\n await requestIdlePromise().then(() => requestIdlePromise());\n\n if (this.writesSinceLastRun === 0) {\n return;\n }\n\n const writeAmount = this.writesSinceLastRun;\n this.writesSinceLastRun = 0;\n return new Promise((res, rej) => {\n this.lokiDatabase.saveDatabase((err: any) => {\n if (err) {\n this.writesSinceLastRun = this.writesSinceLastRun + writeAmount;\n rej(err);\n } else {\n if (this.databaseSettings.autosaveCallback) {\n this.databaseSettings.autosaveCallback();\n }\n res();\n }\n });\n });\n })\n .catch(() => { })\n .then(() => {\n this.saveQueueC = this.saveQueueC - 1;\n });\n return this.saveQueue;\n }\n}\n"],"mappings":";;;;;;AACA,IAAAA,MAAA,GAAAC,OAAA;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;AANA,IAOaC,aAAa,GAAAC,OAAA,CAAAD,aAAA;EAGtB;AACJ;AACA;AACA;;EAEI;;EAGA,SAAAA,cACoBE,YAAiB,EACjBC,gBAAsC,EACxD;IAAA,KAbKC,kBAAkB,GAAW,CAAC;IAAA,KAM9BC,SAAS,GAAkBC,2BAAoB;IAAA,KAE/CC,UAAU,GAAG,CAAC;IAAA,KAGDL,YAAiB,GAAjBA,YAAiB;IAAA,KACjBC,gBAAsC,GAAtCA,gBAAsC;EAG1D;EAAC,IAAAK,MAAA,GAAAR,aAAA,CAAAS,SAAA;EAAAD,MAAA,CAEME,QAAQ,GAAf,SAAAA,SAAA,EAAkB;IACd,IAAI,CAACN,kBAAkB,GAAG,IAAI,CAACA,kBAAkB,GAAG,CAAC;IACrD,IAAI,CAACO,GAAG,CAAC,CAAC;EACd,CAAC;EAAAH,MAAA,CAEMG,GAAG,GAAV,SAAAA,IAAA,EAAa;IACT;IACI;IACA,CAAC,IAAI,CAACR,gBAAgB,CAACS,OAAO;IAC9B;IACA,IAAI,CAACL,UAAU,GAAG,CAAC,EAErB;MACE,OAAO,IAAI,CAACF,SAAS;IACzB;IAEA,IAAI,CAACE,UAAU,GAAG,IAAI,CAACA,UAAU,GAAG,CAAC;IACrC,IAAI,CAACF,SAAS,GAAG,IAAI,CAACA,SAAS,CAC1BQ,IAAI,CAAC,YAAY;MACd;AAChB;AACA;AACA;AACA;MACgB,MAAM,IAAAC,yBAAkB,EAAC,CAAC;;MAE1B;MACA,IAAI,IAAI,CAACV,kBAAkB,KAAK,CAAC,EAAE;QAC/B;MACJ;;MAEA;AAChB;AACA;AACA;AACA;AACA;MACgB,MAAM,IAAAU,yBAAkB,EAAC,CAAC,CAACD,IAAI,CAAC,MAAM,IAAAC,yBAAkB,EAAC,CAAC,CAAC;MAE3D,IAAI,IAAI,CAACV,kBAAkB,KAAK,CAAC,EAAE;QAC/B;MACJ;MAEA,IAAMW,WAAW,GAAG,IAAI,CAACX,kBAAkB;MAC3C,IAAI,CAACA,kBAAkB,GAAG,CAAC;MAC3B,OAAO,IAAIY,OAAO,CAAO,CAACC,GAAG,EAAEC,GAAG,KAAK;QACnC,IAAI,CAAChB,YAAY,CAACiB,YAAY,CAAEC,GAAQ,IAAK;UACzC,IAAIA,GAAG,EA
AE;YACL,IAAI,CAAChB,kBAAkB,GAAG,IAAI,CAACA,kBAAkB,GAAGW,WAAW;YAC/DG,GAAG,CAACE,GAAG,CAAC;UACZ,CAAC,MAAM;YACH,IAAI,IAAI,CAACjB,gBAAgB,CAACkB,gBAAgB,EAAE;cACxC,IAAI,CAAClB,gBAAgB,CAACkB,gBAAgB,CAAC,CAAC;YAC5C;YACAJ,GAAG,CAAC,CAAC;UACT;QACJ,CAAC,CAAC;MACN,CAAC,CAAC;IACN,CAAC,CAAC,CACDK,KAAK,CAAC,MAAM,CAAE,CAAC,CAAC,CAChBT,IAAI,CAAC,MAAM;MACR,IAAI,CAACN,UAAU,GAAG,IAAI,CAACA,UAAU,GAAG,CAAC;IACzC,CAAC,CAAC;IACN,OAAO,IAAI,CAACF,SAAS;EACzB,CAAC;EAAA,OAAAL,aAAA;AAAA","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-lokijs/lokijs-helper.js b/dist/cjs/plugins/storage-lokijs/lokijs-helper.js deleted file mode 100644 index ee97ce912ec..00000000000 --- a/dist/cjs/plugins/storage-lokijs/lokijs-helper.js +++ /dev/null @@ -1,450 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RX_STORAGE_NAME_LOKIJS = exports.OPEN_LOKIJS_STORAGE_INSTANCES = exports.LOKI_KEY_OBJECT_BROADCAST_CHANNEL_MESSAGE_TYPE = exports.LOKI_BROADCAST_CHANNEL_MESSAGE_TYPE = exports.LOKIJS_COLLECTION_DEFAULT_OPTIONS = exports.CHANGES_COLLECTION_SUFFIX = void 0; -exports.closeLokiCollections = closeLokiCollections; -exports.getLokiDatabase = getLokiDatabase; -exports.getLokiLeaderElector = getLokiLeaderElector; -exports.getLokiSortComparator = getLokiSortComparator; -exports.handleRemoteRequest = handleRemoteRequest; -exports.mustUseLocalState = mustUseLocalState; -exports.requestRemoteInstance = requestRemoteInstance; -exports.stripLokiKey = stripLokiKey; -exports.transformRegexToRegExp = transformRegexToRegExp; -exports.waitUntilHasLeader = waitUntilHasLeader; -var _rxStorageInstanceLoki = require("./rx-storage-instance-loki.js"); -var _lokijs = _interopRequireDefault(require("lokijs")); -var _unload = require("unload"); -var _index = require("../utils/index.js"); -var _lokiSaveQueue = require("./loki-save-queue.js"); -var _rxError = 
require("../../rx-error.js"); -var _rxStorageMultiinstance = require("../../rx-storage-multiinstance.js"); -var _index2 = require("../leader-election/index.js"); -var _overwritable = require("../../overwritable.js"); -var CHANGES_COLLECTION_SUFFIX = exports.CHANGES_COLLECTION_SUFFIX = '-rxdb-changes'; -var LOKI_BROADCAST_CHANNEL_MESSAGE_TYPE = exports.LOKI_BROADCAST_CHANNEL_MESSAGE_TYPE = 'rxdb-lokijs-remote-request'; -var LOKI_KEY_OBJECT_BROADCAST_CHANNEL_MESSAGE_TYPE = exports.LOKI_KEY_OBJECT_BROADCAST_CHANNEL_MESSAGE_TYPE = 'rxdb-lokijs-remote-request-key-object'; -var RX_STORAGE_NAME_LOKIJS = exports.RX_STORAGE_NAME_LOKIJS = 'lokijs'; - -/** - * Loki attaches a $loki property to all data - * which must be removed before returning the data back to RxDB. - */ -function stripLokiKey(docData) { - if (!docData.$loki) { - return docData; - } - var cloned = (0, _index.flatClone)(docData); - - /** - * In RxDB version 12.0.0, - * we introduced the _meta field that already contains the last write time. - * To be backwards compatible, we have to move the $lastWriteAt to the _meta field. - * TODO remove this in the next major version. - */ - if (cloned.$lastWriteAt) { - cloned._meta = { - lwt: cloned.$lastWriteAt - }; - delete cloned.$lastWriteAt; - } - delete cloned.$loki; - return cloned; -} - -/** - * Used to check in tests if all instances have been cleaned up. 
- */ -var OPEN_LOKIJS_STORAGE_INSTANCES = exports.OPEN_LOKIJS_STORAGE_INSTANCES = new Set(); -var LOKIJS_COLLECTION_DEFAULT_OPTIONS = exports.LOKIJS_COLLECTION_DEFAULT_OPTIONS = { - disableChangesApi: true, - disableMeta: true, - disableDeltaChangesApi: true, - disableFreeze: true, - // TODO use 'immutable' like WatermelonDB does it - cloneMethod: 'shallow-assign', - clone: false, - transactional: false, - autoupdate: false -}; -var LOKI_DATABASE_STATE_BY_NAME = new Map(); -function getLokiDatabase(databaseName, databaseSettings) { - return (0, _index.getFromMapOrCreate)(LOKI_DATABASE_STATE_BY_NAME, databaseName, () => { - /** - * We assume that as soon as an adapter is passed, - * the database has to be persistent. - */ - var hasPersistence = !!databaseSettings.adapter; - var databaseState = (async () => { - var persistenceMethod = hasPersistence ? 'adapter' : 'memory'; - if (databaseSettings.persistenceMethod) { - persistenceMethod = databaseSettings.persistenceMethod; - } - var useSettings = Object.assign( - // defaults - { - autoload: hasPersistence, - persistenceMethod, - verbose: true - }, databaseSettings, - // overwrites - { - /** - * RxDB uses its custom load and save handling - * so we disable the LokiJS save/load handlers. - */ - autoload: false, - autosave: false, - throttledSaves: false - }); - var database = new _lokijs.default(databaseName + '.db', (0, _index.flatClone)(useSettings)); - var lokiSaveQueue = new _lokiSaveQueue.LokiSaveQueue(database, useSettings); - - /** - * Wait until all data is loaded from persistence adapter. - * Wrap the loading into the saveQueue to ensure that when many - * collections are created at the same time, the load-calls do not interfere - * with each other and cause error logs. 
- */ - if (hasPersistence) { - var loadDatabasePromise = new Promise((res, rej) => { - try { - database.loadDatabase({ - recursiveWait: false - }, err => { - if (useSettings.autoloadCallback) { - useSettings.autoloadCallback(err); - } - if (err) { - rej(err); - } else { - res(); - } - }); - } catch (err) { - rej(err); - } - }); - lokiSaveQueue.saveQueue = lokiSaveQueue.saveQueue.then(() => loadDatabasePromise); - await loadDatabasePromise; - } - - /** - * Autosave database on process end - */ - var unloads = []; - if (hasPersistence) { - unloads.push((0, _unload.add)(() => lokiSaveQueue.run())); - } - var state = { - database, - databaseSettings: useSettings, - saveQueue: lokiSaveQueue, - collections: {}, - unloads - }; - return state; - })(); - return databaseState; - }); -} -async function closeLokiCollections(databaseName, collections) { - var databaseState = await LOKI_DATABASE_STATE_BY_NAME.get(databaseName); - if (!databaseState) { - // already closed - return; - } - await databaseState.saveQueue.run(); - collections.forEach(collection => { - var collectionName = collection.name; - delete databaseState.collections[collectionName]; - }); - if (Object.keys(databaseState.collections).length === 0) { - // all collections closed -> also close database - LOKI_DATABASE_STATE_BY_NAME.delete(databaseName); - databaseState.unloads.forEach(u => u.remove()); - await new Promise((res, rej) => { - databaseState.database.close(err => { - if (err) { - rej(err); - } else { - res(); - } - }); - }); - } -} - -/** - * This function is at lokijs-helper - * because we need it in multiple places. 
- */ -function getLokiSortComparator(_schema, query) { - if (!query.sort) { - throw (0, _rxError.newRxError)('SNH', { - query - }); - } - var sortOptions = query.sort; - var fun = (a, b) => { - var compareResult = 0; // 1 | -1 - sortOptions.find(sortPart => { - var fieldName = Object.keys(sortPart)[0]; - var direction = Object.values(sortPart)[0]; - var directionMultiplier = direction === 'asc' ? 1 : -1; - var valueA = (0, _index.getProperty)(a, fieldName); - var valueB = (0, _index.getProperty)(b, fieldName); - if (valueA === valueB) { - return false; - } else { - if (valueA > valueB) { - compareResult = 1 * directionMultiplier; - return true; - } else { - compareResult = -1 * directionMultiplier; - return true; - } - } - }); - - /** - * Two different objects should never have the same sort position. - * We ensure this by having the unique primaryKey in the sort params - * which is added by RxDB if not existing yet. - */ - if (!compareResult) { - throw (0, _rxError.newRxError)('SNH', { - args: { - query, - a, - b - } - }); - } - return compareResult; - }; - return fun; -} -function getLokiLeaderElector(databaseInstanceToken, broadcastChannelRefObject, databaseName) { - var broadcastChannel = (0, _rxStorageMultiinstance.getBroadcastChannelReference)(RX_STORAGE_NAME_LOKIJS, databaseInstanceToken, databaseName, broadcastChannelRefObject); - var elector = (0, _index2.getLeaderElectorByBroadcastChannel)(broadcastChannel); - return elector; -} - -/** - * For multi-instance usage, we send requests to the RxStorage - * to the current leading instance over the BroadcastChannel. - */ -async function requestRemoteInstance(instance, operation, params) { - var isRxStorageInstanceLoki = typeof instance.query === 'function'; - var messageType = isRxStorageInstanceLoki ? 
LOKI_BROADCAST_CHANNEL_MESSAGE_TYPE : LOKI_KEY_OBJECT_BROADCAST_CHANNEL_MESSAGE_TYPE; - var leaderElector = (0, _index.ensureNotFalsy)(instance.internals.leaderElector); - await waitUntilHasLeader(leaderElector); - var broadcastChannel = leaderElector.broadcastChannel; - var whenDeathListener; - var leaderDeadPromise = new Promise(res => { - whenDeathListener = msg => { - if (msg.context === 'leader' && msg.action === 'death') { - res({ - retry: true - }); - } - }; - broadcastChannel.addEventListener('internal', whenDeathListener); - }); - var requestId = (0, _index.randomCouchString)(12); - var responseListener; - var responsePromise = new Promise((res, _rej) => { - responseListener = msg => { - if (msg.type === messageType && msg.response === true && msg.requestId === requestId) { - if (msg.isError) { - res({ - retry: false, - error: msg.result - }); - } else { - res({ - retry: false, - result: msg.result - }); - } - } - }; - broadcastChannel.addEventListener('message', responseListener); - }); - - // send out the request to the other instance - broadcastChannel.postMessage({ - response: false, - type: messageType, - operation, - params, - requestId, - databaseName: instance.databaseName, - collectionName: instance.collectionName - }); - var timeout; - return Promise.race([leaderDeadPromise, responsePromise - // // comment in timeout to debug - // new Promise(res => { - // timeout = setTimeout(() => { - // res({ error: new Error('requestRemoteInstance() timeout errorored'), retry: false }); - // }, 500); - // }) - ]).then(firstResolved => { - if (timeout) { - clearTimeout(timeout); - } - - // clean up listeners - broadcastChannel.removeEventListener('message', responseListener); - broadcastChannel.removeEventListener('internal', whenDeathListener); - if (firstResolved.retry) { - /** - * The leader died while a remote request was running - * we re-run the whole operation. 
- * We cannot just re-run requestRemoteInstance() - * because the current instance might be the new leader now - * and then we have to use the local state instead of requesting the remote. - */ - return instance[operation](...params); - } else { - if (firstResolved.error) { - throw firstResolved.error; - } else { - return firstResolved.result; - } - } - }); -} - -/** - * Handles a request that came from a remote instance via requestRemoteInstance() - * Runs the requested operation over the local db instance and sends back the result. - */ -async function handleRemoteRequest(instance, msg) { - if (msg.type === LOKI_BROADCAST_CHANNEL_MESSAGE_TYPE && msg.requestId && msg.databaseName === instance.databaseName && msg.collectionName === instance.collectionName && !msg.response) { - var operation = msg.operation; - var params = msg.params; - var result; - var isError = false; - try { - result = await instance[operation](...params); - } catch (err) { - console.dir(err); - isError = true; - result = err; - } - var response = { - response: true, - requestId: msg.requestId, - databaseName: instance.databaseName, - collectionName: instance.collectionName, - result, - isError, - type: msg.type - }; - (0, _index.ensureNotFalsy)(instance.internals.leaderElector).broadcastChannel.postMessage(response); - } -} -async function waitUntilHasLeader(leaderElector) { - leaderElector.awaitLeadership().catch(() => {}); - await (0, _index.promiseWait)(0); - while (true) { - var has = await leaderElector.hasLeader(); - if (has || leaderElector.broadcastChannel.isClosed || leaderElector.isDead) { - return; - } - if (leaderElector.applyOnce) { - await leaderElector.applyOnce(); - } else { - /** - * Trigger applying for leadership - * but do not await it in case another - * instance becomes leader first. - */ - leaderElector.awaitLeadership().catch(() => {}); - } - await (0, _index.promiseWait)(20); - } -} - -/** - * If the local state must be used, that one is returned. 
- * Returns false if a remote instance must be used. - */ -async function mustUseLocalState(instance) { - if (instance.closed) { - /** - * If this happens, it means that RxDB made a call to an already closed storage instance. - * This must never happen because when RxDB closes a collection or database, - * all tasks must be cleared so that no more calls are made the the storage. - */ - throw new Error('already closed ' + JSON.stringify({ - instanceClosed: instance.closed, - databaseName: instance.databaseName, - collectionName: instance.collectionName - })); - } - if (instance.internals.localState) { - return instance.internals.localState; - } - var leaderElector = (0, _index.ensureNotFalsy)(instance.internals.leaderElector); - await waitUntilHasLeader(leaderElector); - - /** - * It might already have a localState after the applying - * because another subtask also called mustUSeLocalState() - */ - if (instance.internals.localState) { - return instance.internals.localState; - } - if (leaderElector.isLeader && !instance.internals.localState) { - // own is leader, use local instance - instance.internals.localState = (0, _rxStorageInstanceLoki.createLokiLocalState)({ - databaseInstanceToken: instance.databaseInstanceToken, - databaseName: instance.databaseName, - collectionName: instance.collectionName, - options: instance.options, - schema: instance.schema, - multiInstance: instance.internals.leaderElector ? true : false, - devMode: _overwritable.overwritable.isDevMode() - }, instance.databaseSettings); - return (0, _index.ensureNotFalsy)(instance.internals.localState); - } else { - // other is leader, send message to remote leading instance - return false; - } -} - -/** - * LokiJS does not understand the 'official' $regex operator, - * so we have to transform these back into RegExp objects. 
- * @recursive - */ -function transformRegexToRegExp(selector) { - if (typeof selector !== 'object' || selector === null) { - return selector; - } - var keys = Object.keys(selector); - var ret = {}; - keys.forEach(key => { - var value = selector[key]; - if (key === '$options') { - return; - } - if (key === '$regex' && !(value instanceof RegExp)) { - var opts = selector['$options']; - ret[key] = new RegExp(value, opts); - } else if (Array.isArray(value)) { - ret[key] = value.map(item => transformRegexToRegExp(item)); - } else { - ret[key] = transformRegexToRegExp(value); - } - }); - return ret; -} -//# sourceMappingURL=lokijs-helper.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-lokijs/lokijs-helper.js.map b/dist/cjs/plugins/storage-lokijs/lokijs-helper.js.map deleted file mode 100644 index f2c0c333a95..00000000000 --- a/dist/cjs/plugins/storage-lokijs/lokijs-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"lokijs-helper.js","names":["_rxStorageInstanceLoki","require","_lokijs","_interopRequireDefault","_unload","_index","_lokiSaveQueue","_rxError","_rxStorageMultiinstance","_index2","_overwritable","CHANGES_COLLECTION_SUFFIX","exports","LOKI_BROADCAST_CHANNEL_MESSAGE_TYPE","LOKI_KEY_OBJECT_BROADCAST_CHANNEL_MESSAGE_TYPE","RX_STORAGE_NAME_LOKIJS","stripLokiKey","docData","$loki","cloned","flatClone","$lastWriteAt","_meta","lwt","OPEN_LOKIJS_STORAGE_INSTANCES","Set","LOKIJS_COLLECTION_DEFAULT_OPTIONS","disableChangesApi","disableMeta","disableDeltaChangesApi","disableFreeze","cloneMethod","clone","transactional","autoupdate","LOKI_DATABASE_STATE_BY_NAME","Map","getLokiDatabase","databaseName","databaseSettings","getFromMapOrCreate","hasPersistence","adapter","databaseState","persistenceMethod","useSettings","Object","assign","autoload","verbose","autosave","throttledSaves","database","Loki","lokiSaveQueue","LokiSaveQueue","loadDatabasePromise","Promise","res","rej","loadDatabase","recursiveWait","err","autoloadCallback","saveQu
eue","then","unloads","push","unloadAdd","run","state","collections","closeLokiCollections","get","forEach","collection","collectionName","name","keys","length","delete","u","remove","close","getLokiSortComparator","_schema","query","sort","newRxError","sortOptions","fun","a","b","compareResult","find","sortPart","fieldName","direction","values","directionMultiplier","valueA","getProperty","valueB","args","getLokiLeaderElector","databaseInstanceToken","broadcastChannelRefObject","broadcastChannel","getBroadcastChannelReference","elector","getLeaderElectorByBroadcastChannel","requestRemoteInstance","instance","operation","params","isRxStorageInstanceLoki","messageType","leaderElector","ensureNotFalsy","internals","waitUntilHasLeader","whenDeathListener","leaderDeadPromise","msg","context","action","retry","addEventListener","requestId","randomCouchString","responseListener","responsePromise","_rej","type","response","isError","error","result","postMessage","timeout","race","firstResolved","clearTimeout","removeEventListener","handleRemoteRequest","console","dir","awaitLeadership","catch","promiseWait","has","hasLeader","isClosed","isDead","applyOnce","mustUseLocalState","closed","Error","JSON","stringify","instanceClosed","localState","isLeader","createLokiLocalState","options","schema","multiInstance","devMode","overwritable","isDevMode","transformRegexToRegExp","selector","ret","key","value","RegExp","opts","Array","isArray","map","item"],"sources":["../../../../src/plugins/storage-lokijs/lokijs-helper.ts"],"sourcesContent":["import {\n createLokiLocalState,\n RxStorageInstanceLoki\n} from './rx-storage-instance-loki.ts';\nimport Loki from 'lokijs';\nimport type {\n DeterministicSortComparator,\n FilledMangoQuery,\n LokiDatabaseSettings,\n LokiDatabaseState,\n LokiLocalDatabaseState,\n LokiRemoteResponseBroadcastMessage,\n MangoQuerySortDirection,\n MangoQuerySortPart,\n RxDocumentData,\n RxJsonSchema\n} from '../../types/index.d.ts';\nimport {\n add as 
unloadAdd,\n AddReturn\n} from 'unload';\nimport {\n ensureNotFalsy,\n flatClone,\n getFromMapOrCreate,\n getProperty,\n promiseWait,\n randomCouchString\n} from '../utils/index.ts';\nimport { LokiSaveQueue } from './loki-save-queue.ts';\nimport { newRxError } from '../../rx-error.ts';\nimport {\n LeaderElector,\n OnMessageHandler\n} from 'broadcast-channel';\nimport { getBroadcastChannelReference } from '../../rx-storage-multiinstance.ts';\nimport { getLeaderElectorByBroadcastChannel } from '../leader-election/index.ts';\nimport { overwritable } from '../../overwritable.ts';\n\nexport const CHANGES_COLLECTION_SUFFIX = '-rxdb-changes';\nexport const LOKI_BROADCAST_CHANNEL_MESSAGE_TYPE = 'rxdb-lokijs-remote-request';\nexport const LOKI_KEY_OBJECT_BROADCAST_CHANNEL_MESSAGE_TYPE = 'rxdb-lokijs-remote-request-key-object';\nexport const RX_STORAGE_NAME_LOKIJS = 'lokijs';\n\n/**\n * Loki attaches a $loki property to all data\n * which must be removed before returning the data back to RxDB.\n */\nexport function stripLokiKey(docData: RxDocumentData & { $loki?: number; }): T {\n if (!docData.$loki) {\n return docData;\n }\n const cloned = flatClone(docData);\n\n /**\n * In RxDB version 12.0.0,\n * we introduced the _meta field that already contains the last write time.\n * To be backwards compatible, we have to move the $lastWriteAt to the _meta field.\n * TODO remove this in the next major version.\n */\n if ((cloned as any).$lastWriteAt) {\n cloned._meta = {\n lwt: (cloned as any).$lastWriteAt\n };\n delete (cloned as any).$lastWriteAt;\n }\n\n delete cloned.$loki;\n return cloned;\n}\n\n/**\n * Used to check in tests if all instances have been cleaned up.\n */\nexport const OPEN_LOKIJS_STORAGE_INSTANCES: Set> = new Set();\n\n\nexport const LOKIJS_COLLECTION_DEFAULT_OPTIONS: Partial = {\n disableChangesApi: true,\n disableMeta: true,\n disableDeltaChangesApi: true,\n disableFreeze: true,\n // TODO use 'immutable' like WatermelonDB does it\n cloneMethod: 
'shallow-assign',\n clone: false,\n transactional: false,\n autoupdate: false\n};\n\nconst LOKI_DATABASE_STATE_BY_NAME: Map> = new Map();\nexport function getLokiDatabase(\n databaseName: string,\n databaseSettings: LokiDatabaseSettings\n): Promise {\n\n\n return getFromMapOrCreate(\n LOKI_DATABASE_STATE_BY_NAME,\n databaseName,\n () => {\n /**\n * We assume that as soon as an adapter is passed,\n * the database has to be persistent.\n */\n const hasPersistence: boolean = !!databaseSettings.adapter;\n const databaseState = (async () => {\n let persistenceMethod = hasPersistence ? 'adapter' : 'memory';\n if (databaseSettings.persistenceMethod) {\n persistenceMethod = databaseSettings.persistenceMethod;\n }\n const useSettings = Object.assign(\n // defaults\n {\n autoload: hasPersistence,\n persistenceMethod,\n verbose: true\n },\n databaseSettings,\n // overwrites\n {\n /**\n * RxDB uses its custom load and save handling\n * so we disable the LokiJS save/load handlers.\n */\n autoload: false,\n autosave: false,\n throttledSaves: false\n }\n );\n const database = new Loki(\n databaseName + '.db',\n flatClone(useSettings)\n );\n const lokiSaveQueue = new LokiSaveQueue(\n database,\n useSettings\n );\n\n /**\n * Wait until all data is loaded from persistence adapter.\n * Wrap the loading into the saveQueue to ensure that when many\n * collections are created at the same time, the load-calls do not interfere\n * with each other and cause error logs.\n */\n if (hasPersistence) {\n const loadDatabasePromise = new Promise((res, rej) => {\n try {\n database.loadDatabase({\n recursiveWait: false\n }, (err) => {\n if (useSettings.autoloadCallback) {\n useSettings.autoloadCallback(err);\n }\n if (err) {\n rej(err);\n } else {\n res();\n }\n });\n } catch (err) {\n rej(err);\n }\n });\n lokiSaveQueue.saveQueue = lokiSaveQueue.saveQueue.then(() => loadDatabasePromise);\n await loadDatabasePromise;\n }\n\n /**\n * Autosave database on process end\n */\n const unloads: AddReturn[] 
= [];\n if (hasPersistence) {\n unloads.push(\n unloadAdd(() => lokiSaveQueue.run())\n );\n }\n\n const state: LokiDatabaseState = {\n database,\n databaseSettings: useSettings,\n saveQueue: lokiSaveQueue,\n collections: {},\n unloads\n };\n\n return state;\n })();\n return databaseState;\n }\n );\n}\n\nexport async function closeLokiCollections(\n databaseName: string,\n collections: any[]\n) {\n const databaseState = await LOKI_DATABASE_STATE_BY_NAME.get(databaseName);\n if (!databaseState) {\n // already closed\n return;\n }\n await databaseState.saveQueue.run();\n collections.forEach(collection => {\n const collectionName = collection.name;\n delete databaseState.collections[collectionName];\n });\n if (Object.keys(databaseState.collections).length === 0) {\n // all collections closed -> also close database\n LOKI_DATABASE_STATE_BY_NAME.delete(databaseName);\n databaseState.unloads.forEach(u => u.remove());\n await new Promise((res, rej) => {\n databaseState.database.close((err: any) => {\n if (err) {\n rej(err);\n } else {\n res();\n }\n });\n });\n }\n}\n\n/**\n * This function is at lokijs-helper\n * because we need it in multiple places.\n */\nexport function getLokiSortComparator(\n _schema: RxJsonSchema>,\n query: FilledMangoQuery\n): DeterministicSortComparator {\n if (!query.sort) {\n throw newRxError('SNH', { query });\n }\n const sortOptions: MangoQuerySortPart[] = query.sort;\n\n const fun: DeterministicSortComparator = (a: RxDocType, b: RxDocType) => {\n let compareResult: number = 0; // 1 | -1\n sortOptions.find(sortPart => {\n const fieldName: string = Object.keys(sortPart)[0];\n const direction: MangoQuerySortDirection = Object.values(sortPart)[0];\n const directionMultiplier = direction === 'asc' ? 
1 : -1;\n const valueA: any = getProperty(a as any, fieldName);\n const valueB: any = getProperty(b as any, fieldName);\n if (valueA === valueB) {\n return false;\n } else {\n if (valueA > valueB) {\n compareResult = 1 * directionMultiplier;\n return true;\n } else {\n compareResult = -1 * directionMultiplier;\n return true;\n }\n }\n });\n\n /**\n * Two different objects should never have the same sort position.\n * We ensure this by having the unique primaryKey in the sort params\n * which is added by RxDB if not existing yet.\n */\n if (!compareResult) {\n throw newRxError('SNH', { args: { query, a, b } });\n }\n\n return compareResult as any;\n };\n return fun;\n}\n\nexport function getLokiLeaderElector(\n databaseInstanceToken: string,\n broadcastChannelRefObject: any,\n databaseName: string\n): LeaderElector {\n const broadcastChannel = getBroadcastChannelReference(\n RX_STORAGE_NAME_LOKIJS,\n databaseInstanceToken,\n databaseName,\n broadcastChannelRefObject\n );\n const elector = getLeaderElectorByBroadcastChannel(broadcastChannel);\n return elector;\n}\n\n/**\n * For multi-instance usage, we send requests to the RxStorage\n * to the current leading instance over the BroadcastChannel.\n */\nexport async function requestRemoteInstance(\n instance: RxStorageInstanceLoki,\n operation: string,\n params: any[]\n): Promise {\n const isRxStorageInstanceLoki = typeof (instance as any).query === 'function';\n const messageType = isRxStorageInstanceLoki ? 
LOKI_BROADCAST_CHANNEL_MESSAGE_TYPE : LOKI_KEY_OBJECT_BROADCAST_CHANNEL_MESSAGE_TYPE;\n\n const leaderElector = ensureNotFalsy(instance.internals.leaderElector);\n await waitUntilHasLeader(leaderElector);\n const broadcastChannel = leaderElector.broadcastChannel;\n\n type WinningPromise = {\n retry: boolean;\n result?: any;\n error?: any;\n };\n\n let whenDeathListener: OnMessageHandler;\n const leaderDeadPromise = new Promise(res => {\n whenDeathListener = (msg: any) => {\n if (msg.context === 'leader' && msg.action === 'death') {\n res({\n retry: true\n });\n }\n };\n broadcastChannel.addEventListener('internal', whenDeathListener);\n });\n const requestId = randomCouchString(12);\n let responseListener: OnMessageHandler;\n const responsePromise = new Promise((res, _rej) => {\n responseListener = (msg: any) => {\n if (\n msg.type === messageType &&\n msg.response === true &&\n msg.requestId === requestId\n ) {\n if (msg.isError) {\n res({\n retry: false,\n error: msg.result\n });\n } else {\n res({\n retry: false,\n result: msg.result\n });\n }\n }\n };\n broadcastChannel.addEventListener('message', responseListener);\n });\n\n // send out the request to the other instance\n broadcastChannel.postMessage({\n response: false,\n type: messageType,\n operation,\n params,\n requestId,\n databaseName: instance.databaseName,\n collectionName: instance.collectionName\n });\n let timeout: ReturnType;\n return Promise.race([\n leaderDeadPromise,\n responsePromise,\n // // comment in timeout to debug\n // new Promise(res => {\n // timeout = setTimeout(() => {\n // res({ error: new Error('requestRemoteInstance() timeout errorored'), retry: false });\n // }, 500);\n // })\n\n ]).then(firstResolved => {\n if (timeout) {\n clearTimeout(timeout);\n }\n\n // clean up listeners\n broadcastChannel.removeEventListener('message', responseListener);\n broadcastChannel.removeEventListener('internal', whenDeathListener);\n\n if (firstResolved.retry) {\n /**\n * The leader died while a 
remote request was running\n * we re-run the whole operation.\n * We cannot just re-run requestRemoteInstance()\n * because the current instance might be the new leader now\n * and then we have to use the local state instead of requesting the remote.\n */\n return (instance as any)[operation](...params);\n } else {\n if (firstResolved.error) {\n throw firstResolved.error;\n } else {\n return firstResolved.result;\n }\n }\n });\n}\n\n/**\n * Handles a request that came from a remote instance via requestRemoteInstance()\n * Runs the requested operation over the local db instance and sends back the result.\n */\nexport async function handleRemoteRequest(\n instance: RxStorageInstanceLoki,\n msg: any\n) {\n if (\n msg.type === LOKI_BROADCAST_CHANNEL_MESSAGE_TYPE &&\n msg.requestId &&\n msg.databaseName === instance.databaseName &&\n msg.collectionName === instance.collectionName &&\n !msg.response\n ) {\n const operation = (msg as any).operation;\n const params = (msg as any).params;\n let result: any;\n let isError = false;\n try {\n result = await (instance as any)[operation](...params);\n } catch (err) {\n console.dir(err);\n isError = true;\n result = err;\n }\n const response: LokiRemoteResponseBroadcastMessage = {\n response: true,\n requestId: msg.requestId,\n databaseName: instance.databaseName,\n collectionName: instance.collectionName,\n result,\n isError,\n type: msg.type\n };\n ensureNotFalsy(instance.internals.leaderElector).broadcastChannel.postMessage(response);\n }\n}\n\nexport async function waitUntilHasLeader(leaderElector: LeaderElector) {\n leaderElector.awaitLeadership().catch(() => { });\n await promiseWait(0);\n while (true) {\n const has = await leaderElector.hasLeader();\n if (\n has ||\n leaderElector.broadcastChannel.isClosed ||\n leaderElector.isDead\n ) {\n return;\n }\n\n if (leaderElector.applyOnce) {\n await leaderElector.applyOnce();\n } else {\n /**\n * Trigger applying for leadership\n * but do not await it in case another\n * 
instance becomes leader first.\n */\n leaderElector.awaitLeadership().catch(() => { });\n }\n await promiseWait(20);\n }\n}\n\n/**\n * If the local state must be used, that one is returned.\n * Returns false if a remote instance must be used.\n */\nexport async function mustUseLocalState(\n instance: RxStorageInstanceLoki\n): Promise {\n if (instance.closed) {\n /**\n * If this happens, it means that RxDB made a call to an already closed storage instance.\n * This must never happen because when RxDB closes a collection or database,\n * all tasks must be cleared so that no more calls are made the the storage.\n */\n throw new Error('already closed ' + JSON.stringify(\n {\n instanceClosed: instance.closed,\n databaseName: instance.databaseName,\n collectionName: instance.collectionName\n }\n ));\n }\n\n\n if (instance.internals.localState) {\n return instance.internals.localState;\n }\n const leaderElector = ensureNotFalsy(instance.internals.leaderElector);\n await waitUntilHasLeader(leaderElector);\n\n /**\n * It might already have a localState after the applying\n * because another subtask also called mustUSeLocalState()\n */\n if (instance.internals.localState) {\n return instance.internals.localState;\n }\n\n if (\n leaderElector.isLeader &&\n !instance.internals.localState\n ) {\n // own is leader, use local instance\n instance.internals.localState = createLokiLocalState({\n databaseInstanceToken: instance.databaseInstanceToken,\n databaseName: instance.databaseName,\n collectionName: instance.collectionName,\n options: instance.options,\n schema: (instance as RxStorageInstanceLoki).schema,\n multiInstance: instance.internals.leaderElector ? 
true : false,\n devMode: overwritable.isDevMode()\n }, instance.databaseSettings);\n return ensureNotFalsy(instance.internals.localState);\n } else {\n // other is leader, send message to remote leading instance\n return false;\n }\n}\n\n\n/**\n * LokiJS does not understand the 'official' $regex operator,\n * so we have to transform these back into RegExp objects.\n * @recursive\n */\nexport function transformRegexToRegExp(selector: any) {\n if (typeof selector !== 'object' || selector === null) {\n return selector;\n }\n\n const keys = Object.keys(selector);\n const ret: any = {};\n keys.forEach(key => {\n const value: any = selector[key];\n if (key === '$options') {\n return;\n }\n if (\n key === '$regex' &&\n !(value instanceof RegExp)\n ) {\n const opts = selector['$options'];\n ret[key] = new RegExp(value, opts);\n } else if (Array.isArray(value)) {\n ret[key] = value.map(item => transformRegexToRegExp(item));\n } else {\n ret[key] = transformRegexToRegExp(value);\n }\n });\n return 
ret;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;AAAA,IAAAA,sBAAA,GAAAC,OAAA;AAIA,IAAAC,OAAA,GAAAC,sBAAA,CAAAF,OAAA;AAaA,IAAAG,OAAA,GAAAH,OAAA;AAIA,IAAAI,MAAA,GAAAJ,OAAA;AAQA,IAAAK,cAAA,GAAAL,OAAA;AACA,IAAAM,QAAA,GAAAN,OAAA;AAKA,IAAAO,uBAAA,GAAAP,OAAA;AACA,IAAAQ,OAAA,GAAAR,OAAA;AACA,IAAAS,aAAA,GAAAT,OAAA;AAEO,IAAMU,yBAAyB,GAAAC,OAAA,CAAAD,yBAAA,GAAG,eAAe;AACjD,IAAME,mCAAmC,GAAAD,OAAA,CAAAC,mCAAA,GAAG,4BAA4B;AACxE,IAAMC,8CAA8C,GAAAF,OAAA,CAAAE,8CAAA,GAAG,uCAAuC;AAC9F,IAAMC,sBAAsB,GAAAH,OAAA,CAAAG,sBAAA,GAAG,QAAQ;;AAE9C;AACA;AACA;AACA;AACO,SAASC,YAAYA,CAAIC,OAAgD,EAAK;EACjF,IAAI,CAACA,OAAO,CAACC,KAAK,EAAE;IAChB,OAAOD,OAAO;EAClB;EACA,IAAME,MAAM,GAAG,IAAAC,gBAAS,EAACH,OAAO,CAAC;;EAEjC;AACJ;AACA;AACA;AACA;AACA;EACI,IAAKE,MAAM,CAASE,YAAY,EAAE;IAC9BF,MAAM,CAACG,KAAK,GAAG;MACXC,GAAG,EAAGJ,MAAM,CAASE;IACzB,CAAC;IACD,OAAQF,MAAM,CAASE,YAAY;EACvC;EAEA,OAAOF,MAAM,CAACD,KAAK;EACnB,OAAOC,MAAM;AACjB;;AAEA;AACA;AACA;AACO,IAAMK,6BAA8D,GAAAZ,OAAA,CAAAY,6BAAA,GAAG,IAAIC,GAAG,CAAC,CAAC;AAGhF,IAAMC,iCAA+C,GAAAd,OAAA,CAAAc,iCAAA,GAAG;EAC3DC,iBAAiB,EAAE,IAAI;EACvBC,WAAW,EAAE,IAAI;EACjBC,sBAAsB,EAAE,IAAI;EAC5BC,aAAa,EAAE,IAAI;EACnB;EACAC,WAAW,EAAE,gBAAgB;EAC7BC,KAAK,EAAE,KAAK;EACZC,aAAa,EAAE,KAAK;EACpBC,UAAU,EAAE;AAChB,CAAC;AAED,IAAMC,2BAAoE,GAAG,IAAIC,GAAG,CAAC,CAAC;AAC/E,SAASC,eAAeA,CAC3BC,YAAoB,EACpBC,gBAAsC,EACZ;EAG1B,OAAO,IAAAC,yBAAkB,EACrBL,2BAA2B,EAC3BG,YAAY,EACZ,MAAM;IACF;AACZ;AACA;AACA;IACY,IAAMG,cAAuB,GAAG,CAAC,CAACF,gBAAgB,CAACG,OAAO;IAC1D,IAAMC,aAAa,GAAG,CAAC,YAAY;MAC/B,IAAIC,iBAAiB,GAAGH,cAAc,GAAG,SAAS,GAAG,QAAQ;MAC7D,IAAIF,gBAAgB,CAACK,iBAAiB,EAAE;QACpCA,iBAAiB,GAAGL,gBAAgB,CAACK,iBAAiB;MAC1D;MACA,IAAMC,WAAW,GAAGC,MAAM,CAACC,MAAM;MAC7B;MACA;QACIC,QAAQ,EAAEP,cAAc;QACxBG,iBAAiB;QACjBK,OAAO,EAAE;MACb,CAAC,EACDV,gBAAgB;MAChB;MACA;QACI;AACxB;AACA;AACA;QACwBS,QAAQ,EAAE,KAAK;QACfE,QAAQ,EAAE,KAAK;QACfC,cAAc,EAAE;MACpB,CACJ,CAAC;MACD,IAAMC,QAAQ,GAAG,IAAIC,eAAI,CACrBf,YAAY,GAAG,KAAK,EACpB,IAAAlB,gBAAS,EAACyB,WAAW,CACzB,CAAC;MACD,IAAMS,aAAa,GAAG,IAAIC,4BAAa,CACnCH,QAAQ,EACRP,WACJ,CAAC;;MAED;AACh
B;AACA;AACA;AACA;AACA;MACgB,IAAIJ,cAAc,EAAE;QAChB,IAAMe,mBAAmB,GAAG,IAAIC,OAAO,CAAO,CAACC,GAAG,EAAEC,GAAG,KAAK;UACxD,IAAI;YACAP,QAAQ,CAACQ,YAAY,CAAC;cAClBC,aAAa,EAAE;YACnB,CAAC,EAAGC,GAAG,IAAK;cACR,IAAIjB,WAAW,CAACkB,gBAAgB,EAAE;gBAC9BlB,WAAW,CAACkB,gBAAgB,CAACD,GAAG,CAAC;cACrC;cACA,IAAIA,GAAG,EAAE;gBACLH,GAAG,CAACG,GAAG,CAAC;cACZ,CAAC,MAAM;gBACHJ,GAAG,CAAC,CAAC;cACT;YACJ,CAAC,CAAC;UACN,CAAC,CAAC,OAAOI,GAAG,EAAE;YACVH,GAAG,CAACG,GAAG,CAAC;UACZ;QACJ,CAAC,CAAC;QACFR,aAAa,CAACU,SAAS,GAAGV,aAAa,CAACU,SAAS,CAACC,IAAI,CAAC,MAAMT,mBAAmB,CAAC;QACjF,MAAMA,mBAAmB;MAC7B;;MAEA;AAChB;AACA;MACgB,IAAMU,OAAoB,GAAG,EAAE;MAC/B,IAAIzB,cAAc,EAAE;QAChByB,OAAO,CAACC,IAAI,CACR,IAAAC,WAAS,EAAC,MAAMd,aAAa,CAACe,GAAG,CAAC,CAAC,CACvC,CAAC;MACL;MAEA,IAAMC,KAAwB,GAAG;QAC7BlB,QAAQ;QACRb,gBAAgB,EAAEM,WAAW;QAC7BmB,SAAS,EAAEV,aAAa;QACxBiB,WAAW,EAAE,CAAC,CAAC;QACfL;MACJ,CAAC;MAED,OAAOI,KAAK;IAChB,CAAC,EAAE,CAAC;IACJ,OAAO3B,aAAa;EACxB,CACJ,CAAC;AACL;AAEO,eAAe6B,oBAAoBA,CACtClC,YAAoB,EACpBiC,WAAkB,EACpB;EACE,IAAM5B,aAAa,GAAG,MAAMR,2BAA2B,CAACsC,GAAG,CAACnC,YAAY,CAAC;EACzE,IAAI,CAACK,aAAa,EAAE;IAChB;IACA;EACJ;EACA,MAAMA,aAAa,CAACqB,SAAS,CAACK,GAAG,CAAC,CAAC;EACnCE,WAAW,CAACG,OAAO,CAACC,UAAU,IAAI;IAC9B,IAAMC,cAAc,GAAGD,UAAU,CAACE,IAAI;IACtC,OAAOlC,aAAa,CAAC4B,WAAW,CAACK,cAAc,CAAC;EACpD,CAAC,CAAC;EACF,IAAI9B,MAAM,CAACgC,IAAI,CAACnC,aAAa,CAAC4B,WAAW,CAAC,CAACQ,MAAM,KAAK,CAAC,EAAE;IACrD;IACA5C,2BAA2B,CAAC6C,MAAM,CAAC1C,YAAY,CAAC;IAChDK,aAAa,CAACuB,OAAO,CAACQ,OAAO,CAACO,CAAC,IAAIA,CAAC,CAACC,MAAM,CAAC,CAAC,CAAC;IAC9C,MAAM,IAAIzB,OAAO,CAAO,CAACC,GAAG,EAAEC,GAAG,KAAK;MAClChB,aAAa,CAACS,QAAQ,CAAC+B,KAAK,CAAErB,GAAQ,IAAK;QACvC,IAAIA,GAAG,EAAE;UACLH,GAAG,CAACG,GAAG,CAAC;QACZ,CAAC,MAAM;UACHJ,GAAG,CAAC,CAAC;QACT;MACJ,CAAC,CAAC;IACN,CAAC,CAAC;EACN;AACJ;;AAEA;AACA;AACA;AACA;AACO,SAAS0B,qBAAqBA,CACjCC,OAAgD,EAChDC,KAAkC,EACI;EACtC,IAAI,CAACA,KAAK,CAACC,IAAI,EAAE;IACb,MAAM,IAAAC,mBAAU,EAAC,KAAK,EAAE;MAAEF;IAAM,CAAC,CAAC;EACtC;EACA,IAAMG,WAA4C,GAAGH,KAAK,CAACC,IAAI;EAE/D,IAAMG,GAA2C,GAAGA,CAACC,CAAY,EAAEC,CAAY,KAAK;IAC
hF,IAAIC,aAAqB,GAAG,CAAC,CAAC,CAAC;IAC/BJ,WAAW,CAACK,IAAI,CAACC,QAAQ,IAAI;MACzB,IAAMC,SAAiB,GAAGlD,MAAM,CAACgC,IAAI,CAACiB,QAAQ,CAAC,CAAC,CAAC,CAAC;MAClD,IAAME,SAAkC,GAAGnD,MAAM,CAACoD,MAAM,CAACH,QAAQ,CAAC,CAAC,CAAC,CAAC;MACrE,IAAMI,mBAAmB,GAAGF,SAAS,KAAK,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC;MACxD,IAAMG,MAAW,GAAG,IAAAC,kBAAW,EAACV,CAAC,EAASK,SAAS,CAAC;MACpD,IAAMM,MAAW,GAAG,IAAAD,kBAAW,EAACT,CAAC,EAASI,SAAS,CAAC;MACpD,IAAII,MAAM,KAAKE,MAAM,EAAE;QACnB,OAAO,KAAK;MAChB,CAAC,MAAM;QACH,IAAIF,MAAM,GAAGE,MAAM,EAAE;UACjBT,aAAa,GAAG,CAAC,GAAGM,mBAAmB;UACvC,OAAO,IAAI;QACf,CAAC,MAAM;UACHN,aAAa,GAAG,CAAC,CAAC,GAAGM,mBAAmB;UACxC,OAAO,IAAI;QACf;MACJ;IACJ,CAAC,CAAC;;IAEF;AACR;AACA;AACA;AACA;IACQ,IAAI,CAACN,aAAa,EAAE;MAChB,MAAM,IAAAL,mBAAU,EAAC,KAAK,EAAE;QAAEe,IAAI,EAAE;UAAEjB,KAAK;UAAEK,CAAC;UAAEC;QAAE;MAAE,CAAC,CAAC;IACtD;IAEA,OAAOC,aAAa;EACxB,CAAC;EACD,OAAOH,GAAG;AACd;AAEO,SAASc,oBAAoBA,CAChCC,qBAA6B,EAC7BC,yBAA8B,EAC9BpE,YAAoB,EACP;EACb,IAAMqE,gBAAgB,GAAG,IAAAC,oDAA4B,EACjD7F,sBAAsB,EACtB0F,qBAAqB,EACrBnE,YAAY,EACZoE,yBACJ,CAAC;EACD,IAAMG,OAAO,GAAG,IAAAC,0CAAkC,EAACH,gBAAgB,CAAC;EACpE,OAAOE,OAAO;AAClB;;AAEA;AACA;AACA;AACA;AACO,eAAeE,qBAAqBA,CACvCC,QAAoC,EACpCC,SAAiB,EACjBC,MAAa,EACO;EACpB,IAAMC,uBAAuB,GAAG,OAAQH,QAAQ,CAAS1B,KAAK,KAAK,UAAU;EAC7E,IAAM8B,WAAW,GAAGD,uBAAuB,GAAGtG,mCAAmC,GAAGC,8CAA8C;EAElI,IAAMuG,aAAa,GAAG,IAAAC,qBAAc,EAACN,QAAQ,CAACO,SAAS,CAACF,aAAa,CAAC;EACtE,MAAMG,kBAAkB,CAACH,aAAa,CAAC;EACvC,IAAMV,gBAAgB,GAAGU,aAAa,CAACV,gBAAgB;EAQvD,IAAIc,iBAAwC;EAC5C,IAAMC,iBAAiB,GAAG,IAAIjE,OAAO,CAAiBC,GAAG,IAAI;IACzD+D,iBAAiB,GAAIE,GAAQ,IAAK;MAC9B,IAAIA,GAAG,CAACC,OAAO,KAAK,QAAQ,IAAID,GAAG,CAACE,MAAM,KAAK,OAAO,EAAE;QACpDnE,GAAG,CAAC;UACAoE,KAAK,EAAE;QACX,CAAC,CAAC;MACN;IACJ,CAAC;IACDnB,gBAAgB,CAACoB,gBAAgB,CAAC,UAAU,EAAEN,iBAAiB,CAAC;EACpE,CAAC,CAAC;EACF,IAAMO,SAAS,GAAG,IAAAC,wBAAiB,EAAC,EAAE,CAAC;EACvC,IAAIC,gBAAuC;EAC3C,IAAMC,eAAe,GAAG,IAAI1E,OAAO,CAAiB,CAACC,GAAG,EAAE0E,IAAI,KAAK;IAC/DF,gBAAgB,GAAIP,GAAQ,IAAK;MAC7B,IACIA,GAAG,CAACU,IAAI,KAAKjB,WAAW,IACxBO,GAAG,CAACW,QAAQ,KAAK,IAA
I,IACrBX,GAAG,CAACK,SAAS,KAAKA,SAAS,EAC7B;QACE,IAAIL,GAAG,CAACY,OAAO,EAAE;UACb7E,GAAG,CAAC;YACAoE,KAAK,EAAE,KAAK;YACZU,KAAK,EAAEb,GAAG,CAACc;UACf,CAAC,CAAC;QACN,CAAC,MAAM;UACH/E,GAAG,CAAC;YACAoE,KAAK,EAAE,KAAK;YACZW,MAAM,EAAEd,GAAG,CAACc;UAChB,CAAC,CAAC;QACN;MACJ;IACJ,CAAC;IACD9B,gBAAgB,CAACoB,gBAAgB,CAAC,SAAS,EAAEG,gBAAgB,CAAC;EAClE,CAAC,CAAC;;EAEF;EACAvB,gBAAgB,CAAC+B,WAAW,CAAC;IACzBJ,QAAQ,EAAE,KAAK;IACfD,IAAI,EAAEjB,WAAW;IACjBH,SAAS;IACTC,MAAM;IACNc,SAAS;IACT1F,YAAY,EAAE0E,QAAQ,CAAC1E,YAAY;IACnCsC,cAAc,EAAEoC,QAAQ,CAACpC;EAC7B,CAAC,CAAC;EACF,IAAI+D,OAAsC;EAC1C,OAAOlF,OAAO,CAACmF,IAAI,CAAC,CAChBlB,iBAAiB,EACjBS;EACA;EACA;EACA;EACA;EACA;EACA;EAAA,CAEH,CAAC,CAAClE,IAAI,CAAC4E,aAAa,IAAI;IACrB,IAAIF,OAAO,EAAE;MACTG,YAAY,CAACH,OAAO,CAAC;IACzB;;IAEA;IACAhC,gBAAgB,CAACoC,mBAAmB,CAAC,SAAS,EAAEb,gBAAgB,CAAC;IACjEvB,gBAAgB,CAACoC,mBAAmB,CAAC,UAAU,EAAEtB,iBAAiB,CAAC;IAEnE,IAAIoB,aAAa,CAACf,KAAK,EAAE;MACrB;AACZ;AACA;AACA;AACA;AACA;AACA;MACY,OAAQd,QAAQ,CAASC,SAAS,CAAC,CAAC,GAAGC,MAAM,CAAC;IAClD,CAAC,MAAM;MACH,IAAI2B,aAAa,CAACL,KAAK,EAAE;QACrB,MAAMK,aAAa,CAACL,KAAK;MAC7B,CAAC,MAAM;QACH,OAAOK,aAAa,CAACJ,MAAM;MAC/B;IACJ;EACJ,CAAC,CAAC;AACN;;AAEA;AACA;AACA;AACA;AACO,eAAeO,mBAAmBA,CACrChC,QAAoC,EACpCW,GAAQ,EACV;EACE,IACIA,GAAG,CAACU,IAAI,KAAKxH,mCAAmC,IAChD8G,GAAG,CAACK,SAAS,IACbL,GAAG,CAACrF,YAAY,KAAK0E,QAAQ,CAAC1E,YAAY,IAC1CqF,GAAG,CAAC/C,cAAc,KAAKoC,QAAQ,CAACpC,cAAc,IAC9C,CAAC+C,GAAG,CAACW,QAAQ,EACf;IACE,IAAMrB,SAAS,GAAIU,GAAG,CAASV,SAAS;IACxC,IAAMC,MAAM,GAAIS,GAAG,CAAST,MAAM;IAClC,IAAIuB,MAAW;IACf,IAAIF,OAAO,GAAG,KAAK;IACnB,IAAI;MACAE,MAAM,GAAG,MAAOzB,QAAQ,CAASC,SAAS,CAAC,CAAC,GAAGC,MAAM,CAAC;IAC1D,CAAC,CAAC,OAAOpD,GAAG,EAAE;MACVmF,OAAO,CAACC,GAAG,CAACpF,GAAG,CAAC;MAChByE,OAAO,GAAG,IAAI;MACdE,MAAM,GAAG3E,GAAG;IAChB;IACA,IAAMwE,QAA4C,GAAG;MACjDA,QAAQ,EAAE,IAAI;MACdN,SAAS,EAAEL,GAAG,CAACK,SAAS;MACxB1F,YAAY,EAAE0E,QAAQ,CAAC1E,YAAY;MACnCsC,cAAc,EAAEoC,QAAQ,CAACpC,cAAc;MACvC6D,MAAM;MACNF,OAAO;MACPF,IAAI,EAAEV,GAAG,CAACU;IACd,CAAC;IACD,IAAAf,qBAAc,EAACN,QAAQ,CAACO,SAAS,CAACF,aAAa,CAA
C,CAACV,gBAAgB,CAAC+B,WAAW,CAACJ,QAAQ,CAAC;EAC3F;AACJ;AAEO,eAAed,kBAAkBA,CAACH,aAA4B,EAAE;EACnEA,aAAa,CAAC8B,eAAe,CAAC,CAAC,CAACC,KAAK,CAAC,MAAM,CAAE,CAAC,CAAC;EAChD,MAAM,IAAAC,kBAAW,EAAC,CAAC,CAAC;EACpB,OAAO,IAAI,EAAE;IACT,IAAMC,GAAG,GAAG,MAAMjC,aAAa,CAACkC,SAAS,CAAC,CAAC;IAC3C,IACID,GAAG,IACHjC,aAAa,CAACV,gBAAgB,CAAC6C,QAAQ,IACvCnC,aAAa,CAACoC,MAAM,EACtB;MACE;IACJ;IAEA,IAAIpC,aAAa,CAACqC,SAAS,EAAE;MACzB,MAAMrC,aAAa,CAACqC,SAAS,CAAC,CAAC;IACnC,CAAC,MAAM;MACH;AACZ;AACA;AACA;AACA;MACYrC,aAAa,CAAC8B,eAAe,CAAC,CAAC,CAACC,KAAK,CAAC,MAAM,CAAE,CAAC,CAAC;IACpD;IACA,MAAM,IAAAC,kBAAW,EAAC,EAAE,CAAC;EACzB;AACJ;;AAEA;AACA;AACA;AACA;AACO,eAAeM,iBAAiBA,CACnC3C,QAAoC,EACG;EACvC,IAAIA,QAAQ,CAAC4C,MAAM,EAAE;IACjB;AACR;AACA;AACA;AACA;IACQ,MAAM,IAAIC,KAAK,CAAC,iBAAiB,GAAGC,IAAI,CAACC,SAAS,CAC9C;MACIC,cAAc,EAAEhD,QAAQ,CAAC4C,MAAM;MAC/BtH,YAAY,EAAE0E,QAAQ,CAAC1E,YAAY;MACnCsC,cAAc,EAAEoC,QAAQ,CAACpC;IAC7B,CACJ,CAAC,CAAC;EACN;EAGA,IAAIoC,QAAQ,CAACO,SAAS,CAAC0C,UAAU,EAAE;IAC/B,OAAOjD,QAAQ,CAACO,SAAS,CAAC0C,UAAU;EACxC;EACA,IAAM5C,aAAa,GAAG,IAAAC,qBAAc,EAACN,QAAQ,CAACO,SAAS,CAACF,aAAa,CAAC;EACtE,MAAMG,kBAAkB,CAACH,aAAa,CAAC;;EAEvC;AACJ;AACA;AACA;EACI,IAAIL,QAAQ,CAACO,SAAS,CAAC0C,UAAU,EAAE;IAC/B,OAAOjD,QAAQ,CAACO,SAAS,CAAC0C,UAAU;EACxC;EAEA,IACI5C,aAAa,CAAC6C,QAAQ,IACtB,CAAClD,QAAQ,CAACO,SAAS,CAAC0C,UAAU,EAChC;IACE;IACAjD,QAAQ,CAACO,SAAS,CAAC0C,UAAU,GAAG,IAAAE,2CAAoB,EAAM;MACtD1D,qBAAqB,EAAEO,QAAQ,CAACP,qBAAqB;MACrDnE,YAAY,EAAE0E,QAAQ,CAAC1E,YAAY;MACnCsC,cAAc,EAAEoC,QAAQ,CAACpC,cAAc;MACvCwF,OAAO,EAAEpD,QAAQ,CAACoD,OAAO;MACzBC,MAAM,EAAGrD,QAAQ,CAAgCqD,MAAM;MACvDC,aAAa,EAAEtD,QAAQ,CAACO,SAAS,CAACF,aAAa,GAAG,IAAI,GAAG,KAAK;MAC9DkD,OAAO,EAAEC,0BAAY,CAACC,SAAS,CAAC;IACpC,CAAC,EAAEzD,QAAQ,CAACzE,gBAAgB,CAAC;IAC7B,OAAO,IAAA+E,qBAAc,EAACN,QAAQ,CAACO,SAAS,CAAC0C,UAAU,CAAC;EACxD,CAAC,MAAM;IACH;IACA,OAAO,KAAK;EAChB;AACJ;;AAGA;AACA;AACA;AACA;AACA;AACO,SAASS,sBAAsBA,CAACC,QAAa,EAAE;EAClD,IAAI,OAAOA,QAAQ,KAAK,QAAQ,IAAIA,QAAQ,KAAK,IAAI,EAAE;IACnD,OAAOA,QAAQ;EACnB;EAEA,IAAM7F,IAAI,GAAGhC,MAAM,CAACgC,IAAI,CA
AC6F,QAAQ,CAAC;EAClC,IAAMC,GAAQ,GAAG,CAAC,CAAC;EACnB9F,IAAI,CAACJ,OAAO,CAACmG,GAAG,IAAI;IAChB,IAAMC,KAAU,GAAGH,QAAQ,CAACE,GAAG,CAAC;IAChC,IAAIA,GAAG,KAAK,UAAU,EAAE;MACpB;IACJ;IACA,IACIA,GAAG,KAAK,QAAQ,IAChB,EAAEC,KAAK,YAAYC,MAAM,CAAC,EAC5B;MACE,IAAMC,IAAI,GAAGL,QAAQ,CAAC,UAAU,CAAC;MACjCC,GAAG,CAACC,GAAG,CAAC,GAAG,IAAIE,MAAM,CAACD,KAAK,EAAEE,IAAI,CAAC;IACtC,CAAC,MAAM,IAAIC,KAAK,CAACC,OAAO,CAACJ,KAAK,CAAC,EAAE;MAC7BF,GAAG,CAACC,GAAG,CAAC,GAAGC,KAAK,CAACK,GAAG,CAACC,IAAI,IAAIV,sBAAsB,CAACU,IAAI,CAAC,CAAC;IAC9D,CAAC,MAAM;MACHR,GAAG,CAACC,GAAG,CAAC,GAAGH,sBAAsB,CAACI,KAAK,CAAC;IAC5C;EACJ,CAAC,CAAC;EACF,OAAOF,GAAG;AACd","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-lokijs/rx-storage-instance-loki.js b/dist/cjs/plugins/storage-lokijs/rx-storage-instance-loki.js deleted file mode 100644 index f5deeef5753..00000000000 --- a/dist/cjs/plugins/storage-lokijs/rx-storage-instance-loki.js +++ /dev/null @@ -1,325 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxStorageInstanceLoki = void 0; -exports.createLokiLocalState = createLokiLocalState; -exports.createLokiStorageInstance = createLokiStorageInstance; -var _rxjs = require("rxjs"); -var _index = require("../utils/index.js"); -var _rxError = require("../../rx-error.js"); -var _lokijsHelper = require("./lokijs-helper.js"); -var _rxSchemaHelper = require("../../rx-schema-helper.js"); -var _rxStorageHelper = require("../../rx-storage-helper.js"); -var _rxStorageMultiinstance = require("../../rx-storage-multiinstance.js"); -var _rxQueryHelper = require("../../rx-query-helper.js"); -var instanceId = (0, _index.now)(); -var shownNonPremiumLog = false; -var RxStorageInstanceLoki = exports.RxStorageInstanceLoki = /*#__PURE__*/function () { - function RxStorageInstanceLoki(databaseInstanceToken, storage, databaseName, collectionName, schema, internals, options, databaseSettings) { - this.changes$ = new _rxjs.Subject(); - this.instanceId = 
instanceId++; - this.databaseInstanceToken = databaseInstanceToken; - this.storage = storage; - this.databaseName = databaseName; - this.collectionName = collectionName; - this.schema = schema; - this.internals = internals; - this.options = options; - this.databaseSettings = databaseSettings; - this.primaryPath = (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(this.schema.primaryKey); - _lokijsHelper.OPEN_LOKIJS_STORAGE_INSTANCES.add(this); - if (this.internals.leaderElector) { - /** - * To run handleRemoteRequest(), - * the instance will call its own methods. - * But these methods could have already been swapped out by a RxStorageWrapper - * so we must store the original methods here and use them instead. - */ - var copiedSelf = { - bulkWrite: this.bulkWrite.bind(this), - changeStream: this.changeStream.bind(this), - cleanup: this.cleanup.bind(this), - close: this.close.bind(this), - query: this.query.bind(this), - count: this.count.bind(this), - findDocumentsById: this.findDocumentsById.bind(this), - collectionName: this.collectionName, - databaseName: this.databaseName, - conflictResultionTasks: this.conflictResultionTasks.bind(this), - getAttachmentData: this.getAttachmentData.bind(this), - internals: this.internals, - options: this.options, - remove: this.remove.bind(this), - resolveConflictResultionTask: this.resolveConflictResultionTask.bind(this), - schema: this.schema - }; - this.internals.leaderElector.awaitLeadership().then(() => { - // this instance is leader now, so it has to reply to queries from other instances - (0, _index.ensureNotFalsy)(this.internals.leaderElector).broadcastChannel.addEventListener('message', msg => (0, _lokijsHelper.handleRemoteRequest)(copiedSelf, msg)); - }).catch(() => {}); - } - } - var _proto = RxStorageInstanceLoki.prototype; - _proto.bulkWrite = async function bulkWrite(documentWrites, context) { - if (!shownNonPremiumLog && (!_index.RXDB_UTILS_GLOBAL.premium || typeof _index.RXDB_UTILS_GLOBAL.premium !== 'string' || 
(await (0, _index.defaultHashSha256)(_index.RXDB_UTILS_GLOBAL.premium)) !== _index.PREMIUM_FLAG_HASH)) { - console.warn(['-------------- RxDB Open Core RxStorage -------------------------------', 'You are using the free LokiJS based RxStorage implementation from RxDB https://rxdb.info/rx-storage-lokijs.html?console=loki ', 'While this is a great option, we want to let you know that there are faster storage solutions available in our premium plugins.', 'For professional users and production environments, we highly recommend considering these premium options to enhance performance and reliability.', ' https://rxdb.info/premium?console=loki ', 'If you already purchased premium access you can disable this log by calling the setPremiumFlag() function from rxdb-premium/plugins/shared.', '---------------------------------------------------------------------'].join('\n')); - shownNonPremiumLog = true; - } else { - shownNonPremiumLog = true; - } - if (documentWrites.length === 0) { - throw (0, _rxError.newRxError)('P2', { - args: { - documentWrites - } - }); - } - var localState = await (0, _lokijsHelper.mustUseLocalState)(this); - if (!localState) { - return (0, _lokijsHelper.requestRemoteInstance)(this, 'bulkWrite', [documentWrites]); - } - var ret = { - success: [], - error: [] - }; - var docsInDb = new Map(); - var docsInDbWithLokiKey = new Map(); - documentWrites.forEach(writeRow => { - var id = writeRow.document[this.primaryPath]; - var documentInDb = localState.collection.by(this.primaryPath, id); - if (documentInDb) { - docsInDbWithLokiKey.set(id, documentInDb); - docsInDb.set(id, (0, _lokijsHelper.stripLokiKey)(documentInDb)); - } - }); - var categorized = (0, _rxStorageHelper.categorizeBulkWriteRows)(this, this.primaryPath, docsInDb, documentWrites, context); - ret.error = categorized.errors; - categorized.bulkInsertDocs.forEach(writeRow => { - localState.collection.insert((0, _index.flatClone)(writeRow.document)); - ret.success.push(writeRow.document); - }); - 
categorized.bulkUpdateDocs.forEach(writeRow => { - var docId = writeRow.document[this.primaryPath]; - var documentInDbWithLokiKey = (0, _index.getFromMapOrThrow)(docsInDbWithLokiKey, docId); - var writeDoc = Object.assign({}, writeRow.document, { - $loki: documentInDbWithLokiKey.$loki - }); - localState.collection.update(writeDoc); - ret.success.push(writeRow.document); - }); - localState.databaseState.saveQueue.addWrite(); - if (categorized.eventBulk.events.length > 0) { - var lastState = (0, _index.ensureNotFalsy)(categorized.newestRow).document; - categorized.eventBulk.checkpoint = { - id: lastState[this.primaryPath], - lwt: lastState._meta.lwt - }; - categorized.eventBulk.endTime = (0, _index.now)(); - this.changes$.next(categorized.eventBulk); - } - return ret; - }; - _proto.findDocumentsById = async function findDocumentsById(ids, deleted) { - var localState = await (0, _lokijsHelper.mustUseLocalState)(this); - if (!localState) { - return (0, _lokijsHelper.requestRemoteInstance)(this, 'findDocumentsById', [ids, deleted]); - } - var ret = []; - ids.forEach(id => { - var documentInDb = localState.collection.by(this.primaryPath, id); - if (documentInDb && (!documentInDb._deleted || deleted)) { - ret.push((0, _lokijsHelper.stripLokiKey)(documentInDb)); - } - }); - return ret; - }; - _proto.query = async function query(preparedQueryOriginal) { - var localState = await (0, _lokijsHelper.mustUseLocalState)(this); - if (!localState) { - return (0, _lokijsHelper.requestRemoteInstance)(this, 'query', [preparedQueryOriginal]); - } - var preparedQuery = (0, _index.ensureNotFalsy)(preparedQueryOriginal.query); - if (preparedQuery.selector) { - preparedQuery = (0, _index.flatClone)(preparedQuery); - preparedQuery.selector = (0, _lokijsHelper.transformRegexToRegExp)(preparedQuery.selector); - } - var query = preparedQueryOriginal.query; - var skip = query.skip ? query.skip : 0; - var limit = query.limit ? 
query.limit : Infinity; - var skipPlusLimit = skip + limit; - - /** - * LokiJS is not able to give correct results for some - * operators, so we have to check all documents in that case - * and laster apply skip and limit manually. - * @link https://github.com/pubkey/rxdb/issues/5320 - */ - var mustRunMatcher = false; - if ((0, _index.hasDeepProperty)(preparedQuery.selector, '$in')) { - mustRunMatcher = true; - } - var lokiQuery = localState.collection.chain().find(mustRunMatcher ? {} : preparedQuery.selector); - if (preparedQuery.sort) { - lokiQuery = lokiQuery.sort((0, _lokijsHelper.getLokiSortComparator)(this.schema, preparedQuery)); - } - var foundDocuments = lokiQuery.data().map(lokiDoc => (0, _lokijsHelper.stripLokiKey)(lokiDoc)); - - /** - * LokiJS returned wrong results on some queries - * with complex indexes. Therefore we run the query-match - * over all result docs to patch this bug. - * TODO create an issue at the LokiJS repository. - */ - var queryMatcher = (0, _rxQueryHelper.getQueryMatcher)(this.schema, preparedQuery); - foundDocuments = foundDocuments.filter(d => queryMatcher(d)); - - // always apply offset and limit like this, because - // sylvieQuery.offset() and sylvieQuery.limit() results were inconsistent - foundDocuments = foundDocuments.slice(skip, skipPlusLimit); - return { - documents: foundDocuments - }; - }; - _proto.count = async function count(preparedQuery) { - var result = await this.query(preparedQuery); - return { - count: result.documents.length, - mode: 'fast' - }; - }; - _proto.getAttachmentData = function getAttachmentData(_documentId, _attachmentId, _digest) { - throw new Error('Attachments are not implemented in the lokijs RxStorage. 
Make a pull request.'); - }; - _proto.changeStream = function changeStream() { - return this.changes$.asObservable(); - }; - _proto.cleanup = async function cleanup(minimumDeletedTime) { - var localState = await (0, _lokijsHelper.mustUseLocalState)(this); - if (!localState) { - return (0, _lokijsHelper.requestRemoteInstance)(this, 'cleanup', [minimumDeletedTime]); - } - var deleteAmountPerRun = 10; - var maxDeletionTime = (0, _index.now)() - minimumDeletedTime; - var query = localState.collection.chain().find({ - _deleted: true, - '_meta.lwt': { - $lt: maxDeletionTime - } - }).limit(deleteAmountPerRun); - var foundDocuments = query.data(); - if (foundDocuments.length > 0) { - localState.collection.remove(foundDocuments); - localState.databaseState.saveQueue.addWrite(); - } - return foundDocuments.length !== deleteAmountPerRun; - }; - _proto.close = async function close() { - if (this.closed) { - return this.closed; - } - this.closed = (async () => { - this.changes$.complete(); - _lokijsHelper.OPEN_LOKIJS_STORAGE_INSTANCES.delete(this); - if (this.internals.localState) { - var localState = await this.internals.localState; - var dbState = await (0, _lokijsHelper.getLokiDatabase)(this.databaseName, this.databaseSettings); - await dbState.saveQueue.run(); - await (0, _lokijsHelper.closeLokiCollections)(this.databaseName, [localState.collection]); - } - })(); - return this.closed; - }; - _proto.remove = async function remove() { - var localState = await (0, _lokijsHelper.mustUseLocalState)(this); - if (!localState) { - return (0, _lokijsHelper.requestRemoteInstance)(this, 'remove', []); - } - localState.databaseState.database.removeCollection(localState.collection.name); - await localState.databaseState.saveQueue.run(); - return this.close(); - }; - _proto.conflictResultionTasks = function conflictResultionTasks() { - return new _rxjs.Subject(); - }; - _proto.resolveConflictResultionTask = async function resolveConflictResultionTask(_taskSolution) {}; - return 
RxStorageInstanceLoki; -}(); -async function createLokiLocalState(params, databaseSettings) { - if (!params.options) { - params.options = {}; - } - var databaseState = await (0, _lokijsHelper.getLokiDatabase)(params.databaseName, databaseSettings); - - /** - * Construct loki indexes from RxJsonSchema indexes. - * TODO what about compound indexes? Are they possible in lokijs? - */ - var indices = []; - if (params.schema.indexes) { - params.schema.indexes.forEach(idx => { - if (!(0, _index.isMaybeReadonlyArray)(idx)) { - indices.push(idx); - } - }); - } - /** - * LokiJS has no concept of custom primary key, they use a number-id that is generated. - * To be able to query fast by primary key, we always add an index to the primary. - */ - var primaryKey = (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(params.schema.primaryKey); - indices.push(primaryKey); - var lokiCollectionName = params.collectionName + '-' + params.schema.version; - var collectionOptions = Object.assign({}, lokiCollectionName, { - indices: indices, - unique: [primaryKey] - }, _lokijsHelper.LOKIJS_COLLECTION_DEFAULT_OPTIONS); - var collection = databaseState.database.addCollection(lokiCollectionName, collectionOptions); - databaseState.collections[params.collectionName] = collection; - var ret = { - databaseState, - collection - }; - return ret; -} -async function createLokiStorageInstance(storage, params, databaseSettings) { - var internals = {}; - var broadcastChannelRefObject = {}; - if (params.multiInstance) { - var leaderElector = (0, _lokijsHelper.getLokiLeaderElector)(params.databaseInstanceToken, broadcastChannelRefObject, params.databaseName); - internals.leaderElector = leaderElector; - } else { - // optimisation shortcut, directly create db is non multi instance. 
- internals.localState = createLokiLocalState(params, databaseSettings); - await internals.localState; - } - var instance = new RxStorageInstanceLoki(params.databaseInstanceToken, storage, params.databaseName, params.collectionName, params.schema, internals, params.options, databaseSettings); - await (0, _rxStorageMultiinstance.addRxStorageMultiInstanceSupport)(_lokijsHelper.RX_STORAGE_NAME_LOKIJS, params, instance, internals.leaderElector ? internals.leaderElector.broadcastChannel : undefined); - if (params.multiInstance) { - /** - * Clean up the broadcast-channel reference on close() - */ - var closeBefore = instance.close.bind(instance); - instance.close = function () { - (0, _rxStorageMultiinstance.removeBroadcastChannelReference)(params.databaseInstanceToken, broadcastChannelRefObject); - return closeBefore(); - }; - var removeBefore = instance.remove.bind(instance); - instance.remove = function () { - (0, _rxStorageMultiinstance.removeBroadcastChannelReference)(params.databaseInstanceToken, broadcastChannelRefObject); - return removeBefore(); - }; - - /** - * Directly create the localState when/if the db becomes leader. 
- */ - (0, _index.ensureNotFalsy)(internals.leaderElector).awaitLeadership().then(() => { - if (!instance.closed) { - (0, _lokijsHelper.mustUseLocalState)(instance); - } - }); - } - return instance; -} -//# sourceMappingURL=rx-storage-instance-loki.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-lokijs/rx-storage-instance-loki.js.map b/dist/cjs/plugins/storage-lokijs/rx-storage-instance-loki.js.map deleted file mode 100644 index c9e6dc76c0f..00000000000 --- a/dist/cjs/plugins/storage-lokijs/rx-storage-instance-loki.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-storage-instance-loki.js","names":["_rxjs","require","_index","_rxError","_lokijsHelper","_rxSchemaHelper","_rxStorageHelper","_rxStorageMultiinstance","_rxQueryHelper","instanceId","now","shownNonPremiumLog","RxStorageInstanceLoki","exports","databaseInstanceToken","storage","databaseName","collectionName","schema","internals","options","databaseSettings","changes$","Subject","primaryPath","getPrimaryFieldOfPrimaryKey","primaryKey","OPEN_LOKIJS_STORAGE_INSTANCES","add","leaderElector","copiedSelf","bulkWrite","bind","changeStream","cleanup","close","query","count","findDocumentsById","conflictResultionTasks","getAttachmentData","remove","resolveConflictResultionTask","awaitLeadership","then","ensureNotFalsy","broadcastChannel","addEventListener","msg","handleRemoteRequest","catch","_proto","prototype","documentWrites","context","RXDB_UTILS_GLOBAL","premium","defaultHashSha256","PREMIUM_FLAG_HASH","console","warn","join","length","newRxError","args","localState","mustUseLocalState","requestRemoteInstance","ret","success","error","docsInDb","Map","docsInDbWithLokiKey","forEach","writeRow","id","document","documentInDb","collection","by","set","stripLokiKey","categorized","categorizeBulkWriteRows","errors","bulkInsertDocs","insert","flatClone","push","bulkUpdateDocs","docId","documentInDbWithLokiKey","getFromMapOrThrow","writeDoc","Object","assign","$loki","update","databas
eState","saveQueue","addWrite","eventBulk","events","lastState","newestRow","checkpoint","lwt","_meta","endTime","next","ids","deleted","_deleted","preparedQueryOriginal","preparedQuery","selector","transformRegexToRegExp","skip","limit","Infinity","skipPlusLimit","mustRunMatcher","hasDeepProperty","lokiQuery","chain","find","sort","getLokiSortComparator","foundDocuments","data","map","lokiDoc","queryMatcher","getQueryMatcher","filter","d","slice","documents","result","mode","_documentId","_attachmentId","_digest","Error","asObservable","minimumDeletedTime","deleteAmountPerRun","maxDeletionTime","$lt","closed","complete","delete","dbState","getLokiDatabase","run","closeLokiCollections","database","removeCollection","name","_taskSolution","createLokiLocalState","params","indices","indexes","idx","isMaybeReadonlyArray","lokiCollectionName","version","collectionOptions","unique","LOKIJS_COLLECTION_DEFAULT_OPTIONS","addCollection","collections","createLokiStorageInstance","broadcastChannelRefObject","multiInstance","getLokiLeaderElector","instance","addRxStorageMultiInstanceSupport","RX_STORAGE_NAME_LOKIJS","undefined","closeBefore","removeBroadcastChannelReference","removeBefore"],"sources":["../../../../src/plugins/storage-lokijs/rx-storage-instance-loki.ts"],"sourcesContent":["import {\n Subject,\n Observable\n} from 'rxjs';\nimport {\n flatClone,\n now,\n ensureNotFalsy,\n isMaybeReadonlyArray,\n getFromMapOrThrow,\n hasDeepProperty,\n RXDB_UTILS_GLOBAL,\n defaultHashSha256,\n PREMIUM_FLAG_HASH\n} from '../utils/index.ts';\nimport { newRxError } from '../../rx-error.ts';\nimport type {\n RxStorageInstance,\n LokiSettings,\n RxStorageChangeEvent,\n RxDocumentData,\n BulkWriteRow,\n RxStorageBulkWriteResponse,\n RxStorageQueryResult,\n RxJsonSchema,\n MangoQuery,\n LokiStorageInternals,\n RxStorageInstanceCreationParams,\n LokiDatabaseSettings,\n LokiLocalDatabaseState,\n EventBulk,\n StringKeys,\n DeepReadonly,\n RxConflictResultionTask,\n 
RxConflictResultionTaskSolution,\n RxStorageDefaultCheckpoint,\n RxStorageCountResult,\n PreparedQuery\n} from '../../types/index.d.ts';\nimport {\n closeLokiCollections,\n getLokiDatabase,\n OPEN_LOKIJS_STORAGE_INSTANCES,\n LOKIJS_COLLECTION_DEFAULT_OPTIONS,\n stripLokiKey,\n getLokiSortComparator,\n getLokiLeaderElector,\n requestRemoteInstance,\n mustUseLocalState,\n handleRemoteRequest,\n RX_STORAGE_NAME_LOKIJS,\n transformRegexToRegExp\n} from './lokijs-helper.ts';\nimport type { RxStorageLoki } from './rx-storage-lokijs.ts';\nimport { getPrimaryFieldOfPrimaryKey } from '../../rx-schema-helper.ts';\nimport { categorizeBulkWriteRows } from '../../rx-storage-helper.ts';\nimport {\n addRxStorageMultiInstanceSupport,\n removeBroadcastChannelReference\n} from '../../rx-storage-multiinstance.ts';\nimport { getQueryMatcher } from '../../rx-query-helper.ts';\n\nlet instanceId = now();\nlet shownNonPremiumLog = false;\n\nexport class RxStorageInstanceLoki implements RxStorageInstance<\n RxDocType,\n LokiStorageInternals,\n LokiSettings,\n RxStorageDefaultCheckpoint\n> {\n\n public readonly primaryPath: StringKeys>;\n private changes$: Subject>, RxStorageDefaultCheckpoint>> = new Subject();\n public readonly instanceId = instanceId++;\n\n public closed?: Promise;\n\n constructor(\n public readonly databaseInstanceToken: string,\n public readonly storage: RxStorageLoki,\n public readonly databaseName: string,\n public readonly collectionName: string,\n public readonly schema: Readonly>>,\n public readonly internals: LokiStorageInternals,\n public readonly options: Readonly,\n public readonly databaseSettings: LokiDatabaseSettings\n ) {\n this.primaryPath = getPrimaryFieldOfPrimaryKey(this.schema.primaryKey);\n OPEN_LOKIJS_STORAGE_INSTANCES.add(this);\n if (this.internals.leaderElector) {\n\n\n /**\n * To run handleRemoteRequest(),\n * the instance will call its own methods.\n * But these methods could have already been swapped out by a RxStorageWrapper\n * so we must 
store the original methods here and use them instead.\n */\n const copiedSelf: RxStorageInstance = {\n bulkWrite: this.bulkWrite.bind(this),\n changeStream: this.changeStream.bind(this),\n cleanup: this.cleanup.bind(this),\n close: this.close.bind(this),\n query: this.query.bind(this),\n count: this.count.bind(this),\n findDocumentsById: this.findDocumentsById.bind(this),\n collectionName: this.collectionName,\n databaseName: this.databaseName,\n conflictResultionTasks: this.conflictResultionTasks.bind(this),\n getAttachmentData: this.getAttachmentData.bind(this),\n internals: this.internals,\n options: this.options,\n remove: this.remove.bind(this),\n resolveConflictResultionTask: this.resolveConflictResultionTask.bind(this),\n schema: this.schema\n };\n\n this.internals.leaderElector.awaitLeadership().then(() => {\n // this instance is leader now, so it has to reply to queries from other instances\n ensureNotFalsy(this.internals.leaderElector).broadcastChannel\n .addEventListener('message', (msg) => handleRemoteRequest(copiedSelf as any, msg));\n }).catch(() => { });\n }\n }\n\n async bulkWrite(\n documentWrites: BulkWriteRow[],\n context: string\n ): Promise> {\n\n\n if (\n !shownNonPremiumLog &&\n (\n !RXDB_UTILS_GLOBAL.premium ||\n typeof RXDB_UTILS_GLOBAL.premium !== 'string' ||\n (await defaultHashSha256(RXDB_UTILS_GLOBAL.premium) !== PREMIUM_FLAG_HASH)\n )\n ) {\n console.warn(\n [\n '-------------- RxDB Open Core RxStorage -------------------------------',\n 'You are using the free LokiJS based RxStorage implementation from RxDB https://rxdb.info/rx-storage-lokijs.html?console=loki ',\n 'While this is a great option, we want to let you know that there are faster storage solutions available in our premium plugins.',\n 'For professional users and production environments, we highly recommend considering these premium options to enhance performance and reliability.',\n ' https://rxdb.info/premium?console=loki ',\n 'If you already purchased premium access you 
can disable this log by calling the setPremiumFlag() function from rxdb-premium/plugins/shared.',\n '---------------------------------------------------------------------'\n ].join('\\n')\n );\n shownNonPremiumLog = true;\n } else {\n shownNonPremiumLog = true;\n }\n\n if (documentWrites.length === 0) {\n throw newRxError('P2', {\n args: {\n documentWrites\n }\n });\n }\n const localState = await mustUseLocalState(this);\n if (!localState) {\n return requestRemoteInstance(this, 'bulkWrite', [documentWrites]);\n }\n\n const ret: RxStorageBulkWriteResponse = {\n success: [],\n error: []\n };\n\n const docsInDb: Map[StringKeys], RxDocumentData> = new Map();\n const docsInDbWithLokiKey: Map<\n RxDocumentData[StringKeys],\n RxDocumentData & { $loki: number; }\n > = new Map();\n documentWrites.forEach(writeRow => {\n const id = writeRow.document[this.primaryPath];\n const documentInDb = localState.collection.by(this.primaryPath, id);\n if (documentInDb) {\n docsInDbWithLokiKey.set(id as any, documentInDb);\n docsInDb.set(id as any, stripLokiKey(documentInDb));\n }\n });\n\n const categorized = categorizeBulkWriteRows(\n this,\n this.primaryPath as any,\n docsInDb,\n documentWrites,\n context\n );\n ret.error = categorized.errors;\n\n categorized.bulkInsertDocs.forEach(writeRow => {\n localState.collection.insert(flatClone(writeRow.document));\n ret.success.push(writeRow.document);\n });\n categorized.bulkUpdateDocs.forEach(writeRow => {\n const docId = writeRow.document[this.primaryPath];\n const documentInDbWithLokiKey = getFromMapOrThrow(docsInDbWithLokiKey, docId as any);\n const writeDoc: any = Object.assign(\n {},\n writeRow.document,\n {\n $loki: documentInDbWithLokiKey.$loki\n }\n );\n localState.collection.update(writeDoc);\n ret.success.push(writeRow.document);\n });\n localState.databaseState.saveQueue.addWrite();\n\n if (categorized.eventBulk.events.length > 0) {\n const lastState = ensureNotFalsy(categorized.newestRow).document;\n 
categorized.eventBulk.checkpoint = {\n id: lastState[this.primaryPath],\n lwt: lastState._meta.lwt\n };\n categorized.eventBulk.endTime = now();\n this.changes$.next(categorized.eventBulk);\n }\n\n return ret;\n }\n async findDocumentsById(ids: string[], deleted: boolean): Promise[]> {\n const localState = await mustUseLocalState(this);\n if (!localState) {\n return requestRemoteInstance(this, 'findDocumentsById', [ids, deleted]);\n }\n\n const ret: RxDocumentData[] = [];\n ids.forEach(id => {\n const documentInDb = localState.collection.by(this.primaryPath, id);\n if (\n documentInDb &&\n (!documentInDb._deleted || deleted)\n ) {\n ret.push(stripLokiKey(documentInDb));\n }\n });\n return ret;\n }\n async query(preparedQueryOriginal: PreparedQuery): Promise> {\n const localState = await mustUseLocalState(this);\n if (!localState) {\n return requestRemoteInstance(this, 'query', [preparedQueryOriginal]);\n }\n\n let preparedQuery = ensureNotFalsy(preparedQueryOriginal.query);\n if (preparedQuery.selector) {\n preparedQuery = flatClone(preparedQuery);\n preparedQuery.selector = transformRegexToRegExp(preparedQuery.selector);\n }\n\n const query = preparedQueryOriginal.query;\n const skip = query.skip ? query.skip : 0;\n const limit = query.limit ? query.limit : Infinity;\n const skipPlusLimit = skip + limit;\n\n /**\n * LokiJS is not able to give correct results for some\n * operators, so we have to check all documents in that case\n * and laster apply skip and limit manually.\n * @link https://github.com/pubkey/rxdb/issues/5320\n */\n let mustRunMatcher = false;\n if (hasDeepProperty(preparedQuery.selector, '$in')) {\n mustRunMatcher = true;\n }\n\n\n let lokiQuery = localState.collection\n .chain()\n .find(mustRunMatcher ? 
{} : preparedQuery.selector);\n\n if (preparedQuery.sort) {\n lokiQuery = lokiQuery.sort(getLokiSortComparator(this.schema, preparedQuery));\n }\n\n\n let foundDocuments = lokiQuery.data().map((lokiDoc: any) => stripLokiKey(lokiDoc));\n\n\n /**\n * LokiJS returned wrong results on some queries\n * with complex indexes. Therefore we run the query-match\n * over all result docs to patch this bug.\n * TODO create an issue at the LokiJS repository.\n */\n const queryMatcher = getQueryMatcher(\n this.schema,\n preparedQuery as any\n );\n foundDocuments = foundDocuments.filter((d: any) => queryMatcher(d));\n\n // always apply offset and limit like this, because\n // sylvieQuery.offset() and sylvieQuery.limit() results were inconsistent\n foundDocuments = foundDocuments.slice(skip, skipPlusLimit);\n\n return {\n documents: foundDocuments\n };\n }\n async count(\n preparedQuery: PreparedQuery\n ): Promise {\n const result = await this.query(preparedQuery);\n return {\n count: result.documents.length,\n mode: 'fast'\n };\n }\n getAttachmentData(_documentId: string, _attachmentId: string, _digest: string): Promise {\n throw new Error('Attachments are not implemented in the lokijs RxStorage. 
Make a pull request.');\n }\n\n changeStream(): Observable>, RxStorageDefaultCheckpoint>> {\n return this.changes$.asObservable();\n }\n\n async cleanup(minimumDeletedTime: number): Promise {\n const localState = await mustUseLocalState(this);\n if (!localState) {\n return requestRemoteInstance(this, 'cleanup', [minimumDeletedTime]);\n }\n\n const deleteAmountPerRun = 10;\n const maxDeletionTime = now() - minimumDeletedTime;\n const query = localState.collection\n .chain()\n .find({\n _deleted: true,\n '_meta.lwt': {\n $lt: maxDeletionTime\n }\n }).limit(deleteAmountPerRun);\n const foundDocuments = query.data();\n if (foundDocuments.length > 0) {\n localState.collection.remove(foundDocuments);\n localState.databaseState.saveQueue.addWrite();\n }\n\n return foundDocuments.length !== deleteAmountPerRun;\n }\n\n async close(): Promise {\n if (this.closed) {\n return this.closed;\n }\n this.closed = (async () => {\n this.changes$.complete();\n OPEN_LOKIJS_STORAGE_INSTANCES.delete(this);\n if (this.internals.localState) {\n const localState = await this.internals.localState;\n const dbState = await getLokiDatabase(\n this.databaseName,\n this.databaseSettings\n );\n await dbState.saveQueue.run();\n await closeLokiCollections(\n this.databaseName,\n [\n localState.collection\n ]\n );\n }\n })();\n return this.closed;\n }\n async remove(): Promise {\n const localState = await mustUseLocalState(this);\n if (!localState) {\n return requestRemoteInstance(this, 'remove', []);\n }\n localState.databaseState.database.removeCollection(localState.collection.name);\n await localState.databaseState.saveQueue.run();\n return this.close();\n }\n\n conflictResultionTasks(): Observable> {\n return new Subject();\n }\n async resolveConflictResultionTask(_taskSolution: RxConflictResultionTaskSolution): Promise { }\n\n}\n\nexport async function createLokiLocalState(\n params: RxStorageInstanceCreationParams,\n databaseSettings: LokiDatabaseSettings\n): Promise {\n if (!params.options) 
{\n params.options = {};\n }\n\n const databaseState = await getLokiDatabase(\n params.databaseName,\n databaseSettings\n );\n\n /**\n * Construct loki indexes from RxJsonSchema indexes.\n * TODO what about compound indexes? Are they possible in lokijs?\n */\n const indices: string[] = [];\n if (params.schema.indexes) {\n params.schema.indexes.forEach(idx => {\n if (!isMaybeReadonlyArray(idx)) {\n indices.push(idx);\n }\n });\n }\n /**\n * LokiJS has no concept of custom primary key, they use a number-id that is generated.\n * To be able to query fast by primary key, we always add an index to the primary.\n */\n const primaryKey = getPrimaryFieldOfPrimaryKey(params.schema.primaryKey);\n indices.push(primaryKey as string);\n\n const lokiCollectionName = params.collectionName + '-' + params.schema.version;\n const collectionOptions: Partial = Object.assign(\n {},\n lokiCollectionName,\n {\n indices: indices as string[],\n unique: [primaryKey]\n } as any,\n LOKIJS_COLLECTION_DEFAULT_OPTIONS\n );\n\n const collection: any = databaseState.database.addCollection(\n lokiCollectionName,\n collectionOptions as any\n );\n databaseState.collections[params.collectionName] = collection;\n const ret: LokiLocalDatabaseState = {\n databaseState,\n collection\n };\n\n return ret;\n}\n\n\nexport async function createLokiStorageInstance(\n storage: RxStorageLoki,\n params: RxStorageInstanceCreationParams,\n databaseSettings: LokiDatabaseSettings\n): Promise> {\n const internals: LokiStorageInternals = {};\n\n const broadcastChannelRefObject: DeepReadonly = {};\n\n\n if (params.multiInstance) {\n const leaderElector = getLokiLeaderElector(\n params.databaseInstanceToken,\n broadcastChannelRefObject,\n params.databaseName\n );\n internals.leaderElector = leaderElector;\n } else {\n // optimisation shortcut, directly create db is non multi instance.\n internals.localState = createLokiLocalState(params, databaseSettings);\n await internals.localState;\n }\n\n const instance = new 
RxStorageInstanceLoki(\n params.databaseInstanceToken,\n storage,\n params.databaseName,\n params.collectionName,\n params.schema,\n internals,\n params.options,\n databaseSettings\n );\n\n await addRxStorageMultiInstanceSupport(\n RX_STORAGE_NAME_LOKIJS,\n params,\n instance,\n internals.leaderElector ? internals.leaderElector.broadcastChannel : undefined\n );\n\n if (params.multiInstance) {\n /**\n * Clean up the broadcast-channel reference on close()\n */\n const closeBefore = instance.close.bind(instance);\n instance.close = function () {\n removeBroadcastChannelReference(\n params.databaseInstanceToken,\n broadcastChannelRefObject\n );\n return closeBefore();\n };\n const removeBefore = instance.remove.bind(instance);\n instance.remove = function () {\n removeBroadcastChannelReference(\n params.databaseInstanceToken,\n broadcastChannelRefObject\n );\n return removeBefore();\n };\n\n /**\n * Directly create the localState when/if the db becomes leader.\n */\n ensureNotFalsy(internals.leaderElector)\n .awaitLeadership()\n .then(() => {\n if (!instance.closed) {\n mustUseLocalState(instance);\n }\n });\n }\n\n\n return 
instance;\n}\n"],"mappings":";;;;;;;;AAAA,IAAAA,KAAA,GAAAC,OAAA;AAIA,IAAAC,MAAA,GAAAD,OAAA;AAWA,IAAAE,QAAA,GAAAF,OAAA;AAwBA,IAAAG,aAAA,GAAAH,OAAA;AAeA,IAAAI,eAAA,GAAAJ,OAAA;AACA,IAAAK,gBAAA,GAAAL,OAAA;AACA,IAAAM,uBAAA,GAAAN,OAAA;AAIA,IAAAO,cAAA,GAAAP,OAAA;AAEA,IAAIQ,UAAU,GAAG,IAAAC,UAAG,EAAC,CAAC;AACtB,IAAIC,kBAAkB,GAAG,KAAK;AAAC,IAElBC,qBAAqB,GAAAC,OAAA,CAAAD,qBAAA;EAa9B,SAAAA,sBACoBE,qBAA6B,EAC7BC,OAAsB,EACtBC,YAAoB,EACpBC,cAAsB,EACtBC,MAAyD,EACzDC,SAA+B,EAC/BC,OAA+B,EAC/BC,gBAAsC,EACxD;IAAA,KAdMC,QAAQ,GAAoG,IAAIC,aAAO,CAAC,CAAC;IAAA,KACjHd,UAAU,GAAGA,UAAU,EAAE;IAAA,KAKrBK,qBAA6B,GAA7BA,qBAA6B;IAAA,KAC7BC,OAAsB,GAAtBA,OAAsB;IAAA,KACtBC,YAAoB,GAApBA,YAAoB;IAAA,KACpBC,cAAsB,GAAtBA,cAAsB;IAAA,KACtBC,MAAyD,GAAzDA,MAAyD;IAAA,KACzDC,SAA+B,GAA/BA,SAA+B;IAAA,KAC/BC,OAA+B,GAA/BA,OAA+B;IAAA,KAC/BC,gBAAsC,GAAtCA,gBAAsC;IAEtD,IAAI,CAACG,WAAW,GAAG,IAAAC,2CAA2B,EAAC,IAAI,CAACP,MAAM,CAACQ,UAAU,CAAC;IACtEC,2CAA6B,CAACC,GAAG,CAAC,IAAI,CAAC;IACvC,IAAI,IAAI,CAACT,SAAS,CAACU,aAAa,EAAE;MAG9B;AACZ;AACA;AACA;AACA;AACA;MACY,IAAMC,UAAkD,GAAG;QACvDC,SAAS,EAAE,IAAI,CAACA,SAAS,CAACC,IAAI,CAAC,IAAI,CAAC;QACpCC,YAAY,EAAE,IAAI,CAACA,YAAY,CAACD,IAAI,CAAC,IAAI,CAAC;QAC1CE,OAAO,EAAE,IAAI,CAACA,OAAO,CAACF,IAAI,CAAC,IAAI,CAAC;QAChCG,KAAK,EAAE,IAAI,CAACA,KAAK,CAACH,IAAI,CAAC,IAAI,CAAC;QAC5BI,KAAK,EAAE,IAAI,CAACA,KAAK,CAACJ,IAAI,CAAC,IAAI,CAAC;QAC5BK,KAAK,EAAE,IAAI,CAACA,KAAK,CAACL,IAAI,CAAC,IAAI,CAAC;QAC5BM,iBAAiB,EAAE,IAAI,CAACA,iBAAiB,CAACN,IAAI,CAAC,IAAI,CAAC;QACpDf,cAAc,EAAE,IAAI,CAACA,cAAc;QACnCD,YAAY,EAAE,IAAI,CAACA,YAAY;QAC/BuB,sBAAsB,EAAE,IAAI,CAACA,sBAAsB,CAACP,IAAI,CAAC,IAAI,CAAC;QAC9DQ,iBAAiB,EAAE,IAAI,CAACA,iBAAiB,CAACR,IAAI,CAAC,IAAI,CAAC;QACpDb,SAAS,EAAE,IAAI,CAACA,SAAS;QACzBC,OAAO,EAAE,IAAI,CAACA,OAAO;QACrBqB,MAAM,EAAE,IAAI,CAACA,MAAM,CAACT,IAAI,CAAC,IAAI,CAAC;QAC9BU,4BAA4B,EAAE,IAAI,CAACA,4BAA4B,CAACV,IAAI,CAAC,IAAI,CAAC;QAC1Ed,MAAM,EAAE,IAAI,CAACA;MACjB,CAAC;MAED,IAAI,CAACC,SAAS,CAACU,aAAa,CAACc,eAAe,CAAC,CAAC,CAACC,IAAI,CAAC,MAAM;QACtD;QACA,IAAAC,qBAAc,EAAC,IAAI,CAAC1B,SAAS,CAACU,aA
Aa,CAAC,CAACiB,gBAAgB,CACxDC,gBAAgB,CAAC,SAAS,EAAGC,GAAG,IAAK,IAAAC,iCAAmB,EAACnB,UAAU,EAASkB,GAAG,CAAC,CAAC;MAC1F,CAAC,CAAC,CAACE,KAAK,CAAC,MAAM,CAAE,CAAC,CAAC;IACvB;EACJ;EAAC,IAAAC,MAAA,GAAAvC,qBAAA,CAAAwC,SAAA;EAAAD,MAAA,CAEKpB,SAAS,GAAf,eAAAA,UACIsB,cAAyC,EACzCC,OAAe,EAC+B;IAG9C,IACI,CAAC3C,kBAAkB,KAEf,CAAC4C,wBAAiB,CAACC,OAAO,IAC1B,OAAOD,wBAAiB,CAACC,OAAO,KAAK,QAAQ,IAC5C,OAAM,IAAAC,wBAAiB,EAACF,wBAAiB,CAACC,OAAO,CAAC,MAAKE,wBAAkB,CAC7E,EACH;MACEC,OAAO,CAACC,IAAI,CACR,CACI,yEAAyE,EACzE,+HAA+H,EAC/H,iIAAiI,EACjI,mJAAmJ,EACnJ,0CAA0C,EAC1C,6IAA6I,EAC7I,uEAAuE,CAC1E,CAACC,IAAI,CAAC,IAAI,CACf,CAAC;MACDlD,kBAAkB,GAAG,IAAI;IAC7B,CAAC,MAAM;MACHA,kBAAkB,GAAG,IAAI;IAC7B;IAEA,IAAI0C,cAAc,CAACS,MAAM,KAAK,CAAC,EAAE;MAC7B,MAAM,IAAAC,mBAAU,EAAC,IAAI,EAAE;QACnBC,IAAI,EAAE;UACFX;QACJ;MACJ,CAAC,CAAC;IACN;IACA,IAAMY,UAAU,GAAG,MAAM,IAAAC,+BAAiB,EAAC,IAAI,CAAC;IAChD,IAAI,CAACD,UAAU,EAAE;MACb,OAAO,IAAAE,mCAAqB,EAAC,IAAI,EAAE,WAAW,EAAE,CAACd,cAAc,CAAC,CAAC;IACrE;IAEA,IAAMe,GAA0C,GAAG;MAC/CC,OAAO,EAAE,EAAE;MACXC,KAAK,EAAE;IACX,CAAC;IAED,IAAMC,QAA0F,GAAG,IAAIC,GAAG,CAAC,CAAC;IAC5G,IAAMC,mBAGL,GAAG,IAAID,GAAG,CAAC,CAAC;IACbnB,cAAc,CAACqB,OAAO,CAACC,QAAQ,IAAI;MAC/B,IAAMC,EAAE,GAAGD,QAAQ,CAACE,QAAQ,CAAC,IAAI,CAACrD,WAAW,CAAC;MAC9C,IAAMsD,YAAY,GAAGb,UAAU,CAACc,UAAU,CAACC,EAAE,CAAC,IAAI,CAACxD,WAAW,EAAEoD,EAAE,CAAC;MACnE,IAAIE,YAAY,EAAE;QACdL,mBAAmB,CAACQ,GAAG,CAACL,EAAE,EAASE,YAAY,CAAC;QAChDP,QAAQ,CAACU,GAAG,CAACL,EAAE,EAAS,IAAAM,0BAAY,EAACJ,YAAY,CAAC,CAAC;MACvD;IACJ,CAAC,CAAC;IAEF,IAAMK,WAAW,GAAG,IAAAC,wCAAuB,EACvC,IAAI,EACJ,IAAI,CAAC5D,WAAW,EAChB+C,QAAQ,EACRlB,cAAc,EACdC,OACJ,CAAC;IACDc,GAAG,CAACE,KAAK,GAAGa,WAAW,CAACE,MAAM;IAE9BF,WAAW,CAACG,cAAc,CAACZ,OAAO,CAACC,QAAQ,IAAI;MAC3CV,UAAU,CAACc,UAAU,CAACQ,MAAM,CAAC,IAAAC,gBAAS,EAACb,QAAQ,CAACE,QAAQ,CAAC,CAAC;MAC1DT,GAAG,CAACC,OAAO,CAACoB,IAAI,CAACd,QAAQ,CAACE,QAAQ,CAAC;IACvC,CAAC,CAAC;IACFM,WAAW,CAACO,cAAc,CAAChB,OAAO,CAACC,QAAQ,IAAI;MAC3C,IAAMgB,KAAK,GAAGhB,QAAQ,CAACE,QAAQ,CAAC,IAAI,CAACrD,WAAW,CAAC;MACjD,IAAMoE,uBAAuB,GAAG,IAAAC,wBAAiB,EAAC
pB,mBAAmB,EAAEkB,KAAY,CAAC;MACpF,IAAMG,QAAa,GAAGC,MAAM,CAACC,MAAM,CAC/B,CAAC,CAAC,EACFrB,QAAQ,CAACE,QAAQ,EACjB;QACIoB,KAAK,EAAEL,uBAAuB,CAACK;MACnC,CACJ,CAAC;MACDhC,UAAU,CAACc,UAAU,CAACmB,MAAM,CAACJ,QAAQ,CAAC;MACtC1B,GAAG,CAACC,OAAO,CAACoB,IAAI,CAACd,QAAQ,CAACE,QAAQ,CAAC;IACvC,CAAC,CAAC;IACFZ,UAAU,CAACkC,aAAa,CAACC,SAAS,CAACC,QAAQ,CAAC,CAAC;IAE7C,IAAIlB,WAAW,CAACmB,SAAS,CAACC,MAAM,CAACzC,MAAM,GAAG,CAAC,EAAE;MACzC,IAAM0C,SAAS,GAAG,IAAA3D,qBAAc,EAACsC,WAAW,CAACsB,SAAS,CAAC,CAAC5B,QAAQ;MAChEM,WAAW,CAACmB,SAAS,CAACI,UAAU,GAAG;QAC/B9B,EAAE,EAAE4B,SAAS,CAAC,IAAI,CAAChF,WAAW,CAAC;QAC/BmF,GAAG,EAAEH,SAAS,CAACI,KAAK,CAACD;MACzB,CAAC;MACDxB,WAAW,CAACmB,SAAS,CAACO,OAAO,GAAG,IAAAnG,UAAG,EAAC,CAAC;MACrC,IAAI,CAACY,QAAQ,CAACwF,IAAI,CAAC3B,WAAW,CAACmB,SAAS,CAAC;IAC7C;IAEA,OAAOlC,GAAG;EACd,CAAC;EAAAjB,MAAA,CACKb,iBAAiB,GAAvB,eAAAA,kBAAwByE,GAAa,EAAEC,OAAgB,EAAwC;IAC3F,IAAM/C,UAAU,GAAG,MAAM,IAAAC,+BAAiB,EAAC,IAAI,CAAC;IAChD,IAAI,CAACD,UAAU,EAAE;MACb,OAAO,IAAAE,mCAAqB,EAAC,IAAI,EAAE,mBAAmB,EAAE,CAAC4C,GAAG,EAAEC,OAAO,CAAC,CAAC;IAC3E;IAEA,IAAM5C,GAAgC,GAAG,EAAE;IAC3C2C,GAAG,CAACrC,OAAO,CAACE,EAAE,IAAI;MACd,IAAME,YAAY,GAAGb,UAAU,CAACc,UAAU,CAACC,EAAE,CAAC,IAAI,CAACxD,WAAW,EAAEoD,EAAE,CAAC;MACnE,IACIE,YAAY,KACX,CAACA,YAAY,CAACmC,QAAQ,IAAID,OAAO,CAAC,EACrC;QACE5C,GAAG,CAACqB,IAAI,CAAC,IAAAP,0BAAY,EAACJ,YAAY,CAAC,CAAC;MACxC;IACJ,CAAC,CAAC;IACF,OAAOV,GAAG;EACd,CAAC;EAAAjB,MAAA,CACKf,KAAK,GAAX,eAAAA,MAAY8E,qBAA+C,EAA4C;IACnG,IAAMjD,UAAU,GAAG,MAAM,IAAAC,+BAAiB,EAAC,IAAI,CAAC;IAChD,IAAI,CAACD,UAAU,EAAE;MACb,OAAO,IAAAE,mCAAqB,EAAC,IAAI,EAAE,OAAO,EAAE,CAAC+C,qBAAqB,CAAC,CAAC;IACxE;IAEA,IAAIC,aAAa,GAAG,IAAAtE,qBAAc,EAACqE,qBAAqB,CAAC9E,KAAK,CAAC;IAC/D,IAAI+E,aAAa,CAACC,QAAQ,EAAE;MACxBD,aAAa,GAAG,IAAA3B,gBAAS,EAAC2B,aAAa,CAAC;MACxCA,aAAa,CAACC,QAAQ,GAAG,IAAAC,oCAAsB,EAACF,aAAa,CAACC,QAAQ,CAAC;IAC3E;IAEA,IAAMhF,KAAK,GAAG8E,qBAAqB,CAAC9E,KAAK;IACzC,IAAMkF,IAAI,GAAGlF,KAAK,CAACkF,IAAI,GAAGlF,KAAK,CAACkF,IAAI,GAAG,CAAC;IACxC,IAAMC,KAAK,GAAGnF,KAAK,CAACmF,KAAK,GAAGnF,KAAK,CAACmF,KAAK,GAAGC,QAAQ;IAClD,
IAAMC,aAAa,GAAGH,IAAI,GAAGC,KAAK;;IAElC;AACR;AACA;AACA;AACA;AACA;IACQ,IAAIG,cAAc,GAAG,KAAK;IAC1B,IAAI,IAAAC,sBAAe,EAACR,aAAa,CAACC,QAAQ,EAAE,KAAK,CAAC,EAAE;MAChDM,cAAc,GAAG,IAAI;IACzB;IAGA,IAAIE,SAAS,GAAG3D,UAAU,CAACc,UAAU,CAChC8C,KAAK,CAAC,CAAC,CACPC,IAAI,CAACJ,cAAc,GAAG,CAAC,CAAC,GAAGP,aAAa,CAACC,QAAQ,CAAC;IAEvD,IAAID,aAAa,CAACY,IAAI,EAAE;MACpBH,SAAS,GAAGA,SAAS,CAACG,IAAI,CAAC,IAAAC,mCAAqB,EAAC,IAAI,CAAC9G,MAAM,EAAEiG,aAAa,CAAC,CAAC;IACjF;IAGA,IAAIc,cAAc,GAAGL,SAAS,CAACM,IAAI,CAAC,CAAC,CAACC,GAAG,CAAEC,OAAY,IAAK,IAAAlD,0BAAY,EAACkD,OAAO,CAAC,CAAC;;IAGlF;AACR;AACA;AACA;AACA;AACA;IACQ,IAAMC,YAAY,GAAG,IAAAC,8BAAe,EAChC,IAAI,CAACpH,MAAM,EACXiG,aACJ,CAAC;IACDc,cAAc,GAAGA,cAAc,CAACM,MAAM,CAAEC,CAAM,IAAKH,YAAY,CAACG,CAAC,CAAC,CAAC;;IAEnE;IACA;IACAP,cAAc,GAAGA,cAAc,CAACQ,KAAK,CAACnB,IAAI,EAAEG,aAAa,CAAC;IAE1D,OAAO;MACHiB,SAAS,EAAET;IACf,CAAC;EACL,CAAC;EAAA9E,MAAA,CACKd,KAAK,GAAX,eAAAA,MACI8E,aAAuC,EACV;IAC7B,IAAMwB,MAAM,GAAG,MAAM,IAAI,CAACvG,KAAK,CAAC+E,aAAa,CAAC;IAC9C,OAAO;MACH9E,KAAK,EAAEsG,MAAM,CAACD,SAAS,CAAC5E,MAAM;MAC9B8E,IAAI,EAAE;IACV,CAAC;EACL,CAAC;EAAAzF,MAAA,CACDX,iBAAiB,GAAjB,SAAAA,kBAAkBqG,WAAmB,EAAEC,aAAqB,EAAEC,OAAe,EAAmB;IAC5F,MAAM,IAAIC,KAAK,CAAC,+EAA+E,CAAC;EACpG,CAAC;EAAA7F,MAAA,CAEDlB,YAAY,GAAZ,SAAAA,aAAA,EAAmH;IAC/G,OAAO,IAAI,CAACX,QAAQ,CAAC2H,YAAY,CAAC,CAAC;EACvC,CAAC;EAAA9F,MAAA,CAEKjB,OAAO,GAAb,eAAAA,QAAcgH,kBAA0B,EAAoB;IACxD,IAAMjF,UAAU,GAAG,MAAM,IAAAC,+BAAiB,EAAC,IAAI,CAAC;IAChD,IAAI,CAACD,UAAU,EAAE;MACb,OAAO,IAAAE,mCAAqB,EAAC,IAAI,EAAE,SAAS,EAAE,CAAC+E,kBAAkB,CAAC,CAAC;IACvE;IAEA,IAAMC,kBAAkB,GAAG,EAAE;IAC7B,IAAMC,eAAe,GAAG,IAAA1I,UAAG,EAAC,CAAC,GAAGwI,kBAAkB;IAClD,IAAM9G,KAAK,GAAG6B,UAAU,CAACc,UAAU,CAC9B8C,KAAK,CAAC,CAAC,CACPC,IAAI,CAAC;MACFb,QAAQ,EAAE,IAAI;MACd,WAAW,EAAE;QACToC,GAAG,EAAED;MACT;IACJ,CAAC,CAAC,CAAC7B,KAAK,CAAC4B,kBAAkB,CAAC;IAChC,IAAMlB,cAAc,GAAG7F,KAAK,CAAC8F,IAAI,CAAC,CAAC;IACnC,IAAID,cAAc,CAACnE,MAAM,GAAG,CAAC,EAAE;MAC3BG,UAAU,CAACc,UAAU,CAACtC,MAAM,CAACwF,cAAc,CAAC;MAC5ChE,UAAU,CAACkC,aAAa,CAACC,SAAS,CAACC,QAAQ,CAAC,CAAC;IAC
jD;IAEA,OAAO4B,cAAc,CAACnE,MAAM,KAAKqF,kBAAkB;EACvD,CAAC;EAAAhG,MAAA,CAEKhB,KAAK,GAAX,eAAAA,MAAA,EAA6B;IACzB,IAAI,IAAI,CAACmH,MAAM,EAAE;MACb,OAAO,IAAI,CAACA,MAAM;IACtB;IACA,IAAI,CAACA,MAAM,GAAG,CAAC,YAAY;MACvB,IAAI,CAAChI,QAAQ,CAACiI,QAAQ,CAAC,CAAC;MACxB5H,2CAA6B,CAAC6H,MAAM,CAAC,IAAI,CAAC;MAC1C,IAAI,IAAI,CAACrI,SAAS,CAAC8C,UAAU,EAAE;QAC3B,IAAMA,UAAU,GAAG,MAAM,IAAI,CAAC9C,SAAS,CAAC8C,UAAU;QAClD,IAAMwF,OAAO,GAAG,MAAM,IAAAC,6BAAe,EACjC,IAAI,CAAC1I,YAAY,EACjB,IAAI,CAACK,gBACT,CAAC;QACD,MAAMoI,OAAO,CAACrD,SAAS,CAACuD,GAAG,CAAC,CAAC;QAC7B,MAAM,IAAAC,kCAAoB,EACtB,IAAI,CAAC5I,YAAY,EACjB,CACIiD,UAAU,CAACc,UAAU,CAE7B,CAAC;MACL;IACJ,CAAC,EAAE,CAAC;IACJ,OAAO,IAAI,CAACuE,MAAM;EACtB,CAAC;EAAAnG,MAAA,CACKV,MAAM,GAAZ,eAAAA,OAAA,EAA8B;IAC1B,IAAMwB,UAAU,GAAG,MAAM,IAAAC,+BAAiB,EAAC,IAAI,CAAC;IAChD,IAAI,CAACD,UAAU,EAAE;MACb,OAAO,IAAAE,mCAAqB,EAAC,IAAI,EAAE,QAAQ,EAAE,EAAE,CAAC;IACpD;IACAF,UAAU,CAACkC,aAAa,CAAC0D,QAAQ,CAACC,gBAAgB,CAAC7F,UAAU,CAACc,UAAU,CAACgF,IAAI,CAAC;IAC9E,MAAM9F,UAAU,CAACkC,aAAa,CAACC,SAAS,CAACuD,GAAG,CAAC,CAAC;IAC9C,OAAO,IAAI,CAACxH,KAAK,CAAC,CAAC;EACvB,CAAC;EAAAgB,MAAA,CAEDZ,sBAAsB,GAAtB,SAAAA,uBAAA,EAAyE;IACrE,OAAO,IAAIhB,aAAO,CAAC,CAAC;EACxB,CAAC;EAAA4B,MAAA,CACKT,4BAA4B,GAAlC,eAAAA,6BAAmCsH,aAAyD,EAAiB,CAAE,CAAC;EAAA,OAAApJ,qBAAA;AAAA;AAI7G,eAAeqJ,oBAAoBA,CACtCC,MAAgE,EAChE7I,gBAAsC,EACP;EAC/B,IAAI,CAAC6I,MAAM,CAAC9I,OAAO,EAAE;IACjB8I,MAAM,CAAC9I,OAAO,GAAG,CAAC,CAAC;EACvB;EAEA,IAAM+E,aAAa,GAAG,MAAM,IAAAuD,6BAAe,EACvCQ,MAAM,CAAClJ,YAAY,EACnBK,gBACJ,CAAC;;EAED;AACJ;AACA;AACA;EACI,IAAM8I,OAAiB,GAAG,EAAE;EAC5B,IAAID,MAAM,CAAChJ,MAAM,CAACkJ,OAAO,EAAE;IACvBF,MAAM,CAAChJ,MAAM,CAACkJ,OAAO,CAAC1F,OAAO,CAAC2F,GAAG,IAAI;MACjC,IAAI,CAAC,IAAAC,2BAAoB,EAACD,GAAG,CAAC,EAAE;QAC5BF,OAAO,CAAC1E,IAAI,CAAC4E,GAAG,CAAC;MACrB;IACJ,CAAC,CAAC;EACN;EACA;AACJ;AACA;AACA;EACI,IAAM3I,UAAU,GAAG,IAAAD,2CAA2B,EAACyI,MAAM,CAAChJ,MAAM,CAACQ,UAAU,CAAC;EACxEyI,OAAO,CAAC1E,IAAI,CAAC/D,UAAoB,CAAC;EAElC,IAAM6I,kBAAkB,GAAGL,MAAM,CAACjJ,cAAc,GAAG,GAAG,GAAGiJ,MAAM,CAAChJ,MAAM,CAACsJ,OAAO;EAC9E,IAAMC,iB
AA+B,GAAG1E,MAAM,CAACC,MAAM,CACjD,CAAC,CAAC,EACFuE,kBAAkB,EAClB;IACIJ,OAAO,EAAEA,OAAmB;IAC5BO,MAAM,EAAE,CAAChJ,UAAU;EACvB,CAAC,EACDiJ,+CACJ,CAAC;EAED,IAAM5F,UAAe,GAAGoB,aAAa,CAAC0D,QAAQ,CAACe,aAAa,CACxDL,kBAAkB,EAClBE,iBACJ,CAAC;EACDtE,aAAa,CAAC0E,WAAW,CAACX,MAAM,CAACjJ,cAAc,CAAC,GAAG8D,UAAU;EAC7D,IAAMX,GAA2B,GAAG;IAChC+B,aAAa;IACbpB;EACJ,CAAC;EAED,OAAOX,GAAG;AACd;AAGO,eAAe0G,yBAAyBA,CAC3C/J,OAAsB,EACtBmJ,MAAgE,EAChE7I,gBAAsC,EACG;EACzC,IAAMF,SAA+B,GAAG,CAAC,CAAC;EAE1C,IAAM4J,yBAA4C,GAAG,CAAC,CAAC;EAGvD,IAAIb,MAAM,CAACc,aAAa,EAAE;IACtB,IAAMnJ,aAAa,GAAG,IAAAoJ,kCAAoB,EACtCf,MAAM,CAACpJ,qBAAqB,EAC5BiK,yBAAyB,EACzBb,MAAM,CAAClJ,YACX,CAAC;IACDG,SAAS,CAACU,aAAa,GAAGA,aAAa;EAC3C,CAAC,MAAM;IACH;IACAV,SAAS,CAAC8C,UAAU,GAAGgG,oBAAoB,CAACC,MAAM,EAAE7I,gBAAgB,CAAC;IACrE,MAAMF,SAAS,CAAC8C,UAAU;EAC9B;EAEA,IAAMiH,QAAQ,GAAG,IAAItK,qBAAqB,CACtCsJ,MAAM,CAACpJ,qBAAqB,EAC5BC,OAAO,EACPmJ,MAAM,CAAClJ,YAAY,EACnBkJ,MAAM,CAACjJ,cAAc,EACrBiJ,MAAM,CAAChJ,MAAM,EACbC,SAAS,EACT+I,MAAM,CAAC9I,OAAO,EACdC,gBACJ,CAAC;EAED,MAAM,IAAA8J,wDAAgC,EAClCC,oCAAsB,EACtBlB,MAAM,EACNgB,QAAQ,EACR/J,SAAS,CAACU,aAAa,GAAGV,SAAS,CAACU,aAAa,CAACiB,gBAAgB,GAAGuI,SACzE,CAAC;EAED,IAAInB,MAAM,CAACc,aAAa,EAAE;IACtB;AACR;AACA;IACQ,IAAMM,WAAW,GAAGJ,QAAQ,CAAC/I,KAAK,CAACH,IAAI,CAACkJ,QAAQ,CAAC;IACjDA,QAAQ,CAAC/I,KAAK,GAAG,YAAY;MACzB,IAAAoJ,uDAA+B,EAC3BrB,MAAM,CAACpJ,qBAAqB,EAC5BiK,yBACJ,CAAC;MACD,OAAOO,WAAW,CAAC,CAAC;IACxB,CAAC;IACD,IAAME,YAAY,GAAGN,QAAQ,CAACzI,MAAM,CAACT,IAAI,CAACkJ,QAAQ,CAAC;IACnDA,QAAQ,CAACzI,MAAM,GAAG,YAAY;MAC1B,IAAA8I,uDAA+B,EAC3BrB,MAAM,CAACpJ,qBAAqB,EAC5BiK,yBACJ,CAAC;MACD,OAAOS,YAAY,CAAC,CAAC;IACzB,CAAC;;IAED;AACR;AACA;IACQ,IAAA3I,qBAAc,EAAC1B,SAAS,CAACU,aAAa,CAAC,CAClCc,eAAe,CAAC,CAAC,CACjBC,IAAI,CAAC,MAAM;MACR,IAAI,CAACsI,QAAQ,CAAC5B,MAAM,EAAE;QAClB,IAAApF,+BAAiB,EAACgH,QAAQ,CAAC;MAC/B;IACJ,CAAC,CAAC;EACV;EAGA,OAAOA,QAAQ;AACnB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-lokijs/rx-storage-lokijs.js b/dist/cjs/plugins/storage-lokijs/rx-storage-lokijs.js 
deleted file mode 100644 index 1479ed50551..00000000000 --- a/dist/cjs/plugins/storage-lokijs/rx-storage-lokijs.js +++ /dev/null @@ -1,40 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxStorageLoki = void 0; -exports.getRxStorageLoki = getRxStorageLoki; -var _rxStorageInstanceLoki = require("./rx-storage-instance-loki.js"); -var _lokijsHelper = require("./lokijs-helper.js"); -var _rxStorageHelper = require("../../rx-storage-helper.js"); -var _utilsRxdbVersion = require("../utils/utils-rxdb-version.js"); -var RxStorageLoki = exports.RxStorageLoki = /*#__PURE__*/function () { - /** - * Create one leader elector by db name. - * This is done inside of the storage, not globally - * to make it easier to test multi-tab behavior. - */ - - function RxStorageLoki(databaseSettings) { - this.name = _lokijsHelper.RX_STORAGE_NAME_LOKIJS; - this.rxdbVersion = _utilsRxdbVersion.RXDB_VERSION; - this.leaderElectorByLokiDbName = new Map(); - this.databaseSettings = databaseSettings; - } - var _proto = RxStorageLoki.prototype; - _proto.createStorageInstance = function createStorageInstance(params) { - (0, _rxStorageHelper.ensureRxStorageInstanceParamsAreCorrect)(params); - return (0, _rxStorageInstanceLoki.createLokiStorageInstance)(this, params, this.databaseSettings); - }; - return RxStorageLoki; -}(); -/** - * @deprecated The lokijs RxStorage is deprecated, more info at: - * @link https://rxdb.info/rx-storage-lokijs.html - */ -function getRxStorageLoki(databaseSettings = {}) { - var storage = new RxStorageLoki(databaseSettings); - return storage; -} -//# sourceMappingURL=rx-storage-lokijs.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-lokijs/rx-storage-lokijs.js.map b/dist/cjs/plugins/storage-lokijs/rx-storage-lokijs.js.map deleted file mode 100644 index 17b46bedc91..00000000000 --- a/dist/cjs/plugins/storage-lokijs/rx-storage-lokijs.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"rx-storage-lokijs.js","names":["_rxStorageInstanceLoki","require","_lokijsHelper","_rxStorageHelper","_utilsRxdbVersion","RxStorageLoki","exports","databaseSettings","name","RX_STORAGE_NAME_LOKIJS","rxdbVersion","RXDB_VERSION","leaderElectorByLokiDbName","Map","_proto","prototype","createStorageInstance","params","ensureRxStorageInstanceParamsAreCorrect","createLokiStorageInstance","getRxStorageLoki","storage"],"sources":["../../../../src/plugins/storage-lokijs/rx-storage-lokijs.ts"],"sourcesContent":["import type {\n LokiDatabaseSettings,\n LokiSettings,\n LokiStorageInternals,\n RxStorage,\n RxStorageInstanceCreationParams\n} from '../../types/index.d.ts';\nimport {\n createLokiStorageInstance,\n RxStorageInstanceLoki\n} from './rx-storage-instance-loki.ts';\nimport { RX_STORAGE_NAME_LOKIJS } from './lokijs-helper.ts';\nimport type { LeaderElector } from 'broadcast-channel';\n\nimport { ensureRxStorageInstanceParamsAreCorrect } from '../../rx-storage-helper.ts';\nimport { RXDB_VERSION } from '../utils/utils-rxdb-version.ts';\n\nexport class RxStorageLoki implements RxStorage {\n public name = RX_STORAGE_NAME_LOKIJS;\n public readonly rxdbVersion = RXDB_VERSION;\n\n /**\n * Create one leader elector by db name.\n * This is done inside of the storage, not globally\n * to make it easier to test multi-tab behavior.\n */\n public leaderElectorByLokiDbName: Map = new Map();\n\n constructor(\n public databaseSettings: LokiDatabaseSettings\n ) { }\n\n public createStorageInstance(\n params: RxStorageInstanceCreationParams\n ): Promise> {\n ensureRxStorageInstanceParamsAreCorrect(params);\n return createLokiStorageInstance(this, params, this.databaseSettings);\n }\n}\n\n/**\n * @deprecated The lokijs RxStorage is deprecated, more info at:\n * @link https://rxdb.info/rx-storage-lokijs.html\n */\nexport function getRxStorageLoki(\n databaseSettings: LokiDatabaseSettings = {}\n): RxStorageLoki {\n const storage = new RxStorageLoki(databaseSettings);\n 
return storage;\n}\n"],"mappings":";;;;;;;AAOA,IAAAA,sBAAA,GAAAC,OAAA;AAIA,IAAAC,aAAA,GAAAD,OAAA;AAGA,IAAAE,gBAAA,GAAAF,OAAA;AACA,IAAAG,iBAAA,GAAAH,OAAA;AAA8D,IAEjDI,aAAa,GAAAC,OAAA,CAAAD,aAAA;EAItB;AACJ;AACA;AACA;AACA;;EAUI,SAAAA,cACWE,gBAAsC,EAC/C;IAAA,KAnBKC,IAAI,GAAGC,oCAAsB;IAAA,KACpBC,WAAW,GAAGC,8BAAY;IAAA,KAOnCC,yBAAyB,GAO3B,IAAIC,GAAG,CAAC,CAAC;IAAA,KAGHN,gBAAsC,GAAtCA,gBAAsC;EAC7C;EAAC,IAAAO,MAAA,GAAAT,aAAA,CAAAU,SAAA;EAAAD,MAAA,CAEEE,qBAAqB,GAA5B,SAAAA,sBACIC,MAAgE,EACvB;IACzC,IAAAC,wDAAuC,EAACD,MAAM,CAAC;IAC/C,OAAO,IAAAE,gDAAyB,EAAC,IAAI,EAAEF,MAAM,EAAE,IAAI,CAACV,gBAAgB,CAAC;EACzE,CAAC;EAAA,OAAAF,aAAA;AAAA;AAGL;AACA;AACA;AACA;AACO,SAASe,gBAAgBA,CAC5Bb,gBAAsC,GAAG,CAAC,CAAC,EAC9B;EACb,IAAMc,OAAO,GAAG,IAAIhB,aAAa,CAACE,gBAAgB,CAAC;EACnD,OAAOc,OAAO;AAClB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-memory/binary-search-bounds.js b/dist/cjs/plugins/storage-memory/binary-search-bounds.js deleted file mode 100644 index 5eccf4b6876..00000000000 --- a/dist/cjs/plugins/storage-memory/binary-search-bounds.js +++ /dev/null @@ -1,116 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.boundEQ = boundEQ; -exports.boundGE = boundGE; -exports.boundGT = boundGT; -exports.boundLE = boundLE; -exports.boundLT = boundLT; -/** - * Everything in this file was copied and adapted from - * @link https://github.com/mikolalysenko/binary-search-bounds - * - * TODO We should use the original npm module instead when this bug is fixed: - * @link https://github.com/mikolalysenko/binary-search-bounds/pull/14 - */ - -function ge(a, y, c, l, h) { - var i = h + 1; - while (l <= h) { - var m = l + h >>> 1; - var x = a[m]; - var p = c !== undefined ? c(x, y) : x - y; - if (p >= 0) { - i = m; - h = m - 1; - } else { - l = m + 1; - } - } - return i; -} -function gt(a, y, c, l, h) { - var i = h + 1; - while (l <= h) { - var m = l + h >>> 1; - var x = a[m]; - var p = c !== undefined ? 
c(x, y) : x - y; - if (p > 0) { - i = m; - h = m - 1; - } else { - l = m + 1; - } - } - return i; -} -function lt(a, y, c, l, h) { - var i = l - 1; - while (l <= h) { - var m = l + h >>> 1, - x = a[m]; - var p = c !== undefined ? c(x, y) : x - y; - if (p < 0) { - i = m; - l = m + 1; - } else { - h = m - 1; - } - } - return i; -} -function le(a, y, c, l, h) { - var i = l - 1; - while (l <= h) { - var m = l + h >>> 1, - x = a[m]; - var p = c !== undefined ? c(x, y) : x - y; - if (p <= 0) { - i = m; - l = m + 1; - } else { - h = m - 1; - } - } - return i; -} -function eq(a, y, c, l, h) { - while (l <= h) { - var m = l + h >>> 1, - x = a[m]; - var p = c !== undefined ? c(x, y) : x - y; - if (p === 0) { - return m; - } - if (p <= 0) { - l = m + 1; - } else { - h = m - 1; - } - } - return -1; -} -function norm(a, y, c, l, h, f) { - if (typeof c === 'function') { - return f(a, y, c, l === undefined ? 0 : l | 0, h === undefined ? a.length - 1 : h | 0); - } - return f(a, y, undefined, c === undefined ? 0 : c | 0, l === undefined ? 
a.length - 1 : l | 0); -} -function boundGE(a, y, c, l, h) { - return norm(a, y, c, l, h, ge); -} -function boundGT(a, y, c, l, h) { - return norm(a, y, c, l, h, gt); -} -function boundLT(a, y, c, l, h) { - return norm(a, y, c, l, h, lt); -} -function boundLE(a, y, c, l, h) { - return norm(a, y, c, l, h, le); -} -function boundEQ(a, y, c, l, h) { - return norm(a, y, c, l, h, eq); -} -//# sourceMappingURL=binary-search-bounds.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-memory/binary-search-bounds.js.map b/dist/cjs/plugins/storage-memory/binary-search-bounds.js.map deleted file mode 100644 index 6935911e6b2..00000000000 --- a/dist/cjs/plugins/storage-memory/binary-search-bounds.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"binary-search-bounds.js","names":["ge","a","y","c","l","h","i","m","x","p","undefined","gt","lt","le","eq","norm","f","length","boundGE","boundGT","boundLT","boundLE","boundEQ"],"sources":["../../../../src/plugins/storage-memory/binary-search-bounds.ts"],"sourcesContent":["/**\n * Everything in this file was copied and adapted from\n * @link https://github.com/mikolalysenko/binary-search-bounds\n *\n * TODO We should use the original npm module instead when this bug is fixed:\n * @link https://github.com/mikolalysenko/binary-search-bounds/pull/14\n */\n\n\n\ntype Compare = ((a: T, b: T) => number | null | undefined);\n\nfunction ge(a: T[], y: T, c: Compare, l?: any, h?: any): number {\n let i: number = h + 1;\n while (l <= h) {\n const m = (l + h) >>> 1;\n const x: any = a[m];\n const p: any = (c !== undefined) ? c(x, y) : (x - (y as any));\n if (p >= 0) {\n i = m; h = m - 1;\n } else {\n l = m + 1;\n }\n }\n return i;\n}\n\nfunction gt(a: T[], y: T, c: Compare, l?: any, h?: any): number {\n let i = h + 1;\n while (l <= h) {\n const m = (l + h) >>> 1;\n const x = a[m];\n const p: any = (c !== undefined) ? 
c(x, y) : ((x as any) - (y as any));\n if (p > 0) {\n i = m; h = m - 1;\n } else {\n l = m + 1;\n }\n }\n return i;\n}\n\nfunction lt(a: T[], y: T, c: Compare, l?: any, h?: any): number {\n let i = l - 1;\n while (l <= h) {\n const m = (l + h) >>> 1, x = a[m];\n const p: any = (c !== undefined) ? c(x, y) : ((x as any) - (y as any));\n if (p < 0) {\n i = m; l = m + 1;\n } else {\n h = m - 1;\n }\n }\n return i;\n}\n\nfunction le(a: T[], y: T, c: Compare, l?: any, h?: any): number {\n let i = l - 1;\n while (l <= h) {\n const m = (l + h) >>> 1, x = a[m];\n const p: any = (c !== undefined) ? c(x, y) : ((x as any) - (y as any));\n if (p <= 0) {\n i = m; l = m + 1;\n } else {\n h = m - 1;\n }\n }\n return i;\n}\n\nfunction eq(a: T[], y: T, c: Compare, l?: any, h?: any): number {\n while (l <= h) {\n const m = (l + h) >>> 1, x = a[m];\n const p: any = (c !== undefined) ? c(x, y) : ((x as any) - (y as any));\n if (p === 0) {\n return m;\n }\n if (p <= 0) {\n l = m + 1;\n } else {\n h = m - 1;\n }\n }\n return -1;\n}\n\nfunction norm(a: T[], y: T, c: Compare, l: any, h: any, f: any) {\n if (typeof c === 'function') {\n return f(a, y, c, (l === undefined) ? 0 : l | 0, (h === undefined) ? a.length - 1 : h | 0);\n }\n return f(a, y, undefined, (c === undefined) ? 0 : c | 0, (l === undefined) ? 
a.length - 1 : l | 0);\n}\n\n\nexport function boundGE(a: T[], y: T, c: Compare, l?: any, h?: any) {\n return norm(a, y, c, l, h, ge);\n}\nexport function boundGT(a: T[], y: T, c: Compare, l?: any, h?: any) {\n return norm(a, y, c, l, h, gt);\n}\nexport function boundLT(a: T[], y: T, c: Compare, l?: any, h?: any) {\n return norm(a, y, c, l, h, lt);\n}\nexport function boundLE(a: T[], y: T, c: Compare, l?: any, h?: any) {\n return norm(a, y, c, l, h, le);\n}\nexport function boundEQ(a: T[], y: T, c: Compare, l?: any, h?: any) {\n return norm(a, y, c, l, h, eq);\n}\n"],"mappings":";;;;;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;;AAMA,SAASA,EAAEA,CAAIC,CAAM,EAAEC,CAAI,EAAEC,CAAa,EAAEC,CAAO,EAAEC,CAAO,EAAU;EAClE,IAAIC,CAAS,GAAGD,CAAC,GAAG,CAAC;EACrB,OAAOD,CAAC,IAAIC,CAAC,EAAE;IACX,IAAME,CAAC,GAAIH,CAAC,GAAGC,CAAC,KAAM,CAAC;IACvB,IAAMG,CAAM,GAAGP,CAAC,CAACM,CAAC,CAAC;IACnB,IAAME,CAAM,GAAIN,CAAC,KAAKO,SAAS,GAAIP,CAAC,CAACK,CAAC,EAAEN,CAAC,CAAC,GAAIM,CAAC,GAAIN,CAAU;IAC7D,IAAIO,CAAC,IAAI,CAAC,EAAE;MACRH,CAAC,GAAGC,CAAC;MAAEF,CAAC,GAAGE,CAAC,GAAG,CAAC;IACpB,CAAC,MAAM;MACHH,CAAC,GAAGG,CAAC,GAAG,CAAC;IACb;EACJ;EACA,OAAOD,CAAC;AACZ;AAEA,SAASK,EAAEA,CAAIV,CAAM,EAAEC,CAAI,EAAEC,CAAa,EAAEC,CAAO,EAAEC,CAAO,EAAU;EAClE,IAAIC,CAAC,GAAGD,CAAC,GAAG,CAAC;EACb,OAAOD,CAAC,IAAIC,CAAC,EAAE;IACX,IAAME,CAAC,GAAIH,CAAC,GAAGC,CAAC,KAAM,CAAC;IACvB,IAAMG,CAAC,GAAGP,CAAC,CAACM,CAAC,CAAC;IACd,IAAME,CAAM,GAAIN,CAAC,KAAKO,SAAS,GAAIP,CAAC,CAACK,CAAC,EAAEN,CAAC,CAAC,GAAKM,CAAC,GAAYN,CAAU;IACtE,IAAIO,CAAC,GAAG,CAAC,EAAE;MACPH,CAAC,GAAGC,CAAC;MAAEF,CAAC,GAAGE,CAAC,GAAG,CAAC;IACpB,CAAC,MAAM;MACHH,CAAC,GAAGG,CAAC,GAAG,CAAC;IACb;EACJ;EACA,OAAOD,CAAC;AACZ;AAEA,SAASM,EAAEA,CAAIX,CAAM,EAAEC,CAAI,EAAEC,CAAa,EAAEC,CAAO,EAAEC,CAAO,EAAU;EAClE,IAAIC,CAAC,GAAGF,CAAC,GAAG,CAAC;EACb,OAAOA,CAAC,IAAIC,CAAC,EAAE;IACX,IAAME,CAAC,GAAIH,CAAC,GAAGC,CAAC,KAAM,CAAC;MAAEG,CAAC,GAAGP,CAAC,CAACM,CAAC,CAAC;IACjC,IAAME,CAAM,GAAIN,CAAC,KAAKO,SAAS,GAAIP,CAAC,CAACK,CAAC,EAAEN,CAAC,CAAC,GAAKM,CAAC,GAAYN,CAAU;IACtE,IAAIO,CAAC,GAAG,CAAC,EAA
E;MACPH,CAAC,GAAGC,CAAC;MAAEH,CAAC,GAAGG,CAAC,GAAG,CAAC;IACpB,CAAC,MAAM;MACHF,CAAC,GAAGE,CAAC,GAAG,CAAC;IACb;EACJ;EACA,OAAOD,CAAC;AACZ;AAEA,SAASO,EAAEA,CAAIZ,CAAM,EAAEC,CAAI,EAAEC,CAAa,EAAEC,CAAO,EAAEC,CAAO,EAAU;EAClE,IAAIC,CAAC,GAAGF,CAAC,GAAG,CAAC;EACb,OAAOA,CAAC,IAAIC,CAAC,EAAE;IACX,IAAME,CAAC,GAAIH,CAAC,GAAGC,CAAC,KAAM,CAAC;MAAEG,CAAC,GAAGP,CAAC,CAACM,CAAC,CAAC;IACjC,IAAME,CAAM,GAAIN,CAAC,KAAKO,SAAS,GAAIP,CAAC,CAACK,CAAC,EAAEN,CAAC,CAAC,GAAKM,CAAC,GAAYN,CAAU;IACtE,IAAIO,CAAC,IAAI,CAAC,EAAE;MACRH,CAAC,GAAGC,CAAC;MAAEH,CAAC,GAAGG,CAAC,GAAG,CAAC;IACpB,CAAC,MAAM;MACHF,CAAC,GAAGE,CAAC,GAAG,CAAC;IACb;EACJ;EACA,OAAOD,CAAC;AACZ;AAEA,SAASQ,EAAEA,CAAIb,CAAM,EAAEC,CAAI,EAAEC,CAAa,EAAEC,CAAO,EAAEC,CAAO,EAAU;EAClE,OAAOD,CAAC,IAAIC,CAAC,EAAE;IACX,IAAME,CAAC,GAAIH,CAAC,GAAGC,CAAC,KAAM,CAAC;MAAEG,CAAC,GAAGP,CAAC,CAACM,CAAC,CAAC;IACjC,IAAME,CAAM,GAAIN,CAAC,KAAKO,SAAS,GAAIP,CAAC,CAACK,CAAC,EAAEN,CAAC,CAAC,GAAKM,CAAC,GAAYN,CAAU;IACtE,IAAIO,CAAC,KAAK,CAAC,EAAE;MACT,OAAOF,CAAC;IACZ;IACA,IAAIE,CAAC,IAAI,CAAC,EAAE;MACRL,CAAC,GAAGG,CAAC,GAAG,CAAC;IACb,CAAC,MAAM;MACHF,CAAC,GAAGE,CAAC,GAAG,CAAC;IACb;EACJ;EACA,OAAO,CAAC,CAAC;AACb;AAEA,SAASQ,IAAIA,CAAId,CAAM,EAAEC,CAAI,EAAEC,CAAa,EAAEC,CAAM,EAAEC,CAAM,EAAEW,CAAM,EAAE;EAClE,IAAI,OAAOb,CAAC,KAAK,UAAU,EAAE;IACzB,OAAOa,CAAC,CAACf,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAGC,CAAC,KAAKM,SAAS,GAAI,CAAC,GAAGN,CAAC,GAAG,CAAC,EAAGC,CAAC,KAAKK,SAAS,GAAIT,CAAC,CAACgB,MAAM,GAAG,CAAC,GAAGZ,CAAC,GAAG,CAAC,CAAC;EAC9F;EACA,OAAOW,CAAC,CAACf,CAAC,EAAEC,CAAC,EAAEQ,SAAS,EAAGP,CAAC,KAAKO,SAAS,GAAI,CAAC,GAAGP,CAAC,GAAG,CAAC,EAAGC,CAAC,KAAKM,SAAS,GAAIT,CAAC,CAACgB,MAAM,GAAG,CAAC,GAAGb,CAAC,GAAG,CAAC,CAAC;AACtG;AAGO,SAASc,OAAOA,CAAIjB,CAAM,EAAEC,CAAI,EAAEC,CAAa,EAAEC,CAAO,EAAEC,CAAO,EAAE;EACtE,OAAOU,IAAI,CAACd,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEL,EAAE,CAAC;AAClC;AACO,SAASmB,OAAOA,CAAIlB,CAAM,EAAEC,CAAI,EAAEC,CAAa,EAAEC,CAAO,EAAEC,CAAO,EAAE;EACtE,OAAOU,IAAI,CAACd,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEM,EAAE,CAAC;AAClC;AACO,SAASS,OAAOA,CAAInB,CA
AM,EAAEC,CAAI,EAAEC,CAAa,EAAEC,CAAO,EAAEC,CAAO,EAAE;EACtE,OAAOU,IAAI,CAACd,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEO,EAAE,CAAC;AAClC;AACO,SAASS,OAAOA,CAAIpB,CAAM,EAAEC,CAAI,EAAEC,CAAa,EAAEC,CAAO,EAAEC,CAAO,EAAE;EACtE,OAAOU,IAAI,CAACd,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEQ,EAAE,CAAC;AAClC;AACO,SAASS,OAAOA,CAAIrB,CAAM,EAAEC,CAAI,EAAEC,CAAa,EAAEC,CAAO,EAAEC,CAAO,EAAE;EACtE,OAAOU,IAAI,CAACd,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAES,EAAE,CAAC;AAClC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-memory/index.js b/dist/cjs/plugins/storage-memory/index.js deleted file mode 100644 index 12b77d9bf86..00000000000 --- a/dist/cjs/plugins/storage-memory/index.js +++ /dev/null @@ -1,91 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _exportNames = { - getRxStorageMemory: true -}; -exports.getRxStorageMemory = getRxStorageMemory; -var _rxStorageHelper = require("../../rx-storage-helper.js"); -var _utilsRxdbVersion = require("../utils/utils-rxdb-version.js"); -var _rxStorageInstanceMemory = require("./rx-storage-instance-memory.js"); -Object.keys(_rxStorageInstanceMemory).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _rxStorageInstanceMemory[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxStorageInstanceMemory[key]; - } - }); -}); -var _memoryHelper = require("./memory-helper.js"); -Object.keys(_memoryHelper).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _memoryHelper[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _memoryHelper[key]; - } - }); -}); 
-var _binarySearchBounds = require("./binary-search-bounds.js"); -Object.keys(_binarySearchBounds).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _binarySearchBounds[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _binarySearchBounds[key]; - } - }); -}); -var _memoryTypes = require("./memory-types.js"); -Object.keys(_memoryTypes).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _memoryTypes[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _memoryTypes[key]; - } - }); -}); -var _memoryIndexes = require("./memory-indexes.js"); -Object.keys(_memoryIndexes).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _memoryIndexes[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _memoryIndexes[key]; - } - }); -}); -/** - * Keep the state even when the storage instance is closed. - * This makes it easier to use the memory storage - * to test filesystem-like and multiInstance behaviors. 
- */ -var COLLECTION_STATES = new Map(); -function getRxStorageMemory(settings = {}) { - var storage = { - name: 'memory', - rxdbVersion: _utilsRxdbVersion.RXDB_VERSION, - collectionStates: COLLECTION_STATES, - createStorageInstance(params) { - (0, _rxStorageHelper.ensureRxStorageInstanceParamsAreCorrect)(params); - var useSettings = Object.assign({}, settings, params.options); - return (0, _rxStorageInstanceMemory.createMemoryStorageInstance)(this, params, useSettings); - } - }; - return storage; -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-memory/index.js.map b/dist/cjs/plugins/storage-memory/index.js.map deleted file mode 100644 index 7b94c9b39af..00000000000 --- a/dist/cjs/plugins/storage-memory/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_rxStorageHelper","require","_utilsRxdbVersion","_rxStorageInstanceMemory","Object","keys","forEach","key","prototype","hasOwnProperty","call","_exportNames","exports","defineProperty","enumerable","get","_memoryHelper","_binarySearchBounds","_memoryTypes","_memoryIndexes","COLLECTION_STATES","Map","getRxStorageMemory","settings","storage","name","rxdbVersion","RXDB_VERSION","collectionStates","createStorageInstance","params","ensureRxStorageInstanceParamsAreCorrect","useSettings","assign","options","createMemoryStorageInstance"],"sources":["../../../../src/plugins/storage-memory/index.ts"],"sourcesContent":["import { ensureRxStorageInstanceParamsAreCorrect } from '../../rx-storage-helper.ts';\nimport type { RxStorageInstanceCreationParams } from '../../types/index.d.ts';\nimport { RXDB_VERSION } from '../utils/utils-rxdb-version.ts';\nimport type {\n RxStorageMemory,\n RxStorageMemoryInstanceCreationOptions,\n RxStorageMemorySettings\n} from './memory-types.ts';\nimport {\n createMemoryStorageInstance,\n RxStorageInstanceMemory\n} from './rx-storage-instance-memory.ts';\n\n/**\n * Keep the state even when the storage instance is 
closed.\n * This makes it easier to use the memory storage\n * to test filesystem-like and multiInstance behaviors.\n */\nconst COLLECTION_STATES = new Map();\n\nexport function getRxStorageMemory(\n settings: RxStorageMemorySettings = {}\n): RxStorageMemory {\n\n const storage: RxStorageMemory = {\n name: 'memory',\n rxdbVersion: RXDB_VERSION,\n collectionStates: COLLECTION_STATES,\n createStorageInstance(\n params: RxStorageInstanceCreationParams\n ): Promise> {\n ensureRxStorageInstanceParamsAreCorrect(params);\n const useSettings = Object.assign(\n {},\n settings,\n params.options\n );\n\n\n return createMemoryStorageInstance(this, params, useSettings);\n }\n };\n\n return storage;\n}\n\n\nexport * from './memory-helper.ts';\nexport * from './binary-search-bounds.ts';\nexport * from './memory-types.ts';\nexport * from './memory-indexes.ts';\nexport * from './rx-storage-instance-memory.ts';\n"],"mappings":";;;;;;;;;AAAA,IAAAA,gBAAA,GAAAC,OAAA;AAEA,IAAAC,iBAAA,GAAAD,OAAA;AAMA,IAAAE,wBAAA,GAAAF,OAAA;AA2CAG,MAAA,CAAAC,IAAA,CAAAF,wBAAA,EAAAG,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAJ,wBAAA,CAAAI,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAZ,wBAAA,CAAAI,GAAA;IAAA;EAAA;AAAA;AAJA,IAAAS,aAAA,GAAAf,OAAA;AAAAG,MAAA,CAAAC,IAAA,CAAAW,aAAA,EAAAV,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAS,aAAA,CAAAT,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAC,aAAA,CAAAT,GAAA;IAAA;EAAA;AAAA;AACA,IAAAU,mBAAA,GAAAhB,OAAA;AAAAG,MAAA,CAAAC,IAAA,CAAAY,mBAAA,EAAAX,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAU,mBAAA,CAAAV,GAAA;EAAAH,MAA
A,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAE,mBAAA,CAAAV,GAAA;IAAA;EAAA;AAAA;AACA,IAAAW,YAAA,GAAAjB,OAAA;AAAAG,MAAA,CAAAC,IAAA,CAAAa,YAAA,EAAAZ,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAW,YAAA,CAAAX,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAG,YAAA,CAAAX,GAAA;IAAA;EAAA;AAAA;AACA,IAAAY,cAAA,GAAAlB,OAAA;AAAAG,MAAA,CAAAC,IAAA,CAAAc,cAAA,EAAAb,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAY,cAAA,CAAAZ,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAI,cAAA,CAAAZ,GAAA;IAAA;EAAA;AAAA;AArCA;AACA;AACA;AACA;AACA;AACA,IAAMa,iBAAiB,GAAG,IAAIC,GAAG,CAAC,CAAC;AAE5B,SAASC,kBAAkBA,CAC9BC,QAAiC,GAAG,CAAC,CAAC,EACvB;EAEf,IAAMC,OAAwB,GAAG;IAC7BC,IAAI,EAAE,QAAQ;IACdC,WAAW,EAAEC,8BAAY;IACzBC,gBAAgB,EAAER,iBAAiB;IACnCS,qBAAqBA,CACjBC,MAA0F,EAC/C;MAC3C,IAAAC,wDAAuC,EAACD,MAAM,CAAC;MAC/C,IAAME,WAAW,GAAG5B,MAAM,CAAC6B,MAAM,CAC7B,CAAC,CAAC,EACFV,QAAQ,EACRO,MAAM,CAACI,OACX,CAAC;MAGD,OAAO,IAAAC,oDAA2B,EAAC,IAAI,EAAEL,MAAM,EAAEE,WAAW,CAAC;IACjE;EACJ,CAAC;EAED,OAAOR,OAAO;AAClB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-memory/memory-helper.js b/dist/cjs/plugins/storage-memory/memory-helper.js deleted file mode 100644 index 30d80fbcf0d..00000000000 --- a/dist/cjs/plugins/storage-memory/memory-helper.js +++ /dev/null @@ -1,112 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.attachmentMapKey = attachmentMapKey; -exports.compareDocsWithIndex = compareDocsWithIndex; -exports.ensureNotRemoved = ensureNotRemoved; -exports.getMemoryCollectionKey = getMemoryCollectionKey; -exports.putWriteRowToState = putWriteRowToState; 
-exports.removeDocFromState = removeDocFromState; -var _arrayPushAtSortPosition = require("array-push-at-sort-position"); -var _rxError = require("../../rx-error.js"); -var _binarySearchBounds = require("./binary-search-bounds.js"); -function getMemoryCollectionKey(databaseName, collectionName, schemaVersion) { - return [databaseName, collectionName, schemaVersion].join('--memory--'); -} -function ensureNotRemoved(instance) { - if (instance.internals.removed) { - throw new Error('removed'); - } -} -function attachmentMapKey(documentId, attachmentId) { - return documentId + '||' + attachmentId; -} -function sortByIndexStringComparator(a, b) { - if (a.indexString < b.indexString) { - return -1; - } else { - return 1; - } -} - -/** - * @hotPath - */ -function putWriteRowToState(docId, state, stateByIndex, row, docInState) { - var document = row.document; - state.documents.set(docId, document); - for (var i = 0; i < stateByIndex.length; ++i) { - var byIndex = stateByIndex[i]; - var docsWithIndex = byIndex.docsWithIndex; - var getIndexableString = byIndex.getIndexableString; - var newIndexString = getIndexableString(document); - var insertPosition = (0, _arrayPushAtSortPosition.pushAtSortPosition)(docsWithIndex, { - id: docId, - doc: document, - indexString: newIndexString - }, sortByIndexStringComparator, 0); - - /** - * Remove previous if it was in the state - */ - if (docInState) { - var previousIndexString = getIndexableString(docInState); - if (previousIndexString === newIndexString) { - /** - * Performance shortcut. - * If index was not changed -> The old doc must be before or after the new one. 
- */ - var prev = docsWithIndex[insertPosition - 1]; - if (prev && prev.id === docId) { - docsWithIndex.splice(insertPosition - 1, 1); - } else { - var next = docsWithIndex[insertPosition + 1]; - if (next.id === docId) { - docsWithIndex.splice(insertPosition + 1, 1); - } else { - throw (0, _rxError.newRxError)('SNH', { - args: { - row, - byIndex - } - }); - } - } - } else { - /** - * Index changed, we must search for the old one and remove it. - */ - var indexBefore = (0, _binarySearchBounds.boundEQ)(docsWithIndex, { - indexString: previousIndexString - }, compareDocsWithIndex); - docsWithIndex.splice(indexBefore, 1); - } - } - } -} -function removeDocFromState(primaryPath, schema, state, doc) { - var docId = doc[primaryPath]; - state.documents.delete(docId); - Object.values(state.byIndex).forEach(byIndex => { - var docsWithIndex = byIndex.docsWithIndex; - var indexString = byIndex.getIndexableString(doc); - var positionInIndex = (0, _binarySearchBounds.boundEQ)(docsWithIndex, { - indexString - }, compareDocsWithIndex); - docsWithIndex.splice(positionInIndex, 1); - }); -} -function compareDocsWithIndex(a, b) { - var indexStringA = a.indexString; - var indexStringB = b.indexString; - if (indexStringA < indexStringB) { - return -1; - } else if (indexStringA === indexStringB) { - return 0; - } else { - return 1; - } -} -//# sourceMappingURL=memory-helper.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-memory/memory-helper.js.map b/dist/cjs/plugins/storage-memory/memory-helper.js.map deleted file mode 100644 index 164c98b61a1..00000000000 --- a/dist/cjs/plugins/storage-memory/memory-helper.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"memory-helper.js","names":["_arrayPushAtSortPosition","require","_rxError","_binarySearchBounds","getMemoryCollectionKey","databaseName","collectionName","schemaVersion","join","ensureNotRemoved","instance","internals","removed","Error","attachmentMapKey","documentId","attachmentId","sortByIndexStringComparator","a","b","indexString","putWriteRowToState","docId","state","stateByIndex","row","docInState","document","documents","set","i","length","byIndex","docsWithIndex","getIndexableString","newIndexString","insertPosition","pushAtSortPosition","id","doc","previousIndexString","prev","splice","next","newRxError","args","indexBefore","boundEQ","compareDocsWithIndex","removeDocFromState","primaryPath","schema","delete","Object","values","forEach","positionInIndex","indexStringA","indexStringB"],"sources":["../../../../src/plugins/storage-memory/memory-helper.ts"],"sourcesContent":["import type {\n BulkWriteRow,\n RxDocumentData,\n RxJsonSchema\n} from '../../types/index.d.ts';\nimport type {\n DocWithIndexString,\n MemoryStorageInternals,\n MemoryStorageInternalsByIndex\n} from './memory-types.ts';\nimport type { RxStorageInstanceMemory } from './rx-storage-instance-memory.ts';\nimport {\n pushAtSortPosition\n} from 'array-push-at-sort-position';\nimport { newRxError } from '../../rx-error.ts';\nimport { boundEQ } from './binary-search-bounds.ts';\n\n\nexport function getMemoryCollectionKey(\n databaseName: string,\n collectionName: string,\n schemaVersion: number\n): string {\n return [\n databaseName,\n collectionName,\n schemaVersion\n ].join('--memory--');\n}\n\n\nexport function ensureNotRemoved(\n instance: RxStorageInstanceMemory\n) {\n if (instance.internals.removed) {\n throw new Error('removed');\n }\n}\n\nexport function attachmentMapKey(documentId: string, attachmentId: string): string {\n return documentId + '||' + attachmentId;\n}\n\nfunction sortByIndexStringComparator(a: DocWithIndexString, b: DocWithIndexString) {\n if 
(a.indexString < b.indexString) {\n return -1;\n } else {\n return 1;\n }\n}\n\n\n\n/**\n * @hotPath\n */\nexport function putWriteRowToState(\n docId: string,\n state: MemoryStorageInternals,\n stateByIndex: MemoryStorageInternalsByIndex[],\n row: BulkWriteRow,\n docInState?: RxDocumentData\n) {\n const document = row.document;\n state.documents.set(docId, document as any);\n for (let i = 0; i < stateByIndex.length; ++i) {\n const byIndex = stateByIndex[i];\n const docsWithIndex = byIndex.docsWithIndex;\n const getIndexableString = byIndex.getIndexableString;\n const newIndexString = getIndexableString(document as any);\n const insertPosition = pushAtSortPosition(\n docsWithIndex,\n {\n id: docId,\n doc: document,\n indexString: newIndexString\n },\n sortByIndexStringComparator,\n 0\n );\n\n /**\n * Remove previous if it was in the state\n */\n if (docInState) {\n const previousIndexString = getIndexableString(docInState);\n if (previousIndexString === newIndexString) {\n /**\n * Performance shortcut.\n * If index was not changed -> The old doc must be before or after the new one.\n */\n const prev = docsWithIndex[insertPosition - 1];\n if (prev && prev.id === docId) {\n docsWithIndex.splice(insertPosition - 1, 1);\n } else {\n const next = docsWithIndex[insertPosition + 1];\n if (next.id === docId) {\n docsWithIndex.splice(insertPosition + 1, 1);\n } else {\n throw newRxError('SNH', {\n args: {\n row,\n byIndex\n }\n });\n }\n }\n } else {\n /**\n * Index changed, we must search for the old one and remove it.\n */\n const indexBefore = boundEQ(\n docsWithIndex,\n {\n indexString: previousIndexString\n } as any,\n compareDocsWithIndex\n );\n docsWithIndex.splice(indexBefore, 1);\n }\n }\n }\n}\n\n\nexport function removeDocFromState(\n primaryPath: string,\n schema: RxJsonSchema>,\n state: MemoryStorageInternals,\n doc: RxDocumentData\n) {\n const docId: string = (doc as any)[primaryPath];\n state.documents.delete(docId);\n\n 
Object.values(state.byIndex).forEach(byIndex => {\n const docsWithIndex = byIndex.docsWithIndex;\n const indexString = byIndex.getIndexableString(doc);\n\n const positionInIndex = boundEQ(\n docsWithIndex,\n {\n indexString\n } as any,\n compareDocsWithIndex\n );\n docsWithIndex.splice(positionInIndex, 1);\n });\n}\n\n\nexport function compareDocsWithIndex(\n a: DocWithIndexString,\n b: DocWithIndexString\n): 1 | 0 | -1 {\n const indexStringA = a.indexString;\n const indexStringB = b.indexString;\n if (indexStringA < indexStringB) {\n return -1;\n } else if (indexStringA === indexStringB) {\n return 0;\n } else {\n return 1;\n }\n}\n"],"mappings":";;;;;;;;;;;AAWA,IAAAA,wBAAA,GAAAC,OAAA;AAGA,IAAAC,QAAA,GAAAD,OAAA;AACA,IAAAE,mBAAA,GAAAF,OAAA;AAGO,SAASG,sBAAsBA,CAClCC,YAAoB,EACpBC,cAAsB,EACtBC,aAAqB,EACf;EACN,OAAO,CACHF,YAAY,EACZC,cAAc,EACdC,aAAa,CAChB,CAACC,IAAI,CAAC,YAAY,CAAC;AACxB;AAGO,SAASC,gBAAgBA,CAC5BC,QAAsC,EACxC;EACE,IAAIA,QAAQ,CAACC,SAAS,CAACC,OAAO,EAAE;IAC5B,MAAM,IAAIC,KAAK,CAAC,SAAS,CAAC;EAC9B;AACJ;AAEO,SAASC,gBAAgBA,CAACC,UAAkB,EAAEC,YAAoB,EAAU;EAC/E,OAAOD,UAAU,GAAG,IAAI,GAAGC,YAAY;AAC3C;AAEA,SAASC,2BAA2BA,CAAYC,CAAgC,EAAEC,CAAgC,EAAE;EAChH,IAAID,CAAC,CAACE,WAAW,GAAGD,CAAC,CAACC,WAAW,EAAE;IAC/B,OAAO,CAAC,CAAC;EACb,CAAC,MAAM;IACH,OAAO,CAAC;EACZ;AACJ;;AAIA;AACA;AACA;AACO,SAASC,kBAAkBA,CAC9BC,KAAa,EACbC,KAAwC,EACxCC,YAAwD,EACxDC,GAA4B,EAC5BC,UAAsC,EACxC;EACE,IAAMC,QAAQ,GAAGF,GAAG,CAACE,QAAQ;EAC7BJ,KAAK,CAACK,SAAS,CAACC,GAAG,CAACP,KAAK,EAAEK,QAAe,CAAC;EAC3C,KAAK,IAAIG,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGN,YAAY,CAACO,MAAM,EAAE,EAAED,CAAC,EAAE;IAC1C,IAAME,OAAO,GAAGR,YAAY,CAACM,CAAC,CAAC;IAC/B,IAAMG,aAAa,GAAGD,OAAO,CAACC,aAAa;IAC3C,IAAMC,kBAAkB,GAAGF,OAAO,CAACE,kBAAkB;IACrD,IAAMC,cAAc,GAAGD,kBAAkB,CAACP,QAAe,CAAC;IAC1D,IAAMS,cAAc,GAAG,IAAAC,2CAAkB,EACrCJ,aAAa,EACb;MACIK,EAAE,EAAEhB,KAAK;MACTiB,GAAG,EAAEZ,QAAQ;MACbP,WAAW,EAAEe;IACjB,CAAC,EACDlB,2BAA2B,EAC3B,CACJ,CAAC;;IAED;AACR;AACA;IACQ,IAAIS,UAAU,EAAE;MACZ,IAAMc,mBAAmB,GAAGN,kBAAkB,CAACR,UAAU,CAAC;MAC1D,IAAIc,mBAAmB,K
AAKL,cAAc,EAAE;QACxC;AAChB;AACA;AACA;QACgB,IAAMM,IAAI,GAAGR,aAAa,CAACG,cAAc,GAAG,CAAC,CAAC;QAC9C,IAAIK,IAAI,IAAIA,IAAI,CAACH,EAAE,KAAKhB,KAAK,EAAE;UAC3BW,aAAa,CAACS,MAAM,CAACN,cAAc,GAAG,CAAC,EAAE,CAAC,CAAC;QAC/C,CAAC,MAAM;UACH,IAAMO,IAAI,GAAGV,aAAa,CAACG,cAAc,GAAG,CAAC,CAAC;UAC9C,IAAIO,IAAI,CAACL,EAAE,KAAKhB,KAAK,EAAE;YACnBW,aAAa,CAACS,MAAM,CAACN,cAAc,GAAG,CAAC,EAAE,CAAC,CAAC;UAC/C,CAAC,MAAM;YACH,MAAM,IAAAQ,mBAAU,EAAC,KAAK,EAAE;cACpBC,IAAI,EAAE;gBACFpB,GAAG;gBACHO;cACJ;YACJ,CAAC,CAAC;UACN;QACJ;MACJ,CAAC,MAAM;QACH;AAChB;AACA;QACgB,IAAMc,WAAW,GAAG,IAAAC,2BAAO,EACvBd,aAAa,EACb;UACIb,WAAW,EAAEoB;QACjB,CAAC,EACDQ,oBACJ,CAAC;QACDf,aAAa,CAACS,MAAM,CAACI,WAAW,EAAE,CAAC,CAAC;MACxC;IACJ;EACJ;AACJ;AAGO,SAASG,kBAAkBA,CAC9BC,WAAmB,EACnBC,MAA+C,EAC/C5B,KAAwC,EACxCgB,GAA8B,EAChC;EACE,IAAMjB,KAAa,GAAIiB,GAAG,CAASW,WAAW,CAAC;EAC/C3B,KAAK,CAACK,SAAS,CAACwB,MAAM,CAAC9B,KAAK,CAAC;EAE7B+B,MAAM,CAACC,MAAM,CAAC/B,KAAK,CAACS,OAAO,CAAC,CAACuB,OAAO,CAACvB,OAAO,IAAI;IAC5C,IAAMC,aAAa,GAAGD,OAAO,CAACC,aAAa;IAC3C,IAAMb,WAAW,GAAGY,OAAO,CAACE,kBAAkB,CAACK,GAAG,CAAC;IAEnD,IAAMiB,eAAe,GAAG,IAAAT,2BAAO,EAC3Bd,aAAa,EACb;MACIb;IACJ,CAAC,EACD4B,oBACJ,CAAC;IACDf,aAAa,CAACS,MAAM,CAACc,eAAe,EAAE,CAAC,CAAC;EAC5C,CAAC,CAAC;AACN;AAGO,SAASR,oBAAoBA,CAChC9B,CAAgC,EAChCC,CAAgC,EACtB;EACV,IAAMsC,YAAY,GAAGvC,CAAC,CAACE,WAAW;EAClC,IAAMsC,YAAY,GAAGvC,CAAC,CAACC,WAAW;EAClC,IAAIqC,YAAY,GAAGC,YAAY,EAAE;IAC7B,OAAO,CAAC,CAAC;EACb,CAAC,MAAM,IAAID,YAAY,KAAKC,YAAY,EAAE;IACtC,OAAO,CAAC;EACZ,CAAC,MAAM;IACH,OAAO,CAAC;EACZ;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-memory/memory-indexes.js b/dist/cjs/plugins/storage-memory/memory-indexes.js deleted file mode 100644 index ee1c74f9e6d..00000000000 --- a/dist/cjs/plugins/storage-memory/memory-indexes.js +++ /dev/null @@ -1,28 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.addIndexesToInternalsState = addIndexesToInternalsState; -exports.getMemoryIndexName = getMemoryIndexName; -var 
_customIndex = require("../../custom-index.js"); -var _rxSchemaHelper = require("../../rx-schema-helper.js"); -var _index = require("../../plugins/utils/index.js"); -function addIndexesToInternalsState(state, schema) { - var primaryPath = (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(schema.primaryKey); - var useIndexes = !schema.indexes ? [] : schema.indexes.map(row => (0, _index.toArray)(row)); - - // we need this index for running cleanup() - useIndexes.push(['_deleted', '_meta.lwt', primaryPath]); - useIndexes.forEach(indexAr => { - state.byIndex[getMemoryIndexName(indexAr)] = { - index: indexAr, - docsWithIndex: [], - getIndexableString: (0, _customIndex.getIndexableStringMonad)(schema, indexAr) - }; - }); -} -function getMemoryIndexName(index) { - return index.join(','); -} -//# sourceMappingURL=memory-indexes.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-memory/memory-indexes.js.map b/dist/cjs/plugins/storage-memory/memory-indexes.js.map deleted file mode 100644 index 0a74f8be1cb..00000000000 --- a/dist/cjs/plugins/storage-memory/memory-indexes.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"memory-indexes.js","names":["_customIndex","require","_rxSchemaHelper","_index","addIndexesToInternalsState","state","schema","primaryPath","getPrimaryFieldOfPrimaryKey","primaryKey","useIndexes","indexes","map","row","toArray","push","forEach","indexAr","byIndex","getMemoryIndexName","index","docsWithIndex","getIndexableString","getIndexableStringMonad","join"],"sources":["../../../../src/plugins/storage-memory/memory-indexes.ts"],"sourcesContent":["import { getIndexableStringMonad } from '../../custom-index.ts';\nimport { getPrimaryFieldOfPrimaryKey } from '../../rx-schema-helper.ts';\nimport type { RxDocumentData, RxJsonSchema } from '../../types/index.d.ts';\nimport { toArray } from '../../plugins/utils/index.ts';\nimport type { MemoryStorageInternals } from './memory-types.ts';\n\nexport function addIndexesToInternalsState(\n 
state: MemoryStorageInternals,\n schema: RxJsonSchema>\n) {\n const primaryPath = getPrimaryFieldOfPrimaryKey(schema.primaryKey);\n const useIndexes: string[][] = !schema.indexes ? [] : schema.indexes.map(row => toArray(row)) as any;\n\n // we need this index for running cleanup()\n useIndexes.push([\n '_deleted',\n '_meta.lwt',\n primaryPath\n ]);\n\n\n useIndexes.forEach(indexAr => {\n state.byIndex[getMemoryIndexName(indexAr)] = {\n index: indexAr,\n docsWithIndex: [],\n getIndexableString: getIndexableStringMonad(schema, indexAr)\n };\n });\n}\n\n\nexport function getMemoryIndexName(index: string[]): string {\n return index.join(',');\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,YAAA,GAAAC,OAAA;AACA,IAAAC,eAAA,GAAAD,OAAA;AAEA,IAAAE,MAAA,GAAAF,OAAA;AAGO,SAASG,0BAA0BA,CACtCC,KAAwC,EACxCC,MAA+C,EACjD;EACE,IAAMC,WAAW,GAAG,IAAAC,2CAA2B,EAACF,MAAM,CAACG,UAAU,CAAC;EAClE,IAAMC,UAAsB,GAAG,CAACJ,MAAM,CAACK,OAAO,GAAG,EAAE,GAAGL,MAAM,CAACK,OAAO,CAACC,GAAG,CAACC,GAAG,IAAI,IAAAC,cAAO,EAACD,GAAG,CAAC,CAAQ;;EAEpG;EACAH,UAAU,CAACK,IAAI,CAAC,CACZ,UAAU,EACV,WAAW,EACXR,WAAW,CACd,CAAC;EAGFG,UAAU,CAACM,OAAO,CAACC,OAAO,IAAI;IAC1BZ,KAAK,CAACa,OAAO,CAACC,kBAAkB,CAACF,OAAO,CAAC,CAAC,GAAG;MACzCG,KAAK,EAAEH,OAAO;MACdI,aAAa,EAAE,EAAE;MACjBC,kBAAkB,EAAE,IAAAC,oCAAuB,EAACjB,MAAM,EAAEW,OAAO;IAC/D,CAAC;EACL,CAAC,CAAC;AACN;AAGO,SAASE,kBAAkBA,CAACC,KAAe,EAAU;EACxD,OAAOA,KAAK,CAACI,IAAI,CAAC,GAAG,CAAC;AAC1B","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-memory/memory-types.js b/dist/cjs/plugins/storage-memory/memory-types.js deleted file mode 100644 index ee1da345fec..00000000000 --- a/dist/cjs/plugins/storage-memory/memory-types.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=memory-types.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-memory/memory-types.js.map b/dist/cjs/plugins/storage-memory/memory-types.js.map deleted file mode 100644 index 4520f15daa1..00000000000 --- 
a/dist/cjs/plugins/storage-memory/memory-types.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"memory-types.js","names":[],"sources":["../../../../src/plugins/storage-memory/memory-types.ts"],"sourcesContent":["import { Subject } from 'rxjs';\nimport type {\n CategorizeBulkWriteRowsOutput,\n EventBulk,\n RxAttachmentWriteData,\n RxConflictResultionTask,\n RxDocumentData,\n RxJsonSchema,\n RxStorage,\n RxStorageChangeEvent,\n RxStorageDefaultCheckpoint\n} from '../../types/index.d.ts';\n\nexport type RxStorageMemorySettings = {};\nexport type RxStorageMemoryInstanceCreationOptions = {};\nexport type RxStorageMemory = RxStorage, RxStorageMemoryInstanceCreationOptions> & {\n /**\n * State by collectionKey\n */\n collectionStates: Map>;\n};\n\nexport type MemoryStorageInternalsByIndex = {\n index: string[];\n docsWithIndex: DocWithIndexString[];\n getIndexableString: (docData: RxDocumentData) => string;\n};\n\n/**\n * The internals are shared between multiple storage instances\n * that have been created with the same [databaseName+collectionName] combination.\n */\nexport type MemoryStorageInternals = {\n // used to debug stuff and identify instances\n id: string;\n\n /**\n * Schema of the first instance created with the given settings.\n * Used to ensure that the same storage is not re-created with\n * a different schema.\n */\n schema: RxJsonSchema>;\n\n /**\n * We reuse the memory state when multiple instances\n * are created with the same params.\n * If refCount becomes 0, we can delete the state.\n */\n refCount: number;\n /**\n * If this becomes true,\n * it means that an instance has called remove()\n * so all other instances should also not work anymore.\n */\n removed: boolean;\n documents: Map>;\n /**\n * Attachments data, indexed by a combined string\n * consisting of [documentId + '||' + attachmentId]\n */\n attachments: Map;\n byIndex: {\n /**\n * Because RxDB requires a deterministic sorting\n * on all indexes, we can be sure that the composed 
index key\n * of each document is unique, because it contains the primaryKey\n * as last index part.\n * So we do not have to store the index-position when we want to do fast\n * writes. Instead we can do a binary search over the existing array\n * because RxDB also knows the previous state of the document when we do a bulkWrite().\n */\n [indexName: string]: MemoryStorageInternalsByIndex;\n };\n\n /**\n * We need these to do lazy writes.\n */\n ensurePersistenceTask?: CategorizeBulkWriteRowsOutput;\n ensurePersistenceIdlePromise?: Promise;\n\n /**\n * To easier test the conflict resolution,\n * the memory storage exposes the conflict resolution task subject\n * so that we can inject own tasks during tests.\n */\n conflictResultionTasks$: Subject>;\n changes$: Subject>, RxStorageDefaultCheckpoint>>;\n};\n\nexport type DocWithIndexString = {\n id: string;\n doc: RxDocumentData;\n indexString: string;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-memory/rx-storage-instance-memory.js b/dist/cjs/plugins/storage-memory/rx-storage-instance-memory.js deleted file mode 100644 index df3399d2a4f..00000000000 --- a/dist/cjs/plugins/storage-memory/rx-storage-instance-memory.js +++ /dev/null @@ -1,340 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxStorageInstanceMemory = exports.OPEN_MEMORY_INSTANCES = void 0; -exports.createMemoryStorageInstance = createMemoryStorageInstance; -var _rxjs = require("rxjs"); -var _customIndex = require("../../custom-index.js"); -var _rxSchemaHelper = require("../../rx-schema-helper.js"); -var _rxStorageHelper = require("../../rx-storage-helper.js"); -var _index = require("../../plugins/utils/index.js"); -var _binarySearchBounds = require("./binary-search-bounds.js"); -var _memoryHelper = require("./memory-helper.js"); -var _memoryIndexes = require("./memory-indexes.js"); -var _rxQueryHelper = require("../../rx-query-helper.js"); -/** 
- * Used in tests to ensure everything - * is closed correctly - */ -var OPEN_MEMORY_INSTANCES = exports.OPEN_MEMORY_INSTANCES = new Set(); -var RxStorageInstanceMemory = exports.RxStorageInstanceMemory = /*#__PURE__*/function () { - /** - * Used by some plugins and storage wrappers - * to find out details about the internals of a write operation. - * For example if you want to know which documents really have been replaced - * or newly inserted. - */ - - function RxStorageInstanceMemory(storage, databaseName, collectionName, schema, internals, options, settings, devMode) { - this.closed = false; - this.categorizedByWriteInput = new WeakMap(); - this.storage = storage; - this.databaseName = databaseName; - this.collectionName = collectionName; - this.schema = schema; - this.internals = internals; - this.options = options; - this.settings = settings; - this.devMode = devMode; - OPEN_MEMORY_INSTANCES.add(this); - this.primaryPath = (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(this.schema.primaryKey); - } - var _proto = RxStorageInstanceMemory.prototype; - _proto.bulkWrite = function bulkWrite(documentWrites, context) { - this.ensurePersistence(); - (0, _memoryHelper.ensureNotRemoved)(this); - var internals = this.internals; - var documentsById = this.internals.documents; - var primaryPath = this.primaryPath; - var categorized = (0, _rxStorageHelper.categorizeBulkWriteRows)(this, primaryPath, documentsById, documentWrites, context); - var error = categorized.errors; - var success = new Array(categorized.bulkInsertDocs.length); - /** - * @performance - * We have to return a Promise but we do not want to wait - * one tick, so we directly create the promise - * which makes it likely to be already resolved later. 
- */ - var awaitMe = Promise.resolve({ - success, - error - }); - var bulkInsertDocs = categorized.bulkInsertDocs; - for (var i = 0; i < bulkInsertDocs.length; ++i) { - var writeRow = bulkInsertDocs[i]; - var doc = writeRow.document; - success[i] = doc; - } - var bulkUpdateDocs = categorized.bulkUpdateDocs; - for (var _i = 0; _i < bulkUpdateDocs.length; ++_i) { - var _writeRow = bulkUpdateDocs[_i]; - var _doc = _writeRow.document; - success.push(_doc); - } - this.categorizedByWriteInput.set(documentWrites, categorized); - this.internals.ensurePersistenceTask = categorized; - if (!this.internals.ensurePersistenceIdlePromise) { - this.internals.ensurePersistenceIdlePromise = (0, _index.requestIdlePromiseNoQueue)().then(() => { - this.internals.ensurePersistenceIdlePromise = undefined; - this.ensurePersistence(); - }); - } - - /** - * Important: The events must be emitted AFTER the persistence - * task has been added. - */ - if (categorized.eventBulk.events.length > 0) { - var lastState = (0, _index.ensureNotFalsy)(categorized.newestRow).document; - categorized.eventBulk.checkpoint = { - id: lastState[primaryPath], - lwt: lastState._meta.lwt - }; - categorized.eventBulk.endTime = (0, _index.now)(); - internals.changes$.next(categorized.eventBulk); - } - return awaitMe; - } - - /** - * Instead of directly inserting the documents into all indexes, - * we do it lazy in the background. This gives the application time - * to directly work with the write-result and to do stuff like rendering DOM - * notes and processing RxDB queries. - * Then in some later time, or just before the next read/write, - * it is ensured that the indexes have been written. 
- */; - _proto.ensurePersistence = function ensurePersistence() { - if (!this.internals.ensurePersistenceTask) { - return; - } - var internals = this.internals; - var documentsById = this.internals.documents; - var primaryPath = this.primaryPath; - var categorized = this.internals.ensurePersistenceTask; - this.internals.ensurePersistenceTask = undefined; - - /** - * Do inserts/updates - */ - var stateByIndex = Object.values(this.internals.byIndex); - var bulkInsertDocs = categorized.bulkInsertDocs; - for (var i = 0; i < bulkInsertDocs.length; ++i) { - var writeRow = bulkInsertDocs[i]; - var doc = writeRow.document; - var docId = doc[primaryPath]; - (0, _memoryHelper.putWriteRowToState)(docId, internals, stateByIndex, writeRow, undefined); - } - var bulkUpdateDocs = categorized.bulkUpdateDocs; - for (var _i2 = 0; _i2 < bulkUpdateDocs.length; ++_i2) { - var _writeRow2 = bulkUpdateDocs[_i2]; - var _doc2 = _writeRow2.document; - var _docId = _doc2[primaryPath]; - (0, _memoryHelper.putWriteRowToState)(_docId, internals, stateByIndex, _writeRow2, documentsById.get(_docId)); - } - - /** - * Handle attachments - */ - if (this.schema.attachments) { - var attachmentsMap = internals.attachments; - categorized.attachmentsAdd.forEach(attachment => { - attachmentsMap.set((0, _memoryHelper.attachmentMapKey)(attachment.documentId, attachment.attachmentId), { - writeData: attachment.attachmentData, - digest: attachment.digest - }); - }); - if (this.schema.attachments) { - categorized.attachmentsUpdate.forEach(attachment => { - attachmentsMap.set((0, _memoryHelper.attachmentMapKey)(attachment.documentId, attachment.attachmentId), { - writeData: attachment.attachmentData, - digest: attachment.digest - }); - }); - categorized.attachmentsRemove.forEach(attachment => { - attachmentsMap.delete((0, _memoryHelper.attachmentMapKey)(attachment.documentId, attachment.attachmentId)); - }); - } - } - }; - _proto.findDocumentsById = function findDocumentsById(docIds, withDeleted) { - 
this.ensurePersistence(); - var documentsById = this.internals.documents; - var ret = []; - if (documentsById.size === 0) { - return Promise.resolve(ret); - } - for (var i = 0; i < docIds.length; ++i) { - var docId = docIds[i]; - var docInDb = documentsById.get(docId); - if (docInDb && (!docInDb._deleted || withDeleted)) { - ret.push(docInDb); - } - } - return Promise.resolve(ret); - }; - _proto.query = function query(preparedQuery) { - this.ensurePersistence(); - var queryPlan = preparedQuery.queryPlan; - var query = preparedQuery.query; - var skip = query.skip ? query.skip : 0; - var limit = query.limit ? query.limit : Infinity; - var skipPlusLimit = skip + limit; - var queryMatcher = false; - if (!queryPlan.selectorSatisfiedByIndex) { - queryMatcher = (0, _rxQueryHelper.getQueryMatcher)(this.schema, preparedQuery.query); - } - var queryPlanFields = queryPlan.index; - var mustManuallyResort = !queryPlan.sortSatisfiedByIndex; - var index = queryPlanFields; - var lowerBound = queryPlan.startKeys; - var lowerBoundString = (0, _customIndex.getStartIndexStringFromLowerBound)(this.schema, index, lowerBound); - var upperBound = queryPlan.endKeys; - upperBound = upperBound; - var upperBoundString = (0, _customIndex.getStartIndexStringFromUpperBound)(this.schema, index, upperBound); - var indexName = (0, _memoryIndexes.getMemoryIndexName)(index); - if (!this.internals.byIndex[indexName]) { - throw new Error('index does not exist ' + indexName); - } - var docsWithIndex = this.internals.byIndex[indexName].docsWithIndex; - var indexOfLower = (queryPlan.inclusiveStart ? _binarySearchBounds.boundGE : _binarySearchBounds.boundGT)(docsWithIndex, { - indexString: lowerBoundString - }, _memoryHelper.compareDocsWithIndex); - var indexOfUpper = (queryPlan.inclusiveEnd ? 
_binarySearchBounds.boundLE : _binarySearchBounds.boundLT)(docsWithIndex, { - indexString: upperBoundString - }, _memoryHelper.compareDocsWithIndex); - var rows = []; - var done = false; - while (!done) { - var currentRow = docsWithIndex[indexOfLower]; - if (!currentRow || indexOfLower > indexOfUpper) { - break; - } - var currentDoc = currentRow.doc; - if (!queryMatcher || queryMatcher(currentDoc)) { - rows.push(currentDoc); - } - if (rows.length >= skipPlusLimit && !mustManuallyResort) { - done = true; - } - indexOfLower++; - } - if (mustManuallyResort) { - var sortComparator = (0, _rxQueryHelper.getSortComparator)(this.schema, preparedQuery.query); - rows = rows.sort(sortComparator); - } - - // apply skip and limit boundaries. - rows = rows.slice(skip, skipPlusLimit); - return Promise.resolve({ - documents: rows - }); - }; - _proto.count = async function count(preparedQuery) { - this.ensurePersistence(); - var result = await this.query(preparedQuery); - return { - count: result.documents.length, - mode: 'fast' - }; - }; - _proto.cleanup = function cleanup(minimumDeletedTime) { - this.ensurePersistence(); - var maxDeletionTime = (0, _index.now)() - minimumDeletedTime; - var index = ['_deleted', '_meta.lwt', this.primaryPath]; - var indexName = (0, _memoryIndexes.getMemoryIndexName)(index); - var docsWithIndex = this.internals.byIndex[indexName].docsWithIndex; - var lowerBoundString = (0, _customIndex.getStartIndexStringFromLowerBound)(this.schema, index, [true, 0, '']); - var indexOfLower = (0, _binarySearchBounds.boundGT)(docsWithIndex, { - indexString: lowerBoundString - }, _memoryHelper.compareDocsWithIndex); - var done = false; - while (!done) { - var currentDoc = docsWithIndex[indexOfLower]; - if (!currentDoc || currentDoc.doc._meta.lwt > maxDeletionTime) { - done = true; - } else { - (0, _memoryHelper.removeDocFromState)(this.primaryPath, this.schema, this.internals, currentDoc.doc); - indexOfLower++; - } - } - return _index.PROMISE_RESOLVE_TRUE; - }; - 
_proto.getAttachmentData = function getAttachmentData(documentId, attachmentId, digest) { - this.ensurePersistence(); - (0, _memoryHelper.ensureNotRemoved)(this); - var key = (0, _memoryHelper.attachmentMapKey)(documentId, attachmentId); - var data = this.internals.attachments.get(key); - if (!digest || !data || data.digest !== digest) { - throw new Error('attachment does not exist: ' + key); - } - return Promise.resolve(data.writeData.data); - }; - _proto.changeStream = function changeStream() { - (0, _memoryHelper.ensureNotRemoved)(this); - return this.internals.changes$.asObservable(); - }; - _proto.remove = async function remove() { - if (this.closed) { - throw new Error('closed'); - } - this.ensurePersistence(); - (0, _memoryHelper.ensureNotRemoved)(this); - this.internals.removed = true; - this.storage.collectionStates.delete((0, _memoryHelper.getMemoryCollectionKey)(this.databaseName, this.collectionName, this.schema.version)); - await this.close(); - }; - _proto.close = function close() { - OPEN_MEMORY_INSTANCES.delete(this); - this.ensurePersistence(); - if (this.closed) { - return _index.PROMISE_RESOLVE_VOID; - } - this.closed = true; - this.internals.refCount = this.internals.refCount - 1; - return _index.PROMISE_RESOLVE_VOID; - }; - _proto.conflictResultionTasks = function conflictResultionTasks() { - return this.internals.conflictResultionTasks$.asObservable(); - }; - _proto.resolveConflictResultionTask = function resolveConflictResultionTask(_taskSolution) { - return _index.PROMISE_RESOLVE_VOID; - }; - return RxStorageInstanceMemory; -}(); -function createMemoryStorageInstance(storage, params, settings) { - var collectionKey = (0, _memoryHelper.getMemoryCollectionKey)(params.databaseName, params.collectionName, params.schema.version); - var internals = storage.collectionStates.get(collectionKey); - if (!internals) { - internals = { - id: (0, _index.randomCouchString)(5), - schema: params.schema, - removed: false, - refCount: 1, - documents: new Map(), 
- attachments: params.schema.attachments ? new Map() : undefined, - byIndex: {}, - conflictResultionTasks$: new _rxjs.Subject(), - changes$: new _rxjs.Subject() - }; - (0, _memoryIndexes.addIndexesToInternalsState)(internals, params.schema); - storage.collectionStates.set(collectionKey, internals); - } else { - /** - * Ensure that the storage was not already - * created with a different schema. - * This is very important because if this check - * does not exist here, we have hard-to-debug problems - * downstream. - */ - if (params.devMode && !(0, _index.deepEqual)(internals.schema, params.schema)) { - throw new Error('storage was already created with a different schema'); - } - internals.refCount = internals.refCount + 1; - } - var instance = new RxStorageInstanceMemory(storage, params.databaseName, params.collectionName, params.schema, internals, params.options, settings, params.devMode); - return Promise.resolve(instance); -} -//# sourceMappingURL=rx-storage-instance-memory.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-memory/rx-storage-instance-memory.js.map b/dist/cjs/plugins/storage-memory/rx-storage-instance-memory.js.map deleted file mode 100644 index d57dcc49dcb..00000000000 --- a/dist/cjs/plugins/storage-memory/rx-storage-instance-memory.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"rx-storage-instance-memory.js","names":["_rxjs","require","_customIndex","_rxSchemaHelper","_rxStorageHelper","_index","_binarySearchBounds","_memoryHelper","_memoryIndexes","_rxQueryHelper","OPEN_MEMORY_INSTANCES","exports","Set","RxStorageInstanceMemory","storage","databaseName","collectionName","schema","internals","options","settings","devMode","closed","categorizedByWriteInput","WeakMap","add","primaryPath","getPrimaryFieldOfPrimaryKey","primaryKey","_proto","prototype","bulkWrite","documentWrites","context","ensurePersistence","ensureNotRemoved","documentsById","documents","categorized","categorizeBulkWriteRows","error","errors","success","Array","bulkInsertDocs","length","awaitMe","Promise","resolve","i","writeRow","doc","document","bulkUpdateDocs","push","set","ensurePersistenceTask","ensurePersistenceIdlePromise","requestIdlePromiseNoQueue","then","undefined","eventBulk","events","lastState","ensureNotFalsy","newestRow","checkpoint","id","lwt","_meta","endTime","now","changes$","next","stateByIndex","Object","values","byIndex","docId","putWriteRowToState","get","attachments","attachmentsMap","attachmentsAdd","forEach","attachment","attachmentMapKey","documentId","attachmentId","writeData","attachmentData","digest","attachmentsUpdate","attachmentsRemove","delete","findDocumentsById","docIds","withDeleted","ret","size","docInDb","_deleted","query","preparedQuery","queryPlan","skip","limit","Infinity","skipPlusLimit","queryMatcher","selectorSatisfiedByIndex","getQueryMatcher","queryPlanFields","index","mustManuallyResort","sortSatisfiedByIndex","lowerBound","startKeys","lowerBoundString","getStartIndexStringFromLowerBound","upperBound","endKeys","upperBoundString","getStartIndexStringFromUpperBound","indexName","getMemoryIndexName","Error","docsWithIndex","indexOfLower","inclusiveStart","boundGE","boundGT","indexString","compareDocsWithIndex","indexOfUpper","inclusiveEnd","boundLE","boundLT","rows","done","currentRow","currentDoc","sortCo
mparator","getSortComparator","sort","slice","count","result","mode","cleanup","minimumDeletedTime","maxDeletionTime","removeDocFromState","PROMISE_RESOLVE_TRUE","getAttachmentData","key","data","changeStream","asObservable","remove","removed","collectionStates","getMemoryCollectionKey","version","close","PROMISE_RESOLVE_VOID","refCount","conflictResultionTasks","conflictResultionTasks$","resolveConflictResultionTask","_taskSolution","createMemoryStorageInstance","params","collectionKey","randomCouchString","Map","Subject","addIndexesToInternalsState","deepEqual","instance"],"sources":["../../../../src/plugins/storage-memory/rx-storage-instance-memory.ts"],"sourcesContent":["import {\n Observable,\n Subject\n} from 'rxjs';\nimport {\n getStartIndexStringFromLowerBound,\n getStartIndexStringFromUpperBound\n} from '../../custom-index.ts';\nimport { getPrimaryFieldOfPrimaryKey } from '../../rx-schema-helper.ts';\nimport {\n categorizeBulkWriteRows\n} from '../../rx-storage-helper.ts';\nimport type {\n BulkWriteRow,\n CategorizeBulkWriteRowsOutput,\n EventBulk,\n PreparedQuery,\n QueryMatcher,\n RxConflictResultionTask,\n RxConflictResultionTaskSolution,\n RxDocumentData,\n RxJsonSchema,\n RxStorageBulkWriteResponse,\n RxStorageChangeEvent,\n RxStorageCountResult,\n RxStorageDefaultCheckpoint,\n RxStorageInstance,\n RxStorageInstanceCreationParams,\n RxStorageQueryResult,\n StringKeys\n} from '../../types/index.d.ts';\nimport {\n deepEqual,\n ensureNotFalsy,\n now,\n PROMISE_RESOLVE_TRUE,\n PROMISE_RESOLVE_VOID,\n promiseWait,\n randomCouchString,\n requestIdlePromiseNoQueue\n} from '../../plugins/utils/index.ts';\nimport {\n boundGE,\n boundGT,\n boundLE,\n boundLT\n} from './binary-search-bounds.ts';\nimport {\n attachmentMapKey,\n compareDocsWithIndex,\n ensureNotRemoved,\n getMemoryCollectionKey,\n putWriteRowToState,\n removeDocFromState\n} from './memory-helper.ts';\nimport {\n addIndexesToInternalsState,\n getMemoryIndexName\n} from 
'./memory-indexes.ts';\nimport type {\n MemoryStorageInternals,\n RxStorageMemory,\n RxStorageMemoryInstanceCreationOptions,\n RxStorageMemorySettings\n} from './memory-types.ts';\nimport { getQueryMatcher, getSortComparator } from '../../rx-query-helper.ts';\n\n/**\n * Used in tests to ensure everything\n * is closed correctly\n */\nexport const OPEN_MEMORY_INSTANCES = new Set>();\n\nexport class RxStorageInstanceMemory implements RxStorageInstance<\n RxDocType,\n MemoryStorageInternals,\n RxStorageMemoryInstanceCreationOptions,\n RxStorageDefaultCheckpoint\n> {\n\n public readonly primaryPath: StringKeys>;\n public closed = false;\n\n /**\n * Used by some plugins and storage wrappers\n * to find out details about the internals of a write operation.\n * For example if you want to know which documents really have been replaced\n * or newly inserted.\n */\n public categorizedByWriteInput = new WeakMap[], CategorizeBulkWriteRowsOutput>();\n\n constructor(\n public readonly storage: RxStorageMemory,\n public readonly databaseName: string,\n public readonly collectionName: string,\n public readonly schema: Readonly>>,\n public readonly internals: MemoryStorageInternals,\n public readonly options: Readonly,\n public readonly settings: RxStorageMemorySettings,\n public readonly devMode: boolean\n ) {\n OPEN_MEMORY_INSTANCES.add(this);\n this.primaryPath = getPrimaryFieldOfPrimaryKey(this.schema.primaryKey);\n }\n\n bulkWrite(\n documentWrites: BulkWriteRow[],\n context: string\n ): Promise> {\n this.ensurePersistence();\n ensureNotRemoved(this);\n const internals = this.internals;\n const documentsById = this.internals.documents;\n const primaryPath = this.primaryPath;\n\n\n const categorized = categorizeBulkWriteRows(\n this,\n primaryPath as any,\n documentsById,\n documentWrites,\n context\n );\n const error = categorized.errors;\n let success: RxDocumentData[] = new Array(categorized.bulkInsertDocs.length);\n /**\n * @performance\n * We have to return a Promise but 
we do not want to wait\n * one tick, so we directly create the promise\n * which makes it likely to be already resolved later.\n */\n const awaitMe = Promise.resolve({ success, error });\n\n const bulkInsertDocs = categorized.bulkInsertDocs;\n for (let i = 0; i < bulkInsertDocs.length; ++i) {\n const writeRow = bulkInsertDocs[i];\n const doc = writeRow.document;\n success[i] = doc;\n }\n const bulkUpdateDocs = categorized.bulkUpdateDocs;\n for (let i = 0; i < bulkUpdateDocs.length; ++i) {\n const writeRow = bulkUpdateDocs[i];\n const doc = writeRow.document;\n success.push(doc);\n }\n\n this.categorizedByWriteInput.set(documentWrites, categorized);\n this.internals.ensurePersistenceTask = categorized;\n\n if (!this.internals.ensurePersistenceIdlePromise) {\n this.internals.ensurePersistenceIdlePromise = requestIdlePromiseNoQueue().then(() => {\n this.internals.ensurePersistenceIdlePromise = undefined;\n this.ensurePersistence();\n });\n }\n\n /**\n * Important: The events must be emitted AFTER the persistence\n * task has been added.\n */\n if (categorized.eventBulk.events.length > 0) {\n const lastState = ensureNotFalsy(categorized.newestRow).document;\n categorized.eventBulk.checkpoint = {\n id: lastState[primaryPath],\n lwt: lastState._meta.lwt\n };\n categorized.eventBulk.endTime = now();\n internals.changes$.next(categorized.eventBulk);\n }\n return awaitMe;\n }\n\n /**\n * Instead of directly inserting the documents into all indexes,\n * we do it lazy in the background. 
This gives the application time\n * to directly work with the write-result and to do stuff like rendering DOM\n * notes and processing RxDB queries.\n * Then in some later time, or just before the next read/write,\n * it is ensured that the indexes have been written.\n */\n public ensurePersistence() {\n if (\n !this.internals.ensurePersistenceTask\n ) {\n return;\n }\n const internals = this.internals;\n const documentsById = this.internals.documents;\n const primaryPath = this.primaryPath;\n\n const categorized = this.internals.ensurePersistenceTask;\n this.internals.ensurePersistenceTask = undefined;\n\n /**\n * Do inserts/updates\n */\n const stateByIndex = Object.values(this.internals.byIndex);\n\n const bulkInsertDocs = categorized.bulkInsertDocs;\n for (let i = 0; i < bulkInsertDocs.length; ++i) {\n const writeRow = bulkInsertDocs[i];\n const doc = writeRow.document;\n const docId = doc[primaryPath];\n putWriteRowToState(\n docId as any,\n internals,\n stateByIndex,\n writeRow,\n undefined\n );\n }\n\n const bulkUpdateDocs = categorized.bulkUpdateDocs;\n for (let i = 0; i < bulkUpdateDocs.length; ++i) {\n const writeRow = bulkUpdateDocs[i];\n const doc = writeRow.document;\n const docId = doc[primaryPath];\n putWriteRowToState(\n docId as any,\n internals,\n stateByIndex,\n writeRow,\n documentsById.get(docId as any)\n );\n }\n\n /**\n * Handle attachments\n */\n if (this.schema.attachments) {\n const attachmentsMap = internals.attachments;\n categorized.attachmentsAdd.forEach(attachment => {\n attachmentsMap.set(\n attachmentMapKey(attachment.documentId, attachment.attachmentId),\n {\n writeData: attachment.attachmentData,\n digest: attachment.digest\n }\n );\n });\n if (this.schema.attachments) {\n categorized.attachmentsUpdate.forEach(attachment => {\n attachmentsMap.set(\n attachmentMapKey(attachment.documentId, attachment.attachmentId),\n {\n writeData: attachment.attachmentData,\n digest: attachment.digest\n }\n );\n });\n 
categorized.attachmentsRemove.forEach(attachment => {\n attachmentsMap.delete(\n attachmentMapKey(attachment.documentId, attachment.attachmentId)\n );\n });\n }\n }\n }\n\n findDocumentsById(\n docIds: string[],\n withDeleted: boolean\n ): Promise[]> {\n this.ensurePersistence();\n const documentsById = this.internals.documents;\n const ret: RxDocumentData[] = [];\n if (documentsById.size === 0) {\n return Promise.resolve(ret);\n }\n for (let i = 0; i < docIds.length; ++i) {\n const docId = docIds[i];\n const docInDb = documentsById.get(docId);\n if (\n docInDb &&\n (\n !docInDb._deleted ||\n withDeleted\n )\n ) {\n ret.push(docInDb);\n }\n }\n return Promise.resolve(ret);\n }\n\n query(\n preparedQuery: PreparedQuery\n ): Promise> {\n this.ensurePersistence();\n\n const queryPlan = preparedQuery.queryPlan;\n const query = preparedQuery.query;\n\n const skip = query.skip ? query.skip : 0;\n const limit = query.limit ? query.limit : Infinity;\n const skipPlusLimit = skip + limit;\n\n let queryMatcher: QueryMatcher> | false = false;\n if (!queryPlan.selectorSatisfiedByIndex) {\n queryMatcher = getQueryMatcher(\n this.schema,\n preparedQuery.query\n );\n }\n\n const queryPlanFields: string[] = queryPlan.index;\n const mustManuallyResort = !queryPlan.sortSatisfiedByIndex;\n const index: string[] | undefined = queryPlanFields;\n const lowerBound: any[] = queryPlan.startKeys;\n const lowerBoundString = getStartIndexStringFromLowerBound(\n this.schema,\n index,\n lowerBound\n );\n\n let upperBound: any[] = queryPlan.endKeys;\n upperBound = upperBound;\n const upperBoundString = getStartIndexStringFromUpperBound(\n this.schema,\n index,\n upperBound\n );\n const indexName = getMemoryIndexName(index);\n\n if (!this.internals.byIndex[indexName]) {\n throw new Error('index does not exist ' + indexName);\n }\n const docsWithIndex = this.internals.byIndex[indexName].docsWithIndex;\n\n\n\n let indexOfLower = (queryPlan.inclusiveStart ? 
boundGE : boundGT)(\n docsWithIndex,\n {\n indexString: lowerBoundString\n } as any,\n compareDocsWithIndex\n );\n\n const indexOfUpper = (queryPlan.inclusiveEnd ? boundLE : boundLT)(\n docsWithIndex,\n {\n indexString: upperBoundString\n } as any,\n compareDocsWithIndex\n );\n\n let rows: RxDocumentData[] = [];\n let done = false;\n while (!done) {\n const currentRow = docsWithIndex[indexOfLower];\n if (\n !currentRow ||\n indexOfLower > indexOfUpper\n ) {\n break;\n }\n const currentDoc = currentRow.doc;\n\n if (!queryMatcher || queryMatcher(currentDoc)) {\n rows.push(currentDoc);\n }\n\n if (\n (rows.length >= skipPlusLimit && !mustManuallyResort)\n ) {\n done = true;\n }\n\n indexOfLower++;\n }\n\n if (mustManuallyResort) {\n const sortComparator = getSortComparator(this.schema, preparedQuery.query);\n rows = rows.sort(sortComparator);\n }\n\n // apply skip and limit boundaries.\n rows = rows.slice(skip, skipPlusLimit);\n return Promise.resolve({\n documents: rows\n });\n }\n\n async count(\n preparedQuery: PreparedQuery\n ): Promise {\n this.ensurePersistence();\n const result = await this.query(preparedQuery);\n return {\n count: result.documents.length,\n mode: 'fast'\n };\n }\n\n cleanup(minimumDeletedTime: number): Promise {\n this.ensurePersistence();\n const maxDeletionTime = now() - minimumDeletedTime;\n const index = ['_deleted', '_meta.lwt', this.primaryPath as any];\n const indexName = getMemoryIndexName(index);\n const docsWithIndex = this.internals.byIndex[indexName].docsWithIndex;\n\n const lowerBoundString = getStartIndexStringFromLowerBound(\n this.schema,\n index,\n [\n true,\n 0,\n ''\n ]\n );\n\n let indexOfLower = boundGT(\n docsWithIndex,\n {\n indexString: lowerBoundString\n } as any,\n compareDocsWithIndex\n );\n\n let done = false;\n while (!done) {\n const currentDoc = docsWithIndex[indexOfLower];\n if (!currentDoc || currentDoc.doc._meta.lwt > maxDeletionTime) {\n done = true;\n } else {\n removeDocFromState(\n this.primaryPath as 
any,\n this.schema,\n this.internals,\n currentDoc.doc\n );\n indexOfLower++;\n }\n }\n return PROMISE_RESOLVE_TRUE;\n }\n\n getAttachmentData(\n documentId: string,\n attachmentId: string,\n digest: string\n ): Promise {\n this.ensurePersistence();\n ensureNotRemoved(this);\n const key = attachmentMapKey(documentId, attachmentId);\n const data = this.internals.attachments.get(key);\n\n if (\n !digest ||\n !data ||\n data.digest !== digest\n ) {\n throw new Error('attachment does not exist: ' + key);\n }\n return Promise.resolve(data.writeData.data);\n }\n\n changeStream(): Observable>, RxStorageDefaultCheckpoint>> {\n ensureNotRemoved(this);\n return this.internals.changes$.asObservable();\n }\n\n async remove(): Promise {\n if (this.closed) {\n throw new Error('closed');\n }\n this.ensurePersistence();\n ensureNotRemoved(this);\n\n this.internals.removed = true;\n this.storage.collectionStates.delete(\n getMemoryCollectionKey(\n this.databaseName,\n this.collectionName,\n this.schema.version\n )\n );\n await this.close();\n }\n\n close(): Promise {\n OPEN_MEMORY_INSTANCES.delete(this);\n\n this.ensurePersistence();\n if (this.closed) {\n return PROMISE_RESOLVE_VOID;\n }\n this.closed = true;\n\n this.internals.refCount = this.internals.refCount - 1;\n return PROMISE_RESOLVE_VOID;\n }\n\n conflictResultionTasks(): Observable> {\n return this.internals.conflictResultionTasks$.asObservable();\n }\n resolveConflictResultionTask(_taskSolution: RxConflictResultionTaskSolution): Promise {\n return PROMISE_RESOLVE_VOID;\n }\n}\n\nexport function createMemoryStorageInstance(\n storage: RxStorageMemory,\n params: RxStorageInstanceCreationParams,\n settings: RxStorageMemorySettings\n): Promise> {\n const collectionKey = getMemoryCollectionKey(\n params.databaseName,\n params.collectionName,\n params.schema.version\n );\n\n let internals = storage.collectionStates.get(collectionKey);\n if (!internals) {\n internals = {\n id: randomCouchString(5),\n schema: params.schema,\n 
removed: false,\n refCount: 1,\n documents: new Map(),\n attachments: params.schema.attachments ? new Map() : undefined as any,\n byIndex: {},\n conflictResultionTasks$: new Subject(),\n changes$: new Subject()\n };\n addIndexesToInternalsState(internals, params.schema);\n storage.collectionStates.set(collectionKey, internals);\n } else {\n /**\n * Ensure that the storage was not already\n * created with a different schema.\n * This is very important because if this check\n * does not exist here, we have hard-to-debug problems\n * downstream.\n */\n if (\n params.devMode &&\n !deepEqual(internals.schema, params.schema)\n ) {\n throw new Error('storage was already created with a different schema');\n }\n internals.refCount = internals.refCount + 1;\n }\n\n const instance = new RxStorageInstanceMemory(\n storage,\n params.databaseName,\n params.collectionName,\n params.schema,\n internals,\n params.options,\n settings,\n params.devMode\n );\n return Promise.resolve(instance);\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,KAAA,GAAAC,OAAA;AAIA,IAAAC,YAAA,GAAAD,OAAA;AAIA,IAAAE,eAAA,GAAAF,OAAA;AACA,IAAAG,gBAAA,GAAAH,OAAA;AAsBA,IAAAI,MAAA,GAAAJ,OAAA;AAUA,IAAAK,mBAAA,GAAAL,OAAA;AAMA,IAAAM,aAAA,GAAAN,OAAA;AAQA,IAAAO,cAAA,GAAAP,OAAA;AAUA,IAAAQ,cAAA,GAAAR,OAAA;AAEA;AACA;AACA;AACA;AACO,IAAMS,qBAAqB,GAAAC,OAAA,CAAAD,qBAAA,GAAG,IAAIE,GAAG,CAA+B,CAAC;AAAC,IAEhEC,uBAAuB,GAAAF,OAAA,CAAAE,uBAAA;EAUhC;AACJ;AACA;AACA;AACA;AACA;;EAGI,SAAAA,wBACoBC,OAAwB,EACxBC,YAAoB,EACpBC,cAAsB,EACtBC,MAAyD,EACzDC,SAA4C,EAC5CC,OAAyD,EACzDC,QAAiC,EACjCC,OAAgB,EAClC;IAAA,KAnBKC,MAAM,GAAG,KAAK;IAAA,KAQdC,uBAAuB,GAAG,IAAIC,OAAO,CAAsE,CAAC;IAAA,KAG/FV,OAAwB,GAAxBA,OAAwB;IAAA,KACxBC,YAAoB,GAApBA,YAAoB;IAAA,KACpBC,cAAsB,GAAtBA,cAAsB;IAAA,KACtBC,MAAyD,GAAzDA,MAAyD;IAAA,KACzDC,SAA4C,GAA5CA,SAA4C;IAAA,KAC5CC,OAAyD,GAAzDA,OAAyD;IAAA,KACzDC,QAAiC,GAAjCA,QAAiC;IAAA,KACjCC,OAAgB,GAAhBA,OAAgB;IAEhCX,qBAAqB,CAACe,GAAG,CAAC,IAAI,CAAC;IAC/B,IAAI,CAACC,WAAW,GAAG,IAAAC,2CAA2B,EAAC,IAAI,CAACV,MAAM,CAACW,UAAU,CAAC;EAC1E;EAAC,IAAAC,M
AAA,GAAAhB,uBAAA,CAAAiB,SAAA;EAAAD,MAAA,CAEDE,SAAS,GAAT,SAAAA,UACIC,cAAyC,EACzCC,OAAe,EAC+B;IAC9C,IAAI,CAACC,iBAAiB,CAAC,CAAC;IACxB,IAAAC,8BAAgB,EAAC,IAAI,CAAC;IACtB,IAAMjB,SAAS,GAAG,IAAI,CAACA,SAAS;IAChC,IAAMkB,aAAa,GAAG,IAAI,CAAClB,SAAS,CAACmB,SAAS;IAC9C,IAAMX,WAAW,GAAG,IAAI,CAACA,WAAW;IAGpC,IAAMY,WAAW,GAAG,IAAAC,wCAAuB,EACvC,IAAI,EACJb,WAAW,EACXU,aAAa,EACbJ,cAAc,EACdC,OACJ,CAAC;IACD,IAAMO,KAAK,GAAGF,WAAW,CAACG,MAAM;IAChC,IAAIC,OAAoC,GAAG,IAAIC,KAAK,CAACL,WAAW,CAACM,cAAc,CAACC,MAAM,CAAC;IACvF;AACR;AACA;AACA;AACA;AACA;IACQ,IAAMC,OAAO,GAAGC,OAAO,CAACC,OAAO,CAAC;MAAEN,OAAO;MAAEF;IAAM,CAAC,CAAC;IAEnD,IAAMI,cAAc,GAAGN,WAAW,CAACM,cAAc;IACjD,KAAK,IAAIK,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGL,cAAc,CAACC,MAAM,EAAE,EAAEI,CAAC,EAAE;MAC5C,IAAMC,QAAQ,GAAGN,cAAc,CAACK,CAAC,CAAC;MAClC,IAAME,GAAG,GAAGD,QAAQ,CAACE,QAAQ;MAC7BV,OAAO,CAACO,CAAC,CAAC,GAAGE,GAAG;IACpB;IACA,IAAME,cAAc,GAAGf,WAAW,CAACe,cAAc;IACjD,KAAK,IAAIJ,EAAC,GAAG,CAAC,EAAEA,EAAC,GAAGI,cAAc,CAACR,MAAM,EAAE,EAAEI,EAAC,EAAE;MAC5C,IAAMC,SAAQ,GAAGG,cAAc,CAACJ,EAAC,CAAC;MAClC,IAAME,IAAG,GAAGD,SAAQ,CAACE,QAAQ;MAC7BV,OAAO,CAACY,IAAI,CAACH,IAAG,CAAC;IACrB;IAEA,IAAI,CAAC5B,uBAAuB,CAACgC,GAAG,CAACvB,cAAc,EAAEM,WAAW,CAAC;IAC7D,IAAI,CAACpB,SAAS,CAACsC,qBAAqB,GAAGlB,WAAW;IAElD,IAAI,CAAC,IAAI,CAACpB,SAAS,CAACuC,4BAA4B,EAAE;MAC9C,IAAI,CAACvC,SAAS,CAACuC,4BAA4B,GAAG,IAAAC,gCAAyB,EAAC,CAAC,CAACC,IAAI,CAAC,MAAM;QACjF,IAAI,CAACzC,SAAS,CAACuC,4BAA4B,GAAGG,SAAS;QACvD,IAAI,CAAC1B,iBAAiB,CAAC,CAAC;MAC5B,CAAC,CAAC;IACN;;IAEA;AACR;AACA;AACA;IACQ,IAAII,WAAW,CAACuB,SAAS,CAACC,MAAM,CAACjB,MAAM,GAAG,CAAC,EAAE;MACzC,IAAMkB,SAAS,GAAG,IAAAC,qBAAc,EAAC1B,WAAW,CAAC2B,SAAS,CAAC,CAACb,QAAQ;MAChEd,WAAW,CAACuB,SAAS,CAACK,UAAU,GAAG;QAC/BC,EAAE,EAAEJ,SAAS,CAACrC,WAAW,CAAC;QAC1B0C,GAAG,EAAEL,SAAS,CAACM,KAAK,CAACD;MACzB,CAAC;MACD9B,WAAW,CAACuB,SAAS,CAACS,OAAO,GAAG,IAAAC,UAAG,EAAC,CAAC;MACrCrD,SAAS,CAACsD,QAAQ,CAACC,IAAI,CAACnC,WAAW,CAACuB,SAAS,CAAC;IAClD;IACA,OAAOf,OAAO;EAClB;;EAEA;AACJ;AACA;AACA;AACA;AACA;AACA;AACA,KAPI;EAAAjB,MAAA,CAQOK,iBAAiB,GAAxB,SAAAA,kBAAA,EAA
2B;IACvB,IACI,CAAC,IAAI,CAAChB,SAAS,CAACsC,qBAAqB,EACvC;MACE;IACJ;IACA,IAAMtC,SAAS,GAAG,IAAI,CAACA,SAAS;IAChC,IAAMkB,aAAa,GAAG,IAAI,CAAClB,SAAS,CAACmB,SAAS;IAC9C,IAAMX,WAAW,GAAG,IAAI,CAACA,WAAW;IAEpC,IAAMY,WAAW,GAAG,IAAI,CAACpB,SAAS,CAACsC,qBAAqB;IACxD,IAAI,CAACtC,SAAS,CAACsC,qBAAqB,GAAGI,SAAS;;IAEhD;AACR;AACA;IACQ,IAAMc,YAAY,GAAGC,MAAM,CAACC,MAAM,CAAC,IAAI,CAAC1D,SAAS,CAAC2D,OAAO,CAAC;IAE1D,IAAMjC,cAAc,GAAGN,WAAW,CAACM,cAAc;IACjD,KAAK,IAAIK,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGL,cAAc,CAACC,MAAM,EAAE,EAAEI,CAAC,EAAE;MAC5C,IAAMC,QAAQ,GAAGN,cAAc,CAACK,CAAC,CAAC;MAClC,IAAME,GAAG,GAAGD,QAAQ,CAACE,QAAQ;MAC7B,IAAM0B,KAAK,GAAG3B,GAAG,CAACzB,WAAW,CAAC;MAC9B,IAAAqD,gCAAkB,EACdD,KAAK,EACL5D,SAAS,EACTwD,YAAY,EACZxB,QAAQ,EACRU,SACJ,CAAC;IACL;IAEA,IAAMP,cAAc,GAAGf,WAAW,CAACe,cAAc;IACjD,KAAK,IAAIJ,GAAC,GAAG,CAAC,EAAEA,GAAC,GAAGI,cAAc,CAACR,MAAM,EAAE,EAAEI,GAAC,EAAE;MAC5C,IAAMC,UAAQ,GAAGG,cAAc,CAACJ,GAAC,CAAC;MAClC,IAAME,KAAG,GAAGD,UAAQ,CAACE,QAAQ;MAC7B,IAAM0B,MAAK,GAAG3B,KAAG,CAACzB,WAAW,CAAC;MAC9B,IAAAqD,gCAAkB,EACdD,MAAK,EACL5D,SAAS,EACTwD,YAAY,EACZxB,UAAQ,EACRd,aAAa,CAAC4C,GAAG,CAACF,MAAY,CAClC,CAAC;IACL;;IAEA;AACR;AACA;IACQ,IAAI,IAAI,CAAC7D,MAAM,CAACgE,WAAW,EAAE;MACzB,IAAMC,cAAc,GAAGhE,SAAS,CAAC+D,WAAW;MAC5C3C,WAAW,CAAC6C,cAAc,CAACC,OAAO,CAACC,UAAU,IAAI;QAC7CH,cAAc,CAAC3B,GAAG,CACd,IAAA+B,8BAAgB,EAACD,UAAU,CAACE,UAAU,EAAEF,UAAU,CAACG,YAAY,CAAC,EAChE;UACIC,SAAS,EAAEJ,UAAU,CAACK,cAAc;UACpCC,MAAM,EAAEN,UAAU,CAACM;QACvB,CACJ,CAAC;MACL,CAAC,CAAC;MACF,IAAI,IAAI,CAAC1E,MAAM,CAACgE,WAAW,EAAE;QACzB3C,WAAW,CAACsD,iBAAiB,CAACR,OAAO,CAACC,UAAU,IAAI;UAChDH,cAAc,CAAC3B,GAAG,CACd,IAAA+B,8BAAgB,EAACD,UAAU,CAACE,UAAU,EAAEF,UAAU,CAACG,YAAY,CAAC,EAChE;YACIC,SAAS,EAAEJ,UAAU,CAACK,cAAc;YACpCC,MAAM,EAAEN,UAAU,CAACM;UACvB,CACJ,CAAC;QACL,CAAC,CAAC;QACFrD,WAAW,CAACuD,iBAAiB,CAACT,OAAO,CAACC,UAAU,IAAI;UAChDH,cAAc,CAACY,MAAM,CACjB,IAAAR,8BAAgB,EAACD,UAAU,CAACE,UAAU,EAAEF,UAAU,CAACG,YAAY,CACnE,CAAC;QACL,CAAC,CAAC;MACN;IACJ;EACJ,CAAC;EAAA3D,MAAA,CAEDkE,iBAAiB,GAAjB,SAAAA,kBACIC,MAAgB,EAChBC,WAAoB,EACgB;IACpC,IA
AI,CAAC/D,iBAAiB,CAAC,CAAC;IACxB,IAAME,aAAa,GAAG,IAAI,CAAClB,SAAS,CAACmB,SAAS;IAC9C,IAAM6D,GAAgC,GAAG,EAAE;IAC3C,IAAI9D,aAAa,CAAC+D,IAAI,KAAK,CAAC,EAAE;MAC1B,OAAOpD,OAAO,CAACC,OAAO,CAACkD,GAAG,CAAC;IAC/B;IACA,KAAK,IAAIjD,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG+C,MAAM,CAACnD,MAAM,EAAE,EAAEI,CAAC,EAAE;MACpC,IAAM6B,KAAK,GAAGkB,MAAM,CAAC/C,CAAC,CAAC;MACvB,IAAMmD,OAAO,GAAGhE,aAAa,CAAC4C,GAAG,CAACF,KAAK,CAAC;MACxC,IACIsB,OAAO,KAEH,CAACA,OAAO,CAACC,QAAQ,IACjBJ,WAAW,CACd,EACH;QACEC,GAAG,CAAC5C,IAAI,CAAC8C,OAAO,CAAC;MACrB;IACJ;IACA,OAAOrD,OAAO,CAACC,OAAO,CAACkD,GAAG,CAAC;EAC/B,CAAC;EAAArE,MAAA,CAEDyE,KAAK,GAAL,SAAAA,MACIC,aAAuC,EACC;IACxC,IAAI,CAACrE,iBAAiB,CAAC,CAAC;IAExB,IAAMsE,SAAS,GAAGD,aAAa,CAACC,SAAS;IACzC,IAAMF,KAAK,GAAGC,aAAa,CAACD,KAAK;IAEjC,IAAMG,IAAI,GAAGH,KAAK,CAACG,IAAI,GAAGH,KAAK,CAACG,IAAI,GAAG,CAAC;IACxC,IAAMC,KAAK,GAAGJ,KAAK,CAACI,KAAK,GAAGJ,KAAK,CAACI,KAAK,GAAGC,QAAQ;IAClD,IAAMC,aAAa,GAAGH,IAAI,GAAGC,KAAK;IAElC,IAAIG,YAA6D,GAAG,KAAK;IACzE,IAAI,CAACL,SAAS,CAACM,wBAAwB,EAAE;MACrCD,YAAY,GAAG,IAAAE,8BAAe,EAC1B,IAAI,CAAC9F,MAAM,EACXsF,aAAa,CAACD,KAClB,CAAC;IACL;IAEA,IAAMU,eAAyB,GAAGR,SAAS,CAACS,KAAK;IACjD,IAAMC,kBAAkB,GAAG,CAACV,SAAS,CAACW,oBAAoB;IAC1D,IAAMF,KAA2B,GAAGD,eAAe;IACnD,IAAMI,UAAiB,GAAGZ,SAAS,CAACa,SAAS;IAC7C,IAAMC,gBAAgB,GAAG,IAAAC,8CAAiC,EACtD,IAAI,CAACtG,MAAM,EACXgG,KAAK,EACLG,UACJ,CAAC;IAED,IAAII,UAAiB,GAAGhB,SAAS,CAACiB,OAAO;IACzCD,UAAU,GAAGA,UAAU;IACvB,IAAME,gBAAgB,GAAG,IAAAC,8CAAiC,EACtD,IAAI,CAAC1G,MAAM,EACXgG,KAAK,EACLO,UACJ,CAAC;IACD,IAAMI,SAAS,GAAG,IAAAC,iCAAkB,EAACZ,KAAK,CAAC;IAE3C,IAAI,CAAC,IAAI,CAAC/F,SAAS,CAAC2D,OAAO,CAAC+C,SAAS,CAAC,EAAE;MACpC,MAAM,IAAIE,KAAK,CAAC,uBAAuB,GAAGF,SAAS,CAAC;IACxD;IACA,IAAMG,aAAa,GAAG,IAAI,CAAC7G,SAAS,CAAC2D,OAAO,CAAC+C,SAAS,CAAC,CAACG,aAAa;IAIrE,IAAIC,YAAY,GAAG,CAACxB,SAAS,CAACyB,cAAc,GAAGC,2BAAO,GAAGC,2BAAO,EAC5DJ,aAAa,EACb;MACIK,WAAW,EAAEd;IACjB,CAAC,EACDe,kCACJ,CAAC;IAED,IAAMC,YAAY,GAAG,CAAC9B,SAAS,CAAC+B,YAAY,GAAGC,2BAAO,GAAGC,2BAAO,EAC5DV,aAAa,EACb;MACIK,WAAW,EAAEV;IACjB,CAAC,EACDW,kCACJ,CAAC;IAED,IAAIK,IAAiC,
GAAG,EAAE;IAC1C,IAAIC,IAAI,GAAG,KAAK;IAChB,OAAO,CAACA,IAAI,EAAE;MACV,IAAMC,UAAU,GAAGb,aAAa,CAACC,YAAY,CAAC;MAC9C,IACI,CAACY,UAAU,IACXZ,YAAY,GAAGM,YAAY,EAC7B;QACE;MACJ;MACA,IAAMO,UAAU,GAAGD,UAAU,CAACzF,GAAG;MAEjC,IAAI,CAAC0D,YAAY,IAAIA,YAAY,CAACgC,UAAU,CAAC,EAAE;QAC3CH,IAAI,CAACpF,IAAI,CAACuF,UAAU,CAAC;MACzB;MAEA,IACKH,IAAI,CAAC7F,MAAM,IAAI+D,aAAa,IAAI,CAACM,kBAAkB,EACtD;QACEyB,IAAI,GAAG,IAAI;MACf;MAEAX,YAAY,EAAE;IAClB;IAEA,IAAId,kBAAkB,EAAE;MACpB,IAAM4B,cAAc,GAAG,IAAAC,gCAAiB,EAAC,IAAI,CAAC9H,MAAM,EAAEsF,aAAa,CAACD,KAAK,CAAC;MAC1EoC,IAAI,GAAGA,IAAI,CAACM,IAAI,CAACF,cAAc,CAAC;IACpC;;IAEA;IACAJ,IAAI,GAAGA,IAAI,CAACO,KAAK,CAACxC,IAAI,EAAEG,aAAa,CAAC;IACtC,OAAO7D,OAAO,CAACC,OAAO,CAAC;MACnBX,SAAS,EAAEqG;IACf,CAAC,CAAC;EACN,CAAC;EAAA7G,MAAA,CAEKqH,KAAK,GAAX,eAAAA,MACI3C,aAAuC,EACV;IAC7B,IAAI,CAACrE,iBAAiB,CAAC,CAAC;IACxB,IAAMiH,MAAM,GAAG,MAAM,IAAI,CAAC7C,KAAK,CAACC,aAAa,CAAC;IAC9C,OAAO;MACH2C,KAAK,EAAEC,MAAM,CAAC9G,SAAS,CAACQ,MAAM;MAC9BuG,IAAI,EAAE;IACV,CAAC;EACL,CAAC;EAAAvH,MAAA,CAEDwH,OAAO,GAAP,SAAAA,QAAQC,kBAA0B,EAAoB;IAClD,IAAI,CAACpH,iBAAiB,CAAC,CAAC;IACxB,IAAMqH,eAAe,GAAG,IAAAhF,UAAG,EAAC,CAAC,GAAG+E,kBAAkB;IAClD,IAAMrC,KAAK,GAAG,CAAC,UAAU,EAAE,WAAW,EAAE,IAAI,CAACvF,WAAW,CAAQ;IAChE,IAAMkG,SAAS,GAAG,IAAAC,iCAAkB,EAACZ,KAAK,CAAC;IAC3C,IAAMc,aAAa,GAAG,IAAI,CAAC7G,SAAS,CAAC2D,OAAO,CAAC+C,SAAS,CAAC,CAACG,aAAa;IAErE,IAAMT,gBAAgB,GAAG,IAAAC,8CAAiC,EACtD,IAAI,CAACtG,MAAM,EACXgG,KAAK,EACL,CACI,IAAI,EACJ,CAAC,EACD,EAAE,CAEV,CAAC;IAED,IAAIe,YAAY,GAAG,IAAAG,2BAAO,EACtBJ,aAAa,EACb;MACIK,WAAW,EAAEd;IACjB,CAAC,EACDe,kCACJ,CAAC;IAED,IAAIM,IAAI,GAAG,KAAK;IAChB,OAAO,CAACA,IAAI,EAAE;MACV,IAAME,UAAU,GAAGd,aAAa,CAACC,YAAY,CAAC;MAC9C,IAAI,CAACa,UAAU,IAAIA,UAAU,CAAC1F,GAAG,CAACkB,KAAK,CAACD,GAAG,GAAGmF,eAAe,EAAE;QAC3DZ,IAAI,GAAG,IAAI;MACf,CAAC,MAAM;QACH,IAAAa,gCAAkB,EACd,IAAI,CAAC9H,WAAW,EAChB,IAAI,CAACT,MAAM,EACX,IAAI,CAACC,SAAS,EACd2H,UAAU,CAAC1F,GACf,CAAC;QACD6E,YAAY,EAAE;MAClB;IACJ;IACA,OAAOyB,2BAAoB;EAC/B,CAAC;EAAA5H,MAAA,CAED6H,iBAAiB,GAAjB,SAAAA,kBACInE,UAAkB,EAClBC,YAAoB,EACpBG,MA
Ac,EACC;IACf,IAAI,CAACzD,iBAAiB,CAAC,CAAC;IACxB,IAAAC,8BAAgB,EAAC,IAAI,CAAC;IACtB,IAAMwH,GAAG,GAAG,IAAArE,8BAAgB,EAACC,UAAU,EAAEC,YAAY,CAAC;IACtD,IAAMoE,IAAI,GAAG,IAAI,CAAC1I,SAAS,CAAC+D,WAAW,CAACD,GAAG,CAAC2E,GAAG,CAAC;IAEhD,IACI,CAAChE,MAAM,IACP,CAACiE,IAAI,IACLA,IAAI,CAACjE,MAAM,KAAKA,MAAM,EACxB;MACE,MAAM,IAAImC,KAAK,CAAC,6BAA6B,GAAG6B,GAAG,CAAC;IACxD;IACA,OAAO5G,OAAO,CAACC,OAAO,CAAC4G,IAAI,CAACnE,SAAS,CAACmE,IAAI,CAAC;EAC/C,CAAC;EAAA/H,MAAA,CAEDgI,YAAY,GAAZ,SAAAA,aAAA,EAAmH;IAC/G,IAAA1H,8BAAgB,EAAC,IAAI,CAAC;IACtB,OAAO,IAAI,CAACjB,SAAS,CAACsD,QAAQ,CAACsF,YAAY,CAAC,CAAC;EACjD,CAAC;EAAAjI,MAAA,CAEKkI,MAAM,GAAZ,eAAAA,OAAA,EAA8B;IAC1B,IAAI,IAAI,CAACzI,MAAM,EAAE;MACb,MAAM,IAAIwG,KAAK,CAAC,QAAQ,CAAC;IAC7B;IACA,IAAI,CAAC5F,iBAAiB,CAAC,CAAC;IACxB,IAAAC,8BAAgB,EAAC,IAAI,CAAC;IAEtB,IAAI,CAACjB,SAAS,CAAC8I,OAAO,GAAG,IAAI;IAC7B,IAAI,CAAClJ,OAAO,CAACmJ,gBAAgB,CAACnE,MAAM,CAChC,IAAAoE,oCAAsB,EAClB,IAAI,CAACnJ,YAAY,EACjB,IAAI,CAACC,cAAc,EACnB,IAAI,CAACC,MAAM,CAACkJ,OAChB,CACJ,CAAC;IACD,MAAM,IAAI,CAACC,KAAK,CAAC,CAAC;EACtB,CAAC;EAAAvI,MAAA,CAEDuI,KAAK,GAAL,SAAAA,MAAA,EAAuB;IACnB1J,qBAAqB,CAACoF,MAAM,CAAC,IAAI,CAAC;IAElC,IAAI,CAAC5D,iBAAiB,CAAC,CAAC;IACxB,IAAI,IAAI,CAACZ,MAAM,EAAE;MACb,OAAO+I,2BAAoB;IAC/B;IACA,IAAI,CAAC/I,MAAM,GAAG,IAAI;IAElB,IAAI,CAACJ,SAAS,CAACoJ,QAAQ,GAAG,IAAI,CAACpJ,SAAS,CAACoJ,QAAQ,GAAG,CAAC;IACrD,OAAOD,2BAAoB;EAC/B,CAAC;EAAAxI,MAAA,CAED0I,sBAAsB,GAAtB,SAAAA,uBAAA,EAAyE;IACrE,OAAO,IAAI,CAACrJ,SAAS,CAACsJ,uBAAuB,CAACV,YAAY,CAAC,CAAC;EAChE,CAAC;EAAAjI,MAAA,CACD4I,4BAA4B,GAA5B,SAAAA,6BAA6BC,aAAyD,EAAiB;IACnG,OAAOL,2BAAoB;EAC/B,CAAC;EAAA,OAAAxJ,uBAAA;AAAA;AAGE,SAAS8J,2BAA2BA,CACvC7J,OAAwB,EACxB8J,MAA0F,EAC1FxJ,QAAiC,EACU;EAC3C,IAAMyJ,aAAa,GAAG,IAAAX,oCAAsB,EACxCU,MAAM,CAAC7J,YAAY,EACnB6J,MAAM,CAAC5J,cAAc,EACrB4J,MAAM,CAAC3J,MAAM,CAACkJ,OAClB,CAAC;EAED,IAAIjJ,SAAS,GAAGJ,OAAO,CAACmJ,gBAAgB,CAACjF,GAAG,CAAC6F,aAAa,CAAC;EAC3D,IAAI,CAAC3J,SAAS,EAAE;IACZA,SAAS,GAAG;MACRiD,EAAE,EAAE,IAAA2G,wBAAiB,EAAC,CAAC,CAAC;MACxB7J,MAAM,EAAE2J,MAAM,CAAC3J,MAAM;MACrB+I,OAAO,EAAE,KA
AK;MACdM,QAAQ,EAAE,CAAC;MACXjI,SAAS,EAAE,IAAI0I,GAAG,CAAC,CAAC;MACpB9F,WAAW,EAAE2F,MAAM,CAAC3J,MAAM,CAACgE,WAAW,GAAG,IAAI8F,GAAG,CAAC,CAAC,GAAGnH,SAAgB;MACrEiB,OAAO,EAAE,CAAC,CAAC;MACX2F,uBAAuB,EAAE,IAAIQ,aAAO,CAAC,CAAC;MACtCxG,QAAQ,EAAE,IAAIwG,aAAO,CAAC;IAC1B,CAAC;IACD,IAAAC,yCAA0B,EAAC/J,SAAS,EAAE0J,MAAM,CAAC3J,MAAM,CAAC;IACpDH,OAAO,CAACmJ,gBAAgB,CAAC1G,GAAG,CAACsH,aAAa,EAAE3J,SAAS,CAAC;EAC1D,CAAC,MAAM;IACH;AACR;AACA;AACA;AACA;AACA;AACA;IACQ,IACI0J,MAAM,CAACvJ,OAAO,IACd,CAAC,IAAA6J,gBAAS,EAAChK,SAAS,CAACD,MAAM,EAAE2J,MAAM,CAAC3J,MAAM,CAAC,EAC7C;MACE,MAAM,IAAI6G,KAAK,CAAC,qDAAqD,CAAC;IAC1E;IACA5G,SAAS,CAACoJ,QAAQ,GAAGpJ,SAAS,CAACoJ,QAAQ,GAAG,CAAC;EAC/C;EAEA,IAAMa,QAAQ,GAAG,IAAItK,uBAAuB,CACxCC,OAAO,EACP8J,MAAM,CAAC7J,YAAY,EACnB6J,MAAM,CAAC5J,cAAc,EACrB4J,MAAM,CAAC3J,MAAM,EACbC,SAAS,EACT0J,MAAM,CAACzJ,OAAO,EACdC,QAAQ,EACRwJ,MAAM,CAACvJ,OACX,CAAC;EACD,OAAO0B,OAAO,CAACC,OAAO,CAACmI,QAAQ,CAAC;AACpC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-mongodb/index.js b/dist/cjs/plugins/storage-mongodb/index.js deleted file mode 100644 index 17bad8354c4..00000000000 --- a/dist/cjs/plugins/storage-mongodb/index.js +++ /dev/null @@ -1,50 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _rxStorageMongodb = require("./rx-storage-mongodb.js"); -Object.keys(_rxStorageMongodb).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _rxStorageMongodb[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxStorageMongodb[key]; - } - }); -}); -var _rxStorageInstanceMongodb = require("./rx-storage-instance-mongodb.js"); -Object.keys(_rxStorageInstanceMongodb).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _rxStorageInstanceMongodb[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: 
function () { - return _rxStorageInstanceMongodb[key]; - } - }); -}); -var _mongodbHelper = require("./mongodb-helper.js"); -Object.keys(_mongodbHelper).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _mongodbHelper[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _mongodbHelper[key]; - } - }); -}); -var _mongodbTypes = require("./mongodb-types.js"); -Object.keys(_mongodbTypes).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _mongodbTypes[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _mongodbTypes[key]; - } - }); -}); -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-mongodb/index.js.map b/dist/cjs/plugins/storage-mongodb/index.js.map deleted file mode 100644 index ca8be28fee7..00000000000 --- a/dist/cjs/plugins/storage-mongodb/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_rxStorageMongodb","require","Object","keys","forEach","key","exports","defineProperty","enumerable","get","_rxStorageInstanceMongodb","_mongodbHelper","_mongodbTypes"],"sources":["../../../../src/plugins/storage-mongodb/index.ts"],"sourcesContent":["export * from './rx-storage-mongodb.ts';\nexport * from './rx-storage-instance-mongodb.ts';\nexport * from './mongodb-helper.ts';\nexport * from 
'./mongodb-types.ts';\n"],"mappings":";;;;;AAAA,IAAAA,iBAAA,GAAAC,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAH,iBAAA,EAAAI,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAL,iBAAA,CAAAK,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAT,iBAAA,CAAAK,GAAA;IAAA;EAAA;AAAA;AACA,IAAAK,yBAAA,GAAAT,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAO,yBAAA,EAAAN,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAK,yBAAA,CAAAL,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAC,yBAAA,CAAAL,GAAA;IAAA;EAAA;AAAA;AACA,IAAAM,cAAA,GAAAV,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAQ,cAAA,EAAAP,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAM,cAAA,CAAAN,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAE,cAAA,CAAAN,GAAA;IAAA;EAAA;AAAA;AACA,IAAAO,aAAA,GAAAX,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAS,aAAA,EAAAR,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAO,aAAA,CAAAP,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAG,aAAA,CAAAP,GAAA;IAAA;EAAA;AAAA","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-mongodb/mongodb-helper.js b/dist/cjs/plugins/storage-mongodb/mongodb-helper.js deleted file mode 100644 index 90affc237f3..00000000000 --- a/dist/cjs/plugins/storage-mongodb/mongodb-helper.js +++ /dev/null @@ -1,91 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RX_STORAGE_NAME_MONGODB = exports.MONGO_ID_SUBSTITUTE_FIELDNAME = void 0; -exports.getMongoDBIndexName = getMongoDBIndexName; -exports.prepareMongoDBQuery = prepareMongoDBQuery; -exports.primarySwapMongoDBQuerySelector = primarySwapMongoDBQuerySelector; -exports.swapMongoToRxDoc = swapMongoToRxDoc; -exports.swapRxDocToMongo = swapRxDocToMongo; 
-exports.swapToMongoSort = swapToMongoSort; -var _index = require("../utils/index.js"); -var _rxSchemaHelper = require("../../rx-schema-helper.js"); -var RX_STORAGE_NAME_MONGODB = exports.RX_STORAGE_NAME_MONGODB = 'mongodb'; - -/** - * MongoDB uses the _id field by itself (max 12 bytes) - * so we have to substitute the _id field if - * it is used in the RxDocType. - */ -var MONGO_ID_SUBSTITUTE_FIELDNAME = exports.MONGO_ID_SUBSTITUTE_FIELDNAME = '__id'; -function primarySwapMongoDBQuerySelector(primaryKey, selector) { - selector = (0, _index.flatClone)(selector); - if (primaryKey !== '_id') { - return selector; - } - if (Array.isArray(selector)) { - return selector.map(item => primarySwapMongoDBQuerySelector(primaryKey, item)); - } else if (typeof selector === 'object') { - var ret = {}; - Object.entries(selector).forEach(([k, v]) => { - if (k === primaryKey) { - ret._id = v; - } else { - if (k.startsWith('$')) { - ret[k] = primarySwapMongoDBQuerySelector(primaryKey, v); - } else { - ret[k] = v; - } - } - }); - return ret; - } else { - return selector; - } -} -function prepareMongoDBQuery(schema, mutateableQuery) { - var primaryKey = (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(schema.primaryKey); - var preparedQuery = { - query: mutateableQuery, - mongoSelector: primarySwapMongoDBQuerySelector(primaryKey, mutateableQuery.selector), - mongoSort: swapToMongoSort(mutateableQuery.sort) - }; - return preparedQuery; -} -; -function swapMongoToRxDoc(docData) { - docData = (0, _index.flatClone)(docData); - if (docData[MONGO_ID_SUBSTITUTE_FIELDNAME]) { - var value = docData[MONGO_ID_SUBSTITUTE_FIELDNAME]; - delete docData[MONGO_ID_SUBSTITUTE_FIELDNAME]; - docData._id = value; - } else { - delete docData._id; - } - return docData; -} -function swapRxDocToMongo(docData) { - docData = (0, _index.flatClone)(docData); - if (docData._id) { - var value = docData._id; - delete docData._id; - docData[MONGO_ID_SUBSTITUTE_FIELDNAME] = value; - } - return docData; -} -function 
swapToMongoSort(sort) { - var ret = {}; - sort.forEach(sortPart => { - var [key, direction] = Object.entries(sortPart)[0]; - var mongoKey = key === '_id' ? MONGO_ID_SUBSTITUTE_FIELDNAME : key; - var mongoDirection = direction === 'asc' ? 1 : -1; - ret[mongoKey] = mongoDirection; - }); - return ret; -} -function getMongoDBIndexName(index) { - return index.join('|'); -} -//# sourceMappingURL=mongodb-helper.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-mongodb/mongodb-helper.js.map b/dist/cjs/plugins/storage-mongodb/mongodb-helper.js.map deleted file mode 100644 index d9197e4bcdf..00000000000 --- a/dist/cjs/plugins/storage-mongodb/mongodb-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"mongodb-helper.js","names":["_index","require","_rxSchemaHelper","RX_STORAGE_NAME_MONGODB","exports","MONGO_ID_SUBSTITUTE_FIELDNAME","primarySwapMongoDBQuerySelector","primaryKey","selector","flatClone","Array","isArray","map","item","ret","Object","entries","forEach","k","v","_id","startsWith","prepareMongoDBQuery","schema","mutateableQuery","getPrimaryFieldOfPrimaryKey","preparedQuery","query","mongoSelector","mongoSort","swapToMongoSort","sort","swapMongoToRxDoc","docData","value","swapRxDocToMongo","sortPart","key","direction","mongoKey","mongoDirection","getMongoDBIndexName","index","join"],"sources":["../../../../src/plugins/storage-mongodb/mongodb-helper.ts"],"sourcesContent":["import type {\n FilledMangoQuery,\n MangoQuerySelector,\n MangoQuerySortPart,\n RxDocumentData,\n RxJsonSchema\n} from '../../types/index.d.ts';\nimport {\n Sort as MongoSort\n} from 'mongodb';\nimport { flatClone } from '../utils/index.ts';\nimport { MongoDBPreparedQuery, MongoQuerySelector } from './mongodb-types.ts';\nimport { getPrimaryFieldOfPrimaryKey } from '../../rx-schema-helper.ts';\nexport const RX_STORAGE_NAME_MONGODB = 'mongodb';\n\n/**\n * MongoDB uses the _id field by itself (max 12 bytes)\n * so we have to substitute the _id field if\n * it is used 
in the RxDocType.\n */\nexport const MONGO_ID_SUBSTITUTE_FIELDNAME = '__id';\n\nexport function primarySwapMongoDBQuerySelector(\n primaryKey: keyof RxDocType,\n selector: MangoQuerySelector\n): MongoQuerySelector {\n selector = flatClone(selector);\n\n if (primaryKey !== '_id') {\n return selector as any;\n }\n if (Array.isArray(selector)) {\n return selector.map(item => primarySwapMongoDBQuerySelector(primaryKey, item)) as any;\n } else if (typeof selector === 'object') {\n const ret: any = {};\n Object.entries(selector).forEach(([k, v]) => {\n if (k === primaryKey) {\n ret._id = v;\n } else {\n if (k.startsWith('$')) {\n ret[k] = primarySwapMongoDBQuerySelector(primaryKey, v as any);\n } else {\n ret[k] = v;\n }\n }\n });\n return ret;\n } else {\n return selector;\n }\n}\n\n\nexport function prepareMongoDBQuery(\n schema: RxJsonSchema>,\n mutateableQuery: FilledMangoQuery\n) {\n const primaryKey = getPrimaryFieldOfPrimaryKey(schema.primaryKey) as any;\n const preparedQuery: MongoDBPreparedQuery = {\n query: mutateableQuery,\n mongoSelector: primarySwapMongoDBQuerySelector(\n primaryKey,\n mutateableQuery.selector\n ),\n mongoSort: swapToMongoSort(mutateableQuery.sort)\n };\n return preparedQuery;\n};\n\n\nexport function swapMongoToRxDoc(\n docData: any\n): RxDocumentData {\n docData = flatClone(docData);\n if ((docData as any)[MONGO_ID_SUBSTITUTE_FIELDNAME]) {\n const value = (docData as any)[MONGO_ID_SUBSTITUTE_FIELDNAME];\n delete (docData as any)[MONGO_ID_SUBSTITUTE_FIELDNAME];\n (docData as any)._id = value;\n } else {\n delete (docData as any)._id;\n }\n return docData;\n}\n\nexport function swapRxDocToMongo(\n docData: RxDocumentData\n): any {\n docData = flatClone(docData);\n if ((docData as any)._id) {\n const value = (docData as any)._id;\n delete (docData as any)._id;\n (docData as any)[MONGO_ID_SUBSTITUTE_FIELDNAME] = value;\n }\n return docData;\n}\n\nexport function swapToMongoSort(\n sort: MangoQuerySortPart[]\n): MongoSort {\n const ret: 
MongoSort = {};\n sort.forEach(sortPart => {\n const [key, direction] = Object.entries(sortPart)[0];\n const mongoKey = key === '_id' ? MONGO_ID_SUBSTITUTE_FIELDNAME : key;\n const mongoDirection = direction === 'asc' ? 1 : -1;\n ret[mongoKey] = mongoDirection;\n });\n return ret;\n}\n\nexport function getMongoDBIndexName(index: string[]): string {\n return index.join('|');\n}\n"],"mappings":";;;;;;;;;;;;AAUA,IAAAA,MAAA,GAAAC,OAAA;AAEA,IAAAC,eAAA,GAAAD,OAAA;AACO,IAAME,uBAAuB,GAAAC,OAAA,CAAAD,uBAAA,GAAG,SAAS;;AAEhD;AACA;AACA;AACA;AACA;AACO,IAAME,6BAA6B,GAAAD,OAAA,CAAAC,6BAAA,GAAG,MAAM;AAE5C,SAASC,+BAA+BA,CAC3CC,UAA2B,EAC3BC,QAAuC,EACV;EAC7BA,QAAQ,GAAG,IAAAC,gBAAS,EAACD,QAAQ,CAAC;EAE9B,IAAID,UAAU,KAAK,KAAK,EAAE;IACtB,OAAOC,QAAQ;EACnB;EACA,IAAIE,KAAK,CAACC,OAAO,CAACH,QAAQ,CAAC,EAAE;IACzB,OAAOA,QAAQ,CAACI,GAAG,CAACC,IAAI,IAAIP,+BAA+B,CAACC,UAAU,EAAEM,IAAI,CAAC,CAAC;EAClF,CAAC,MAAM,IAAI,OAAOL,QAAQ,KAAK,QAAQ,EAAE;IACrC,IAAMM,GAAQ,GAAG,CAAC,CAAC;IACnBC,MAAM,CAACC,OAAO,CAACR,QAAQ,CAAC,CAACS,OAAO,CAAC,CAAC,CAACC,CAAC,EAAEC,CAAC,CAAC,KAAK;MACzC,IAAID,CAAC,KAAKX,UAAU,EAAE;QAClBO,GAAG,CAACM,GAAG,GAAGD,CAAC;MACf,CAAC,MAAM;QACH,IAAID,CAAC,CAACG,UAAU,CAAC,GAAG,CAAC,EAAE;UACnBP,GAAG,CAACI,CAAC,CAAC,GAAGZ,+BAA+B,CAACC,UAAU,EAAEY,CAAQ,CAAC;QAClE,CAAC,MAAM;UACHL,GAAG,CAACI,CAAC,CAAC,GAAGC,CAAC;QACd;MACJ;IACJ,CAAC,CAAC;IACF,OAAOL,GAAG;EACd,CAAC,MAAM;IACH,OAAON,QAAQ;EACnB;AACJ;AAGO,SAASc,mBAAmBA,CAC/BC,MAA+C,EAC/CC,eAA4C,EAC9C;EACE,IAAMjB,UAAU,GAAG,IAAAkB,2CAA2B,EAACF,MAAM,CAAChB,UAAU,CAAQ;EACxE,IAAMmB,aAA8C,GAAG;IACnDC,KAAK,EAAEH,eAAe;IACtBI,aAAa,EAAEtB,+BAA+B,CAC1CC,UAAU,EACViB,eAAe,CAAChB,QACpB,CAAC;IACDqB,SAAS,EAAEC,eAAe,CAACN,eAAe,CAACO,IAAI;EACnD,CAAC;EACD,OAAOL,aAAa;AACxB;AAAC;AAGM,SAASM,gBAAgBA,CAC5BC,OAAY,EACa;EACzBA,OAAO,GAAG,IAAAxB,gBAAS,EAACwB,OAAO,CAAC;EAC5B,IAAKA,OAAO,CAAS5B,6BAA6B,CAAC,EAAE;IACjD,IAAM6B,KAAK,GAAID,OAAO,CAAS5B,6BAA6B,CAAC;IAC7D,OAAQ4B,OAAO,CAAS5B,6BAA6B,CAAC;IACrD4B,OAAO,CAASb,GAAG,GAAGc,KAAK;EAChC,CAAC,MAAM;IACH,OAAQD,OAAO,CAASb,GAAG;EAC/B;EACA,OAAOa,OAAO
;AAClB;AAEO,SAASE,gBAAgBA,CAC5BF,OAAkC,EAC/B;EACHA,OAAO,GAAG,IAAAxB,gBAAS,EAACwB,OAAO,CAAC;EAC5B,IAAKA,OAAO,CAASb,GAAG,EAAE;IACtB,IAAMc,KAAK,GAAID,OAAO,CAASb,GAAG;IAClC,OAAQa,OAAO,CAASb,GAAG;IAC1Ba,OAAO,CAAS5B,6BAA6B,CAAC,GAAG6B,KAAK;EAC3D;EACA,OAAOD,OAAO;AAClB;AAEO,SAASH,eAAeA,CAC3BC,IAAqC,EAC5B;EACT,IAAMjB,GAAc,GAAG,CAAC,CAAC;EACzBiB,IAAI,CAACd,OAAO,CAACmB,QAAQ,IAAI;IACrB,IAAM,CAACC,GAAG,EAAEC,SAAS,CAAC,GAAGvB,MAAM,CAACC,OAAO,CAACoB,QAAQ,CAAC,CAAC,CAAC,CAAC;IACpD,IAAMG,QAAQ,GAAGF,GAAG,KAAK,KAAK,GAAGhC,6BAA6B,GAAGgC,GAAG;IACpE,IAAMG,cAAc,GAAGF,SAAS,KAAK,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC;IACnDxB,GAAG,CAACyB,QAAQ,CAAC,GAAGC,cAAc;EAClC,CAAC,CAAC;EACF,OAAO1B,GAAG;AACd;AAEO,SAAS2B,mBAAmBA,CAACC,KAAe,EAAU;EACzD,OAAOA,KAAK,CAACC,IAAI,CAAC,GAAG,CAAC;AAC1B","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-mongodb/mongodb-types.js b/dist/cjs/plugins/storage-mongodb/mongodb-types.js deleted file mode 100644 index 598b5703aa0..00000000000 --- a/dist/cjs/plugins/storage-mongodb/mongodb-types.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=mongodb-types.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-mongodb/mongodb-types.js.map b/dist/cjs/plugins/storage-mongodb/mongodb-types.js.map deleted file mode 100644 index d3ed6f50624..00000000000 --- a/dist/cjs/plugins/storage-mongodb/mongodb-types.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"mongodb-types.js","names":[],"sources":["../../../../src/plugins/storage-mongodb/mongodb-types.ts"],"sourcesContent":["import type {\n Filter as MongoQueryFilter,\n Sort as MongoSort,\n TransactionOptions\n} from 'mongodb';\nimport type {\n FilledMangoQuery, RxDocumentData\n} from '../../types/index.d.ts';\nexport type MongoQuerySelector = MongoQueryFilter;\nexport type MongoDBDatabaseSettings = {\n /**\n * MongoDB ConnectionString\n * Example: mongodb://localhost:\n */\n connection: string | 'mongodb://localhost:27017';\n transactionOptions?: 
TransactionOptions;\n};\n\nexport type MongoDBPreparedQuery = {\n query: FilledMangoQuery;\n mongoSelector: MongoQuerySelector>;\n mongoSort: MongoSort;\n};\n\nexport type MongoDBSettings = {};\nexport type MongoDBStorageInternals = {};\nexport type RxStorageMongoDBInstanceCreationOptions = {};\nexport type RxStorageMongoDBSettings = {};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-mongodb/rx-storage-instance-mongodb.js b/dist/cjs/plugins/storage-mongodb/rx-storage-instance-mongodb.js deleted file mode 100644 index b5204794998..00000000000 --- a/dist/cjs/plugins/storage-mongodb/rx-storage-instance-mongodb.js +++ /dev/null @@ -1,330 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxStorageInstanceMongoDB = void 0; -exports.createMongoDBStorageInstance = createMongoDBStorageInstance; -var _rxjs = require("rxjs"); -var _rxSchemaHelper = require("../../rx-schema-helper.js"); -var _index = require("../../plugins/utils/index.js"); -var _mongodb = require("mongodb"); -var _rxStorageHelper = require("../../rx-storage-helper.js"); -var _mongodbHelper = require("./mongodb-helper.js"); -var RxStorageInstanceMongoDB = exports.RxStorageInstanceMongoDB = /*#__PURE__*/function () { - // public mongoChangeStream?: MongoChangeStream>; - - /** - * Closing the connection must not happen when - * an operation is running, otherwise we get an error. - * So we store all running operations here so that - * they can be awaited. 
- */ - - /** - * We use this to be able to still fetch - * the objectId after transforming the document from mongo-style (with _id) - * to RxDB - */ - - function RxStorageInstanceMongoDB(storage, databaseName, collectionName, schema, internals, options, settings) { - this.changes$ = new _rxjs.Subject(); - this.runningOperations = new _rxjs.BehaviorSubject(0); - this.writeQueue = _index.PROMISE_RESOLVE_VOID; - this.mongoObjectIdCache = new WeakMap(); - this.storage = storage; - this.databaseName = databaseName; - this.collectionName = collectionName; - this.schema = schema; - this.internals = internals; - this.options = options; - this.settings = settings; - if (this.schema.attachments) { - throw new Error('attachments not supported in mongodb storage, make a PR if you need that'); - } - this.primaryPath = (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(this.schema.primaryKey); - this.inMongoPrimaryPath = this.primaryPath === '_id' ? _mongodbHelper.MONGO_ID_SUBSTITUTE_FIELDNAME : this.primaryPath; - this.mongoClient = new _mongodb.MongoClient(storage.databaseSettings.connection); - this.mongoDatabase = this.mongoClient.db(databaseName + '-v' + this.schema.version); - var indexes = (this.schema.indexes ? this.schema.indexes.slice() : []).map(index => { - var arIndex = (0, _index.isMaybeReadonlyArray)(index) ? index.slice(0) : [index]; - return arIndex; - }); - indexes.push([this.inMongoPrimaryPath]); - this.mongoCollectionPromise = this.mongoDatabase.createCollection(collectionName).then(async mongoCollection => { - await mongoCollection.createIndexes(indexes.map(index => { - var mongoIndex = {}; - index.forEach(field => mongoIndex[field] = 1); - return { - name: (0, _mongodbHelper.getMongoDBIndexName)(index), - key: mongoIndex - }; - })); - - /** - * TODO in a setup where multiple servers run node.js - * processes that use the mongodb storage, we should propagate - * events by listening to the mongodb changestream. - * This maybe should be a premium feature. 
- */ - // this.mongoChangeStream = mongoCollection.watch( - // undefined, { - // batchSize: 100 - // } - // ).on('change', change => { - - // const eventBulkId = randomCouchString(10); - // const newDocData: RxDocumentData = (change as any).fullDocument; - // const documentId = newDocData[this.primaryPath] as any; - - // const eventBulk: EventBulk>, RxStorageDefaultCheckpoint> = { - // checkpoint: { - // id: newDocData[this.primaryPath] as any, - // lwt: newDocData._meta.lwt - // }, - // context: 'mongodb-write', - // id: eventBulkId, - // events: [{ - // documentData: newDocData, - // documentId, - // operation: 'INSERT', - // previousDocumentData: undefined, - // }], - // startTime: now(), - // endTime: now() - // }; - - // this.changes$.next(eventBulk); - // }); - - return mongoCollection; - }); - } - - /** - * Bulk writes on the mongodb storage. - * Notice that MongoDB does not support cross-document transactions - * so we have to do a update-if-previous-is-correct like operations. - * (Similar to what RxDB does with the revision system) - */ - var _proto = RxStorageInstanceMongoDB.prototype; - _proto.bulkWrite = function bulkWrite(documentWrites, context) { - this.writeQueue = this.writeQueue.then(async () => { - this.runningOperations.next(this.runningOperations.getValue() + 1); - var mongoCollection = await this.mongoCollectionPromise; - if (this.closed) { - return Promise.reject(new Error('already closed')); - } - var primaryPath = this.primaryPath; - var ret = { - success: [], - error: [] - }; - var docIds = documentWrites.map(d => d.document[primaryPath]); - var documentStates = await this.findDocumentsById(docIds, true); - var documentStatesMap = new Map(); - documentStates.forEach(doc => { - var docId = doc[primaryPath]; - documentStatesMap.set(docId, doc); - }); - var categorized = (0, _rxStorageHelper.categorizeBulkWriteRows)(this, primaryPath, documentStatesMap, documentWrites, context); - var changeByDocId = new Map(); - 
categorized.eventBulk.events.forEach(change => { - changeByDocId.set(change.documentId, change); - }); - ret.error = categorized.errors; - - /** - * Reset the event bulk because - * conflicts can still appear after the categorization - */ - var eventBulk = categorized.eventBulk; - eventBulk.events = []; - await Promise.all([ - /** - * Inserts - * @link https://sparkbyexamples.com/mongodb/mongodb-insert-if-not-exists/ - */ - Promise.all(categorized.bulkInsertDocs.map(async writeRow => { - var docId = writeRow.document[primaryPath]; - var writeResult = await mongoCollection.findOneAndUpdate({ - [this.inMongoPrimaryPath]: docId - }, { - $setOnInsert: (0, _mongodbHelper.swapRxDocToMongo)(writeRow.document) - }, { - upsert: true, - includeResultMetadata: true - }); - if (writeResult.value) { - // had insert conflict - var conflictError = { - status: 409, - documentId: docId, - writeRow, - documentInDb: (0, _mongodbHelper.swapMongoToRxDoc)((0, _index.ensureNotFalsy)(writeResult.value)), - isError: true - }; - ret.error.push(conflictError); - } else { - var event = changeByDocId.get(docId); - if (event) { - eventBulk.events.push(event); - } - ret.success.push(writeRow.document); - } - })), - /** - * Updates - */ - Promise.all(categorized.bulkUpdateDocs.map(async writeRow => { - var docId = writeRow.document[primaryPath]; - var writeResult = await mongoCollection.findOneAndReplace({ - [this.inMongoPrimaryPath]: docId, - _rev: (0, _index.ensureNotFalsy)(writeRow.previous)._rev - }, (0, _mongodbHelper.swapRxDocToMongo)(writeRow.document), { - includeResultMetadata: true, - upsert: false, - returnDocument: 'before' - }); - if (!writeResult.ok) { - var currentDocState = await this.findDocumentsById([docId], true); - var currentDoc = currentDocState[0]; - // had insert conflict - var conflictError = { - status: 409, - documentId: docId, - writeRow, - documentInDb: (0, _index.ensureNotFalsy)(currentDoc), - isError: true - }; - ret.error.push(conflictError); - } else { - var 
event = (0, _index.getFromMapOrThrow)(changeByDocId, docId); - eventBulk.events.push(event); - ret.success.push(writeRow.document); - } - }))]); - if (categorized.eventBulk.events.length > 0) { - var lastState = (0, _index.ensureNotFalsy)(categorized.newestRow).document; - categorized.eventBulk.checkpoint = { - id: lastState[primaryPath], - lwt: lastState._meta.lwt - }; - categorized.eventBulk.endTime = (0, _index.now)(); - this.changes$.next(categorized.eventBulk); - } - this.runningOperations.next(this.runningOperations.getValue() - 1); - return ret; - }); - return this.writeQueue; - }; - _proto.findDocumentsById = async function findDocumentsById(docIds, withDeleted, session) { - this.runningOperations.next(this.runningOperations.getValue() + 1); - var mongoCollection = await this.mongoCollectionPromise; - var primaryPath = this.primaryPath; - var plainQuery = { - [primaryPath]: { - $in: docIds - } - }; - if (!withDeleted) { - plainQuery._deleted = false; - } - var result = []; - var queryResult = await mongoCollection.find(plainQuery, { - session - }).toArray(); - queryResult.forEach(row => { - result.push((0, _mongodbHelper.swapMongoToRxDoc)(row)); - }); - this.runningOperations.next(this.runningOperations.getValue() - 1); - return result; - }; - _proto.query = async function query(originalPreparedQuery) { - var preparedQuery = (0, _mongodbHelper.prepareMongoDBQuery)(this.schema, originalPreparedQuery.query); - this.runningOperations.next(this.runningOperations.getValue() + 1); - await this.writeQueue; - var mongoCollection = await this.mongoCollectionPromise; - var query = mongoCollection.find(preparedQuery.mongoSelector); - if (preparedQuery.query.skip) { - query = query.skip(preparedQuery.query.skip); - } - if (preparedQuery.query.limit) { - query = query.limit(preparedQuery.query.limit); - } - if (preparedQuery.query.sort) { - query = query.sort(preparedQuery.mongoSort); - } - var resultDocs = await query.toArray(); - 
this.runningOperations.next(this.runningOperations.getValue() - 1); - return { - documents: resultDocs.map(d => (0, _mongodbHelper.swapMongoToRxDoc)(d)) - }; - }; - _proto.count = async function count(originalPreparedQuery) { - var preparedQuery = (0, _mongodbHelper.prepareMongoDBQuery)(this.schema, originalPreparedQuery.query); - this.runningOperations.next(this.runningOperations.getValue() + 1); - await this.writeQueue; - var mongoCollection = await this.mongoCollectionPromise; - var count = await mongoCollection.countDocuments(preparedQuery.mongoSelector); - this.runningOperations.next(this.runningOperations.getValue() - 1); - return { - count, - mode: 'fast' - }; - }; - _proto.cleanup = async function cleanup(minimumDeletedTime) { - this.runningOperations.next(this.runningOperations.getValue() + 1); - var mongoCollection = await this.mongoCollectionPromise; - var maxDeletionTime = (0, _index.now)() - minimumDeletedTime; - await mongoCollection.deleteMany({ - _deleted: true, - '_meta.lwt': { - $lt: maxDeletionTime - } - }); - this.runningOperations.next(this.runningOperations.getValue() - 1); - return true; - }; - _proto.getAttachmentData = async function getAttachmentData(_documentId, _attachmentId, _digest) { - await this.mongoCollectionPromise; - throw new Error('attachments not implemented, make a PR'); - }; - _proto.changeStream = function changeStream() { - return this.changes$; - }; - _proto.remove = async function remove() { - if (this.closed) { - throw new Error('already closed'); - } - this.runningOperations.next(this.runningOperations.getValue() + 1); - var mongoCollection = await this.mongoCollectionPromise; - await mongoCollection.drop(); - this.runningOperations.next(this.runningOperations.getValue() - 1); - await this.close(); - }; - _proto.close = async function close() { - // TODO without this next-tick we have random fails in the tests - await (0, _index.requestIdlePromise)(200); - if (this.closed) { - return this.closed; - } - this.closed = 
(async () => { - await this.mongoCollectionPromise; - await (0, _rxjs.firstValueFrom)(this.runningOperations.pipe((0, _rxjs.filter)(c => c === 0))); - // await ensureNotFalsy(this.mongoChangeStream).close(); - await this.mongoClient.close(); - })(); - return this.closed; - }; - _proto.conflictResultionTasks = function conflictResultionTasks() { - return new _rxjs.Subject(); - }; - _proto.resolveConflictResultionTask = async function resolveConflictResultionTask(_taskSolution) {}; - return RxStorageInstanceMongoDB; -}(); -function createMongoDBStorageInstance(storage, params, settings) { - var instance = new RxStorageInstanceMongoDB(storage, params.databaseName, params.collectionName, params.schema, {}, params.options, settings); - return Promise.resolve(instance); -} -//# sourceMappingURL=rx-storage-instance-mongodb.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-mongodb/rx-storage-instance-mongodb.js.map b/dist/cjs/plugins/storage-mongodb/rx-storage-instance-mongodb.js.map deleted file mode 100644 index 75cd56a590f..00000000000 --- a/dist/cjs/plugins/storage-mongodb/rx-storage-instance-mongodb.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"rx-storage-instance-mongodb.js","names":["_rxjs","require","_rxSchemaHelper","_index","_mongodb","_rxStorageHelper","_mongodbHelper","RxStorageInstanceMongoDB","exports","storage","databaseName","collectionName","schema","internals","options","settings","changes$","Subject","runningOperations","BehaviorSubject","writeQueue","PROMISE_RESOLVE_VOID","mongoObjectIdCache","WeakMap","attachments","Error","primaryPath","getPrimaryFieldOfPrimaryKey","primaryKey","inMongoPrimaryPath","MONGO_ID_SUBSTITUTE_FIELDNAME","mongoClient","MongoClient","databaseSettings","connection","mongoDatabase","db","version","indexes","slice","map","index","arIndex","isMaybeReadonlyArray","push","mongoCollectionPromise","createCollection","then","mongoCollection","createIndexes","mongoIndex","forEach","field","name","getMongoDBIndexName","key","_proto","prototype","bulkWrite","documentWrites","context","next","getValue","closed","Promise","reject","ret","success","error","docIds","d","document","documentStates","findDocumentsById","documentStatesMap","Map","doc","docId","set","categorized","categorizeBulkWriteRows","changeByDocId","eventBulk","events","change","documentId","errors","all","bulkInsertDocs","writeRow","writeResult","findOneAndUpdate","$setOnInsert","swapRxDocToMongo","upsert","includeResultMetadata","value","conflictError","status","documentInDb","swapMongoToRxDoc","ensureNotFalsy","isError","event","get","bulkUpdateDocs","findOneAndReplace","_rev","previous","returnDocument","ok","currentDocState","currentDoc","getFromMapOrThrow","length","lastState","newestRow","checkpoint","id","lwt","_meta","endTime","now","withDeleted","session","plainQuery","$in","_deleted","result","queryResult","find","toArray","row","query","originalPreparedQuery","preparedQuery","prepareMongoDBQuery","mongoSelector","skip","limit","sort","mongoSort","resultDocs","documents","count","countDocuments","mode","cleanup","minimumDeletedTime","maxDeletionTime","deleteMany","$lt","getAttachm
entData","_documentId","_attachmentId","_digest","changeStream","remove","drop","close","requestIdlePromise","firstValueFrom","pipe","filter","c","conflictResultionTasks","resolveConflictResultionTask","_taskSolution","createMongoDBStorageInstance","params","instance","resolve"],"sources":["../../../../src/plugins/storage-mongodb/rx-storage-instance-mongodb.ts"],"sourcesContent":["import {\n BehaviorSubject,\n Observable,\n Subject,\n filter,\n firstValueFrom\n} from 'rxjs';\nimport { getPrimaryFieldOfPrimaryKey } from '../../rx-schema-helper.ts';\nimport type {\n BulkWriteRow,\n EventBulk,\n PreparedQuery,\n RxConflictResultionTask,\n RxConflictResultionTaskSolution,\n RxDocumentData,\n RxJsonSchema,\n RxStorageBulkWriteResponse,\n RxStorageChangeEvent,\n RxStorageCountResult,\n RxStorageDefaultCheckpoint,\n RxStorageInstance,\n RxStorageInstanceCreationParams,\n RxStorageQueryResult,\n RxStorageWriteErrorConflict,\n StringKeys\n} from '../../types/index.d.ts';\nimport {\n ensureNotFalsy,\n getFromMapOrThrow,\n isMaybeReadonlyArray,\n now,\n PROMISE_RESOLVE_VOID,\n requestIdlePromise\n} from '../../plugins/utils/index.ts';\nimport {\n MongoDBPreparedQuery,\n MongoDBStorageInternals,\n MongoQuerySelector,\n RxStorageMongoDBInstanceCreationOptions,\n RxStorageMongoDBSettings\n} from './mongodb-types.ts';\nimport { RxStorageMongoDB } from './rx-storage-mongodb.ts';\nimport {\n Db as MongoDatabase,\n Collection as MongoCollection,\n MongoClient,\n ObjectId,\n ClientSession\n} from 'mongodb';\nimport { categorizeBulkWriteRows } from '../../rx-storage-helper.ts';\nimport {\n MONGO_ID_SUBSTITUTE_FIELDNAME,\n getMongoDBIndexName,\n prepareMongoDBQuery,\n swapMongoToRxDoc,\n swapRxDocToMongo\n} from './mongodb-helper.ts';\n\nexport class RxStorageInstanceMongoDB implements RxStorageInstance<\n RxDocType,\n MongoDBStorageInternals,\n RxStorageMongoDBInstanceCreationOptions,\n RxStorageDefaultCheckpoint\n> {\n\n public readonly primaryPath: StringKeys>;\n public readonly 
inMongoPrimaryPath: string;\n public closed?: Promise;\n private readonly changes$: Subject>, RxStorageDefaultCheckpoint>> = new Subject();\n public readonly mongoClient: MongoClient;\n public readonly mongoDatabase: MongoDatabase;\n public readonly mongoCollectionPromise: Promise | any>>;\n // public mongoChangeStream?: MongoChangeStream>;\n\n\n /**\n * Closing the connection must not happen when\n * an operation is running, otherwise we get an error.\n * So we store all running operations here so that\n * they can be awaited.\n */\n public readonly runningOperations = new BehaviorSubject(0);\n public writeQueue: Promise = PROMISE_RESOLVE_VOID;\n\n /**\n * We use this to be able to still fetch\n * the objectId after transforming the document from mongo-style (with _id)\n * to RxDB\n */\n public readonly mongoObjectIdCache = new WeakMap, ObjectId>();\n\n constructor(\n public readonly storage: RxStorageMongoDB,\n public readonly databaseName: string,\n public readonly collectionName: string,\n public readonly schema: Readonly>>,\n public readonly internals: MongoDBStorageInternals,\n public readonly options: Readonly,\n public readonly settings: RxStorageMongoDBSettings\n ) {\n if (this.schema.attachments) {\n throw new Error('attachments not supported in mongodb storage, make a PR if you need that');\n }\n this.primaryPath = getPrimaryFieldOfPrimaryKey(this.schema.primaryKey);\n this.inMongoPrimaryPath = this.primaryPath === '_id' ? MONGO_ID_SUBSTITUTE_FIELDNAME : this.primaryPath;\n this.mongoClient = new MongoClient(storage.databaseSettings.connection);\n this.mongoDatabase = this.mongoClient.db(databaseName + '-v' + this.schema.version);\n\n const indexes = (this.schema.indexes ? this.schema.indexes.slice() : []).map(index => {\n const arIndex = isMaybeReadonlyArray(index) ? 
index.slice(0) : [index];\n return arIndex;\n });\n indexes.push([this.inMongoPrimaryPath]);\n\n this.mongoCollectionPromise = this.mongoDatabase.createCollection(collectionName)\n .then(async (mongoCollection) => {\n await mongoCollection.createIndexes(\n indexes.map(index => {\n const mongoIndex: any = {};\n index.forEach(field => mongoIndex[field] = 1);\n return { name: getMongoDBIndexName(index), key: mongoIndex };\n })\n );\n\n /**\n * TODO in a setup where multiple servers run node.js\n * processes that use the mongodb storage, we should propagate\n * events by listening to the mongodb changestream.\n * This maybe should be a premium feature.\n */\n // this.mongoChangeStream = mongoCollection.watch(\n // undefined, {\n // batchSize: 100\n // }\n // ).on('change', change => {\n\n\n // const eventBulkId = randomCouchString(10);\n // const newDocData: RxDocumentData = (change as any).fullDocument;\n // const documentId = newDocData[this.primaryPath] as any;\n\n // const eventBulk: EventBulk>, RxStorageDefaultCheckpoint> = {\n // checkpoint: {\n // id: newDocData[this.primaryPath] as any,\n // lwt: newDocData._meta.lwt\n // },\n // context: 'mongodb-write',\n // id: eventBulkId,\n // events: [{\n // documentData: newDocData,\n // documentId,\n // operation: 'INSERT',\n // previousDocumentData: undefined,\n // }],\n // startTime: now(),\n // endTime: now()\n // };\n\n // this.changes$.next(eventBulk);\n // });\n\n\n return mongoCollection;\n });\n\n\n }\n\n /**\n * Bulk writes on the mongodb storage.\n * Notice that MongoDB does not support cross-document transactions\n * so we have to do a update-if-previous-is-correct like operations.\n * (Similar to what RxDB does with the revision system)\n */\n bulkWrite(\n documentWrites: BulkWriteRow[],\n context: string\n ): Promise> {\n\n this.writeQueue = this.writeQueue.then(async () => {\n this.runningOperations.next(this.runningOperations.getValue() + 1);\n\n const mongoCollection = await 
this.mongoCollectionPromise;\n if (this.closed) {\n return Promise.reject(new Error('already closed'));\n }\n const primaryPath = this.primaryPath;\n const ret: RxStorageBulkWriteResponse = {\n success: [],\n error: []\n };\n\n\n const docIds = documentWrites.map(d => (d.document as any)[primaryPath]);\n const documentStates = await this.findDocumentsById(\n docIds,\n true\n );\n const documentStatesMap = new Map();\n documentStates.forEach(doc => {\n const docId = doc[primaryPath];\n documentStatesMap.set(docId, doc);\n });\n const categorized = categorizeBulkWriteRows(\n this,\n primaryPath as any,\n documentStatesMap,\n documentWrites,\n context\n );\n\n const changeByDocId = new Map>>();\n categorized.eventBulk.events.forEach(change => {\n changeByDocId.set(change.documentId, change);\n });\n\n\n ret.error = categorized.errors;\n\n /**\n * Reset the event bulk because\n * conflicts can still appear after the categorization\n */\n const eventBulk = categorized.eventBulk;\n eventBulk.events = [];\n\n await Promise.all([\n /**\n * Inserts\n * @link https://sparkbyexamples.com/mongodb/mongodb-insert-if-not-exists/\n */\n Promise.all(\n categorized.bulkInsertDocs.map(async (writeRow) => {\n const docId: string = writeRow.document[primaryPath] as any;\n const writeResult = await mongoCollection.findOneAndUpdate(\n {\n [this.inMongoPrimaryPath]: docId\n },\n {\n $setOnInsert: swapRxDocToMongo(writeRow.document)\n },\n {\n upsert: true,\n includeResultMetadata: true\n }\n );\n if (writeResult.value) {\n // had insert conflict\n const conflictError: RxStorageWriteErrorConflict = {\n status: 409,\n documentId: docId,\n writeRow,\n documentInDb: swapMongoToRxDoc(ensureNotFalsy(writeResult.value)),\n isError: true\n };\n ret.error.push(conflictError);\n } else {\n const event = changeByDocId.get(docId);\n if (event) {\n eventBulk.events.push(event);\n }\n ret.success.push(writeRow.document);\n }\n })\n ),\n /**\n * Updates\n */\n Promise.all(\n 
categorized.bulkUpdateDocs.map(async (writeRow) => {\n const docId = writeRow.document[primaryPath] as string;\n const writeResult = await mongoCollection.findOneAndReplace(\n {\n [this.inMongoPrimaryPath]: docId,\n _rev: ensureNotFalsy(writeRow.previous)._rev\n },\n swapRxDocToMongo(writeRow.document),\n {\n includeResultMetadata: true,\n upsert: false,\n returnDocument: 'before'\n }\n );\n if (!writeResult.ok) {\n const currentDocState = await this.findDocumentsById([docId], true);\n const currentDoc = currentDocState[0];\n // had insert conflict\n const conflictError: RxStorageWriteErrorConflict = {\n status: 409,\n documentId: docId,\n writeRow,\n documentInDb: ensureNotFalsy(currentDoc),\n isError: true\n };\n ret.error.push(conflictError);\n } else {\n const event = getFromMapOrThrow(changeByDocId, docId);\n eventBulk.events.push(event);\n ret.success.push(writeRow.document);\n }\n\n })\n )\n ]);\n\n if (categorized.eventBulk.events.length > 0) {\n const lastState = ensureNotFalsy(categorized.newestRow).document;\n categorized.eventBulk.checkpoint = {\n id: lastState[primaryPath],\n lwt: lastState._meta.lwt\n };\n categorized.eventBulk.endTime = now();\n this.changes$.next(categorized.eventBulk);\n }\n\n this.runningOperations.next(this.runningOperations.getValue() - 1);\n return ret;\n });\n return this.writeQueue;\n\n }\n\n async findDocumentsById(\n docIds: string[],\n withDeleted: boolean,\n session?: ClientSession\n ): Promise[]> {\n this.runningOperations.next(this.runningOperations.getValue() + 1);\n const mongoCollection = await this.mongoCollectionPromise;\n const primaryPath = this.primaryPath;\n\n const plainQuery: MongoQuerySelector = {\n [primaryPath]: {\n $in: docIds\n }\n };\n if (!withDeleted) {\n plainQuery._deleted = false;\n }\n const result: RxDocumentData[] = [];\n const queryResult = await mongoCollection.find(\n plainQuery,\n {\n session\n }\n ).toArray();\n queryResult.forEach(row => {\n result.push(\n swapMongoToRxDoc(\n row as any\n 
)\n );\n });\n this.runningOperations.next(this.runningOperations.getValue() - 1);\n return result;\n }\n\n async query(\n originalPreparedQuery: PreparedQuery\n ): Promise> {\n const preparedQuery = prepareMongoDBQuery(this.schema, originalPreparedQuery.query);\n\n this.runningOperations.next(this.runningOperations.getValue() + 1);\n await this.writeQueue;\n const mongoCollection = await this.mongoCollectionPromise;\n\n let query = mongoCollection.find(preparedQuery.mongoSelector);\n if (preparedQuery.query.skip) {\n query = query.skip(preparedQuery.query.skip);\n }\n if (preparedQuery.query.limit) {\n query = query.limit(preparedQuery.query.limit);\n }\n if (preparedQuery.query.sort) {\n query = query.sort(preparedQuery.mongoSort);\n }\n const resultDocs = await query.toArray();\n this.runningOperations.next(this.runningOperations.getValue() - 1);\n return {\n documents: resultDocs.map(d => swapMongoToRxDoc(d))\n };\n }\n\n async count(\n originalPreparedQuery: PreparedQuery\n ): Promise {\n const preparedQuery = prepareMongoDBQuery(this.schema, originalPreparedQuery.query);\n this.runningOperations.next(this.runningOperations.getValue() + 1);\n await this.writeQueue;\n const mongoCollection = await this.mongoCollectionPromise;\n const count = await mongoCollection.countDocuments(preparedQuery.mongoSelector);\n this.runningOperations.next(this.runningOperations.getValue() - 1);\n return {\n count,\n mode: 'fast'\n };\n }\n\n async cleanup(minimumDeletedTime: number): Promise {\n this.runningOperations.next(this.runningOperations.getValue() + 1);\n const mongoCollection = await this.mongoCollectionPromise;\n const maxDeletionTime = now() - minimumDeletedTime;\n await mongoCollection.deleteMany({\n _deleted: true,\n '_meta.lwt': {\n $lt: maxDeletionTime\n }\n });\n this.runningOperations.next(this.runningOperations.getValue() - 1);\n return true;\n }\n\n async getAttachmentData(\n _documentId: string,\n _attachmentId: string,\n _digest: string\n ): Promise {\n 
await this.mongoCollectionPromise;\n throw new Error('attachments not implemented, make a PR');\n }\n\n changeStream(): Observable>, RxStorageDefaultCheckpoint>> {\n return this.changes$;\n }\n\n async remove(): Promise {\n if (this.closed) {\n throw new Error('already closed');\n }\n this.runningOperations.next(this.runningOperations.getValue() + 1);\n const mongoCollection = await this.mongoCollectionPromise;\n await mongoCollection.drop();\n this.runningOperations.next(this.runningOperations.getValue() - 1);\n await this.close();\n }\n\n async close(): Promise {\n // TODO without this next-tick we have random fails in the tests\n await requestIdlePromise(200);\n\n if (this.closed) {\n return this.closed;\n }\n this.closed = (async () => {\n await this.mongoCollectionPromise;\n await firstValueFrom(this.runningOperations.pipe(filter(c => c === 0)));\n // await ensureNotFalsy(this.mongoChangeStream).close();\n await this.mongoClient.close();\n })();\n return this.closed;\n }\n\n conflictResultionTasks(): Observable> {\n return new Subject();\n }\n async resolveConflictResultionTask(_taskSolution: RxConflictResultionTaskSolution): Promise { }\n}\n\nexport function createMongoDBStorageInstance(\n storage: RxStorageMongoDB,\n params: RxStorageInstanceCreationParams,\n settings: RxStorageMongoDBSettings\n): Promise> {\n const instance = new RxStorageInstanceMongoDB(\n storage,\n params.databaseName,\n params.collectionName,\n params.schema,\n {},\n params.options,\n settings\n );\n return 
Promise.resolve(instance);\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,KAAA,GAAAC,OAAA;AAOA,IAAAC,eAAA,GAAAD,OAAA;AAmBA,IAAAE,MAAA,GAAAF,OAAA;AAgBA,IAAAG,QAAA,GAAAH,OAAA;AAOA,IAAAI,gBAAA,GAAAJ,OAAA;AACA,IAAAK,cAAA,GAAAL,OAAA;AAM6B,IAEhBM,wBAAwB,GAAAC,OAAA,CAAAD,wBAAA;EAcjC;;EAGA;AACJ;AACA;AACA;AACA;AACA;;EAII;AACJ;AACA;AACA;AACA;;EAGI,SAAAA,yBACoBE,OAAyB,EACzBC,YAAoB,EACpBC,cAAsB,EACtBC,MAAyD,EACzDC,SAAkC,EAClCC,OAA0D,EAC1DC,QAAkC,EACpD;IAAA,KA/BeC,QAAQ,GAAoG,IAAIC,aAAO,CAAC,CAAC;IAAA,KAa1HC,iBAAiB,GAAG,IAAIC,qBAAe,CAAC,CAAC,CAAC;IAAA,KACnDC,UAAU,GAAiBC,2BAAoB;IAAA,KAOtCC,kBAAkB,GAAG,IAAIC,OAAO,CAAsC,CAAC;IAAA,KAGnEd,OAAyB,GAAzBA,OAAyB;IAAA,KACzBC,YAAoB,GAApBA,YAAoB;IAAA,KACpBC,cAAsB,GAAtBA,cAAsB;IAAA,KACtBC,MAAyD,GAAzDA,MAAyD;IAAA,KACzDC,SAAkC,GAAlCA,SAAkC;IAAA,KAClCC,OAA0D,GAA1DA,OAA0D;IAAA,KAC1DC,QAAkC,GAAlCA,QAAkC;IAElD,IAAI,IAAI,CAACH,MAAM,CAACY,WAAW,EAAE;MACzB,MAAM,IAAIC,KAAK,CAAC,0EAA0E,CAAC;IAC/F;IACA,IAAI,CAACC,WAAW,GAAG,IAAAC,2CAA2B,EAAC,IAAI,CAACf,MAAM,CAACgB,UAAU,CAAC;IACtE,IAAI,CAACC,kBAAkB,GAAG,IAAI,CAACH,WAAW,KAAK,KAAK,GAAGI,4CAA6B,GAAG,IAAI,CAACJ,WAAW;IACvG,IAAI,CAACK,WAAW,GAAG,IAAIC,oBAAW,CAACvB,OAAO,CAACwB,gBAAgB,CAACC,UAAU,CAAC;IACvE,IAAI,CAACC,aAAa,GAAG,IAAI,CAACJ,WAAW,CAACK,EAAE,CAAC1B,YAAY,GAAG,IAAI,GAAG,IAAI,CAACE,MAAM,CAACyB,OAAO,CAAC;IAEnF,IAAMC,OAAO,GAAG,CAAC,IAAI,CAAC1B,MAAM,CAAC0B,OAAO,GAAG,IAAI,CAAC1B,MAAM,CAAC0B,OAAO,CAACC,KAAK,CAAC,CAAC,GAAG,EAAE,EAAEC,GAAG,CAACC,KAAK,IAAI;MAClF,IAAMC,OAAO,GAAG,IAAAC,2BAAoB,EAACF,KAAK,CAAC,GAAGA,KAAK,CAACF,KAAK,CAAC,CAAC,CAAC,GAAG,CAACE,KAAK,CAAC;MACtE,OAAOC,OAAO;IAClB,CAAC,CAAC;IACFJ,OAAO,CAACM,IAAI,CAAC,CAAC,IAAI,CAACf,kBAAkB,CAAC,CAAC;IAEvC,IAAI,CAACgB,sBAAsB,GAAG,IAAI,CAACV,aAAa,CAACW,gBAAgB,CAACnC,cAAc,CAAC,CAC5EoC,IAAI,CAAC,MAAOC,eAAe,IAAK;MAC7B,MAAMA,eAAe,CAACC,aAAa,CAC/BX,OAAO,CAACE,GAAG,CAACC,KAAK,IAAI;QACjB,IAAMS,UAAe,GAAG,CAAC,CAAC;QAC1BT,KAAK,CAACU,OAAO,CAACC,KAAK,IAAIF,UAAU,CAACE,KAAK,CAAC,GAAG,CAAC,CAAC;QAC7C,OAAO;UAAEC,IAAI,EAAE,IAAAC,kCAAmB,EAACb,KAAK,CAAC;UAAEc,GAAG,EAAEL;QAAW,CAAC;MAChE,CAAC
,CACL,CAAC;;MAED;AAChB;AACA;AACA;AACA;AACA;MACgB;MACA;MACA;MACA;MACA;;MAGA;MACA;MACA;;MAEA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;;MAEA;MACA;;MAGA,OAAOF,eAAe;IAC1B,CAAC,CAAC;EAGV;;EAEA;AACJ;AACA;AACA;AACA;AACA;EALI,IAAAQ,MAAA,GAAAjD,wBAAA,CAAAkD,SAAA;EAAAD,MAAA,CAMAE,SAAS,GAAT,SAAAA,UACIC,cAAyC,EACzCC,OAAe,EAC+B;IAE9C,IAAI,CAACxC,UAAU,GAAG,IAAI,CAACA,UAAU,CAAC2B,IAAI,CAAC,YAAY;MAC/C,IAAI,CAAC7B,iBAAiB,CAAC2C,IAAI,CAAC,IAAI,CAAC3C,iBAAiB,CAAC4C,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;MAElE,IAAMd,eAAe,GAAG,MAAM,IAAI,CAACH,sBAAsB;MACzD,IAAI,IAAI,CAACkB,MAAM,EAAE;QACb,OAAOC,OAAO,CAACC,MAAM,CAAC,IAAIxC,KAAK,CAAC,gBAAgB,CAAC,CAAC;MACtD;MACA,IAAMC,WAAW,GAAG,IAAI,CAACA,WAAW;MACpC,IAAMwC,GAA0C,GAAG;QAC/CC,OAAO,EAAE,EAAE;QACXC,KAAK,EAAE;MACX,CAAC;MAGD,IAAMC,MAAM,GAAGV,cAAc,CAACnB,GAAG,CAAC8B,CAAC,IAAKA,CAAC,CAACC,QAAQ,CAAS7C,WAAW,CAAC,CAAC;MACxE,IAAM8C,cAAc,GAAG,MAAM,IAAI,CAACC,iBAAiB,CAC/CJ,MAAM,EACN,IACJ,CAAC;MACD,IAAMK,iBAAiB,GAAG,IAAIC,GAAG,CAAC,CAAC;MACnCH,cAAc,CAACrB,OAAO,CAACyB,GAAG,IAAI;QAC1B,IAAMC,KAAK,GAAGD,GAAG,CAAClD,WAAW,CAAC;QAC9BgD,iBAAiB,CAACI,GAAG,CAACD,KAAK,EAAED,GAAG,CAAC;MACrC,CAAC,CAAC;MACF,IAAMG,WAAW,GAAG,IAAAC,wCAAuB,EACvC,IAAI,EACJtD,WAAW,EACXgD,iBAAiB,EACjBf,cAAc,EACdC,OACJ,CAAC;MAED,IAAMqB,aAAa,GAAG,IAAIN,GAAG,CAA0D,CAAC;MACxFI,WAAW,CAACG,SAAS,CAACC,MAAM,CAAChC,OAAO,CAACiC,MAAM,IAAI;QAC3CH,aAAa,CAACH,GAAG,CAACM,MAAM,CAACC,UAAU,EAAED,MAAM,CAAC;MAChD,CAAC,CAAC;MAGFlB,GAAG,CAACE,KAAK,GAAGW,WAAW,CAACO,MAAM;;MAE9B;AACZ;AACA;AACA;MACY,IAAMJ,SAAS,GAAGH,WAAW,CAACG,SAAS;MACvCA,SAAS,CAACC,MAAM,GAAG,EAAE;MAErB,MAAMnB,OAAO,CAACuB,GAAG,CAAC;MACd;AAChB;AACA;AACA;MACgBvB,OAAO,CAACuB,GAAG,CACPR,WAAW,CAACS,cAAc,CAAChD,GAAG,CAAC,MAAOiD,QAAQ,IAAK;QAC/C,IAAMZ,KAAa,GAAGY,QAAQ,CAAClB,QAAQ,CAAC7C,WAAW,CAAQ;QAC3D,IAAMgE,WAAW,GAAG,MAAM1C,eAAe,CAAC2C,gBAAgB,CACtD;UACI,CAAC,IAAI,CAAC9D,kBAAkB,GAAGgD;QAC/B,CAAC,EACD;UACIe,YAAY,EAAE,IAAAC,+BAAgB,EAACJ,QAAQ,CAAClB,QAAQ;QACpD,CAAC,EACD;UACIuB,MAAM,EAAE,IAAI;UACZC,qBAAqB,EAAE;QAC3B,CACJ,CAAC;QACD,IAAIL,WAA
W,CAACM,KAAK,EAAE;UACnB;UACA,IAAMC,aAAqD,GAAG;YAC1DC,MAAM,EAAE,GAAG;YACXb,UAAU,EAAER,KAAK;YACjBY,QAAQ;YACRU,YAAY,EAAE,IAAAC,+BAAgB,EAAC,IAAAC,qBAAc,EAACX,WAAW,CAACM,KAAK,CAAC,CAAC;YACjEM,OAAO,EAAE;UACb,CAAC;UACDpC,GAAG,CAACE,KAAK,CAACxB,IAAI,CAACqD,aAAa,CAAC;QACjC,CAAC,MAAM;UACH,IAAMM,KAAK,GAAGtB,aAAa,CAACuB,GAAG,CAAC3B,KAAK,CAAC;UACtC,IAAI0B,KAAK,EAAE;YACPrB,SAAS,CAACC,MAAM,CAACvC,IAAI,CAAC2D,KAAK,CAAC;UAChC;UACArC,GAAG,CAACC,OAAO,CAACvB,IAAI,CAAC6C,QAAQ,CAAClB,QAAQ,CAAC;QACvC;MACJ,CAAC,CACL,CAAC;MACD;AAChB;AACA;MACgBP,OAAO,CAACuB,GAAG,CACPR,WAAW,CAAC0B,cAAc,CAACjE,GAAG,CAAC,MAAOiD,QAAQ,IAAK;QAC/C,IAAMZ,KAAK,GAAGY,QAAQ,CAAClB,QAAQ,CAAC7C,WAAW,CAAW;QACtD,IAAMgE,WAAW,GAAG,MAAM1C,eAAe,CAAC0D,iBAAiB,CACvD;UACI,CAAC,IAAI,CAAC7E,kBAAkB,GAAGgD,KAAK;UAChC8B,IAAI,EAAE,IAAAN,qBAAc,EAACZ,QAAQ,CAACmB,QAAQ,CAAC,CAACD;QAC5C,CAAC,EACD,IAAAd,+BAAgB,EAACJ,QAAQ,CAAClB,QAAQ,CAAC,EACnC;UACIwB,qBAAqB,EAAE,IAAI;UAC3BD,MAAM,EAAE,KAAK;UACbe,cAAc,EAAE;QACpB,CACJ,CAAC;QACD,IAAI,CAACnB,WAAW,CAACoB,EAAE,EAAE;UACjB,IAAMC,eAAe,GAAG,MAAM,IAAI,CAACtC,iBAAiB,CAAC,CAACI,KAAK,CAAC,EAAE,IAAI,CAAC;UACnE,IAAMmC,UAAU,GAAGD,eAAe,CAAC,CAAC,CAAC;UACrC;UACA,IAAMd,aAAqD,GAAG;YAC1DC,MAAM,EAAE,GAAG;YACXb,UAAU,EAAER,KAAK;YACjBY,QAAQ;YACRU,YAAY,EAAE,IAAAE,qBAAc,EAACW,UAAU,CAAC;YACxCV,OAAO,EAAE;UACb,CAAC;UACDpC,GAAG,CAACE,KAAK,CAACxB,IAAI,CAACqD,aAAa,CAAC;QACjC,CAAC,MAAM;UACH,IAAMM,KAAK,GAAG,IAAAU,wBAAiB,EAAChC,aAAa,EAAEJ,KAAK,CAAC;UACrDK,SAAS,CAACC,MAAM,CAACvC,IAAI,CAAC2D,KAAK,CAAC;UAC5BrC,GAAG,CAACC,OAAO,CAACvB,IAAI,CAAC6C,QAAQ,CAAClB,QAAQ,CAAC;QACvC;MAEJ,CAAC,CACL,CAAC,CACJ,CAAC;MAEF,IAAIQ,WAAW,CAACG,SAAS,CAACC,MAAM,CAAC+B,MAAM,GAAG,CAAC,EAAE;QACzC,IAAMC,SAAS,GAAG,IAAAd,qBAAc,EAACtB,WAAW,CAACqC,SAAS,CAAC,CAAC7C,QAAQ;QAChEQ,WAAW,CAACG,SAAS,CAACmC,UAAU,GAAG;UAC/BC,EAAE,EAAEH,SAAS,CAACzF,WAAW,CAAC;UAC1B6F,GAAG,EAAEJ,SAAS,CAACK,KAAK,CAACD;QACzB,CAAC;QACDxC,WAAW,CAACG,SAAS,CAACuC,OAAO,GAAG,IAAAC,UAAG,EAAC,CAAC;QACrC,IAAI,CAAC1G,QAAQ,CAAC6C,IAAI,CAACkB,WAAW,CAACG,SAAS,CAAC;MAC7C;MAEA,IAAI,CAAChE,iBAAiB,CAAC2C,IAAI,C
AAC,IAAI,CAAC3C,iBAAiB,CAAC4C,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;MAClE,OAAOI,GAAG;IACd,CAAC,CAAC;IACF,OAAO,IAAI,CAAC9C,UAAU;EAE1B,CAAC;EAAAoC,MAAA,CAEKiB,iBAAiB,GAAvB,eAAAA,kBACIJ,MAAgB,EAChBsD,WAAoB,EACpBC,OAAuB,EACa;IACpC,IAAI,CAAC1G,iBAAiB,CAAC2C,IAAI,CAAC,IAAI,CAAC3C,iBAAiB,CAAC4C,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;IAClE,IAAMd,eAAe,GAAG,MAAM,IAAI,CAACH,sBAAsB;IACzD,IAAMnB,WAAW,GAAG,IAAI,CAACA,WAAW;IAEpC,IAAMmG,UAAmC,GAAG;MACxC,CAACnG,WAAW,GAAG;QACXoG,GAAG,EAAEzD;MACT;IACJ,CAAC;IACD,IAAI,CAACsD,WAAW,EAAE;MACdE,UAAU,CAACE,QAAQ,GAAG,KAAK;IAC/B;IACA,IAAMC,MAAmC,GAAG,EAAE;IAC9C,IAAMC,WAAW,GAAG,MAAMjF,eAAe,CAACkF,IAAI,CAC1CL,UAAU,EACV;MACID;IACJ,CACJ,CAAC,CAACO,OAAO,CAAC,CAAC;IACXF,WAAW,CAAC9E,OAAO,CAACiF,GAAG,IAAI;MACvBJ,MAAM,CAACpF,IAAI,CACP,IAAAwD,+BAAgB,EACZgC,GACJ,CACJ,CAAC;IACL,CAAC,CAAC;IACF,IAAI,CAAClH,iBAAiB,CAAC2C,IAAI,CAAC,IAAI,CAAC3C,iBAAiB,CAAC4C,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;IAClE,OAAOkE,MAAM;EACjB,CAAC;EAAAxE,MAAA,CAEK6E,KAAK,GAAX,eAAAA,MACIC,qBAA+C,EACP;IACxC,IAAMC,aAAa,GAAG,IAAAC,kCAAmB,EAAC,IAAI,CAAC5H,MAAM,EAAE0H,qBAAqB,CAACD,KAAK,CAAC;IAEnF,IAAI,CAACnH,iBAAiB,CAAC2C,IAAI,CAAC,IAAI,CAAC3C,iBAAiB,CAAC4C,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;IAClE,MAAM,IAAI,CAAC1C,UAAU;IACrB,IAAM4B,eAAe,GAAG,MAAM,IAAI,CAACH,sBAAsB;IAEzD,IAAIwF,KAAK,GAAGrF,eAAe,CAACkF,IAAI,CAACK,aAAa,CAACE,aAAa,CAAC;IAC7D,IAAIF,aAAa,CAACF,KAAK,CAACK,IAAI,EAAE;MAC1BL,KAAK,GAAGA,KAAK,CAACK,IAAI,CAACH,aAAa,CAACF,KAAK,CAACK,IAAI,CAAC;IAChD;IACA,IAAIH,aAAa,CAACF,KAAK,CAACM,KAAK,EAAE;MAC3BN,KAAK,GAAGA,KAAK,CAACM,KAAK,CAACJ,aAAa,CAACF,KAAK,CAACM,KAAK,CAAC;IAClD;IACA,IAAIJ,aAAa,CAACF,KAAK,CAACO,IAAI,EAAE;MAC1BP,KAAK,GAAGA,KAAK,CAACO,IAAI,CAACL,aAAa,CAACM,SAAS,CAAC;IAC/C;IACA,IAAMC,UAAU,GAAG,MAAMT,KAAK,CAACF,OAAO,CAAC,CAAC;IACxC,IAAI,CAACjH,iBAAiB,CAAC2C,IAAI,CAAC,IAAI,CAAC3C,iBAAiB,CAAC4C,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;IAClE,OAAO;MACHiF,SAAS,EAAED,UAAU,CAACtG,GAAG,CAAC8B,CAAC,IAAI,IAAA8B,+BAAgB,EAAC9B,CAAC,CAAC;IACtD,CAAC;EACL,CAAC;EAAAd,MAAA,CAEKwF,KAAK,GAAX,eAAAA,MACIV,qBAA+C,EAClB;IAC7B,IAAMC,aAAa,GAAG,IAAAC,
kCAAmB,EAAC,IAAI,CAAC5H,MAAM,EAAE0H,qBAAqB,CAACD,KAAK,CAAC;IACnF,IAAI,CAACnH,iBAAiB,CAAC2C,IAAI,CAAC,IAAI,CAAC3C,iBAAiB,CAAC4C,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;IAClE,MAAM,IAAI,CAAC1C,UAAU;IACrB,IAAM4B,eAAe,GAAG,MAAM,IAAI,CAACH,sBAAsB;IACzD,IAAMmG,KAAK,GAAG,MAAMhG,eAAe,CAACiG,cAAc,CAACV,aAAa,CAACE,aAAa,CAAC;IAC/E,IAAI,CAACvH,iBAAiB,CAAC2C,IAAI,CAAC,IAAI,CAAC3C,iBAAiB,CAAC4C,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;IAClE,OAAO;MACHkF,KAAK;MACLE,IAAI,EAAE;IACV,CAAC;EACL,CAAC;EAAA1F,MAAA,CAEK2F,OAAO,GAAb,eAAAA,QAAcC,kBAA0B,EAAoB;IACxD,IAAI,CAAClI,iBAAiB,CAAC2C,IAAI,CAAC,IAAI,CAAC3C,iBAAiB,CAAC4C,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;IAClE,IAAMd,eAAe,GAAG,MAAM,IAAI,CAACH,sBAAsB;IACzD,IAAMwG,eAAe,GAAG,IAAA3B,UAAG,EAAC,CAAC,GAAG0B,kBAAkB;IAClD,MAAMpG,eAAe,CAACsG,UAAU,CAAC;MAC7BvB,QAAQ,EAAE,IAAI;MACd,WAAW,EAAE;QACTwB,GAAG,EAAEF;MACT;IACJ,CAAC,CAAC;IACF,IAAI,CAACnI,iBAAiB,CAAC2C,IAAI,CAAC,IAAI,CAAC3C,iBAAiB,CAAC4C,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;IAClE,OAAO,IAAI;EACf,CAAC;EAAAN,MAAA,CAEKgG,iBAAiB,GAAvB,eAAAA,kBACIC,WAAmB,EACnBC,aAAqB,EACrBC,OAAe,EACA;IACf,MAAM,IAAI,CAAC9G,sBAAsB;IACjC,MAAM,IAAIpB,KAAK,CAAC,wCAAwC,CAAC;EAC7D,CAAC;EAAA+B,MAAA,CAEDoG,YAAY,GAAZ,SAAAA,aAAA,EAAmH;IAC/G,OAAO,IAAI,CAAC5I,QAAQ;EACxB,CAAC;EAAAwC,MAAA,CAEKqG,MAAM,GAAZ,eAAAA,OAAA,EAA8B;IAC1B,IAAI,IAAI,CAAC9F,MAAM,EAAE;MACb,MAAM,IAAItC,KAAK,CAAC,gBAAgB,CAAC;IACrC;IACA,IAAI,CAACP,iBAAiB,CAAC2C,IAAI,CAAC,IAAI,CAAC3C,iBAAiB,CAAC4C,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;IAClE,IAAMd,eAAe,GAAG,MAAM,IAAI,CAACH,sBAAsB;IACzD,MAAMG,eAAe,CAAC8G,IAAI,CAAC,CAAC;IAC5B,IAAI,CAAC5I,iBAAiB,CAAC2C,IAAI,CAAC,IAAI,CAAC3C,iBAAiB,CAAC4C,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;IAClE,MAAM,IAAI,CAACiG,KAAK,CAAC,CAAC;EACtB,CAAC;EAAAvG,MAAA,CAEKuG,KAAK,GAAX,eAAAA,MAAA,EAA6B;IACzB;IACA,MAAM,IAAAC,yBAAkB,EAAC,GAAG,CAAC;IAE7B,IAAI,IAAI,CAACjG,MAAM,EAAE;MACb,OAAO,IAAI,CAACA,MAAM;IACtB;IACA,IAAI,CAACA,MAAM,GAAG,CAAC,YAAY;MACvB,MAAM,IAAI,CAAClB,sBAAsB;MACjC,MAAM,IAAAoH,oBAAc,EAAC,IAAI,CAAC/I,iBAAiB,CAACgJ,IAAI,CAAC,IAAAC,YAAM,EAACC,CAAC,IAAIA,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;MACvE;MA
CA,MAAM,IAAI,CAACrI,WAAW,CAACgI,KAAK,CAAC,CAAC;IAClC,CAAC,EAAE,CAAC;IACJ,OAAO,IAAI,CAAChG,MAAM;EACtB,CAAC;EAAAP,MAAA,CAED6G,sBAAsB,GAAtB,SAAAA,uBAAA,EAAyE;IACrE,OAAO,IAAIpJ,aAAO,CAAC,CAAC;EACxB,CAAC;EAAAuC,MAAA,CACK8G,4BAA4B,GAAlC,eAAAA,6BAAmCC,aAAyD,EAAiB,CAAE,CAAC;EAAA,OAAAhK,wBAAA;AAAA;AAG7G,SAASiK,4BAA4BA,CACxC/J,OAAyB,EACzBgK,MAA2F,EAC3F1J,QAAkC,EACU;EAC5C,IAAM2J,QAAQ,GAAG,IAAInK,wBAAwB,CACzCE,OAAO,EACPgK,MAAM,CAAC/J,YAAY,EACnB+J,MAAM,CAAC9J,cAAc,EACrB8J,MAAM,CAAC7J,MAAM,EACb,CAAC,CAAC,EACF6J,MAAM,CAAC3J,OAAO,EACdC,QACJ,CAAC;EACD,OAAOiD,OAAO,CAAC2G,OAAO,CAACD,QAAQ,CAAC;AACpC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-mongodb/rx-storage-mongodb.js b/dist/cjs/plugins/storage-mongodb/rx-storage-mongodb.js deleted file mode 100644 index 7fd75589c05..00000000000 --- a/dist/cjs/plugins/storage-mongodb/rx-storage-mongodb.js +++ /dev/null @@ -1,29 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxStorageMongoDB = void 0; -exports.getRxStorageMongoDB = getRxStorageMongoDB; -var _rxStorageHelper = require("../../rx-storage-helper.js"); -var _mongodbHelper = require("./mongodb-helper.js"); -var _rxStorageInstanceMongodb = require("./rx-storage-instance-mongodb.js"); -var _utilsRxdbVersion = require("../utils/utils-rxdb-version.js"); -var RxStorageMongoDB = exports.RxStorageMongoDB = /*#__PURE__*/function () { - function RxStorageMongoDB(databaseSettings) { - this.name = _mongodbHelper.RX_STORAGE_NAME_MONGODB; - this.rxdbVersion = _utilsRxdbVersion.RXDB_VERSION; - this.databaseSettings = databaseSettings; - } - var _proto = RxStorageMongoDB.prototype; - _proto.createStorageInstance = function createStorageInstance(params) { - (0, _rxStorageHelper.ensureRxStorageInstanceParamsAreCorrect)(params); - return (0, _rxStorageInstanceMongodb.createMongoDBStorageInstance)(this, params, this.databaseSettings); - }; - return RxStorageMongoDB; -}(); -function 
getRxStorageMongoDB(databaseSettings) { - var storage = new RxStorageMongoDB(databaseSettings); - return storage; -} -//# sourceMappingURL=rx-storage-mongodb.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-mongodb/rx-storage-mongodb.js.map b/dist/cjs/plugins/storage-mongodb/rx-storage-mongodb.js.map deleted file mode 100644 index 2ae5549fc84..00000000000 --- a/dist/cjs/plugins/storage-mongodb/rx-storage-mongodb.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-storage-mongodb.js","names":["_rxStorageHelper","require","_mongodbHelper","_rxStorageInstanceMongodb","_utilsRxdbVersion","RxStorageMongoDB","exports","databaseSettings","name","RX_STORAGE_NAME_MONGODB","rxdbVersion","RXDB_VERSION","_proto","prototype","createStorageInstance","params","ensureRxStorageInstanceParamsAreCorrect","createMongoDBStorageInstance","getRxStorageMongoDB","storage"],"sources":["../../../../src/plugins/storage-mongodb/rx-storage-mongodb.ts"],"sourcesContent":["import type {\n RxStorage,\n RxStorageInstanceCreationParams\n} from '../../types/index.d.ts';\n\nimport { ensureRxStorageInstanceParamsAreCorrect } from '../../rx-storage-helper.ts';\nimport { RX_STORAGE_NAME_MONGODB } from './mongodb-helper.ts';\nimport type { MongoDBDatabaseSettings, MongoDBSettings, MongoDBStorageInternals } from './mongodb-types.ts';\nimport { RxStorageInstanceMongoDB, createMongoDBStorageInstance } from './rx-storage-instance-mongodb.ts';\nimport { RXDB_VERSION } from '../utils/utils-rxdb-version.ts';\n\nexport class RxStorageMongoDB implements RxStorage {\n public name = RX_STORAGE_NAME_MONGODB;\n public readonly rxdbVersion = RXDB_VERSION;\n\n constructor(\n public databaseSettings: MongoDBDatabaseSettings\n ) { }\n\n public createStorageInstance(\n params: RxStorageInstanceCreationParams\n ): Promise> {\n ensureRxStorageInstanceParamsAreCorrect(params);\n return createMongoDBStorageInstance(this, params, this.databaseSettings);\n }\n}\n\nexport function 
getRxStorageMongoDB(\n databaseSettings: MongoDBDatabaseSettings\n): RxStorageMongoDB {\n const storage = new RxStorageMongoDB(databaseSettings);\n return storage;\n}\n"],"mappings":";;;;;;;AAKA,IAAAA,gBAAA,GAAAC,OAAA;AACA,IAAAC,cAAA,GAAAD,OAAA;AAEA,IAAAE,yBAAA,GAAAF,OAAA;AACA,IAAAG,iBAAA,GAAAH,OAAA;AAA8D,IAEjDI,gBAAgB,GAAAC,OAAA,CAAAD,gBAAA;EAIzB,SAAAA,iBACWE,gBAAyC,EAClD;IAAA,KALKC,IAAI,GAAGC,sCAAuB;IAAA,KACrBC,WAAW,GAAGC,8BAAY;IAAA,KAG/BJ,gBAAyC,GAAzCA,gBAAyC;EAChD;EAAC,IAAAK,MAAA,GAAAP,gBAAA,CAAAQ,SAAA;EAAAD,MAAA,CAEEE,qBAAqB,GAA5B,SAAAA,sBACIC,MAAmE,EACvB;IAC5C,IAAAC,wDAAuC,EAACD,MAAM,CAAC;IAC/C,OAAO,IAAAE,sDAA4B,EAAC,IAAI,EAAEF,MAAM,EAAE,IAAI,CAACR,gBAAgB,CAAC;EAC5E,CAAC;EAAA,OAAAF,gBAAA;AAAA;AAGE,SAASa,mBAAmBA,CAC/BX,gBAAyC,EACzB;EAChB,IAAMY,OAAO,GAAG,IAAId,gBAAgB,CAACE,gBAAgB,CAAC;EACtD,OAAOY,OAAO;AAClB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-remote-websocket/index.js b/dist/cjs/plugins/storage-remote-websocket/index.js deleted file mode 100644 index 592889b2c44..00000000000 --- a/dist/cjs/plugins/storage-remote-websocket/index.js +++ /dev/null @@ -1,95 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _exportNames = { - startRxStorageRemoteWebsocketServer: true, - getRxStorageRemoteWebsocket: true -}; -exports.getRxStorageRemoteWebsocket = getRxStorageRemoteWebsocket; -exports.startRxStorageRemoteWebsocketServer = startRxStorageRemoteWebsocketServer; -var _rxjs = require("rxjs"); -var _index = require("../../plugins/utils/index.js"); -var _index2 = require("../replication-websocket/index.js"); -var _remote = require("../storage-remote/remote.js"); -var _rxStorageRemote = require("../storage-remote/rx-storage-remote.js"); -var _storageRemoteHelpers = require("../storage-remote/storage-remote-helpers.js"); -var _types = require("./types.js"); -Object.keys(_types).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if 
(Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _types[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _types[key]; - } - }); -}); -function startRxStorageRemoteWebsocketServer(options) { - var serverState = (0, _index2.startSocketServer)(options); - var websocketByConnectionId = new Map(); - var messages$ = new _rxjs.Subject(); - var exposeSettings = { - messages$: messages$.asObservable(), - storage: options.storage, - database: options.database, - customRequestHandler: options.customRequestHandler, - send(msg) { - var ws = (0, _index.getFromMapOrThrow)(websocketByConnectionId, msg.connectionId); - ws.send(JSON.stringify(msg)); - } - }; - var exposeState = (0, _remote.exposeRxStorageRemote)(exposeSettings); - serverState.onConnection$.subscribe(ws => { - var onCloseHandlers = []; - ws.onclose = () => { - onCloseHandlers.map(fn => fn()); - }; - ws.on('message', messageString => { - var message = JSON.parse(messageString); - var connectionId = message.connectionId; - if (!websocketByConnectionId.has(connectionId)) { - /** - * If first message is not 'create', - * it is an error. 
- */ - if (message.method !== 'create' && message.method !== 'custom') { - ws.send(JSON.stringify((0, _storageRemoteHelpers.createErrorAnswer)(message, new Error('First call must be a create call but is: ' + JSON.stringify(message))))); - return; - } - websocketByConnectionId.set(connectionId, ws); - } - messages$.next(message); - }); - }); - return { - serverState, - exposeState - }; -} -function getRxStorageRemoteWebsocket(options) { - var identifier = [options.url, 'rx-remote-storage-websocket'].join(''); - var storage = (0, _rxStorageRemote.getRxStorageRemote)({ - identifier, - mode: options.mode, - async messageChannelCreator() { - var messages$ = new _rxjs.Subject(); - var websocketClient = await (0, _index2.createWebSocketClient)(options); - websocketClient.message$.subscribe(msg => messages$.next(msg)); - return { - messages$, - send(msg) { - return websocketClient.socket.send(JSON.stringify(msg)); - }, - close() { - websocketClient.socket.close(); - return _index.PROMISE_RESOLVE_VOID; - } - }; - } - }); - return storage; -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-remote-websocket/index.js.map b/dist/cjs/plugins/storage-remote-websocket/index.js.map deleted file mode 100644 index 3c9b01ef96c..00000000000 --- a/dist/cjs/plugins/storage-remote-websocket/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","names":["_rxjs","require","_index","_index2","_remote","_rxStorageRemote","_storageRemoteHelpers","_types","Object","keys","forEach","key","prototype","hasOwnProperty","call","_exportNames","exports","defineProperty","enumerable","get","startRxStorageRemoteWebsocketServer","options","serverState","startSocketServer","websocketByConnectionId","Map","messages$","Subject","exposeSettings","asObservable","storage","database","customRequestHandler","send","msg","ws","getFromMapOrThrow","connectionId","JSON","stringify","exposeState","exposeRxStorageRemote","onConnection$","subscribe","onCloseHandlers","onclose","map","fn","on","messageString","message","parse","has","method","createErrorAnswer","Error","set","next","getRxStorageRemoteWebsocket","identifier","url","join","getRxStorageRemote","mode","messageChannelCreator","websocketClient","createWebSocketClient","message$","socket","close","PROMISE_RESOLVE_VOID"],"sources":["../../../../src/plugins/storage-remote-websocket/index.ts"],"sourcesContent":["import { Subject } from 'rxjs';\nimport type {\n WebSocket\n} from 'ws';\nimport {\n PROMISE_RESOLVE_VOID,\n getFromMapOrThrow\n} from '../../plugins/utils/index.ts';\nimport {\n createWebSocketClient,\n startSocketServer\n} from '../replication-websocket/index.ts';\nimport { exposeRxStorageRemote } from '../storage-remote/remote.ts';\nimport { getRxStorageRemote } from '../storage-remote/rx-storage-remote.ts';\nimport { createErrorAnswer } from '../storage-remote/storage-remote-helpers.ts';\nimport type {\n MessageFromRemote,\n MessageToRemote,\n RxStorageRemoteExposeSettings\n} from '../storage-remote/storage-remote-types.ts';\nimport type {\n RxStorageRemoteWebsocketClient,\n RxStorageRemoteWebsocketClientOptions,\n RxStorageRemoteWebsocketServerOptions,\n RxStorageRemoteWebsocketServerState\n} from './types.ts';\nexport function startRxStorageRemoteWebsocketServer(\n options: RxStorageRemoteWebsocketServerOptions\n): 
RxStorageRemoteWebsocketServerState {\n const serverState = startSocketServer(options);\n\n const websocketByConnectionId = new Map();\n const messages$ = new Subject();\n const exposeSettings: RxStorageRemoteExposeSettings = {\n messages$: messages$.asObservable(),\n storage: options.storage as any,\n database: options.database as any,\n customRequestHandler: options.customRequestHandler,\n send(msg) {\n const ws = getFromMapOrThrow(websocketByConnectionId, msg.connectionId);\n ws.send(JSON.stringify(msg));\n }\n };\n const exposeState = exposeRxStorageRemote(exposeSettings);\n\n serverState.onConnection$.subscribe(ws => {\n const onCloseHandlers: Function[] = [];\n ws.onclose = () => {\n onCloseHandlers.map(fn => fn());\n };\n ws.on('message', (messageString: string) => {\n const message: MessageToRemote = JSON.parse(messageString);\n const connectionId = message.connectionId;\n if (!websocketByConnectionId.has(connectionId)) {\n /**\n * If first message is not 'create',\n * it is an error.\n */\n if (\n message.method !== 'create' &&\n message.method !== 'custom'\n ) {\n ws.send(\n JSON.stringify(\n createErrorAnswer(message, new Error('First call must be a create call but is: ' + JSON.stringify(message)))\n )\n );\n return;\n }\n websocketByConnectionId.set(connectionId, ws);\n }\n messages$.next(message);\n });\n });\n\n return {\n serverState,\n exposeState\n };\n}\n\nexport function getRxStorageRemoteWebsocket(\n options: RxStorageRemoteWebsocketClientOptions\n): RxStorageRemoteWebsocketClient {\n const identifier = [\n options.url,\n 'rx-remote-storage-websocket'\n ].join('');\n const storage = getRxStorageRemote({\n identifier,\n mode: options.mode,\n async messageChannelCreator() {\n const messages$ = new Subject();\n const websocketClient = await createWebSocketClient(options as any);\n websocketClient.message$.subscribe(msg => messages$.next(msg));\n return {\n messages$,\n send(msg) {\n return websocketClient.socket.send(JSON.stringify(msg));\n },\n 
close() {\n websocketClient.socket.close();\n return PROMISE_RESOLVE_VOID;\n }\n };\n\n }\n });\n return storage;\n}\n\n\nexport * from './types.ts';\n\n"],"mappings":";;;;;;;;;;;AAAA,IAAAA,KAAA,GAAAC,OAAA;AAIA,IAAAC,MAAA,GAAAD,OAAA;AAIA,IAAAE,OAAA,GAAAF,OAAA;AAIA,IAAAG,OAAA,GAAAH,OAAA;AACA,IAAAI,gBAAA,GAAAJ,OAAA;AACA,IAAAK,qBAAA,GAAAL,OAAA;AAkGA,IAAAM,MAAA,GAAAN,OAAA;AAAAO,MAAA,CAAAC,IAAA,CAAAF,MAAA,EAAAG,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAJ,MAAA,CAAAI,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAZ,MAAA,CAAAI,GAAA;IAAA;EAAA;AAAA;AAtFO,SAASS,mCAAmCA,CAC/CC,OAA8C,EACX;EACnC,IAAMC,WAAW,GAAG,IAAAC,yBAAiB,EAACF,OAAO,CAAC;EAE9C,IAAMG,uBAAuB,GAAG,IAAIC,GAAG,CAAoB,CAAC;EAC5D,IAAMC,SAAS,GAAG,IAAIC,aAAO,CAAkB,CAAC;EAChD,IAAMC,cAA6C,GAAG;IAClDF,SAAS,EAAEA,SAAS,CAACG,YAAY,CAAC,CAAC;IACnCC,OAAO,EAAET,OAAO,CAACS,OAAc;IAC/BC,QAAQ,EAAEV,OAAO,CAACU,QAAe;IACjCC,oBAAoB,EAAEX,OAAO,CAACW,oBAAoB;IAClDC,IAAIA,CAACC,GAAG,EAAE;MACN,IAAMC,EAAE,GAAG,IAAAC,wBAAiB,EAACZ,uBAAuB,EAAEU,GAAG,CAACG,YAAY,CAAC;MACvEF,EAAE,CAACF,IAAI,CAACK,IAAI,CAACC,SAAS,CAACL,GAAG,CAAC,CAAC;IAChC;EACJ,CAAC;EACD,IAAMM,WAAW,GAAG,IAAAC,6BAAqB,EAACb,cAAc,CAAC;EAEzDN,WAAW,CAACoB,aAAa,CAACC,SAAS,CAACR,EAAE,IAAI;IACtC,IAAMS,eAA2B,GAAG,EAAE;IACtCT,EAAE,CAACU,OAAO,GAAG,MAAM;MACfD,eAAe,CAACE,GAAG,CAACC,EAAE,IAAIA,EAAE,CAAC,CAAC,CAAC;IACnC,CAAC;IACDZ,EAAE,CAACa,EAAE,CAAC,SAAS,EAAGC,aAAqB,IAAK;MACxC,IAAMC,OAAwB,GAAGZ,IAAI,CAACa,KAAK,CAACF,aAAa,CAAC;MAC1D,IAAMZ,YAAY,GAAGa,OAAO,CAACb,YAAY;MACzC,IAAI,CAACb,uBAAuB,CAAC4B,GAAG,CAACf,YAAY,CAAC,EAAE;QAC5C;AAChB;AACA;AACA;QACgB,IACIa,OAAO,CAACG,MAAM,KAAK,QAAQ,IAC3BH,OAAO,CAACG,MAAM,KAAK,QAAQ,EAC7B;UACElB,EAAE,CAACF,IAAI,CACHK,IAAI,CAACC,SAAS,CACV,IAAAe,uCAAiB,EAACJ,OAAO,EAAE,IAAIK,KAAK,CAAC,2CAA2C,GAAGjB,IAAI,CAACC,SAAS,CAACW,OAAO,CAAC,CAAC,CAC/G,CACJ,CAAC;UACD;QACJ;QACA1B,uBAAuB,CAACgC,GAAG,CAACnB,YAAY,EAAEF,
EAAE,CAAC;MACjD;MACAT,SAAS,CAAC+B,IAAI,CAACP,OAAO,CAAC;IAC3B,CAAC,CAAC;EACN,CAAC,CAAC;EAEF,OAAO;IACH5B,WAAW;IACXkB;EACJ,CAAC;AACL;AAEO,SAASkB,2BAA2BA,CACvCrC,OAA8C,EAChB;EAC9B,IAAMsC,UAAU,GAAG,CACftC,OAAO,CAACuC,GAAG,EACX,6BAA6B,CAChC,CAACC,IAAI,CAAC,EAAE,CAAC;EACV,IAAM/B,OAAO,GAAG,IAAAgC,mCAAkB,EAAC;IAC/BH,UAAU;IACVI,IAAI,EAAE1C,OAAO,CAAC0C,IAAI;IAClB,MAAMC,qBAAqBA,CAAA,EAAG;MAC1B,IAAMtC,SAAS,GAAG,IAAIC,aAAO,CAAoB,CAAC;MAClD,IAAMsC,eAAe,GAAG,MAAM,IAAAC,6BAAqB,EAAC7C,OAAc,CAAC;MACnE4C,eAAe,CAACE,QAAQ,CAACxB,SAAS,CAACT,GAAG,IAAIR,SAAS,CAAC+B,IAAI,CAACvB,GAAG,CAAC,CAAC;MAC9D,OAAO;QACHR,SAAS;QACTO,IAAIA,CAACC,GAAG,EAAE;UACN,OAAO+B,eAAe,CAACG,MAAM,CAACnC,IAAI,CAACK,IAAI,CAACC,SAAS,CAACL,GAAG,CAAC,CAAC;QAC3D,CAAC;QACDmC,KAAKA,CAAA,EAAG;UACJJ,eAAe,CAACG,MAAM,CAACC,KAAK,CAAC,CAAC;UAC9B,OAAOC,2BAAoB;QAC/B;MACJ,CAAC;IAEL;EACJ,CAAC,CAAC;EACF,OAAOxC,OAAO;AAClB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-remote-websocket/types.js b/dist/cjs/plugins/storage-remote-websocket/types.js deleted file mode 100644 index 2f0e4146c53..00000000000 --- a/dist/cjs/plugins/storage-remote-websocket/types.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-remote-websocket/types.js.map b/dist/cjs/plugins/storage-remote-websocket/types.js.map deleted file mode 100644 index dd77baacfbb..00000000000 --- a/dist/cjs/plugins/storage-remote-websocket/types.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"types.js","names":[],"sources":["../../../../src/plugins/storage-remote-websocket/types.ts"],"sourcesContent":["import type {\n WebsocketServerState\n} from '../replication-websocket/index.ts';\nimport type { ServerOptions, ClientOptions } from 'ws';\nimport type { RxDatabase, RxStorage } from '../../types/index.d.ts';\nimport type {\n CustomRequestHandler,\n RxStorageRemoteExposeType,\n RxStorageRemoteSettings\n} from 
'../storage-remote/storage-remote-types.ts';\nimport { RxStorageRemote } from '../storage-remote/index.ts';\n\nexport type RxStorageRemoteWebsocketServerOptions = ServerOptions & {\n storage?: RxStorage;\n database?: RxDatabase;\n customRequestHandler?: CustomRequestHandler;\n};\n\nexport type RxStorageRemoteWebsocketServerState = {\n serverState: WebsocketServerState;\n exposeState: RxStorageRemoteExposeType;\n};\n\nexport type RxStorageRemoteWebsocketClientOptions = ClientOptions & {\n url: string;\n mode: RxStorageRemoteSettings['mode'];\n};\n\nexport type RxStorageRemoteWebsocketClient = RxStorageRemote;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-remote/index.js b/dist/cjs/plugins/storage-remote/index.js deleted file mode 100644 index f4338ecdf67..00000000000 --- a/dist/cjs/plugins/storage-remote/index.js +++ /dev/null @@ -1,61 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _rxStorageRemote = require("./rx-storage-remote.js"); -Object.keys(_rxStorageRemote).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _rxStorageRemote[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _rxStorageRemote[key]; - } - }); -}); -var _storageRemoteTypes = require("./storage-remote-types.js"); -Object.keys(_storageRemoteTypes).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _storageRemoteTypes[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _storageRemoteTypes[key]; - } - }); -}); -var _storageRemoteHelpers = require("./storage-remote-helpers.js"); -Object.keys(_storageRemoteHelpers).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === 
_storageRemoteHelpers[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _storageRemoteHelpers[key]; - } - }); -}); -var _messageChannelCache = require("./message-channel-cache.js"); -Object.keys(_messageChannelCache).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _messageChannelCache[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _messageChannelCache[key]; - } - }); -}); -var _remote = require("./remote.js"); -Object.keys(_remote).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _remote[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _remote[key]; - } - }); -}); -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-remote/index.js.map b/dist/cjs/plugins/storage-remote/index.js.map deleted file mode 100644 index c95f8a35bcb..00000000000 --- a/dist/cjs/plugins/storage-remote/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_rxStorageRemote","require","Object","keys","forEach","key","exports","defineProperty","enumerable","get","_storageRemoteTypes","_storageRemoteHelpers","_messageChannelCache","_remote"],"sources":["../../../../src/plugins/storage-remote/index.ts"],"sourcesContent":["export * from './rx-storage-remote.ts';\nexport * from './storage-remote-types.ts';\nexport * from './storage-remote-helpers.ts';\nexport * from './message-channel-cache.ts';\nexport * from 
'./remote.ts';\n"],"mappings":";;;;;AAAA,IAAAA,gBAAA,GAAAC,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAH,gBAAA,EAAAI,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAL,gBAAA,CAAAK,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAT,gBAAA,CAAAK,GAAA;IAAA;EAAA;AAAA;AACA,IAAAK,mBAAA,GAAAT,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAO,mBAAA,EAAAN,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAK,mBAAA,CAAAL,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAC,mBAAA,CAAAL,GAAA;IAAA;EAAA;AAAA;AACA,IAAAM,qBAAA,GAAAV,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAQ,qBAAA,EAAAP,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAM,qBAAA,CAAAN,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAE,qBAAA,CAAAN,GAAA;IAAA;EAAA;AAAA;AACA,IAAAO,oBAAA,GAAAX,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAS,oBAAA,EAAAR,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAO,oBAAA,CAAAP,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAG,oBAAA,CAAAP,GAAA;IAAA;EAAA;AAAA;AACA,IAAAQ,OAAA,GAAAZ,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAU,OAAA,EAAAT,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAQ,OAAA,CAAAR,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAI,OAAA,CAAAR,GAAA;IAAA;EAAA;AAAA","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-remote/message-channel-cache.js b/dist/cjs/plugins/storage-remote/message-channel-cache.js deleted file mode 100644 index f9d27506fb8..00000000000 --- a/dist/cjs/plugins/storage-remote/message-channel-cache.js +++ /dev/null @@ -1,52 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.OPEN_REMOTE_MESSAGE_CHANNELS 
= exports.MESSAGE_CHANNEL_CACHE_BY_IDENTIFIER = exports.CACHE_ITEM_BY_MESSAGE_CHANNEL = void 0; -exports.closeMessageChannel = closeMessageChannel; -exports.getMessageChannel = getMessageChannel; -var _index = require("../utils/index.js"); -var MESSAGE_CHANNEL_CACHE_BY_IDENTIFIER = exports.MESSAGE_CHANNEL_CACHE_BY_IDENTIFIER = new Map(); -var CACHE_ITEM_BY_MESSAGE_CHANNEL = exports.CACHE_ITEM_BY_MESSAGE_CHANNEL = new WeakMap(); -var OPEN_REMOTE_MESSAGE_CHANNELS = exports.OPEN_REMOTE_MESSAGE_CHANNELS = new Set(); -function getMessageChannelCache(identifier) { - return (0, _index.getFromMapOrCreate)(MESSAGE_CHANNEL_CACHE_BY_IDENTIFIER, identifier, () => new Map()); -} -function getMessageChannel(settings, cacheKeys, keepAlive = false) { - var cacheKey = getCacheKey(settings, cacheKeys); - var cacheItem = (0, _index.getFromMapOrCreate)(getMessageChannelCache(settings.identifier), cacheKey, () => { - var newCacheItem = { - identifier: settings.identifier, - cacheKey, - keepAlive, - refCount: 1, - messageChannel: settings.messageChannelCreator().then(messageChannel => { - OPEN_REMOTE_MESSAGE_CHANNELS.add(messageChannel); - CACHE_ITEM_BY_MESSAGE_CHANNEL.set(messageChannel, newCacheItem); - return messageChannel; - }) - }; - return newCacheItem; - }, existingCacheItem => { - existingCacheItem.refCount = existingCacheItem.refCount + 1; - }); - return cacheItem.messageChannel; -} -function closeMessageChannel(messageChannel) { - var cacheItem = (0, _index.getFromMapOrThrow)(CACHE_ITEM_BY_MESSAGE_CHANNEL, messageChannel); - cacheItem.refCount = cacheItem.refCount - 1; - if (cacheItem.refCount === 0 && !cacheItem.keepAlive) { - getMessageChannelCache(cacheItem.identifier).delete(cacheItem.cacheKey); - OPEN_REMOTE_MESSAGE_CHANNELS.delete(messageChannel); - return messageChannel.close(); - } else { - return _index.PROMISE_RESOLVE_VOID; - } -} -function getCacheKey(settings, cacheKeys) { - cacheKeys = cacheKeys.slice(0); - cacheKeys.unshift(settings.identifier); - return 
cacheKeys.join('||'); -} -//# sourceMappingURL=message-channel-cache.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-remote/message-channel-cache.js.map b/dist/cjs/plugins/storage-remote/message-channel-cache.js.map deleted file mode 100644 index 8691fbe33ad..00000000000 --- a/dist/cjs/plugins/storage-remote/message-channel-cache.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"message-channel-cache.js","names":["_index","require","MESSAGE_CHANNEL_CACHE_BY_IDENTIFIER","exports","Map","CACHE_ITEM_BY_MESSAGE_CHANNEL","WeakMap","OPEN_REMOTE_MESSAGE_CHANNELS","Set","getMessageChannelCache","identifier","getFromMapOrCreate","getMessageChannel","settings","cacheKeys","keepAlive","cacheKey","getCacheKey","cacheItem","newCacheItem","refCount","messageChannel","messageChannelCreator","then","add","set","existingCacheItem","closeMessageChannel","getFromMapOrThrow","delete","close","PROMISE_RESOLVE_VOID","slice","unshift","join"],"sources":["../../../../src/plugins/storage-remote/message-channel-cache.ts"],"sourcesContent":["import {\n PROMISE_RESOLVE_VOID,\n getFromMapOrCreate,\n getFromMapOrThrow\n} from '../utils/index.ts';\nimport {\n RemoteMessageChannel,\n RxStorageRemoteSettings\n} from './storage-remote-types.ts';\n\nexport type RemoteMessageChannelCacheItem = {\n identifier: string;\n cacheKey: string;\n messageChannel: Promise;\n refCount: number;\n keepAlive: boolean;\n};\n\nexport const MESSAGE_CHANNEL_CACHE_BY_IDENTIFIER = new Map>();\nexport const CACHE_ITEM_BY_MESSAGE_CHANNEL = new WeakMap();\n\n\nexport const OPEN_REMOTE_MESSAGE_CHANNELS = new Set();\n\nfunction getMessageChannelCache(\n identifier: string\n) {\n return getFromMapOrCreate(\n MESSAGE_CHANNEL_CACHE_BY_IDENTIFIER,\n identifier,\n () => new Map()\n );\n}\n\nexport function getMessageChannel(\n settings: RxStorageRemoteSettings,\n cacheKeys: string[],\n keepAlive: boolean = false\n): Promise {\n const cacheKey = getCacheKey(settings, cacheKeys);\n const cacheItem = 
getFromMapOrCreate(\n getMessageChannelCache(settings.identifier),\n cacheKey,\n () => {\n const newCacheItem: RemoteMessageChannelCacheItem = {\n identifier: settings.identifier,\n cacheKey,\n keepAlive,\n refCount: 1,\n messageChannel: settings.messageChannelCreator()\n .then((messageChannel) => {\n OPEN_REMOTE_MESSAGE_CHANNELS.add(messageChannel);\n CACHE_ITEM_BY_MESSAGE_CHANNEL.set(messageChannel, newCacheItem);\n return messageChannel;\n }),\n };\n return newCacheItem;\n },\n (existingCacheItem) => {\n existingCacheItem.refCount = existingCacheItem.refCount + 1;\n }\n );\n return cacheItem.messageChannel;\n}\n\n\nexport function closeMessageChannel(\n messageChannel: RemoteMessageChannel\n): Promise {\n const cacheItem = getFromMapOrThrow(CACHE_ITEM_BY_MESSAGE_CHANNEL, messageChannel);\n cacheItem.refCount = cacheItem.refCount - 1;\n if (cacheItem.refCount === 0 && !cacheItem.keepAlive) {\n getMessageChannelCache(cacheItem.identifier).delete(cacheItem.cacheKey);\n OPEN_REMOTE_MESSAGE_CHANNELS.delete(messageChannel);\n return messageChannel.close();\n } else {\n return PROMISE_RESOLVE_VOID;\n }\n}\n\nfunction getCacheKey(\n settings: RxStorageRemoteSettings,\n cacheKeys: string[]\n): string {\n cacheKeys = cacheKeys.slice(0);\n cacheKeys.unshift(settings.identifier);\n return 
cacheKeys.join('||');\n}\n"],"mappings":";;;;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AAkBO,IAAMC,mCAAmC,GAAAC,OAAA,CAAAD,mCAAA,GAAG,IAAIE,GAAG,CAAqD,CAAC;AACzG,IAAMC,6BAA6B,GAAAF,OAAA,CAAAE,6BAAA,GAAG,IAAIC,OAAO,CAAsD,CAAC;AAGxG,IAAMC,4BAA4B,GAAAJ,OAAA,CAAAI,4BAAA,GAAG,IAAIC,GAAG,CAAuB,CAAC;AAE3E,SAASC,sBAAsBA,CAC3BC,UAAkB,EACpB;EACE,OAAO,IAAAC,yBAAkB,EACrBT,mCAAmC,EACnCQ,UAAU,EACV,MAAM,IAAIN,GAAG,CAAC,CAClB,CAAC;AACL;AAEO,SAASQ,iBAAiBA,CAC7BC,QAAiC,EACjCC,SAAmB,EACnBC,SAAkB,GAAG,KAAK,EACG;EAC7B,IAAMC,QAAQ,GAAGC,WAAW,CAACJ,QAAQ,EAAEC,SAAS,CAAC;EACjD,IAAMI,SAAS,GAAG,IAAAP,yBAAkB,EAChCF,sBAAsB,CAACI,QAAQ,CAACH,UAAU,CAAC,EAC3CM,QAAQ,EACR,MAAM;IACF,IAAMG,YAA2C,GAAG;MAChDT,UAAU,EAAEG,QAAQ,CAACH,UAAU;MAC/BM,QAAQ;MACRD,SAAS;MACTK,QAAQ,EAAE,CAAC;MACXC,cAAc,EAAER,QAAQ,CAACS,qBAAqB,CAAC,CAAC,CAC3CC,IAAI,CAAEF,cAAc,IAAK;QACtBd,4BAA4B,CAACiB,GAAG,CAACH,cAAc,CAAC;QAChDhB,6BAA6B,CAACoB,GAAG,CAACJ,cAAc,EAAEF,YAAY,CAAC;QAC/D,OAAOE,cAAc;MACzB,CAAC;IACT,CAAC;IACD,OAAOF,YAAY;EACvB,CAAC,EACAO,iBAAiB,IAAK;IACnBA,iBAAiB,CAACN,QAAQ,GAAGM,iBAAiB,CAACN,QAAQ,GAAG,CAAC;EAC/D,CACJ,CAAC;EACD,OAAOF,SAAS,CAACG,cAAc;AACnC;AAGO,SAASM,mBAAmBA,CAC/BN,cAAoC,EACvB;EACb,IAAMH,SAAS,GAAG,IAAAU,wBAAiB,EAACvB,6BAA6B,EAAEgB,cAAc,CAAC;EAClFH,SAAS,CAACE,QAAQ,GAAGF,SAAS,CAACE,QAAQ,GAAG,CAAC;EAC3C,IAAIF,SAAS,CAACE,QAAQ,KAAK,CAAC,IAAI,CAACF,SAAS,CAACH,SAAS,EAAE;IAClDN,sBAAsB,CAACS,SAAS,CAACR,UAAU,CAAC,CAACmB,MAAM,CAACX,SAAS,CAACF,QAAQ,CAAC;IACvET,4BAA4B,CAACsB,MAAM,CAACR,cAAc,CAAC;IACnD,OAAOA,cAAc,CAACS,KAAK,CAAC,CAAC;EACjC,CAAC,MAAM;IACH,OAAOC,2BAAoB;EAC/B;AACJ;AAEA,SAASd,WAAWA,CAChBJ,QAAiC,EACjCC,SAAmB,EACb;EACNA,SAAS,GAAGA,SAAS,CAACkB,KAAK,CAAC,CAAC,CAAC;EAC9BlB,SAAS,CAACmB,OAAO,CAACpB,QAAQ,CAACH,UAAU,CAAC;EACtC,OAAOI,SAAS,CAACoB,IAAI,CAAC,IAAI,CAAC;AAC/B","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-remote/remote.js b/dist/cjs/plugins/storage-remote/remote.js deleted file mode 100644 index c48a3708aa8..00000000000 --- a/dist/cjs/plugins/storage-remote/remote.js +++ /dev/null @@ 
-1,207 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.exposeRxStorageRemote = exposeRxStorageRemote; -var _rxjs = require("rxjs"); -var _index = require("../../plugins/utils/index.js"); -var _storageRemoteHelpers = require("./storage-remote-helpers.js"); -var _rxStorageHelper = require("../../rx-storage-helper.js"); -/** - * Run this on the 'remote' part, - * so that RxStorageMessageChannel can connect to it. - */ -function exposeRxStorageRemote(settings) { - var instanceByFullName = new Map(); - settings.messages$.pipe((0, _rxjs.filter)(msg => msg.method === 'custom')).subscribe(async msg => { - if (!settings.customRequestHandler) { - settings.send((0, _storageRemoteHelpers.createErrorAnswer)(msg, new Error('Remote storage: cannot resolve custom request because settings.customRequestHandler is not set'))); - } else { - try { - var result = await settings.customRequestHandler(msg.params); - settings.send((0, _storageRemoteHelpers.createAnswer)(msg, result)); - } catch (err) { - settings.send((0, _storageRemoteHelpers.createErrorAnswer)(msg, err)); - } - } - }); - function getRxStorageInstance(params) { - if (settings.storage) { - return settings.storage.createStorageInstance(params); - } else if (settings.database) { - var storageInstances = Array.from(settings.database.storageInstances); - var collectionName = params.collectionName; - var storageInstance = storageInstances.find(instance => instance.collectionName === collectionName); - if (!storageInstance) { - console.dir(storageInstances); - throw new Error('storageInstance does not exist ' + JSON.stringify({ - collectionName - })); - } - var schema = params.schema; - if (!(0, _index.deepEqual)(schema, storageInstance.schema)) { - throw new Error('Wrong schema ' + JSON.stringify({ - schema, - existingSchema: storageInstance.schema - })); - } - return Promise.resolve(storageInstance); - } else { - throw new Error('no base given'); - } - } - 
settings.messages$.pipe((0, _rxjs.filter)(msg => msg.method === 'create')).subscribe(async msg => { - var connectionId = msg.connectionId; - - /** - * Do an isArray check here - * for runtime check types to ensure we have - * instance creation params and not method input params. - */ - if (Array.isArray(msg.params)) { - return; - } - var params = msg.params; - var collectionName = params.collectionName; - - /** - * We de-duplicate the storage instances. - * This makes sense in many environments like - * electron where on main process contains the storage - * for multiple renderer processes. Same goes for SharedWorkers etc. - */ - var fullName = [params.databaseName, params.collectionName, params.schema.version].join('|'); - var state = instanceByFullName.get(fullName); - if (!state) { - try { - state = { - /** - * We work with a promise here to ensure - * that parallel create-calls will still end up - * with exactly one instance and not more. - */ - storageInstancePromise: getRxStorageInstance(params), - connectionIds: new Set(), - params - }; - instanceByFullName.set(fullName, state); - - /** - * Must await the creation here - * so that in case of an error, - * it knows about the error message and can send - * that back to the main process. - */ - await state.storageInstancePromise; - } catch (err) { - settings.send((0, _storageRemoteHelpers.createErrorAnswer)(msg, err)); - return; - } - } else { - // if instance already existed, ensure that the schema is equal - if (!(0, _index.deepEqual)(params.schema, state.params.schema)) { - settings.send((0, _storageRemoteHelpers.createErrorAnswer)(msg, new Error('Remote storage: schema not equal to existing storage'))); - return; - } - } - state.connectionIds.add(msg.connectionId); - var subs = []; - var storageInstance = await state.storageInstancePromise; - /** - * Automatically subscribe to the changeStream() - * because we always need them. 
- */ - subs.push(storageInstance.changeStream().subscribe(changes => { - var message = { - connectionId, - answerTo: 'changestream', - method: 'changeStream', - return: changes - }; - settings.send(message); - })); - subs.push(storageInstance.conflictResultionTasks().subscribe(conflicts => { - var message = { - connectionId, - answerTo: 'conflictResultionTasks', - method: 'conflictResultionTasks', - return: conflicts - }; - settings.send(message); - })); - var connectionClosed = false; - function closeThisConnection() { - if (connectionClosed) { - return; - } - connectionClosed = true; - subs.forEach(sub => sub.unsubscribe()); - (0, _index.ensureNotFalsy)(state).connectionIds.delete(connectionId); - instanceByFullName.delete(fullName); - /** - * TODO how to notify the other ports on remove() ? - */ - } - - // also close the connection when the collection gets destroyed - if (settings.database) { - var database = settings.database; - var collection = database.collections[collectionName]; - if (collection) { - collection.onDestroy.push(() => closeThisConnection()); - } else { - database.onDestroy.push(() => closeThisConnection()); - } - } - subs.push(settings.messages$.pipe((0, _rxjs.filter)(subMsg => subMsg.connectionId === connectionId)).subscribe(async plainMessage => { - var message = plainMessage; - if (message.method === 'create' || message.method === 'custom') { - return; - } - if (!Array.isArray(message.params)) { - return; - } - var result; - try { - if (message.method === 'close' && settings.database) { - /** - * Do not close the storageInstance if it was taken from - * a running RxDatabase. - * In that case we only close the instance - * when the RxDatabase gets destroyed. - */ - settings.send((0, _storageRemoteHelpers.createAnswer)(message, null)); - return; - } - /** - * On calls to 'close()', - * we only close the main instance if there are no other - * ports connected. 
- */ - if (message.method === 'close' && (0, _index.ensureNotFalsy)(state).connectionIds.size > 1) { - settings.send((0, _storageRemoteHelpers.createAnswer)(message, null)); - (0, _index.ensureNotFalsy)(state).connectionIds.delete(connectionId); - subs.forEach(sub => sub.unsubscribe()); - return; - } - if (message.method === 'getChangedDocumentsSince' && !storageInstance.getChangedDocumentsSince) { - result = await (0, _rxStorageHelper.getChangedDocumentsSince)(storageInstance, message.params[0], message.params[1]); - } else { - result = await storageInstance[message.method](message.params[0], message.params[1], message.params[2], message.params[3]); - } - if (message.method === 'close' || message.method === 'remove') { - closeThisConnection(); - } - settings.send((0, _storageRemoteHelpers.createAnswer)(message, result)); - } catch (err) { - settings.send((0, _storageRemoteHelpers.createErrorAnswer)(message, err)); - } - })); - settings.send((0, _storageRemoteHelpers.createAnswer)(msg, 'ok')); - }); - return { - instanceByFullName - }; -} -//# sourceMappingURL=remote.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-remote/remote.js.map b/dist/cjs/plugins/storage-remote/remote.js.map deleted file mode 100644 index 7be836c8759..00000000000 --- a/dist/cjs/plugins/storage-remote/remote.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"remote.js","names":["_rxjs","require","_index","_storageRemoteHelpers","_rxStorageHelper","exposeRxStorageRemote","settings","instanceByFullName","Map","messages$","pipe","filter","msg","method","subscribe","customRequestHandler","send","createErrorAnswer","Error","result","params","createAnswer","err","getRxStorageInstance","storage","createStorageInstance","database","storageInstances","Array","from","collectionName","storageInstance","find","instance","console","dir","JSON","stringify","schema","deepEqual","existingSchema","Promise","resolve","connectionId","isArray","fullName","databaseName","version","join","state","get","storageInstancePromise","connectionIds","Set","set","add","subs","push","changeStream","changes","message","answerTo","return","conflictResultionTasks","conflicts","connectionClosed","closeThisConnection","forEach","sub","unsubscribe","ensureNotFalsy","delete","collection","collections","onDestroy","subMsg","plainMessage","size","getChangedDocumentsSince"],"sources":["../../../../src/plugins/storage-remote/remote.ts"],"sourcesContent":["import { filter, Subscription } from 'rxjs';\nimport type {\n RxStorageInstance,\n RxStorageInstanceCreationParams\n} from '../../types/index.d.ts';\nimport {\n deepEqual,\n ensureNotFalsy\n} from '../../plugins/utils/index.ts';\nimport { createAnswer, createErrorAnswer } from './storage-remote-helpers.ts';\nimport type {\n MessageFromRemote,\n MessageToRemote,\n RxStorageRemoteExposeSettings,\n RxStorageRemoteExposeSettingsRxDatabase,\n RxStorageRemoteExposeSettingsRxStorage,\n RxStorageRemoteExposeType\n} from './storage-remote-types.ts';\nimport { getChangedDocumentsSince } from '../../rx-storage-helper.ts';\n\n/**\n * Run this on the 'remote' part,\n * so that RxStorageMessageChannel can connect to it.\n */\nexport function exposeRxStorageRemote(settings: RxStorageRemoteExposeSettings): RxStorageRemoteExposeType {\n type InstanceState = {\n storageInstancePromise: Promise>;\n 
connectionIds: Set;\n params: RxStorageInstanceCreationParams;\n };\n const instanceByFullName: Map = new Map();\n\n\n settings.messages$.pipe(\n filter(msg => msg.method === 'custom')\n ).subscribe(async (msg) => {\n if (!settings.customRequestHandler) {\n settings.send(createErrorAnswer(\n msg,\n new Error('Remote storage: cannot resolve custom request because settings.customRequestHandler is not set')\n ));\n } else {\n try {\n const result = await settings.customRequestHandler(msg.params);\n settings.send(createAnswer(msg, result));\n } catch (err: any) {\n settings.send(createErrorAnswer(\n msg,\n err\n ));\n }\n }\n });\n\n\n function getRxStorageInstance(params: any): Promise> {\n if ((settings as RxStorageRemoteExposeSettingsRxStorage).storage) {\n return (settings as RxStorageRemoteExposeSettingsRxStorage).storage.createStorageInstance(params);\n } else if ((settings as RxStorageRemoteExposeSettingsRxDatabase).database) {\n const storageInstances = Array.from((settings as RxStorageRemoteExposeSettingsRxDatabase).database.storageInstances);\n const collectionName = params.collectionName;\n const storageInstance = storageInstances.find(instance => instance.collectionName === collectionName);\n if (!storageInstance) {\n console.dir(storageInstances);\n throw new Error('storageInstance does not exist ' + JSON.stringify({\n collectionName\n }));\n }\n const schema = params.schema;\n if (!deepEqual(schema, storageInstance.schema)) {\n throw new Error('Wrong schema ' + JSON.stringify({\n schema,\n existingSchema: storageInstance.schema\n }));\n }\n return Promise.resolve(storageInstance);\n } else {\n throw new Error('no base given');\n }\n }\n\n settings.messages$.pipe(\n filter(msg => msg.method === 'create')\n ).subscribe(async (msg) => {\n const connectionId = msg.connectionId;\n\n /**\n * Do an isArray check here\n * for runtime check types to ensure we have\n * instance creation params and not method input params.\n */\n if (Array.isArray(msg.params)) {\n 
return;\n }\n const params = msg.params;\n const collectionName = params.collectionName;\n\n /**\n * We de-duplicate the storage instances.\n * This makes sense in many environments like\n * electron where on main process contains the storage\n * for multiple renderer processes. Same goes for SharedWorkers etc.\n */\n const fullName = [\n params.databaseName,\n params.collectionName,\n params.schema.version\n ].join('|');\n let state = instanceByFullName.get(fullName);\n if (!state) {\n try {\n state = {\n /**\n * We work with a promise here to ensure\n * that parallel create-calls will still end up\n * with exactly one instance and not more.\n */\n storageInstancePromise: getRxStorageInstance(params),\n connectionIds: new Set(),\n params\n };\n instanceByFullName.set(fullName, state);\n\n /**\n * Must await the creation here\n * so that in case of an error,\n * it knows about the error message and can send\n * that back to the main process. \n */\n await state.storageInstancePromise;\n } catch (err: any) {\n settings.send(createErrorAnswer(msg, err));\n return;\n }\n } else {\n // if instance already existed, ensure that the schema is equal\n if (!deepEqual(params.schema, state.params.schema)) {\n settings.send(createErrorAnswer(msg, new Error('Remote storage: schema not equal to existing storage')));\n return;\n }\n }\n state.connectionIds.add(msg.connectionId);\n const subs: Subscription[] = [];\n\n const storageInstance = await state.storageInstancePromise;\n /**\n * Automatically subscribe to the changeStream()\n * because we always need them.\n */\n subs.push(\n storageInstance.changeStream().subscribe(changes => {\n const message: MessageFromRemote = {\n connectionId,\n answerTo: 'changestream',\n method: 'changeStream',\n return: changes\n };\n settings.send(message);\n })\n );\n subs.push(\n storageInstance.conflictResultionTasks().subscribe(conflicts => {\n const message: MessageFromRemote = {\n connectionId,\n answerTo: 'conflictResultionTasks',\n 
method: 'conflictResultionTasks',\n return: conflicts\n };\n settings.send(message);\n })\n );\n\n\n let connectionClosed = false;\n function closeThisConnection() {\n if (connectionClosed) {\n return;\n }\n connectionClosed = true;\n subs.forEach(sub => sub.unsubscribe());\n ensureNotFalsy(state).connectionIds.delete(connectionId);\n instanceByFullName.delete(fullName);\n /**\n * TODO how to notify the other ports on remove() ?\n */\n }\n\n // also close the connection when the collection gets destroyed\n if ((settings as RxStorageRemoteExposeSettingsRxDatabase).database) {\n const database = (settings as RxStorageRemoteExposeSettingsRxDatabase).database;\n const collection = database.collections[collectionName];\n if (collection) {\n collection.onDestroy.push(() => closeThisConnection());\n } else {\n database.onDestroy.push(() => closeThisConnection());\n }\n }\n\n subs.push(\n settings.messages$.pipe(\n filter(subMsg => (subMsg as MessageToRemote).connectionId === connectionId)\n ).subscribe(async (plainMessage) => {\n const message: MessageToRemote = plainMessage as any;\n if (\n message.method === 'create' ||\n message.method === 'custom'\n ) {\n return;\n }\n if (!Array.isArray(message.params)) {\n return;\n }\n let result;\n try {\n if (\n message.method === 'close' &&\n (settings as RxStorageRemoteExposeSettingsRxDatabase).database\n ) {\n /**\n * Do not close the storageInstance if it was taken from\n * a running RxDatabase.\n * In that case we only close the instance\n * when the RxDatabase gets destroyed.\n */\n settings.send(createAnswer(message, null));\n return;\n }\n /**\n * On calls to 'close()',\n * we only close the main instance if there are no other\n * ports connected.\n */\n if (\n message.method === 'close' &&\n ensureNotFalsy(state).connectionIds.size > 1\n ) {\n settings.send(createAnswer(message, null));\n ensureNotFalsy(state).connectionIds.delete(connectionId);\n subs.forEach(sub => sub.unsubscribe());\n return;\n }\n\n if 
(message.method === 'getChangedDocumentsSince' && !storageInstance.getChangedDocumentsSince) {\n result = await getChangedDocumentsSince(\n storageInstance,\n message.params[0],\n message.params[1]\n );\n } else {\n result = await (storageInstance as any)[message.method](\n message.params[0],\n message.params[1],\n message.params[2],\n message.params[3]\n );\n }\n if (\n message.method === 'close' ||\n message.method === 'remove'\n ) {\n closeThisConnection();\n }\n settings.send(createAnswer(message, result));\n } catch (err: any) {\n settings.send(createErrorAnswer(message, err));\n }\n })\n );\n\n settings.send(createAnswer(msg, 'ok'));\n });\n\n return {\n instanceByFullName\n };\n}\n"],"mappings":";;;;;;AAAA,IAAAA,KAAA,GAAAC,OAAA;AAKA,IAAAC,MAAA,GAAAD,OAAA;AAIA,IAAAE,qBAAA,GAAAF,OAAA;AASA,IAAAG,gBAAA,GAAAH,OAAA;AAEA;AACA;AACA;AACA;AACO,SAASI,qBAAqBA,CAACC,QAAuC,EAA6B;EAMtG,IAAMC,kBAA8C,GAAG,IAAIC,GAAG,CAAC,CAAC;EAGhEF,QAAQ,CAACG,SAAS,CAACC,IAAI,CACnB,IAAAC,YAAM,EAACC,GAAG,IAAIA,GAAG,CAACC,MAAM,KAAK,QAAQ,CACzC,CAAC,CAACC,SAAS,CAAC,MAAOF,GAAG,IAAK;IACvB,IAAI,CAACN,QAAQ,CAACS,oBAAoB,EAAE;MAChCT,QAAQ,CAACU,IAAI,CAAC,IAAAC,uCAAiB,EAC3BL,GAAG,EACH,IAAIM,KAAK,CAAC,gGAAgG,CAC9G,CAAC,CAAC;IACN,CAAC,MAAM;MACH,IAAI;QACA,IAAMC,MAAM,GAAG,MAAMb,QAAQ,CAACS,oBAAoB,CAACH,GAAG,CAACQ,MAAM,CAAC;QAC9Dd,QAAQ,CAACU,IAAI,CAAC,IAAAK,kCAAY,EAACT,GAAG,EAAEO,MAAM,CAAC,CAAC;MAC5C,CAAC,CAAC,OAAOG,GAAQ,EAAE;QACfhB,QAAQ,CAACU,IAAI,CAAC,IAAAC,uCAAiB,EAC3BL,GAAG,EACHU,GACJ,CAAC,CAAC;MACN;IACJ;EACJ,CAAC,CAAC;EAGF,SAASC,oBAAoBA,CAAYH,MAAW,EAAwD;IACxG,IAAKd,QAAQ,CAA4CkB,OAAO,EAAE;MAC9D,OAAQlB,QAAQ,CAA4CkB,OAAO,CAACC,qBAAqB,CAACL,MAAM,CAAC;IACrG,CAAC,MAAM,IAAKd,QAAQ,CAA6CoB,QAAQ,EAAE;MACvE,IAAMC,gBAAgB,GAAGC,KAAK,CAACC,IAAI,CAAEvB,QAAQ,CAA6CoB,QAAQ,CAACC,gBAAgB,CAAC;MACpH,IAAMG,cAAc,GAAGV,MAAM,CAACU,cAAc;MAC5C,IAAMC,eAAe,GAAGJ,gBAAgB,CAACK,IAAI,CAACC,QAAQ,IAAIA,QAAQ,CAACH,cAAc,KAAKA,cAAc,CAAC;MACrG,IAAI,CAACC,eAAe,EAAE;QAClBG,OAAO,CAACC,GAAG,CAACR,gBAAgB,CAAC;QAC7B,MAAM,IAAIT,KAAK,CAAC,iCAAiC,GAAGk
B,IAAI,CAACC,SAAS,CAAC;UAC/DP;QACJ,CAAC,CAAC,CAAC;MACP;MACA,IAAMQ,MAAM,GAAGlB,MAAM,CAACkB,MAAM;MAC5B,IAAI,CAAC,IAAAC,gBAAS,EAACD,MAAM,EAAEP,eAAe,CAACO,MAAM,CAAC,EAAE;QAC5C,MAAM,IAAIpB,KAAK,CAAC,eAAe,GAAGkB,IAAI,CAACC,SAAS,CAAC;UAC7CC,MAAM;UACNE,cAAc,EAAET,eAAe,CAACO;QACpC,CAAC,CAAC,CAAC;MACP;MACA,OAAOG,OAAO,CAACC,OAAO,CAACX,eAAe,CAAC;IAC3C,CAAC,MAAM;MACH,MAAM,IAAIb,KAAK,CAAC,eAAe,CAAC;IACpC;EACJ;EAEAZ,QAAQ,CAACG,SAAS,CAACC,IAAI,CACnB,IAAAC,YAAM,EAACC,GAAG,IAAIA,GAAG,CAACC,MAAM,KAAK,QAAQ,CACzC,CAAC,CAACC,SAAS,CAAC,MAAOF,GAAG,IAAK;IACvB,IAAM+B,YAAY,GAAG/B,GAAG,CAAC+B,YAAY;;IAErC;AACR;AACA;AACA;AACA;IACQ,IAAIf,KAAK,CAACgB,OAAO,CAAChC,GAAG,CAACQ,MAAM,CAAC,EAAE;MAC3B;IACJ;IACA,IAAMA,MAAM,GAAGR,GAAG,CAACQ,MAAM;IACzB,IAAMU,cAAc,GAAGV,MAAM,CAACU,cAAc;;IAE5C;AACR;AACA;AACA;AACA;AACA;IACQ,IAAMe,QAAQ,GAAG,CACbzB,MAAM,CAAC0B,YAAY,EACnB1B,MAAM,CAACU,cAAc,EACrBV,MAAM,CAACkB,MAAM,CAACS,OAAO,CACxB,CAACC,IAAI,CAAC,GAAG,CAAC;IACX,IAAIC,KAAK,GAAG1C,kBAAkB,CAAC2C,GAAG,CAACL,QAAQ,CAAC;IAC5C,IAAI,CAACI,KAAK,EAAE;MACR,IAAI;QACAA,KAAK,GAAG;UACJ;AACpB;AACA;AACA;AACA;UACoBE,sBAAsB,EAAE5B,oBAAoB,CAACH,MAAM,CAAC;UACpDgC,aAAa,EAAE,IAAIC,GAAG,CAAC,CAAC;UACxBjC;QACJ,CAAC;QACDb,kBAAkB,CAAC+C,GAAG,CAACT,QAAQ,EAAEI,KAAK,CAAC;;QAEvC;AAChB;AACA;AACA;AACA;AACA;QACgB,MAAMA,KAAK,CAACE,sBAAsB;MACtC,CAAC,CAAC,OAAO7B,GAAQ,EAAE;QACfhB,QAAQ,CAACU,IAAI,CAAC,IAAAC,uCAAiB,EAACL,GAAG,EAAEU,GAAG,CAAC,CAAC;QAC1C;MACJ;IACJ,CAAC,MAAM;MACH;MACA,IAAI,CAAC,IAAAiB,gBAAS,EAACnB,MAAM,CAACkB,MAAM,EAAEW,KAAK,CAAC7B,MAAM,CAACkB,MAAM,CAAC,EAAE;QAChDhC,QAAQ,CAACU,IAAI,CAAC,IAAAC,uCAAiB,EAACL,GAAG,EAAE,IAAIM,KAAK,CAAC,sDAAsD,CAAC,CAAC,CAAC;QACxG;MACJ;IACJ;IACA+B,KAAK,CAACG,aAAa,CAACG,GAAG,CAAC3C,GAAG,CAAC+B,YAAY,CAAC;IACzC,IAAMa,IAAoB,GAAG,EAAE;IAE/B,IAAMzB,eAAe,GAAG,MAAMkB,KAAK,CAACE,sBAAsB;IAC1D;AACR;AACA;AACA;IACQK,IAAI,CAACC,IAAI,CACL1B,eAAe,CAAC2B,YAAY,CAAC,CAAC,CAAC5C,SAAS,CAAC6C,OAAO,IAAI;MAChD,IAAMC,OAA0B,GAAG;QAC/BjB,YAAY;QACZkB,QAAQ,EAAE,cAAc;QACxBhD,MAAM,EAAE,cAAc;QACtBiD,MAAM,EAAEH;MACZ,CAAC;MACDrD,QAAQ,CAACU,IAAI,CA
AC4C,OAAO,CAAC;IAC1B,CAAC,CACL,CAAC;IACDJ,IAAI,CAACC,IAAI,CACL1B,eAAe,CAACgC,sBAAsB,CAAC,CAAC,CAACjD,SAAS,CAACkD,SAAS,IAAI;MAC5D,IAAMJ,OAA0B,GAAG;QAC/BjB,YAAY;QACZkB,QAAQ,EAAE,wBAAwB;QAClChD,MAAM,EAAE,wBAAwB;QAChCiD,MAAM,EAAEE;MACZ,CAAC;MACD1D,QAAQ,CAACU,IAAI,CAAC4C,OAAO,CAAC;IAC1B,CAAC,CACL,CAAC;IAGD,IAAIK,gBAAgB,GAAG,KAAK;IAC5B,SAASC,mBAAmBA,CAAA,EAAG;MAC3B,IAAID,gBAAgB,EAAE;QAClB;MACJ;MACAA,gBAAgB,GAAG,IAAI;MACvBT,IAAI,CAACW,OAAO,CAACC,GAAG,IAAIA,GAAG,CAACC,WAAW,CAAC,CAAC,CAAC;MACtC,IAAAC,qBAAc,EAACrB,KAAK,CAAC,CAACG,aAAa,CAACmB,MAAM,CAAC5B,YAAY,CAAC;MACxDpC,kBAAkB,CAACgE,MAAM,CAAC1B,QAAQ,CAAC;MACnC;AACZ;AACA;IACQ;;IAEA;IACA,IAAKvC,QAAQ,CAA6CoB,QAAQ,EAAE;MAChE,IAAMA,QAAQ,GAAIpB,QAAQ,CAA6CoB,QAAQ;MAC/E,IAAM8C,UAAU,GAAG9C,QAAQ,CAAC+C,WAAW,CAAC3C,cAAc,CAAC;MACvD,IAAI0C,UAAU,EAAE;QACZA,UAAU,CAACE,SAAS,CAACjB,IAAI,CAAC,MAAMS,mBAAmB,CAAC,CAAC,CAAC;MAC1D,CAAC,MAAM;QACHxC,QAAQ,CAACgD,SAAS,CAACjB,IAAI,CAAC,MAAMS,mBAAmB,CAAC,CAAC,CAAC;MACxD;IACJ;IAEAV,IAAI,CAACC,IAAI,CACLnD,QAAQ,CAACG,SAAS,CAACC,IAAI,CACnB,IAAAC,YAAM,EAACgE,MAAM,IAAKA,MAAM,CAAqBhC,YAAY,KAAKA,YAAY,CAC9E,CAAC,CAAC7B,SAAS,CAAC,MAAO8D,YAAY,IAAK;MAChC,IAAMhB,OAAwB,GAAGgB,YAAmB;MACpD,IACIhB,OAAO,CAAC/C,MAAM,KAAK,QAAQ,IAC3B+C,OAAO,CAAC/C,MAAM,KAAK,QAAQ,EAC7B;QACE;MACJ;MACA,IAAI,CAACe,KAAK,CAACgB,OAAO,CAACgB,OAAO,CAACxC,MAAM,CAAC,EAAE;QAChC;MACJ;MACA,IAAID,MAAM;MACV,IAAI;QACA,IACIyC,OAAO,CAAC/C,MAAM,KAAK,OAAO,IACzBP,QAAQ,CAA6CoB,QAAQ,EAChE;UACE;AACxB;AACA;AACA;AACA;AACA;UACwBpB,QAAQ,CAACU,IAAI,CAAC,IAAAK,kCAAY,EAACuC,OAAO,EAAE,IAAI,CAAC,CAAC;UAC1C;QACJ;QACA;AACpB;AACA;AACA;AACA;QACoB,IACIA,OAAO,CAAC/C,MAAM,KAAK,OAAO,IAC1B,IAAAyD,qBAAc,EAACrB,KAAK,CAAC,CAACG,aAAa,CAACyB,IAAI,GAAG,CAAC,EAC9C;UACEvE,QAAQ,CAACU,IAAI,CAAC,IAAAK,kCAAY,EAACuC,OAAO,EAAE,IAAI,CAAC,CAAC;UAC1C,IAAAU,qBAAc,EAACrB,KAAK,CAAC,CAACG,aAAa,CAACmB,MAAM,CAAC5B,YAAY,CAAC;UACxDa,IAAI,CAACW,OAAO,CAACC,GAAG,IAAIA,GAAG,CAACC,WAAW,CAAC,CAAC,CAAC;UACtC;QACJ;QAEA,IAAIT,OAAO,CAAC/C,MAAM,KAAK,0BAA0B,IAAI,CAACkB,eAAe,CAAC+C,wBAAwB,EAAE;UAC5F3D,MAAM,GAAG,MAAM,IA
AA2D,yCAAwB,EACnC/C,eAAe,EACf6B,OAAO,CAACxC,MAAM,CAAC,CAAC,CAAC,EACjBwC,OAAO,CAACxC,MAAM,CAAC,CAAC,CACpB,CAAC;QACL,CAAC,MAAM;UACHD,MAAM,GAAG,MAAOY,eAAe,CAAS6B,OAAO,CAAC/C,MAAM,CAAC,CACnD+C,OAAO,CAACxC,MAAM,CAAC,CAAC,CAAC,EACjBwC,OAAO,CAACxC,MAAM,CAAC,CAAC,CAAC,EACjBwC,OAAO,CAACxC,MAAM,CAAC,CAAC,CAAC,EACjBwC,OAAO,CAACxC,MAAM,CAAC,CAAC,CACpB,CAAC;QACL;QACA,IACIwC,OAAO,CAAC/C,MAAM,KAAK,OAAO,IAC1B+C,OAAO,CAAC/C,MAAM,KAAK,QAAQ,EAC7B;UACEqD,mBAAmB,CAAC,CAAC;QACzB;QACA5D,QAAQ,CAACU,IAAI,CAAC,IAAAK,kCAAY,EAACuC,OAAO,EAAEzC,MAAM,CAAC,CAAC;MAChD,CAAC,CAAC,OAAOG,GAAQ,EAAE;QACfhB,QAAQ,CAACU,IAAI,CAAC,IAAAC,uCAAiB,EAAC2C,OAAO,EAAEtC,GAAG,CAAC,CAAC;MAClD;IACJ,CAAC,CACL,CAAC;IAEDhB,QAAQ,CAACU,IAAI,CAAC,IAAAK,kCAAY,EAACT,GAAG,EAAE,IAAI,CAAC,CAAC;EAC1C,CAAC,CAAC;EAEF,OAAO;IACHL;EACJ,CAAC;AACL","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-remote/rx-storage-remote.js b/dist/cjs/plugins/storage-remote/rx-storage-remote.js deleted file mode 100644 index 1e2a05ab6a2..00000000000 --- a/dist/cjs/plugins/storage-remote/rx-storage-remote.js +++ /dev/null @@ -1,206 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxStorageRemote = exports.RxStorageInstanceRemote = void 0; -exports.getRxStorageRemote = getRxStorageRemote; -var _rxjs = require("rxjs"); -var _index = require("../../plugins/utils/index.js"); -var _messageChannelCache = require("./message-channel-cache.js"); -var RxStorageRemote = exports.RxStorageRemote = /*#__PURE__*/function () { - function RxStorageRemote(settings) { - this.name = 'remote'; - this.rxdbVersion = _index.RXDB_VERSION; - this.seed = (0, _index.randomCouchString)(10); - this.lastRequestId = 0; - this.settings = settings; - if (settings.mode === 'one') { - this.messageChannelIfOneMode = (0, _messageChannelCache.getMessageChannel)(settings, [], true); - } - } - var _proto = RxStorageRemote.prototype; - _proto.getRequestId = function getRequestId() { - var newId = 
this.lastRequestId++; - return this.seed + '|' + newId; - }; - _proto.createStorageInstance = async function createStorageInstance(params) { - var connectionId = 'c|' + this.getRequestId(); - var cacheKeys = ['mode-' + this.settings.mode]; - switch (this.settings.mode) { - case 'collection': - cacheKeys.push('collection-' + params.collectionName); - // eslint-disable-next-line no-fallthrough - case 'database': - cacheKeys.push('database-' + params.databaseName); - // eslint-disable-next-line no-fallthrough - case 'storage': - cacheKeys.push('seed-' + this.seed); - } - var messageChannel = await (this.messageChannelIfOneMode ? this.messageChannelIfOneMode : (0, _messageChannelCache.getMessageChannel)(this.settings, cacheKeys)); - var requestId = this.getRequestId(); - var waitForOkPromise = (0, _rxjs.firstValueFrom)(messageChannel.messages$.pipe((0, _rxjs.filter)(msg => msg.answerTo === requestId))); - messageChannel.send({ - connectionId, - method: 'create', - requestId, - params - }); - var waitForOkResult = await waitForOkPromise; - if (waitForOkResult.error) { - await (0, _messageChannelCache.closeMessageChannel)(messageChannel); - throw new Error('could not create instance ' + JSON.stringify(waitForOkResult.error)); - } - return new RxStorageInstanceRemote(this, params.databaseName, params.collectionName, params.schema, { - params, - connectionId, - messageChannel - }, params.options); - }; - _proto.customRequest = async function customRequest(data) { - var messageChannel = await this.settings.messageChannelCreator(); - var requestId = this.getRequestId(); - var connectionId = 'custom|request|' + requestId; - var waitForAnswerPromise = (0, _rxjs.firstValueFrom)(messageChannel.messages$.pipe((0, _rxjs.filter)(msg => msg.answerTo === requestId))); - messageChannel.send({ - connectionId, - method: 'custom', - requestId, - params: data - }); - var response = await waitForAnswerPromise; - if (response.error) { - await messageChannel.close(); - throw new Error('could 
not run customRequest(): ' + JSON.stringify({ - data, - error: response.error - })); - } else { - await messageChannel.close(); - return response.return; - } - }; - return RxStorageRemote; -}(); -/** - * Because postMessage() can be very slow on complex objects, - * and some RxStorage implementations do need a JSON-string internally - * anyway, it is allowed to transfer a string instead of an object - * which must then be JSON.parse()-ed before RxDB can use it. - * @link https://surma.dev/things/is-postmessage-slow/ - */ -function getMessageReturn(msg) { - if (msg.method === 'getAttachmentData') { - return msg.return; - } else { - if (typeof msg.return === 'string') { - return JSON.parse(msg.return); - } else { - return msg.return; - } - } -} -var RxStorageInstanceRemote = exports.RxStorageInstanceRemote = /*#__PURE__*/function () { - function RxStorageInstanceRemote(storage, databaseName, collectionName, schema, internals, options) { - this.changes$ = new _rxjs.Subject(); - this.conflicts$ = new _rxjs.Subject(); - this.subs = []; - this.storage = storage; - this.databaseName = databaseName; - this.collectionName = collectionName; - this.schema = schema; - this.internals = internals; - this.options = options; - this.messages$ = this.internals.messageChannel.messages$.pipe((0, _rxjs.filter)(msg => msg.connectionId === this.internals.connectionId)); - this.subs.push(this.messages$.subscribe(msg => { - if (msg.method === 'changeStream') { - this.changes$.next(getMessageReturn(msg)); - } - if (msg.method === 'conflictResultionTasks') { - this.conflicts$.next(msg.return); - } - })); - } - var _proto2 = RxStorageInstanceRemote.prototype; - _proto2.requestRemote = async function requestRemote(methodName, params) { - var requestId = this.storage.getRequestId(); - var responsePromise = (0, _rxjs.firstValueFrom)(this.messages$.pipe((0, _rxjs.filter)(msg => msg.answerTo === requestId))); - var message = { - connectionId: this.internals.connectionId, - requestId, - method: 
methodName, - params - }; - this.internals.messageChannel.send(message); - var response = await responsePromise; - if (response.error) { - throw new Error('could not requestRemote: ' + JSON.stringify({ - methodName, - params, - error: response.error - }, null, 4)); - } else { - return getMessageReturn(response); - } - }; - _proto2.bulkWrite = function bulkWrite(documentWrites, context) { - return this.requestRemote('bulkWrite', [documentWrites, context]); - }; - _proto2.findDocumentsById = function findDocumentsById(ids, deleted) { - return this.requestRemote('findDocumentsById', [ids, deleted]); - }; - _proto2.query = function query(preparedQuery) { - return this.requestRemote('query', [preparedQuery]); - }; - _proto2.count = function count(preparedQuery) { - return this.requestRemote('count', [preparedQuery]); - }; - _proto2.getAttachmentData = function getAttachmentData(documentId, attachmentId, digest) { - return this.requestRemote('getAttachmentData', [documentId, attachmentId, digest]); - }; - _proto2.getChangedDocumentsSince = function getChangedDocumentsSince(limit, checkpoint) { - return this.requestRemote('getChangedDocumentsSince', [limit, checkpoint]); - }; - _proto2.changeStream = function changeStream() { - return this.changes$.asObservable(); - }; - _proto2.cleanup = function cleanup(minDeletedTime) { - return this.requestRemote('cleanup', [minDeletedTime]); - }; - _proto2.close = async function close() { - if (this.closed) { - return this.closed; - } - this.closed = (async () => { - this.subs.forEach(sub => sub.unsubscribe()); - this.changes$.complete(); - await this.requestRemote('close', []); - await (0, _messageChannelCache.closeMessageChannel)(this.internals.messageChannel); - })(); - return this.closed; - }; - _proto2.remove = async function remove() { - if (this.closed) { - throw new Error('already closed'); - } - this.closed = (async () => { - await this.requestRemote('remove', []); - await (0, 
_messageChannelCache.closeMessageChannel)(this.internals.messageChannel); - })(); - return this.closed; - }; - _proto2.conflictResultionTasks = function conflictResultionTasks() { - return this.conflicts$; - }; - _proto2.resolveConflictResultionTask = async function resolveConflictResultionTask(taskSolution) { - await this.requestRemote('resolveConflictResultionTask', [taskSolution]); - }; - return RxStorageInstanceRemote; -}(); -function getRxStorageRemote(settings) { - var withDefaults = Object.assign({ - mode: 'storage' - }, settings); - return new RxStorageRemote(withDefaults); -} -//# sourceMappingURL=rx-storage-remote.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-remote/rx-storage-remote.js.map b/dist/cjs/plugins/storage-remote/rx-storage-remote.js.map deleted file mode 100644 index f83a1067766..00000000000 --- a/dist/cjs/plugins/storage-remote/rx-storage-remote.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-storage-remote.js","names":["_rxjs","require","_index","_messageChannelCache","RxStorageRemote","exports","settings","name","rxdbVersion","RXDB_VERSION","seed","randomCouchString","lastRequestId","mode","messageChannelIfOneMode","getMessageChannel","_proto","prototype","getRequestId","newId","createStorageInstance","params","connectionId","cacheKeys","push","collectionName","databaseName","messageChannel","requestId","waitForOkPromise","firstValueFrom","messages$","pipe","filter","msg","answerTo","send","method","waitForOkResult","error","closeMessageChannel","Error","JSON","stringify","RxStorageInstanceRemote","schema","options","customRequest","data","messageChannelCreator","waitForAnswerPromise","response","close","return","getMessageReturn","parse","storage","internals","changes$","Subject","conflicts$","subs","subscribe","next","_proto2","requestRemote","methodName","responsePromise","message","bulkWrite","documentWrites","context","findDocumentsById","ids","deleted","query","preparedQuery","count","getAttachmen
tData","documentId","attachmentId","digest","getChangedDocumentsSince","limit","checkpoint","changeStream","asObservable","cleanup","minDeletedTime","closed","forEach","sub","unsubscribe","complete","remove","conflictResultionTasks","resolveConflictResultionTask","taskSolution","getRxStorageRemote","withDefaults","Object","assign"],"sources":["../../../../src/plugins/storage-remote/rx-storage-remote.ts"],"sourcesContent":["import {\n firstValueFrom,\n filter,\n Observable,\n Subject,\n Subscription\n} from 'rxjs';\nimport type {\n BulkWriteRow,\n EventBulk,\n RxConflictResultionTask,\n RxConflictResultionTaskSolution,\n RxDocumentData,\n RxJsonSchema,\n RxStorage,\n RxStorageBulkWriteResponse,\n RxStorageChangeEvent,\n RxStorageCountResult,\n RxStorageInstance,\n RxStorageInstanceCreationParams,\n RxStorageQueryResult\n} from '../../types/index.d.ts';\nimport {\n RXDB_VERSION,\n randomCouchString\n} from '../../plugins/utils/index.ts';\nimport type {\n MessageFromRemote,\n MessageToRemote,\n RemoteMessageChannel,\n RxStorageRemoteInternals,\n RxStorageRemoteSettings\n} from './storage-remote-types.ts';\nimport { closeMessageChannel, getMessageChannel } from './message-channel-cache.ts';\nimport { ensureRxStorageInstanceParamsAreCorrect } from '../../rx-storage-helper.ts';\n\n\nexport class RxStorageRemote implements RxStorage {\n public readonly name: string = 'remote';\n public readonly rxdbVersion = RXDB_VERSION;\n\n private seed: string = randomCouchString(10);\n private lastRequestId: number = 0;\n public messageChannelIfOneMode?: Promise;\n constructor(\n public readonly settings: RxStorageRemoteSettings\n ) {\n if (settings.mode === 'one') {\n this.messageChannelIfOneMode = getMessageChannel(\n settings,\n [],\n true\n );\n }\n }\n\n public getRequestId() {\n const newId = this.lastRequestId++;\n return this.seed + '|' + newId;\n }\n\n async createStorageInstance(\n params: RxStorageInstanceCreationParams\n ): Promise> {\n const connectionId = 'c|' + 
this.getRequestId();\n\n const cacheKeys: string[] = [\n 'mode-' + this.settings.mode\n ];\n switch (this.settings.mode) {\n case 'collection':\n cacheKeys.push('collection-' + params.collectionName);\n // eslint-disable-next-line no-fallthrough\n case 'database':\n cacheKeys.push('database-' + params.databaseName);\n // eslint-disable-next-line no-fallthrough\n case 'storage':\n cacheKeys.push('seed-' + this.seed);\n }\n const messageChannel = await (this.messageChannelIfOneMode ?\n this.messageChannelIfOneMode :\n getMessageChannel(\n this.settings,\n cacheKeys\n )\n );\n\n const requestId = this.getRequestId();\n const waitForOkPromise = firstValueFrom(messageChannel.messages$.pipe(\n filter(msg => msg.answerTo === requestId)\n ));\n messageChannel.send({\n connectionId,\n method: 'create',\n requestId,\n params\n });\n\n const waitForOkResult = await waitForOkPromise;\n if (waitForOkResult.error) {\n await closeMessageChannel(messageChannel);\n throw new Error('could not create instance ' + JSON.stringify(waitForOkResult.error));\n }\n\n return new RxStorageInstanceRemote(\n this,\n params.databaseName,\n params.collectionName,\n params.schema,\n {\n params,\n connectionId,\n messageChannel\n },\n params.options\n );\n }\n\n async customRequest(data: In): Promise {\n const messageChannel = await this.settings.messageChannelCreator();\n const requestId = this.getRequestId();\n const connectionId = 'custom|request|' + requestId;\n const waitForAnswerPromise = firstValueFrom(messageChannel.messages$.pipe(\n filter(msg => msg.answerTo === requestId)\n ));\n messageChannel.send({\n connectionId,\n method: 'custom',\n requestId,\n params: data\n });\n const response = await waitForAnswerPromise;\n if (response.error) {\n await messageChannel.close();\n throw new Error('could not run customRequest(): ' + JSON.stringify({\n data,\n error: response.error\n }));\n } else {\n await messageChannel.close();\n return response.return;\n }\n\n }\n}\n\n/**\n * Because 
postMessage() can be very slow on complex objects,\n * and some RxStorage implementations do need a JSON-string internally\n * anyway, it is allowed to transfer a string instead of an object\n * which must then be JSON.parse()-ed before RxDB can use it.\n * @link https://surma.dev/things/is-postmessage-slow/\n */\nfunction getMessageReturn(\n msg: MessageFromRemote\n) {\n if (msg.method === 'getAttachmentData') {\n return msg.return;\n } else {\n if (typeof msg.return === 'string') {\n return JSON.parse(msg.return);\n } else {\n return msg.return;\n }\n }\n}\n\nexport class RxStorageInstanceRemote implements RxStorageInstance {\n private changes$: Subject>, any>> = new Subject();\n private conflicts$: Subject> = new Subject();\n private subs: Subscription[] = [];\n\n private closed?: Promise;\n messages$: Observable;\n\n constructor(\n public readonly storage: RxStorageRemote,\n public readonly databaseName: string,\n public readonly collectionName: string,\n public readonly schema: Readonly>>,\n public readonly internals: RxStorageRemoteInternals,\n public readonly options: Readonly\n ) {\n this.messages$ = this.internals.messageChannel.messages$.pipe(\n filter(msg => msg.connectionId === this.internals.connectionId)\n );\n this.subs.push(\n this.messages$.subscribe(msg => {\n if (msg.method === 'changeStream') {\n this.changes$.next(getMessageReturn(msg));\n }\n if (msg.method === 'conflictResultionTasks') {\n this.conflicts$.next(msg.return);\n }\n })\n );\n }\n\n private async requestRemote(\n methodName: keyof RxStorageInstance,\n params: any\n ) {\n const requestId = this.storage.getRequestId();\n const responsePromise = firstValueFrom(\n this.messages$.pipe(\n filter(msg => msg.answerTo === requestId)\n )\n );\n const message: MessageToRemote = {\n connectionId: this.internals.connectionId,\n requestId,\n method: methodName,\n params\n };\n this.internals.messageChannel.send(message);\n const response = await responsePromise;\n if (response.error) {\n throw 
new Error('could not requestRemote: ' + JSON.stringify({\n methodName,\n params,\n error: response.error\n }, null, 4));\n } else {\n return getMessageReturn(response);\n }\n }\n bulkWrite(\n documentWrites: BulkWriteRow[],\n context: string\n ): Promise> {\n return this.requestRemote('bulkWrite', [documentWrites, context]);\n }\n findDocumentsById(ids: string[], deleted: boolean): Promise[]> {\n return this.requestRemote('findDocumentsById', [ids, deleted]);\n }\n query(preparedQuery: any): Promise> {\n return this.requestRemote('query', [preparedQuery]);\n }\n count(preparedQuery: any): Promise {\n return this.requestRemote('count', [preparedQuery]);\n }\n getAttachmentData(documentId: string, attachmentId: string, digest: string): Promise {\n return this.requestRemote('getAttachmentData', [documentId, attachmentId, digest]);\n }\n getChangedDocumentsSince(\n limit: number,\n checkpoint?: any\n ): Promise<\n {\n documents: RxDocumentData[];\n checkpoint: any;\n }> {\n return this.requestRemote('getChangedDocumentsSince', [limit, checkpoint]);\n }\n changeStream(): Observable>, any>> {\n return this.changes$.asObservable();\n }\n cleanup(minDeletedTime: number): Promise {\n return this.requestRemote('cleanup', [minDeletedTime]);\n }\n async close(): Promise {\n if (this.closed) {\n return this.closed;\n }\n this.closed = (async () => {\n this.subs.forEach(sub => sub.unsubscribe());\n this.changes$.complete();\n await this.requestRemote('close', []);\n await closeMessageChannel(this.internals.messageChannel);\n })();\n return this.closed;\n }\n async remove(): Promise {\n if (this.closed) {\n throw new Error('already closed');\n }\n this.closed = (async () => {\n await this.requestRemote('remove', []);\n await closeMessageChannel(this.internals.messageChannel);\n })();\n return this.closed;\n }\n conflictResultionTasks(): Observable> {\n return this.conflicts$;\n }\n async resolveConflictResultionTask(taskSolution: RxConflictResultionTaskSolution): Promise {\n 
await this.requestRemote('resolveConflictResultionTask', [taskSolution]);\n }\n}\n\nexport function getRxStorageRemote(settings: RxStorageRemoteSettings): RxStorageRemote {\n const withDefaults = Object.assign({\n mode: 'storage'\n }, settings);\n return new RxStorageRemote(withDefaults);\n}\n"],"mappings":";;;;;;;AAAA,IAAAA,KAAA,GAAAC,OAAA;AAsBA,IAAAC,MAAA,GAAAD,OAAA;AAWA,IAAAE,oBAAA,GAAAF,OAAA;AAAoF,IAIvEG,eAAe,GAAAC,OAAA,CAAAD,eAAA;EAOxB,SAAAA,gBACoBE,QAAiC,EACnD;IAAA,KARcC,IAAI,GAAW,QAAQ;IAAA,KACvBC,WAAW,GAAGC,mBAAY;IAAA,KAElCC,IAAI,GAAW,IAAAC,wBAAiB,EAAC,EAAE,CAAC;IAAA,KACpCC,aAAa,GAAW,CAAC;IAAA,KAGbN,QAAiC,GAAjCA,QAAiC;IAEjD,IAAIA,QAAQ,CAACO,IAAI,KAAK,KAAK,EAAE;MACzB,IAAI,CAACC,uBAAuB,GAAG,IAAAC,sCAAiB,EAC5CT,QAAQ,EACR,EAAE,EACF,IACJ,CAAC;IACL;EACJ;EAAC,IAAAU,MAAA,GAAAZ,eAAA,CAAAa,SAAA;EAAAD,MAAA,CAEME,YAAY,GAAnB,SAAAA,aAAA,EAAsB;IAClB,IAAMC,KAAK,GAAG,IAAI,CAACP,aAAa,EAAE;IAClC,OAAO,IAAI,CAACF,IAAI,GAAG,GAAG,GAAGS,KAAK;EAClC,CAAC;EAAAH,MAAA,CAEKI,qBAAqB,GAA3B,eAAAA,sBACIC,MAAuD,EACZ;IAC3C,IAAMC,YAAY,GAAG,IAAI,GAAG,IAAI,CAACJ,YAAY,CAAC,CAAC;IAE/C,IAAMK,SAAmB,GAAG,CACxB,OAAO,GAAG,IAAI,CAACjB,QAAQ,CAACO,IAAI,CAC/B;IACD,QAAQ,IAAI,CAACP,QAAQ,CAACO,IAAI;MACtB,KAAK,YAAY;QACbU,SAAS,CAACC,IAAI,CAAC,aAAa,GAAGH,MAAM,CAACI,cAAc,CAAC;MACzD;MACA,KAAK,UAAU;QACXF,SAAS,CAACC,IAAI,CAAC,WAAW,GAAGH,MAAM,CAACK,YAAY,CAAC;MACrD;MACA,KAAK,SAAS;QACVH,SAAS,CAACC,IAAI,CAAC,OAAO,GAAG,IAAI,CAACd,IAAI,CAAC;IAC3C;IACA,IAAMiB,cAAc,GAAG,OAAO,IAAI,CAACb,uBAAuB,GACtD,IAAI,CAACA,uBAAuB,GAC5B,IAAAC,sCAAiB,EACb,IAAI,CAACT,QAAQ,EACbiB,SACJ,CAAC,CACJ;IAED,IAAMK,SAAS,GAAG,IAAI,CAACV,YAAY,CAAC,CAAC;IACrC,IAAMW,gBAAgB,GAAG,IAAAC,oBAAc,EAACH,cAAc,CAACI,SAAS,CAACC,IAAI,CACjE,IAAAC,YAAM,EAACC,GAAG,IAAIA,GAAG,CAACC,QAAQ,KAAKP,SAAS,CAC5C,CAAC,CAAC;IACFD,cAAc,CAACS,IAAI,CAAC;MAChBd,YAAY;MACZe,MAAM,EAAE,QAAQ;MAChBT,SAAS;MACTP;IACJ,CAAC,CAAC;IAEF,IAAMiB,eAAe,GAAG,MAAMT,gBAAgB;IAC9C,IAAIS,eAAe,CAACC,KAAK,EAAE;MACvB,MAAM,IAAAC,wCAAmB,EAACb,cAAc,CAAC;MACzC,MAAM,IAAIc,KAAK,CAAC,4BAA4B,GAAGC,IAAI,CAACC,SAAS,CAACL,e
AAe,CAACC,KAAK,CAAC,CAAC;IACzF;IAEA,OAAO,IAAIK,uBAAuB,CAC9B,IAAI,EACJvB,MAAM,CAACK,YAAY,EACnBL,MAAM,CAACI,cAAc,EACrBJ,MAAM,CAACwB,MAAM,EACb;MACIxB,MAAM;MACNC,YAAY;MACZK;IACJ,CAAC,EACDN,MAAM,CAACyB,OACX,CAAC;EACL,CAAC;EAAA9B,MAAA,CAEK+B,aAAa,GAAnB,eAAAA,cAA6BC,IAAQ,EAAgB;IACjD,IAAMrB,cAAc,GAAG,MAAM,IAAI,CAACrB,QAAQ,CAAC2C,qBAAqB,CAAC,CAAC;IAClE,IAAMrB,SAAS,GAAG,IAAI,CAACV,YAAY,CAAC,CAAC;IACrC,IAAMI,YAAY,GAAG,iBAAiB,GAAGM,SAAS;IAClD,IAAMsB,oBAAoB,GAAG,IAAApB,oBAAc,EAACH,cAAc,CAACI,SAAS,CAACC,IAAI,CACrE,IAAAC,YAAM,EAACC,GAAG,IAAIA,GAAG,CAACC,QAAQ,KAAKP,SAAS,CAC5C,CAAC,CAAC;IACFD,cAAc,CAACS,IAAI,CAAC;MAChBd,YAAY;MACZe,MAAM,EAAE,QAAQ;MAChBT,SAAS;MACTP,MAAM,EAAE2B;IACZ,CAAC,CAAC;IACF,IAAMG,QAAQ,GAAG,MAAMD,oBAAoB;IAC3C,IAAIC,QAAQ,CAACZ,KAAK,EAAE;MAChB,MAAMZ,cAAc,CAACyB,KAAK,CAAC,CAAC;MAC5B,MAAM,IAAIX,KAAK,CAAC,iCAAiC,GAAGC,IAAI,CAACC,SAAS,CAAC;QAC/DK,IAAI;QACJT,KAAK,EAAEY,QAAQ,CAACZ;MACpB,CAAC,CAAC,CAAC;IACP,CAAC,MAAM;MACH,MAAMZ,cAAc,CAACyB,KAAK,CAAC,CAAC;MAC5B,OAAOD,QAAQ,CAACE,MAAM;IAC1B;EAEJ,CAAC;EAAA,OAAAjD,eAAA;AAAA;AAGL;AACA;AACA;AACA;AACA;AACA;AACA;AACA,SAASkD,gBAAgBA,CACrBpB,GAAsB,EACxB;EACE,IAAIA,GAAG,CAACG,MAAM,KAAK,mBAAmB,EAAE;IACpC,OAAOH,GAAG,CAACmB,MAAM;EACrB,CAAC,MAAM;IACH,IAAI,OAAOnB,GAAG,CAACmB,MAAM,KAAK,QAAQ,EAAE;MAChC,OAAOX,IAAI,CAACa,KAAK,CAACrB,GAAG,CAACmB,MAAM,CAAC;IACjC,CAAC,MAAM;MACH,OAAOnB,GAAG,CAACmB,MAAM;IACrB;EACJ;AACJ;AAAC,IAEYT,uBAAuB,GAAAvC,OAAA,CAAAuC,uBAAA;EAQhC,SAAAA,wBACoBY,OAAwB,EACxB9B,YAAoB,EACpBD,cAAsB,EACtBoB,MAAyD,EACzDY,SAAmC,EACnCX,OAAsB,EACxC;IAAA,KAdMY,QAAQ,GAA6E,IAAIC,aAAO,CAAC,CAAC;IAAA,KAClGC,UAAU,GAAgD,IAAID,aAAO,CAAC,CAAC;IAAA,KACvEE,IAAI,GAAmB,EAAE;IAAA,KAMbL,OAAwB,GAAxBA,OAAwB;IAAA,KACxB9B,YAAoB,GAApBA,YAAoB;IAAA,KACpBD,cAAsB,GAAtBA,cAAsB;IAAA,KACtBoB,MAAyD,GAAzDA,MAAyD;IAAA,KACzDY,SAAmC,GAAnCA,SAAmC;IAAA,KACnCX,OAAsB,GAAtBA,OAAsB;IAEtC,IAAI,CAACf,SAAS,GAAG,IAAI,CAAC0B,SAAS,CAAC9B,cAAc,CAACI,SAAS,CAACC,IAAI,CACzD,IAAAC,YAAM,EAACC,GAAG,IAAIA,GAAG,CAACZ,YAAY,KAAK,IAAI,CAACmC,SAAS,CAACnC,YAAY,CAClE,CAAC;IACD,IAAI,CAACuC,IAAI,C
AACrC,IAAI,CACV,IAAI,CAACO,SAAS,CAAC+B,SAAS,CAAC5B,GAAG,IAAI;MAC5B,IAAIA,GAAG,CAACG,MAAM,KAAK,cAAc,EAAE;QAC/B,IAAI,CAACqB,QAAQ,CAACK,IAAI,CAACT,gBAAgB,CAACpB,GAAG,CAAC,CAAC;MAC7C;MACA,IAAIA,GAAG,CAACG,MAAM,KAAK,wBAAwB,EAAE;QACzC,IAAI,CAACuB,UAAU,CAACG,IAAI,CAAC7B,GAAG,CAACmB,MAAM,CAAC;MACpC;IACJ,CAAC,CACL,CAAC;EACL;EAAC,IAAAW,OAAA,GAAApB,uBAAA,CAAA3B,SAAA;EAAA+C,OAAA,CAEaC,aAAa,GAA3B,eAAAA,cACIC,UAAkD,EAClD7C,MAAW,EACb;IACE,IAAMO,SAAS,GAAG,IAAI,CAAC4B,OAAO,CAACtC,YAAY,CAAC,CAAC;IAC7C,IAAMiD,eAAe,GAAG,IAAArC,oBAAc,EAClC,IAAI,CAACC,SAAS,CAACC,IAAI,CACf,IAAAC,YAAM,EAACC,GAAG,IAAIA,GAAG,CAACC,QAAQ,KAAKP,SAAS,CAC5C,CACJ,CAAC;IACD,IAAMwC,OAAwB,GAAG;MAC7B9C,YAAY,EAAE,IAAI,CAACmC,SAAS,CAACnC,YAAY;MACzCM,SAAS;MACTS,MAAM,EAAE6B,UAAU;MAClB7C;IACJ,CAAC;IACD,IAAI,CAACoC,SAAS,CAAC9B,cAAc,CAACS,IAAI,CAACgC,OAAO,CAAC;IAC3C,IAAMjB,QAAQ,GAAG,MAAMgB,eAAe;IACtC,IAAIhB,QAAQ,CAACZ,KAAK,EAAE;MAChB,MAAM,IAAIE,KAAK,CAAC,2BAA2B,GAAGC,IAAI,CAACC,SAAS,CAAC;QACzDuB,UAAU;QACV7C,MAAM;QACNkB,KAAK,EAAEY,QAAQ,CAACZ;MACpB,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;IAChB,CAAC,MAAM;MACH,OAAOe,gBAAgB,CAACH,QAAQ,CAAC;IACrC;EACJ,CAAC;EAAAa,OAAA,CACDK,SAAS,GAAT,SAAAA,UACIC,cAAyC,EACzCC,OAAe,EAC+B;IAC9C,OAAO,IAAI,CAACN,aAAa,CAAC,WAAW,EAAE,CAACK,cAAc,EAAEC,OAAO,CAAC,CAAC;EACrE,CAAC;EAAAP,OAAA,CACDQ,iBAAiB,GAAjB,SAAAA,kBAAkBC,GAAa,EAAEC,OAAgB,EAAwC;IACrF,OAAO,IAAI,CAACT,aAAa,CAAC,mBAAmB,EAAE,CAACQ,GAAG,EAAEC,OAAO,CAAC,CAAC;EAClE,CAAC;EAAAV,OAAA,CACDW,KAAK,GAAL,SAAAA,MAAMC,aAAkB,EAA4C;IAChE,OAAO,IAAI,CAACX,aAAa,CAAC,OAAO,EAAE,CAACW,aAAa,CAAC,CAAC;EACvD,CAAC;EAAAZ,OAAA,CACDa,KAAK,GAAL,SAAAA,MAAMD,aAAkB,EAAiC;IACrD,OAAO,IAAI,CAACX,aAAa,CAAC,OAAO,EAAE,CAACW,aAAa,CAAC,CAAC;EACvD,CAAC;EAAAZ,OAAA,CACDc,iBAAiB,GAAjB,SAAAA,kBAAkBC,UAAkB,EAAEC,YAAoB,EAAEC,MAAc,EAAmB;IACzF,OAAO,IAAI,CAAChB,aAAa,CAAC,mBAAmB,EAAE,CAACc,UAAU,EAAEC,YAAY,EAAEC,MAAM,CAAC,CAAC;EACtF,CAAC;EAAAjB,OAAA,CACDkB,wBAAwB,GAAxB,SAAAA,yBACIC,KAAa,EACbC,UAAgB,EAKb;IACH,OAAO,IAAI,CAACnB,aAAa,CAAC,0BAA0B,EAAE,CAACkB,KAAK,EAAEC,UAAU,CAAC,CAAC;EAC9E,CAAC;EAAApB,OAAA
,CACDqB,YAAY,GAAZ,SAAAA,aAAA,EAA4F;IACxF,OAAO,IAAI,CAAC3B,QAAQ,CAAC4B,YAAY,CAAC,CAAC;EACvC,CAAC;EAAAtB,OAAA,CACDuB,OAAO,GAAP,SAAAA,QAAQC,cAAsB,EAAoB;IAC9C,OAAO,IAAI,CAACvB,aAAa,CAAC,SAAS,EAAE,CAACuB,cAAc,CAAC,CAAC;EAC1D,CAAC;EAAAxB,OAAA,CACKZ,KAAK,GAAX,eAAAA,MAAA,EAA6B;IACzB,IAAI,IAAI,CAACqC,MAAM,EAAE;MACb,OAAO,IAAI,CAACA,MAAM;IACtB;IACA,IAAI,CAACA,MAAM,GAAG,CAAC,YAAY;MACvB,IAAI,CAAC5B,IAAI,CAAC6B,OAAO,CAACC,GAAG,IAAIA,GAAG,CAACC,WAAW,CAAC,CAAC,CAAC;MAC3C,IAAI,CAAClC,QAAQ,CAACmC,QAAQ,CAAC,CAAC;MACxB,MAAM,IAAI,CAAC5B,aAAa,CAAC,OAAO,EAAE,EAAE,CAAC;MACrC,MAAM,IAAAzB,wCAAmB,EAAC,IAAI,CAACiB,SAAS,CAAC9B,cAAc,CAAC;IAC5D,CAAC,EAAE,CAAC;IACJ,OAAO,IAAI,CAAC8D,MAAM;EACtB,CAAC;EAAAzB,OAAA,CACK8B,MAAM,GAAZ,eAAAA,OAAA,EAA8B;IAC1B,IAAI,IAAI,CAACL,MAAM,EAAE;MACb,MAAM,IAAIhD,KAAK,CAAC,gBAAgB,CAAC;IACrC;IACA,IAAI,CAACgD,MAAM,GAAG,CAAC,YAAY;MACvB,MAAM,IAAI,CAACxB,aAAa,CAAC,QAAQ,EAAE,EAAE,CAAC;MACtC,MAAM,IAAAzB,wCAAmB,EAAC,IAAI,CAACiB,SAAS,CAAC9B,cAAc,CAAC;IAC5D,CAAC,EAAE,CAAC;IACJ,OAAO,IAAI,CAAC8D,MAAM;EACtB,CAAC;EAAAzB,OAAA,CACD+B,sBAAsB,GAAtB,SAAAA,uBAAA,EAAyE;IACrE,OAAO,IAAI,CAACnC,UAAU;EAC1B,CAAC;EAAAI,OAAA,CACKgC,4BAA4B,GAAlC,eAAAA,6BAAmCC,YAAwD,EAAiB;IACxG,MAAM,IAAI,CAAChC,aAAa,CAAC,8BAA8B,EAAE,CAACgC,YAAY,CAAC,CAAC;EAC5E,CAAC;EAAA,OAAArD,uBAAA;AAAA;AAGE,SAASsD,kBAAkBA,CAAC5F,QAAiC,EAAmB;EACnF,IAAM6F,YAAY,GAAGC,MAAM,CAACC,MAAM,CAAC;IAC/BxF,IAAI,EAAE;EACV,CAAC,EAAEP,QAAQ,CAAC;EACZ,OAAO,IAAIF,eAAe,CAAC+F,YAAY,CAAC;AAC5C","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-remote/storage-remote-helpers.js b/dist/cjs/plugins/storage-remote/storage-remote-helpers.js deleted file mode 100644 index 3bb688fd0de..00000000000 --- a/dist/cjs/plugins/storage-remote/storage-remote-helpers.js +++ /dev/null @@ -1,25 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.createAnswer = createAnswer; -exports.createErrorAnswer = createErrorAnswer; -var _index = require("../../plugins/utils/index.js"); -function 
createErrorAnswer(msg, error) { - return { - connectionId: msg.connectionId, - answerTo: msg.requestId, - method: msg.method, - error: (0, _index.errorToPlainJson)(error) - }; -} -function createAnswer(msg, ret) { - return { - connectionId: msg.connectionId, - answerTo: msg.requestId, - method: msg.method, - return: ret - }; -} -//# sourceMappingURL=storage-remote-helpers.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-remote/storage-remote-helpers.js.map b/dist/cjs/plugins/storage-remote/storage-remote-helpers.js.map deleted file mode 100644 index b1e7e153f11..00000000000 --- a/dist/cjs/plugins/storage-remote/storage-remote-helpers.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"storage-remote-helpers.js","names":["_index","require","createErrorAnswer","msg","error","connectionId","answerTo","requestId","method","errorToPlainJson","createAnswer","ret","return"],"sources":["../../../../src/plugins/storage-remote/storage-remote-helpers.ts"],"sourcesContent":["import type {\n RxError,\n RxTypeError\n} from '../../types/index.d.ts';\nimport { errorToPlainJson } from '../../plugins/utils/index.ts';\nimport type {\n MessageFromRemote,\n MessageToRemote\n} from './storage-remote-types.ts';\n\nexport function createErrorAnswer(\n msg: MessageToRemote,\n error: Error | TypeError | RxError | RxTypeError\n): MessageFromRemote {\n return {\n connectionId: msg.connectionId,\n answerTo: msg.requestId,\n method: msg.method,\n error: errorToPlainJson(error)\n };\n}\n\nexport function createAnswer(\n msg: MessageToRemote,\n ret: any\n): MessageFromRemote {\n return {\n connectionId: msg.connectionId,\n answerTo: msg.requestId,\n method: msg.method,\n return: ret\n 
};\n}\n"],"mappings":";;;;;;;AAIA,IAAAA,MAAA,GAAAC,OAAA;AAMO,SAASC,iBAAiBA,CAC7BC,GAAoB,EACpBC,KAAgD,EAC/B;EACjB,OAAO;IACHC,YAAY,EAAEF,GAAG,CAACE,YAAY;IAC9BC,QAAQ,EAAEH,GAAG,CAACI,SAAS;IACvBC,MAAM,EAAEL,GAAG,CAACK,MAAM;IAClBJ,KAAK,EAAE,IAAAK,uBAAgB,EAACL,KAAK;EACjC,CAAC;AACL;AAEO,SAASM,YAAYA,CACxBP,GAAoB,EACpBQ,GAAQ,EACS;EACjB,OAAO;IACHN,YAAY,EAAEF,GAAG,CAACE,YAAY;IAC9BC,QAAQ,EAAEH,GAAG,CAACI,SAAS;IACvBC,MAAM,EAAEL,GAAG,CAACK,MAAM;IAClBI,MAAM,EAAED;EACZ,CAAC;AACL","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/storage-remote/storage-remote-types.js b/dist/cjs/plugins/storage-remote/storage-remote-types.js deleted file mode 100644 index b5e53315292..00000000000 --- a/dist/cjs/plugins/storage-remote/storage-remote-types.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=storage-remote-types.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/storage-remote/storage-remote-types.js.map b/dist/cjs/plugins/storage-remote/storage-remote-types.js.map deleted file mode 100644 index c14574b1815..00000000000 --- a/dist/cjs/plugins/storage-remote/storage-remote-types.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"storage-remote-types.js","names":[],"sources":["../../../../src/plugins/storage-remote/storage-remote-types.ts"],"sourcesContent":["import type { Observable } from 'rxjs';\nimport type {\n MaybePromise,\n PlainJsonError,\n RxDatabase,\n RxStorage,\n RxStorageInstance,\n RxStorageInstanceCreationParams\n} from '../../types/index.d.ts';\n\n\n\nexport type MessageFromRemote = {\n connectionId: string;\n answerTo: string; // id of the request\n method: keyof RxStorageInstance | 'create' | 'custom';\n error?: PlainJsonError;\n return?: any | string;\n};\n\nexport type MessageToRemote = {\n connectionId: string;\n /**\n * Unique ID of the request\n */\n requestId: string;\n method: keyof RxStorageInstance | 'create' | 'custom';\n params:\n RxStorageInstanceCreationParams | // used in the create call\n 
any[] | // used to call RxStorageInstance methods\n any; // used in custom requests\n};\n\n\n/**\n * A message channel represents a single\n * channel that is able to communicate with the remote.\n * For example a single websocket connection or WebWorker instance.\n * The storage must be able to open and close MessageChannels\n * according to the modes settings.\n */\nexport type RemoteMessageChannel = {\n send(msg: MessageToRemote): void;\n messages$: Observable;\n close(): Promise;\n};\n\nexport type RxStorageRemoteSettings = {\n identifier: string;\n /**\n * There are different modes\n * that determine how many message channels are used.\n * These modes can have different performance patterns.\n *\n * [default='storage']\n */\n mode?:\n // create exactly one RemoteMessageChannel and reuse that everywhere.\n | 'one'\n // storage: create one RemoteMessageChannel per call to getRxStorage...()\n | 'storage'\n // database: create one RemoteMessageChannel for each database\n | 'database'\n // collection: create one RemoteMessageChannel for each collection\n | 'collection';\n messageChannelCreator: () => Promise;\n};\n\nexport type RxStorageRemoteInternals = {\n params: RxStorageInstanceCreationParams;\n connectionId: string;\n messageChannel: RemoteMessageChannel;\n};\n\nexport type RxStorageRemoteExposeSettingsBase = {\n send(msg: MessageFromRemote): void;\n messages$: Observable;\n customRequestHandler?: CustomRequestHandler;\n};\n\nexport type RxStorageRemoteExposeSettingsRxDatabase = RxStorageRemoteExposeSettingsBase & {\n /**\n * The database which must be mapped to the remote storage server.\n */\n database: RxDatabase;\n};\n\nexport type RxStorageRemoteExposeSettingsRxStorage = RxStorageRemoteExposeSettingsBase & {\n /**\n * The original storage\n * which actually stores the data.\n */\n storage: RxStorage;\n};\n\nexport type RxStorageRemoteExposeSettings = RxStorageRemoteExposeSettingsRxDatabase | RxStorageRemoteExposeSettingsRxStorage;\n\nexport type 
RxStorageRemoteExposeType = {\n instanceByFullName: Map;\n};\n\n/**\n * If set, the clients can send RxDB-unrelated custom messages\n * to the remote storage and it will answer them.\n */\nexport type CustomRequestHandler = (data: In) => MaybePromise;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/test-utils/config.js b/dist/cjs/plugins/test-utils/config.js deleted file mode 100644 index 9ff71b6f3a2..00000000000 --- a/dist/cjs/plugins/test-utils/config.js +++ /dev/null @@ -1,123 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.ENV_VARIABLES = exports.DEFAULT_STORAGE = void 0; -exports.getConfig = getConfig; -exports.getEncryptedStorage = getEncryptedStorage; -exports.getPassword = getPassword; -exports.initTestEnvironment = initTestEnvironment; -exports.isDeno = exports.isBun = void 0; -exports.isFastMode = isFastMode; -exports.isNode = void 0; -exports.isNotOneOfTheseStorages = isNotOneOfTheseStorages; -exports.setConfig = setConfig; -var _index = require("../utils/index.js"); -var _broadcastChannel = require("broadcast-channel"); -var _nodeEvents = _interopRequireDefault(require("node:events")); -var _index2 = require("../encryption-crypto-js/index.js"); -/// - -var isDeno = exports.isDeno = typeof window !== 'undefined' && 'Deno' in window; -var isBun = exports.isBun = typeof process !== 'undefined' && !!process.versions.bun; -var isNode = exports.isNode = !isDeno && !isBun && typeof window === 'undefined'; -var config; -function setConfig(newConfig) { - config = newConfig; -} -var initDone = false; -function getConfig() { - if (!initDone) { - initTestEnvironment(); - initDone = true; - } - return (0, _index.ensureNotFalsy)(config, 'testConfig not set'); -} -function getEnvVariables() { - if (isDeno) { - var ret = {}; - ['DEFAULT_STORAGE', 'NODE_ENV'].forEach(k => { - ret[k] = 
Deno.env.get(k); - }); - return ret; - } - return isBun || isNode ? process.env : window.__karma__.config.env; -} -var ENV_VARIABLES = exports.ENV_VARIABLES = getEnvVariables(); -var DEFAULT_STORAGE = exports.DEFAULT_STORAGE = ENV_VARIABLES.DEFAULT_STORAGE; -function isFastMode() { - try { - return ENV_VARIABLES.NODE_ENV === 'fast'; - } catch (err) { - return false; - } -} -function initTestEnvironment() { - if (ENV_VARIABLES.NODE_ENV === 'fast') { - (0, _broadcastChannel.enforceOptions)({ - type: 'simulate' - }); - } - - /** - * Overwrite the console for easier debugging - */ - var oldConsoleLog = console.log.bind(console); - var oldConsoleDir = console.dir.bind(console); - function newLog(value) { - if ((0, _index.isPromise)(value)) { - oldConsoleDir(value); - throw new Error('cannot log Promise(), you should await it first'); - } - if (typeof value === 'string' || typeof value === 'number') { - oldConsoleLog(value); - return; - } - try { - JSON.stringify(value); - oldConsoleLog(JSON.stringify(value, null, 4)); - } catch (err) { - oldConsoleDir(value); - } - } - console.log = newLog.bind(console); - console.dir = newLog.bind(console); - console.log('DEFAULT_STORAGE: ' + DEFAULT_STORAGE); - if (isNode) { - process.setMaxListeners(100); - _nodeEvents.default.EventEmitter.defaultMaxListeners = 100; - - /** - * Add a global function to process, so we can debug timings - */ - process.startTime = performance.now(); - process.logTime = (msg = '') => { - var diff = performance.now() - process.startTime; - console.log('process logTime(' + msg + ') ' + diff + 'ms'); - }; - } -} -function getEncryptedStorage(baseStorage = getConfig().storage.getStorage()) { - var ret = config.storage.hasEncryption ? 
baseStorage : (0, _index2.wrappedKeyEncryptionCryptoJsStorage)({ - storage: baseStorage - }); - return ret; -} -function isNotOneOfTheseStorages(storageNames) { - var isName = getConfig().storage.name; - if (storageNames.includes(isName)) { - return false; - } else { - return true; - } -} -function getPassword() { - if (getConfig().storage.hasEncryption) { - return (0, _index.ensureNotFalsy)(getConfig().storage.hasEncryption)(); - } else { - return Promise.resolve('test-password-' + (0, _index.randomCouchString)(10)); - } -} -//# sourceMappingURL=config.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/test-utils/config.js.map b/dist/cjs/plugins/test-utils/config.js.map deleted file mode 100644 index f5bafab684e..00000000000 --- a/dist/cjs/plugins/test-utils/config.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"config.js","names":["_index","require","_broadcastChannel","_nodeEvents","_interopRequireDefault","_index2","isDeno","exports","window","isBun","process","versions","bun","isNode","config","setConfig","newConfig","initDone","getConfig","initTestEnvironment","ensureNotFalsy","getEnvVariables","ret","forEach","k","Deno","env","get","__karma__","ENV_VARIABLES","DEFAULT_STORAGE","isFastMode","NODE_ENV","err","broadcastChannelEnforceOptions","type","oldConsoleLog","console","log","bind","oldConsoleDir","dir","newLog","value","isPromise","Error","JSON","stringify","setMaxListeners","events","EventEmitter","defaultMaxListeners","startTime","performance","now","logTime","msg","diff","getEncryptedStorage","baseStorage","storage","getStorage","hasEncryption","wrappedKeyEncryptionCryptoJsStorage","isNotOneOfTheseStorages","storageNames","isName","name","includes","getPassword","Promise","resolve","randomCouchString"],"sources":["../../../../src/plugins/test-utils/config.ts"],"sourcesContent":["/// \nimport {\n ensureNotFalsy,\n isPromise,\n randomCouchString\n} from '../utils/index.ts';\nimport {\n enforceOptions as 
broadcastChannelEnforceOptions\n} from 'broadcast-channel';\nimport events from 'node:events';\nimport * as path from 'node:path';\nimport url from 'node:url';\nimport type { RxStorage, RxTestStorage } from '../../types';\nimport { wrappedKeyEncryptionCryptoJsStorage } from '../encryption-crypto-js/index.ts';\n\nexport type TestConfig = {\n storage: RxTestStorage;\n};\n\nexport const isDeno = typeof window !== 'undefined' && 'Deno' in window;\nexport const isBun = typeof process !== 'undefined' && !!process.versions.bun;\nexport const isNode = !isDeno && !isBun && typeof window === 'undefined';\n\nlet config: TestConfig;\n\nexport function setConfig(newConfig: TestConfig) {\n config = newConfig;\n}\n\nlet initDone = false;\nexport function getConfig() {\n if (!initDone) {\n initTestEnvironment();\n initDone = true;\n }\n return ensureNotFalsy(config, 'testConfig not set')\n}\n\n\ndeclare const Deno: any;\nfunction getEnvVariables() {\n if (isDeno) {\n const ret: any = {};\n [\n 'DEFAULT_STORAGE',\n 'NODE_ENV'\n ].forEach(k => {\n ret[k] = Deno.env.get(k);\n });\n return ret;\n }\n\n return isBun || isNode ? 
process.env : (window as any).__karma__.config.env;\n}\nexport const ENV_VARIABLES = getEnvVariables();\nexport const DEFAULT_STORAGE = ENV_VARIABLES.DEFAULT_STORAGE as string;\n\nexport function isFastMode(): boolean {\n try {\n return ENV_VARIABLES.NODE_ENV === 'fast';\n } catch (err) {\n return false;\n }\n}\n\nexport function initTestEnvironment() {\n if (ENV_VARIABLES.NODE_ENV === 'fast') {\n broadcastChannelEnforceOptions({\n type: 'simulate'\n });\n }\n\n /**\n * Overwrite the console for easier debugging\n */\n const oldConsoleLog = console.log.bind(console);\n const oldConsoleDir = console.dir.bind(console);\n function newLog(this: typeof console, value: any) {\n if (isPromise(value)) {\n oldConsoleDir(value);\n throw new Error('cannot log Promise(), you should await it first');\n }\n if (typeof value === 'string' || typeof value === 'number') {\n oldConsoleLog(value);\n return;\n }\n try {\n JSON.stringify(value);\n oldConsoleLog(JSON.stringify(value, null, 4));\n } catch (err) {\n oldConsoleDir(value);\n }\n }\n console.log = newLog.bind(console);\n console.dir = newLog.bind(console);\n\n console.log('DEFAULT_STORAGE: ' + DEFAULT_STORAGE);\n\n if (isNode) {\n process.setMaxListeners(100);\n\n events.EventEmitter.defaultMaxListeners = 100;\n\n /**\n * Add a global function to process, so we can debug timings\n */\n (process as any).startTime = performance.now();\n (process as any).logTime = (msg: string = '') => {\n const diff = performance.now() - (process as any).startTime;\n console.log('process logTime(' + msg + ') ' + diff + 'ms');\n };\n }\n}\n\nexport function getEncryptedStorage(baseStorage = getConfig().storage.getStorage()): RxStorage {\n const ret = config.storage.hasEncryption ?\n baseStorage :\n wrappedKeyEncryptionCryptoJsStorage({\n storage: baseStorage\n });\n return ret;\n}\n\nexport function isNotOneOfTheseStorages(storageNames: string[]) {\n const isName = getConfig().storage.name;\n if (storageNames.includes(isName)) {\n return 
false;\n } else {\n return true;\n }\n}\n\n\nexport function getPassword(): Promise {\n if (getConfig().storage.hasEncryption) {\n return ensureNotFalsy(getConfig().storage.hasEncryption)();\n } else {\n return Promise.resolve('test-password-' + randomCouchString(10));\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;AACA,IAAAA,MAAA,GAAAC,OAAA;AAKA,IAAAC,iBAAA,GAAAD,OAAA;AAGA,IAAAE,WAAA,GAAAC,sBAAA,CAAAH,OAAA;AAIA,IAAAI,OAAA,GAAAJ,OAAA;AAbA;;AAmBO,IAAMK,MAAM,GAAAC,OAAA,CAAAD,MAAA,GAAG,OAAOE,MAAM,KAAK,WAAW,IAAI,MAAM,IAAIA,MAAM;AAChE,IAAMC,KAAK,GAAAF,OAAA,CAAAE,KAAA,GAAG,OAAOC,OAAO,KAAK,WAAW,IAAI,CAAC,CAACA,OAAO,CAACC,QAAQ,CAACC,GAAG;AACtE,IAAMC,MAAM,GAAAN,OAAA,CAAAM,MAAA,GAAG,CAACP,MAAM,IAAI,CAACG,KAAK,IAAI,OAAOD,MAAM,KAAK,WAAW;AAExE,IAAIM,MAAkB;AAEf,SAASC,SAASA,CAACC,SAAqB,EAAE;EAC7CF,MAAM,GAAGE,SAAS;AACtB;AAEA,IAAIC,QAAQ,GAAG,KAAK;AACb,SAASC,SAASA,CAAA,EAAG;EACxB,IAAI,CAACD,QAAQ,EAAE;IACXE,mBAAmB,CAAC,CAAC;IACrBF,QAAQ,GAAG,IAAI;EACnB;EACA,OAAO,IAAAG,qBAAc,EAACN,MAAM,EAAE,oBAAoB,CAAC;AACvD;AAIA,SAASO,eAAeA,CAAA,EAAG;EACvB,IAAIf,MAAM,EAAE;IACR,IAAMgB,GAAQ,GAAG,CAAC,CAAC;IACnB,CACI,iBAAiB,EACjB,UAAU,CACb,CAACC,OAAO,CAACC,CAAC,IAAI;MACXF,GAAG,CAACE,CAAC,CAAC,GAAGC,IAAI,CAACC,GAAG,CAACC,GAAG,CAACH,CAAC,CAAC;IAC5B,CAAC,CAAC;IACF,OAAOF,GAAG;EACd;EAEA,OAAOb,KAAK,IAAII,MAAM,GAAGH,OAAO,CAACgB,GAAG,GAAIlB,MAAM,CAASoB,SAAS,CAACd,MAAM,CAACY,GAAG;AAC/E;AACO,IAAMG,aAAa,GAAAtB,OAAA,CAAAsB,aAAA,GAAGR,eAAe,CAAC,CAAC;AACvC,IAAMS,eAAe,GAAAvB,OAAA,CAAAuB,eAAA,GAAGD,aAAa,CAACC,eAAyB;AAE/D,SAASC,UAAUA,CAAA,EAAY;EAClC,IAAI;IACA,OAAOF,aAAa,CAACG,QAAQ,KAAK,MAAM;EAC5C,CAAC,CAAC,OAAOC,GAAG,EAAE;IACV,OAAO,KAAK;EAChB;AACJ;AAEO,SAASd,mBAAmBA,CAAA,EAAG;EAClC,IAAIU,aAAa,CAACG,QAAQ,KAAK,MAAM,EAAE;IACnC,IAAAE,gCAA8B,EAAC;MAC3BC,IAAI,EAAE;IACV,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;EACI,IAAMC,aAAa,GAAGC,OAAO,CAACC,GAAG,CAACC,IAAI,CAACF,OAAO,CAAC;EAC/C,IAAMG,aAAa,GAAGH,OAAO,CAACI,GAAG,CAACF,IAAI,CAACF,OAAO,CAAC;EAC/C,SAASK,MAAMA,CAAuBC,KAAU,EAAE;IAC9C,IAAI,IAAAC,gBAAS,EAACD,KAAK,CAAC,EAAE;MAClBH,aAAa,CAACG,KAAK,CAAC;MA
CpB,MAAM,IAAIE,KAAK,CAAC,iDAAiD,CAAC;IACtE;IACA,IAAI,OAAOF,KAAK,KAAK,QAAQ,IAAI,OAAOA,KAAK,KAAK,QAAQ,EAAE;MACxDP,aAAa,CAACO,KAAK,CAAC;MACpB;IACJ;IACA,IAAI;MACAG,IAAI,CAACC,SAAS,CAACJ,KAAK,CAAC;MACrBP,aAAa,CAACU,IAAI,CAACC,SAAS,CAACJ,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;IACjD,CAAC,CAAC,OAAOV,GAAG,EAAE;MACVO,aAAa,CAACG,KAAK,CAAC;IACxB;EACJ;EACAN,OAAO,CAACC,GAAG,GAAGI,MAAM,CAACH,IAAI,CAACF,OAAO,CAAC;EAClCA,OAAO,CAACI,GAAG,GAAGC,MAAM,CAACH,IAAI,CAACF,OAAO,CAAC;EAElCA,OAAO,CAACC,GAAG,CAAC,mBAAmB,GAAGR,eAAe,CAAC;EAElD,IAAIjB,MAAM,EAAE;IACRH,OAAO,CAACsC,eAAe,CAAC,GAAG,CAAC;IAE5BC,mBAAM,CAACC,YAAY,CAACC,mBAAmB,GAAG,GAAG;;IAE7C;AACR;AACA;IACSzC,OAAO,CAAS0C,SAAS,GAAGC,WAAW,CAACC,GAAG,CAAC,CAAC;IAC7C5C,OAAO,CAAS6C,OAAO,GAAG,CAACC,GAAW,GAAG,EAAE,KAAK;MAC7C,IAAMC,IAAI,GAAGJ,WAAW,CAACC,GAAG,CAAC,CAAC,GAAI5C,OAAO,CAAS0C,SAAS;MAC3Df,OAAO,CAACC,GAAG,CAAC,kBAAkB,GAAGkB,GAAG,GAAG,IAAI,GAAGC,IAAI,GAAG,IAAI,CAAC;IAC9D,CAAC;EACL;AACJ;AAEO,SAASC,mBAAmBA,CAACC,WAAW,GAAGzC,SAAS,CAAC,CAAC,CAAC0C,OAAO,CAACC,UAAU,CAAC,CAAC,EAAuB;EACrG,IAAMvC,GAAG,GAAGR,MAAM,CAAC8C,OAAO,CAACE,aAAa,GACpCH,WAAW,GACX,IAAAI,2CAAmC,EAAC;IAChCH,OAAO,EAAED;EACb,CAAC,CAAC;EACN,OAAOrC,GAAG;AACd;AAEO,SAAS0C,uBAAuBA,CAACC,YAAsB,EAAE;EAC5D,IAAMC,MAAM,GAAGhD,SAAS,CAAC,CAAC,CAAC0C,OAAO,CAACO,IAAI;EACvC,IAAIF,YAAY,CAACG,QAAQ,CAACF,MAAM,CAAC,EAAE;IAC/B,OAAO,KAAK;EAChB,CAAC,MAAM;IACH,OAAO,IAAI;EACf;AACJ;AAGO,SAASG,WAAWA,CAAA,EAAoB;EAC3C,IAAInD,SAAS,CAAC,CAAC,CAAC0C,OAAO,CAACE,aAAa,EAAE;IACnC,OAAO,IAAA1C,qBAAc,EAACF,SAAS,CAAC,CAAC,CAAC0C,OAAO,CAACE,aAAa,CAAC,CAAC,CAAC;EAC9D,CAAC,MAAM;IACH,OAAOQ,OAAO,CAACC,OAAO,CAAC,gBAAgB,GAAG,IAAAC,wBAAiB,EAAC,EAAE,CAAC,CAAC;EACpE;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/test-utils/humans-collection.js b/dist/cjs/plugins/test-utils/humans-collection.js deleted file mode 100644 index 3ed9dbe5dbf..00000000000 --- a/dist/cjs/plugins/test-utils/humans-collection.js +++ /dev/null @@ -1,393 +0,0 @@ -"use strict"; - -var _interopRequireDefault = 
require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.create = create; -exports.createAgeIndex = createAgeIndex; -exports.createAttachments = createAttachments; -exports.createBySchema = createBySchema; -exports.createDeepNested = createDeepNested; -exports.createHumanWithTimestamp = createHumanWithTimestamp; -exports.createIdAndAgeIndex = createIdAndAgeIndex; -exports.createMigrationCollection = createMigrationCollection; -exports.createMultiInstance = createMultiInstance; -exports.createNested = createNested; -exports.createNoCompression = createNoCompression; -exports.createPrimary = createPrimary; -exports.createRelated = createRelated; -exports.createRelatedNested = createRelatedNested; -exports.multipleOnSameDB = multipleOnSameDB; -var _clone = _interopRequireDefault(require("clone")); -var schemas = _interopRequireWildcard(require("./schemas.js")); -var schemaObjects = _interopRequireWildcard(require("./schema-objects.js")); -var _config = require("./config.js"); -var _assert = _interopRequireDefault(require("assert")); -var _index = require("../../index.js"); -function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); } -function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && {}.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? 
Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; } -async function create(size = 20, collectionName = 'human', multiInstance = true, eventReduce = true, storage = (0, _config.getConfig)().storage.getStorage()) { - var db = await (0, _index.createRxDatabase)({ - name: (0, _index.randomCouchString)(10), - storage, - multiInstance, - eventReduce, - ignoreDuplicate: true, - localDocuments: true - }); - var collections = await db.addCollections({ - [collectionName]: { - schema: schemas.human, - localDocuments: true - } - }); - - // insert data - if (size > 0) { - var docsData = new Array(size).fill(0).map(() => schemaObjects.humanData()); - var writeResult = await collections[collectionName].bulkInsert(docsData); - _assert.default.deepStrictEqual(writeResult.error, []); - } - return collections[collectionName]; -} -async function createBySchema(schema, name = 'human', storage = (0, _config.getConfig)().storage.getStorage(), migrationStrategies) { - var db = await (0, _index.createRxDatabase)({ - name: (0, _index.randomCouchString)(10), - storage, - multiInstance: true, - eventReduce: true, - ignoreDuplicate: true - }); - var collections = await db.addCollections({ - [name]: { - schema, - migrationStrategies - } - }); - return collections[name]; -} -async function createAttachments(size = 20, name = 'human', multiInstance = true) { - if (!name) { - name = 'human'; - } - var db = await (0, _index.createRxDatabase)({ - name: (0, _index.randomCouchString)(10), - storage: (0, _config.getConfig)().storage.getStorage(), - multiInstance, - eventReduce: true, - ignoreDuplicate: true - }); - var schemaJson = (0, _clone.default)(schemas.human); - schemaJson.attachments = {}; - var collections = await db.addCollections({ - [name]: { - schema: schemaJson - } - }); - - // insert data - if (size > 0) { - var docsData = new Array(size).fill(0).map(() => schemaObjects.humanData()); - await collections[name].bulkInsert(docsData); - } - return 
collections[name]; -} -async function createNoCompression(size = 20, name = 'human') { - var db = await (0, _index.createRxDatabase)({ - name: (0, _index.randomCouchString)(10), - storage: (0, _config.getConfig)().storage.getStorage(), - eventReduce: true, - ignoreDuplicate: true - }); - var schemaJSON = (0, _clone.default)(schemas.human); - schemaJSON.keyCompression = false; - // setTimeout(() => db.destroy(), dbLifetime); - var collections = await db.addCollections({ - [name]: { - schema: schemaJSON - } - }); - - // insert data - if (size > 0) { - var docsData = new Array(size).fill(0).map(() => schemaObjects.humanData()); - await collections[name].bulkInsert(docsData); - } - return collections[name]; -} -async function createAgeIndex(amount = 20) { - var db = await (0, _index.createRxDatabase)({ - name: (0, _index.randomCouchString)(10), - storage: (0, _config.getConfig)().storage.getStorage(), - eventReduce: true, - ignoreDuplicate: true - }); - // setTimeout(() => db.destroy(), dbLifetime); - var collections = await db.addCollections({ - humana: { - schema: schemas.humanAgeIndex - } - }); - - // insert data - if (amount > 0) { - var docsData = new Array(amount).fill(0).map(() => schemaObjects.humanData()); - await collections.humana.bulkInsert(docsData); - } - return collections.humana; -} -async function multipleOnSameDB(size = 10) { - var db = await (0, _index.createRxDatabase)({ - name: (0, _index.randomCouchString)(10), - storage: (0, _config.getConfig)().storage.getStorage(), - eventReduce: true, - ignoreDuplicate: true - }); - // setTimeout(() => db.destroy(), dbLifetime); - var collections = await db.addCollections({ - human: { - schema: schemas.human - }, - human2: { - schema: schemas.human - } - }); - - // insert data - if (size > 0) { - var docsData = new Array(size).fill(0).map(() => schemaObjects.humanData()); - await collections.human.bulkInsert(docsData); - var docsData2 = new Array(size).fill(0).map(() => schemaObjects.humanData()); - await 
collections.human2.bulkInsert(docsData2); - } - return { - db, - collection: collections.human, - collection2: collections.human2 - }; -} -async function createNested(amount = 5) { - var db = await (0, _index.createRxDatabase)({ - name: (0, _index.randomCouchString)(10), - storage: (0, _config.getConfig)().storage.getStorage(), - eventReduce: true, - ignoreDuplicate: true - }); - // setTimeout(() => db.destroy(), dbLifetime); - var collections = await db.addCollections({ - nestedhuman: { - schema: schemas.nestedHuman - } - }); - - // insert data - if (amount > 0) { - var docsData = new Array(amount).fill(0).map(() => schemaObjects.nestedHumanData()); - await collections.nestedhuman.bulkInsert(docsData); - } - return collections.nestedhuman; -} -async function createDeepNested(amount = 5) { - var db = await (0, _index.createRxDatabase)({ - name: (0, _index.randomCouchString)(10), - storage: (0, _config.getConfig)().storage.getStorage(), - eventReduce: true - }); - // setTimeout(() => db.destroy(), dbLifetime); - var collections = await db.addCollections({ - nestedhuman: { - schema: schemas.deepNestedHuman - } - }); - - // insert data - if (amount > 0) { - var docsData = new Array(amount).fill(0).map(() => schemaObjects.deepNestedHumanData()); - await collections.nestedhuman.bulkInsert(docsData); - } - return collections.nestedhuman; -} -async function createMultiInstance(name, amount = 0, password = undefined, storage = (0, _config.getConfig)().storage.getStorage()) { - if (!(0, _config.getConfig)().storage.hasMultiInstance) { - throw new Error('createMultiInstance() cannot be called on a storage with hasMultiInstance:false'); - } - var db = await (0, _index.createRxDatabase)({ - name, - storage, - password, - multiInstance: true, - eventReduce: true, - ignoreDuplicate: true, - localDocuments: true - }); - // setTimeout(() => db.destroy(), dbLifetime); - var collections = await db.addCollections({ - human: { - schema: schemas.human, - localDocuments: true - } - }); 
- // insert data - if (amount > 0) { - var docsData = new Array(amount).fill(0).map(() => schemaObjects.humanData()); - await collections.human.bulkInsert(docsData); - } - return collections.human; -} -async function createPrimary(amount = 10, name = (0, _index.randomCouchString)(10)) { - var db = await (0, _index.createRxDatabase)({ - name, - storage: (0, _config.getConfig)().storage.getStorage(), - multiInstance: true, - eventReduce: true, - ignoreDuplicate: true - }); - // setTimeout(() => db.destroy(), dbLifetime); - var collections = await db.addCollections({ - human: { - schema: schemas.primaryHuman - } - }); - - // insert data - if (amount > 0) { - var docsData = new Array(amount).fill(0).map(() => schemaObjects.simpleHumanData()); - await collections.human.bulkInsert(docsData); - } - return collections.human; -} -async function createHumanWithTimestamp(amount = 0, databaseName = (0, _index.randomCouchString)(10), multiInstance = true, storage = (0, _config.getConfig)().storage.getStorage()) { - var db = await (0, _index.createRxDatabase)({ - name: databaseName, - storage, - multiInstance, - eventReduce: true, - ignoreDuplicate: true - }); - // setTimeout(() => db.destroy(), dbLifetime); - var collections = await db.addCollections({ - humans: { - schema: schemas.humanWithTimestamp - } - }); - - // insert data - if (amount > 0) { - var docsData = new Array(amount).fill(0).map(() => schemaObjects.humanWithTimestampData()); - await collections.humans.bulkInsert(docsData); - } - return collections.humans; -} -async function createMigrationCollection(amount = 0, addMigrationStrategies = {}, name = (0, _index.randomCouchString)(10), autoMigrate = false, attachment) { - var migrationStrategies = Object.assign({ - 1: doc => doc, - 2: doc => doc, - 3: doc => doc - }, addMigrationStrategies); - var colName = 'human'; - var db = await (0, _index.createRxDatabase)({ - name, - storage: (0, _config.getConfig)().storage.getStorage(), - eventReduce: true, - ignoreDuplicate: 
true - }); - var cols = await db.addCollections({ - [colName]: { - schema: attachment !== undefined ? { - ...schemas.simpleHuman, - attachments: {} - } : schemas.simpleHuman, - autoMigrate: false - } - }); - await Promise.all(new Array(amount).fill(0).map(() => cols[colName].insert(schemaObjects.simpleHumanAge()).then(doc => { - if (attachment !== undefined) { - return doc.putAttachment(attachment); - } - }))); - await db.destroy(); - var db2 = await (0, _index.createRxDatabase)({ - name, - storage: (0, _config.getConfig)().storage.getStorage(), - eventReduce: true, - ignoreDuplicate: true - }); - var cols2 = await db2.addCollections({ - [colName]: { - schema: attachment !== undefined ? { - ...schemas.simpleHumanV3, - attachments: {} - } : schemas.simpleHumanV3, - autoMigrate, - migrationStrategies - } - }); - return cols2[colName]; -} -async function createRelated(name = (0, _index.randomCouchString)(10)) { - var db = await (0, _index.createRxDatabase)({ - name, - storage: (0, _config.getConfig)().storage.getStorage(), - multiInstance: true, - eventReduce: true, - ignoreDuplicate: true - }); - // setTimeout(() => db.destroy(), dbLifetime); - var collections = await db.addCollections({ - human: { - schema: schemas.refHuman - } - }); - var doc1 = schemaObjects.refHumanData(); - var doc2 = schemaObjects.refHumanData(doc1.name); - doc1.bestFriend = doc2.name; // cross-relation - - await collections.human.insert(doc1); - await collections.human.insert(doc2); - return collections.human; -} -async function createRelatedNested(name = (0, _index.randomCouchString)(10)) { - var db = await (0, _index.createRxDatabase)({ - name, - storage: (0, _config.getConfig)().storage.getStorage(), - multiInstance: true, - eventReduce: true, - ignoreDuplicate: true - }); - // setTimeout(() => db.destroy(), dbLifetime); - var collections = await db.addCollections({ - human: { - schema: schemas.refHumanNested - } - }); - var doc1 = schemaObjects.refHumanNestedData(); - var doc2 = 
schemaObjects.refHumanNestedData(doc1.name); - doc1.foo.bestFriend = doc2.name; // cross-relation - - await collections.human.insert(doc1); - await collections.human.insert(doc2); - return collections.human; -} -async function createIdAndAgeIndex(amount = 20) { - var db = await (0, _index.createRxDatabase)({ - name: (0, _index.randomCouchString)(10), - storage: (0, _config.getConfig)().storage.getStorage(), - eventReduce: true, - ignoreDuplicate: true - }); - // setTimeout(() => db.destroy(), dbLifetime); - var collections = await db.addCollections({ - humana: { - schema: schemas.humanIdAndAgeIndex - } - }); - - // insert data - if (amount > 0) { - var docsData = new Array(amount).fill(0).map(() => schemaObjects.humanWithIdAndAgeIndexDocumentType()); - await collections.humana.bulkInsert(docsData); - } - return collections.humana; -} -//# sourceMappingURL=humans-collection.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/test-utils/humans-collection.js.map b/dist/cjs/plugins/test-utils/humans-collection.js.map deleted file mode 100644 index ab485d59f29..00000000000 --- a/dist/cjs/plugins/test-utils/humans-collection.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"humans-collection.js","names":["_clone","_interopRequireDefault","require","schemas","_interopRequireWildcard","schemaObjects","_config","_assert","_index","_getRequireWildcardCache","e","WeakMap","r","t","__esModule","default","has","get","n","__proto__","a","Object","defineProperty","getOwnPropertyDescriptor","u","hasOwnProperty","call","i","set","create","size","collectionName","multiInstance","eventReduce","storage","getConfig","getStorage","db","createRxDatabase","name","randomCouchString","ignoreDuplicate","localDocuments","collections","addCollections","schema","human","docsData","Array","fill","map","humanData","writeResult","bulkInsert","assert","deepStrictEqual","error","createBySchema","migrationStrategies","createAttachments","schemaJson","clone","attachments","createNoCompression","schemaJSON","keyCompression","createAgeIndex","amount","humana","humanAgeIndex","multipleOnSameDB","human2","docsData2","collection","collection2","createNested","nestedhuman","nestedHuman","nestedHumanData","createDeepNested","deepNestedHuman","deepNestedHumanData","createMultiInstance","password","undefined","hasMultiInstance","Error","createPrimary","primaryHuman","simpleHumanData","createHumanWithTimestamp","databaseName","humans","humanWithTimestamp","humanWithTimestampData","createMigrationCollection","addMigrationStrategies","autoMigrate","attachment","assign","doc","colName","cols","simpleHuman","Promise","all","insert","simpleHumanAge","then","putAttachment","destroy","db2","cols2","simpleHumanV3","createRelated","refHuman","doc1","refHumanData","doc2","bestFriend","createRelatedNested","refHumanNested","refHumanNestedData","foo","createIdAndAgeIndex","humanIdAndAgeIndex","humanWithIdAndAgeIndexDocumentType"],"sources":["../../../../src/plugins/test-utils/humans-collection.ts"],"sourcesContent":["import clone from 'clone';\nimport * as schemas from './schemas.ts';\nimport * as schemaObjects from './schema-objects.ts';\nimport { getConfig } from 
'./config.ts';\nimport assert from 'assert';\n\nimport {\n createRxDatabase,\n RxJsonSchema,\n RxCollection,\n RxDatabase,\n randomCouchString,\n MigrationStrategies,\n RxAttachmentCreator,\n RxStorage\n} from '../../index.ts';\n\nimport { HumanDocumentType } from './schemas.ts';\n\nexport async function create(\n size: number = 20,\n collectionName: string = 'human',\n multiInstance: boolean = true,\n eventReduce: boolean = true,\n storage: RxStorage = getConfig().storage.getStorage()\n\n): Promise> {\n const db = await createRxDatabase<{ human: RxCollection; }>({\n name: randomCouchString(10),\n storage,\n multiInstance,\n eventReduce,\n ignoreDuplicate: true,\n localDocuments: true\n });\n\n const collections = await db.addCollections({\n [collectionName]: {\n schema: schemas.human,\n localDocuments: true\n }\n });\n\n // insert data\n if (size > 0) {\n const docsData = new Array(size)\n .fill(0)\n .map(() => schemaObjects.humanData());\n const writeResult = await collections[collectionName].bulkInsert(docsData);\n assert.deepStrictEqual(writeResult.error, []);\n }\n return collections[collectionName];\n}\n\nexport async function createBySchema(\n schema: RxJsonSchema,\n name = 'human',\n storage = getConfig().storage.getStorage(),\n migrationStrategies?: MigrationStrategies\n): Promise> {\n const db = await createRxDatabase<{ [prop: string]: RxCollection; }>({\n name: randomCouchString(10),\n storage,\n multiInstance: true,\n eventReduce: true,\n ignoreDuplicate: true\n });\n\n const collections = await db.addCollections({\n [name]: {\n schema,\n migrationStrategies\n }\n });\n\n return collections[name];\n}\n\nexport async function createAttachments(\n size = 20,\n name = 'human',\n multiInstance = true\n): Promise> {\n if (!name) {\n name = 'human';\n }\n const db = await createRxDatabase<{ [prop: string]: RxCollection; }>({\n name: randomCouchString(10),\n storage: getConfig().storage.getStorage(),\n multiInstance,\n eventReduce: true,\n ignoreDuplicate: 
true\n });\n\n const schemaJson = clone(schemas.human);\n schemaJson.attachments = {};\n\n const collections = await db.addCollections({\n [name]: {\n schema: schemaJson\n }\n });\n\n // insert data\n if (size > 0) {\n const docsData = new Array(size)\n .fill(0)\n .map(() => schemaObjects.humanData());\n await collections[name].bulkInsert(docsData);\n }\n\n return collections[name];\n}\n\nexport async function createNoCompression(\n size = 20,\n name = 'human'\n): Promise> {\n const db = await createRxDatabase<{ [prop: string]: RxCollection; }>({\n name: randomCouchString(10),\n storage: getConfig().storage.getStorage(),\n eventReduce: true,\n ignoreDuplicate: true\n });\n const schemaJSON = clone(schemas.human);\n schemaJSON.keyCompression = false;\n // setTimeout(() => db.destroy(), dbLifetime);\n const collections = await db.addCollections({\n [name]: {\n schema: schemaJSON\n }\n });\n\n // insert data\n if (size > 0) {\n const docsData = new Array(size)\n .fill(0)\n .map(() => schemaObjects.humanData());\n await collections[name].bulkInsert(docsData);\n }\n\n return collections[name];\n}\n\nexport async function createAgeIndex(\n amount = 20\n): Promise> {\n const db = await createRxDatabase<{ humana: RxCollection; }>({\n name: randomCouchString(10),\n storage: getConfig().storage.getStorage(),\n eventReduce: true,\n ignoreDuplicate: true\n });\n // setTimeout(() => db.destroy(), dbLifetime);\n const collections = await db.addCollections({\n humana: {\n schema: schemas.humanAgeIndex\n }\n });\n\n // insert data\n if (amount > 0) {\n const docsData = new Array(amount)\n .fill(0)\n .map(() => schemaObjects.humanData());\n await collections.humana.bulkInsert(docsData);\n }\n\n return collections.humana;\n}\n\nexport async function multipleOnSameDB(\n size = 10\n): Promise<{\n db: RxDatabase<{\n human: RxCollection;\n human2: RxCollection;\n }>;\n collection: RxCollection;\n collection2: RxCollection;\n}> {\n const db = await createRxDatabase<{\n human: 
RxCollection;\n human2: RxCollection;\n }>({\n name: randomCouchString(10),\n storage: getConfig().storage.getStorage(),\n eventReduce: true,\n ignoreDuplicate: true\n });\n // setTimeout(() => db.destroy(), dbLifetime);\n const collections = await db.addCollections({\n human: {\n schema: schemas.human\n },\n human2: {\n schema: schemas.human\n }\n });\n\n // insert data\n if (size > 0) {\n const docsData = new Array(size)\n .fill(0)\n .map(() => schemaObjects.humanData());\n await collections.human.bulkInsert(docsData);\n\n const docsData2 = new Array(size)\n .fill(0)\n .map(() => schemaObjects.humanData());\n await collections.human2.bulkInsert(docsData2);\n }\n\n return {\n db,\n collection: collections.human,\n collection2: collections.human2\n };\n}\n\nexport async function createNested(\n amount = 5\n): Promise> {\n const db = await createRxDatabase<{ nestedhuman: RxCollection; }>({\n name: randomCouchString(10),\n storage: getConfig().storage.getStorage(),\n eventReduce: true,\n ignoreDuplicate: true\n });\n // setTimeout(() => db.destroy(), dbLifetime);\n const collections = await db.addCollections({\n nestedhuman: {\n schema: schemas.nestedHuman\n }\n });\n\n // insert data\n if (amount > 0) {\n const docsData = new Array(amount)\n .fill(0)\n .map(() => schemaObjects.nestedHumanData());\n await collections.nestedhuman.bulkInsert(docsData);\n }\n\n return collections.nestedhuman;\n}\n\nexport async function createDeepNested(\n amount = 5\n): Promise> {\n const db = await createRxDatabase<{ nestedhuman: RxCollection; }>({\n name: randomCouchString(10),\n storage: getConfig().storage.getStorage(),\n eventReduce: true,\n });\n // setTimeout(() => db.destroy(), dbLifetime);\n const collections = await db.addCollections({\n nestedhuman: {\n schema: schemas.deepNestedHuman\n }\n });\n\n // insert data\n if (amount > 0) {\n const docsData = new Array(amount)\n .fill(0)\n .map(() => schemaObjects.deepNestedHumanData());\n await 
collections.nestedhuman.bulkInsert(docsData);\n }\n\n return collections.nestedhuman;\n}\n\nexport async function createMultiInstance(\n name: string,\n amount = 0,\n password = undefined,\n storage: RxStorage = getConfig().storage.getStorage()\n): Promise> {\n if (!getConfig().storage.hasMultiInstance) {\n throw new Error('createMultiInstance() cannot be called on a storage with hasMultiInstance:false');\n }\n\n const db = await createRxDatabase<{ human: RxCollection; }>({\n name,\n storage,\n password,\n multiInstance: true,\n eventReduce: true,\n ignoreDuplicate: true,\n localDocuments: true\n });\n // setTimeout(() => db.destroy(), dbLifetime);\n const collections = await db.addCollections({\n human: {\n schema: schemas.human,\n localDocuments: true\n }\n });\n // insert data\n if (amount > 0) {\n const docsData = new Array(amount)\n .fill(0)\n .map(() => schemaObjects.humanData());\n await collections.human.bulkInsert(docsData);\n }\n\n return collections.human;\n}\n\nexport async function createPrimary(\n amount = 10,\n name = randomCouchString(10)\n): Promise> {\n\n const db = await createRxDatabase<{ human: RxCollection; }>({\n name,\n storage: getConfig().storage.getStorage(),\n multiInstance: true,\n eventReduce: true,\n ignoreDuplicate: true\n });\n // setTimeout(() => db.destroy(), dbLifetime);\n const collections = await db.addCollections({\n human: {\n schema: schemas.primaryHuman\n }\n });\n\n // insert data\n if (amount > 0) {\n const docsData = new Array(amount)\n .fill(0)\n .map(() => schemaObjects.simpleHumanData());\n await collections.human.bulkInsert(docsData);\n }\n\n return collections.human;\n}\n\nexport async function createHumanWithTimestamp(\n amount = 0,\n databaseName = randomCouchString(10),\n multiInstance = true,\n storage = getConfig().storage.getStorage()\n): Promise> {\n\n const db = await createRxDatabase<{ humans: RxCollection; }>({\n name: databaseName,\n storage,\n multiInstance,\n eventReduce: true,\n ignoreDuplicate: true\n 
});\n // setTimeout(() => db.destroy(), dbLifetime);\n const collections = await db.addCollections({\n humans: {\n schema: schemas.humanWithTimestamp\n }\n });\n\n // insert data\n if (amount > 0) {\n const docsData = new Array(amount)\n .fill(0)\n .map(() => schemaObjects.humanWithTimestampData());\n await collections.humans.bulkInsert(docsData);\n }\n\n return collections.humans;\n}\n\nexport async function createMigrationCollection(\n amount = 0,\n addMigrationStrategies: MigrationStrategies = {},\n name = randomCouchString(10),\n autoMigrate = false,\n attachment?: RxAttachmentCreator\n): Promise> {\n\n const migrationStrategies: any = Object.assign(\n {\n 1: (doc: any) => doc,\n 2: (doc: any) => doc,\n 3: (doc: any) => doc\n },\n addMigrationStrategies\n );\n\n\n const colName = 'human';\n const db = await createRxDatabase<{ human: RxCollection; }>({\n name,\n storage: getConfig().storage.getStorage(),\n eventReduce: true,\n ignoreDuplicate: true\n });\n const cols = await db.addCollections({\n [colName]: {\n schema: attachment !== undefined ? { ...schemas.simpleHuman, attachments: {} } : schemas.simpleHuman,\n autoMigrate: false\n }\n });\n\n await Promise.all(\n new Array(amount)\n .fill(0)\n .map(() => cols[colName].insert(schemaObjects.simpleHumanAge()).then(doc => {\n if (attachment !== undefined) {\n return doc.putAttachment(attachment);\n }\n }))\n );\n await db.destroy();\n\n const db2 = await createRxDatabase<{ human: RxCollection; }>({\n name,\n storage: getConfig().storage.getStorage(),\n eventReduce: true,\n ignoreDuplicate: true\n });\n const cols2 = await db2.addCollections({\n [colName]: {\n schema: attachment !== undefined ? 
{ ...schemas.simpleHumanV3, attachments: {} } : schemas.simpleHumanV3,\n autoMigrate,\n migrationStrategies\n }\n });\n\n return cols2[colName];\n}\n\nexport async function createRelated(\n name = randomCouchString(10)\n): Promise> {\n const db = await createRxDatabase<{ human: RxCollection; }>({\n name,\n storage: getConfig().storage.getStorage(),\n multiInstance: true,\n eventReduce: true,\n ignoreDuplicate: true\n });\n // setTimeout(() => db.destroy(), dbLifetime);\n const collections = await db.addCollections({\n human: {\n schema: schemas.refHuman\n }\n });\n\n const doc1 = schemaObjects.refHumanData();\n const doc2 = schemaObjects.refHumanData(doc1.name);\n doc1.bestFriend = doc2.name; // cross-relation\n\n await collections.human.insert(doc1);\n await collections.human.insert(doc2);\n\n return collections.human;\n}\n\nexport async function createRelatedNested(\n name = randomCouchString(10)\n): Promise> {\n\n const db = await createRxDatabase<{ human: RxCollection; }>({\n name,\n storage: getConfig().storage.getStorage(),\n multiInstance: true,\n eventReduce: true,\n ignoreDuplicate: true\n });\n // setTimeout(() => db.destroy(), dbLifetime);\n const collections = await db.addCollections({\n human: {\n schema: schemas.refHumanNested\n }\n });\n\n const doc1 = schemaObjects.refHumanNestedData();\n const doc2 = schemaObjects.refHumanNestedData(doc1.name);\n doc1.foo.bestFriend = doc2.name; // cross-relation\n\n await collections.human.insert(doc1);\n await collections.human.insert(doc2);\n\n return collections.human;\n}\n\nexport async function createIdAndAgeIndex(\n amount = 20\n): Promise> {\n const db = await createRxDatabase<{ humana: RxCollection; }>({\n name: randomCouchString(10),\n storage: getConfig().storage.getStorage(),\n eventReduce: true,\n ignoreDuplicate: true\n });\n // setTimeout(() => db.destroy(), dbLifetime);\n const collections = await db.addCollections({\n humana: {\n schema: schemas.humanIdAndAgeIndex\n }\n });\n\n // insert data\n if 
(amount > 0) {\n const docsData = new Array(amount)\n .fill(0)\n .map(() => schemaObjects.humanWithIdAndAgeIndexDocumentType());\n await collections.humana.bulkInsert(docsData);\n }\n\n return collections.humana;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AAAA,IAAAA,MAAA,GAAAC,sBAAA,CAAAC,OAAA;AACA,IAAAC,OAAA,GAAAC,uBAAA,CAAAF,OAAA;AACA,IAAAG,aAAA,GAAAD,uBAAA,CAAAF,OAAA;AACA,IAAAI,OAAA,GAAAJ,OAAA;AACA,IAAAK,OAAA,GAAAN,sBAAA,CAAAC,OAAA;AAEA,IAAAM,MAAA,GAAAN,OAAA;AASwB,SAAAO,yBAAAC,CAAA,6BAAAC,OAAA,mBAAAC,CAAA,OAAAD,OAAA,IAAAE,CAAA,OAAAF,OAAA,YAAAF,wBAAA,YAAAA,CAAAC,CAAA,WAAAA,CAAA,GAAAG,CAAA,GAAAD,CAAA,KAAAF,CAAA;AAAA,SAAAN,wBAAAM,CAAA,EAAAE,CAAA,SAAAA,CAAA,IAAAF,CAAA,IAAAA,CAAA,CAAAI,UAAA,SAAAJ,CAAA,eAAAA,CAAA,uBAAAA,CAAA,yBAAAA,CAAA,WAAAK,OAAA,EAAAL,CAAA,QAAAG,CAAA,GAAAJ,wBAAA,CAAAG,CAAA,OAAAC,CAAA,IAAAA,CAAA,CAAAG,GAAA,CAAAN,CAAA,UAAAG,CAAA,CAAAI,GAAA,CAAAP,CAAA,OAAAQ,CAAA,KAAAC,SAAA,UAAAC,CAAA,GAAAC,MAAA,CAAAC,cAAA,IAAAD,MAAA,CAAAE,wBAAA,WAAAC,CAAA,IAAAd,CAAA,oBAAAc,CAAA,OAAAC,cAAA,CAAAC,IAAA,CAAAhB,CAAA,EAAAc,CAAA,SAAAG,CAAA,GAAAP,CAAA,GAAAC,MAAA,CAAAE,wBAAA,CAAAb,CAAA,EAAAc,CAAA,UAAAG,CAAA,KAAAA,CAAA,CAAAV,GAAA,IAAAU,CAAA,CAAAC,GAAA,IAAAP,MAAA,CAAAC,cAAA,CAAAJ,CAAA,EAAAM,CAAA,EAAAG,CAAA,IAAAT,CAAA,CAAAM,CAAA,IAAAd,CAAA,CAAAc,CAAA,YAAAN,CAAA,CAAAH,OAAA,GAAAL,CAAA,EAAAG,CAAA,IAAAA,CAAA,CAAAe,GAAA,CAAAlB,CAAA,EAAAQ,CAAA,GAAAA,CAAA;AAIjB,eAAeW,MAAMA,CACxBC,IAAY,GAAG,EAAE,EACjBC,cAAsB,GAAG,OAAO,EAChCC,aAAsB,GAAG,IAAI,EAC7BC,WAAoB,GAAG,IAAI,EAC3BC,OAA4B,GAAG,IAAAC,iBAAS,EAAC,CAAC,CAACD,OAAO,CAACE,UAAU,CAAC,CAAC,EAEf;EAChD,IAAMC,EAAE,GAAG,MAAM,IAAAC,uBAAgB,EAA8C;IAC3EC,IAAI,EAAE,IAAAC,wBAAiB,EAAC,EAAE,CAAC;IAC3BN,OAAO;IACPF,aAAa;IACbC,WAAW;IACXQ,eAAe,EAAE,IAAI;IACrBC,cAAc,EAAE;EACpB,CAAC,CAAC;EAEF,IAAMC,WAAW,GAAG,MAAMN,EAAE,CAACO,cAAc,CAAC;IACxC,CAACb,cAAc,GAAG;MACdc,MAAM,EAAE1C,OAAO,CAAC2C,KAAK;MACrBJ,cAAc,EAAE;IACpB;EACJ,CAAC,CAAC;;EAEF;EACA,IAAIZ,IAAI,GAAG,CAAC,EAAE;IACV,IAAMiB,QAAQ,GAAG,IAAIC,KAAK,CAAClB,IAAI,CAAC,CAC3BmB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAM7C,aA
Aa,CAAC8C,SAAS,CAAC,CAAC,CAAC;IACzC,IAAMC,WAAW,GAAG,MAAMT,WAAW,CAACZ,cAAc,CAAC,CAACsB,UAAU,CAACN,QAAQ,CAAC;IAC1EO,eAAM,CAACC,eAAe,CAACH,WAAW,CAACI,KAAK,EAAE,EAAE,CAAC;EACjD;EACA,OAAOb,WAAW,CAACZ,cAAc,CAAC;AACtC;AAEO,eAAe0B,cAAcA,CAChCZ,MAAoC,EACpCN,IAAI,GAAG,OAAO,EACdL,OAAO,GAAG,IAAAC,iBAAS,EAAC,CAAC,CAACD,OAAO,CAACE,UAAU,CAAC,CAAC,EAC1CsB,mBAAyC,EACI;EAC7C,IAAMrB,EAAE,GAAG,MAAM,IAAAC,uBAAgB,EAAoD;IACjFC,IAAI,EAAE,IAAAC,wBAAiB,EAAC,EAAE,CAAC;IAC3BN,OAAO;IACPF,aAAa,EAAE,IAAI;IACnBC,WAAW,EAAE,IAAI;IACjBQ,eAAe,EAAE;EACrB,CAAC,CAAC;EAEF,IAAME,WAAW,GAAG,MAAMN,EAAE,CAACO,cAAc,CAAC;IACxC,CAACL,IAAI,GAAG;MACJM,MAAM;MACNa;IACJ;EACJ,CAAC,CAAC;EAEF,OAAOf,WAAW,CAACJ,IAAI,CAAC;AAC5B;AAEO,eAAeoB,iBAAiBA,CACnC7B,IAAI,GAAG,EAAE,EACTS,IAAI,GAAG,OAAO,EACdP,aAAa,GAAG,IAAI,EAC4B;EAChD,IAAI,CAACO,IAAI,EAAE;IACPA,IAAI,GAAG,OAAO;EAClB;EACA,IAAMF,EAAE,GAAG,MAAM,IAAAC,uBAAgB,EAAuD;IACpFC,IAAI,EAAE,IAAAC,wBAAiB,EAAC,EAAE,CAAC;IAC3BN,OAAO,EAAE,IAAAC,iBAAS,EAAC,CAAC,CAACD,OAAO,CAACE,UAAU,CAAC,CAAC;IACzCJ,aAAa;IACbC,WAAW,EAAE,IAAI;IACjBQ,eAAe,EAAE;EACrB,CAAC,CAAC;EAEF,IAAMmB,UAAU,GAAG,IAAAC,cAAK,EAAC1D,OAAO,CAAC2C,KAAK,CAAC;EACvCc,UAAU,CAACE,WAAW,GAAG,CAAC,CAAC;EAE3B,IAAMnB,WAAW,GAAG,MAAMN,EAAE,CAACO,cAAc,CAAC;IACxC,CAACL,IAAI,GAAG;MACJM,MAAM,EAAEe;IACZ;EACJ,CAAC,CAAC;;EAEF;EACA,IAAI9B,IAAI,GAAG,CAAC,EAAE;IACV,IAAMiB,QAAQ,GAAG,IAAIC,KAAK,CAAClB,IAAI,CAAC,CAC3BmB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAM7C,aAAa,CAAC8C,SAAS,CAAC,CAAC,CAAC;IACzC,MAAMR,WAAW,CAACJ,IAAI,CAAC,CAACc,UAAU,CAACN,QAAQ,CAAC;EAChD;EAEA,OAAOJ,WAAW,CAACJ,IAAI,CAAC;AAC5B;AAEO,eAAewB,mBAAmBA,CACrCjC,IAAI,GAAG,EAAE,EACTS,IAAI,GAAG,OAAO,EAC0B;EACxC,IAAMF,EAAE,GAAG,MAAM,IAAAC,uBAAgB,EAAuD;IACpFC,IAAI,EAAE,IAAAC,wBAAiB,EAAC,EAAE,CAAC;IAC3BN,OAAO,EAAE,IAAAC,iBAAS,EAAC,CAAC,CAACD,OAAO,CAACE,UAAU,CAAC,CAAC;IACzCH,WAAW,EAAE,IAAI;IACjBQ,eAAe,EAAE;EACrB,CAAC,CAAC;EACF,IAAMuB,UAAU,GAAG,IAAAH,cAAK,EAAC1D,OAAO,CAAC2C,KAAK,CAAC;EACvCkB,UAAU,CAACC,cAAc,GAAG,KAAK;EACjC;EACA,IAAMtB,WAAW,GAAG,MAAMN,EAAE,CAACO,cAAc,CAAC;IACxC,CAACL,IAAI,GAAG;MACJM
,MAAM,EAAEmB;IACZ;EACJ,CAAC,CAAC;;EAEF;EACA,IAAIlC,IAAI,GAAG,CAAC,EAAE;IACV,IAAMiB,QAAQ,GAAG,IAAIC,KAAK,CAAClB,IAAI,CAAC,CAC3BmB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAM7C,aAAa,CAAC8C,SAAS,CAAC,CAAC,CAAC;IACzC,MAAMR,WAAW,CAACJ,IAAI,CAAC,CAACc,UAAU,CAACN,QAAQ,CAAC;EAChD;EAEA,OAAOJ,WAAW,CAACJ,IAAI,CAAC;AAC5B;AAEO,eAAe2B,cAAcA,CAChCC,MAAM,GAAG,EAAE,EAC6B;EACxC,IAAM9B,EAAE,GAAG,MAAM,IAAAC,uBAAgB,EAA+C;IAC5EC,IAAI,EAAE,IAAAC,wBAAiB,EAAC,EAAE,CAAC;IAC3BN,OAAO,EAAE,IAAAC,iBAAS,EAAC,CAAC,CAACD,OAAO,CAACE,UAAU,CAAC,CAAC;IACzCH,WAAW,EAAE,IAAI;IACjBQ,eAAe,EAAE;EACrB,CAAC,CAAC;EACF;EACA,IAAME,WAAW,GAAG,MAAMN,EAAE,CAACO,cAAc,CAAC;IACxCwB,MAAM,EAAE;MACJvB,MAAM,EAAE1C,OAAO,CAACkE;IACpB;EACJ,CAAC,CAAC;;EAEF;EACA,IAAIF,MAAM,GAAG,CAAC,EAAE;IACZ,IAAMpB,QAAQ,GAAG,IAAIC,KAAK,CAACmB,MAAM,CAAC,CAC7BlB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAM7C,aAAa,CAAC8C,SAAS,CAAC,CAAC,CAAC;IACzC,MAAMR,WAAW,CAACyB,MAAM,CAACf,UAAU,CAACN,QAAQ,CAAC;EACjD;EAEA,OAAOJ,WAAW,CAACyB,MAAM;AAC7B;AAEO,eAAeE,gBAAgBA,CAClCxC,IAAI,GAAG,EAAE,EAQV;EACC,IAAMO,EAAE,GAAG,MAAM,IAAAC,uBAAgB,EAG9B;IACCC,IAAI,EAAE,IAAAC,wBAAiB,EAAC,EAAE,CAAC;IAC3BN,OAAO,EAAE,IAAAC,iBAAS,EAAC,CAAC,CAACD,OAAO,CAACE,UAAU,CAAC,CAAC;IACzCH,WAAW,EAAE,IAAI;IACjBQ,eAAe,EAAE;EACrB,CAAC,CAAC;EACF;EACA,IAAME,WAAW,GAAG,MAAMN,EAAE,CAACO,cAAc,CAAC;IACxCE,KAAK,EAAE;MACHD,MAAM,EAAE1C,OAAO,CAAC2C;IACpB,CAAC;IACDyB,MAAM,EAAE;MACJ1B,MAAM,EAAE1C,OAAO,CAAC2C;IACpB;EACJ,CAAC,CAAC;;EAEF;EACA,IAAIhB,IAAI,GAAG,CAAC,EAAE;IACV,IAAMiB,QAAQ,GAAG,IAAIC,KAAK,CAAClB,IAAI,CAAC,CAC3BmB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAM7C,aAAa,CAAC8C,SAAS,CAAC,CAAC,CAAC;IACzC,MAAMR,WAAW,CAACG,KAAK,CAACO,UAAU,CAACN,QAAQ,CAAC;IAE5C,IAAMyB,SAAS,GAAG,IAAIxB,KAAK,CAAClB,IAAI,CAAC,CAC5BmB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAM7C,aAAa,CAAC8C,SAAS,CAAC,CAAC,CAAC;IACzC,MAAMR,WAAW,CAAC4B,MAAM,CAAClB,UAAU,CAACmB,SAAS,CAAC;EAClD;EAEA,OAAO;IACHnC,EAAE;IACFoC,UAAU,EAAE9B,WAAW,CAACG,KAAK;IAC7B4B,WAAW,EAAE/B,WAAW,CAAC4B;EAC7B,CAAC;AACL;AAEO,eAAeI,YAAYA,CAC9BR,MAAM,GAAG,CAAC,EACkD;EAC5D,IAAM9B,EAAE,GAAG
,MAAM,IAAAC,uBAAgB,EAAwE;IACrGC,IAAI,EAAE,IAAAC,wBAAiB,EAAC,EAAE,CAAC;IAC3BN,OAAO,EAAE,IAAAC,iBAAS,EAAC,CAAC,CAACD,OAAO,CAACE,UAAU,CAAC,CAAC;IACzCH,WAAW,EAAE,IAAI;IACjBQ,eAAe,EAAE;EACrB,CAAC,CAAC;EACF;EACA,IAAME,WAAW,GAAG,MAAMN,EAAE,CAACO,cAAc,CAAC;IACxCgC,WAAW,EAAE;MACT/B,MAAM,EAAE1C,OAAO,CAAC0E;IACpB;EACJ,CAAC,CAAC;;EAEF;EACA,IAAIV,MAAM,GAAG,CAAC,EAAE;IACZ,IAAMpB,QAAQ,GAAG,IAAIC,KAAK,CAACmB,MAAM,CAAC,CAC7BlB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAM7C,aAAa,CAACyE,eAAe,CAAC,CAAC,CAAC;IAC/C,MAAMnC,WAAW,CAACiC,WAAW,CAACvB,UAAU,CAACN,QAAQ,CAAC;EACtD;EAEA,OAAOJ,WAAW,CAACiC,WAAW;AAClC;AAEO,eAAeG,gBAAgBA,CAClCZ,MAAM,GAAG,CAAC,EACsD;EAChE,IAAM9B,EAAE,GAAG,MAAM,IAAAC,uBAAgB,EAA4E;IACzGC,IAAI,EAAE,IAAAC,wBAAiB,EAAC,EAAE,CAAC;IAC3BN,OAAO,EAAE,IAAAC,iBAAS,EAAC,CAAC,CAACD,OAAO,CAACE,UAAU,CAAC,CAAC;IACzCH,WAAW,EAAE;EACjB,CAAC,CAAC;EACF;EACA,IAAMU,WAAW,GAAG,MAAMN,EAAE,CAACO,cAAc,CAAC;IACxCgC,WAAW,EAAE;MACT/B,MAAM,EAAE1C,OAAO,CAAC6E;IACpB;EACJ,CAAC,CAAC;;EAEF;EACA,IAAIb,MAAM,GAAG,CAAC,EAAE;IACZ,IAAMpB,QAAQ,GAAG,IAAIC,KAAK,CAACmB,MAAM,CAAC,CAC7BlB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAM7C,aAAa,CAAC4E,mBAAmB,CAAC,CAAC,CAAC;IACnD,MAAMtC,WAAW,CAACiC,WAAW,CAACvB,UAAU,CAACN,QAAQ,CAAC;EACtD;EAEA,OAAOJ,WAAW,CAACiC,WAAW;AAClC;AAEO,eAAeM,mBAAmBA,CACrC3C,IAAY,EACZ4B,MAAM,GAAG,CAAC,EACVgB,QAAQ,GAAGC,SAAS,EACpBlD,OAA4B,GAAG,IAAAC,iBAAS,EAAC,CAAC,CAACD,OAAO,CAACE,UAAU,CAAC,CAAC,EACf;EAChD,IAAI,CAAC,IAAAD,iBAAS,EAAC,CAAC,CAACD,OAAO,CAACmD,gBAAgB,EAAE;IACvC,MAAM,IAAIC,KAAK,CAAC,iFAAiF,CAAC;EACtG;EAEA,IAAMjD,EAAE,GAAG,MAAM,IAAAC,uBAAgB,EAA8C;IAC3EC,IAAI;IACJL,OAAO;IACPiD,QAAQ;IACRnD,aAAa,EAAE,IAAI;IACnBC,WAAW,EAAE,IAAI;IACjBQ,eAAe,EAAE,IAAI;IACrBC,cAAc,EAAE;EACpB,CAAC,CAAC;EACF;EACA,IAAMC,WAAW,GAAG,MAAMN,EAAE,CAACO,cAAc,CAAC;IACxCE,KAAK,EAAE;MACHD,MAAM,EAAE1C,OAAO,CAAC2C,KAAK;MACrBJ,cAAc,EAAE;IACpB;EACJ,CAAC,CAAC;EACF;EACA,IAAIyB,MAAM,GAAG,CAAC,EAAE;IACZ,IAAMpB,QAAQ,GAAG,IAAIC,KAAK,CAACmB,MAAM,CAAC,CAC7BlB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAM7C,aAAa,CAAC8C,SAAS,CAAC,CAAC,CAAC;IACzC,MAAMR,WAAW
,CAACG,KAAK,CAACO,UAAU,CAACN,QAAQ,CAAC;EAChD;EAEA,OAAOJ,WAAW,CAACG,KAAK;AAC5B;AAEO,eAAeyC,aAAaA,CAC/BpB,MAAM,GAAG,EAAE,EACX5B,IAAI,GAAG,IAAAC,wBAAiB,EAAC,EAAE,CAAC,EACgC;EAE5D,IAAMH,EAAE,GAAG,MAAM,IAAAC,uBAAgB,EAAkE;IAC/FC,IAAI;IACJL,OAAO,EAAE,IAAAC,iBAAS,EAAC,CAAC,CAACD,OAAO,CAACE,UAAU,CAAC,CAAC;IACzCJ,aAAa,EAAE,IAAI;IACnBC,WAAW,EAAE,IAAI;IACjBQ,eAAe,EAAE;EACrB,CAAC,CAAC;EACF;EACA,IAAME,WAAW,GAAG,MAAMN,EAAE,CAACO,cAAc,CAAC;IACxCE,KAAK,EAAE;MACHD,MAAM,EAAE1C,OAAO,CAACqF;IACpB;EACJ,CAAC,CAAC;;EAEF;EACA,IAAIrB,MAAM,GAAG,CAAC,EAAE;IACZ,IAAMpB,QAAQ,GAAG,IAAIC,KAAK,CAACmB,MAAM,CAAC,CAC7BlB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAM7C,aAAa,CAACoF,eAAe,CAAC,CAAC,CAAC;IAC/C,MAAM9C,WAAW,CAACG,KAAK,CAACO,UAAU,CAACN,QAAQ,CAAC;EAChD;EAEA,OAAOJ,WAAW,CAACG,KAAK;AAC5B;AAEO,eAAe4C,wBAAwBA,CAC1CvB,MAAM,GAAG,CAAC,EACVwB,YAAY,GAAG,IAAAnD,wBAAiB,EAAC,EAAE,CAAC,EACpCR,aAAa,GAAG,IAAI,EACpBE,OAAO,GAAG,IAAAC,iBAAS,EAAC,CAAC,CAACD,OAAO,CAACE,UAAU,CAAC,CAAC,EACyB;EAEnE,IAAMC,EAAE,GAAG,MAAM,IAAAC,uBAAgB,EAA0E;IACvGC,IAAI,EAAEoD,YAAY;IAClBzD,OAAO;IACPF,aAAa;IACbC,WAAW,EAAE,IAAI;IACjBQ,eAAe,EAAE;EACrB,CAAC,CAAC;EACF;EACA,IAAME,WAAW,GAAG,MAAMN,EAAE,CAACO,cAAc,CAAC;IACxCgD,MAAM,EAAE;MACJ/C,MAAM,EAAE1C,OAAO,CAAC0F;IACpB;EACJ,CAAC,CAAC;;EAEF;EACA,IAAI1B,MAAM,GAAG,CAAC,EAAE;IACZ,IAAMpB,QAAQ,GAAG,IAAIC,KAAK,CAACmB,MAAM,CAAC,CAC7BlB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAM7C,aAAa,CAACyF,sBAAsB,CAAC,CAAC,CAAC;IACtD,MAAMnD,WAAW,CAACiD,MAAM,CAACvC,UAAU,CAACN,QAAQ,CAAC;EACjD;EAEA,OAAOJ,WAAW,CAACiD,MAAM;AAC7B;AAEO,eAAeG,yBAAyBA,CAC3C5B,MAAM,GAAG,CAAC,EACV6B,sBAA2C,GAAG,CAAC,CAAC,EAChDzD,IAAI,GAAG,IAAAC,wBAAiB,EAAC,EAAE,CAAC,EAC5ByD,WAAW,GAAG,KAAK,EACnBC,UAAgC,EAC8B;EAE9D,IAAMxC,mBAAwB,GAAGrC,MAAM,CAAC8E,MAAM,CAC1C;IACI,CAAC,EAAGC,GAAQ,IAAKA,GAAG;IACpB,CAAC,EAAGA,GAAQ,IAAKA,GAAG;IACpB,CAAC,EAAGA,GAAQ,IAAKA;EACrB,CAAC,EACDJ,sBACJ,CAAC;EAGD,IAAMK,OAAO,GAAG,OAAO;EACvB,IAAMhE,EAAE,GAAG,MAAM,IAAAC,uBAAgB,EAAqE;IAClGC,IAAI;IACJL,OAAO,EAAE,IAAAC,iBAAS,EAAC,CAAC,CAACD,OAAO,CAACE,UAAU,CAAC,CAAC;IACzCH,WAAW,EAAE,IAAI;I
ACjBQ,eAAe,EAAE;EACrB,CAAC,CAAC;EACF,IAAM6D,IAAI,GAAG,MAAMjE,EAAE,CAACO,cAAc,CAAC;IACjC,CAACyD,OAAO,GAAG;MACPxD,MAAM,EAAEqD,UAAU,KAAKd,SAAS,GAAG;QAAE,GAAGjF,OAAO,CAACoG,WAAW;QAAEzC,WAAW,EAAE,CAAC;MAAE,CAAC,GAAG3D,OAAO,CAACoG,WAAW;MACpGN,WAAW,EAAE;IACjB;EACJ,CAAC,CAAC;EAEF,MAAMO,OAAO,CAACC,GAAG,CACb,IAAIzD,KAAK,CAACmB,MAAM,CAAC,CACZlB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAMoD,IAAI,CAACD,OAAO,CAAC,CAACK,MAAM,CAACrG,aAAa,CAACsG,cAAc,CAAC,CAAC,CAAC,CAACC,IAAI,CAACR,GAAG,IAAI;IACxE,IAAIF,UAAU,KAAKd,SAAS,EAAE;MAC1B,OAAOgB,GAAG,CAACS,aAAa,CAACX,UAAU,CAAC;IACxC;EACJ,CAAC,CAAC,CACV,CAAC;EACD,MAAM7D,EAAE,CAACyE,OAAO,CAAC,CAAC;EAElB,IAAMC,GAAG,GAAG,MAAM,IAAAzE,uBAAgB,EAAoE;IAClGC,IAAI;IACJL,OAAO,EAAE,IAAAC,iBAAS,EAAC,CAAC,CAACD,OAAO,CAACE,UAAU,CAAC,CAAC;IACzCH,WAAW,EAAE,IAAI;IACjBQ,eAAe,EAAE;EACrB,CAAC,CAAC;EACF,IAAMuE,KAAK,GAAG,MAAMD,GAAG,CAACnE,cAAc,CAAC;IACnC,CAACyD,OAAO,GAAG;MACPxD,MAAM,EAAEqD,UAAU,KAAKd,SAAS,GAAG;QAAE,GAAGjF,OAAO,CAAC8G,aAAa;QAAEnD,WAAW,EAAE,CAAC;MAAE,CAAC,GAAG3D,OAAO,CAAC8G,aAAa;MACxGhB,WAAW;MACXvC;IACJ;EACJ,CAAC,CAAC;EAEF,OAAOsD,KAAK,CAACX,OAAO,CAAC;AACzB;AAEO,eAAea,aAAaA,CAC/B3E,IAAI,GAAG,IAAAC,wBAAiB,EAAC,EAAE,CAAC,EAC6B;EACzD,IAAMH,EAAE,GAAG,MAAM,IAAAC,uBAAgB,EAA+D;IAC5FC,IAAI;IACJL,OAAO,EAAE,IAAAC,iBAAS,EAAC,CAAC,CAACD,OAAO,CAACE,UAAU,CAAC,CAAC;IACzCJ,aAAa,EAAE,IAAI;IACnBC,WAAW,EAAE,IAAI;IACjBQ,eAAe,EAAE;EACrB,CAAC,CAAC;EACF;EACA,IAAME,WAAW,GAAG,MAAMN,EAAE,CAACO,cAAc,CAAC;IACxCE,KAAK,EAAE;MACHD,MAAM,EAAE1C,OAAO,CAACgH;IACpB;EACJ,CAAC,CAAC;EAEF,IAAMC,IAAI,GAAG/G,aAAa,CAACgH,YAAY,CAAC,CAAC;EACzC,IAAMC,IAAI,GAAGjH,aAAa,CAACgH,YAAY,CAACD,IAAI,CAAC7E,IAAI,CAAC;EAClD6E,IAAI,CAACG,UAAU,GAAGD,IAAI,CAAC/E,IAAI,CAAC,CAAC;;EAE7B,MAAMI,WAAW,CAACG,KAAK,CAAC4D,MAAM,CAACU,IAAI,CAAC;EACpC,MAAMzE,WAAW,CAACG,KAAK,CAAC4D,MAAM,CAACY,IAAI,CAAC;EAEpC,OAAO3E,WAAW,CAACG,KAAK;AAC5B;AAEO,eAAe0E,mBAAmBA,CACrCjF,IAAI,GAAG,IAAAC,wBAAiB,EAAC,EAAE,CAAC,EACmC;EAE/D,IAAMH,EAAE,GAAG,MAAM,IAAAC,uBAAgB,EAAqE;IAClGC,IAAI;IACJL,OAAO,EAAE,IAAAC,iBAAS,EAAC,CAAC,CAACD,OAAO,CAACE,UAAU,CAAC
,CAAC;IACzCJ,aAAa,EAAE,IAAI;IACnBC,WAAW,EAAE,IAAI;IACjBQ,eAAe,EAAE;EACrB,CAAC,CAAC;EACF;EACA,IAAME,WAAW,GAAG,MAAMN,EAAE,CAACO,cAAc,CAAC;IACxCE,KAAK,EAAE;MACHD,MAAM,EAAE1C,OAAO,CAACsH;IACpB;EACJ,CAAC,CAAC;EAEF,IAAML,IAAI,GAAG/G,aAAa,CAACqH,kBAAkB,CAAC,CAAC;EAC/C,IAAMJ,IAAI,GAAGjH,aAAa,CAACqH,kBAAkB,CAACN,IAAI,CAAC7E,IAAI,CAAC;EACxD6E,IAAI,CAACO,GAAG,CAACJ,UAAU,GAAGD,IAAI,CAAC/E,IAAI,CAAC,CAAC;;EAEjC,MAAMI,WAAW,CAACG,KAAK,CAAC4D,MAAM,CAACU,IAAI,CAAC;EACpC,MAAMzE,WAAW,CAACG,KAAK,CAAC4D,MAAM,CAACY,IAAI,CAAC;EAEpC,OAAO3E,WAAW,CAACG,KAAK;AAC5B;AAEO,eAAe8E,mBAAmBA,CACrCzD,MAAM,GAAG,EAAE,EAC4D;EACvE,IAAM9B,EAAE,GAAG,MAAM,IAAAC,uBAAgB,EAA8E;IAC3GC,IAAI,EAAE,IAAAC,wBAAiB,EAAC,EAAE,CAAC;IAC3BN,OAAO,EAAE,IAAAC,iBAAS,EAAC,CAAC,CAACD,OAAO,CAACE,UAAU,CAAC,CAAC;IACzCH,WAAW,EAAE,IAAI;IACjBQ,eAAe,EAAE;EACrB,CAAC,CAAC;EACF;EACA,IAAME,WAAW,GAAG,MAAMN,EAAE,CAACO,cAAc,CAAC;IACxCwB,MAAM,EAAE;MACJvB,MAAM,EAAE1C,OAAO,CAAC0H;IACpB;EACJ,CAAC,CAAC;;EAEF;EACA,IAAI1D,MAAM,GAAG,CAAC,EAAE;IACZ,IAAMpB,QAAQ,GAAG,IAAIC,KAAK,CAACmB,MAAM,CAAC,CAC7BlB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAM7C,aAAa,CAACyH,kCAAkC,CAAC,CAAC,CAAC;IAClE,MAAMnF,WAAW,CAACyB,MAAM,CAACf,UAAU,CAACN,QAAQ,CAAC;EACjD;EAEA,OAAOJ,WAAW,CAACyB,MAAM;AAC7B","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/test-utils/index.js b/dist/cjs/plugins/test-utils/index.js deleted file mode 100644 index 51562c77932..00000000000 --- a/dist/cjs/plugins/test-utils/index.js +++ /dev/null @@ -1,107 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _exportNames = { - humansCollection: true, - schemas: true, - schemaObjects: true -}; -exports.schemas = exports.schemaObjects = exports.humansCollection = void 0; -var _config = require("./config.js"); -Object.keys(_config).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _config[key]) return; 
- Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _config[key]; - } - }); -}); -var humansCollectionConst = _interopRequireWildcard(require("./humans-collection.js")); -Object.keys(humansCollectionConst).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === humansCollectionConst[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return humansCollectionConst[key]; - } - }); -}); -var _portManager = require("./port-manager.js"); -Object.keys(_portManager).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _portManager[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _portManager[key]; - } - }); -}); -var _revisions = require("./revisions.js"); -Object.keys(_revisions).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _revisions[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _revisions[key]; - } - }); -}); -var _testUtil = require("./test-util.js"); -Object.keys(_testUtil).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _testUtil[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _testUtil[key]; - } - }); -}); -var schemaObjectsConst = _interopRequireWildcard(require("./schema-objects.js")); -Object.keys(schemaObjectsConst).forEach(function (key) { - if (key 
=== "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === schemaObjectsConst[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return schemaObjectsConst[key]; - } - }); -}); -var schemasConst = _interopRequireWildcard(require("./schemas.js")); -Object.keys(schemasConst).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === schemasConst[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return schemasConst[key]; - } - }); -}); -function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); } -function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && {}.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; } -/** - * This plugins contains thing that are needed for testing - * in RxDB related context. Mostly used in the unit tests and - * also in the tests for the premium and the server repository. 
- */ - -var humansCollection = exports.humansCollection = humansCollectionConst; -var schemas = exports.schemas = schemasConst; -var schemaObjects = exports.schemaObjects = schemaObjectsConst; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/test-utils/index.js.map b/dist/cjs/plugins/test-utils/index.js.map deleted file mode 100644 index 13f7c96f173..00000000000 --- a/dist/cjs/plugins/test-utils/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_config","require","Object","keys","forEach","key","prototype","hasOwnProperty","call","_exportNames","exports","defineProperty","enumerable","get","humansCollectionConst","_interopRequireWildcard","_portManager","_revisions","_testUtil","schemaObjectsConst","schemasConst","_getRequireWildcardCache","e","WeakMap","r","t","__esModule","default","has","n","__proto__","a","getOwnPropertyDescriptor","u","i","set","humansCollection","schemas","schemaObjects"],"sources":["../../../../src/plugins/test-utils/index.ts"],"sourcesContent":["/**\n * This plugins contains thing that are needed for testing\n * in RxDB related context. 
Mostly used in the unit tests and\n * also in the tests for the premium and the server repository.\n */\n\nexport * from './config.ts';\nexport * from './humans-collection.ts';\nexport * from './port-manager.ts';\nexport * from './revisions.ts';\nexport * from './test-util.ts';\n\nexport * from './schema-objects.ts';\nexport * from './schemas.ts';\n\nimport * as humansCollectionConst from './humans-collection.ts';\nexport const humansCollection = humansCollectionConst;\n\nimport * as schemasConst from './schemas.ts';\nexport const schemas = schemasConst;\nimport * as schemaObjectsConst from './schema-objects.ts';\nexport const schemaObjects = schemaObjectsConst;\n"],"mappings":";;;;;;;;;;;AAMA,IAAAA,OAAA,GAAAC,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAH,OAAA,EAAAI,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAL,OAAA,CAAAK,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAb,OAAA,CAAAK,GAAA;IAAA;EAAA;AAAA;AACA,IAAAS,qBAAA,GAAAC,uBAAA,CAAAd,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAW,qBAAA,EAAAV,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAS,qBAAA,CAAAT,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAC,qBAAA,CAAAT,GAAA;IAAA;EAAA;AAAA;AACA,IAAAW,YAAA,GAAAf,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAa,YAAA,EAAAZ,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAW,YAAA,CAAAX,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAG,YAAA,CAAAX,GAAA;IAAA;EAAA;AAAA;AACA,IAAAY,UAAA,GAAAhB,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAc,UAAA,EAAAb,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EA
AAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAY,UAAA,CAAAZ,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAI,UAAA,CAAAZ,GAAA;IAAA;EAAA;AAAA;AACA,IAAAa,SAAA,GAAAjB,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAe,SAAA,EAAAd,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAa,SAAA,CAAAb,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAK,SAAA,CAAAb,GAAA;IAAA;EAAA;AAAA;AAEA,IAAAc,kBAAA,GAAAJ,uBAAA,CAAAd,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAgB,kBAAA,EAAAf,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAc,kBAAA,CAAAd,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAM,kBAAA,CAAAd,GAAA;IAAA;EAAA;AAAA;AACA,IAAAe,YAAA,GAAAL,uBAAA,CAAAd,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAiB,YAAA,EAAAhB,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAe,YAAA,CAAAf,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAO,YAAA,CAAAf,GAAA;IAAA;EAAA;AAAA;AAA6B,SAAAgB,yBAAAC,CAAA,6BAAAC,OAAA,mBAAAC,CAAA,OAAAD,OAAA,IAAAE,CAAA,OAAAF,OAAA,YAAAF,wBAAA,YAAAA,CAAAC,CAAA,WAAAA,CAAA,GAAAG,CAAA,GAAAD,CAAA,KAAAF,CAAA;AAAA,SAAAP,wBAAAO,CAAA,EAAAE,CAAA,SAAAA,CAAA,IAAAF,CAAA,IAAAA,CAAA,CAAAI,UAAA,SAAAJ,CAAA,eAAAA,CAAA,uBAAAA,CAAA,yBAAAA,CAAA,WAAAK,OAAA,EAAAL,CAAA,QAAAG,CAAA,GAAAJ,wBAAA,CAAAG,CAAA,OAAAC,CAAA,IAAAA,CAAA,CAAAG,GAAA,CAAAN,CAAA,UAAAG,CAAA,CAAAZ,GAAA,CAAAS,CAAA,OAAAO,CAAA,KAAAC,SAAA,UAAAC,CAAA,GAAA7B,MAAA,CAAAS,cAAA,IAAAT,MAAA,CAAA8B,wBAAA,WAAAC,CAAA,IAAAX,CAAA,oBAAAW,CAAA,OAAA1B,cAAA,CAAAC,IAAA,CAAAc,CAAA,EAAAW,CAAA,SAAAC,CAAA,GAAAH,CAAA,GAAA7B,MAAA,CAAA8B,wBAAA,CAAAV,CAAA,EAAAW,CAAA,UAAAC,CAAA,KAAAA
,CAAA,CAAArB,GAAA,IAAAqB,CAAA,CAAAC,GAAA,IAAAjC,MAAA,CAAAS,cAAA,CAAAkB,CAAA,EAAAI,CAAA,EAAAC,CAAA,IAAAL,CAAA,CAAAI,CAAA,IAAAX,CAAA,CAAAW,CAAA,YAAAJ,CAAA,CAAAF,OAAA,GAAAL,CAAA,EAAAG,CAAA,IAAAA,CAAA,CAAAU,GAAA,CAAAb,CAAA,EAAAO,CAAA,GAAAA,CAAA;AAb7B;AACA;AACA;AACA;AACA;;AAYO,IAAMO,gBAAgB,GAAA1B,OAAA,CAAA0B,gBAAA,GAAGtB,qBAAqB;AAG9C,IAAMuB,OAAO,GAAA3B,OAAA,CAAA2B,OAAA,GAAGjB,YAAY;AAE5B,IAAMkB,aAAa,GAAA5B,OAAA,CAAA4B,aAAA,GAAGnB,kBAAkB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/test-utils/port-manager.js b/dist/cjs/plugins/test-utils/port-manager.js deleted file mode 100644 index eaa1fe73d08..00000000000 --- a/dist/cjs/plugins/test-utils/port-manager.js +++ /dev/null @@ -1,35 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.nextPort = nextPort; -var _getPort = _interopRequireWildcard(require("get-port")); -var _index = require("../utils/index.js"); -function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); } -function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && {}.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; } -/** - * For easier debugging, we increase the port each time - * to ensure that no port is reused in the tests. - */ -var startPort = 18669; -var PORT_MAX = 65535; -var portQueue = _index.PROMISE_RESOLVE_VOID; - -/** - * Returns an unused port. 
- * Used to ensure that different tests - * do not accidentally use the same port. - */ -function nextPort() { - portQueue = portQueue.then(async () => { - var port = await (0, _getPort.default)({ - port: (0, _getPort.makeRange)(startPort, PORT_MAX), - host: '0.0.0.0' - }); - startPort = port + 1; - return port; - }); - return portQueue; -} -//# sourceMappingURL=port-manager.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/test-utils/port-manager.js.map b/dist/cjs/plugins/test-utils/port-manager.js.map deleted file mode 100644 index 4372f68d762..00000000000 --- a/dist/cjs/plugins/test-utils/port-manager.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"port-manager.js","names":["_getPort","_interopRequireWildcard","require","_index","_getRequireWildcardCache","e","WeakMap","r","t","__esModule","default","has","get","n","__proto__","a","Object","defineProperty","getOwnPropertyDescriptor","u","hasOwnProperty","call","i","set","startPort","PORT_MAX","portQueue","PROMISE_RESOLVE_VOID","nextPort","then","port","getPort","makeRange","host"],"sources":["../../../../src/plugins/test-utils/port-manager.ts"],"sourcesContent":["import getPort, { makeRange } from 'get-port';\nimport { PROMISE_RESOLVE_VOID } from '../utils/index.ts';\n\n/**\n * For easier debugging, we increase the port each time\n * to ensure that no port is reused in the tests.\n */\nlet startPort = 18669;\n\nconst PORT_MAX = 65535;\nlet portQueue: Promise = PROMISE_RESOLVE_VOID as any;\n\n/**\n * Returns an unused port.\n * Used to ensure that different tests\n * do not accidentally use the same port.\n */\nexport function nextPort(): Promise {\n portQueue = portQueue.then(async () => {\n const port = await getPort({\n port: makeRange(startPort, PORT_MAX),\n host: '0.0.0.0',\n });\n startPort = port + 1;\n return port;\n });\n return 
portQueue;\n}\n"],"mappings":";;;;;;AAAA,IAAAA,QAAA,GAAAC,uBAAA,CAAAC,OAAA;AACA,IAAAC,MAAA,GAAAD,OAAA;AAAyD,SAAAE,yBAAAC,CAAA,6BAAAC,OAAA,mBAAAC,CAAA,OAAAD,OAAA,IAAAE,CAAA,OAAAF,OAAA,YAAAF,wBAAA,YAAAA,CAAAC,CAAA,WAAAA,CAAA,GAAAG,CAAA,GAAAD,CAAA,KAAAF,CAAA;AAAA,SAAAJ,wBAAAI,CAAA,EAAAE,CAAA,SAAAA,CAAA,IAAAF,CAAA,IAAAA,CAAA,CAAAI,UAAA,SAAAJ,CAAA,eAAAA,CAAA,uBAAAA,CAAA,yBAAAA,CAAA,WAAAK,OAAA,EAAAL,CAAA,QAAAG,CAAA,GAAAJ,wBAAA,CAAAG,CAAA,OAAAC,CAAA,IAAAA,CAAA,CAAAG,GAAA,CAAAN,CAAA,UAAAG,CAAA,CAAAI,GAAA,CAAAP,CAAA,OAAAQ,CAAA,KAAAC,SAAA,UAAAC,CAAA,GAAAC,MAAA,CAAAC,cAAA,IAAAD,MAAA,CAAAE,wBAAA,WAAAC,CAAA,IAAAd,CAAA,oBAAAc,CAAA,OAAAC,cAAA,CAAAC,IAAA,CAAAhB,CAAA,EAAAc,CAAA,SAAAG,CAAA,GAAAP,CAAA,GAAAC,MAAA,CAAAE,wBAAA,CAAAb,CAAA,EAAAc,CAAA,UAAAG,CAAA,KAAAA,CAAA,CAAAV,GAAA,IAAAU,CAAA,CAAAC,GAAA,IAAAP,MAAA,CAAAC,cAAA,CAAAJ,CAAA,EAAAM,CAAA,EAAAG,CAAA,IAAAT,CAAA,CAAAM,CAAA,IAAAd,CAAA,CAAAc,CAAA,YAAAN,CAAA,CAAAH,OAAA,GAAAL,CAAA,EAAAG,CAAA,IAAAA,CAAA,CAAAe,GAAA,CAAAlB,CAAA,EAAAQ,CAAA,GAAAA,CAAA;AAEzD;AACA;AACA;AACA;AACA,IAAIW,SAAS,GAAG,KAAK;AAErB,IAAMC,QAAQ,GAAG,KAAK;AACtB,IAAIC,SAA0B,GAAGC,2BAA2B;;AAE5D;AACA;AACA;AACA;AACA;AACO,SAASC,QAAQA,CAAA,EAAoB;EACxCF,SAAS,GAAGA,SAAS,CAACG,IAAI,CAAC,YAAY;IACnC,IAAMC,IAAI,GAAG,MAAM,IAAAC,gBAAO,EAAC;MACvBD,IAAI,EAAE,IAAAE,kBAAS,EAACR,SAAS,EAAEC,QAAQ,CAAC;MACpCQ,IAAI,EAAE;IACV,CAAC,CAAC;IACFT,SAAS,GAAGM,IAAI,GAAG,CAAC;IACpB,OAAOA,IAAI;EACf,CAAC,CAAC;EACF,OAAOJ,SAAS;AACpB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/test-utils/revisions.js b/dist/cjs/plugins/test-utils/revisions.js deleted file mode 100644 index 6880dc629f5..00000000000 --- a/dist/cjs/plugins/test-utils/revisions.js +++ /dev/null @@ -1,11 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.EXAMPLE_REVISION_4 = exports.EXAMPLE_REVISION_3 = exports.EXAMPLE_REVISION_2 = exports.EXAMPLE_REVISION_1 = void 0; -var EXAMPLE_REVISION_1 = exports.EXAMPLE_REVISION_1 = '1-12080c42d471e3d2625e49dcca3b8e1a'; -var 
EXAMPLE_REVISION_2 = exports.EXAMPLE_REVISION_2 = '2-22080c42d471e3d2625e49dcca3b8e2b'; -var EXAMPLE_REVISION_3 = exports.EXAMPLE_REVISION_3 = '3-32080c42d471e3d2625e49dcca3b8e3c'; -var EXAMPLE_REVISION_4 = exports.EXAMPLE_REVISION_4 = '4-42080c42d471e3d2625e49dcca3b8e3c'; -//# sourceMappingURL=revisions.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/test-utils/revisions.js.map b/dist/cjs/plugins/test-utils/revisions.js.map deleted file mode 100644 index adefdfe6e3e..00000000000 --- a/dist/cjs/plugins/test-utils/revisions.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"revisions.js","names":["EXAMPLE_REVISION_1","exports","EXAMPLE_REVISION_2","EXAMPLE_REVISION_3","EXAMPLE_REVISION_4"],"sources":["../../../../src/plugins/test-utils/revisions.ts"],"sourcesContent":["export const EXAMPLE_REVISION_1 = '1-12080c42d471e3d2625e49dcca3b8e1a';\nexport const EXAMPLE_REVISION_2 = '2-22080c42d471e3d2625e49dcca3b8e2b';\nexport const EXAMPLE_REVISION_3 = '3-32080c42d471e3d2625e49dcca3b8e3c';\nexport const EXAMPLE_REVISION_4 = '4-42080c42d471e3d2625e49dcca3b8e3c';\n"],"mappings":";;;;;;AAAO,IAAMA,kBAAkB,GAAAC,OAAA,CAAAD,kBAAA,GAAG,oCAAoC;AAC/D,IAAME,kBAAkB,GAAAD,OAAA,CAAAC,kBAAA,GAAG,oCAAoC;AAC/D,IAAMC,kBAAkB,GAAAF,OAAA,CAAAE,kBAAA,GAAG,oCAAoC;AAC/D,IAAMC,kBAAkB,GAAAH,OAAA,CAAAG,kBAAA,GAAG,oCAAoC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/test-utils/schema-objects.js b/dist/cjs/plugins/test-utils/schema-objects.js deleted file mode 100644 index fef9d0b4699..00000000000 --- a/dist/cjs/plugins/test-utils/schema-objects.js +++ /dev/null @@ -1,273 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.NoIndexHuman = NoIndexHuman; -exports.TEST_DATA_CHARSET_LAST_SORTED = exports.TEST_DATA_CHARSET = void 0; -exports.averageSchemaData = averageSchemaData; -exports.bigHumanDocumentType = bigHumanDocumentType; -exports.compoundIndexData = compoundIndexData; 
-exports.compoundIndexNoStringData = compoundIndexNoStringData; -exports.deepNestedHumanData = deepNestedHumanData; -exports.encryptedDeepHumanDocumentType = encryptedDeepHumanDocumentType; -exports.encryptedHumanData = encryptedHumanData; -exports.encryptedObjectHumanData = encryptedObjectHumanData; -exports.heroArrayData = heroArrayData; -exports.humanData = humanData; -exports.humanWithCompositePrimary = humanWithCompositePrimary; -exports.humanWithIdAndAgeIndexDocumentType = humanWithIdAndAgeIndexDocumentType; -exports.humanWithSubOther = humanWithSubOther; -exports.humanWithTimestampData = humanWithTimestampData; -exports.nestedHumanData = nestedHumanData; -exports.nostringIndex = nostringIndex; -exports.pointData = pointData; -exports.randomStringWithSpecialChars = randomStringWithSpecialChars; -exports.refHumanData = refHumanData; -exports.refHumanNestedData = refHumanNestedData; -exports.simpleHeroArray = simpleHeroArray; -exports.simpleHumanAge = simpleHumanAge; -exports.simpleHumanData = simpleHumanData; -exports.simpleHumanV3Data = simpleHumanV3Data; -var _faker = require("@faker-js/faker"); -var _asyncTestUtil = require("async-test-util"); -var schemas = _interopRequireWildcard(require("./schemas.js")); -var _index = require("../utils/index.js"); -function _getRequireWildcardCache(e) { if ("function" != typeof WeakMap) return null; var r = new WeakMap(), t = new WeakMap(); return (_getRequireWildcardCache = function (e) { return e ? t : r; })(e); } -function _interopRequireWildcard(e, r) { if (!r && e && e.__esModule) return e; if (null === e || "object" != typeof e && "function" != typeof e) return { default: e }; var t = _getRequireWildcardCache(r); if (t && t.has(e)) return t.get(e); var n = { __proto__: null }, a = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var u in e) if ("default" !== u && {}.hasOwnProperty.call(e, u)) { var i = a ? Object.getOwnPropertyDescriptor(e, u) : null; i && (i.get || i.set) ? 
Object.defineProperty(n, u, i) : n[u] = e[u]; } return n.default = e, t && t.set(e, n), n; } -/** - * this file contains objects which match the schemas in schemas.js - */ - -/** - * Some storages had problems with umlauts and other special chars. - * So we add these to all test strings. - * TODO add emojis - */ -var TEST_DATA_CHARSET = exports.TEST_DATA_CHARSET = '0987654321ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzäöüÖÄßÜ[]{}\''; -var TEST_DATA_CHARSET_LAST_SORTED = exports.TEST_DATA_CHARSET_LAST_SORTED = (0, _index.ensureNotFalsy)((0, _index.lastOfArray)(TEST_DATA_CHARSET.split('').sort())); -// const someEmojis = '😊💩👵🍌'; -function randomStringWithSpecialChars(length) { - return (0, _asyncTestUtil.randomString)(length, TEST_DATA_CHARSET); -} -function humanData(passportId = randomStringWithSpecialChars(12), age = (0, _asyncTestUtil.randomNumber)(10, 50), firstName = _faker.faker.person.firstName()) { - return { - passportId: passportId, - firstName, - lastName: _faker.faker.person.lastName(), - age - }; -} -function simpleHumanData() { - return { - passportId: randomStringWithSpecialChars(12), - firstName: _faker.faker.person.firstName(), - lastName: _faker.faker.person.lastName() - }; -} -function simpleHumanV3Data(partial = {}) { - var defaultObj = { - passportId: randomStringWithSpecialChars(12), - age: (0, _asyncTestUtil.randomNumber)(10, 50) - }; - return Object.assign(defaultObj, partial); -} -function simpleHumanAge(partial = {}) { - var defaultObj = { - passportId: randomStringWithSpecialChars(12), - age: (0, _asyncTestUtil.randomNumber)(10, 50) + '' - }; - return Object.assign(defaultObj, partial); -} -function humanWithSubOther() { - return { - passportId: randomStringWithSpecialChars(12), - other: { - age: (0, _asyncTestUtil.randomNumber)(10, 50) - } - }; -} -function NoIndexHuman() { - return { - firstName: _faker.faker.person.firstName(), - lastName: _faker.faker.person.lastName() - }; -} -function nestedHumanData(partial = {}) { - var 
defaultObj = { - passportId: randomStringWithSpecialChars(12), - firstName: _faker.faker.person.firstName(), - mainSkill: { - name: randomStringWithSpecialChars(6), - level: 5 - } - }; - return Object.assign(defaultObj, partial); -} -function deepNestedHumanData() { - return { - passportId: randomStringWithSpecialChars(12), - mainSkill: { - name: randomStringWithSpecialChars(6), - attack: { - good: false, - count: 5 - } - } - }; -} -function bigHumanDocumentType() { - return { - passportId: randomStringWithSpecialChars(12), - dnaHash: randomStringWithSpecialChars(12), - firstName: _faker.faker.person.firstName(), - lastName: _faker.faker.person.lastName(), - age: (0, _asyncTestUtil.randomNumber)(10, 50) - }; -} -function heroArrayData() { - return { - name: randomStringWithSpecialChars(6), - skills: new Array(3).fill(0).map(() => { - return { - name: randomStringWithSpecialChars(6), - damage: (0, _asyncTestUtil.randomNumber)(10, 50) - }; - }) - }; -} -function simpleHeroArray(partial = {}) { - var defaultObj = { - name: randomStringWithSpecialChars(6), - skills: new Array(3).fill(0).map(() => randomStringWithSpecialChars(6)) - }; - return Object.assign(defaultObj, partial); -} -function encryptedHumanData(secret = randomStringWithSpecialChars(12)) { - return { - passportId: randomStringWithSpecialChars(12), - firstName: _faker.faker.person.firstName(), - secret - }; -} -function encryptedObjectHumanData() { - return { - passportId: randomStringWithSpecialChars(12), - firstName: _faker.faker.person.firstName(), - secret: { - name: randomStringWithSpecialChars(12), - subname: randomStringWithSpecialChars(12) - } - }; -} -function encryptedDeepHumanDocumentType() { - return { - passportId: randomStringWithSpecialChars(12), - firstName: _faker.faker.person.firstName(), - firstLevelPassword: randomStringWithSpecialChars(12), - secretData: { - pw: randomStringWithSpecialChars(12) - }, - deepSecret: { - darkhole: { - pw: randomStringWithSpecialChars(12) - } - }, - 
nestedSecret: { - darkhole: { - pw: randomStringWithSpecialChars(12) - } - } - }; -} -function compoundIndexData() { - return { - passportId: randomStringWithSpecialChars(12), - passportCountry: randomStringWithSpecialChars(12), - age: (0, _asyncTestUtil.randomNumber)(10, 50) - }; -} -function compoundIndexNoStringData() { - return { - passportId: randomStringWithSpecialChars(12), - passportCountry: { - [randomStringWithSpecialChars(12)]: randomStringWithSpecialChars(12) - }, - age: (0, _asyncTestUtil.randomNumber)(10, 50) - }; -} -function nostringIndex() { - return { - passportId: {}, - firstName: _faker.faker.person.firstName() - }; -} -function refHumanData(bestFriend) { - return { - name: randomStringWithSpecialChars(12), - bestFriend - }; -} -function refHumanNestedData(bestFriend) { - return { - name: randomStringWithSpecialChars(12), - foo: { - bestFriend - } - }; -} -function humanWithTimestampData(givenData = {}) { - var ret = { - id: randomStringWithSpecialChars(12), - name: _faker.faker.person.firstName(), - age: (0, _asyncTestUtil.randomNumber)(1, 100), - // use some time in the past week - updatedAt: Date.now() - }; - ret = Object.assign({}, ret, givenData); - return ret; -} -var averageSchemaForFieldLength = schemas.averageSchema(); -function averageSchemaData(partial = {}) { - return Object.assign({}, { - id: randomStringWithSpecialChars((0, _index.ensureNotFalsy)(averageSchemaForFieldLength.properties.id.maxLength)), - var1: randomStringWithSpecialChars((0, _index.ensureNotFalsy)(averageSchemaForFieldLength.properties.var1.maxLength)), - var2: (0, _asyncTestUtil.randomNumber)(100, (0, _index.ensureNotFalsy)(averageSchemaForFieldLength.properties.var2.maximum)), - deep: { - deep1: randomStringWithSpecialChars((0, _index.ensureNotFalsy)(averageSchemaForFieldLength.properties.deep.properties.deep1.maxLength)), - deep2: randomStringWithSpecialChars((0, _index.ensureNotFalsy)(averageSchemaForFieldLength.properties.deep.properties.deep2.maxLength)), - 
deeper: { - deepNr: (0, _asyncTestUtil.randomNumber)(0, 10) - } - }, - list: new Array(5).fill(0).map(() => ({ - deep1: randomStringWithSpecialChars(5), - deep2: randomStringWithSpecialChars(8) - })) - }, partial); -} -function pointData() { - return { - id: randomStringWithSpecialChars(12), - x: _faker.faker.number.int(), - y: _faker.faker.number.int() - }; -} -function humanWithIdAndAgeIndexDocumentType(age = (0, _asyncTestUtil.randomNumber)(1, 100)) { - return { - id: randomStringWithSpecialChars(12), - name: _faker.faker.person.firstName(), - age - }; -} -function humanWithCompositePrimary(partial = {}) { - var defaultObj = { - firstName: _faker.faker.person.firstName(), - lastName: _faker.faker.person.lastName(), - info: { - age: (0, _asyncTestUtil.randomNumber)(10, 50) - } - }; - return Object.assign(defaultObj, partial); -} -//# sourceMappingURL=schema-objects.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/test-utils/schema-objects.js.map b/dist/cjs/plugins/test-utils/schema-objects.js.map deleted file mode 100644 index 9085ca1b012..00000000000 --- a/dist/cjs/plugins/test-utils/schema-objects.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"schema-objects.js","names":["_faker","require","_asyncTestUtil","schemas","_interopRequireWildcard","_index","_getRequireWildcardCache","e","WeakMap","r","t","__esModule","default","has","get","n","__proto__","a","Object","defineProperty","getOwnPropertyDescriptor","u","hasOwnProperty","call","i","set","TEST_DATA_CHARSET","exports","TEST_DATA_CHARSET_LAST_SORTED","ensureNotFalsy","lastOfArray","split","sort","randomStringWithSpecialChars","length","randomString","humanData","passportId","age","randomNumber","firstName","faker","person","lastName","simpleHumanData","simpleHumanV3Data","partial","defaultObj","assign","simpleHumanAge","humanWithSubOther","other","NoIndexHuman","nestedHumanData","mainSkill","name","level","deepNestedHumanData","attack","good","count","bigHumanDocumentType","dnaHash","heroArrayData","skills","Array","fill","map","damage","simpleHeroArray","encryptedHumanData","secret","encryptedObjectHumanData","subname","encryptedDeepHumanDocumentType","firstLevelPassword","secretData","pw","deepSecret","darkhole","nestedSecret","compoundIndexData","passportCountry","compoundIndexNoStringData","nostringIndex","refHumanData","bestFriend","refHumanNestedData","foo","humanWithTimestampData","givenData","ret","id","updatedAt","Date","now","averageSchemaForFieldLength","averageSchema","averageSchemaData","properties","maxLength","var1","var2","maximum","deep","deep1","deep2","deeper","deepNr","list","pointData","x","number","int","y","humanWithIdAndAgeIndexDocumentType","humanWithCompositePrimary","info"],"sources":["../../../../src/plugins/test-utils/schema-objects.ts"],"sourcesContent":["/**\n * this file contains objects which match the schemas in schemas.js\n */\n\nimport { faker } from '@faker-js/faker';\n\nimport {\n randomNumber,\n randomString\n} from 'async-test-util';\nimport { HumanDocumentType } from './schemas.ts';\nimport * as schemas from './schemas.ts';\nimport { ensureNotFalsy, lastOfArray } from 
'../utils/index.ts';\n\n/**\n * Some storages had problems with umlauts and other special chars.\n * So we add these to all test strings.\n * TODO add emojis\n */\nexport const TEST_DATA_CHARSET = '0987654321ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzäöüÖÄßÜ[]{}\\'';\nexport const TEST_DATA_CHARSET_LAST_SORTED = ensureNotFalsy(lastOfArray(TEST_DATA_CHARSET.split('').sort()));\n// const someEmojis = '😊💩👵🍌';\nexport function randomStringWithSpecialChars(length: number) {\n return randomString(length, TEST_DATA_CHARSET);\n}\n\n\nexport interface SimpleHumanDocumentType {\n passportId: string;\n firstName: string;\n lastName: string;\n}\n\nexport function humanData(\n passportId: string = randomStringWithSpecialChars(12),\n age: number = randomNumber(10, 50),\n firstName: string = faker.person.firstName()\n): HumanDocumentType {\n return {\n passportId: passportId,\n firstName,\n lastName: faker.person.lastName(),\n age\n };\n}\n\nexport function simpleHumanData(): SimpleHumanDocumentType {\n return {\n passportId: randomStringWithSpecialChars(12),\n firstName: faker.person.firstName(),\n lastName: faker.person.lastName()\n };\n}\n\nexport interface SimpleHumanV3DocumentType {\n passportId: string;\n age: number;\n oneOptional?: string;\n}\nexport function simpleHumanV3Data(partial: Partial = {}): SimpleHumanV3DocumentType {\n const defaultObj = {\n passportId: randomStringWithSpecialChars(12),\n age: randomNumber(10, 50)\n };\n return Object.assign(\n defaultObj,\n partial\n );\n}\n\nexport interface SimpleHumanAgeDocumentType {\n passportId: string;\n age: string;\n}\nexport function simpleHumanAge(partial: Partial = {}): SimpleHumanAgeDocumentType {\n const defaultObj = {\n passportId: randomStringWithSpecialChars(12),\n age: randomNumber(10, 50) + ''\n };\n return Object.assign(\n defaultObj,\n partial\n );\n}\n\nexport interface HumanWithSubOtherDocumentType {\n passportId: string;\n other: {\n age: number;\n };\n}\nexport function humanWithSubOther(): 
HumanWithSubOtherDocumentType {\n return {\n passportId: randomStringWithSpecialChars(12),\n other: {\n age: randomNumber(10, 50)\n }\n };\n}\n\nexport interface NoIndexHumanDocumentType {\n firstName: string;\n lastName: string;\n}\nexport function NoIndexHuman(): NoIndexHumanDocumentType {\n return {\n firstName: faker.person.firstName(),\n lastName: faker.person.lastName()\n };\n}\n\nexport interface NestedHumanDocumentType {\n passportId: string;\n firstName: string;\n mainSkill: {\n name: string;\n level: number;\n };\n}\nexport function nestedHumanData(partial: Partial = {}): NestedHumanDocumentType {\n const defaultObj = {\n passportId: randomStringWithSpecialChars(12),\n firstName: faker.person.firstName(),\n mainSkill: {\n name: randomStringWithSpecialChars(6),\n level: 5\n }\n };\n return Object.assign(\n defaultObj,\n partial\n );\n}\n\nexport interface DeepNestedHumanDocumentType {\n passportId: string;\n mainSkill: {\n name: string;\n attack: {\n good: boolean;\n count: number;\n };\n };\n}\nexport function deepNestedHumanData(): DeepNestedHumanDocumentType {\n return {\n passportId: randomStringWithSpecialChars(12),\n mainSkill: {\n name: randomStringWithSpecialChars(6),\n attack: {\n good: false,\n count: 5\n }\n }\n };\n}\n\nexport interface BigHumanDocumentType {\n passportId: string;\n dnaHash: string;\n firstName: string;\n lastName: string;\n age: number;\n}\nexport function bigHumanDocumentType(): BigHumanDocumentType {\n return {\n passportId: randomStringWithSpecialChars(12),\n dnaHash: randomStringWithSpecialChars(12),\n firstName: faker.person.firstName(),\n lastName: faker.person.lastName(),\n age: randomNumber(10, 50)\n };\n}\n\nexport interface HeroArrayDocumentType {\n name: string;\n skills: {\n name: string;\n damage: number;\n }[];\n}\nexport function heroArrayData(): HeroArrayDocumentType {\n return {\n name: randomStringWithSpecialChars(6),\n skills: new Array(3).fill(0).map(() => {\n return {\n name: 
randomStringWithSpecialChars(6),\n damage: randomNumber(10, 50)\n };\n })\n };\n}\n\nexport interface SimpleHeroArrayDocumentType {\n name: string;\n skills: string[];\n}\nexport function simpleHeroArray(partial: Partial = {}): SimpleHeroArrayDocumentType {\n const defaultObj = {\n name: randomStringWithSpecialChars(6),\n skills: new Array(3).fill(0).map(() => randomStringWithSpecialChars(6))\n };\n return Object.assign(\n defaultObj,\n partial\n );\n}\n\nexport interface EncryptedHumanDocumentType {\n passportId: string;\n firstName: string;\n secret: string;\n}\nexport function encryptedHumanData(secret = randomStringWithSpecialChars(12)): EncryptedHumanDocumentType {\n return {\n passportId: randomStringWithSpecialChars(12),\n firstName: faker.person.firstName(),\n secret\n };\n}\n\nexport interface EncryptedObjectHumanDocumentType {\n passportId: string;\n firstName: string;\n secret: {\n name: string;\n subname: string;\n };\n}\nexport function encryptedObjectHumanData(): EncryptedObjectHumanDocumentType {\n return {\n passportId: randomStringWithSpecialChars(12),\n firstName: faker.person.firstName(),\n secret: {\n name: randomStringWithSpecialChars(12),\n subname: randomStringWithSpecialChars(12)\n }\n };\n}\n\nexport interface EncryptedDeepHumanDocumentType {\n passportId: string;\n firstName: string;\n firstLevelPassword: string;\n secretData: {\n pw: string;\n };\n deepSecret: {\n darkhole: {\n pw: string;\n };\n };\n nestedSecret: {\n darkhole: {\n pw: string;\n };\n };\n}\nexport function encryptedDeepHumanDocumentType(): EncryptedDeepHumanDocumentType {\n return {\n passportId: randomStringWithSpecialChars(12),\n firstName: faker.person.firstName(),\n firstLevelPassword: randomStringWithSpecialChars(12),\n secretData: {\n pw: randomStringWithSpecialChars(12)\n },\n deepSecret: {\n darkhole: {\n pw: randomStringWithSpecialChars(12)\n }\n },\n nestedSecret: {\n darkhole: {\n pw: randomStringWithSpecialChars(12)\n }\n }\n };\n}\n\nexport interface 
CompoundIndexDocumentType {\n passportId: string;\n passportCountry: string;\n age: number;\n}\nexport function compoundIndexData(): CompoundIndexDocumentType {\n return {\n passportId: randomStringWithSpecialChars(12),\n passportCountry: randomStringWithSpecialChars(12),\n age: randomNumber(10, 50)\n };\n}\n\nexport interface CompoundIndexNoStringDocumentType {\n passportId: string;\n passportCountry: { [prop: string]: string; };\n age: number;\n}\nexport function compoundIndexNoStringData(): CompoundIndexNoStringDocumentType {\n return {\n passportId: randomStringWithSpecialChars(12),\n passportCountry: { [randomStringWithSpecialChars(12)]: randomStringWithSpecialChars(12) },\n age: randomNumber(10, 50)\n };\n}\n\nexport interface NostringIndexDocumentType {\n passportId: {};\n firstName: string;\n}\nexport function nostringIndex(): NostringIndexDocumentType {\n return {\n passportId: {},\n firstName: faker.person.firstName()\n };\n}\n\nexport interface RefHumanDocumentType {\n name: string;\n bestFriend: string;\n}\nexport function refHumanData(bestFriend?: string): RefHumanDocumentType {\n return {\n name: randomStringWithSpecialChars(12),\n bestFriend\n } as any;\n}\n\nexport interface RefHumanNestedDocumentType {\n name: string;\n foo: {\n bestFriend: string;\n };\n}\nexport function refHumanNestedData(bestFriend?: string): RefHumanNestedDocumentType {\n return {\n name: randomStringWithSpecialChars(12),\n foo: {\n bestFriend\n } as any\n };\n}\nexport interface HumanWithTimestampNestedDocumentType extends HumanWithTimestampDocumentType {\n address?: {\n street: string;\n suite: string;\n city: string;\n zipcode: string;\n geo: {\n lat: string;\n lng: string;\n };\n }\n}\n\nexport interface HumanWithTimestampDocumentType {\n id: string;\n name: string;\n age: number;\n updatedAt: number;\n deletedAt?: number;\n}\nexport function humanWithTimestampData(givenData: Partial = {}): HumanWithTimestampDocumentType {\n let ret = {\n id: 
randomStringWithSpecialChars(12),\n name: faker.person.firstName(),\n age: randomNumber(1, 100),\n // use some time in the past week\n updatedAt: Date.now()\n };\n ret = Object.assign({}, ret, givenData);\n return ret;\n}\n\nexport interface AverageSchemaDocumentType {\n id: string;\n var1: string;\n var2: number;\n deep: {\n deep1: string;\n deep2: string;\n deeper: {\n deepNr: number;\n };\n };\n list: {\n deep1: string;\n deep2: string;\n }[];\n}\n\n\nconst averageSchemaForFieldLength = schemas.averageSchema() as any;\nexport function averageSchemaData(\n partial: Partial = {}\n): AverageSchemaDocumentType {\n return Object.assign(\n {},\n {\n id: randomStringWithSpecialChars(ensureNotFalsy(averageSchemaForFieldLength.properties.id.maxLength)),\n var1: randomStringWithSpecialChars(ensureNotFalsy(averageSchemaForFieldLength.properties.var1.maxLength)),\n var2: randomNumber(100, ensureNotFalsy(averageSchemaForFieldLength.properties.var2.maximum)),\n deep: {\n deep1: randomStringWithSpecialChars(ensureNotFalsy(averageSchemaForFieldLength.properties.deep.properties.deep1.maxLength)),\n deep2: randomStringWithSpecialChars(ensureNotFalsy(averageSchemaForFieldLength.properties.deep.properties.deep2.maxLength)),\n deeper: {\n deepNr: randomNumber(0, 10)\n }\n },\n list: new Array(5).fill(0).map(() => ({\n deep1: randomStringWithSpecialChars(5),\n deep2: randomStringWithSpecialChars(8)\n }))\n },\n partial\n );\n}\n\nexport interface PointDocumentType {\n id: string;\n x: number;\n y: number;\n}\nexport function pointData(): PointDocumentType {\n return {\n id: randomStringWithSpecialChars(12),\n x: faker.number.int(),\n y: faker.number.int()\n };\n}\n\nexport interface HumanWithIdAndAgeIndexDocumentType {\n id: string;\n name: string;\n age: number;\n}\nexport function humanWithIdAndAgeIndexDocumentType(\n age: number = randomNumber(1, 100)\n): HumanWithIdAndAgeIndexDocumentType {\n return {\n id: randomStringWithSpecialChars(12),\n name: faker.person.firstName(),\n 
age\n };\n}\n\nexport type HumanWithCompositePrimary = {\n // optional because it might be created by RxDB and not known before\n id?: string;\n firstName: string;\n lastName: string;\n info: {\n age: number;\n };\n};\nexport function humanWithCompositePrimary(partial: Partial = {}): HumanWithCompositePrimary {\n const defaultObj = {\n firstName: faker.person.firstName(),\n lastName: faker.person.lastName(),\n info: {\n age: randomNumber(10, 50)\n }\n };\n return Object.assign(\n defaultObj,\n partial\n );\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAIA,IAAAA,MAAA,GAAAC,OAAA;AAEA,IAAAC,cAAA,GAAAD,OAAA;AAKA,IAAAE,OAAA,GAAAC,uBAAA,CAAAH,OAAA;AACA,IAAAI,MAAA,GAAAJ,OAAA;AAAgE,SAAAK,yBAAAC,CAAA,6BAAAC,OAAA,mBAAAC,CAAA,OAAAD,OAAA,IAAAE,CAAA,OAAAF,OAAA,YAAAF,wBAAA,YAAAA,CAAAC,CAAA,WAAAA,CAAA,GAAAG,CAAA,GAAAD,CAAA,KAAAF,CAAA;AAAA,SAAAH,wBAAAG,CAAA,EAAAE,CAAA,SAAAA,CAAA,IAAAF,CAAA,IAAAA,CAAA,CAAAI,UAAA,SAAAJ,CAAA,eAAAA,CAAA,uBAAAA,CAAA,yBAAAA,CAAA,WAAAK,OAAA,EAAAL,CAAA,QAAAG,CAAA,GAAAJ,wBAAA,CAAAG,CAAA,OAAAC,CAAA,IAAAA,CAAA,CAAAG,GAAA,CAAAN,CAAA,UAAAG,CAAA,CAAAI,GAAA,CAAAP,CAAA,OAAAQ,CAAA,KAAAC,SAAA,UAAAC,CAAA,GAAAC,MAAA,CAAAC,cAAA,IAAAD,MAAA,CAAAE,wBAAA,WAAAC,CAAA,IAAAd,CAAA,oBAAAc,CAAA,OAAAC,cAAA,CAAAC,IAAA,CAAAhB,CAAA,EAAAc,CAAA,SAAAG,CAAA,GAAAP,CAAA,GAAAC,MAAA,CAAAE,wBAAA,CAAAb,CAAA,EAAAc,CAAA,UAAAG,CAAA,KAAAA,CAAA,CAAAV,GAAA,IAAAU,CAAA,CAAAC,GAAA,IAAAP,MAAA,CAAAC,cAAA,CAAAJ,CAAA,EAAAM,CAAA,EAAAG,CAAA,IAAAT,CAAA,CAAAM,CAAA,IAAAd,CAAA,CAAAc,CAAA,YAAAN,CAAA,CAAAH,OAAA,GAAAL,CAAA,EAAAG,CAAA,IAAAA,CAAA,CAAAe,GAAA,CAAAlB,CAAA,EAAAQ,CAAA,GAAAA,CAAA;AAZhE;AACA;AACA;;AAYA;AACA;AACA;AACA;AACA;AACO,IAAMW,iBAAiB,GAAAC,OAAA,CAAAD,iBAAA,GAAG,6EAA6E;AACvG,IAAME,6BAA6B,GAAAD,OAAA,CAAAC,6BAAA,GAAG,IAAAC,qBAAc,EAAC,IAAAC,kBAAW,EAACJ,iBAAiB,CAACK,KAAK,CAAC,EAAE,CAAC,CAACC,IAAI,CAAC,CAAC,CAAC,CAAC;AAC5G;AACO,SAASC,4BAA4BA,CAACC,MAAc,EAAE;EACzD,OAAO,IAAAC,2BAAY,EAACD,MAAM,EAAER,iBAAiB,CAAC;AAClD;AASO,SAASU,SAASA,CACrBC,UAAkB,GAAGJ,4BAA4B,CAAC,EAAE,CAAC,EACrDK,GAAW,GAAG,IAAAC,2BAAY,EA
AC,EAAE,EAAE,EAAE,CAAC,EAClCC,SAAiB,GAAGC,YAAK,CAACC,MAAM,CAACF,SAAS,CAAC,CAAC,EAC3B;EACjB,OAAO;IACHH,UAAU,EAAEA,UAAU;IACtBG,SAAS;IACTG,QAAQ,EAAEF,YAAK,CAACC,MAAM,CAACC,QAAQ,CAAC,CAAC;IACjCL;EACJ,CAAC;AACL;AAEO,SAASM,eAAeA,CAAA,EAA4B;EACvD,OAAO;IACHP,UAAU,EAAEJ,4BAA4B,CAAC,EAAE,CAAC;IAC5CO,SAAS,EAAEC,YAAK,CAACC,MAAM,CAACF,SAAS,CAAC,CAAC;IACnCG,QAAQ,EAAEF,YAAK,CAACC,MAAM,CAACC,QAAQ,CAAC;EACpC,CAAC;AACL;AAOO,SAASE,iBAAiBA,CAACC,OAA2C,GAAG,CAAC,CAAC,EAA6B;EAC3G,IAAMC,UAAU,GAAG;IACfV,UAAU,EAAEJ,4BAA4B,CAAC,EAAE,CAAC;IAC5CK,GAAG,EAAE,IAAAC,2BAAY,EAAC,EAAE,EAAE,EAAE;EAC5B,CAAC;EACD,OAAOrB,MAAM,CAAC8B,MAAM,CAChBD,UAAU,EACVD,OACJ,CAAC;AACL;AAMO,SAASG,cAAcA,CAACH,OAA4C,GAAG,CAAC,CAAC,EAA8B;EAC1G,IAAMC,UAAU,GAAG;IACfV,UAAU,EAAEJ,4BAA4B,CAAC,EAAE,CAAC;IAC5CK,GAAG,EAAE,IAAAC,2BAAY,EAAC,EAAE,EAAE,EAAE,CAAC,GAAG;EAChC,CAAC;EACD,OAAOrB,MAAM,CAAC8B,MAAM,CAChBD,UAAU,EACVD,OACJ,CAAC;AACL;AAQO,SAASI,iBAAiBA,CAAA,EAAkC;EAC/D,OAAO;IACHb,UAAU,EAAEJ,4BAA4B,CAAC,EAAE,CAAC;IAC5CkB,KAAK,EAAE;MACHb,GAAG,EAAE,IAAAC,2BAAY,EAAC,EAAE,EAAE,EAAE;IAC5B;EACJ,CAAC;AACL;AAMO,SAASa,YAAYA,CAAA,EAA6B;EACrD,OAAO;IACHZ,SAAS,EAAEC,YAAK,CAACC,MAAM,CAACF,SAAS,CAAC,CAAC;IACnCG,QAAQ,EAAEF,YAAK,CAACC,MAAM,CAACC,QAAQ,CAAC;EACpC,CAAC;AACL;AAUO,SAASU,eAAeA,CAACP,OAAyC,GAAG,CAAC,CAAC,EAA2B;EACrG,IAAMC,UAAU,GAAG;IACfV,UAAU,EAAEJ,4BAA4B,CAAC,EAAE,CAAC;IAC5CO,SAAS,EAAEC,YAAK,CAACC,MAAM,CAACF,SAAS,CAAC,CAAC;IACnCc,SAAS,EAAE;MACPC,IAAI,EAAEtB,4BAA4B,CAAC,CAAC,CAAC;MACrCuB,KAAK,EAAE;IACX;EACJ,CAAC;EACD,OAAOtC,MAAM,CAAC8B,MAAM,CAChBD,UAAU,EACVD,OACJ,CAAC;AACL;AAYO,SAASW,mBAAmBA,CAAA,EAAgC;EAC/D,OAAO;IACHpB,UAAU,EAAEJ,4BAA4B,CAAC,EAAE,CAAC;IAC5CqB,SAAS,EAAE;MACPC,IAAI,EAAEtB,4BAA4B,CAAC,CAAC,CAAC;MACrCyB,MAAM,EAAE;QACJC,IAAI,EAAE,KAAK;QACXC,KAAK,EAAE;MACX;IACJ;EACJ,CAAC;AACL;AASO,SAASC,oBAAoBA,CAAA,EAAyB;EACzD,OAAO;IACHxB,UAAU,EAAEJ,4BAA4B,CAAC,EAAE,CAAC;IAC5C6B,OAAO,EAAE7B,4BAA4B,CAAC,EAAE,CAAC;IACzCO,SAAS,EAAEC,YAAK,CAACC,MAAM,CAACF,SAAS,CAAC,CAAC;IACnCG,QAAQ,EAAEF,YAAK,CAACC,MAAM,CAACC,QAAQ,CAAC,CAAC;IACjCL,GAAG,EAAE,IAAA
C,2BAAY,EAAC,EAAE,EAAE,EAAE;EAC5B,CAAC;AACL;AASO,SAASwB,aAAaA,CAAA,EAA0B;EACnD,OAAO;IACHR,IAAI,EAAEtB,4BAA4B,CAAC,CAAC,CAAC;IACrC+B,MAAM,EAAE,IAAIC,KAAK,CAAC,CAAC,CAAC,CAACC,IAAI,CAAC,CAAC,CAAC,CAACC,GAAG,CAAC,MAAM;MACnC,OAAO;QACHZ,IAAI,EAAEtB,4BAA4B,CAAC,CAAC,CAAC;QACrCmC,MAAM,EAAE,IAAA7B,2BAAY,EAAC,EAAE,EAAE,EAAE;MAC/B,CAAC;IACL,CAAC;EACL,CAAC;AACL;AAMO,SAAS8B,eAAeA,CAACvB,OAA6C,GAAG,CAAC,CAAC,EAA+B;EAC7G,IAAMC,UAAU,GAAG;IACfQ,IAAI,EAAEtB,4BAA4B,CAAC,CAAC,CAAC;IACrC+B,MAAM,EAAE,IAAIC,KAAK,CAAC,CAAC,CAAC,CAACC,IAAI,CAAC,CAAC,CAAC,CAACC,GAAG,CAAC,MAAMlC,4BAA4B,CAAC,CAAC,CAAC;EAC1E,CAAC;EACD,OAAOf,MAAM,CAAC8B,MAAM,CAChBD,UAAU,EACVD,OACJ,CAAC;AACL;AAOO,SAASwB,kBAAkBA,CAACC,MAAM,GAAGtC,4BAA4B,CAAC,EAAE,CAAC,EAA8B;EACtG,OAAO;IACHI,UAAU,EAAEJ,4BAA4B,CAAC,EAAE,CAAC;IAC5CO,SAAS,EAAEC,YAAK,CAACC,MAAM,CAACF,SAAS,CAAC,CAAC;IACnC+B;EACJ,CAAC;AACL;AAUO,SAASC,wBAAwBA,CAAA,EAAqC;EACzE,OAAO;IACHnC,UAAU,EAAEJ,4BAA4B,CAAC,EAAE,CAAC;IAC5CO,SAAS,EAAEC,YAAK,CAACC,MAAM,CAACF,SAAS,CAAC,CAAC;IACnC+B,MAAM,EAAE;MACJhB,IAAI,EAAEtB,4BAA4B,CAAC,EAAE,CAAC;MACtCwC,OAAO,EAAExC,4BAA4B,CAAC,EAAE;IAC5C;EACJ,CAAC;AACL;AAoBO,SAASyC,8BAA8BA,CAAA,EAAmC;EAC7E,OAAO;IACHrC,UAAU,EAAEJ,4BAA4B,CAAC,EAAE,CAAC;IAC5CO,SAAS,EAAEC,YAAK,CAACC,MAAM,CAACF,SAAS,CAAC,CAAC;IACnCmC,kBAAkB,EAAE1C,4BAA4B,CAAC,EAAE,CAAC;IACpD2C,UAAU,EAAE;MACRC,EAAE,EAAE5C,4BAA4B,CAAC,EAAE;IACvC,CAAC;IACD6C,UAAU,EAAE;MACRC,QAAQ,EAAE;QACNF,EAAE,EAAE5C,4BAA4B,CAAC,EAAE;MACvC;IACJ,CAAC;IACD+C,YAAY,EAAE;MACVD,QAAQ,EAAE;QACNF,EAAE,EAAE5C,4BAA4B,CAAC,EAAE;MACvC;IACJ;EACJ,CAAC;AACL;AAOO,SAASgD,iBAAiBA,CAAA,EAA8B;EAC3D,OAAO;IACH5C,UAAU,EAAEJ,4BAA4B,CAAC,EAAE,CAAC;IAC5CiD,eAAe,EAAEjD,4BAA4B,CAAC,EAAE,CAAC;IACjDK,GAAG,EAAE,IAAAC,2BAAY,EAAC,EAAE,EAAE,EAAE;EAC5B,CAAC;AACL;AAOO,SAAS4C,yBAAyBA,CAAA,EAAsC;EAC3E,OAAO;IACH9C,UAAU,EAAEJ,4BAA4B,CAAC,EAAE,CAAC;IAC5CiD,eAAe,EAAE;MAAE,CAACjD,4BAA4B,CAAC,EAAE,CAAC,GAAGA,4BAA4B,CAAC,EAAE;IAAE,CAAC;IACzFK,GAAG,EAAE,IAAAC,2BAAY,EAAC,EAAE,EAAE,EAAE;EAC5B,CAAC;AACL;AAMO,SAAS6C,aAAaA,CAAA,EAA8B;EACvD,OAAO;IACH/C,UAAU,EA
AE,CAAC,CAAC;IACdG,SAAS,EAAEC,YAAK,CAACC,MAAM,CAACF,SAAS,CAAC;EACtC,CAAC;AACL;AAMO,SAAS6C,YAAYA,CAACC,UAAmB,EAAwB;EACpE,OAAO;IACH/B,IAAI,EAAEtB,4BAA4B,CAAC,EAAE,CAAC;IACtCqD;EACJ,CAAC;AACL;AAQO,SAASC,kBAAkBA,CAACD,UAAmB,EAA8B;EAChF,OAAO;IACH/B,IAAI,EAAEtB,4BAA4B,CAAC,EAAE,CAAC;IACtCuD,GAAG,EAAE;MACDF;IACJ;EACJ,CAAC;AACL;AAqBO,SAASG,sBAAsBA,CAACC,SAAkD,GAAG,CAAC,CAAC,EAAkC;EAC5H,IAAIC,GAAG,GAAG;IACNC,EAAE,EAAE3D,4BAA4B,CAAC,EAAE,CAAC;IACpCsB,IAAI,EAAEd,YAAK,CAACC,MAAM,CAACF,SAAS,CAAC,CAAC;IAC9BF,GAAG,EAAE,IAAAC,2BAAY,EAAC,CAAC,EAAE,GAAG,CAAC;IACzB;IACAsD,SAAS,EAAEC,IAAI,CAACC,GAAG,CAAC;EACxB,CAAC;EACDJ,GAAG,GAAGzE,MAAM,CAAC8B,MAAM,CAAC,CAAC,CAAC,EAAE2C,GAAG,EAAED,SAAS,CAAC;EACvC,OAAOC,GAAG;AACd;AAoBA,IAAMK,2BAA2B,GAAG7F,OAAO,CAAC8F,aAAa,CAAC,CAAQ;AAC3D,SAASC,iBAAiBA,CAC7BpD,OAA2C,GAAG,CAAC,CAAC,EACvB;EACzB,OAAO5B,MAAM,CAAC8B,MAAM,CAChB,CAAC,CAAC,EACF;IACI4C,EAAE,EAAE3D,4BAA4B,CAAC,IAAAJ,qBAAc,EAACmE,2BAA2B,CAACG,UAAU,CAACP,EAAE,CAACQ,SAAS,CAAC,CAAC;IACrGC,IAAI,EAAEpE,4BAA4B,CAAC,IAAAJ,qBAAc,EAACmE,2BAA2B,CAACG,UAAU,CAACE,IAAI,CAACD,SAAS,CAAC,CAAC;IACzGE,IAAI,EAAE,IAAA/D,2BAAY,EAAC,GAAG,EAAE,IAAAV,qBAAc,EAACmE,2BAA2B,CAACG,UAAU,CAACG,IAAI,CAACC,OAAO,CAAC,CAAC;IAC5FC,IAAI,EAAE;MACFC,KAAK,EAAExE,4BAA4B,CAAC,IAAAJ,qBAAc,EAACmE,2BAA2B,CAACG,UAAU,CAACK,IAAI,CAACL,UAAU,CAACM,KAAK,CAACL,SAAS,CAAC,CAAC;MAC3HM,KAAK,EAAEzE,4BAA4B,CAAC,IAAAJ,qBAAc,EAACmE,2BAA2B,CAACG,UAAU,CAACK,IAAI,CAACL,UAAU,CAACO,KAAK,CAACN,SAAS,CAAC,CAAC;MAC3HO,MAAM,EAAE;QACJC,MAAM,EAAE,IAAArE,2BAAY,EAAC,CAAC,EAAE,EAAE;MAC9B;IACJ,CAAC;IACDsE,IAAI,EAAE,IAAI5C,KAAK,CAAC,CAAC,CAAC,CAACC,IAAI,CAAC,CAAC,CAAC,CAACC,GAAG,CAAC,OAAO;MAClCsC,KAAK,EAAExE,4BAA4B,CAAC,CAAC,CAAC;MACtCyE,KAAK,EAAEzE,4BAA4B,CAAC,CAAC;IACzC,CAAC,CAAC;EACN,CAAC,EACDa,OACJ,CAAC;AACL;AAOO,SAASgE,SAASA,CAAA,EAAsB;EAC3C,OAAO;IACHlB,EAAE,EAAE3D,4BAA4B,CAAC,EAAE,CAAC;IACpC8E,CAAC,EAAEtE,YAAK,CAACuE,MAAM,CAACC,GAAG,CAAC,CAAC;IACrBC,CAAC,EAAEzE,YAAK,CAACuE,MAAM,CAACC,GAAG,CAAC;EACxB,CAAC;AACL;AAOO,SAASE,kCAAkCA,CAC9C7E,GAAW,GAAG,IAAAC,2BAAY,EAAC,CAAC
,EAAE,GAAG,CAAC,EACA;EAClC,OAAO;IACHqD,EAAE,EAAE3D,4BAA4B,CAAC,EAAE,CAAC;IACpCsB,IAAI,EAAEd,YAAK,CAACC,MAAM,CAACF,SAAS,CAAC,CAAC;IAC9BF;EACJ,CAAC;AACL;AAWO,SAAS8E,yBAAyBA,CAACtE,OAA2C,GAAG,CAAC,CAAC,EAA6B;EACnH,IAAMC,UAAU,GAAG;IACfP,SAAS,EAAEC,YAAK,CAACC,MAAM,CAACF,SAAS,CAAC,CAAC;IACnCG,QAAQ,EAAEF,YAAK,CAACC,MAAM,CAACC,QAAQ,CAAC,CAAC;IACjC0E,IAAI,EAAE;MACF/E,GAAG,EAAE,IAAAC,2BAAY,EAAC,EAAE,EAAE,EAAE;IAC5B;EACJ,CAAC;EACD,OAAOrB,MAAM,CAAC8B,MAAM,CAChBD,UAAU,EACVD,OACJ,CAAC;AACL","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/test-utils/schemas.js b/dist/cjs/plugins/test-utils/schemas.js deleted file mode 100644 index 1d706e282bc..00000000000 --- a/dist/cjs/plugins/test-utils/schemas.js +++ /dev/null @@ -1,1215 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.averageSchema = averageSchema; -exports.empty = exports.deepNestedHuman = exports.compoundIndexNoString = exports.compoundIndex = exports.bigHuman = void 0; -exports.enableKeyCompression = enableKeyCompression; -exports.simpleHumanV3 = exports.simpleHuman = exports.simpleArrayHero = exports.refHumanNested = exports.refHuman = exports.primaryHumanLiteral = exports.primaryHuman = exports.point = exports.notExistingIndex = exports.noStringIndex = exports.noIndexHuman = exports.nestedHuman = exports.humanWithTimestampNested = exports.humanWithTimestampAllIndex = exports.humanWithTimestamp = exports.humanWithSimpleAndCompoundIndexes = exports.humanWithDeepNestedIndexes = exports.humanWithAllIndex = exports.humanSubIndex = exports.humanSchemaLiteral = exports.humanNormalizeSchema2 = exports.humanNormalizeSchema1Literal = exports.humanNormalizeSchema1 = exports.humanMinimalBroken = exports.humanMinimal = exports.humanIdAndAgeIndex = exports.humanFinal = exports.humanDefault = exports.humanCompositePrimarySchemaLiteral = exports.humanCompositePrimary = 
exports.humanAgeIndex = exports.human = exports.heroArray = exports.encryptedObjectHuman = exports.encryptedHuman = exports.encryptedDeepHuman = void 0; -var _asyncTestUtil = _interopRequireDefault(require("async-test-util")); -var _overwritable = require("../../overwritable.js"); -var _rxSchema = require("../../rx-schema.js"); -var _index = require("../utils/index.js"); -var humanSchemaLiteral = exports.humanSchemaLiteral = _overwritable.overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - description: 'describes a human being', - version: 0, - keyCompression: false, - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - firstName: { - type: 'string', - maxLength: 100 - }, - lastName: { - type: 'string', - maxLength: 100 - }, - age: { - description: 'age in years', - type: 'integer', - minimum: 0, - maximum: 150, - multipleOf: 1 - } - }, - required: ['firstName', 'lastName', 'passportId', 'age'], - indexes: ['firstName'] -}); -var humanSchemaTyped = (0, _rxSchema.toTypedRxJsonSchema)(humanSchemaLiteral); -var human = exports.human = humanSchemaLiteral; -var humanDefault = exports.humanDefault = _overwritable.overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - version: 0, - description: 'describes a human being', - keyCompression: false, - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - firstName: { - type: 'string', - maxLength: 100 - }, - lastName: { - type: 'string', - maxLength: 100 - }, - age: { - description: 'age in years', - type: 'integer', - minimum: 0, - maximum: 150, - default: 20 - } - }, - indexes: [], - required: ['passportId'] -}); -var humanFinal = exports.humanFinal = _overwritable.overwritable.deepFreezeWhenDevMode({ - title: 'human schema with age set final', - version: 0, - keyCompression: false, - type: 'object', - primaryKey: 'passportId', - properties: { - passportId: { - type: 
'string', - maxLength: 100 - }, - firstName: { - type: 'string' - }, - lastName: { - type: 'string' - }, - age: { - type: 'integer', - minimum: 0, - maximum: 150, - final: true - } - }, - required: ['passportId'] -}); -var simpleHuman = exports.simpleHuman = _overwritable.overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - version: 0, - keyCompression: false, - description: 'describes a simple human being', - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - age: { - type: 'string', - maxLength: 100 - }, - oneOptional: { - type: 'string' - } - }, - indexes: ['age'], - required: ['passportId', 'age'] -}); -var simpleHumanV3 = exports.simpleHumanV3 = _overwritable.overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - version: 3, - keyCompression: false, - description: 'describes a simple human being', - type: 'object', - primaryKey: 'passportId', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - age: { - type: 'number', - minimum: 0, - maximum: 1000, - multipleOf: 1 - }, - oneOptional: { - type: 'string' - } - }, - indexes: ['age'], - required: ['passportId', 'age'] -}); -var humanAgeIndex = exports.humanAgeIndex = _overwritable.overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - version: 0, - keyCompression: false, - description: 'describes a human being', - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - firstName: { - type: 'string' - }, - lastName: { - type: 'string' - }, - age: { - description: 'Age in years', - type: 'integer', - minimum: 0, - maximum: 150, - multipleOf: 1 - } - }, - required: ['firstName', 'lastName', 'age'], - indexes: ['age'] -}); -var humanSubIndex = exports.humanSubIndex = _overwritable.overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - version: 0, - description: 'describes a human being where other.age is index', - keyCompression: 
false, - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - other: { - type: 'object', - properties: { - age: { - description: 'Age in years', - type: 'integer', - minimum: 0, - maximum: 150, - multipleOf: 1 - } - } - } - }, - required: ['passportId'], - indexes: ['other.age'] -}); - -/** - * each field is an index, - * use this to slow down inserts in tests - */ -var humanWithAllIndex = exports.humanWithAllIndex = _overwritable.overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - description: 'describes a human being', - version: 0, - keyCompression: false, - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - firstName: { - type: 'string', - maxLength: 100 - }, - lastName: { - type: 'string', - maxLength: 100 - }, - age: { - description: 'age in years', - type: 'integer', - minimum: 0, - maximum: 150, - multipleOf: 1 - } - }, - indexes: ['firstName', 'lastName', 'age'], - required: ['firstName', 'lastName'] -}); -var nestedHuman = exports.nestedHuman = { - title: 'human nested', - version: 0, - description: 'describes a human being with a nested field', - keyCompression: false, - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - firstName: { - type: 'string', - maxLength: 100 - }, - mainSkill: { - type: 'object', - properties: { - name: { - type: 'string', - maxLength: 10 - }, - level: { - type: 'number', - minimum: 0, - maximum: 10, - multipleOf: 1 - } - }, - required: ['name', 'level'], - additionalProperties: false - } - }, - required: ['firstName'], - indexes: [] -}; -var deepNestedHuman = exports.deepNestedHuman = { - title: 'deep human nested', - version: 0, - keyCompression: false, - description: 'describes a human being with a nested field', - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - 
maxLength: 100 - }, - mainSkill: { - type: 'object', - properties: { - name: { - type: 'string' - }, - attack: { - type: 'object', - properties: { - good: { - type: 'boolean' - }, - count: { - type: 'number' - } - } - } - }, - required: ['name'] - } - }, - indexes: [], - required: ['mainSkill'] -}; -var noIndexHuman = exports.noIndexHuman = _overwritable.overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - version: 0, - description: 'this schema has no index', - keyCompression: false, - primaryKey: 'firstName', - type: 'object', - properties: { - firstName: { - type: 'string', - maxLength: 100 - }, - lastName: { - type: 'string' - } - }, - required: ['lastName'] -}); -var noStringIndex = exports.noStringIndex = _overwritable.overwritable.deepFreezeWhenDevMode({ - description: 'the index has no type:string', - version: 0, - keyCompression: false, - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'object', - maxLength: 100 - }, - firstName: { - type: 'string' - } - }, - required: ['firstName', 'passportId'], - indexes: [] -}); -var bigHuman = exports.bigHuman = _overwritable.overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - version: 0, - description: 'describes a human being with 2 indexes', - keyCompression: false, - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - dnaHash: { - type: 'string', - maxLength: 100 - }, - firstName: { - type: 'string', - maxLength: 100 - }, - lastName: { - type: 'string' - }, - age: { - description: 'Age in years', - type: 'integer', - minimum: 0 - } - }, - required: ['firstName', 'lastName'], - indexes: ['firstName', 'dnaHash'] -}); -var encryptedHuman = exports.encryptedHuman = { - title: 'human encrypted', - version: 0, - description: 'uses an encrypted field', - primaryKey: 'passportId', - type: 'object', - keyCompression: false, - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - 
firstName: { - type: 'string' - }, - secret: { - type: 'string' - } - }, - indexes: [], - required: ['firstName', 'secret'], - encrypted: ['secret'] -}; -var encryptedObjectHuman = exports.encryptedObjectHuman = { - title: 'human encrypted', - version: 0, - keyCompression: false, - description: 'uses an encrypted field', - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - firstName: { - type: 'string' - }, - secret: { - type: 'object', - properties: { - name: { - type: 'string' - }, - subname: { - type: 'string' - } - } - } - }, - indexes: [], - required: ['firstName', 'secret'], - encrypted: ['secret'] -}; -var encryptedDeepHuman = exports.encryptedDeepHuman = { - title: 'human encrypted', - version: 0, - keyCompression: false, - description: 'uses an encrypted field', - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - firstName: { - type: 'string' - }, - firstLevelPassword: { - type: 'string' - }, - secretData: { - type: 'object', - properties: { - pw: { - type: 'string' - } - } - }, - deepSecret: { - type: 'object', - properties: { - darkhole: { - type: 'object', - properties: { - pw: { - type: 'string' - } - } - } - } - }, - nestedSecret: { - type: 'object', - properties: { - darkhole: { - type: 'object', - properties: { - pw: { - type: 'string' - } - } - } - } - } - }, - indexes: [], - required: ['firstName', 'secretData'], - encrypted: ['firstLevelPassword', 'secretData', 'deepSecret.darkhole.pw', 'nestedSecret.darkhole.pw'] -}; -var notExistingIndex = exports.notExistingIndex = { - title: 'index', - version: 0, - description: 'this schema has a specified index which does not exists', - primaryKey: 'passportId', - type: 'object', - keyCompression: false, - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - address: { - type: 'object', - properties: { - street: { - type: 'string' - } - } - } - }, - 
required: ['passportId'], - indexes: ['address.apartment'] -}; -var compoundIndex = exports.compoundIndex = _overwritable.overwritable.deepFreezeWhenDevMode({ - title: 'compound index', - version: 0, - description: 'this schema has a compoundIndex', - primaryKey: 'passportId', - type: 'object', - keyCompression: false, - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - passportCountry: { - type: 'string', - maxLength: 100 - }, - age: { - type: 'integer', - minimum: 0, - maximum: 150, - multipleOf: 1 - } - }, - required: ['passportId'], - indexes: [['age', 'passportCountry']] -}); -var compoundIndexNoString = exports.compoundIndexNoString = { - title: 'compound index', - version: 0, - description: 'this schema has a compoundIndex', - primaryKey: 'passportId', - keyCompression: false, - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - passportCountry: { - type: 'object' - }, - age: { - type: 'integer' - } - }, - indexes: [[10, 'passportCountry']] -}; -var empty = exports.empty = { - title: 'empty schema', - version: 0, - type: 'object', - primaryKey: 'id', - properties: { - id: { - type: 'string', - maxLength: 100 - } - }, - required: ['id'] -}; -var heroArray = exports.heroArray = _overwritable.overwritable.deepFreezeWhenDevMode({ - title: 'hero schema', - version: 0, - keyCompression: false, - description: 'describes a hero with an array-field', - primaryKey: 'name', - type: 'object', - properties: { - name: { - type: 'string', - maxLength: 100 - }, - skills: { - type: 'array', - maxItems: 5, - uniqueItems: true, - items: { - type: 'object', - properties: { - name: { - type: 'string' - }, - damage: { - type: 'number' - } - } - } - } - }, - required: ['name'] -}); -var simpleArrayHero = exports.simpleArrayHero = _overwritable.overwritable.deepFreezeWhenDevMode({ - title: 'hero schema', - version: 0, - description: 'describes a hero with a string-array-field', - keyCompression: false, - primaryKey: 
'name', - type: 'object', - properties: { - name: { - type: 'string', - maxLength: 100 - }, - skills: { - type: 'array', - maxItems: 5, - uniqueItems: true, - items: { - type: 'string' - } - } - }, - required: ['name'] -}); -var primaryHumanLiteral = exports.primaryHumanLiteral = _overwritable.overwritable.deepFreezeWhenDevMode({ - title: 'human schema with primary', - version: 0, - description: 'describes a human being with passportID as primary', - keyCompression: false, - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - minLength: 4, - maxLength: 100 - }, - firstName: { - type: 'string', - maxLength: 100 - }, - lastName: { - type: 'string', - maxLength: 500 - }, - age: { - type: 'integer', - minimum: 0, - maximum: 150, - multipleOf: 1 - } - }, - required: ['passportId', 'firstName', 'lastName'] -}); -var primaryHumanTypedSchema = (0, _rxSchema.toTypedRxJsonSchema)(primaryHumanLiteral); -var primaryHuman = exports.primaryHuman = primaryHumanLiteral; -var humanNormalizeSchema1Literal = exports.humanNormalizeSchema1Literal = _overwritable.overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - version: 0, - keyCompression: false, - description: 'describes a human being', - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - minLength: 4, - maxLength: 100 - }, - age: { - description: 'age in years', - type: 'integer', - minimum: 0, - maximum: 150, - multipleOf: 1 - } - }, - required: ['age', 'passportId'] -}); -var humanNormalizeSchema1Typed = (0, _rxSchema.toTypedRxJsonSchema)(humanNormalizeSchema1Literal); -var humanNormalizeSchema1 = exports.humanNormalizeSchema1 = humanNormalizeSchema1Literal; -var humanNormalizeSchema2 = exports.humanNormalizeSchema2 = _overwritable.overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - version: 0, - keyCompression: false, - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - 
minLength: 4, - maxLength: 100 - }, - age: { - minimum: 0, - type: 'integer', - description: 'age in years', - maximum: 150, - multipleOf: 1 - } - }, - description: 'describes a human being', - required: ['age', 'passportId'] -}); -var refHuman = exports.refHuman = _overwritable.overwritable.deepFreezeWhenDevMode({ - title: 'human related to other human', - version: 0, - keyCompression: false, - primaryKey: 'name', - type: 'object', - properties: { - name: { - type: 'string', - maxLength: 100 - }, - bestFriend: { - ref: 'human', - type: 'string' - } - }, - required: ['name'] -}); -var humanCompositePrimary = exports.humanCompositePrimary = { - title: 'human schema', - description: 'describes a human being', - version: 0, - keyCompression: false, - primaryKey: { - key: 'id', - fields: ['firstName', 'info.age'], - separator: '|' - }, - type: 'object', - properties: { - id: { - type: 'string', - maxLength: 100 - }, - firstName: { - type: 'string', - maxLength: 100 - }, - lastName: { - type: 'string' - }, - info: { - type: 'object', - properties: { - age: { - description: 'age in years', - type: 'integer', - minimum: 0, - maximum: 150 - } - }, - required: ['age'] - } - }, - required: ['id', 'firstName', 'lastName', 'info'], - indexes: ['firstName'] -}; -var humanCompositePrimarySchemaLiteral = exports.humanCompositePrimarySchemaLiteral = _overwritable.overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - description: 'describes a human being', - version: 0, - keyCompression: false, - primaryKey: { - key: 'id', - fields: ['firstName', 'info.age'], - separator: '|' - }, - encrypted: [], - type: 'object', - properties: { - id: { - type: 'string', - maxLength: 100 - }, - firstName: { - type: 'string', - maxLength: 100 - }, - lastName: { - type: 'string' - }, - info: { - type: 'object', - properties: { - age: { - description: 'age in years', - type: 'integer', - minimum: 0, - maximum: 150 - } - }, - required: ['age'] - }, - readonlyProps: { - allOf: [], - anyOf: 
[], - oneOf: [], - type: [], - dependencies: { - someDep: ['asd'] - }, - items: [], - required: [], - enum: [] - } - }, - required: ['id', 'firstName', 'lastName', 'info'], - indexes: ['firstName'] -}); -var humanCompositePrimarySchemaTyped = (0, _rxSchema.toTypedRxJsonSchema)(humanCompositePrimarySchemaLiteral); -var refHumanNested = exports.refHumanNested = _overwritable.overwritable.deepFreezeWhenDevMode({ - title: 'human related to other human', - version: 0, - keyCompression: false, - primaryKey: 'name', - type: 'object', - properties: { - name: { - type: 'string', - maxLength: 100 - }, - foo: { - type: 'object', - properties: { - bestFriend: { - ref: 'human', - type: 'string' - } - } - } - }, - required: ['name'] -}); - -/** - * an average schema used in performance-tests - */ -function averageSchema() { - var ret = { - title: 'averageSchema_' + _asyncTestUtil.default.randomString(5), - // randomisation used so hash differs - version: 0, - primaryKey: 'id', - type: 'object', - keyCompression: false, - properties: { - id: { - type: 'string', - maxLength: 12 - }, - var1: { - type: 'string', - maxLength: 12 - }, - var2: { - type: 'number', - minimum: 0, - maximum: 50000, - multipleOf: 1 - }, - deep: { - type: 'object', - properties: { - deep1: { - type: 'string', - maxLength: 10 - }, - deep2: { - type: 'string', - maxLength: 10 - } - } - }, - list: { - type: 'array', - items: { - type: 'object', - properties: { - deep1: { - type: 'string' - }, - deep2: { - type: 'string' - } - } - } - } - }, - required: ['id'], - indexes: ['var1', 'var2', 'deep.deep1', - // one compound index - ['var2', 'var1']], - sharding: { - shards: 6, - mode: 'collection' - } - }; - return ret; -} -var point = exports.point = _overwritable.overwritable.deepFreezeWhenDevMode({ - title: 'point schema', - version: 0, - description: 'describes coordinates in 2d space', - primaryKey: 'id', - type: 'object', - properties: { - id: { - type: 'string', - maxLength: 100 - }, - x: { - type: 'number' - 
}, - y: { - type: 'number' - } - }, - required: ['x', 'y'] -}); -var humanMinimal = exports.humanMinimal = _overwritable.overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - description: 'describes a human being', - version: 0, - keyCompression: false, - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - age: { - type: 'integer' - }, - oneOptional: { - type: 'string' - } - }, - indexes: [], - required: ['passportId', 'age'] -}); -var humanMinimalBroken = exports.humanMinimalBroken = { - title: 'human schema', - description: 'describes a human being', - version: 0, - keyCompression: false, - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - broken: { - type: 'integer' - } - }, - indexes: [], - required: ['passportId', 'broken'] -}; - -/** - * used in the graphql-test - * contains timestamp - */ -var humanWithTimestamp = exports.humanWithTimestamp = _overwritable.overwritable.deepFreezeWhenDevMode({ - version: 0, - type: 'object', - primaryKey: 'id', - properties: { - id: { - type: 'string', - maxLength: 100 - }, - name: { - type: 'string', - maxLength: 1000 - }, - age: { - type: 'number' - }, - updatedAt: { - type: 'number', - minimum: 0, - maximum: 10000000000000000, - multipleOf: 1 - }, - deletedAt: { - type: 'number' - } - }, - indexes: ['updatedAt'], - required: ['id', 'name', 'age', 'updatedAt'] -}); -var humanWithTimestampNested = exports.humanWithTimestampNested = _overwritable.overwritable.deepFreezeWhenDevMode({ - version: 0, - type: 'object', - primaryKey: 'id', - properties: { - id: { - type: 'string', - maxLength: 100 - }, - name: { - type: 'string', - maxLength: 1000 - }, - age: { - type: 'number' - }, - updatedAt: { - type: 'number', - minimum: 0, - maximum: 10000000000000000, - multipleOf: 1 - }, - deletedAt: { - type: 'number' - }, - address: { - type: 'object', - properties: { - street: { - type: 'string' 
- }, - suite: { - type: 'string' - }, - city: { - type: 'string' - }, - zipcode: { - type: 'string' - }, - geo: { - type: 'object', - properties: { - lat: { - type: 'string' - }, - lng: { - type: 'string' - } - } - } - } - } - }, - indexes: ['updatedAt'], - required: ['id', 'name', 'age', 'updatedAt'] -}); - -/** - * each field is an index, - * use this to slow down inserts in tests - */ -var humanWithTimestampAllIndex = exports.humanWithTimestampAllIndex = _overwritable.overwritable.deepFreezeWhenDevMode({ - version: 0, - type: 'object', - primaryKey: 'id', - properties: { - id: { - type: 'string', - maxLength: 100 - }, - name: { - type: 'string', - maxLength: 100 - }, - age: { - type: 'number', - minimum: 0, - maximum: 1500, - multipleOf: 1 - }, - updatedAt: { - type: 'number', - minimum: 0, - maximum: 10000000000000000, - multipleOf: 1 - }, - deletedAt: { - type: 'number' - } - }, - indexes: ['name', 'age', 'updatedAt'], - required: ['id', 'name', 'age', 'updatedAt'] -}); -var humanWithSimpleAndCompoundIndexes = exports.humanWithSimpleAndCompoundIndexes = _overwritable.overwritable.deepFreezeWhenDevMode({ - version: 0, - primaryKey: 'id', - type: 'object', - properties: { - id: { - type: 'string', - maxLength: 100 - }, - name: { - type: 'string', - maxLength: 100 - }, - age: { - type: 'number', - minimum: 0, - maximum: 1500, - multipleOf: 1 - }, - createdAt: { - type: 'number', - minimum: 0, - maximum: 10000000000000000, - multipleOf: 1 - }, - updatedAt: { - type: 'number', - minimum: 0, - maximum: 10000000000000000, - multipleOf: 1 - } - }, - indexes: [['name', 'id'], ['age', 'id'], ['createdAt', 'updatedAt', 'id']], - required: ['id', 'name', 'age', 'updatedAt'] -}); -var humanWithDeepNestedIndexes = exports.humanWithDeepNestedIndexes = _overwritable.overwritable.deepFreezeWhenDevMode({ - version: 0, - primaryKey: 'id', - type: 'object', - properties: { - id: { - type: 'string', - maxLength: 100 - }, - name: { - type: 'string', - maxLength: 100 - }, - job: { - 
type: 'object', - properties: { - name: { - type: 'string', - maxLength: 100 - }, - manager: { - type: 'object', - properties: { - fullName: { - type: 'string', - maxLength: 100 - }, - previousJobs: { - type: 'array', - items: { - type: 'object', - properties: { - name: { - type: 'string', - maxLength: 100 - } - } - } - } - } - } - } - } - }, - required: ['id'], - indexes: ['name', 'job.name', 'job.manager.fullName'] -}); -var humanIdAndAgeIndex = exports.humanIdAndAgeIndex = _overwritable.overwritable.deepFreezeWhenDevMode({ - version: 0, - description: 'uses a compound index with id as lowest level', - primaryKey: 'id', - type: 'object', - properties: { - id: { - type: 'string', - maxLength: 100 - }, - name: { - type: 'string' - }, - age: { - description: 'Age in years', - type: 'integer', - minimum: 0, - maximum: 150, - multipleOf: 1 - } - }, - required: ['id', 'name', 'age'], - indexes: [['age', 'id']] -}); -function enableKeyCompression(schema) { - var ret = (0, _index.flatClone)(schema); - ret.keyCompression = true; - return ret; -} -//# sourceMappingURL=schemas.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/test-utils/schemas.js.map b/dist/cjs/plugins/test-utils/schemas.js.map deleted file mode 100644 index ebfeed97994..00000000000 --- a/dist/cjs/plugins/test-utils/schemas.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"schemas.js","names":["_asyncTestUtil","_interopRequireDefault","require","_overwritable","_rxSchema","_index","humanSchemaLiteral","exports","overwritable","deepFreezeWhenDevMode","title","description","version","keyCompression","primaryKey","type","properties","passportId","maxLength","firstName","lastName","age","minimum","maximum","multipleOf","required","indexes","humanSchemaTyped","toTypedRxJsonSchema","human","humanDefault","default","humanFinal","final","simpleHuman","oneOptional","simpleHumanV3","humanAgeIndex","humanSubIndex","other","humanWithAllIndex","nestedHuman","mainSkill","name","level","additionalProperties","deepNestedHuman","attack","good","count","noIndexHuman","noStringIndex","bigHuman","dnaHash","encryptedHuman","secret","encrypted","encryptedObjectHuman","subname","encryptedDeepHuman","firstLevelPassword","secretData","pw","deepSecret","darkhole","nestedSecret","notExistingIndex","address","street","compoundIndex","passportCountry","compoundIndexNoString","empty","id","heroArray","skills","maxItems","uniqueItems","items","damage","simpleArrayHero","primaryHumanLiteral","minLength","primaryHumanTypedSchema","primaryHuman","humanNormalizeSchema1Literal","humanNormalizeSchema1Typed","humanNormalizeSchema1","humanNormalizeSchema2","refHuman","bestFriend","ref","humanCompositePrimary","key","fields","separator","info","humanCompositePrimarySchemaLiteral","readonlyProps","allOf","anyOf","oneOf","dependencies","someDep","enum","humanCompositePrimarySchemaTyped","refHumanNested","foo","averageSchema","ret","AsyncTestUtil","randomString","var1","var2","deep","deep1","deep2","list","sharding","shards","mode","point","x","y","humanMinimal","humanMinimalBroken","broken","humanWithTimestamp","updatedAt","deletedAt","humanWithTimestampNested","suite","city","zipcode","geo","lat","lng","humanWithTimestampAllIndex","humanWithSimpleAndCompoundIndexes","createdAt","humanWithDeepNestedIndexes","job","manager","fullName","previousJobs","huma
nIdAndAgeIndex","enableKeyCompression","schema","flatClone"],"sources":["../../../../src/plugins/test-utils/schemas.ts"],"sourcesContent":["import AsyncTestUtil from 'async-test-util';\n\nimport {\n SimpleHumanV3DocumentType,\n HumanWithSubOtherDocumentType,\n NestedHumanDocumentType,\n DeepNestedHumanDocumentType,\n EncryptedHumanDocumentType,\n EncryptedObjectHumanDocumentType,\n EncryptedDeepHumanDocumentType,\n CompoundIndexDocumentType,\n CompoundIndexNoStringDocumentType,\n HeroArrayDocumentType,\n SimpleHeroArrayDocumentType,\n RefHumanDocumentType,\n RefHumanNestedDocumentType,\n AverageSchemaDocumentType,\n PointDocumentType,\n HumanWithTimestampDocumentType,\n BigHumanDocumentType,\n NostringIndexDocumentType,\n NoIndexHumanDocumentType,\n HumanWithCompositePrimary,\n HumanWithTimestampNestedDocumentType\n} from './schema-objects.ts';\nimport { overwritable } from '../../overwritable.ts';\nimport { toTypedRxJsonSchema } from '../../rx-schema.ts';\nimport type {\n ExtractDocumentTypeFromTypedRxJsonSchema,\n RxJsonSchema\n} from '../../types/rx-schema';\nimport { flatClone } from '../utils/index.ts';\n\n\nexport const humanSchemaLiteral = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n description: 'describes a human being',\n version: 0,\n keyCompression: false,\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string',\n maxLength: 100\n },\n lastName: {\n type: 'string',\n maxLength: 100\n },\n age: {\n description: 'age in years',\n type: 'integer',\n minimum: 0,\n maximum: 150,\n multipleOf: 1\n }\n },\n required: ['firstName', 'lastName', 'passportId', 'age'],\n indexes: ['firstName']\n} as const);\nconst humanSchemaTyped = toTypedRxJsonSchema(humanSchemaLiteral);\nexport type HumanDocumentType = ExtractDocumentTypeFromTypedRxJsonSchema;\nexport const human: RxJsonSchema = humanSchemaLiteral;\n\n\nexport const humanDefault: RxJsonSchema = 
overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n version: 0,\n description: 'describes a human being',\n keyCompression: false,\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string',\n maxLength: 100\n },\n lastName: {\n type: 'string',\n maxLength: 100\n },\n age: {\n description: 'age in years',\n type: 'integer',\n minimum: 0,\n maximum: 150,\n default: 20\n }\n },\n indexes: [],\n required: ['passportId']\n});\n\nexport const humanFinal: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human schema with age set final',\n version: 0,\n keyCompression: false,\n type: 'object',\n primaryKey: 'passportId',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string'\n },\n lastName: {\n type: 'string'\n },\n age: {\n type: 'integer',\n minimum: 0,\n maximum: 150,\n final: true\n }\n },\n required: [\n 'passportId'\n ]\n});\n\nexport const simpleHuman: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n version: 0,\n keyCompression: false,\n description: 'describes a simple human being',\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n age: {\n type: 'string',\n maxLength: 100\n },\n oneOptional: {\n type: 'string'\n }\n },\n indexes: ['age'],\n required: ['passportId', 'age']\n});\n\nexport const simpleHumanV3: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n version: 3,\n keyCompression: false,\n description: 'describes a simple human being',\n type: 'object',\n primaryKey: 'passportId',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n age: {\n type: 'number',\n minimum: 0,\n maximum: 1000,\n multipleOf: 1\n },\n oneOptional: {\n type: 'string'\n }\n },\n indexes: ['age'],\n required: ['passportId', 'age']\n});\n\nexport const humanAgeIndex: 
RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n version: 0,\n keyCompression: false,\n description: 'describes a human being',\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string'\n },\n lastName: {\n type: 'string'\n },\n age: {\n description: 'Age in years',\n type: 'integer',\n minimum: 0,\n maximum: 150,\n multipleOf: 1\n }\n },\n required: ['firstName', 'lastName', 'age'],\n indexes: ['age']\n});\n\nexport const humanSubIndex: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n version: 0,\n description: 'describes a human being where other.age is index',\n keyCompression: false,\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n other: {\n type: 'object',\n properties: {\n age: {\n description: 'Age in years',\n type: 'integer',\n minimum: 0,\n maximum: 150,\n multipleOf: 1\n }\n }\n }\n },\n required: [\n 'passportId'\n ],\n indexes: ['other.age']\n});\n\n/**\n * each field is an index,\n * use this to slow down inserts in tests\n */\nexport const humanWithAllIndex: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n description: 'describes a human being',\n version: 0,\n keyCompression: false,\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string',\n maxLength: 100\n },\n lastName: {\n type: 'string',\n maxLength: 100\n },\n age: {\n description: 'age in years',\n type: 'integer',\n minimum: 0,\n maximum: 150,\n multipleOf: 1\n }\n },\n indexes: ['firstName', 'lastName', 'age'],\n required: ['firstName', 'lastName']\n});\n\nexport const nestedHuman: RxJsonSchema = {\n title: 'human nested',\n version: 0,\n description: 'describes a human being with a nested field',\n keyCompression: false,\n primaryKey: 
'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string',\n maxLength: 100\n },\n mainSkill: {\n type: 'object',\n properties: {\n name: {\n type: 'string',\n maxLength: 10\n },\n level: {\n type: 'number',\n minimum: 0,\n maximum: 10,\n multipleOf: 1\n }\n },\n required: ['name', 'level'],\n additionalProperties: false\n }\n },\n required: ['firstName'],\n indexes: []\n};\n\nexport const deepNestedHuman: RxJsonSchema = {\n title: 'deep human nested',\n version: 0,\n keyCompression: false,\n description: 'describes a human being with a nested field',\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n mainSkill: {\n type: 'object',\n properties: {\n name: {\n type: 'string'\n },\n attack: {\n type: 'object',\n properties: {\n good: {\n type: 'boolean'\n },\n count: {\n type: 'number'\n }\n }\n }\n },\n required: ['name']\n }\n },\n indexes: [],\n required: ['mainSkill']\n};\n\nexport const noIndexHuman: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n version: 0,\n description: 'this schema has no index',\n keyCompression: false,\n primaryKey: 'firstName',\n type: 'object',\n properties: {\n firstName: {\n type: 'string',\n maxLength: 100\n },\n lastName: {\n type: 'string'\n }\n },\n required: ['lastName']\n});\n\nexport const noStringIndex: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n description: 'the index has no type:string',\n version: 0,\n keyCompression: false,\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'object',\n maxLength: 100\n },\n firstName: {\n type: 'string'\n }\n },\n required: ['firstName', 'passportId'],\n indexes: []\n});\n\n\nexport const bigHuman: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n version: 0,\n description: 'describes a human being with 2 indexes',\n keyCompression: false,\n 
primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n dnaHash: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string',\n maxLength: 100\n },\n lastName: {\n type: 'string'\n },\n age: {\n description: 'Age in years',\n type: 'integer',\n minimum: 0\n }\n },\n required: ['firstName', 'lastName'],\n indexes: ['firstName', 'dnaHash']\n});\n\nexport const encryptedHuman: RxJsonSchema = {\n title: 'human encrypted',\n version: 0,\n description: 'uses an encrypted field',\n primaryKey: 'passportId',\n type: 'object',\n keyCompression: false,\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string'\n },\n secret: {\n type: 'string'\n }\n },\n indexes: [],\n required: ['firstName', 'secret'],\n encrypted: ['secret']\n};\n\nexport const encryptedObjectHuman: RxJsonSchema = {\n title: 'human encrypted',\n version: 0,\n keyCompression: false,\n description: 'uses an encrypted field',\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string'\n },\n secret: {\n type: 'object',\n properties: {\n name: {\n type: 'string'\n },\n subname: {\n type: 'string'\n }\n }\n }\n },\n indexes: [],\n required: ['firstName', 'secret'],\n encrypted: ['secret']\n};\n\nexport const encryptedDeepHuman: RxJsonSchema = {\n title: 'human encrypted',\n version: 0,\n keyCompression: false,\n description: 'uses an encrypted field',\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string'\n },\n firstLevelPassword: {\n type: 'string',\n },\n secretData: {\n type: 'object',\n properties: {\n pw: {\n type: 'string'\n }\n }\n },\n deepSecret: {\n type: 'object',\n properties: {\n darkhole: {\n type: 'object',\n properties: {\n pw: {\n type: 'string'\n }\n }\n }\n }\n },\n nestedSecret: {\n type: 
'object',\n properties: {\n darkhole: {\n type: 'object',\n properties: {\n pw: {\n type: 'string'\n }\n }\n }\n }\n }\n\n },\n indexes: [],\n required: ['firstName', 'secretData'],\n encrypted: [\n 'firstLevelPassword',\n 'secretData',\n 'deepSecret.darkhole.pw',\n 'nestedSecret.darkhole.pw'\n ]\n};\n\nexport const notExistingIndex: RxJsonSchema<{ passportId: string; address: { street: string; }; }> = {\n title: 'index',\n version: 0,\n description: 'this schema has a specified index which does not exists',\n primaryKey: 'passportId',\n type: 'object',\n keyCompression: false,\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n address: {\n type: 'object',\n properties: {\n street: { type: 'string' }\n }\n }\n },\n required: [\n 'passportId'\n ],\n indexes: ['address.apartment']\n};\n\nexport const compoundIndex: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'compound index',\n version: 0,\n description: 'this schema has a compoundIndex',\n primaryKey: 'passportId',\n type: 'object',\n keyCompression: false,\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n passportCountry: {\n type: 'string',\n maxLength: 100\n },\n age: {\n type: 'integer',\n minimum: 0,\n maximum: 150,\n multipleOf: 1\n }\n },\n required: [\n 'passportId'\n ],\n indexes: [\n ['age', 'passportCountry']\n ]\n});\n\nexport const compoundIndexNoString: RxJsonSchema = {\n title: 'compound index',\n version: 0,\n description: 'this schema has a compoundIndex',\n primaryKey: 'passportId',\n keyCompression: false,\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n passportCountry: {\n type: 'object'\n },\n age: {\n type: 'integer'\n }\n },\n indexes: [\n [10, 'passportCountry']\n ]\n} as RxJsonSchema;\n\nexport const empty: RxJsonSchema = {\n title: 'empty schema',\n version: 0,\n type: 'object',\n primaryKey: 'id',\n properties: {\n id: {\n type: 'string',\n maxLength: 100\n }\n },\n required: 
['id']\n};\n\nexport const heroArray: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'hero schema',\n version: 0,\n keyCompression: false,\n description: 'describes a hero with an array-field',\n primaryKey: 'name',\n type: 'object',\n properties: {\n name: {\n type: 'string',\n maxLength: 100\n },\n skills: {\n type: 'array',\n maxItems: 5,\n uniqueItems: true,\n items: {\n type: 'object',\n properties: {\n name: {\n type: 'string'\n },\n damage: {\n type: 'number'\n }\n }\n }\n }\n },\n required: [\n 'name'\n ]\n});\n\nexport const simpleArrayHero: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'hero schema',\n version: 0,\n description: 'describes a hero with a string-array-field',\n keyCompression: false,\n primaryKey: 'name',\n type: 'object',\n properties: {\n name: {\n type: 'string',\n maxLength: 100\n },\n skills: {\n type: 'array',\n maxItems: 5,\n uniqueItems: true,\n items: {\n type: 'string',\n }\n }\n },\n required: [\n 'name'\n ]\n});\n\nexport const primaryHumanLiteral = overwritable.deepFreezeWhenDevMode({\n title: 'human schema with primary',\n version: 0,\n description: 'describes a human being with passportID as primary',\n keyCompression: false,\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n minLength: 4,\n maxLength: 100\n },\n firstName: {\n type: 'string',\n maxLength: 100\n },\n lastName: {\n type: 'string',\n maxLength: 500\n },\n age: {\n type: 'integer',\n minimum: 0,\n maximum: 150,\n multipleOf: 1\n }\n },\n required: ['passportId', 'firstName', 'lastName']\n} as const);\nconst primaryHumanTypedSchema = toTypedRxJsonSchema(primaryHumanLiteral);\nexport type PrimaryHumanDocType = ExtractDocumentTypeFromTypedRxJsonSchema;\nexport const primaryHuman: RxJsonSchema = primaryHumanLiteral;\n\nexport const humanNormalizeSchema1Literal = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n version: 0,\n keyCompression: false,\n description: 'describes a 
human being',\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n minLength: 4,\n maxLength: 100\n },\n age: {\n description: 'age in years',\n type: 'integer',\n minimum: 0,\n maximum: 150,\n multipleOf: 1\n }\n },\n required: ['age', 'passportId']\n} as const);\nconst humanNormalizeSchema1Typed = toTypedRxJsonSchema(humanNormalizeSchema1Literal);\nexport type AgeHumanDocumentType = ExtractDocumentTypeFromTypedRxJsonSchema;\nexport const humanNormalizeSchema1: RxJsonSchema = humanNormalizeSchema1Literal;\n\nexport const humanNormalizeSchema2: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n version: 0,\n keyCompression: false,\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n minLength: 4,\n maxLength: 100\n },\n age: {\n minimum: 0,\n type: 'integer',\n description: 'age in years',\n maximum: 150,\n multipleOf: 1\n }\n },\n description: 'describes a human being',\n required: ['age', 'passportId']\n});\n\nexport const refHuman: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human related to other human',\n version: 0,\n keyCompression: false,\n primaryKey: 'name',\n type: 'object',\n properties: {\n name: {\n type: 'string',\n maxLength: 100\n },\n bestFriend: {\n ref: 'human',\n type: 'string'\n }\n },\n required: [\n 'name'\n ]\n});\n\nexport const humanCompositePrimary: RxJsonSchema = {\n title: 'human schema',\n description: 'describes a human being',\n version: 0,\n keyCompression: false,\n primaryKey: {\n key: 'id',\n fields: [\n 'firstName',\n 'info.age'\n ],\n separator: '|'\n },\n type: 'object',\n properties: {\n id: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string',\n maxLength: 100\n },\n lastName: {\n type: 'string'\n },\n info: {\n type: 'object',\n properties: {\n age: {\n description: 'age in years',\n type: 'integer',\n minimum: 0,\n maximum: 150\n }\n },\n required: ['age']\n }\n },\n 
required: [\n 'id',\n 'firstName',\n 'lastName',\n 'info'\n ],\n indexes: ['firstName']\n};\n\nexport const humanCompositePrimarySchemaLiteral = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n description: 'describes a human being',\n version: 0,\n keyCompression: false,\n primaryKey: {\n key: 'id',\n fields: [\n 'firstName',\n 'info.age'\n ],\n separator: '|'\n },\n encrypted: [],\n type: 'object',\n properties: {\n id: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string',\n maxLength: 100\n },\n lastName: {\n type: 'string'\n },\n info: {\n type: 'object',\n properties: {\n age: {\n description: 'age in years',\n type: 'integer',\n minimum: 0,\n maximum: 150\n }\n },\n required: ['age']\n },\n readonlyProps: {\n allOf: [],\n anyOf: [],\n oneOf: [],\n type: [],\n dependencies: {\n someDep: ['asd'],\n },\n items: [],\n required: [],\n enum: [],\n }\n },\n required: [\n 'id',\n 'firstName',\n 'lastName',\n 'info'\n ],\n indexes: ['firstName']\n} as const);\n\nconst humanCompositePrimarySchemaTyped = toTypedRxJsonSchema(humanCompositePrimarySchemaLiteral);\nexport type HumanCompositePrimaryDocType = ExtractDocumentTypeFromTypedRxJsonSchema;\n\nexport const refHumanNested: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human related to other human',\n version: 0,\n keyCompression: false,\n primaryKey: 'name',\n type: 'object',\n properties: {\n name: {\n type: 'string',\n maxLength: 100\n },\n foo: {\n type: 'object',\n properties: {\n bestFriend: {\n ref: 'human',\n type: 'string'\n }\n }\n }\n },\n required: [\n 'name'\n ]\n});\n\n/**\n * an average schema used in performance-tests\n */\nexport function averageSchema(): RxJsonSchema {\n const ret: RxJsonSchema = {\n title: 'averageSchema_' + AsyncTestUtil.randomString(5), // randomisation used so hash differs\n version: 0,\n primaryKey: 'id',\n type: 'object',\n keyCompression: false,\n properties: {\n id: {\n type: 'string',\n maxLength: 12\n },\n var1: {\n type: 
'string',\n maxLength: 12\n },\n var2: {\n type: 'number',\n minimum: 0,\n maximum: 50000,\n multipleOf: 1\n },\n deep: {\n type: 'object',\n properties: {\n deep1: {\n type: 'string',\n maxLength: 10\n },\n deep2: {\n type: 'string',\n maxLength: 10\n }\n }\n },\n list: {\n type: 'array',\n items: {\n type: 'object',\n properties: {\n deep1: {\n type: 'string'\n },\n deep2: {\n type: 'string'\n }\n }\n }\n }\n },\n required: [\n 'id'\n ],\n indexes: [\n 'var1',\n 'var2',\n 'deep.deep1',\n // one compound index\n [\n 'var2',\n 'var1'\n ]\n ],\n sharding: {\n shards: 6,\n mode: 'collection'\n }\n };\n return ret;\n}\n\nexport const point: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'point schema',\n version: 0,\n description: 'describes coordinates in 2d space',\n primaryKey: 'id',\n type: 'object',\n properties: {\n id: {\n type: 'string',\n maxLength: 100\n },\n x: {\n type: 'number'\n },\n y: {\n type: 'number'\n }\n },\n required: ['x', 'y']\n});\n\nexport const humanMinimal: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n description: 'describes a human being',\n version: 0,\n keyCompression: false,\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n age: {\n type: 'integer'\n },\n oneOptional: {\n type: 'string'\n }\n },\n indexes: [],\n required: ['passportId', 'age']\n});\n\nexport const humanMinimalBroken: RxJsonSchema<{ passportId: string; broken: number; }> = {\n title: 'human schema',\n description: 'describes a human being',\n version: 0,\n keyCompression: false,\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n broken: {\n type: 'integer'\n }\n },\n indexes: [],\n required: ['passportId', 'broken']\n} as unknown as RxJsonSchema;\n\n\n/**\n * used in the graphql-test\n * contains timestamp\n */\nexport const humanWithTimestamp: RxJsonSchema = 
overwritable.deepFreezeWhenDevMode({\n version: 0,\n type: 'object',\n primaryKey: 'id',\n properties: {\n id: {\n type: 'string',\n maxLength: 100\n },\n name: {\n type: 'string',\n maxLength: 1000\n },\n age: {\n type: 'number'\n },\n updatedAt: {\n type: 'number',\n minimum: 0,\n maximum: 10000000000000000,\n multipleOf: 1\n\n },\n deletedAt: {\n type: 'number'\n }\n },\n indexes: ['updatedAt'],\n required: ['id', 'name', 'age', 'updatedAt']\n});\n\nexport const humanWithTimestampNested: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n version: 0,\n type: 'object',\n primaryKey: 'id',\n properties: {\n id: {\n type: 'string',\n maxLength: 100\n },\n name: {\n type: 'string',\n maxLength: 1000\n },\n age: {\n type: 'number'\n },\n updatedAt: {\n type: 'number',\n minimum: 0,\n maximum: 10000000000000000,\n multipleOf: 1\n\n },\n deletedAt: {\n type: 'number'\n },\n address: {\n type: 'object',\n properties: {\n street: {\n type: 'string',\n },\n suite: {\n type: 'string',\n },\n city: {\n type: 'string',\n },\n zipcode: {\n type: 'string',\n },\n geo: {\n type: 'object',\n properties: {\n lat: {\n type: 'string',\n },\n lng: {\n type: 'string',\n },\n },\n },\n },\n },\n },\n indexes: ['updatedAt'],\n required: ['id', 'name', 'age', 'updatedAt']\n});\n\n\n/**\n * each field is an index,\n * use this to slow down inserts in tests\n */\nexport const humanWithTimestampAllIndex: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n version: 0,\n type: 'object',\n primaryKey: 'id',\n properties: {\n id: {\n type: 'string',\n maxLength: 100\n },\n name: {\n type: 'string',\n maxLength: 100\n },\n age: {\n type: 'number',\n minimum: 0,\n maximum: 1500,\n multipleOf: 1\n },\n updatedAt: {\n type: 'number',\n minimum: 0,\n maximum: 10000000000000000,\n multipleOf: 1\n },\n deletedAt: {\n type: 'number'\n }\n },\n indexes: ['name', 'age', 'updatedAt'],\n required: ['id', 'name', 'age', 'updatedAt']\n});\n\nexport const humanWithSimpleAndCompoundIndexes: 
RxJsonSchema<{\n id: string;\n name: string;\n age: number;\n createdAt: number;\n updatedAt: number;\n}> = overwritable.deepFreezeWhenDevMode({\n version: 0,\n primaryKey: 'id',\n type: 'object',\n properties: {\n id: {\n type: 'string',\n maxLength: 100\n },\n name: {\n type: 'string',\n maxLength: 100\n },\n age: {\n type: 'number',\n minimum: 0,\n maximum: 1500,\n multipleOf: 1\n },\n createdAt: {\n type: 'number',\n minimum: 0,\n maximum: 10000000000000000,\n multipleOf: 1\n },\n updatedAt: {\n type: 'number',\n minimum: 0,\n maximum: 10000000000000000,\n multipleOf: 1\n }\n },\n indexes: [\n ['name', 'id'],\n ['age', 'id'],\n ['createdAt', 'updatedAt', 'id']\n ],\n required: ['id', 'name', 'age', 'updatedAt']\n});\n\nexport const humanWithDeepNestedIndexes: RxJsonSchema<{ id: string; name: string; job: any; }> = overwritable.deepFreezeWhenDevMode({\n version: 0,\n primaryKey: 'id',\n type: 'object',\n properties: {\n id: {\n type: 'string',\n maxLength: 100\n },\n name: {\n type: 'string',\n maxLength: 100\n },\n job: {\n type: 'object',\n properties: {\n name: {\n type: 'string',\n maxLength: 100\n },\n manager: {\n type: 'object',\n properties: {\n fullName: {\n type: 'string',\n maxLength: 100\n },\n previousJobs: {\n type: 'array',\n items: {\n type: 'object',\n properties: {\n name: {\n type: 'string',\n maxLength: 100\n }\n }\n }\n }\n }\n }\n }\n }\n },\n required: [\n 'id'\n ],\n indexes: [\n 'name',\n 'job.name',\n 'job.manager.fullName'\n ]\n});\n\nexport const humanIdAndAgeIndex: RxJsonSchema<{ id: string; name: string; age: number; }> = overwritable.deepFreezeWhenDevMode({\n version: 0,\n description: 'uses a compound index with id as lowest level',\n primaryKey: 'id',\n type: 'object',\n properties: {\n id: {\n type: 'string',\n maxLength: 100\n },\n name: {\n type: 'string'\n },\n age: {\n description: 'Age in years',\n type: 'integer',\n minimum: 0,\n maximum: 150,\n multipleOf: 1\n }\n },\n required: ['id', 'name', 'age'],\n indexes: [\n 
['age', 'id']\n ]\n});\n\n\nexport function enableKeyCompression(\n schema: RxJsonSchema\n): RxJsonSchema {\n const ret = flatClone(schema);\n ret.keyCompression = true;\n return ret;\n}\n"],"mappings":";;;;;;;;;;AAAA,IAAAA,cAAA,GAAAC,sBAAA,CAAAC,OAAA;AAyBA,IAAAC,aAAA,GAAAD,OAAA;AACA,IAAAE,SAAA,GAAAF,OAAA;AAKA,IAAAG,MAAA,GAAAH,OAAA;AAGO,IAAMI,kBAAkB,GAAAC,OAAA,CAAAD,kBAAA,GAAGE,0BAAY,CAACC,qBAAqB,CAAC;EACjEC,KAAK,EAAE,cAAc;EACrBC,WAAW,EAAE,yBAAyB;EACtCC,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDE,QAAQ,EAAE;MACNL,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDV,WAAW,EAAE,cAAc;MAC3BI,IAAI,EAAE,SAAS;MACfO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,GAAG;MACZC,UAAU,EAAE;IAChB;EACJ,CAAC;EACDC,QAAQ,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,EAAE,KAAK,CAAC;EACxDC,OAAO,EAAE,CAAC,WAAW;AACzB,CAAU,CAAC;AACX,IAAMC,gBAAgB,GAAG,IAAAC,6BAAmB,EAACtB,kBAAkB,CAAC;AAEzD,IAAMuB,KAAsC,GAAAtB,OAAA,CAAAsB,KAAA,GAAGvB,kBAAkB;AAGjE,IAAMwB,YAA6C,GAAAvB,OAAA,CAAAuB,YAAA,GAAGtB,0BAAY,CAACC,qBAAqB,CAAC;EAC5FC,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,yBAAyB;EACtCE,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDE,QAAQ,EAAE;MACNL,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDV,WAAW,EAAE,cAAc;MAC3BI,IAAI,EAAE,SAAS;MACfO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,GAAG;MACZQ,OAAO,EAAE;IACb;EACJ,CAAC;EACDL,OAAO,EAAE,EAAE;EACXD,QAAQ,EAAE,CAAC,YAAY;AAC3B,CAAC,CAAC;AAEK,IAAMO,UAA2C,GAAAzB,OAAA,CAAAyB,UAAA,GAAGxB,0BAAY,CAACC,qBAAqB,CAAC;EAC1FC,KAAK,EAAE,iCAAiC;EACxCE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBE,IAAI,EAAE,QAAQ;EACdD,UAAU,EAAE,YAAY;EACxBE,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE;IACV,CA
AC;IACDK,QAAQ,EAAE;MACNL,IAAI,EAAE;IACV,CAAC;IACDM,GAAG,EAAE;MACDN,IAAI,EAAE,SAAS;MACfO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,GAAG;MACZU,KAAK,EAAE;IACX;EACJ,CAAC;EACDR,QAAQ,EAAE,CACN,YAAY;AAEpB,CAAC,CAAC;AAEK,IAAMS,WAAoD,GAAA3B,OAAA,CAAA2B,WAAA,GAAG1B,0BAAY,CAACC,qBAAqB,CAAC;EACnGC,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBF,WAAW,EAAE,gCAAgC;EAC7CG,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDN,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDiB,WAAW,EAAE;MACTpB,IAAI,EAAE;IACV;EACJ,CAAC;EACDW,OAAO,EAAE,CAAC,KAAK,CAAC;EAChBD,QAAQ,EAAE,CAAC,YAAY,EAAE,KAAK;AAClC,CAAC,CAAC;AAEK,IAAMW,aAAsD,GAAA7B,OAAA,CAAA6B,aAAA,GAAG5B,0BAAY,CAACC,qBAAqB,CAAC;EACrGC,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBF,WAAW,EAAE,gCAAgC;EAC7CI,IAAI,EAAE,QAAQ;EACdD,UAAU,EAAE,YAAY;EACxBE,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDN,IAAI,EAAE,QAAQ;MACdO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,IAAI;MACbC,UAAU,EAAE;IAChB,CAAC;IACDW,WAAW,EAAE;MACTpB,IAAI,EAAE;IACV;EACJ,CAAC;EACDW,OAAO,EAAE,CAAC,KAAK,CAAC;EAChBD,QAAQ,EAAE,CAAC,YAAY,EAAE,KAAK;AAClC,CAAC,CAAC;AAEK,IAAMY,aAA8C,GAAA9B,OAAA,CAAA8B,aAAA,GAAG7B,0BAAY,CAACC,qBAAqB,CAAC;EAC7FC,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBF,WAAW,EAAE,yBAAyB;EACtCG,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE;IACV,CAAC;IACDK,QAAQ,EAAE;MACNL,IAAI,EAAE;IACV,CAAC;IACDM,GAAG,EAAE;MACDV,WAAW,EAAE,cAAc;MAC3BI,IAAI,EAAE,SAAS;MACfO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,GAAG;MACZC,UAAU,EAAE;IAChB;EACJ,CAAC;EACDC,QAAQ,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,KAAK,CAAC;EAC1CC,OAAO,EAAE,CAAC,KAAK;AACnB,CAAC,CAAC;AAEK,IAAMY,aAA0D,GAAA/B,OAAA,CAAA+B,aAAA,GAAG9B,0BAAY,CAACC,qBAAqB,CAAC;EACzGC,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,kDAAkD;EAC/DE,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;E
ACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDqB,KAAK,EAAE;MACHxB,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACRK,GAAG,EAAE;UACDV,WAAW,EAAE,cAAc;UAC3BI,IAAI,EAAE,SAAS;UACfO,OAAO,EAAE,CAAC;UACVC,OAAO,EAAE,GAAG;UACZC,UAAU,EAAE;QAChB;MACJ;IACJ;EACJ,CAAC;EACDC,QAAQ,EAAE,CACN,YAAY,CACf;EACDC,OAAO,EAAE,CAAC,WAAW;AACzB,CAAC,CAAC;;AAEF;AACA;AACA;AACA;AACO,IAAMc,iBAAkD,GAAAjC,OAAA,CAAAiC,iBAAA,GAAGhC,0BAAY,CAACC,qBAAqB,CAAC;EACjGC,KAAK,EAAE,cAAc;EACrBC,WAAW,EAAE,yBAAyB;EACtCC,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDE,QAAQ,EAAE;MACNL,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDV,WAAW,EAAE,cAAc;MAC3BI,IAAI,EAAE,SAAS;MACfO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,GAAG;MACZC,UAAU,EAAE;IAChB;EACJ,CAAC;EACDE,OAAO,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,KAAK,CAAC;EACzCD,QAAQ,EAAE,CAAC,WAAW,EAAE,UAAU;AACtC,CAAC,CAAC;AAEK,IAAMgB,WAAkD,GAAAlC,OAAA,CAAAkC,WAAA,GAAG;EAC9D/B,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,6CAA6C;EAC1DE,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDwB,SAAS,EAAE;MACP3B,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACR2B,IAAI,EAAE;UACF5B,IAAI,EAAE,QAAQ;UACdG,SAAS,EAAE;QACf,CAAC;QACD0B,KAAK,EAAE;UACH7B,IAAI,EAAE,QAAQ;UACdO,OAAO,EAAE,CAAC;UACVC,OAAO,EAAE,EAAE;UACXC,UAAU,EAAE;QAChB;MACJ,CAAC;MACDC,QAAQ,EAAE,CAAC,MAAM,EAAE,OAAO,CAAC;MAC3BoB,oBAAoB,EAAE;IAC1B;EACJ,CAAC;EACDpB,QAAQ,EAAE,CAAC,WAAW,CAAC;EACvBC,OAAO,EAAE;AACb,CAAC;AAEM,IAAMoB,eAA0D,GAAAvC,OAAA,CAAAuC,eAAA,GAAG;EACtEpC,KAAK,EAAE,mBAAmB;EAC1BE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBF,WAAW,EAAE,6CAA6C;EAC1DG,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDwB,SAAS,EAAE;MACP3B,IAA
I,EAAE,QAAQ;MACdC,UAAU,EAAE;QACR2B,IAAI,EAAE;UACF5B,IAAI,EAAE;QACV,CAAC;QACDgC,MAAM,EAAE;UACJhC,IAAI,EAAE,QAAQ;UACdC,UAAU,EAAE;YACRgC,IAAI,EAAE;cACFjC,IAAI,EAAE;YACV,CAAC;YACDkC,KAAK,EAAE;cACHlC,IAAI,EAAE;YACV;UACJ;QACJ;MACJ,CAAC;MACDU,QAAQ,EAAE,CAAC,MAAM;IACrB;EACJ,CAAC;EACDC,OAAO,EAAE,EAAE;EACXD,QAAQ,EAAE,CAAC,WAAW;AAC1B,CAAC;AAEM,IAAMyB,YAAoD,GAAA3C,OAAA,CAAA2C,YAAA,GAAG1C,0BAAY,CAACC,qBAAqB,CAAC;EACnGC,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,0BAA0B;EACvCE,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,WAAW;EACvBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRG,SAAS,EAAE;MACPJ,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDE,QAAQ,EAAE;MACNL,IAAI,EAAE;IACV;EACJ,CAAC;EACDU,QAAQ,EAAE,CAAC,UAAU;AACzB,CAAC,CAAC;AAEK,IAAM0B,aAAsD,GAAA5C,OAAA,CAAA4C,aAAA,GAAG3C,0BAAY,CAACC,qBAAqB,CAAC;EACrGE,WAAW,EAAE,8BAA8B;EAC3CC,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE;IACV;EACJ,CAAC;EACDU,QAAQ,EAAE,CAAC,WAAW,EAAE,YAAY,CAAC;EACrCC,OAAO,EAAE;AACb,CAAC,CAAC;AAGK,IAAM0B,QAA4C,GAAA7C,OAAA,CAAA6C,QAAA,GAAG5C,0BAAY,CAACC,qBAAqB,CAAC;EAC3FC,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,wCAAwC;EACrDE,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDmC,OAAO,EAAE;MACLtC,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDE,QAAQ,EAAE;MACNL,IAAI,EAAE;IACV,CAAC;IACDM,GAAG,EAAE;MACDV,WAAW,EAAE,cAAc;MAC3BI,IAAI,EAAE,SAAS;MACfO,OAAO,EAAE;IACb;EACJ,CAAC;EACDG,QAAQ,EAAE,CAAC,WAAW,EAAE,UAAU,CAAC;EACnCC,OAAO,EAAE,CAAC,WAAW,EAAE,SAAS;AACpC,CAAC,CAAC;AAEK,IAAM4B,cAAwD,GAAA/C,OAAA,CAAA+C,cAAA,GAAG;EACpE5C,KAAK,EAAE,iBAAiB;EACxBE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,yBAAyB;EACtCG,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdF,cAAc,EAAE,KAAK;EACrBG,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,E
AAE;MACPJ,IAAI,EAAE;IACV,CAAC;IACDwC,MAAM,EAAE;MACJxC,IAAI,EAAE;IACV;EACJ,CAAC;EACDW,OAAO,EAAE,EAAE;EACXD,QAAQ,EAAE,CAAC,WAAW,EAAE,QAAQ,CAAC;EACjC+B,SAAS,EAAE,CAAC,QAAQ;AACxB,CAAC;AAEM,IAAMC,oBAAoE,GAAAlD,OAAA,CAAAkD,oBAAA,GAAG;EAChF/C,KAAK,EAAE,iBAAiB;EACxBE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBF,WAAW,EAAE,yBAAyB;EACtCG,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE;IACV,CAAC;IACDwC,MAAM,EAAE;MACJxC,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACR2B,IAAI,EAAE;UACF5B,IAAI,EAAE;QACV,CAAC;QACD2C,OAAO,EAAE;UACL3C,IAAI,EAAE;QACV;MACJ;IACJ;EACJ,CAAC;EACDW,OAAO,EAAE,EAAE;EACXD,QAAQ,EAAE,CAAC,WAAW,EAAE,QAAQ,CAAC;EACjC+B,SAAS,EAAE,CAAC,QAAQ;AACxB,CAAC;AAEM,IAAMG,kBAAgE,GAAApD,OAAA,CAAAoD,kBAAA,GAAG;EAC5EjD,KAAK,EAAE,iBAAiB;EACxBE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBF,WAAW,EAAE,yBAAyB;EACtCG,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE;IACV,CAAC;IACD6C,kBAAkB,EAAE;MAChB7C,IAAI,EAAE;IACV,CAAC;IACD8C,UAAU,EAAE;MACR9C,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACR8C,EAAE,EAAE;UACA/C,IAAI,EAAE;QACV;MACJ;IACJ,CAAC;IACDgD,UAAU,EAAE;MACRhD,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACRgD,QAAQ,EAAE;UACNjD,IAAI,EAAE,QAAQ;UACdC,UAAU,EAAE;YACR8C,EAAE,EAAE;cACA/C,IAAI,EAAE;YACV;UACJ;QACJ;MACJ;IACJ,CAAC;IACDkD,YAAY,EAAE;MACVlD,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACRgD,QAAQ,EAAE;UACNjD,IAAI,EAAE,QAAQ;UACdC,UAAU,EAAE;YACR8C,EAAE,EAAE;cACA/C,IAAI,EAAE;YACV;UACJ;QACJ;MACJ;IACJ;EAEJ,CAAC;EACDW,OAAO,EAAE,EAAE;EACXD,QAAQ,EAAE,CAAC,WAAW,EAAE,YAAY,CAAC;EACrC+B,SAAS,EAAE,CACP,oBAAoB,EACpB,YAAY,EACZ,wBAAwB,EACxB,0BAA0B;AAElC,CAAC;AAEM,IAAMU,gBAAqF,GAAA3D,OAAA,CAAA2D,gBAAA,GAAG;EACjGxD,KAAK,EAAE,OAAO;EACdE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,yDAAyD;EACtEG,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdF,cAAc,EAAE,KAAK;EACrBG,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDiD,OAAO,EAAE;MACLpD,IAAI,EAAE,QAAQ;MACd
C,UAAU,EAAE;QACRoD,MAAM,EAAE;UAAErD,IAAI,EAAE;QAAS;MAC7B;IACJ;EACJ,CAAC;EACDU,QAAQ,EAAE,CACN,YAAY,CACf;EACDC,OAAO,EAAE,CAAC,mBAAmB;AACjC,CAAC;AAEM,IAAM2C,aAAsD,GAAA9D,OAAA,CAAA8D,aAAA,GAAG7D,0BAAY,CAACC,qBAAqB,CAAC;EACrGC,KAAK,EAAE,gBAAgB;EACvBE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,iCAAiC;EAC9CG,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdF,cAAc,EAAE,KAAK;EACrBG,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDoD,eAAe,EAAE;MACbvD,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDN,IAAI,EAAE,SAAS;MACfO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,GAAG;MACZC,UAAU,EAAE;IAChB;EACJ,CAAC;EACDC,QAAQ,EAAE,CACN,YAAY,CACf;EACDC,OAAO,EAAE,CACL,CAAC,KAAK,EAAE,iBAAiB,CAAC;AAElC,CAAC,CAAC;AAEK,IAAM6C,qBAAsE,GAAAhE,OAAA,CAAAgE,qBAAA,GAAG;EAClF7D,KAAK,EAAE,gBAAgB;EACvBE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,iCAAiC;EAC9CG,UAAU,EAAE,YAAY;EACxBD,cAAc,EAAE,KAAK;EACrBE,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDoD,eAAe,EAAE;MACbvD,IAAI,EAAE;IACV,CAAC;IACDM,GAAG,EAAE;MACDN,IAAI,EAAE;IACV;EACJ,CAAC;EACDW,OAAO,EAAE,CACL,CAAC,EAAE,EAAE,iBAAiB,CAAC;AAE/B,CAAoD;AAE7C,IAAM8C,KAAwB,GAAAjE,OAAA,CAAAiE,KAAA,GAAG;EACpC9D,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVG,IAAI,EAAE,QAAQ;EACdD,UAAU,EAAE,IAAI;EAChBE,UAAU,EAAE;IACRyD,EAAE,EAAE;MACA1D,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf;EACJ,CAAC;EACDO,QAAQ,EAAE,CAAC,IAAI;AACnB,CAAC;AAEM,IAAMiD,SAA8C,GAAAnE,OAAA,CAAAmE,SAAA,GAAGlE,0BAAY,CAACC,qBAAqB,CAAC;EAC7FC,KAAK,EAAE,aAAa;EACpBE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBF,WAAW,EAAE,sCAAsC;EACnDG,UAAU,EAAE,MAAM;EAClBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACR2B,IAAI,EAAE;MACF5B,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDyD,MAAM,EAAE;MACJ5D,IAAI,EAAE,OAAO;MACb6D,QAAQ,EAAE,CAAC;MACXC,WAAW,EAAE,IAAI;MACjBC,KAAK,EAAE;QACH/D,IAAI,EAAE,QAAQ;QACdC,UAAU,EAAE;UACR2B,IAAI,EAAE;YACF5B,IAAI,EAAE;UACV,CAAC;UACDgE,MAAM,EAAE;YACJhE,IAAI,EAAE;UACV;QACJ;MACJ;IACJ;EACJ,CAAC;EACDU,QAAQ,EAAE,CACN,MAAM;AAEd,CAAC,CAAC;AAEK,IAAMuD,eAA0D,GAAAzE,OAAA,CAAAyE,eAAA,GAAGxE,0BAA
Y,CAACC,qBAAqB,CAAC;EACzGC,KAAK,EAAE,aAAa;EACpBE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,4CAA4C;EACzDE,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,MAAM;EAClBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACR2B,IAAI,EAAE;MACF5B,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDyD,MAAM,EAAE;MACJ5D,IAAI,EAAE,OAAO;MACb6D,QAAQ,EAAE,CAAC;MACXC,WAAW,EAAE,IAAI;MACjBC,KAAK,EAAE;QACH/D,IAAI,EAAE;MACV;IACJ;EACJ,CAAC;EACDU,QAAQ,EAAE,CACN,MAAM;AAEd,CAAC,CAAC;AAEK,IAAMwD,mBAAmB,GAAA1E,OAAA,CAAA0E,mBAAA,GAAGzE,0BAAY,CAACC,qBAAqB,CAAC;EAClEC,KAAK,EAAE,2BAA2B;EAClCE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,oDAAoD;EACjEE,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdmE,SAAS,EAAE,CAAC;MACZhE,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDE,QAAQ,EAAE;MACNL,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDN,IAAI,EAAE,SAAS;MACfO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,GAAG;MACZC,UAAU,EAAE;IAChB;EACJ,CAAC;EACDC,QAAQ,EAAE,CAAC,YAAY,EAAE,WAAW,EAAE,UAAU;AACpD,CAAU,CAAC;AACX,IAAM0D,uBAAuB,GAAG,IAAAvD,6BAAmB,EAACqD,mBAAmB,CAAC;AAEjE,IAAMG,YAA+C,GAAA7E,OAAA,CAAA6E,YAAA,GAAGH,mBAAmB;AAE3E,IAAMI,4BAA4B,GAAA9E,OAAA,CAAA8E,4BAAA,GAAG7E,0BAAY,CAACC,qBAAqB,CAAC;EAC3EC,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBF,WAAW,EAAE,yBAAyB;EACtCG,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdmE,SAAS,EAAE,CAAC;MACZhE,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDV,WAAW,EAAE,cAAc;MAC3BI,IAAI,EAAE,SAAS;MACfO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,GAAG;MACZC,UAAU,EAAE;IAChB;EACJ,CAAC;EACDC,QAAQ,EAAE,CAAC,KAAK,EAAE,YAAY;AAClC,CAAU,CAAC;AACX,IAAM6D,0BAA0B,GAAG,IAAA1D,6BAAmB,EAACyD,4BAA4B,CAAC;AAE7E,IAAME,qBAAyD,GAAAhF,OAAA,CAAAgF,qBAAA,GAAGF,4BAA4B;AAE9F,IAAMG,qBAAyD,GAAAjF,OAAA,CAAAiF,qBAAA,GAAGhF,0BAAY,CAACC,qBAAqB,CAAC;EACxGC,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdmE,SAAS,EAAE,CAA
C;MACZhE,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDC,OAAO,EAAE,CAAC;MACVP,IAAI,EAAE,SAAS;MACfJ,WAAW,EAAE,cAAc;MAC3BY,OAAO,EAAE,GAAG;MACZC,UAAU,EAAE;IAChB;EACJ,CAAC;EACDb,WAAW,EAAE,yBAAyB;EACtCc,QAAQ,EAAE,CAAC,KAAK,EAAE,YAAY;AAClC,CAAC,CAAC;AAEK,IAAMgE,QAA4C,GAAAlF,OAAA,CAAAkF,QAAA,GAAGjF,0BAAY,CAACC,qBAAqB,CAAC;EAC3FC,KAAK,EAAE,8BAA8B;EACrCE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,MAAM;EAClBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACR2B,IAAI,EAAE;MACF5B,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDwE,UAAU,EAAE;MACRC,GAAG,EAAE,OAAO;MACZ5E,IAAI,EAAE;IACV;EACJ,CAAC;EACDU,QAAQ,EAAE,CACN,MAAM;AAEd,CAAC,CAAC;AAEK,IAAMmE,qBAA8D,GAAArF,OAAA,CAAAqF,qBAAA,GAAG;EAC1ElF,KAAK,EAAE,cAAc;EACrBC,WAAW,EAAE,yBAAyB;EACtCC,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE;IACR+E,GAAG,EAAE,IAAI;IACTC,MAAM,EAAE,CACJ,WAAW,EACX,UAAU,CACb;IACDC,SAAS,EAAE;EACf,CAAC;EACDhF,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRyD,EAAE,EAAE;MACA1D,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDE,QAAQ,EAAE;MACNL,IAAI,EAAE;IACV,CAAC;IACDiF,IAAI,EAAE;MACFjF,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACRK,GAAG,EAAE;UACDV,WAAW,EAAE,cAAc;UAC3BI,IAAI,EAAE,SAAS;UACfO,OAAO,EAAE,CAAC;UACVC,OAAO,EAAE;QACb;MACJ,CAAC;MACDE,QAAQ,EAAE,CAAC,KAAK;IACpB;EACJ,CAAC;EACDA,QAAQ,EAAE,CACN,IAAI,EACJ,WAAW,EACX,UAAU,EACV,MAAM,CACT;EACDC,OAAO,EAAE,CAAC,WAAW;AACzB,CAAC;AAEM,IAAMuE,kCAAkC,GAAA1F,OAAA,CAAA0F,kCAAA,GAAGzF,0BAAY,CAACC,qBAAqB,CAAC;EACjFC,KAAK,EAAE,cAAc;EACrBC,WAAW,EAAE,yBAAyB;EACtCC,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE;IACR+E,GAAG,EAAE,IAAI;IACTC,MAAM,EAAE,CACJ,WAAW,EACX,UAAU,CACb;IACDC,SAAS,EAAE;EACf,CAAC;EACDvC,SAAS,EAAE,EAAE;EACbzC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRyD,EAAE,EAAE;MACA1D,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDE,QAAQ,EAAE;MACNL,IAAI,EAAE;IACV,CAAC;IACDiF,IAAI,EAAE;MACFjF,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACRK,GAAG,EAAE;UACDV,WAAW,EAAE,cAAc;UAC3BI,IAAI,EAAE,SAAS;UACfO,OAAO,EA
AE,CAAC;UACVC,OAAO,EAAE;QACb;MACJ,CAAC;MACDE,QAAQ,EAAE,CAAC,KAAK;IACpB,CAAC;IACDyE,aAAa,EAAE;MACXC,KAAK,EAAE,EAAE;MACTC,KAAK,EAAE,EAAE;MACTC,KAAK,EAAE,EAAE;MACTtF,IAAI,EAAE,EAAE;MACRuF,YAAY,EAAE;QACVC,OAAO,EAAE,CAAC,KAAK;MACnB,CAAC;MACDzB,KAAK,EAAE,EAAE;MACTrD,QAAQ,EAAE,EAAE;MACZ+E,IAAI,EAAE;IACV;EACJ,CAAC;EACD/E,QAAQ,EAAE,CACN,IAAI,EACJ,WAAW,EACX,UAAU,EACV,MAAM,CACT;EACDC,OAAO,EAAE,CAAC,WAAW;AACzB,CAAU,CAAC;AAEX,IAAM+E,gCAAgC,GAAG,IAAA7E,6BAAmB,EAACqE,kCAAkC,CAAC;AAGzF,IAAMS,cAAwD,GAAAnG,OAAA,CAAAmG,cAAA,GAAGlG,0BAAY,CAACC,qBAAqB,CAAC;EACvGC,KAAK,EAAE,8BAA8B;EACrCE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,MAAM;EAClBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACR2B,IAAI,EAAE;MACF5B,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDyF,GAAG,EAAE;MACD5F,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACR0E,UAAU,EAAE;UACRC,GAAG,EAAE,OAAO;UACZ5E,IAAI,EAAE;QACV;MACJ;IACJ;EACJ,CAAC;EACDU,QAAQ,EAAE,CACN,MAAM;AAEd,CAAC,CAAC;;AAEF;AACA;AACA;AACO,SAASmF,aAAaA,CAAA,EAA4C;EACrE,IAAMC,GAA4C,GAAG;IACjDnG,KAAK,EAAE,gBAAgB,GAAGoG,sBAAa,CAACC,YAAY,CAAC,CAAC,CAAC;IAAE;IACzDnG,OAAO,EAAE,CAAC;IACVE,UAAU,EAAE,IAAI;IAChBC,IAAI,EAAE,QAAQ;IACdF,cAAc,EAAE,KAAK;IACrBG,UAAU,EAAE;MACRyD,EAAE,EAAE;QACA1D,IAAI,EAAE,QAAQ;QACdG,SAAS,EAAE;MACf,CAAC;MACD8F,IAAI,EAAE;QACFjG,IAAI,EAAE,QAAQ;QACdG,SAAS,EAAE;MACf,CAAC;MACD+F,IAAI,EAAE;QACFlG,IAAI,EAAE,QAAQ;QACdO,OAAO,EAAE,CAAC;QACVC,OAAO,EAAE,KAAK;QACdC,UAAU,EAAE;MAChB,CAAC;MACD0F,IAAI,EAAE;QACFnG,IAAI,EAAE,QAAQ;QACdC,UAAU,EAAE;UACRmG,KAAK,EAAE;YACHpG,IAAI,EAAE,QAAQ;YACdG,SAAS,EAAE;UACf,CAAC;UACDkG,KAAK,EAAE;YACHrG,IAAI,EAAE,QAAQ;YACdG,SAAS,EAAE;UACf;QACJ;MACJ,CAAC;MACDmG,IAAI,EAAE;QACFtG,IAAI,EAAE,OAAO;QACb+D,KAAK,EAAE;UACH/D,IAAI,EAAE,QAAQ;UACdC,UAAU,EAAE;YACRmG,KAAK,EAAE;cACHpG,IAAI,EAAE;YACV,CAAC;YACDqG,KAAK,EAAE;cACHrG,IAAI,EAAE;YACV;UACJ;QACJ;MACJ;IACJ,CAAC;IACDU,QAAQ,EAAE,CACN,IAAI,CACP;IACDC,OAAO,EAAE,CACL,MAAM,EACN,MAAM,EACN,YAAY;IACZ;IACA,CACI,MAAM,EACN,MAAM,CACT,CACJ;IACD4F,QAAQ,EAAE;MACNC,MAAM,EAAE,CAAC;MACTC,IAAI,EAAE;IACV;EACJ,CAAC;EACD,OAAOX,GAAG;AACd;AAEO,IA
AMY,KAAsC,GAAAlH,OAAA,CAAAkH,KAAA,GAAGjH,0BAAY,CAACC,qBAAqB,CAAC;EACrFC,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,mCAAmC;EAChDG,UAAU,EAAE,IAAI;EAChBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRyD,EAAE,EAAE;MACA1D,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDwG,CAAC,EAAE;MACC3G,IAAI,EAAE;IACV,CAAC;IACD4G,CAAC,EAAE;MACC5G,IAAI,EAAE;IACV;EACJ,CAAC;EACDU,QAAQ,EAAE,CAAC,GAAG,EAAE,GAAG;AACvB,CAAC,CAAC;AAEK,IAAMmG,YAAqD,GAAArH,OAAA,CAAAqH,YAAA,GAAGpH,0BAAY,CAACC,qBAAqB,CAAC;EACpGC,KAAK,EAAE,cAAc;EACrBC,WAAW,EAAE,yBAAyB;EACtCC,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDN,IAAI,EAAE;IACV,CAAC;IACDoB,WAAW,EAAE;MACTpB,IAAI,EAAE;IACV;EACJ,CAAC;EACDW,OAAO,EAAE,EAAE;EACXD,QAAQ,EAAE,CAAC,YAAY,EAAE,KAAK;AAClC,CAAC,CAAC;AAEK,IAAMoG,kBAAyE,GAAAtH,OAAA,CAAAsH,kBAAA,GAAG;EACrFnH,KAAK,EAAE,cAAc;EACrBC,WAAW,EAAE,yBAAyB;EACtCC,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACD4G,MAAM,EAAE;MACJ/G,IAAI,EAAE;IACV;EACJ,CAAC;EACDW,OAAO,EAAE,EAAE;EACXD,QAAQ,EAAE,CAAC,YAAY,EAAE,QAAQ;AACrC,CAAiC;;AAGjC;AACA;AACA;AACA;AACO,IAAMsG,kBAAgE,GAAAxH,OAAA,CAAAwH,kBAAA,GAAGvH,0BAAY,CAACC,qBAAqB,CAAC;EAC/GG,OAAO,EAAE,CAAC;EACVG,IAAI,EAAE,QAAQ;EACdD,UAAU,EAAE,IAAI;EAChBE,UAAU,EAAE;IACRyD,EAAE,EAAE;MACA1D,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDyB,IAAI,EAAE;MACF5B,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDN,IAAI,EAAE;IACV,CAAC;IACDiH,SAAS,EAAE;MACPjH,IAAI,EAAE,QAAQ;MACdO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,iBAAiB;MAC1BC,UAAU,EAAE;IAEhB,CAAC;IACDyG,SAAS,EAAE;MACPlH,IAAI,EAAE;IACV;EACJ,CAAC;EACDW,OAAO,EAAE,CAAC,WAAW,CAAC;EACtBD,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,WAAW;AAC/C,CAAC,CAAC;AAEK,IAAMyG,wBAA4E,GAAA3H,OAAA,CAAA2H,wBAAA,GAAG1H,0BAAY,CAACC,qBAAqB,CAAC;EAC3HG,OAAO,EAAE,CAAC;EACVG,IAAI,EAAE,QAAQ;EACdD,UAAU,EAAE,IAAI;EAChBE,UAAU,EAAE
;IACRyD,EAAE,EAAE;MACA1D,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDyB,IAAI,EAAE;MACF5B,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDN,IAAI,EAAE;IACV,CAAC;IACDiH,SAAS,EAAE;MACPjH,IAAI,EAAE,QAAQ;MACdO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,iBAAiB;MAC1BC,UAAU,EAAE;IAEhB,CAAC;IACDyG,SAAS,EAAE;MACPlH,IAAI,EAAE;IACV,CAAC;IACDoD,OAAO,EAAE;MACLpD,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACRoD,MAAM,EAAE;UACJrD,IAAI,EAAE;QACV,CAAC;QACDoH,KAAK,EAAE;UACHpH,IAAI,EAAE;QACV,CAAC;QACDqH,IAAI,EAAE;UACFrH,IAAI,EAAE;QACV,CAAC;QACDsH,OAAO,EAAE;UACLtH,IAAI,EAAE;QACV,CAAC;QACDuH,GAAG,EAAE;UACDvH,IAAI,EAAE,QAAQ;UACdC,UAAU,EAAE;YACRuH,GAAG,EAAE;cACDxH,IAAI,EAAE;YACV,CAAC;YACDyH,GAAG,EAAE;cACDzH,IAAI,EAAE;YACV;UACJ;QACJ;MACJ;IACJ;EACJ,CAAC;EACDW,OAAO,EAAE,CAAC,WAAW,CAAC;EACtBD,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,WAAW;AAC/C,CAAC,CAAC;;AAGF;AACA;AACA;AACA;AACO,IAAMgH,0BAAwE,GAAAlI,OAAA,CAAAkI,0BAAA,GAAGjI,0BAAY,CAACC,qBAAqB,CAAC;EACvHG,OAAO,EAAE,CAAC;EACVG,IAAI,EAAE,QAAQ;EACdD,UAAU,EAAE,IAAI;EAChBE,UAAU,EAAE;IACRyD,EAAE,EAAE;MACA1D,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDyB,IAAI,EAAE;MACF5B,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDN,IAAI,EAAE,QAAQ;MACdO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,IAAI;MACbC,UAAU,EAAE;IAChB,CAAC;IACDwG,SAAS,EAAE;MACPjH,IAAI,EAAE,QAAQ;MACdO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,iBAAiB;MAC1BC,UAAU,EAAE;IAChB,CAAC;IACDyG,SAAS,EAAE;MACPlH,IAAI,EAAE;IACV;EACJ,CAAC;EACDW,OAAO,EAAE,CAAC,MAAM,EAAE,KAAK,EAAE,WAAW,CAAC;EACrCD,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,WAAW;AAC/C,CAAC,CAAC;AAEK,IAAMiH,iCAMX,GAAAnI,OAAA,CAAAmI,iCAAA,GAAGlI,0BAAY,CAACC,qBAAqB,CAAC;EACpCG,OAAO,EAAE,CAAC;EACVE,UAAU,EAAE,IAAI;EAChBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRyD,EAAE,EAAE;MACA1D,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDyB,IAAI,EAAE;MACF5B,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDN,IAAI,EAAE,QAAQ;MACdO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,IAAI;MACbC,UAAU,EAAE;IAChB,CAAC;IACDmH,SAAS,EAAE;MACP5H,IAAI,EAAE,QAAQ;MACdO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,iBAAi
B;MAC1BC,UAAU,EAAE;IAChB,CAAC;IACDwG,SAAS,EAAE;MACPjH,IAAI,EAAE,QAAQ;MACdO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,iBAAiB;MAC1BC,UAAU,EAAE;IAChB;EACJ,CAAC;EACDE,OAAO,EAAE,CACL,CAAC,MAAM,EAAE,IAAI,CAAC,EACd,CAAC,KAAK,EAAE,IAAI,CAAC,EACb,CAAC,WAAW,EAAE,WAAW,EAAE,IAAI,CAAC,CACnC;EACDD,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,WAAW;AAC/C,CAAC,CAAC;AAEK,IAAMmH,0BAAiF,GAAArI,OAAA,CAAAqI,0BAAA,GAAGpI,0BAAY,CAACC,qBAAqB,CAAC;EAChIG,OAAO,EAAE,CAAC;EACVE,UAAU,EAAE,IAAI;EAChBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRyD,EAAE,EAAE;MACA1D,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDyB,IAAI,EAAE;MACF5B,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACD2H,GAAG,EAAE;MACD9H,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACR2B,IAAI,EAAE;UACF5B,IAAI,EAAE,QAAQ;UACdG,SAAS,EAAE;QACf,CAAC;QACD4H,OAAO,EAAE;UACL/H,IAAI,EAAE,QAAQ;UACdC,UAAU,EAAE;YACR+H,QAAQ,EAAE;cACNhI,IAAI,EAAE,QAAQ;cACdG,SAAS,EAAE;YACf,CAAC;YACD8H,YAAY,EAAE;cACVjI,IAAI,EAAE,OAAO;cACb+D,KAAK,EAAE;gBACH/D,IAAI,EAAE,QAAQ;gBACdC,UAAU,EAAE;kBACR2B,IAAI,EAAE;oBACF5B,IAAI,EAAE,QAAQ;oBACdG,SAAS,EAAE;kBACf;gBACJ;cACJ;YACJ;UACJ;QACJ;MACJ;IACJ;EACJ,CAAC;EACDO,QAAQ,EAAE,CACN,IAAI,CACP;EACDC,OAAO,EAAE,CACL,MAAM,EACN,UAAU,EACV,sBAAsB;AAE9B,CAAC,CAAC;AAEK,IAAMuH,kBAA4E,GAAA1I,OAAA,CAAA0I,kBAAA,GAAGzI,0BAAY,CAACC,qBAAqB,CAAC;EAC3HG,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,+CAA+C;EAC5DG,UAAU,EAAE,IAAI;EAChBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRyD,EAAE,EAAE;MACA1D,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDyB,IAAI,EAAE;MACF5B,IAAI,EAAE;IACV,CAAC;IACDM,GAAG,EAAE;MACDV,WAAW,EAAE,cAAc;MAC3BI,IAAI,EAAE,SAAS;MACfO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,GAAG;MACZC,UAAU,EAAE;IAChB;EACJ,CAAC;EACDC,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,CAAC;EAC/BC,OAAO,EAAE,CACL,CAAC,KAAK,EAAE,IAAI,CAAC;AAErB,CAAC,CAAC;AAGK,SAASwH,oBAAoBA,CAChCC,MAA+B,EACR;EACvB,IAAMtC,GAAG,GAAG,IAAAuC,gBAAS,EAACD,MAAM,CAAC;EAC7BtC,GAAG,CAAChG,cAAc,GAAG,IAAI;EACzB,OAAOgG,GAAG;AACd","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/test-utils/test-util.js b/dist/cjs/plugins/test-utils/test-util.js 
deleted file mode 100644 index aa3e1309dd9..00000000000 --- a/dist/cjs/plugins/test-utils/test-util.js +++ /dev/null @@ -1,49 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.ensureCollectionsHaveEqualState = ensureCollectionsHaveEqualState; -exports.ensureReplicationHasNoErrors = ensureReplicationHasNoErrors; -exports.testMultipleTimes = testMultipleTimes; -var _assert = _interopRequireDefault(require("assert")); -var _index = require("../utils/index.js"); -function testMultipleTimes(times, title, test) { - new Array(times).fill(0).forEach(() => { - it(title, test); - }); -} -async function ensureCollectionsHaveEqualState(c1, c2) { - await (0, _index.requestIdlePromise)(); - var getJson = async collection => { - var docs = await collection.find().exec(); - return docs.map(d => d.toJSON()); - }; - var json1 = await getJson(c1); - var json2 = await getJson(c2); - try { - _assert.default.deepStrictEqual(json1, json2); - } catch (err) { - console.error('ensureCollectionsHaveEqualState() states not equal:'); - console.dir({ - [c1.name]: json1, - [c2.name]: json2 - }); - throw err; - } -} -function ensureReplicationHasNoErrors(replicationState) { - /** - * We do not have to unsubscribe because the observable will cancel anyway. 
- */ - replicationState.error$.subscribe(err => { - console.error('ensureReplicationHasNoErrors() has error:'); - console.log(err); - if (err?.parameters?.errors) { - throw err.parameters.errors[0]; - } - throw err; - }); -} -//# sourceMappingURL=test-util.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/test-utils/test-util.js.map b/dist/cjs/plugins/test-utils/test-util.js.map deleted file mode 100644 index 8f143eac285..00000000000 --- a/dist/cjs/plugins/test-utils/test-util.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"test-util.js","names":["_assert","_interopRequireDefault","require","_index","testMultipleTimes","times","title","test","Array","fill","forEach","it","ensureCollectionsHaveEqualState","c1","c2","requestIdlePromise","getJson","collection","docs","find","exec","map","d","toJSON","json1","json2","assert","deepStrictEqual","err","console","error","dir","name","ensureReplicationHasNoErrors","replicationState","error$","subscribe","log","parameters","errors"],"sources":["../../../../src/plugins/test-utils/test-util.ts"],"sourcesContent":["import type { Func } from 'mocha';\nimport assert from 'assert';\nimport type { RxCollection } from '../../types';\nimport { requestIdlePromise } from '../utils/index.ts';\nimport type { RxReplicationState } from '../replication/index.ts';\n\nexport function testMultipleTimes(times: number, title: string, test: Func) {\n new Array(times).fill(0).forEach(() => {\n it(title, test);\n });\n}\n\nexport async function ensureCollectionsHaveEqualState(\n c1: RxCollection,\n c2: RxCollection\n) {\n await requestIdlePromise();\n const getJson = async (collection: RxCollection) => {\n const docs = await collection.find().exec();\n return docs.map(d => d.toJSON());\n };\n const json1 = await getJson(c1);\n const json2 = await getJson(c2);\n try {\n assert.deepStrictEqual(\n json1,\n json2\n );\n } catch (err) {\n console.error('ensureCollectionsHaveEqualState() states not equal:');\n console.dir({\n 
[c1.name]: json1,\n [c2.name]: json2\n });\n throw err;\n }\n}\n\nexport function ensureReplicationHasNoErrors(replicationState: RxReplicationState) {\n /**\n * We do not have to unsubscribe because the observable will cancel anyway.\n */\n replicationState.error$.subscribe(err => {\n console.error('ensureReplicationHasNoErrors() has error:');\n console.log(err);\n if (err?.parameters?.errors) {\n throw err.parameters.errors[0];\n }\n throw err;\n });\n}\n"],"mappings":";;;;;;;;;AACA,IAAAA,OAAA,GAAAC,sBAAA,CAAAC,OAAA;AAEA,IAAAC,MAAA,GAAAD,OAAA;AAGO,SAASE,iBAAiBA,CAACC,KAAa,EAAEC,KAAa,EAAEC,IAAU,EAAE;EACxE,IAAIC,KAAK,CAACH,KAAK,CAAC,CAACI,IAAI,CAAC,CAAC,CAAC,CAACC,OAAO,CAAC,MAAM;IACnCC,EAAE,CAACL,KAAK,EAAEC,IAAI,CAAC;EACnB,CAAC,CAAC;AACN;AAEO,eAAeK,+BAA+BA,CACjDC,EAA2B,EAC3BC,EAA2B,EAC7B;EACE,MAAM,IAAAC,yBAAkB,EAAC,CAAC;EAC1B,IAAMC,OAAO,GAAG,MAAOC,UAAmC,IAAK;IAC3D,IAAMC,IAAI,GAAG,MAAMD,UAAU,CAACE,IAAI,CAAC,CAAC,CAACC,IAAI,CAAC,CAAC;IAC3C,OAAOF,IAAI,CAACG,GAAG,CAACC,CAAC,IAAIA,CAAC,CAACC,MAAM,CAAC,CAAC,CAAC;EACpC,CAAC;EACD,IAAMC,KAAK,GAAG,MAAMR,OAAO,CAACH,EAAE,CAAC;EAC/B,IAAMY,KAAK,GAAG,MAAMT,OAAO,CAACF,EAAE,CAAC;EAC/B,IAAI;IACAY,eAAM,CAACC,eAAe,CAClBH,KAAK,EACLC,KACJ,CAAC;EACL,CAAC,CAAC,OAAOG,GAAG,EAAE;IACVC,OAAO,CAACC,KAAK,CAAC,qDAAqD,CAAC;IACpED,OAAO,CAACE,GAAG,CAAC;MACR,CAAClB,EAAE,CAACmB,IAAI,GAAGR,KAAK;MAChB,CAACV,EAAE,CAACkB,IAAI,GAAGP;IACf,CAAC,CAAC;IACF,MAAMG,GAAG;EACb;AACJ;AAEO,SAASK,4BAA4BA,CAACC,gBAA8C,EAAE;EACzF;AACJ;AACA;EACIA,gBAAgB,CAACC,MAAM,CAACC,SAAS,CAACR,GAAG,IAAI;IACrCC,OAAO,CAACC,KAAK,CAAC,2CAA2C,CAAC;IAC1DD,OAAO,CAACQ,GAAG,CAACT,GAAG,CAAC;IAChB,IAAIA,GAAG,EAAEU,UAAU,EAAEC,MAAM,EAAE;MACzB,MAAMX,GAAG,CAACU,UAAU,CAACC,MAAM,CAAC,CAAC,CAAC;IAClC;IACA,MAAMX,GAAG;EACb,CAAC,CAAC;AACN","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/update/index.js b/dist/cjs/plugins/update/index.js deleted file mode 100644 index 5ec320baf72..00000000000 --- a/dist/cjs/plugins/update/index.js +++ /dev/null @@ -1,45 +0,0 @@ -"use strict"; - 
-Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxDBUpdatePlugin = void 0; -exports.RxQueryUpdate = RxQueryUpdate; -exports.incrementalUpdate = incrementalUpdate; -exports.update = update; -var _rxQueryHelper = require("../../rx-query-helper.js"); -var _mingoUpdater = require("./mingo-updater.js"); -/** - * this plugin allows delta-updates with mongo-like-syntax - * It's using mingo internally - * @link https://github.com/kofrasa/mingo - */ - -function incrementalUpdate(updateObj) { - return this.incrementalModify(docData => { - var newDocData = (0, _mingoUpdater.mingoUpdater)(docData, updateObj); - return newDocData; - }); -} -function update(updateObj) { - var oldDocData = this._data; - var newDocData = (0, _mingoUpdater.mingoUpdater)(oldDocData, updateObj); - return this._saveData(newDocData, oldDocData); -} -async function RxQueryUpdate(updateObj) { - return (0, _rxQueryHelper.runQueryUpdateFunction)(this.asRxQuery, doc => doc.update(updateObj)); -} -var RxDBUpdatePlugin = exports.RxDBUpdatePlugin = { - name: 'update', - rxdb: true, - prototypes: { - RxDocument: proto => { - proto.update = update; - proto.incrementalUpdate = incrementalUpdate; - }, - RxQuery: proto => { - proto.update = RxQueryUpdate; - } - } -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/update/index.js.map b/dist/cjs/plugins/update/index.js.map deleted file mode 100644 index 9281ecb06f3..00000000000 --- a/dist/cjs/plugins/update/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_rxQueryHelper","require","_mingoUpdater","incrementalUpdate","updateObj","incrementalModify","docData","newDocData","mingoUpdater","update","oldDocData","_data","_saveData","RxQueryUpdate","runQueryUpdateFunction","asRxQuery","doc","RxDBUpdatePlugin","exports","name","rxdb","prototypes","RxDocument","proto","RxQuery"],"sources":["../../../../src/plugins/update/index.ts"],"sourcesContent":["/**\n * this plugin 
allows delta-updates with mongo-like-syntax\n * It's using mingo internally\n * @link https://github.com/kofrasa/mingo\n */\nimport { runQueryUpdateFunction } from '../../rx-query-helper.ts';\nimport type {\n RxDocument,\n RxQuery,\n RxPlugin,\n UpdateQuery\n} from '../../types/index.d.ts';\nimport { mingoUpdater } from './mingo-updater.ts';\n\nexport function incrementalUpdate(\n this: RxDocument,\n updateObj: UpdateQuery\n): Promise> {\n return this.incrementalModify((docData) => {\n const newDocData = mingoUpdater(docData, updateObj);\n return newDocData;\n });\n}\n\nexport function update(\n this: RxDocument,\n updateObj: UpdateQuery\n): Promise> {\n const oldDocData = this._data;\n const newDocData = mingoUpdater(oldDocData, updateObj);\n return this._saveData(newDocData, oldDocData);\n}\n\nexport async function RxQueryUpdate(\n this: RxQuery,\n updateObj: UpdateQuery\n): Promise {\n return runQueryUpdateFunction(\n this.asRxQuery,\n (doc) => doc.update(updateObj),\n );\n}\n\n\nexport const RxDBUpdatePlugin: RxPlugin = {\n name: 'update',\n rxdb: true,\n prototypes: {\n RxDocument: (proto: any) => {\n proto.update = update;\n proto.incrementalUpdate = incrementalUpdate;\n },\n RxQuery: (proto: any) => {\n proto.update = RxQueryUpdate;\n }\n 
}\n};\n"],"mappings":";;;;;;;;;AAKA,IAAAA,cAAA,GAAAC,OAAA;AAOA,IAAAC,aAAA,GAAAD,OAAA;AAZA;AACA;AACA;AACA;AACA;;AAUO,SAASE,iBAAiBA,CAE7BC,SAAiC,EACH;EAC9B,OAAO,IAAI,CAACC,iBAAiB,CAAEC,OAAO,IAAK;IACvC,IAAMC,UAAU,GAAG,IAAAC,0BAAY,EAAYF,OAAO,EAAEF,SAAS,CAAC;IAC9D,OAAOG,UAAU;EACrB,CAAC,CAAC;AACN;AAEO,SAASE,MAAMA,CAElBL,SAAiC,EACH;EAC9B,IAAMM,UAAU,GAAG,IAAI,CAACC,KAAK;EAC7B,IAAMJ,UAAU,GAAG,IAAAC,0BAAY,EAACE,UAAU,EAAEN,SAAS,CAAC;EACtD,OAAO,IAAI,CAACQ,SAAS,CAACL,UAAU,EAAEG,UAAU,CAAC;AACjD;AAEO,eAAeG,aAAaA,CAE/BT,SAA2B,EACf;EACZ,OAAO,IAAAU,qCAAsB,EACzB,IAAI,CAACC,SAAS,EACbC,GAAG,IAAKA,GAAG,CAACP,MAAM,CAACL,SAAS,CACjC,CAAC;AACL;AAGO,IAAMa,gBAA0B,GAAAC,OAAA,CAAAD,gBAAA,GAAG;EACtCE,IAAI,EAAE,QAAQ;EACdC,IAAI,EAAE,IAAI;EACVC,UAAU,EAAE;IACRC,UAAU,EAAGC,KAAU,IAAK;MACxBA,KAAK,CAACd,MAAM,GAAGA,MAAM;MACrBc,KAAK,CAACpB,iBAAiB,GAAGA,iBAAiB;IAC/C,CAAC;IACDqB,OAAO,EAAGD,KAAU,IAAK;MACrBA,KAAK,CAACd,MAAM,GAAGI,aAAa;IAChC;EACJ;AACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/update/mingo-updater.js b/dist/cjs/plugins/update/mingo-updater.js deleted file mode 100644 index edd75f0d7a2..00000000000 --- a/dist/cjs/plugins/update/mingo-updater.js +++ /dev/null @@ -1,27 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.mingoUpdater = mingoUpdater; -var _updater = require("mingo/updater"); -var _index = require("../utils/index.js"); -/** - * Custom build of the mingo updater for smaller build size - */ - -var updater; -function mingoUpdater(d, op) { - if (!updater) { - var updateObject = (0, _updater.createUpdater)({ - cloneMode: "none" - }); - updater = (d, op) => { - var cloned = (0, _index.clone)(d); - updateObject(cloned, op); - return cloned; - }; - } - return updater(d, op); -} -//# sourceMappingURL=mingo-updater.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/update/mingo-updater.js.map b/dist/cjs/plugins/update/mingo-updater.js.map deleted file mode 100644 index 8a439b2ebb0..00000000000 
--- a/dist/cjs/plugins/update/mingo-updater.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"mingo-updater.js","names":["_updater","require","_index","updater","mingoUpdater","d","op","updateObject","createUpdater","cloneMode","cloned","clone"],"sources":["../../../../src/plugins/update/mingo-updater.ts"],"sourcesContent":["/**\n * Custom build of the mingo updater for smaller build size\n */\n\nimport {\n createUpdater\n} from \"mingo/updater\";\nimport {\n clone\n} from '../utils/index.ts';\nimport type {\n UpdateQuery\n} from '../../types/index';\n\nlet updater: any;\nexport function mingoUpdater(\n d: T, op: UpdateQuery\n): T {\n if (!updater) {\n const updateObject = createUpdater({ cloneMode: \"none\" });\n updater = (d: T, op: UpdateQuery) => {\n const cloned = clone(d);\n updateObject(cloned as any, op as any);\n return cloned;\n }\n }\n return updater(d, op);\n}\n"],"mappings":";;;;;;AAIA,IAAAA,QAAA,GAAAC,OAAA;AAGA,IAAAC,MAAA,GAAAD,OAAA;AAPA;AACA;AACA;;AAYA,IAAIE,OAAY;AACT,SAASC,YAAYA,CACxBC,CAAI,EAAEC,EAAkB,EACvB;EACD,IAAI,CAACH,OAAO,EAAE;IACV,IAAMI,YAAY,GAAG,IAAAC,sBAAa,EAAC;MAAEC,SAAS,EAAE;IAAO,CAAC,CAAC;IACzDN,OAAO,GAAGA,CAACE,CAAI,EAAEC,EAAkB,KAAK;MACpC,IAAMI,MAAM,GAAG,IAAAC,YAAK,EAACN,CAAC,CAAC;MACvBE,YAAY,CAACG,MAAM,EAASJ,EAAS,CAAC;MACtC,OAAOI,MAAM;IACjB,CAAC;EACL;EACA,OAAOP,OAAO,CAACE,CAAC,EAAEC,EAAE,CAAC;AACzB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/utils/index.js b/dist/cjs/plugins/utils/index.js deleted file mode 100644 index 8a560f74707..00000000000 --- a/dist/cjs/plugins/utils/index.js +++ /dev/null @@ -1,204 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -var _utilsArray = require("./utils-array.js"); -Object.keys(_utilsArray).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _utilsArray[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - 
return _utilsArray[key]; - } - }); -}); -var _utilsBlob = require("./utils-blob.js"); -Object.keys(_utilsBlob).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _utilsBlob[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _utilsBlob[key]; - } - }); -}); -var _utilsBase = require("./utils-base64.js"); -Object.keys(_utilsBase).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _utilsBase[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _utilsBase[key]; - } - }); -}); -var _utilsRevision = require("./utils-revision.js"); -Object.keys(_utilsRevision).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _utilsRevision[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _utilsRevision[key]; - } - }); -}); -var _utilsDocument = require("./utils-document.js"); -Object.keys(_utilsDocument).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _utilsDocument[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _utilsDocument[key]; - } - }); -}); -var _utilsHash = require("./utils-hash.js"); -Object.keys(_utilsHash).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _utilsHash[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _utilsHash[key]; - } - }); -}); -var _utilsPromise = require("./utils-promise.js"); -Object.keys(_utilsPromise).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === 
_utilsPromise[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _utilsPromise[key]; - } - }); -}); -var _utilsRegex = require("./utils-regex.js"); -Object.keys(_utilsRegex).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _utilsRegex[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _utilsRegex[key]; - } - }); -}); -var _utilsString = require("./utils-string.js"); -Object.keys(_utilsString).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _utilsString[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _utilsString[key]; - } - }); -}); -var _utilsObjectDeepEqual = require("./utils-object-deep-equal.js"); -Object.keys(_utilsObjectDeepEqual).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _utilsObjectDeepEqual[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _utilsObjectDeepEqual[key]; - } - }); -}); -var _utilsObjectDotProp = require("./utils-object-dot-prop.js"); -Object.keys(_utilsObjectDotProp).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _utilsObjectDotProp[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _utilsObjectDotProp[key]; - } - }); -}); -var _utilsObject = require("./utils-object.js"); -Object.keys(_utilsObject).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _utilsObject[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _utilsObject[key]; - } - }); -}); -var 
_utilsMap = require("./utils-map.js"); -Object.keys(_utilsMap).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _utilsMap[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _utilsMap[key]; - } - }); -}); -var _utilsError = require("./utils-error.js"); -Object.keys(_utilsError).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _utilsError[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _utilsError[key]; - } - }); -}); -var _utilsTime = require("./utils-time.js"); -Object.keys(_utilsTime).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _utilsTime[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _utilsTime[key]; - } - }); -}); -var _utilsOther = require("./utils-other.js"); -Object.keys(_utilsOther).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _utilsOther[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _utilsOther[key]; - } - }); -}); -var _utilsRxdbVersion = require("./utils-rxdb-version.js"); -Object.keys(_utilsRxdbVersion).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _utilsRxdbVersion[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _utilsRxdbVersion[key]; - } - }); -}); -var _utilsGlobal = require("./utils-global.js"); -Object.keys(_utilsGlobal).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (key in exports && exports[key] === _utilsGlobal[key]) return; - 
Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _utilsGlobal[key]; - } - }); -}); -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/utils/index.js.map b/dist/cjs/plugins/utils/index.js.map deleted file mode 100644 index b4358153d5f..00000000000 --- a/dist/cjs/plugins/utils/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_utilsArray","require","Object","keys","forEach","key","exports","defineProperty","enumerable","get","_utilsBlob","_utilsBase","_utilsRevision","_utilsDocument","_utilsHash","_utilsPromise","_utilsRegex","_utilsString","_utilsObjectDeepEqual","_utilsObjectDotProp","_utilsObject","_utilsMap","_utilsError","_utilsTime","_utilsOther","_utilsRxdbVersion","_utilsGlobal"],"sources":["../../../../src/plugins/utils/index.ts"],"sourcesContent":["export * from './utils-array.ts';\nexport * from './utils-blob.ts';\nexport * from './utils-base64.ts';\nexport * from './utils-revision.ts';\nexport * from './utils-document.ts';\nexport * from './utils-hash.ts';\nexport * from './utils-promise.ts';\nexport * from './utils-regex.ts';\nexport * from './utils-string.ts';\nexport * from './utils-object-deep-equal.ts';\nexport * from './utils-object-dot-prop.ts';\nexport * from './utils-object.ts';\nexport * from './utils-map.ts';\nexport * from './utils-error.ts';\nexport * from './utils-time.ts';\nexport * from './utils-other.ts';\nexport * from './utils-rxdb-version.ts';\nexport * from 
'./utils-global.ts';\n"],"mappings":";;;;;AAAA,IAAAA,WAAA,GAAAC,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAH,WAAA,EAAAI,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAL,WAAA,CAAAK,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAT,WAAA,CAAAK,GAAA;IAAA;EAAA;AAAA;AACA,IAAAK,UAAA,GAAAT,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAO,UAAA,EAAAN,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAK,UAAA,CAAAL,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAC,UAAA,CAAAL,GAAA;IAAA;EAAA;AAAA;AACA,IAAAM,UAAA,GAAAV,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAQ,UAAA,EAAAP,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAM,UAAA,CAAAN,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAE,UAAA,CAAAN,GAAA;IAAA;EAAA;AAAA;AACA,IAAAO,cAAA,GAAAX,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAS,cAAA,EAAAR,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAO,cAAA,CAAAP,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAG,cAAA,CAAAP,GAAA;IAAA;EAAA;AAAA;AACA,IAAAQ,cAAA,GAAAZ,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAU,cAAA,EAAAT,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAQ,cAAA,CAAAR,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAI,cAAA,CAAAR,GAAA;IAAA;EAAA;AAAA;AACA,IAAAS,UAAA,GAAAb,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAW,UAAA,EAAAV,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAS,UAAA,CAAAT,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAK,UAAA,CAAAT,GAAA;IAAA;EAAA;AAAA;AACA,IAAAU,aAAA,GAAAd,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAY,aAAA,EAAAX,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA
,MAAAU,aAAA,CAAAV,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAM,aAAA,CAAAV,GAAA;IAAA;EAAA;AAAA;AACA,IAAAW,WAAA,GAAAf,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAa,WAAA,EAAAZ,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAW,WAAA,CAAAX,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAO,WAAA,CAAAX,GAAA;IAAA;EAAA;AAAA;AACA,IAAAY,YAAA,GAAAhB,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAc,YAAA,EAAAb,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAY,YAAA,CAAAZ,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAQ,YAAA,CAAAZ,GAAA;IAAA;EAAA;AAAA;AACA,IAAAa,qBAAA,GAAAjB,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAe,qBAAA,EAAAd,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAa,qBAAA,CAAAb,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAS,qBAAA,CAAAb,GAAA;IAAA;EAAA;AAAA;AACA,IAAAc,mBAAA,GAAAlB,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAgB,mBAAA,EAAAf,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAc,mBAAA,CAAAd,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAU,mBAAA,CAAAd,GAAA;IAAA;EAAA;AAAA;AACA,IAAAe,YAAA,GAAAnB,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAiB,YAAA,EAAAhB,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAe,YAAA,CAAAf,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAW,YAAA,CAAAf,GAAA;IAAA;EAAA;AAAA;AACA,IAAAgB,SAAA,GAAApB,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAkB,SAAA,EAAAjB,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAgB,SAAA,CAAAhB,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAY,SAAA,CAAAhB,GAAA;IAAA;EAAA;AAAA;AACA,IAAAiB,WAAA,GAAArB,OAAA;AAAAC,M
AAA,CAAAC,IAAA,CAAAmB,WAAA,EAAAlB,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAiB,WAAA,CAAAjB,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAa,WAAA,CAAAjB,GAAA;IAAA;EAAA;AAAA;AACA,IAAAkB,UAAA,GAAAtB,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAoB,UAAA,EAAAnB,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAkB,UAAA,CAAAlB,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAc,UAAA,CAAAlB,GAAA;IAAA;EAAA;AAAA;AACA,IAAAmB,WAAA,GAAAvB,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAqB,WAAA,EAAApB,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAmB,WAAA,CAAAnB,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAe,WAAA,CAAAnB,GAAA;IAAA;EAAA;AAAA;AACA,IAAAoB,iBAAA,GAAAxB,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAsB,iBAAA,EAAArB,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAoB,iBAAA,CAAApB,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAgB,iBAAA,CAAApB,GAAA;IAAA;EAAA;AAAA;AACA,IAAAqB,YAAA,GAAAzB,OAAA;AAAAC,MAAA,CAAAC,IAAA,CAAAuB,YAAA,EAAAtB,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAA,GAAA,IAAAC,OAAA,IAAAA,OAAA,CAAAD,GAAA,MAAAqB,YAAA,CAAArB,GAAA;EAAAH,MAAA,CAAAK,cAAA,CAAAD,OAAA,EAAAD,GAAA;IAAAG,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAiB,YAAA,CAAArB,GAAA;IAAA;EAAA;AAAA","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-array.js b/dist/cjs/plugins/utils/utils-array.js deleted file mode 100644 index 9e27e56bb08..00000000000 --- a/dist/cjs/plugins/utils/utils-array.js +++ /dev/null @@ -1,160 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.appendToArray = appendToArray; -exports.arrayFilterNotEmpty = arrayFilterNotEmpty; -exports.asyncFilter = asyncFilter; 
-exports.batchArray = batchArray; -exports.countUntilNotMatching = countUntilNotMatching; -exports.isMaybeReadonlyArray = isMaybeReadonlyArray; -exports.isOneItemOfArrayInOtherArray = isOneItemOfArrayInOtherArray; -exports.lastOfArray = lastOfArray; -exports.maxOfNumbers = maxOfNumbers; -exports.randomOfArray = randomOfArray; -exports.removeOneFromArrayIfMatches = removeOneFromArrayIfMatches; -exports.shuffleArray = shuffleArray; -exports.sumNumberArray = sumNumberArray; -exports.toArray = toArray; -exports.uniqueArray = uniqueArray; -function lastOfArray(ar) { - return ar[ar.length - 1]; -} - -/** - * shuffle the given array - */ -function shuffleArray(arr) { - return arr.slice(0).sort(() => Math.random() - 0.5); -} -function randomOfArray(arr) { - var randomElement = arr[Math.floor(Math.random() * arr.length)]; - return randomElement; -} -function toArray(input) { - return Array.isArray(input) ? input.slice(0) : [input]; -} - -/** - * Split array with items into smaller arrays with items - * @link https://stackoverflow.com/a/7273794/3443137 - */ -function batchArray(array, batchSize) { - array = array.slice(0); - var ret = []; - while (array.length) { - var batch = array.splice(0, batchSize); - ret.push(batch); - } - return ret; -} - -/** - * @link https://stackoverflow.com/a/15996017 - */ -function removeOneFromArrayIfMatches(ar, condition) { - ar = ar.slice(); - var i = ar.length; - var done = false; - while (i-- && !done) { - if (condition(ar[i])) { - done = true; - ar.splice(i, 1); - } - } - return ar; -} - -/** - * returns true if the supplied argument is either an Array or a Readonly> - */ -function isMaybeReadonlyArray(x) { - // While this looks strange, it's a workaround for an issue in TypeScript: - // https://github.com/microsoft/TypeScript/issues/17002 - // - // The problem is that `Array.isArray` as a type guard returns `false` for a readonly array, - // but at runtime the object is an array and the runtime call to `Array.isArray` would return `true`. 
- // The type predicate here allows for both `Array` and `Readonly>` to pass a type check while - // still performing runtime type inspection. - return Array.isArray(x); -} -function isOneItemOfArrayInOtherArray(ar1, ar2) { - for (var i = 0; i < ar1.length; i++) { - var el = ar1[i]; - var has = ar2.includes(el); - if (has) { - return true; - } - } - return false; -} - -/** - * Use this in array.filter() to remove all empty slots - * and have the correct typings afterwards. - * @link https://stackoverflow.com/a/46700791/3443137 - */ -function arrayFilterNotEmpty(value) { - if (value === null || value === undefined) { - return false; - } - return true; -} -function countUntilNotMatching(ar, matchingFn) { - var count = 0; - var idx = -1; - for (var item of ar) { - idx = idx + 1; - var matching = matchingFn(item, idx); - if (matching) { - count = count + 1; - } else { - break; - } - } - return count; -} -async function asyncFilter(array, predicate) { - var filters = await Promise.all(array.map(predicate)); - return array.filter((...[, index]) => filters[index]); -} - -/** - * @link https://stackoverflow.com/a/3762735 - */ -function sumNumberArray(array) { - var count = 0; - for (var i = array.length; i--;) { - count += array[i]; - } - return count; -} -function maxOfNumbers(arr) { - return Math.max(...arr); -} - -/** - * Appends the given documents to the given array. - * This will mutate the first given array. - * Mostly used as faster alternative to Array.concat() - * because .concat() is so slow. 
- * @link https://www.measurethat.net/Benchmarks/Show/4223/0/array-concat-vs-spread-operator-vs-push#latest_results_block - */ -function appendToArray(ar, add) { - var amount = add.length; - for (var i = 0; i < amount; ++i) { - var element = add[i]; - ar.push(element); - } -} - -/** - * @link https://gist.github.com/telekosmos/3b62a31a5c43f40849bb - */ -function uniqueArray(arrArg) { - return arrArg.filter(function (elem, pos, arr) { - return arr.indexOf(elem) === pos; - }); -} -//# sourceMappingURL=utils-array.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-array.js.map b/dist/cjs/plugins/utils/utils-array.js.map deleted file mode 100644 index 7e06d1cf4f8..00000000000 --- a/dist/cjs/plugins/utils/utils-array.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-array.js","names":["lastOfArray","ar","length","shuffleArray","arr","slice","sort","Math","random","randomOfArray","randomElement","floor","toArray","input","Array","isArray","batchArray","array","batchSize","ret","batch","splice","push","removeOneFromArrayIfMatches","condition","i","done","isMaybeReadonlyArray","x","isOneItemOfArrayInOtherArray","ar1","ar2","el","has","includes","arrayFilterNotEmpty","value","undefined","countUntilNotMatching","matchingFn","count","idx","item","matching","asyncFilter","predicate","filters","Promise","all","map","filter","index","sumNumberArray","maxOfNumbers","max","appendToArray","add","amount","element","uniqueArray","arrArg","elem","pos","indexOf"],"sources":["../../../../src/plugins/utils/utils-array.ts"],"sourcesContent":["import type {\n MaybePromise,\n MaybeReadonly\n} from '../../types/index.d.ts';\n\nexport function lastOfArray(ar: T[]): T | undefined {\n return ar[ar.length - 1];\n}\n\n/**\n * shuffle the given array\n */\nexport function shuffleArray(arr: T[]): T[] {\n return arr.slice(0).sort(() => (Math.random() - 0.5));\n}\n\nexport function randomOfArray(arr: T[]): T {\n const randomElement = arr[Math.floor(Math.random() 
* arr.length)];\n return randomElement;\n}\n\n\nexport function toArray(input: T | T[] | Readonly | Readonly): T[] {\n return Array.isArray(input) ? (input as any[]).slice(0) : [input];\n}\n\n/**\n * Split array with items into smaller arrays with items\n * @link https://stackoverflow.com/a/7273794/3443137\n */\nexport function batchArray(array: T[], batchSize: number): T[][] {\n array = array.slice(0);\n const ret: T[][] = [];\n while (array.length) {\n const batch = array.splice(0, batchSize);\n ret.push(batch);\n }\n return ret;\n}\n\n/**\n * @link https://stackoverflow.com/a/15996017\n */\nexport function removeOneFromArrayIfMatches(ar: T[], condition: (x: T) => boolean): T[] {\n ar = ar.slice();\n let i = ar.length;\n let done = false;\n while (i-- && !done) {\n if (condition(ar[i])) {\n done = true;\n ar.splice(i, 1);\n }\n }\n return ar;\n}\n\n/**\n * returns true if the supplied argument is either an Array or a Readonly>\n */\nexport function isMaybeReadonlyArray(x: any): x is MaybeReadonly {\n // While this looks strange, it's a workaround for an issue in TypeScript:\n // https://github.com/microsoft/TypeScript/issues/17002\n //\n // The problem is that `Array.isArray` as a type guard returns `false` for a readonly array,\n // but at runtime the object is an array and the runtime call to `Array.isArray` would return `true`.\n // The type predicate here allows for both `Array` and `Readonly>` to pass a type check while\n // still performing runtime type inspection.\n return Array.isArray(x);\n}\n\n\n\nexport function isOneItemOfArrayInOtherArray(ar1: T[], ar2: T[]): boolean {\n for (let i = 0; i < ar1.length; i++) {\n const el = ar1[i];\n const has = ar2.includes(el);\n if (has) {\n return true;\n }\n }\n return false;\n}\n\n\n/**\n * Use this in array.filter() to remove all empty slots\n * and have the correct typings afterwards.\n * @link https://stackoverflow.com/a/46700791/3443137\n */\nexport function arrayFilterNotEmpty(value: TValue | null | 
undefined): value is TValue {\n if (value === null || value === undefined) {\n return false;\n }\n return true;\n}\n\nexport function countUntilNotMatching(\n ar: T[],\n matchingFn: (v: T, idx: number) => boolean\n): number {\n let count = 0;\n let idx = -1;\n for (const item of ar) {\n idx = idx + 1;\n const matching = matchingFn(item, idx);\n if (matching) {\n count = count + 1;\n } else {\n break;\n }\n }\n return count;\n}\n\nexport async function asyncFilter(array: T[], predicate: (item: T, index: number, a: T[]) => MaybePromise): Promise {\n const filters = await Promise.all(\n array.map(predicate)\n );\n\n return array.filter((...[, index]) => filters[index]);\n}\n\n/**\n * @link https://stackoverflow.com/a/3762735\n */\nexport function sumNumberArray(array: number[]): number {\n let count = 0;\n for (let i = array.length; i--;) {\n count += array[i];\n }\n return count;\n}\n\nexport function maxOfNumbers(arr: number[]): number {\n return Math.max(...arr);\n}\n\n\n/**\n * Appends the given documents to the given array.\n * This will mutate the first given array.\n * Mostly used as faster alternative to Array.concat()\n * because .concat() is so slow.\n * @link https://www.measurethat.net/Benchmarks/Show/4223/0/array-concat-vs-spread-operator-vs-push#latest_results_block\n */\nexport function appendToArray(ar: T[], add: T[] | readonly T[]): void {\n const amount = add.length;\n for (let i = 0; i < amount; ++i) {\n const element = add[i];\n ar.push(element);\n }\n}\n\n/**\n * @link https://gist.github.com/telekosmos/3b62a31a5c43f40849bb\n */\nexport function uniqueArray(arrArg: string[]): string[] {\n return arrArg.filter(function (elem, pos, arr) {\n return arr.indexOf(elem) === pos;\n 
});\n}\n\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAKO,SAASA,WAAWA,CAAIC,EAAO,EAAiB;EACnD,OAAOA,EAAE,CAACA,EAAE,CAACC,MAAM,GAAG,CAAC,CAAC;AAC5B;;AAEA;AACA;AACA;AACO,SAASC,YAAYA,CAAIC,GAAQ,EAAO;EAC3C,OAAOA,GAAG,CAACC,KAAK,CAAC,CAAC,CAAC,CAACC,IAAI,CAAC,MAAOC,IAAI,CAACC,MAAM,CAAC,CAAC,GAAG,GAAI,CAAC;AACzD;AAEO,SAASC,aAAaA,CAAIL,GAAQ,EAAK;EAC1C,IAAMM,aAAa,GAAGN,GAAG,CAACG,IAAI,CAACI,KAAK,CAACJ,IAAI,CAACC,MAAM,CAAC,CAAC,GAAGJ,GAAG,CAACF,MAAM,CAAC,CAAC;EACjE,OAAOQ,aAAa;AACxB;AAGO,SAASE,OAAOA,CAAIC,KAA4C,EAAO;EAC1E,OAAOC,KAAK,CAACC,OAAO,CAACF,KAAK,CAAC,GAAIA,KAAK,CAAWR,KAAK,CAAC,CAAC,CAAC,GAAG,CAACQ,KAAK,CAAC;AACrE;;AAEA;AACA;AACA;AACA;AACO,SAASG,UAAUA,CAAIC,KAAU,EAAEC,SAAiB,EAAS;EAChED,KAAK,GAAGA,KAAK,CAACZ,KAAK,CAAC,CAAC,CAAC;EACtB,IAAMc,GAAU,GAAG,EAAE;EACrB,OAAOF,KAAK,CAACf,MAAM,EAAE;IACjB,IAAMkB,KAAK,GAAGH,KAAK,CAACI,MAAM,CAAC,CAAC,EAAEH,SAAS,CAAC;IACxCC,GAAG,CAACG,IAAI,CAACF,KAAK,CAAC;EACnB;EACA,OAAOD,GAAG;AACd;;AAEA;AACA;AACA;AACO,SAASI,2BAA2BA,CAAItB,EAAO,EAAEuB,SAA4B,EAAO;EACvFvB,EAAE,GAAGA,EAAE,CAACI,KAAK,CAAC,CAAC;EACf,IAAIoB,CAAC,GAAGxB,EAAE,CAACC,MAAM;EACjB,IAAIwB,IAAI,GAAG,KAAK;EAChB,OAAOD,CAAC,EAAE,IAAI,CAACC,IAAI,EAAE;IACjB,IAAIF,SAAS,CAACvB,EAAE,CAACwB,CAAC,CAAC,CAAC,EAAE;MAClBC,IAAI,GAAG,IAAI;MACXzB,EAAE,CAACoB,MAAM,CAACI,CAAC,EAAE,CAAC,CAAC;IACnB;EACJ;EACA,OAAOxB,EAAE;AACb;;AAEA;AACA;AACA;AACO,SAAS0B,oBAAoBA,CAACC,CAAM,EAA6B;EACpE;EACA;EACA;EACA;EACA;EACA;EACA;EACA,OAAOd,KAAK,CAACC,OAAO,CAACa,CAAC,CAAC;AAC3B;AAIO,SAASC,4BAA4BA,CAAIC,GAAQ,EAAEC,GAAQ,EAAW;EACzE,KAAK,IAAIN,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGK,GAAG,CAAC5B,MAAM,EAAEuB,CAAC,EAAE,EAAE;IACjC,IAAMO,EAAE,GAAGF,GAAG,CAACL,CAAC,CAAC;IACjB,IAAMQ,GAAG,GAAGF,GAAG,CAACG,QAAQ,CAACF,EAAE,CAAC;IAC5B,IAAIC,GAAG,EAAE;MACL,OAAO,IAAI;IACf;EACJ;EACA,OAAO,KAAK;AAChB;;AAGA;AACA;AACA;AACA;AACA;AACO,SAASE,mBAAmBA,CAASC,KAAgC,EAAmB;EAC3F,IAAIA,KAAK,KAAK,IAAI,IAAIA,KAAK,KAAKC,SAAS,EAAE;IACvC,OAAO,KAAK;EAChB;EACA,OAAO,IAAI;AACf;AAEO,SAASC,qBAAqBA,CACjCrC,EAAO,EACPsC,UAA0C,EACpC;EACN,IAAIC,KAAK,GAAG,CAAC;EACb,IAAIC,GAAG,GAAG,CAA
C,CAAC;EACZ,KAAK,IAAMC,IAAI,IAAIzC,EAAE,EAAE;IACnBwC,GAAG,GAAGA,GAAG,GAAG,CAAC;IACb,IAAME,QAAQ,GAAGJ,UAAU,CAACG,IAAI,EAAED,GAAG,CAAC;IACtC,IAAIE,QAAQ,EAAE;MACVH,KAAK,GAAGA,KAAK,GAAG,CAAC;IACrB,CAAC,MAAM;MACH;IACJ;EACJ;EACA,OAAOA,KAAK;AAChB;AAEO,eAAeI,WAAWA,CAAI3B,KAAU,EAAE4B,SAAoE,EAAgB;EACjI,IAAMC,OAAO,GAAG,MAAMC,OAAO,CAACC,GAAG,CAC7B/B,KAAK,CAACgC,GAAG,CAACJ,SAAS,CACvB,CAAC;EAED,OAAO5B,KAAK,CAACiC,MAAM,CAAC,CAAC,GAAG,GAAGC,KAAK,CAAC,KAAKL,OAAO,CAACK,KAAK,CAAC,CAAC;AACzD;;AAEA;AACA;AACA;AACO,SAASC,cAAcA,CAACnC,KAAe,EAAU;EACpD,IAAIuB,KAAK,GAAG,CAAC;EACb,KAAK,IAAIf,CAAC,GAAGR,KAAK,CAACf,MAAM,EAAEuB,CAAC,EAAE,GAAG;IAC7Be,KAAK,IAAIvB,KAAK,CAACQ,CAAC,CAAC;EACrB;EACA,OAAOe,KAAK;AAChB;AAEO,SAASa,YAAYA,CAACjD,GAAa,EAAU;EAChD,OAAOG,IAAI,CAAC+C,GAAG,CAAC,GAAGlD,GAAG,CAAC;AAC3B;;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACO,SAASmD,aAAaA,CAAItD,EAAO,EAAEuD,GAAuB,EAAQ;EACrE,IAAMC,MAAM,GAAGD,GAAG,CAACtD,MAAM;EACzB,KAAK,IAAIuB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGgC,MAAM,EAAE,EAAEhC,CAAC,EAAE;IAC7B,IAAMiC,OAAO,GAAGF,GAAG,CAAC/B,CAAC,CAAC;IACtBxB,EAAE,CAACqB,IAAI,CAACoC,OAAO,CAAC;EACpB;AACJ;;AAEA;AACA;AACA;AACO,SAASC,WAAWA,CAACC,MAAgB,EAAY;EACpD,OAAOA,MAAM,CAACV,MAAM,CAAC,UAAUW,IAAI,EAAEC,GAAG,EAAE1D,GAAG,EAAE;IAC3C,OAAOA,GAAG,CAAC2D,OAAO,CAACF,IAAI,CAAC,KAAKC,GAAG;EACpC,CAAC,CAAC;AACN","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-base64.js b/dist/cjs/plugins/utils/utils-base64.js deleted file mode 100644 index a3e0c40ab4e..00000000000 --- a/dist/cjs/plugins/utils/utils-base64.js +++ /dev/null @@ -1,57 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.arrayBufferToBase64 = arrayBufferToBase64; -exports.b64DecodeUnicode = b64DecodeUnicode; -exports.b64EncodeUnicode = b64EncodeUnicode; -exports.base64ToArrayBuffer = base64ToArrayBuffer; -var _jsBase = require("js-base64"); -/** - * NO! We cannot just use btoa() and atob() - * because they do not work correctly with binary data. 
- * @link https://stackoverflow.com/q/30106476/3443137 - */ - -/** - * atob() and btoa() do not work well with non ascii chars, - * so we have to use these helper methods instead. - * @link https://stackoverflow.com/a/30106551/3443137 - */ -// Encoding UTF8 -> base64 -function b64EncodeUnicode(str) { - return (0, _jsBase.encode)(str); -} - -// Decoding base64 -> UTF8 -function b64DecodeUnicode(str) { - return (0, _jsBase.decode)(str); -} - -/** - * @link https://stackoverflow.com/a/9458996/3443137 - */ -function arrayBufferToBase64(buffer) { - var binary = ''; - var bytes = new Uint8Array(buffer); - var len = bytes.byteLength; - for (var i = 0; i < len; i++) { - binary += String.fromCharCode(bytes[i]); - } - return btoa(binary); -} - -/** - * @link https://stackoverflow.com/a/21797381 - */ -function base64ToArrayBuffer(base64) { - var binary_string = atob(base64); - var len = binary_string.length; - var bytes = new Uint8Array(len); - for (var i = 0; i < len; i++) { - bytes[i] = binary_string.charCodeAt(i); - } - return bytes.buffer; -} -//# sourceMappingURL=utils-base64.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-base64.js.map b/dist/cjs/plugins/utils/utils-base64.js.map deleted file mode 100644 index bdb0488c36b..00000000000 --- a/dist/cjs/plugins/utils/utils-base64.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-base64.js","names":["_jsBase","require","b64EncodeUnicode","str","encode","b64DecodeUnicode","decode","arrayBufferToBase64","buffer","binary","bytes","Uint8Array","len","byteLength","i","String","fromCharCode","btoa","base64ToArrayBuffer","base64","binary_string","atob","length","charCodeAt"],"sources":["../../../../src/plugins/utils/utils-base64.ts"],"sourcesContent":["\n/**\n * NO! 
We cannot just use btoa() and atob()\n * because they do not work correctly with binary data.\n * @link https://stackoverflow.com/q/30106476/3443137\n */\nimport { encode, decode } from 'js-base64';\n\n/**\n * atob() and btoa() do not work well with non ascii chars,\n * so we have to use these helper methods instead.\n * @link https://stackoverflow.com/a/30106551/3443137\n */\n// Encoding UTF8 -> base64\nexport function b64EncodeUnicode(str: string) {\n return encode(str);\n}\n\n// Decoding base64 -> UTF8\nexport function b64DecodeUnicode(str: string) {\n return decode(str);\n}\n\n/**\n * @link https://stackoverflow.com/a/9458996/3443137\n */\nexport function arrayBufferToBase64(buffer: ArrayBuffer) {\n let binary = '';\n const bytes = new Uint8Array(buffer);\n const len = bytes.byteLength;\n for (let i = 0; i < len; i++) {\n binary += String.fromCharCode(bytes[i]);\n }\n return btoa(binary);\n}\n\n\n/**\n * @link https://stackoverflow.com/a/21797381\n */\nexport function base64ToArrayBuffer(base64: string): ArrayBuffer {\n const binary_string = atob(base64);\n const len = binary_string.length;\n const bytes = new Uint8Array(len);\n for (let i = 0; i < len; i++) {\n bytes[i] = binary_string.charCodeAt(i);\n }\n return 
bytes.buffer;\n}\n"],"mappings":";;;;;;;;;AAMA,IAAAA,OAAA,GAAAC,OAAA;AALA;AACA;AACA;AACA;AACA;;AAGA;AACA;AACA;AACA;AACA;AACA;AACO,SAASC,gBAAgBA,CAACC,GAAW,EAAE;EAC1C,OAAO,IAAAC,cAAM,EAACD,GAAG,CAAC;AACtB;;AAEA;AACO,SAASE,gBAAgBA,CAACF,GAAW,EAAE;EAC1C,OAAO,IAAAG,cAAM,EAACH,GAAG,CAAC;AACtB;;AAEA;AACA;AACA;AACO,SAASI,mBAAmBA,CAACC,MAAmB,EAAE;EACrD,IAAIC,MAAM,GAAG,EAAE;EACf,IAAMC,KAAK,GAAG,IAAIC,UAAU,CAACH,MAAM,CAAC;EACpC,IAAMI,GAAG,GAAGF,KAAK,CAACG,UAAU;EAC5B,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGF,GAAG,EAAEE,CAAC,EAAE,EAAE;IAC1BL,MAAM,IAAIM,MAAM,CAACC,YAAY,CAACN,KAAK,CAACI,CAAC,CAAC,CAAC;EAC3C;EACA,OAAOG,IAAI,CAACR,MAAM,CAAC;AACvB;;AAGA;AACA;AACA;AACO,SAASS,mBAAmBA,CAACC,MAAc,EAAe;EAC7D,IAAMC,aAAa,GAAGC,IAAI,CAACF,MAAM,CAAC;EAClC,IAAMP,GAAG,GAAGQ,aAAa,CAACE,MAAM;EAChC,IAAMZ,KAAK,GAAG,IAAIC,UAAU,CAACC,GAAG,CAAC;EACjC,KAAK,IAAIE,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGF,GAAG,EAAEE,CAAC,EAAE,EAAE;IAC1BJ,KAAK,CAACI,CAAC,CAAC,GAAGM,aAAa,CAACG,UAAU,CAACT,CAAC,CAAC;EAC1C;EACA,OAAOJ,KAAK,CAACF,MAAM;AACvB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-blob.js b/dist/cjs/plugins/utils/utils-blob.js deleted file mode 100644 index a70f0cedfc8..00000000000 --- a/dist/cjs/plugins/utils/utils-blob.js +++ /dev/null @@ -1,66 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.blobToBase64String = blobToBase64String; -exports.blobToString = blobToString; -exports.createBlob = createBlob; -exports.createBlobFromBase64 = createBlobFromBase64; -exports.getBlobSize = getBlobSize; -var _utilsBase = require("./utils-base64.js"); -/** - * Since RxDB 13.0.0 we only use Blob instead of falling back to Buffer, - * because Node.js >18 supports Blobs anyway. 
- */ -/** - * depending if we are on node or browser, - * we have to use Buffer(node) or Blob(browser) - */ -function createBlob(data, type) { - var blob = new Blob([data], { - type - }); - return blob; -} -async function createBlobFromBase64(base64String, type) { - var base64Response = await fetch("data:" + type + ";base64," + base64String); - var blob = await base64Response.blob(); - return blob; -} -function blobToString(blob) { - /** - * in the electron-renderer we have a typed array instead of a blob - * so we have to transform it. - * @link https://github.com/pubkey/rxdb/issues/1371 - */ - var blobType = Object.prototype.toString.call(blob); - if (blobType === '[object Uint8Array]') { - blob = new Blob([blob]); - } - if (typeof blob === 'string') { - return Promise.resolve(blob); - } - return blob.text(); -} -async function blobToBase64String(blob) { - if (typeof blob === 'string') { - return blob; - } - - /** - * in the electron-renderer we have a typed array instead of a blob - * so we have to transform it. 
- * @link https://github.com/pubkey/rxdb/issues/1371 - */ - var blobType = Object.prototype.toString.call(blob); - if (blobType === '[object Uint8Array]') { - blob = new Blob([blob]); - } - var arrayBuffer = await blob.arrayBuffer(); - return (0, _utilsBase.arrayBufferToBase64)(arrayBuffer); -} -function getBlobSize(blob) { - return blob.size; -} -//# sourceMappingURL=utils-blob.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-blob.js.map b/dist/cjs/plugins/utils/utils-blob.js.map deleted file mode 100644 index 32c7aaefa32..00000000000 --- a/dist/cjs/plugins/utils/utils-blob.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-blob.js","names":["_utilsBase","require","createBlob","data","type","blob","Blob","createBlobFromBase64","base64String","base64Response","fetch","blobToString","blobType","Object","prototype","toString","call","Promise","resolve","text","blobToBase64String","arrayBuffer","arrayBufferToBase64","getBlobSize","size"],"sources":["../../../../src/plugins/utils/utils-blob.ts"],"sourcesContent":["import { arrayBufferToBase64 } from './utils-base64.ts';\n\n/**\n * Since RxDB 13.0.0 we only use Blob instead of falling back to Buffer,\n * because Node.js >18 supports Blobs anyway.\n */\n/**\n * depending if we are on node or browser,\n * we have to use Buffer(node) or Blob(browser)\n */\nexport function createBlob(\n data: string,\n type: string\n): Blob {\n const blob = new Blob([data], {\n type\n });\n return blob;\n}\n\nexport async function createBlobFromBase64(\n base64String: string,\n type: string\n): Promise {\n const base64Response = await fetch(`data:${type};base64,${base64String}`);\n const blob = await base64Response.blob();\n return blob;\n}\n\nexport function blobToString(blob: Blob | string): Promise {\n /**\n * in the electron-renderer we have a typed array instead of a blob\n * so we have to transform it.\n * @link https://github.com/pubkey/rxdb/issues/1371\n */\n const blobType = 
Object.prototype.toString.call(blob);\n if (blobType === '[object Uint8Array]') {\n blob = new Blob([blob]);\n }\n if (typeof blob === 'string') {\n return Promise.resolve(blob);\n }\n\n return blob.text();\n}\n\nexport async function blobToBase64String(blob: Blob | string): Promise {\n if (typeof blob === 'string') {\n return blob;\n }\n\n /**\n * in the electron-renderer we have a typed array instead of a blob\n * so we have to transform it.\n * @link https://github.com/pubkey/rxdb/issues/1371\n */\n const blobType = Object.prototype.toString.call(blob);\n if (blobType === '[object Uint8Array]') {\n blob = new Blob([blob]);\n }\n\n const arrayBuffer = await blob.arrayBuffer();\n return arrayBufferToBase64(arrayBuffer);\n}\n\nexport function getBlobSize(blob: Blob): number {\n return blob.size;\n}\n"],"mappings":";;;;;;;;;;AAAA,IAAAA,UAAA,GAAAC,OAAA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACO,SAASC,UAAUA,CACtBC,IAAY,EACZC,IAAY,EACR;EACJ,IAAMC,IAAI,GAAG,IAAIC,IAAI,CAAC,CAACH,IAAI,CAAC,EAAE;IAC1BC;EACJ,CAAC,CAAC;EACF,OAAOC,IAAI;AACf;AAEO,eAAeE,oBAAoBA,CACtCC,YAAoB,EACpBJ,IAAY,EACC;EACb,IAAMK,cAAc,GAAG,MAAMC,KAAK,WAASN,IAAI,gBAAWI,YAAc,CAAC;EACzE,IAAMH,IAAI,GAAG,MAAMI,cAAc,CAACJ,IAAI,CAAC,CAAC;EACxC,OAAOA,IAAI;AACf;AAEO,SAASM,YAAYA,CAACN,IAAmB,EAAmB;EAC/D;AACJ;AACA;AACA;AACA;EACI,IAAMO,QAAQ,GAAGC,MAAM,CAACC,SAAS,CAACC,QAAQ,CAACC,IAAI,CAACX,IAAI,CAAC;EACrD,IAAIO,QAAQ,KAAK,qBAAqB,EAAE;IACpCP,IAAI,GAAG,IAAIC,IAAI,CAAC,CAACD,IAAI,CAAC,CAAC;EAC3B;EACA,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;IAC1B,OAAOY,OAAO,CAACC,OAAO,CAACb,IAAI,CAAC;EAChC;EAEA,OAAOA,IAAI,CAACc,IAAI,CAAC,CAAC;AACtB;AAEO,eAAeC,kBAAkBA,CAACf,IAAmB,EAAmB;EAC3E,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;IAC1B,OAAOA,IAAI;EACf;;EAEA;AACJ;AACA;AACA;AACA;EACI,IAAMO,QAAQ,GAAGC,MAAM,CAACC,SAAS,CAACC,QAAQ,CAACC,IAAI,CAACX,IAAI,CAAC;EACrD,IAAIO,QAAQ,KAAK,qBAAqB,EAAE;IACpCP,IAAI,GAAG,IAAIC,IAAI,CAAC,CAACD,IAAI,CAAC,CAAC;EAC3B;EAEA,IAAMgB,WAAW,GAAG,MAAMhB,IAAI,CAACgB,WAAW,CAAC,CAAC;EAC5C,OAAO,IAAAC,8BAAmB,EAACD,WAAW,CAAC;AAC3C;AAEO,SAASE,W
AAWA,CAAClB,IAAU,EAAU;EAC5C,OAAOA,IAAI,CAACmB,IAAI;AACpB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-document.js b/dist/cjs/plugins/utils/utils-document.js deleted file mode 100644 index 5f9cd6adf83..00000000000 --- a/dist/cjs/plugins/utils/utils-document.js +++ /dev/null @@ -1,100 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RX_META_LWT_MINIMUM = void 0; -exports.areRxDocumentArraysEqual = areRxDocumentArraysEqual; -exports.getDefaultRevision = getDefaultRevision; -exports.getDefaultRxDocumentMeta = getDefaultRxDocumentMeta; -exports.getSortDocumentsByLastWriteTimeComparator = getSortDocumentsByLastWriteTimeComparator; -exports.sortDocumentsByLastWriteTime = sortDocumentsByLastWriteTime; -exports.stripMetaDataFromDocument = stripMetaDataFromDocument; -exports.toWithDeleted = toWithDeleted; -var _utilsObject = require("./utils-object.js"); -/** - * We use 1 as minimum so that the value is never falsy. - * This const is used in several places because querying - * with a value lower then the minimum could give false results. - */ -var RX_META_LWT_MINIMUM = exports.RX_META_LWT_MINIMUM = 1; -function getDefaultRxDocumentMeta() { - return { - /** - * Set this to 1 to not waste performance - * while calling new Date().. - * The storage wrappers will anyway update - * the lastWrite time while calling transformDocumentDataFromRxDBToRxStorage() - */ - lwt: RX_META_LWT_MINIMUM - }; -} - -/** - * Returns a revision that is not valid. - * Use this to have correct typings - * while the storage wrapper anyway will overwrite the revision. - */ -function getDefaultRevision() { - /** - * Use a non-valid revision format, - * to ensure that the RxStorage will throw - * when the revision is not replaced downstream. 
- */ - return ''; -} -function stripMetaDataFromDocument(docData) { - return Object.assign({}, docData, { - _meta: undefined, - _deleted: undefined, - _rev: undefined - }); -} - -/** - * Faster way to check the equality of document lists - * compared to doing a deep-equal. - * Here we only check the ids and revisions. - */ -function areRxDocumentArraysEqual(primaryPath, ar1, ar2) { - if (ar1.length !== ar2.length) { - return false; - } - var i = 0; - var len = ar1.length; - while (i < len) { - var row1 = ar1[i]; - var row2 = ar2[i]; - i++; - if (row1._rev !== row2._rev || row1[primaryPath] !== row2[primaryPath]) { - return false; - } - } - return true; -} -function getSortDocumentsByLastWriteTimeComparator(primaryPath) { - return (a, b) => { - if (a._meta.lwt === b._meta.lwt) { - if (b[primaryPath] < a[primaryPath]) { - return 1; - } else { - return -1; - } - } else { - return a._meta.lwt - b._meta.lwt; - } - }; -} -function sortDocumentsByLastWriteTime(primaryPath, docs) { - return docs.sort(getSortDocumentsByLastWriteTimeComparator(primaryPath)); -} -function toWithDeleted(docData) { - docData = (0, _utilsObject.flatClone)(docData); - docData._deleted = !!docData._deleted; - return Object.assign(docData, { - _attachments: undefined, - _meta: undefined, - _rev: undefined - }); -} -//# sourceMappingURL=utils-document.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-document.js.map b/dist/cjs/plugins/utils/utils-document.js.map deleted file mode 100644 index 6a9a0f44683..00000000000 --- a/dist/cjs/plugins/utils/utils-document.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"utils-document.js","names":["_utilsObject","require","RX_META_LWT_MINIMUM","exports","getDefaultRxDocumentMeta","lwt","getDefaultRevision","stripMetaDataFromDocument","docData","Object","assign","_meta","undefined","_deleted","_rev","areRxDocumentArraysEqual","primaryPath","ar1","ar2","length","i","len","row1","row2","getSortDocumentsByLastWriteTimeComparator","a","b","sortDocumentsByLastWriteTime","docs","sort","toWithDeleted","flatClone","_attachments"],"sources":["../../../../src/plugins/utils/utils-document.ts"],"sourcesContent":["import type {\n DeepReadonly,\n RxDocumentData,\n RxDocumentMeta,\n StringKeys,\n WithDeleted,\n WithDeletedAndAttachments\n} from '../../types/index.d.ts';\nimport { flatClone } from './utils-object.ts';\n/**\n * We use 1 as minimum so that the value is never falsy.\n * This const is used in several places because querying\n * with a value lower then the minimum could give false results.\n */\nexport const RX_META_LWT_MINIMUM = 1;\n\nexport function getDefaultRxDocumentMeta(): RxDocumentMeta {\n return {\n /**\n * Set this to 1 to not waste performance\n * while calling new Date()..\n * The storage wrappers will anyway update\n * the lastWrite time while calling transformDocumentDataFromRxDBToRxStorage()\n */\n lwt: RX_META_LWT_MINIMUM\n };\n}\n\n/**\n * Returns a revision that is not valid.\n * Use this to have correct typings\n * while the storage wrapper anyway will overwrite the revision.\n */\nexport function getDefaultRevision(): string {\n /**\n * Use a non-valid revision format,\n * to ensure that the RxStorage will throw\n * when the revision is not replaced downstream.\n */\n return '';\n}\n\n\nexport function stripMetaDataFromDocument(docData: RxDocumentData): RxDocType {\n return Object.assign({}, docData, {\n _meta: undefined,\n _deleted: undefined,\n _rev: undefined\n });\n}\n\n\n/**\n * Faster way to check the equality of document lists\n * compared to doing a deep-equal.\n * Here we only check 
the ids and revisions.\n */\nexport function areRxDocumentArraysEqual(\n primaryPath: StringKeys>,\n ar1: RxDocumentData[],\n ar2: RxDocumentData[]\n): boolean {\n if (ar1.length !== ar2.length) {\n return false;\n }\n let i = 0;\n const len = ar1.length;\n while (i < len) {\n const row1 = ar1[i];\n const row2 = ar2[i];\n i++;\n\n if (\n row1._rev !== row2._rev ||\n row1[primaryPath] !== row2[primaryPath]\n ) {\n return false;\n }\n }\n return true;\n}\n\n\n\nexport function getSortDocumentsByLastWriteTimeComparator(primaryPath: string) {\n return (a: RxDocumentData, b: RxDocumentData) => {\n if (a._meta.lwt === b._meta.lwt) {\n if ((b as any)[primaryPath] < (a as any)[primaryPath]) {\n return 1;\n } else {\n return -1;\n }\n } else {\n return a._meta.lwt - b._meta.lwt;\n }\n };\n}\nexport function sortDocumentsByLastWriteTime(\n primaryPath: string,\n docs: RxDocumentData[]\n): RxDocumentData[] {\n return docs.sort(getSortDocumentsByLastWriteTimeComparator(primaryPath));\n}\n\ntype AnyDocFormat = RxDocType | WithDeleted | RxDocumentData | WithDeletedAndAttachments;\nexport function toWithDeleted(\n docData: AnyDocFormat | DeepReadonly>\n): WithDeleted {\n docData = flatClone(docData);\n (docData as any)._deleted = !!(docData as any)._deleted;\n return Object.assign(docData as any, {\n _attachments: undefined,\n _meta: undefined,\n _rev: undefined\n }) as 
any;\n}\n"],"mappings":";;;;;;;;;;;;;AAQA,IAAAA,YAAA,GAAAC,OAAA;AACA;AACA;AACA;AACA;AACA;AACO,IAAMC,mBAAmB,GAAAC,OAAA,CAAAD,mBAAA,GAAG,CAAC;AAE7B,SAASE,wBAAwBA,CAAA,EAAmB;EACvD,OAAO;IACH;AACR;AACA;AACA;AACA;AACA;IACQC,GAAG,EAAEH;EACT,CAAC;AACL;;AAEA;AACA;AACA;AACA;AACA;AACO,SAASI,kBAAkBA,CAAA,EAAW;EACzC;AACJ;AACA;AACA;AACA;EACI,OAAO,EAAE;AACb;AAGO,SAASC,yBAAyBA,CAAYC,OAAkC,EAAa;EAChG,OAAOC,MAAM,CAACC,MAAM,CAAC,CAAC,CAAC,EAAEF,OAAO,EAAE;IAC9BG,KAAK,EAAEC,SAAS;IAChBC,QAAQ,EAAED,SAAS;IACnBE,IAAI,EAAEF;EACV,CAAC,CAAC;AACN;;AAGA;AACA;AACA;AACA;AACA;AACO,SAASG,wBAAwBA,CACpCC,WAAkD,EAClDC,GAAgC,EAChCC,GAAgC,EACzB;EACP,IAAID,GAAG,CAACE,MAAM,KAAKD,GAAG,CAACC,MAAM,EAAE;IAC3B,OAAO,KAAK;EAChB;EACA,IAAIC,CAAC,GAAG,CAAC;EACT,IAAMC,GAAG,GAAGJ,GAAG,CAACE,MAAM;EACtB,OAAOC,CAAC,GAAGC,GAAG,EAAE;IACZ,IAAMC,IAAI,GAAGL,GAAG,CAACG,CAAC,CAAC;IACnB,IAAMG,IAAI,GAAGL,GAAG,CAACE,CAAC,CAAC;IACnBA,CAAC,EAAE;IAEH,IACIE,IAAI,CAACR,IAAI,KAAKS,IAAI,CAACT,IAAI,IACvBQ,IAAI,CAACN,WAAW,CAAC,KAAKO,IAAI,CAACP,WAAW,CAAC,EACzC;MACE,OAAO,KAAK;IAChB;EACJ;EACA,OAAO,IAAI;AACf;AAIO,SAASQ,yCAAyCA,CAAYR,WAAmB,EAAE;EACtF,OAAO,CAACS,CAA4B,EAAEC,CAA4B,KAAK;IACnE,IAAID,CAAC,CAACd,KAAK,CAACN,GAAG,KAAKqB,CAAC,CAACf,KAAK,CAACN,GAAG,EAAE;MAC7B,IAAKqB,CAAC,CAASV,WAAW,CAAC,GAAIS,CAAC,CAAST,WAAW,CAAC,EAAE;QACnD,OAAO,CAAC;MACZ,CAAC,MAAM;QACH,OAAO,CAAC,CAAC;MACb;IACJ,CAAC,MAAM;MACH,OAAOS,CAAC,CAACd,KAAK,CAACN,GAAG,GAAGqB,CAAC,CAACf,KAAK,CAACN,GAAG;IACpC;EACJ,CAAC;AACL;AACO,SAASsB,4BAA4BA,CACxCX,WAAmB,EACnBY,IAAiC,EACN;EAC3B,OAAOA,IAAI,CAACC,IAAI,CAACL,yCAAyC,CAACR,WAAW,CAAC,CAAC;AAC5E;AAGO,SAASc,aAAaA,CACzBtB,OAAwE,EAClD;EACtBA,OAAO,GAAG,IAAAuB,sBAAS,EAACvB,OAAO,CAAC;EAC3BA,OAAO,CAASK,QAAQ,GAAG,CAAC,CAAEL,OAAO,CAASK,QAAQ;EACvD,OAAOJ,MAAM,CAACC,MAAM,CAACF,OAAO,EAAS;IACjCwB,YAAY,EAAEpB,SAAS;IACvBD,KAAK,EAAEC,SAAS;IAChBE,IAAI,EAAEF;EACV,CAAC,CAAC;AACN","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-error.js b/dist/cjs/plugins/utils/utils-error.js deleted file mode 100644 index 
bdfc8d22308..00000000000 --- a/dist/cjs/plugins/utils/utils-error.js +++ /dev/null @@ -1,42 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.errorToPlainJson = errorToPlainJson; -exports.pluginMissing = pluginMissing; -var _utilsString = require("./utils-string.js"); -/** - * Returns an error that indicates that a plugin is missing - * We do not throw a RxError because this should not be handled - * programmatically but by using the correct import - */ -function pluginMissing(pluginKey) { - var keyParts = pluginKey.split('-'); - var pluginName = 'RxDB'; - keyParts.forEach(part => { - pluginName += (0, _utilsString.ucfirst)(part); - }); - pluginName += 'Plugin'; - return new Error("You are using a function which must be overwritten by a plugin.\n You should either prevent the usage of this function or add the plugin via:\n import { " + pluginName + " } from 'rxdb/plugins/" + pluginKey + "';\n addRxPlugin(" + pluginName + ");\n "); -} -function errorToPlainJson(err) { - var ret = { - name: err.name, - message: err.message, - rxdb: err.rxdb, - parameters: err.parameters, - extensions: err.extensions, - code: err.code, - url: err.url, - /** - * stack must be last to make it easier to read the json in a console. - * Also we ensure that each linebreak is spaced so that the chrome devtools - * shows urls to the source code that can be clicked to inspect - * the correct place in the code. - */ - stack: !err.stack ? 
undefined : err.stack.replace(/\n/g, ' \n ') - }; - return ret; -} -//# sourceMappingURL=utils-error.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-error.js.map b/dist/cjs/plugins/utils/utils-error.js.map deleted file mode 100644 index eae738f028e..00000000000 --- a/dist/cjs/plugins/utils/utils-error.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-error.js","names":["_utilsString","require","pluginMissing","pluginKey","keyParts","split","pluginName","forEach","part","ucfirst","Error","errorToPlainJson","err","ret","name","message","rxdb","parameters","extensions","code","url","stack","undefined","replace"],"sources":["../../../../src/plugins/utils/utils-error.ts"],"sourcesContent":["import type {\n PlainJsonError,\n RxError,\n RxTypeError\n} from '../../types/index.d.ts';\nimport { ucfirst } from './utils-string.ts';\n\n\n\n/**\n * Returns an error that indicates that a plugin is missing\n * We do not throw a RxError because this should not be handled\n * programmatically but by using the correct import\n */\nexport function pluginMissing(\n pluginKey: string\n): Error {\n const keyParts = pluginKey.split('-');\n let pluginName = 'RxDB';\n keyParts.forEach(part => {\n pluginName += ucfirst(part);\n });\n pluginName += 'Plugin';\n return new Error(\n `You are using a function which must be overwritten by a plugin.\n You should either prevent the usage of this function or add the plugin via:\n import { ${pluginName} } from 'rxdb/plugins/${pluginKey}';\n addRxPlugin(${pluginName});\n `\n );\n}\n\n\n\nexport function errorToPlainJson(err: Error | TypeError | RxError | RxTypeError): PlainJsonError {\n const ret: PlainJsonError = {\n name: err.name,\n message: err.message,\n rxdb: (err as any).rxdb,\n parameters: (err as RxError).parameters,\n extensions: (err as any).extensions,\n code: (err as RxError).code,\n url: (err as RxError).url,\n /**\n * stack must be last to make it easier to read the json in a console.\n * Also we 
ensure that each linebreak is spaced so that the chrome devtools\n * shows urls to the source code that can be clicked to inspect\n * the correct place in the code.\n */\n stack: !err.stack ? undefined : err.stack.replace(/\\n/g, ' \\n ')\n };\n return ret;\n}\n"],"mappings":";;;;;;;AAKA,IAAAA,YAAA,GAAAC,OAAA;AAIA;AACA;AACA;AACA;AACA;AACO,SAASC,aAAaA,CACzBC,SAAiB,EACZ;EACL,IAAMC,QAAQ,GAAGD,SAAS,CAACE,KAAK,CAAC,GAAG,CAAC;EACrC,IAAIC,UAAU,GAAG,MAAM;EACvBF,QAAQ,CAACG,OAAO,CAACC,IAAI,IAAI;IACrBF,UAAU,IAAI,IAAAG,oBAAO,EAACD,IAAI,CAAC;EAC/B,CAAC,CAAC;EACFF,UAAU,IAAI,QAAQ;EACtB,OAAO,IAAII,KAAK,iLAGGJ,UAAU,8BAAyBH,SAAS,oCACzCG,UAAU,iBAEhC,CAAC;AACL;AAIO,SAASK,gBAAgBA,CAACC,GAA8C,EAAkB;EAC7F,IAAMC,GAAmB,GAAG;IACxBC,IAAI,EAAEF,GAAG,CAACE,IAAI;IACdC,OAAO,EAAEH,GAAG,CAACG,OAAO;IACpBC,IAAI,EAAGJ,GAAG,CAASI,IAAI;IACvBC,UAAU,EAAGL,GAAG,CAAaK,UAAU;IACvCC,UAAU,EAAGN,GAAG,CAASM,UAAU;IACnCC,IAAI,EAAGP,GAAG,CAAaO,IAAI;IAC3BC,GAAG,EAAGR,GAAG,CAAaQ,GAAG;IACzB;AACR;AACA;AACA;AACA;AACA;IACQC,KAAK,EAAE,CAACT,GAAG,CAACS,KAAK,GAAGC,SAAS,GAAGV,GAAG,CAACS,KAAK,CAACE,OAAO,CAAC,KAAK,EAAE,MAAM;EACnE,CAAC;EACD,OAAOV,GAAG;AACd","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-global.js b/dist/cjs/plugins/utils/utils-global.js deleted file mode 100644 index c80c4463756..00000000000 --- a/dist/cjs/plugins/utils/utils-global.js +++ /dev/null @@ -1,13 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RXDB_UTILS_GLOBAL = exports.PREMIUM_FLAG_HASH = void 0; -/** - * Can be used by some plugins to have a "global" object that - * can be imported and mutated at will. 
- */ -var RXDB_UTILS_GLOBAL = exports.RXDB_UTILS_GLOBAL = {}; -var PREMIUM_FLAG_HASH = exports.PREMIUM_FLAG_HASH = '6da4936d1425ff3a5c44c02342c6daf791d266be3ae8479b8ec59e261df41b93'; -//# sourceMappingURL=utils-global.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-global.js.map b/dist/cjs/plugins/utils/utils-global.js.map deleted file mode 100644 index 3fa18aca035..00000000000 --- a/dist/cjs/plugins/utils/utils-global.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-global.js","names":["RXDB_UTILS_GLOBAL","exports","PREMIUM_FLAG_HASH"],"sources":["../../../../src/plugins/utils/utils-global.ts"],"sourcesContent":["/**\n * Can be used by some plugins to have a \"global\" object that\n * can be imported and mutated at will.\n */\nexport const RXDB_UTILS_GLOBAL: any = {};\n\n\nexport const PREMIUM_FLAG_HASH = '6da4936d1425ff3a5c44c02342c6daf791d266be3ae8479b8ec59e261df41b93';\n"],"mappings":";;;;;;AAAA;AACA;AACA;AACA;AACO,IAAMA,iBAAsB,GAAAC,OAAA,CAAAD,iBAAA,GAAG,CAAC,CAAC;AAGjC,IAAME,iBAAiB,GAAAD,OAAA,CAAAC,iBAAA,GAAG,kEAAkE","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-hash.js b/dist/cjs/plugins/utils/utils-hash.js deleted file mode 100644 index 90773c345cf..00000000000 --- a/dist/cjs/plugins/utils/utils-hash.js +++ /dev/null @@ -1,49 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.defaultHashSha256 = exports.canUseCryptoSubtle = void 0; -exports.hashStringToNumber = hashStringToNumber; -exports.jsSha256 = jsSha256; -exports.nativeSha256 = nativeSha256; -var _ohash = require("ohash"); -/** - * TODO in the future we should no longer provide a - * fallback to crypto.subtle.digest. - * Instead users without crypto.subtle.digest support, should have to provide their own - * hash function. 
- */ -function jsSha256(input) { - return Promise.resolve((0, _ohash.sha256)(input)); -} -async function nativeSha256(input) { - var data = new TextEncoder().encode(input); - var hashBuffer = await crypto.subtle.digest('SHA-256', data); - /** - * @link https://jameshfisher.com/2017/10/30/web-cryptography-api-hello-world/ - */ - var hash = Array.prototype.map.call(new Uint8Array(hashBuffer), x => ('00' + x.toString(16)).slice(-2)).join(''); - return hash; -} -var canUseCryptoSubtle = exports.canUseCryptoSubtle = typeof crypto !== 'undefined' && typeof crypto.subtle !== 'undefined' && typeof crypto.subtle.digest === 'function'; - -/** - * Default hash method used to hash - * strings and do equal comparisons. - * - * IMPORTANT: Changing the default hashing method - * requires a BREAKING change! - */ - -var defaultHashSha256 = exports.defaultHashSha256 = canUseCryptoSubtle ? nativeSha256 : jsSha256; -function hashStringToNumber(str) { - var nr = 0; - var len = str.length; - for (var i = 0; i < len; i++) { - nr = nr + str.charCodeAt(i); - nr |= 0; // Convert to 32bit integer, improves performance - } - return nr; -} -//# sourceMappingURL=utils-hash.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-hash.js.map b/dist/cjs/plugins/utils/utils-hash.js.map deleted file mode 100644 index 4a2f83da670..00000000000 --- a/dist/cjs/plugins/utils/utils-hash.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-hash.js","names":["_ohash","require","jsSha256","input","Promise","resolve","sha256","nativeSha256","data","TextEncoder","encode","hashBuffer","crypto","subtle","digest","hash","Array","prototype","map","call","Uint8Array","x","toString","slice","join","canUseCryptoSubtle","exports","defaultHashSha256","hashStringToNumber","str","nr","len","length","i","charCodeAt"],"sources":["../../../../src/plugins/utils/utils-hash.ts"],"sourcesContent":["import { sha256 } from 'ohash';\nimport type { HashFunction } from 
'../../types/index.d.ts';\n\n\n/**\n * TODO in the future we should no longer provide a\n * fallback to crypto.subtle.digest.\n * Instead users without crypto.subtle.digest support, should have to provide their own\n * hash function.\n */\nexport function jsSha256(input: string) {\n return Promise.resolve(sha256(input));\n}\n\nexport async function nativeSha256(input: string) {\n const data = new TextEncoder().encode(input);\n const hashBuffer = await crypto.subtle.digest('SHA-256', data);\n /**\n * @link https://jameshfisher.com/2017/10/30/web-cryptography-api-hello-world/\n */\n const hash = Array.prototype.map.call(\n new Uint8Array(hashBuffer),\n x => (('00' + x.toString(16)).slice(-2))\n ).join('');\n return hash;\n}\n\n\nexport const canUseCryptoSubtle = typeof crypto !== 'undefined' &&\n typeof crypto.subtle !== 'undefined' &&\n typeof crypto.subtle.digest === 'function';\n\n/**\n * Default hash method used to hash\n * strings and do equal comparisons.\n *\n * IMPORTANT: Changing the default hashing method\n * requires a BREAKING change!\n */\n\nexport const defaultHashSha256: HashFunction = canUseCryptoSubtle ? 
nativeSha256 : jsSha256;\n\n\nexport function hashStringToNumber(str: string): number {\n let nr = 0;\n const len = str.length;\n for (let i = 0; i < len; i++) {\n nr = nr + str.charCodeAt(i);\n nr |= 0; // Convert to 32bit integer, improves performance\n }\n return nr;\n}\n"],"mappings":";;;;;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AAIA;AACA;AACA;AACA;AACA;AACA;AACO,SAASC,QAAQA,CAACC,KAAa,EAAE;EACpC,OAAOC,OAAO,CAACC,OAAO,CAAC,IAAAC,aAAM,EAACH,KAAK,CAAC,CAAC;AACzC;AAEO,eAAeI,YAAYA,CAACJ,KAAa,EAAE;EAC9C,IAAMK,IAAI,GAAG,IAAIC,WAAW,CAAC,CAAC,CAACC,MAAM,CAACP,KAAK,CAAC;EAC5C,IAAMQ,UAAU,GAAG,MAAMC,MAAM,CAACC,MAAM,CAACC,MAAM,CAAC,SAAS,EAAEN,IAAI,CAAC;EAC9D;AACJ;AACA;EACI,IAAMO,IAAI,GAAGC,KAAK,CAACC,SAAS,CAACC,GAAG,CAACC,IAAI,CACjC,IAAIC,UAAU,CAACT,UAAU,CAAC,EAC1BU,CAAC,IAAK,CAAC,IAAI,GAAGA,CAAC,CAACC,QAAQ,CAAC,EAAE,CAAC,EAAEC,KAAK,CAAC,CAAC,CAAC,CAC1C,CAAC,CAACC,IAAI,CAAC,EAAE,CAAC;EACV,OAAOT,IAAI;AACf;AAGO,IAAMU,kBAAkB,GAAAC,OAAA,CAAAD,kBAAA,GAAG,OAAOb,MAAM,KAAK,WAAW,IAC3D,OAAOA,MAAM,CAACC,MAAM,KAAK,WAAW,IACpC,OAAOD,MAAM,CAACC,MAAM,CAACC,MAAM,KAAK,UAAU;;AAE9C;AACA;AACA;AACA;AACA;AACA;AACA;;AAEO,IAAMa,iBAA+B,GAAAD,OAAA,CAAAC,iBAAA,GAAGF,kBAAkB,GAAGlB,YAAY,GAAGL,QAAQ;AAGpF,SAAS0B,kBAAkBA,CAACC,GAAW,EAAU;EACpD,IAAIC,EAAE,GAAG,CAAC;EACV,IAAMC,GAAG,GAAGF,GAAG,CAACG,MAAM;EACtB,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGF,GAAG,EAAEE,CAAC,EAAE,EAAE;IAC1BH,EAAE,GAAGA,EAAE,GAAGD,GAAG,CAACK,UAAU,CAACD,CAAC,CAAC;IAC3BH,EAAE,IAAI,CAAC,CAAC,CAAC;EACb;EACA,OAAOA,EAAE;AACb","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-map.js b/dist/cjs/plugins/utils/utils-map.js deleted file mode 100644 index 05de58012fa..00000000000 --- a/dist/cjs/plugins/utils/utils-map.js +++ /dev/null @@ -1,25 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.getFromMapOrCreate = getFromMapOrCreate; -exports.getFromMapOrThrow = getFromMapOrThrow; -function getFromMapOrThrow(map, key) { - var val = map.get(key); - if (typeof val === 
'undefined') { - throw new Error('missing value from map ' + key); - } - return val; -} -function getFromMapOrCreate(map, index, creator, ifWasThere) { - var value = map.get(index); - if (typeof value === 'undefined') { - value = creator(); - map.set(index, value); - } else if (ifWasThere) { - ifWasThere(value); - } - return value; -} -//# sourceMappingURL=utils-map.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-map.js.map b/dist/cjs/plugins/utils/utils-map.js.map deleted file mode 100644 index 089c96d3282..00000000000 --- a/dist/cjs/plugins/utils/utils-map.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-map.js","names":["getFromMapOrThrow","map","key","val","get","Error","getFromMapOrCreate","index","creator","ifWasThere","value","set"],"sources":["../../../../src/plugins/utils/utils-map.ts"],"sourcesContent":["\n\n\nexport function getFromMapOrThrow(map: Map | WeakMap, key: K): V {\n const val = map.get(key);\n if (typeof val === 'undefined') {\n throw new Error('missing value from map ' + key);\n }\n return val;\n}\n\nexport function getFromMapOrCreate(\n map: Map | WeakMap,\n index: MapIndex,\n creator: () => MapValue,\n ifWasThere?: (value: MapValue) => void\n): MapValue {\n let value = map.get(index);\n if (typeof value === 'undefined') {\n value = creator();\n map.set(index, value);\n } else if (ifWasThere) {\n ifWasThere(value);\n }\n return 
value;\n}\n"],"mappings":";;;;;;;AAGO,SAASA,iBAAiBA,CAAOC,GAAgC,EAAEC,GAAM,EAAK;EACjF,IAAMC,GAAG,GAAGF,GAAG,CAACG,GAAG,CAACF,GAAG,CAAC;EACxB,IAAI,OAAOC,GAAG,KAAK,WAAW,EAAE;IAC5B,MAAM,IAAIE,KAAK,CAAC,yBAAyB,GAAGH,GAAG,CAAC;EACpD;EACA,OAAOC,GAAG;AACd;AAEO,SAASG,kBAAkBA,CAC9BL,GAAqD,EACrDM,KAAe,EACfC,OAAuB,EACvBC,UAAsC,EAC9B;EACR,IAAIC,KAAK,GAAGT,GAAG,CAACG,GAAG,CAACG,KAAK,CAAC;EAC1B,IAAI,OAAOG,KAAK,KAAK,WAAW,EAAE;IAC9BA,KAAK,GAAGF,OAAO,CAAC,CAAC;IACjBP,GAAG,CAACU,GAAG,CAACJ,KAAK,EAAEG,KAAK,CAAC;EACzB,CAAC,MAAM,IAAID,UAAU,EAAE;IACnBA,UAAU,CAACC,KAAK,CAAC;EACrB;EACA,OAAOA,KAAK;AAChB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-number.js b/dist/cjs/plugins/utils/utils-number.js deleted file mode 100644 index 3b50b2ada54..00000000000 --- a/dist/cjs/plugins/utils/utils-number.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=utils-number.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-number.js.map b/dist/cjs/plugins/utils/utils-number.js.map deleted file mode 100644 index fcfcf02d8f5..00000000000 --- a/dist/cjs/plugins/utils/utils-number.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-number.js","names":[],"sources":["../../../../src/plugins/utils/utils-number.ts"],"sourcesContent":[""],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-object-deep-equal.js b/dist/cjs/plugins/utils/utils-object-deep-equal.js deleted file mode 100644 index d27d5c9637d..00000000000 --- a/dist/cjs/plugins/utils/utils-object-deep-equal.js +++ /dev/null @@ -1,42 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.deepEqual = deepEqual; -/** - * Copied from the fast-deep-equal package - * because it does not support es modules and causes optimization bailouts. 
- * TODO use the npm package again when this is merged: - * @link https://github.com/epoberezkin/fast-deep-equal/pull/105 - */ -function deepEqual(a, b) { - if (a === b) return true; - if (a && b && typeof a == 'object' && typeof b == 'object') { - if (a.constructor !== b.constructor) return false; - var length; - var i; - if (Array.isArray(a)) { - length = a.length; - if (length !== b.length) return false; - for (i = length; i-- !== 0;) if (!deepEqual(a[i], b[i])) return false; - return true; - } - if (a.constructor === RegExp) return a.source === b.source && a.flags === b.flags; - if (a.valueOf !== Object.prototype.valueOf) return a.valueOf() === b.valueOf(); - if (a.toString !== Object.prototype.toString) return a.toString() === b.toString(); - var keys = Object.keys(a); - length = keys.length; - if (length !== Object.keys(b).length) return false; - for (i = length; i-- !== 0;) if (!Object.prototype.hasOwnProperty.call(b, keys[i])) return false; - for (i = length; i-- !== 0;) { - var key = keys[i]; - if (!deepEqual(a[key], b[key])) return false; - } - return true; - } - - // true if both NaN, false otherwise - return a !== a && b !== b; -} -//# sourceMappingURL=utils-object-deep-equal.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-object-deep-equal.js.map b/dist/cjs/plugins/utils/utils-object-deep-equal.js.map deleted file mode 100644 index a7bd97572a8..00000000000 --- a/dist/cjs/plugins/utils/utils-object-deep-equal.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-object-deep-equal.js","names":["deepEqual","a","b","constructor","length","i","Array","isArray","RegExp","source","flags","valueOf","Object","prototype","toString","keys","hasOwnProperty","call","key"],"sources":["../../../../src/plugins/utils/utils-object-deep-equal.ts"],"sourcesContent":["\n/**\n * Copied from the fast-deep-equal package\n * because it does not support es modules and causes optimization bailouts.\n * TODO use the npm package again when 
this is merged:\n * @link https://github.com/epoberezkin/fast-deep-equal/pull/105\n */\nexport function deepEqual(a: any, b: any): boolean {\n if (a === b) return true;\n\n if (a && b && typeof a == 'object' && typeof b == 'object') {\n if (a.constructor !== b.constructor) return false;\n\n let length;\n let i;\n if (Array.isArray(a)) {\n length = a.length;\n if (length !== b.length) return false;\n for (i = length; i-- !== 0;)\n if (!deepEqual(a[i], b[i])) return false;\n return true;\n }\n\n\n if (a.constructor === RegExp) return a.source === b.source && a.flags === b.flags;\n if (a.valueOf !== Object.prototype.valueOf) return a.valueOf() === b.valueOf();\n if (a.toString !== Object.prototype.toString) return a.toString() === b.toString();\n\n const keys = Object.keys(a);\n length = keys.length;\n if (length !== Object.keys(b).length) return false;\n\n for (i = length; i-- !== 0;)\n if (!Object.prototype.hasOwnProperty.call(b, keys[i])) return false;\n\n for (i = length; i-- !== 0;) {\n const key = keys[i];\n if (!deepEqual(a[key], b[key])) return false;\n }\n\n return true;\n }\n\n // true if both NaN, false otherwise\n return a !== a && b !== 
b;\n}\n"],"mappings":";;;;;;AACA;AACA;AACA;AACA;AACA;AACA;AACO,SAASA,SAASA,CAACC,CAAM,EAAEC,CAAM,EAAW;EAC/C,IAAID,CAAC,KAAKC,CAAC,EAAE,OAAO,IAAI;EAExB,IAAID,CAAC,IAAIC,CAAC,IAAI,OAAOD,CAAC,IAAI,QAAQ,IAAI,OAAOC,CAAC,IAAI,QAAQ,EAAE;IACxD,IAAID,CAAC,CAACE,WAAW,KAAKD,CAAC,CAACC,WAAW,EAAE,OAAO,KAAK;IAEjD,IAAIC,MAAM;IACV,IAAIC,CAAC;IACL,IAAIC,KAAK,CAACC,OAAO,CAACN,CAAC,CAAC,EAAE;MAClBG,MAAM,GAAGH,CAAC,CAACG,MAAM;MACjB,IAAIA,MAAM,KAAKF,CAAC,CAACE,MAAM,EAAE,OAAO,KAAK;MACrC,KAAKC,CAAC,GAAGD,MAAM,EAAEC,CAAC,EAAE,KAAK,CAAC,GACtB,IAAI,CAACL,SAAS,CAACC,CAAC,CAACI,CAAC,CAAC,EAAEH,CAAC,CAACG,CAAC,CAAC,CAAC,EAAE,OAAO,KAAK;MAC5C,OAAO,IAAI;IACf;IAGA,IAAIJ,CAAC,CAACE,WAAW,KAAKK,MAAM,EAAE,OAAOP,CAAC,CAACQ,MAAM,KAAKP,CAAC,CAACO,MAAM,IAAIR,CAAC,CAACS,KAAK,KAAKR,CAAC,CAACQ,KAAK;IACjF,IAAIT,CAAC,CAACU,OAAO,KAAKC,MAAM,CAACC,SAAS,CAACF,OAAO,EAAE,OAAOV,CAAC,CAACU,OAAO,CAAC,CAAC,KAAKT,CAAC,CAACS,OAAO,CAAC,CAAC;IAC9E,IAAIV,CAAC,CAACa,QAAQ,KAAKF,MAAM,CAACC,SAAS,CAACC,QAAQ,EAAE,OAAOb,CAAC,CAACa,QAAQ,CAAC,CAAC,KAAKZ,CAAC,CAACY,QAAQ,CAAC,CAAC;IAElF,IAAMC,IAAI,GAAGH,MAAM,CAACG,IAAI,CAACd,CAAC,CAAC;IAC3BG,MAAM,GAAGW,IAAI,CAACX,MAAM;IACpB,IAAIA,MAAM,KAAKQ,MAAM,CAACG,IAAI,CAACb,CAAC,CAAC,CAACE,MAAM,EAAE,OAAO,KAAK;IAElD,KAAKC,CAAC,GAAGD,MAAM,EAAEC,CAAC,EAAE,KAAK,CAAC,GACtB,IAAI,CAACO,MAAM,CAACC,SAAS,CAACG,cAAc,CAACC,IAAI,CAACf,CAAC,EAAEa,IAAI,CAACV,CAAC,CAAC,CAAC,EAAE,OAAO,KAAK;IAEvE,KAAKA,CAAC,GAAGD,MAAM,EAAEC,CAAC,EAAE,KAAK,CAAC,GAAG;MACzB,IAAMa,GAAG,GAAGH,IAAI,CAACV,CAAC,CAAC;MACnB,IAAI,CAACL,SAAS,CAACC,CAAC,CAACiB,GAAG,CAAC,EAAEhB,CAAC,CAACgB,GAAG,CAAC,CAAC,EAAE,OAAO,KAAK;IAChD;IAEA,OAAO,IAAI;EACf;;EAEA;EACA,OAAOjB,CAAC,KAAKA,CAAC,IAAIC,CAAC,KAAKA,CAAC;AAC7B","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-object-dot-prop.js b/dist/cjs/plugins/utils/utils-object-dot-prop.js deleted file mode 100644 index 2d730af7d80..00000000000 --- a/dist/cjs/plugins/utils/utils-object-dot-prop.js +++ /dev/null @@ -1,306 +0,0 @@ -"use strict"; - 
-Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.deepKeys = deepKeys; -exports.deleteProperty = deleteProperty; -exports.getProperty = getProperty; -exports.hasProperty = hasProperty; -exports.setProperty = setProperty; -/** - * Copied from - * @link https://github.com/sindresorhus/dot-prop/blob/main/index.js - * because it is currently an esm only module. - * TODO use the npm package again when RxDB is also fully esm. - */ - -var isObject = value => { - var type = typeof value; - return value !== null && (type === 'object' || type === 'function'); -}; -var disallowedKeys = new Set(['__proto__', 'prototype', 'constructor']); -var digits = new Set('0123456789'); -function getPathSegments(path) { - var parts = []; - var currentSegment = ''; - var currentPart = 'start'; - var isIgnoring = false; - for (var character of path) { - switch (character) { - case '\\': - { - if (currentPart === 'index') { - throw new Error('Invalid character in an index'); - } - if (currentPart === 'indexEnd') { - throw new Error('Invalid character after an index'); - } - if (isIgnoring) { - currentSegment += character; - } - currentPart = 'property'; - isIgnoring = !isIgnoring; - break; - } - case '.': - { - if (currentPart === 'index') { - throw new Error('Invalid character in an index'); - } - if (currentPart === 'indexEnd') { - currentPart = 'property'; - break; - } - if (isIgnoring) { - isIgnoring = false; - currentSegment += character; - break; - } - if (disallowedKeys.has(currentSegment)) { - return []; - } - parts.push(currentSegment); - currentSegment = ''; - currentPart = 'property'; - break; - } - case '[': - { - if (currentPart === 'index') { - throw new Error('Invalid character in an index'); - } - if (currentPart === 'indexEnd') { - currentPart = 'index'; - break; - } - if (isIgnoring) { - isIgnoring = false; - currentSegment += character; - break; - } - if (currentPart === 'property') { - if (disallowedKeys.has(currentSegment)) { - return []; - } - 
parts.push(currentSegment); - currentSegment = ''; - } - currentPart = 'index'; - break; - } - case ']': - { - if (currentPart === 'index') { - parts.push(Number.parseInt(currentSegment, 10)); - currentSegment = ''; - currentPart = 'indexEnd'; - break; - } - if (currentPart === 'indexEnd') { - throw new Error('Invalid character after an index'); - } - - // Falls through - } - default: - { - if (currentPart === 'index' && !digits.has(character)) { - throw new Error('Invalid character in an index'); - } - if (currentPart === 'indexEnd') { - throw new Error('Invalid character after an index'); - } - if (currentPart === 'start') { - currentPart = 'property'; - } - if (isIgnoring) { - isIgnoring = false; - currentSegment += '\\'; - } - currentSegment += character; - } - } - } - if (isIgnoring) { - currentSegment += '\\'; - } - switch (currentPart) { - case 'property': - { - if (disallowedKeys.has(currentSegment)) { - return []; - } - parts.push(currentSegment); - break; - } - case 'index': - { - throw new Error('Index was not closed'); - } - case 'start': - { - parts.push(''); - break; - } - // No default - } - return parts; -} -function isStringIndex(object, key) { - if (typeof key !== 'number' && Array.isArray(object)) { - var index = Number.parseInt(key, 10); - return Number.isInteger(index) && object[index] === object[key]; - } - return false; -} -function assertNotStringIndex(object, key) { - if (isStringIndex(object, key)) { - throw new Error('Cannot use string index'); - } -} - -/** - * TODO we need some performance tests and improvements here. - */ -function getProperty(object, path, value) { - if (Array.isArray(path)) { - path = path.join('.'); - } - - /** - * Performance shortcut. - * In most cases we just have a simple property name - * so we can directly return it. - */ - if (!path.includes('.') && !path.includes('[')) { - return object[path]; - } - if (!isObject(object) || typeof path !== 'string') { - return value === undefined ? 
object : value; - } - var pathArray = getPathSegments(path); - if (pathArray.length === 0) { - return value; - } - for (var index = 0; index < pathArray.length; index++) { - var key = pathArray[index]; - if (isStringIndex(object, key)) { - object = index === pathArray.length - 1 ? undefined : null; - } else { - object = object[key]; - } - if (object === undefined || object === null) { - // `object` is either `undefined` or `null` so we want to stop the loop, and - // if this is not the last bit of the path, and - // if it didn't return `undefined` - // it would return `null` if `object` is `null` - // but we want `get({foo: null}, 'foo.bar')` to equal `undefined`, or the supplied value, not `null` - if (index !== pathArray.length - 1) { - return value; - } - break; - } - } - return object === undefined ? value : object; -} -function setProperty(object, path, value) { - if (Array.isArray(path)) { - path = path.join('.'); - } - if (!isObject(object) || typeof path !== 'string') { - return object; - } - var root = object; - var pathArray = getPathSegments(path); - for (var index = 0; index < pathArray.length; index++) { - var key = pathArray[index]; - assertNotStringIndex(object, key); - if (index === pathArray.length - 1) { - object[key] = value; - } else if (!isObject(object[key])) { - object[key] = typeof pathArray[index + 1] === 'number' ? 
[] : {}; - } - object = object[key]; - } - return root; -} -function deleteProperty(object, path) { - if (!isObject(object) || typeof path !== 'string') { - return false; - } - var pathArray = getPathSegments(path); - for (var index = 0; index < pathArray.length; index++) { - var key = pathArray[index]; - assertNotStringIndex(object, key); - if (index === pathArray.length - 1) { - delete object[key]; - return true; - } - object = object[key]; - if (!isObject(object)) { - return false; - } - } -} -function hasProperty(object, path) { - if (!isObject(object) || typeof path !== 'string') { - return false; - } - var pathArray = getPathSegments(path); - if (pathArray.length === 0) { - return false; - } - for (var key of pathArray) { - if (!isObject(object) || !(key in object) || isStringIndex(object, key)) { - return false; - } - object = object[key]; - } - return true; -} - -// TODO: Backslashes with no effect should not be escaped -function escapePath(path) { - if (typeof path !== 'string') { - throw new TypeError('Expected a string'); - } - return path.replace(/[\\.[]/g, '\\$&'); -} - -// The keys returned by Object.entries() for arrays are strings -function entries(value) { - if (Array.isArray(value)) { - return value.map((v, index) => [index, v]); - } - return Object.entries(value); -} -function stringifyPath(pathSegments) { - var result = ''; - - // eslint-disable-next-line prefer-const - for (var [index, segment] of entries(pathSegments)) { - if (typeof segment === 'number') { - result += "[" + segment + "]"; - } else { - segment = escapePath(segment); - result += index === 0 ? segment : "." 
+ segment; - } - } - return result; -} -function* deepKeysIterator(object, currentPath = []) { - if (!isObject(object)) { - if (currentPath.length > 0) { - yield stringifyPath(currentPath); - } - return; - } - for (var [key, value] of entries(object)) { - yield* deepKeysIterator(value, [...currentPath, key]); - } -} -function deepKeys(object) { - return [...deepKeysIterator(object)]; -} -//# sourceMappingURL=utils-object-dot-prop.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-object-dot-prop.js.map b/dist/cjs/plugins/utils/utils-object-dot-prop.js.map deleted file mode 100644 index 905da7da1bf..00000000000 --- a/dist/cjs/plugins/utils/utils-object-dot-prop.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-object-dot-prop.js","names":["isObject","value","type","disallowedKeys","Set","digits","getPathSegments","path","parts","currentSegment","currentPart","isIgnoring","character","Error","has","push","Number","parseInt","isStringIndex","object","key","Array","isArray","index","isInteger","assertNotStringIndex","getProperty","join","includes","undefined","pathArray","length","setProperty","root","deleteProperty","hasProperty","escapePath","TypeError","replace","entries","map","v","Object","stringifyPath","pathSegments","result","segment","deepKeysIterator","currentPath","deepKeys"],"sources":["../../../../src/plugins/utils/utils-object-dot-prop.ts"],"sourcesContent":["/**\n * Copied from\n * @link https://github.com/sindresorhus/dot-prop/blob/main/index.js\n * because it is currently an esm only module.\n * TODO use the npm package again when RxDB is also fully esm.\n */\n\nconst isObject = (value: null) => {\n const type = typeof value;\n return value !== null && (type === 'object' || type === 'function');\n};\n\nconst disallowedKeys = new Set([\n '__proto__',\n 'prototype',\n 'constructor',\n]);\n\nconst digits = new Set('0123456789');\n\nfunction getPathSegments(path: string) {\n const parts = [];\n let currentSegment = 
'';\n let currentPart = 'start';\n let isIgnoring = false;\n\n for (const character of path) {\n switch (character) {\n case '\\\\': {\n if (currentPart === 'index') {\n throw new Error('Invalid character in an index');\n }\n\n if (currentPart === 'indexEnd') {\n throw new Error('Invalid character after an index');\n }\n\n if (isIgnoring) {\n currentSegment += character;\n }\n\n currentPart = 'property';\n isIgnoring = !isIgnoring;\n break;\n }\n\n case '.': {\n if (currentPart === 'index') {\n throw new Error('Invalid character in an index');\n }\n\n if (currentPart === 'indexEnd') {\n currentPart = 'property';\n break;\n }\n\n if (isIgnoring) {\n isIgnoring = false;\n currentSegment += character;\n break;\n }\n\n if (disallowedKeys.has(currentSegment)) {\n return [];\n }\n\n parts.push(currentSegment);\n currentSegment = '';\n currentPart = 'property';\n break;\n }\n\n case '[': {\n if (currentPart === 'index') {\n throw new Error('Invalid character in an index');\n }\n\n if (currentPart === 'indexEnd') {\n currentPart = 'index';\n break;\n }\n\n if (isIgnoring) {\n isIgnoring = false;\n currentSegment += character;\n break;\n }\n\n if (currentPart === 'property') {\n if (disallowedKeys.has(currentSegment)) {\n return [];\n }\n\n parts.push(currentSegment);\n currentSegment = '';\n }\n\n currentPart = 'index';\n break;\n }\n\n case ']': {\n if (currentPart === 'index') {\n parts.push(Number.parseInt(currentSegment, 10));\n currentSegment = '';\n currentPart = 'indexEnd';\n break;\n }\n\n if (currentPart === 'indexEnd') {\n throw new Error('Invalid character after an index');\n }\n\n // Falls through\n }\n\n default: {\n if (currentPart === 'index' && !digits.has(character)) {\n throw new Error('Invalid character in an index');\n }\n\n if (currentPart === 'indexEnd') {\n throw new Error('Invalid character after an index');\n }\n\n if (currentPart === 'start') {\n currentPart = 'property';\n }\n\n if (isIgnoring) {\n isIgnoring = false;\n currentSegment += 
'\\\\';\n }\n\n currentSegment += character;\n }\n }\n }\n\n if (isIgnoring) {\n currentSegment += '\\\\';\n }\n\n switch (currentPart) {\n case 'property': {\n if (disallowedKeys.has(currentSegment)) {\n return [];\n }\n\n parts.push(currentSegment);\n\n break;\n }\n\n case 'index': {\n throw new Error('Index was not closed');\n }\n\n case 'start': {\n parts.push('');\n\n break;\n }\n // No default\n }\n\n return parts;\n}\n\nfunction isStringIndex(object: any[], key: string) {\n if (typeof key !== 'number' && Array.isArray(object)) {\n const index = Number.parseInt(key, 10);\n return Number.isInteger(index) && object[index] === object[key as any];\n }\n\n return false;\n}\n\nfunction assertNotStringIndex(object: any, key: string | number) {\n if (isStringIndex(object, key as any)) {\n throw new Error('Cannot use string index');\n }\n}\n\n/**\n * TODO we need some performance tests and improvements here.\n */\nexport function getProperty(object: any, path: string | string[], value?: any) {\n if (Array.isArray(path)) {\n path = path.join('.');\n }\n\n /**\n * Performance shortcut.\n * In most cases we just have a simple property name\n * so we can directly return it.\n */\n if (\n !path.includes('.') &&\n !path.includes('[')\n ) {\n return object[path];\n }\n\n if (!isObject(object as any) || typeof path !== 'string') {\n return value === undefined ? object : value;\n }\n\n const pathArray = getPathSegments(path);\n if (pathArray.length === 0) {\n return value;\n }\n\n for (let index = 0; index < pathArray.length; index++) {\n const key = pathArray[index];\n\n if (isStringIndex(object as any, key as any)) {\n object = index === pathArray.length - 1 ? 
undefined : null;\n } else {\n object = (object as any)[key];\n }\n\n if (object === undefined || object === null) {\n // `object` is either `undefined` or `null` so we want to stop the loop, and\n // if this is not the last bit of the path, and\n // if it didn't return `undefined`\n // it would return `null` if `object` is `null`\n // but we want `get({foo: null}, 'foo.bar')` to equal `undefined`, or the supplied value, not `null`\n if (index !== pathArray.length - 1) {\n return value;\n }\n\n break;\n }\n }\n\n return object === undefined ? value : object;\n}\n\nexport function setProperty(object: any, path: string, value: any) {\n if (Array.isArray(path)) {\n path = path.join('.');\n }\n\n if (!isObject(object as any) || typeof path !== 'string') {\n return object;\n }\n\n const root = object;\n const pathArray = getPathSegments(path);\n\n for (let index = 0; index < pathArray.length; index++) {\n const key = pathArray[index];\n\n assertNotStringIndex(object, key);\n\n if (index === pathArray.length - 1) {\n object[key] = value;\n } else if (!isObject(object[key])) {\n object[key] = typeof pathArray[index + 1] === 'number' ? 
[] : {};\n }\n\n object = object[key];\n }\n\n return root;\n}\n\nexport function deleteProperty(object: any, path: string) {\n if (!isObject(object as any) || typeof path !== 'string') {\n return false;\n }\n\n const pathArray = getPathSegments(path);\n\n for (let index = 0; index < pathArray.length; index++) {\n const key = pathArray[index];\n\n assertNotStringIndex(object, key);\n\n if (index === pathArray.length - 1) {\n delete object[key];\n return true;\n }\n\n object = object[key];\n\n if (!isObject(object as any)) {\n return false;\n }\n }\n}\n\nexport function hasProperty(object: any, path: string) {\n if (!isObject(object) || typeof path !== 'string') {\n return false;\n }\n\n const pathArray = getPathSegments(path);\n if (pathArray.length === 0) {\n return false;\n }\n\n for (const key of pathArray) {\n if (!isObject(object) || !(key in object) || isStringIndex(object, key as any)) {\n return false;\n }\n\n object = object[key];\n }\n\n return true;\n}\n\n// TODO: Backslashes with no effect should not be escaped\nfunction escapePath(path: string) {\n if (typeof path !== 'string') {\n throw new TypeError('Expected a string');\n }\n\n return path.replace(/[\\\\.[]/g, '\\\\$&');\n}\n\n// The keys returned by Object.entries() for arrays are strings\nfunction entries(value: any) {\n if (Array.isArray(value)) {\n return value.map((v, index) => [index, v]);\n }\n\n return Object.entries(value);\n}\n\nfunction stringifyPath(pathSegments: never[]) {\n let result = '';\n\n // eslint-disable-next-line prefer-const\n for (let [index, segment] of entries(pathSegments)) {\n if (typeof segment === 'number') {\n result += `[${segment}]`;\n } else {\n segment = escapePath(segment);\n result += index === 0 ? 
segment : `.${segment}`;\n }\n }\n\n return result;\n}\n\nfunction* deepKeysIterator(object: any, currentPath = []): any {\n if (!isObject(object)) {\n if (currentPath.length > 0) {\n yield stringifyPath(currentPath);\n }\n\n return;\n }\n\n for (const [key, value] of entries(object)) {\n yield* deepKeysIterator(value, [...currentPath, key] as any);\n }\n}\n\nexport function deepKeys(object: any) {\n return [...deepKeysIterator(object)];\n}\n"],"mappings":";;;;;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;;AAEA,IAAMA,QAAQ,GAAIC,KAAW,IAAK;EAC9B,IAAMC,IAAI,GAAG,OAAOD,KAAK;EACzB,OAAOA,KAAK,KAAK,IAAI,KAAKC,IAAI,KAAK,QAAQ,IAAIA,IAAI,KAAK,UAAU,CAAC;AACvE,CAAC;AAED,IAAMC,cAAc,GAAG,IAAIC,GAAG,CAAC,CAC3B,WAAW,EACX,WAAW,EACX,aAAa,CAChB,CAAC;AAEF,IAAMC,MAAM,GAAG,IAAID,GAAG,CAAC,YAAY,CAAC;AAEpC,SAASE,eAAeA,CAACC,IAAY,EAAE;EACnC,IAAMC,KAAK,GAAG,EAAE;EAChB,IAAIC,cAAc,GAAG,EAAE;EACvB,IAAIC,WAAW,GAAG,OAAO;EACzB,IAAIC,UAAU,GAAG,KAAK;EAEtB,KAAK,IAAMC,SAAS,IAAIL,IAAI,EAAE;IAC1B,QAAQK,SAAS;MACb,KAAK,IAAI;QAAE;UACP,IAAIF,WAAW,KAAK,OAAO,EAAE;YACzB,MAAM,IAAIG,KAAK,CAAC,+BAA+B,CAAC;UACpD;UAEA,IAAIH,WAAW,KAAK,UAAU,EAAE;YAC5B,MAAM,IAAIG,KAAK,CAAC,kCAAkC,CAAC;UACvD;UAEA,IAAIF,UAAU,EAAE;YACZF,cAAc,IAAIG,SAAS;UAC/B;UAEAF,WAAW,GAAG,UAAU;UACxBC,UAAU,GAAG,CAACA,UAAU;UACxB;QACJ;MAEA,KAAK,GAAG;QAAE;UACN,IAAID,WAAW,KAAK,OAAO,EAAE;YACzB,MAAM,IAAIG,KAAK,CAAC,+BAA+B,CAAC;UACpD;UAEA,IAAIH,WAAW,KAAK,UAAU,EAAE;YAC5BA,WAAW,GAAG,UAAU;YACxB;UACJ;UAEA,IAAIC,UAAU,EAAE;YACZA,UAAU,GAAG,KAAK;YAClBF,cAAc,IAAIG,SAAS;YAC3B;UACJ;UAEA,IAAIT,cAAc,CAACW,GAAG,CAACL,cAAc,CAAC,EAAE;YACpC,OAAO,EAAE;UACb;UAEAD,KAAK,CAACO,IAAI,CAACN,cAAc,CAAC;UAC1BA,cAAc,GAAG,EAAE;UACnBC,WAAW,GAAG,UAAU;UACxB;QACJ;MAEA,KAAK,GAAG;QAAE;UACN,IAAIA,WAAW,KAAK,OAAO,EAAE;YACzB,MAAM,IAAIG,KAAK,CAAC,+BAA+B,CAAC;UACpD;UAEA,IAAIH,WAAW,KAAK,UAAU,EAAE;YAC5BA,WAAW,GAAG,OAAO;YACrB;UACJ;UAEA,IAAIC,UAAU,EAAE;YACZA,UAAU,GAAG,KAAK;YAClBF,cAAc,IAAIG,SAAS;YAC3B;UACJ;UAEA,IAAIF,WAAW,KAAK,UAAU,EAAE;YAC5B,IAAIP,cAAc,CAACW,GAAG,CAACL,cAAc,CAAC,EAAE;cACpC,OAAO,EAAE;YACb;YAEAD,KAA
K,CAACO,IAAI,CAACN,cAAc,CAAC;YAC1BA,cAAc,GAAG,EAAE;UACvB;UAEAC,WAAW,GAAG,OAAO;UACrB;QACJ;MAEA,KAAK,GAAG;QAAE;UACN,IAAIA,WAAW,KAAK,OAAO,EAAE;YACzBF,KAAK,CAACO,IAAI,CAACC,MAAM,CAACC,QAAQ,CAACR,cAAc,EAAE,EAAE,CAAC,CAAC;YAC/CA,cAAc,GAAG,EAAE;YACnBC,WAAW,GAAG,UAAU;YACxB;UACJ;UAEA,IAAIA,WAAW,KAAK,UAAU,EAAE;YAC5B,MAAM,IAAIG,KAAK,CAAC,kCAAkC,CAAC;UACvD;;UAEA;QACJ;MAEA;QAAS;UACL,IAAIH,WAAW,KAAK,OAAO,IAAI,CAACL,MAAM,CAACS,GAAG,CAACF,SAAS,CAAC,EAAE;YACnD,MAAM,IAAIC,KAAK,CAAC,+BAA+B,CAAC;UACpD;UAEA,IAAIH,WAAW,KAAK,UAAU,EAAE;YAC5B,MAAM,IAAIG,KAAK,CAAC,kCAAkC,CAAC;UACvD;UAEA,IAAIH,WAAW,KAAK,OAAO,EAAE;YACzBA,WAAW,GAAG,UAAU;UAC5B;UAEA,IAAIC,UAAU,EAAE;YACZA,UAAU,GAAG,KAAK;YAClBF,cAAc,IAAI,IAAI;UAC1B;UAEAA,cAAc,IAAIG,SAAS;QAC/B;IACJ;EACJ;EAEA,IAAID,UAAU,EAAE;IACZF,cAAc,IAAI,IAAI;EAC1B;EAEA,QAAQC,WAAW;IACf,KAAK,UAAU;MAAE;QACb,IAAIP,cAAc,CAACW,GAAG,CAACL,cAAc,CAAC,EAAE;UACpC,OAAO,EAAE;QACb;QAEAD,KAAK,CAACO,IAAI,CAACN,cAAc,CAAC;QAE1B;MACJ;IAEA,KAAK,OAAO;MAAE;QACV,MAAM,IAAII,KAAK,CAAC,sBAAsB,CAAC;MAC3C;IAEA,KAAK,OAAO;MAAE;QACVL,KAAK,CAACO,IAAI,CAAC,EAAE,CAAC;QAEd;MACJ;IACA;EACJ;EAEA,OAAOP,KAAK;AAChB;AAEA,SAASU,aAAaA,CAACC,MAAa,EAAEC,GAAW,EAAE;EAC/C,IAAI,OAAOA,GAAG,KAAK,QAAQ,IAAIC,KAAK,CAACC,OAAO,CAACH,MAAM,CAAC,EAAE;IAClD,IAAMI,KAAK,GAAGP,MAAM,CAACC,QAAQ,CAACG,GAAG,EAAE,EAAE,CAAC;IACtC,OAAOJ,MAAM,CAACQ,SAAS,CAACD,KAAK,CAAC,IAAIJ,MAAM,CAACI,KAAK,CAAC,KAAKJ,MAAM,CAACC,GAAG,CAAQ;EAC1E;EAEA,OAAO,KAAK;AAChB;AAEA,SAASK,oBAAoBA,CAACN,MAAW,EAAEC,GAAoB,EAAE;EAC7D,IAAIF,aAAa,CAACC,MAAM,EAAEC,GAAU,CAAC,EAAE;IACnC,MAAM,IAAIP,KAAK,CAAC,yBAAyB,CAAC;EAC9C;AACJ;;AAEA;AACA;AACA;AACO,SAASa,WAAWA,CAACP,MAAW,EAAEZ,IAAuB,EAAEN,KAAW,EAAE;EAC3E,IAAIoB,KAAK,CAACC,OAAO,CAACf,IAAI,CAAC,EAAE;IACrBA,IAAI,GAAGA,IAAI,CAACoB,IAAI,CAAC,GAAG,CAAC;EACzB;;EAEA;AACJ;AACA;AACA;AACA;EACI,IACI,CAACpB,IAAI,CAACqB,QAAQ,CAAC,GAAG,CAAC,IACnB,CAACrB,IAAI,CAACqB,QAAQ,CAAC,GAAG,CAAC,EACrB;IACE,OAAOT,MAAM,CAACZ,IAAI,CAAC;EACvB;EAEA,IAAI,CAACP,QAAQ,CAACmB,MAAa,CAAC,IAAI,OAAOZ,IAAI,KAAK,QAAQ,EAAE;IACtD,OAAON,KAAK,KAAK4B,SAAS,GAAGV,MAAM
,GAAGlB,KAAK;EAC/C;EAEA,IAAM6B,SAAS,GAAGxB,eAAe,CAACC,IAAI,CAAC;EACvC,IAAIuB,SAAS,CAACC,MAAM,KAAK,CAAC,EAAE;IACxB,OAAO9B,KAAK;EAChB;EAEA,KAAK,IAAIsB,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGO,SAAS,CAACC,MAAM,EAAER,KAAK,EAAE,EAAE;IACnD,IAAMH,GAAG,GAAGU,SAAS,CAACP,KAAK,CAAC;IAE5B,IAAIL,aAAa,CAACC,MAAM,EAASC,GAAU,CAAC,EAAE;MAC1CD,MAAM,GAAGI,KAAK,KAAKO,SAAS,CAACC,MAAM,GAAG,CAAC,GAAGF,SAAS,GAAG,IAAI;IAC9D,CAAC,MAAM;MACHV,MAAM,GAAIA,MAAM,CAASC,GAAG,CAAC;IACjC;IAEA,IAAID,MAAM,KAAKU,SAAS,IAAIV,MAAM,KAAK,IAAI,EAAE;MACzC;MACA;MACA;MACA;MACA;MACA,IAAII,KAAK,KAAKO,SAAS,CAACC,MAAM,GAAG,CAAC,EAAE;QAChC,OAAO9B,KAAK;MAChB;MAEA;IACJ;EACJ;EAEA,OAAOkB,MAAM,KAAKU,SAAS,GAAG5B,KAAK,GAAGkB,MAAM;AAChD;AAEO,SAASa,WAAWA,CAACb,MAAW,EAAEZ,IAAY,EAAEN,KAAU,EAAE;EAC/D,IAAIoB,KAAK,CAACC,OAAO,CAACf,IAAI,CAAC,EAAE;IACrBA,IAAI,GAAGA,IAAI,CAACoB,IAAI,CAAC,GAAG,CAAC;EACzB;EAEA,IAAI,CAAC3B,QAAQ,CAACmB,MAAa,CAAC,IAAI,OAAOZ,IAAI,KAAK,QAAQ,EAAE;IACtD,OAAOY,MAAM;EACjB;EAEA,IAAMc,IAAI,GAAGd,MAAM;EACnB,IAAMW,SAAS,GAAGxB,eAAe,CAACC,IAAI,CAAC;EAEvC,KAAK,IAAIgB,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGO,SAAS,CAACC,MAAM,EAAER,KAAK,EAAE,EAAE;IACnD,IAAMH,GAAG,GAAGU,SAAS,CAACP,KAAK,CAAC;IAE5BE,oBAAoB,CAACN,MAAM,EAAEC,GAAG,CAAC;IAEjC,IAAIG,KAAK,KAAKO,SAAS,CAACC,MAAM,GAAG,CAAC,EAAE;MAChCZ,MAAM,CAACC,GAAG,CAAC,GAAGnB,KAAK;IACvB,CAAC,MAAM,IAAI,CAACD,QAAQ,CAACmB,MAAM,CAACC,GAAG,CAAC,CAAC,EAAE;MAC/BD,MAAM,CAACC,GAAG,CAAC,GAAG,OAAOU,SAAS,CAACP,KAAK,GAAG,CAAC,CAAC,KAAK,QAAQ,GAAG,EAAE,GAAG,CAAC,CAAC;IACpE;IAEAJ,MAAM,GAAGA,MAAM,CAACC,GAAG,CAAC;EACxB;EAEA,OAAOa,IAAI;AACf;AAEO,SAASC,cAAcA,CAACf,MAAW,EAAEZ,IAAY,EAAE;EACtD,IAAI,CAACP,QAAQ,CAACmB,MAAa,CAAC,IAAI,OAAOZ,IAAI,KAAK,QAAQ,EAAE;IACtD,OAAO,KAAK;EAChB;EAEA,IAAMuB,SAAS,GAAGxB,eAAe,CAACC,IAAI,CAAC;EAEvC,KAAK,IAAIgB,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGO,SAAS,CAACC,MAAM,EAAER,KAAK,EAAE,EAAE;IACnD,IAAMH,GAAG,GAAGU,SAAS,CAACP,KAAK,CAAC;IAE5BE,oBAAoB,CAACN,MAAM,EAAEC,GAAG,CAAC;IAEjC,IAAIG,KAAK,KAAKO,SAAS,CAACC,MAAM,GAAG,CAAC,EAAE;MAChC,OAAOZ,MAAM,CAACC,GAAG,CAAC;MAClB,OAAO,IAAI;IACf;IAEAD,MAAM,GAAGA,MAAM,CAA
CC,GAAG,CAAC;IAEpB,IAAI,CAACpB,QAAQ,CAACmB,MAAa,CAAC,EAAE;MAC1B,OAAO,KAAK;IAChB;EACJ;AACJ;AAEO,SAASgB,WAAWA,CAAChB,MAAW,EAAEZ,IAAY,EAAE;EACnD,IAAI,CAACP,QAAQ,CAACmB,MAAM,CAAC,IAAI,OAAOZ,IAAI,KAAK,QAAQ,EAAE;IAC/C,OAAO,KAAK;EAChB;EAEA,IAAMuB,SAAS,GAAGxB,eAAe,CAACC,IAAI,CAAC;EACvC,IAAIuB,SAAS,CAACC,MAAM,KAAK,CAAC,EAAE;IACxB,OAAO,KAAK;EAChB;EAEA,KAAK,IAAMX,GAAG,IAAIU,SAAS,EAAE;IACzB,IAAI,CAAC9B,QAAQ,CAACmB,MAAM,CAAC,IAAI,EAAEC,GAAG,IAAID,MAAM,CAAC,IAAID,aAAa,CAACC,MAAM,EAAEC,GAAU,CAAC,EAAE;MAC5E,OAAO,KAAK;IAChB;IAEAD,MAAM,GAAGA,MAAM,CAACC,GAAG,CAAC;EACxB;EAEA,OAAO,IAAI;AACf;;AAEA;AACA,SAASgB,UAAUA,CAAC7B,IAAY,EAAE;EAC9B,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;IAC1B,MAAM,IAAI8B,SAAS,CAAC,mBAAmB,CAAC;EAC5C;EAEA,OAAO9B,IAAI,CAAC+B,OAAO,CAAC,SAAS,EAAE,MAAM,CAAC;AAC1C;;AAEA;AACA,SAASC,OAAOA,CAACtC,KAAU,EAAE;EACzB,IAAIoB,KAAK,CAACC,OAAO,CAACrB,KAAK,CAAC,EAAE;IACtB,OAAOA,KAAK,CAACuC,GAAG,CAAC,CAACC,CAAC,EAAElB,KAAK,KAAK,CAACA,KAAK,EAAEkB,CAAC,CAAC,CAAC;EAC9C;EAEA,OAAOC,MAAM,CAACH,OAAO,CAACtC,KAAK,CAAC;AAChC;AAEA,SAAS0C,aAAaA,CAACC,YAAqB,EAAE;EAC1C,IAAIC,MAAM,GAAG,EAAE;;EAEf;EACA,KAAK,IAAI,CAACtB,KAAK,EAAEuB,OAAO,CAAC,IAAIP,OAAO,CAACK,YAAY,CAAC,EAAE;IAChD,IAAI,OAAOE,OAAO,KAAK,QAAQ,EAAE;MAC7BD,MAAM,UAAQC,OAAO,MAAG;IAC5B,CAAC,MAAM;MACHA,OAAO,GAAGV,UAAU,CAACU,OAAO,CAAC;MAC7BD,MAAM,IAAItB,KAAK,KAAK,CAAC,GAAGuB,OAAO,SAAOA,OAAS;IACnD;EACJ;EAEA,OAAOD,MAAM;AACjB;AAEA,UAAUE,gBAAgBA,CAAC5B,MAAW,EAAE6B,WAAW,GAAG,EAAE,EAAO;EAC3D,IAAI,CAAChD,QAAQ,CAACmB,MAAM,CAAC,EAAE;IACnB,IAAI6B,WAAW,CAACjB,MAAM,GAAG,CAAC,EAAE;MACxB,MAAMY,aAAa,CAACK,WAAW,CAAC;IACpC;IAEA;EACJ;EAEA,KAAK,IAAM,CAAC5B,GAAG,EAAEnB,KAAK,CAAC,IAAIsC,OAAO,CAACpB,MAAM,CAAC,EAAE;IACxC,OAAO4B,gBAAgB,CAAC9C,KAAK,EAAE,CAAC,GAAG+C,WAAW,EAAE5B,GAAG,CAAQ,CAAC;EAChE;AACJ;AAEO,SAAS6B,QAAQA,CAAC9B,MAAW,EAAE;EAClC,OAAO,CAAC,GAAG4B,gBAAgB,CAAC5B,MAAM,CAAC,CAAC;AACxC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-object.js b/dist/cjs/plugins/utils/utils-object.js deleted file mode 100644 index 
b75ffa6d353..00000000000 --- a/dist/cjs/plugins/utils/utils-object.js +++ /dev/null @@ -1,205 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.clone = void 0; -exports.deepFreeze = deepFreeze; -exports.firstPropertyNameOfObject = firstPropertyNameOfObject; -exports.firstPropertyValueOfObject = firstPropertyValueOfObject; -exports.flatClone = flatClone; -exports.flattenObject = flattenObject; -exports.getFromObjectOrThrow = getFromObjectOrThrow; -exports.hasDeepProperty = hasDeepProperty; -exports.objectPathMonad = objectPathMonad; -exports.overwriteGetterForCaching = overwriteGetterForCaching; -exports.sortObject = sortObject; -function deepFreeze(o) { - Object.freeze(o); - Object.getOwnPropertyNames(o).forEach(function (prop) { - if (Object.prototype.hasOwnProperty.call(o, prop) && o[prop] !== null && (typeof o[prop] === 'object' || typeof o[prop] === 'function') && !Object.isFrozen(o[prop])) { - deepFreeze(o[prop]); - } - }); - return o; -} - -/** - * To get specific nested path values from objects, - * RxDB normally uses the 'dot-prop' npm module. - * But when performance is really relevant, this is not fast enough. - * Instead we use a monad that can prepare some stuff up front - * and we can reuse the generated function. - */ - -function objectPathMonad(objectPath) { - var split = objectPath.split('.'); - - // reuse this variable for better performance. - var splitLength = split.length; - - /** - * Performance shortcut, - * if no nested path is used, - * directly return the field of the object. 
- */ - if (splitLength === 1) { - return obj => obj[objectPath]; - } - return obj => { - var currentVal = obj; - for (var i = 0; i < splitLength; ++i) { - var subPath = split[i]; - currentVal = currentVal[subPath]; - if (typeof currentVal === 'undefined') { - return currentVal; - } - } - return currentVal; - }; -} -function getFromObjectOrThrow(obj, key) { - var val = obj[key]; - if (!val) { - throw new Error('missing value from object ' + key); - } - return val; -} - -/** - * returns a flattened object - * @link https://gist.github.com/penguinboy/762197 - */ -function flattenObject(ob) { - var toReturn = {}; - for (var i in ob) { - if (!Object.prototype.hasOwnProperty.call(ob, i)) continue; - if (typeof ob[i] === 'object') { - var flatObject = flattenObject(ob[i]); - for (var x in flatObject) { - if (!Object.prototype.hasOwnProperty.call(flatObject, x)) continue; - toReturn[i + '.' + x] = flatObject[x]; - } - } else { - toReturn[i] = ob[i]; - } - } - return toReturn; -} - -/** - * does a flat copy on the objects, - * is about 3 times faster then using deepClone - * @link https://jsperf.com/object-rest-spread-vs-clone/2 - */ -function flatClone(obj) { - return Object.assign({}, obj); -} - -/** - * @link https://stackoverflow.com/a/11509718/3443137 - */ -function firstPropertyNameOfObject(obj) { - return Object.keys(obj)[0]; -} -function firstPropertyValueOfObject(obj) { - var key = Object.keys(obj)[0]; - return obj[key]; -} - -/** - * deep-sort an object so its attributes are in lexical order. 
- * Also sorts the arrays inside of the object if no-array-sort not set - */ -function sortObject(obj, noArraySort = false) { - if (!obj) return obj; // do not sort null, false or undefined - - // array - if (!noArraySort && Array.isArray(obj)) { - return obj.sort((a, b) => { - if (typeof a === 'string' && typeof b === 'string') return a.localeCompare(b); - if (typeof a === 'object') return 1;else return -1; - }).map(i => sortObject(i, noArraySort)); - } - - // object - // array is also of type object - if (typeof obj === 'object' && !Array.isArray(obj)) { - var out = {}; - Object.keys(obj).sort((a, b) => a.localeCompare(b)).forEach(key => { - out[key] = sortObject(obj[key], noArraySort); - }); - return out; - } - - // everything else - return obj; -} - -/** - * Deep clone a plain json object. - * Does not work with recursive stuff - * or non-plain-json. - * IMPORTANT: Performance of this is very important, - * do not change it without running performance tests! - * - * @link https://github.com/zxdong262/deep-copy/blob/master/src/index.ts - */ -function deepClone(src) { - if (!src) { - return src; - } - if (src === null || typeof src !== 'object') { - return src; - } - if (Array.isArray(src)) { - var ret = new Array(src.length); - var i = ret.length; - while (i--) { - ret[i] = deepClone(src[i]); - } - return ret; - } - var dest = {}; - // eslint-disable-next-line guard-for-in - for (var key in src) { - dest[key] = deepClone(src[key]); - } - return dest; -} -var clone = exports.clone = deepClone; - -/** - * overwrites the getter with the actual value - * Mostly used for caching stuff on the first run - */ -function overwriteGetterForCaching(obj, getterName, value) { - Object.defineProperty(obj, getterName, { - get: function () { - return value; - } - }); - return value; -} -function hasDeepProperty(obj, property) { - if (obj.hasOwnProperty(property)) { - return true; - } - if (Array.isArray(obj)) { - var has = !!obj.find(item => hasDeepProperty(item, property)); - 
return has; - } - - // Recursively check for property in nested objects - for (var key in obj) { - if (typeof obj[key] === 'object' && obj[key] !== null) { - if (hasDeepProperty(obj[key], property)) { - return true; - } - } - } - - // Return false if 'foobar' is not found at any level - return false; -} -//# sourceMappingURL=utils-object.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-object.js.map b/dist/cjs/plugins/utils/utils-object.js.map deleted file mode 100644 index dca6ffaaaa5..00000000000 --- a/dist/cjs/plugins/utils/utils-object.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-object.js","names":["deepFreeze","o","Object","freeze","getOwnPropertyNames","forEach","prop","prototype","hasOwnProperty","call","isFrozen","objectPathMonad","objectPath","split","splitLength","length","obj","currentVal","i","subPath","getFromObjectOrThrow","key","val","Error","flattenObject","ob","toReturn","flatObject","x","flatClone","assign","firstPropertyNameOfObject","keys","firstPropertyValueOfObject","sortObject","noArraySort","Array","isArray","sort","a","b","localeCompare","map","out","deepClone","src","ret","dest","clone","exports","overwriteGetterForCaching","getterName","value","defineProperty","get","hasDeepProperty","property","has","find","item"],"sources":["../../../../src/plugins/utils/utils-object.ts"],"sourcesContent":["import type {\n DeepReadonlyObject\n} from '../../types/index.d.ts';\n\nexport function deepFreeze(o: T): T {\n Object.freeze(o);\n Object.getOwnPropertyNames(o).forEach(function (prop) {\n if (\n Object.prototype.hasOwnProperty.call(o, prop) &&\n (o as any)[prop] !== null &&\n (\n typeof (o as any)[prop] === 'object'\n ||\n typeof (o as any)[prop] === 'function'\n ) &&\n !Object.isFrozen((o as any)[prop])\n ) {\n deepFreeze((o as any)[prop]);\n }\n });\n return o;\n}\n\n\n\n/**\n * To get specific nested path values from objects,\n * RxDB normally uses the 'dot-prop' npm module.\n * But when performance 
is really relevant, this is not fast enough.\n * Instead we use a monad that can prepare some stuff up front\n * and we can reuse the generated function.\n */\nexport type ObjectPathMonadFunction = (obj: T) => R;\nexport function objectPathMonad(objectPath: string): ObjectPathMonadFunction {\n const split = objectPath.split('.');\n\n // reuse this variable for better performance.\n const splitLength = split.length;\n\n /**\n * Performance shortcut,\n * if no nested path is used,\n * directly return the field of the object.\n */\n if (splitLength === 1) {\n return (obj: T) => (obj as any)[objectPath];\n }\n\n\n return (obj: T) => {\n let currentVal: any = obj;\n for (let i = 0; i < splitLength; ++i) {\n const subPath = split[i];\n currentVal = currentVal[subPath];\n if (typeof currentVal === 'undefined') {\n return currentVal;\n }\n }\n return currentVal;\n };\n}\n\n\nexport function getFromObjectOrThrow(\n obj: { [k: string]: V; },\n key: string\n): V {\n const val = obj[key];\n if (!val) {\n throw new Error('missing value from object ' + key);\n }\n return val;\n}\n\n/**\n * returns a flattened object\n * @link https://gist.github.com/penguinboy/762197\n */\nexport function flattenObject(ob: any) {\n const toReturn: any = {};\n\n for (const i in ob) {\n if (!Object.prototype.hasOwnProperty.call(ob, i)) continue;\n if ((typeof ob[i]) === 'object') {\n const flatObject = flattenObject(ob[i]);\n for (const x in flatObject) {\n if (!Object.prototype.hasOwnProperty.call(flatObject, x)) continue;\n toReturn[i + '.' 
+ x] = flatObject[x];\n }\n } else {\n toReturn[i] = ob[i];\n }\n }\n return toReturn;\n}\n\n\n/**\n * does a flat copy on the objects,\n * is about 3 times faster then using deepClone\n * @link https://jsperf.com/object-rest-spread-vs-clone/2\n */\nexport function flatClone(obj: T | DeepReadonlyObject | Readonly): T {\n return Object.assign({}, obj) as any;\n}\n\n/**\n * @link https://stackoverflow.com/a/11509718/3443137\n */\nexport function firstPropertyNameOfObject(obj: any): string {\n return Object.keys(obj)[0];\n}\nexport function firstPropertyValueOfObject(obj: { [k: string]: T; }): T {\n const key = Object.keys(obj)[0];\n return obj[key];\n}\n\n\n/**\n * deep-sort an object so its attributes are in lexical order.\n * Also sorts the arrays inside of the object if no-array-sort not set\n */\nexport function sortObject(obj: any, noArraySort = false): any {\n if (!obj) return obj; // do not sort null, false or undefined\n\n // array\n if (!noArraySort && Array.isArray(obj)) {\n return obj\n .sort((a, b) => {\n if (typeof a === 'string' && typeof b === 'string')\n return a.localeCompare(b);\n\n if (typeof a === 'object') return 1;\n else return -1;\n })\n .map(i => sortObject(i, noArraySort));\n }\n\n // object\n // array is also of type object\n if (typeof obj === 'object' && !Array.isArray(obj)) {\n const out: any = {};\n Object.keys(obj)\n .sort((a, b) => a.localeCompare(b))\n .forEach(key => {\n out[key] = sortObject(obj[key], noArraySort);\n });\n return out;\n }\n\n // everything else\n return obj;\n}\n\n\n\n/**\n * Deep clone a plain json object.\n * Does not work with recursive stuff\n * or non-plain-json.\n * IMPORTANT: Performance of this is very important,\n * do not change it without running performance tests!\n *\n * @link https://github.com/zxdong262/deep-copy/blob/master/src/index.ts\n */\nfunction deepClone(src: T | DeepReadonlyObject): T {\n if (!src) {\n return src;\n }\n if (src === null || typeof (src) !== 'object') {\n return src;\n }\n if 
(Array.isArray(src)) {\n const ret = new Array(src.length);\n let i = ret.length;\n while (i--) {\n ret[i] = deepClone(src[i]);\n }\n return ret as any;\n }\n const dest: any = {};\n // eslint-disable-next-line guard-for-in\n for (const key in src) {\n dest[key] = deepClone(src[key]);\n }\n return dest;\n}\nexport const clone = deepClone;\n\n\n\n/**\n * overwrites the getter with the actual value\n * Mostly used for caching stuff on the first run\n */\nexport function overwriteGetterForCaching(\n obj: any,\n getterName: string,\n value: ValueType\n): ValueType {\n Object.defineProperty(obj, getterName, {\n get: function () {\n return value;\n }\n });\n return value;\n}\n\n\nexport function hasDeepProperty(obj: any, property: string): boolean {\n if (obj.hasOwnProperty(property)) {\n return true;\n }\n\n if (Array.isArray(obj)) {\n const has = !!obj.find(item => hasDeepProperty(item, property));\n return has;\n }\n\n // Recursively check for property in nested objects\n for (const key in obj) {\n if (typeof obj[key] === 'object' && obj[key] !== null) {\n if (hasDeepProperty(obj[key], property)) {\n return true;\n }\n }\n }\n\n // Return false if 'foobar' is not found at any level\n return 
false;\n}\n"],"mappings":";;;;;;;;;;;;;;;;AAIO,SAASA,UAAUA,CAAIC,CAAI,EAAK;EACnCC,MAAM,CAACC,MAAM,CAACF,CAAC,CAAC;EAChBC,MAAM,CAACE,mBAAmB,CAACH,CAAC,CAAC,CAACI,OAAO,CAAC,UAAUC,IAAI,EAAE;IAClD,IACIJ,MAAM,CAACK,SAAS,CAACC,cAAc,CAACC,IAAI,CAACR,CAAC,EAAEK,IAAI,CAAC,IAC5CL,CAAC,CAASK,IAAI,CAAC,KAAK,IAAI,KAErB,OAAQL,CAAC,CAASK,IAAI,CAAC,KAAK,QAAQ,IAEpC,OAAQL,CAAC,CAASK,IAAI,CAAC,KAAK,UAAU,CACzC,IACD,CAACJ,MAAM,CAACQ,QAAQ,CAAET,CAAC,CAASK,IAAI,CAAC,CAAC,EACpC;MACEN,UAAU,CAAEC,CAAC,CAASK,IAAI,CAAC,CAAC;IAChC;EACJ,CAAC,CAAC;EACF,OAAOL,CAAC;AACZ;;AAIA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEO,SAASU,eAAeA,CAAaC,UAAkB,EAAiC;EAC3F,IAAMC,KAAK,GAAGD,UAAU,CAACC,KAAK,CAAC,GAAG,CAAC;;EAEnC;EACA,IAAMC,WAAW,GAAGD,KAAK,CAACE,MAAM;;EAEhC;AACJ;AACA;AACA;AACA;EACI,IAAID,WAAW,KAAK,CAAC,EAAE;IACnB,OAAQE,GAAM,IAAMA,GAAG,CAASJ,UAAU,CAAC;EAC/C;EAGA,OAAQI,GAAM,IAAK;IACf,IAAIC,UAAe,GAAGD,GAAG;IACzB,KAAK,IAAIE,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGJ,WAAW,EAAE,EAAEI,CAAC,EAAE;MAClC,IAAMC,OAAO,GAAGN,KAAK,CAACK,CAAC,CAAC;MACxBD,UAAU,GAAGA,UAAU,CAACE,OAAO,CAAC;MAChC,IAAI,OAAOF,UAAU,KAAK,WAAW,EAAE;QACnC,OAAOA,UAAU;MACrB;IACJ;IACA,OAAOA,UAAU;EACrB,CAAC;AACL;AAGO,SAASG,oBAAoBA,CAChCJ,GAAwB,EACxBK,GAAW,EACV;EACD,IAAMC,GAAG,GAAGN,GAAG,CAACK,GAAG,CAAC;EACpB,IAAI,CAACC,GAAG,EAAE;IACN,MAAM,IAAIC,KAAK,CAAC,4BAA4B,GAAGF,GAAG,CAAC;EACvD;EACA,OAAOC,GAAG;AACd;;AAEA;AACA;AACA;AACA;AACO,SAASE,aAAaA,CAACC,EAAO,EAAE;EACnC,IAAMC,QAAa,GAAG,CAAC,CAAC;EAExB,KAAK,IAAMR,CAAC,IAAIO,EAAE,EAAE;IAChB,IAAI,CAACvB,MAAM,CAACK,SAAS,CAACC,cAAc,CAACC,IAAI,CAACgB,EAAE,EAAEP,CAAC,CAAC,EAAE;IAClD,IAAK,OAAOO,EAAE,CAACP,CAAC,CAAC,KAAM,QAAQ,EAAE;MAC7B,IAAMS,UAAU,GAAGH,aAAa,CAACC,EAAE,CAACP,CAAC,CAAC,CAAC;MACvC,KAAK,IAAMU,CAAC,IAAID,UAAU,EAAE;QACxB,IAAI,CAACzB,MAAM,CAACK,SAAS,CAACC,cAAc,CAACC,IAAI,CAACkB,UAAU,EAAEC,CAAC,CAAC,EAAE;QAC1DF,QAAQ,CAACR,CAAC,GAAG,GAAG,GAAGU,CAAC,CAAC,GAAGD,UAAU,CAACC,CAAC,CAAC;MACzC;IACJ,CAAC,MAAM;MACHF,QAAQ,CAACR,CAAC,CAAC,GAAGO,EAAE,CAACP,CAAC,CAAC;IACvB;EACJ;EACA,OAAOQ,QAAQ;AACnB;;AAGA;AACA;AACA;AACA;AACA;AACO,SAASG,SAASA
,CAAIb,GAA4C,EAAK;EAC1E,OAAOd,MAAM,CAAC4B,MAAM,CAAC,CAAC,CAAC,EAAEd,GAAG,CAAC;AACjC;;AAEA;AACA;AACA;AACO,SAASe,yBAAyBA,CAACf,GAAQ,EAAU;EACxD,OAAOd,MAAM,CAAC8B,IAAI,CAAChB,GAAG,CAAC,CAAC,CAAC,CAAC;AAC9B;AACO,SAASiB,0BAA0BA,CAAIjB,GAAwB,EAAK;EACvE,IAAMK,GAAG,GAAGnB,MAAM,CAAC8B,IAAI,CAAChB,GAAG,CAAC,CAAC,CAAC,CAAC;EAC/B,OAAOA,GAAG,CAACK,GAAG,CAAC;AACnB;;AAGA;AACA;AACA;AACA;AACO,SAASa,UAAUA,CAAClB,GAAQ,EAAEmB,WAAW,GAAG,KAAK,EAAO;EAC3D,IAAI,CAACnB,GAAG,EAAE,OAAOA,GAAG,CAAC,CAAC;;EAEtB;EACA,IAAI,CAACmB,WAAW,IAAIC,KAAK,CAACC,OAAO,CAACrB,GAAG,CAAC,EAAE;IACpC,OAAOA,GAAG,CACLsB,IAAI,CAAC,CAACC,CAAC,EAAEC,CAAC,KAAK;MACZ,IAAI,OAAOD,CAAC,KAAK,QAAQ,IAAI,OAAOC,CAAC,KAAK,QAAQ,EAC9C,OAAOD,CAAC,CAACE,aAAa,CAACD,CAAC,CAAC;MAE7B,IAAI,OAAOD,CAAC,KAAK,QAAQ,EAAE,OAAO,CAAC,CAAC,KAC/B,OAAO,CAAC,CAAC;IAClB,CAAC,CAAC,CACDG,GAAG,CAACxB,CAAC,IAAIgB,UAAU,CAAChB,CAAC,EAAEiB,WAAW,CAAC,CAAC;EAC7C;;EAEA;EACA;EACA,IAAI,OAAOnB,GAAG,KAAK,QAAQ,IAAI,CAACoB,KAAK,CAACC,OAAO,CAACrB,GAAG,CAAC,EAAE;IAChD,IAAM2B,GAAQ,GAAG,CAAC,CAAC;IACnBzC,MAAM,CAAC8B,IAAI,CAAChB,GAAG,CAAC,CACXsB,IAAI,CAAC,CAACC,CAAC,EAAEC,CAAC,KAAKD,CAAC,CAACE,aAAa,CAACD,CAAC,CAAC,CAAC,CAClCnC,OAAO,CAACgB,GAAG,IAAI;MACZsB,GAAG,CAACtB,GAAG,CAAC,GAAGa,UAAU,CAAClB,GAAG,CAACK,GAAG,CAAC,EAAEc,WAAW,CAAC;IAChD,CAAC,CAAC;IACN,OAAOQ,GAAG;EACd;;EAEA;EACA,OAAO3B,GAAG;AACd;;AAIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,SAAS4B,SAASA,CAAIC,GAA8B,EAAK;EACrD,IAAI,CAACA,GAAG,EAAE;IACN,OAAOA,GAAG;EACd;EACA,IAAIA,GAAG,KAAK,IAAI,IAAI,OAAQA,GAAI,KAAK,QAAQ,EAAE;IAC3C,OAAOA,GAAG;EACd;EACA,IAAIT,KAAK,CAACC,OAAO,CAACQ,GAAG,CAAC,EAAE;IACpB,IAAMC,GAAG,GAAG,IAAIV,KAAK,CAACS,GAAG,CAAC9B,MAAM,CAAC;IACjC,IAAIG,CAAC,GAAG4B,GAAG,CAAC/B,MAAM;IAClB,OAAOG,CAAC,EAAE,EAAE;MACR4B,GAAG,CAAC5B,CAAC,CAAC,GAAG0B,SAAS,CAACC,GAAG,CAAC3B,CAAC,CAAC,CAAC;IAC9B;IACA,OAAO4B,GAAG;EACd;EACA,IAAMC,IAAS,GAAG,CAAC,CAAC;EACpB;EACA,KAAK,IAAM1B,GAAG,IAAIwB,GAAG,EAAE;IACnBE,IAAI,CAAC1B,GAAG,CAAC,GAAGuB,SAAS,CAACC,GAAG,CAACxB,GAAG,CAAC,CAAC;EACnC;EACA,OAAO0B,IAAI;AACf;AACO,IAAMC,KAAK,GAAAC,OAAA,CA
AAD,KAAA,GAAGJ,SAAS;;AAI9B;AACA;AACA;AACA;AACO,SAASM,yBAAyBA,CACrClC,GAAQ,EACRmC,UAAkB,EAClBC,KAAgB,EACP;EACTlD,MAAM,CAACmD,cAAc,CAACrC,GAAG,EAAEmC,UAAU,EAAE;IACnCG,GAAG,EAAE,SAAAA,CAAA,EAAY;MACb,OAAOF,KAAK;IAChB;EACJ,CAAC,CAAC;EACF,OAAOA,KAAK;AAChB;AAGO,SAASG,eAAeA,CAACvC,GAAQ,EAAEwC,QAAgB,EAAW;EACjE,IAAIxC,GAAG,CAACR,cAAc,CAACgD,QAAQ,CAAC,EAAE;IAC9B,OAAO,IAAI;EACf;EAEA,IAAIpB,KAAK,CAACC,OAAO,CAACrB,GAAG,CAAC,EAAE;IACpB,IAAMyC,GAAG,GAAG,CAAC,CAACzC,GAAG,CAAC0C,IAAI,CAACC,IAAI,IAAIJ,eAAe,CAACI,IAAI,EAAEH,QAAQ,CAAC,CAAC;IAC/D,OAAOC,GAAG;EACd;;EAEA;EACA,KAAK,IAAMpC,GAAG,IAAIL,GAAG,EAAE;IACnB,IAAI,OAAOA,GAAG,CAACK,GAAG,CAAC,KAAK,QAAQ,IAAIL,GAAG,CAACK,GAAG,CAAC,KAAK,IAAI,EAAE;MACnD,IAAIkC,eAAe,CAACvC,GAAG,CAACK,GAAG,CAAC,EAAEmC,QAAQ,CAAC,EAAE;QACrC,OAAO,IAAI;MACf;IACJ;EACJ;;EAEA;EACA,OAAO,KAAK;AAChB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-other.js b/dist/cjs/plugins/utils/utils-other.js deleted file mode 100644 index 34977d5d50c..00000000000 --- a/dist/cjs/plugins/utils/utils-other.js +++ /dev/null @@ -1,39 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RXJS_SHARE_REPLAY_DEFAULTS = void 0; -exports.ensureInteger = ensureInteger; -exports.ensureNotFalsy = ensureNotFalsy; -exports.runXTimes = runXTimes; -function runXTimes(xTimes, fn) { - new Array(xTimes).fill(0).forEach((_v, idx) => fn(idx)); -} -function ensureNotFalsy(obj, message) { - if (!obj) { - if (!message) { - message = ''; - } - throw new Error('ensureNotFalsy() is falsy: ' + message); - } - return obj; -} -function ensureInteger(obj) { - if (!Number.isInteger(obj)) { - throw new Error('ensureInteger() is falsy'); - } - return obj; -} - -/** - * Using shareReplay() without settings will not unsubscribe - * if there are no more subscribers. - * So we use these defaults. 
- * @link https://cartant.medium.com/rxjs-whats-changed-with-sharereplay-65c098843e95 - */ -var RXJS_SHARE_REPLAY_DEFAULTS = exports.RXJS_SHARE_REPLAY_DEFAULTS = { - bufferSize: 1, - refCount: true -}; -//# sourceMappingURL=utils-other.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-other.js.map b/dist/cjs/plugins/utils/utils-other.js.map deleted file mode 100644 index ddce0142008..00000000000 --- a/dist/cjs/plugins/utils/utils-other.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-other.js","names":["runXTimes","xTimes","fn","Array","fill","forEach","_v","idx","ensureNotFalsy","obj","message","Error","ensureInteger","Number","isInteger","RXJS_SHARE_REPLAY_DEFAULTS","exports","bufferSize","refCount"],"sources":["../../../../src/plugins/utils/utils-other.ts"],"sourcesContent":["export function runXTimes(xTimes: number, fn: (idx: number) => void) {\n new Array(xTimes).fill(0).forEach((_v, idx) => fn(idx));\n}\n\nexport function ensureNotFalsy(obj: T | false | undefined | null, message?: string): T {\n if (!obj) {\n if (!message) {\n message = '';\n }\n throw new Error('ensureNotFalsy() is falsy: ' + message);\n }\n return obj;\n}\n\nexport function ensureInteger(obj: unknown): number {\n if (!Number.isInteger(obj)) {\n throw new Error('ensureInteger() is falsy');\n }\n return obj as number;\n}\n\n/**\n * Using shareReplay() without settings will not unsubscribe\n * if there are no more subscribers.\n * So we use these defaults.\n * @link https://cartant.medium.com/rxjs-whats-changed-with-sharereplay-65c098843e95\n */\nexport const RXJS_SHARE_REPLAY_DEFAULTS = {\n bufferSize: 1,\n refCount: 
true\n};\n"],"mappings":";;;;;;;;;AAAO,SAASA,SAASA,CAACC,MAAc,EAAEC,EAAyB,EAAE;EACjE,IAAIC,KAAK,CAACF,MAAM,CAAC,CAACG,IAAI,CAAC,CAAC,CAAC,CAACC,OAAO,CAAC,CAACC,EAAE,EAAEC,GAAG,KAAKL,EAAE,CAACK,GAAG,CAAC,CAAC;AAC3D;AAEO,SAASC,cAAcA,CAAIC,GAAiC,EAAEC,OAAgB,EAAK;EACtF,IAAI,CAACD,GAAG,EAAE;IACN,IAAI,CAACC,OAAO,EAAE;MACVA,OAAO,GAAG,EAAE;IAChB;IACA,MAAM,IAAIC,KAAK,CAAC,6BAA6B,GAAGD,OAAO,CAAC;EAC5D;EACA,OAAOD,GAAG;AACd;AAEO,SAASG,aAAaA,CAACH,GAAY,EAAU;EAChD,IAAI,CAACI,MAAM,CAACC,SAAS,CAACL,GAAG,CAAC,EAAE;IACxB,MAAM,IAAIE,KAAK,CAAC,0BAA0B,CAAC;EAC/C;EACA,OAAOF,GAAG;AACd;;AAEA;AACA;AACA;AACA;AACA;AACA;AACO,IAAMM,0BAA0B,GAAAC,OAAA,CAAAD,0BAAA,GAAG;EACtCE,UAAU,EAAE,CAAC;EACbC,QAAQ,EAAE;AACd,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-promise.js b/dist/cjs/plugins/utils/utils-promise.js deleted file mode 100644 index 65ad3ff8d43..00000000000 --- a/dist/cjs/plugins/utils/utils-promise.js +++ /dev/null @@ -1,118 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.PROMISE_RESOLVE_VOID = exports.PROMISE_RESOLVE_TRUE = exports.PROMISE_RESOLVE_NULL = exports.PROMISE_RESOLVE_FALSE = void 0; -exports.isPromise = isPromise; -exports.nextTick = nextTick; -exports.promiseSeries = promiseSeries; -exports.promiseWait = promiseWait; -exports.requestIdleCallbackIfAvailable = requestIdleCallbackIfAvailable; -exports.requestIdlePromise = requestIdlePromise; -exports.requestIdlePromiseNoQueue = requestIdlePromiseNoQueue; -exports.toPromise = toPromise; -/** - * returns a promise that resolves on the next tick - */ -function nextTick() { - return new Promise(res => setTimeout(res, 0)); -} -function promiseWait(ms = 0) { - return new Promise(res => setTimeout(res, ms)); -} -function toPromise(maybePromise) { - if (maybePromise && typeof maybePromise.then === 'function') { - // is promise - return maybePromise; - } else { - return Promise.resolve(maybePromise); - } -} - -/** - * returns true if 
promise is given - */ -function isPromise(value) { - if (typeof value !== 'undefined' && typeof value.then === 'function') { - return true; - } - return false; -} - -/** - * Reusing resolved promises has a better - * performance than creating new ones each time. - */ -var PROMISE_RESOLVE_TRUE = exports.PROMISE_RESOLVE_TRUE = Promise.resolve(true); -var PROMISE_RESOLVE_FALSE = exports.PROMISE_RESOLVE_FALSE = Promise.resolve(false); -var PROMISE_RESOLVE_NULL = exports.PROMISE_RESOLVE_NULL = Promise.resolve(null); -var PROMISE_RESOLVE_VOID = exports.PROMISE_RESOLVE_VOID = Promise.resolve(); -function requestIdlePromiseNoQueue( -/** - * We always set a timeout! - * RxDB might be used on the server side where the - * server runs 24/4 on 99% CPU. So without a timeout - * this would never resolve which could cause a memory leak. - */ -timeout = 10000) { - /** - * Do not use window.requestIdleCallback - * because some javascript runtimes like react-native, - * do not have a window object, but still have a global - * requestIdleCallback function. - * @link https://github.com/pubkey/rxdb/issues/4804 - */ - if (typeof requestIdleCallback === 'function') { - return new Promise(res => { - requestIdleCallback(() => res(), { - timeout - }); - }); - } else { - return promiseWait(0); - } -} - -/** - * If multiple operations wait for an requestIdlePromise - * we do not want them to resolve all at the same time. - * So we have to queue the calls. 
- */ -var idlePromiseQueue = PROMISE_RESOLVE_VOID; -function requestIdlePromise(timeout = undefined) { - idlePromiseQueue = idlePromiseQueue.then(() => { - return requestIdlePromiseNoQueue(timeout); - }); - return idlePromiseQueue; -} - -/** - * run the callback if requestIdleCallback available - * do nothing if not - * @link https://developer.mozilla.org/de/docs/Web/API/Window/requestIdleCallback - */ -function requestIdleCallbackIfAvailable(fun) { - /** - * Do not use window.requestIdleCallback - * because some javascript runtimes like react-native, - * do not have a window object, but still have a global - * requestIdleCallback function. - * @link https://github.com/pubkey/rxdb/issues/4804 - */ - if (typeof requestIdleCallback === 'function') { - requestIdleCallback(() => { - fun(); - }); - } -} - -/** - * like Promise.all() but runs in series instead of parallel - * @link https://github.com/egoist/promise.series/blob/master/index.js - * @param tasks array with functions that return a promise - */ -function promiseSeries(tasks, initial) { - return tasks.reduce((current, next) => current.then(next), Promise.resolve(initial)); -} -//# sourceMappingURL=utils-promise.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-promise.js.map b/dist/cjs/plugins/utils/utils-promise.js.map deleted file mode 100644 index 0eefad7da01..00000000000 --- a/dist/cjs/plugins/utils/utils-promise.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"utils-promise.js","names":["nextTick","Promise","res","setTimeout","promiseWait","ms","toPromise","maybePromise","then","resolve","isPromise","value","PROMISE_RESOLVE_TRUE","exports","PROMISE_RESOLVE_FALSE","PROMISE_RESOLVE_NULL","PROMISE_RESOLVE_VOID","requestIdlePromiseNoQueue","timeout","requestIdleCallback","idlePromiseQueue","requestIdlePromise","undefined","requestIdleCallbackIfAvailable","fun","promiseSeries","tasks","initial","reduce","current","next"],"sources":["../../../../src/plugins/utils/utils-promise.ts"],"sourcesContent":["/**\n * returns a promise that resolves on the next tick\n */\nexport function nextTick(): Promise {\n return new Promise(res => setTimeout(res, 0));\n}\n\nexport function promiseWait(ms: number = 0): Promise {\n return new Promise(res => setTimeout(res, ms));\n}\n\nexport function toPromise(maybePromise: Promise | T): Promise {\n if (maybePromise && typeof (maybePromise as any).then === 'function') {\n // is promise\n return maybePromise as any;\n } else {\n return Promise.resolve(maybePromise);\n }\n}\n\n/**\n * returns true if promise is given\n */\nexport function isPromise(value: any): boolean {\n if (\n typeof value !== 'undefined' &&\n typeof value.then === 'function'\n ) {\n return true;\n }\n return false;\n}\n\n/**\n * Reusing resolved promises has a better\n * performance than creating new ones each time.\n */\nexport const PROMISE_RESOLVE_TRUE: Promise = Promise.resolve(true);\nexport const PROMISE_RESOLVE_FALSE: Promise = Promise.resolve(false);\nexport const PROMISE_RESOLVE_NULL: Promise = Promise.resolve(null);\nexport const PROMISE_RESOLVE_VOID: Promise = Promise.resolve();\n\n\nexport function requestIdlePromiseNoQueue(\n /**\n * We always set a timeout!\n * RxDB might be used on the server side where the\n * server runs 24/4 on 99% CPU. 
So without a timeout\n * this would never resolve which could cause a memory leak.\n */\n timeout: number | undefined = 10000\n) {\n /**\n * Do not use window.requestIdleCallback\n * because some javascript runtimes like react-native,\n * do not have a window object, but still have a global\n * requestIdleCallback function.\n * @link https://github.com/pubkey/rxdb/issues/4804\n */\n if (\n typeof requestIdleCallback === 'function'\n ) {\n return new Promise(res => {\n requestIdleCallback(\n () => res(),\n {\n timeout\n }\n );\n });\n } else {\n return promiseWait(0);\n }\n}\n\n/**\n * If multiple operations wait for an requestIdlePromise\n * we do not want them to resolve all at the same time.\n * So we have to queue the calls.\n */\nlet idlePromiseQueue = PROMISE_RESOLVE_VOID;\nexport function requestIdlePromise(\n timeout: number | undefined = undefined\n) {\n idlePromiseQueue = idlePromiseQueue.then(() => {\n return requestIdlePromiseNoQueue(timeout);\n });\n return idlePromiseQueue;\n}\n\n\n/**\n * run the callback if requestIdleCallback available\n * do nothing if not\n * @link https://developer.mozilla.org/de/docs/Web/API/Window/requestIdleCallback\n */\nexport function requestIdleCallbackIfAvailable(fun: Function): void {\n /**\n * Do not use window.requestIdleCallback\n * because some javascript runtimes like react-native,\n * do not have a window object, but still have a global\n * requestIdleCallback function.\n * @link https://github.com/pubkey/rxdb/issues/4804\n */\n if (\n typeof requestIdleCallback === 'function'\n ) {\n requestIdleCallback(() => {\n fun();\n });\n }\n}\n\n\n/**\n * like Promise.all() but runs in series instead of parallel\n * @link https://github.com/egoist/promise.series/blob/master/index.js\n * @param tasks array with functions that return a promise\n */\nexport function promiseSeries(\n tasks: Function[],\n initial?: any\n): Promise {\n return tasks\n .reduce(\n (current, next) => (current as any).then(next),\n 
Promise.resolve(initial)\n );\n}\n"],"mappings":";;;;;;;;;;;;;;AAAA;AACA;AACA;AACO,SAASA,QAAQA,CAAA,EAAkB;EACtC,OAAO,IAAIC,OAAO,CAACC,GAAG,IAAIC,UAAU,CAACD,GAAG,EAAE,CAAC,CAAC,CAAC;AACjD;AAEO,SAASE,WAAWA,CAACC,EAAU,GAAG,CAAC,EAAiB;EACvD,OAAO,IAAIJ,OAAO,CAACC,GAAG,IAAIC,UAAU,CAACD,GAAG,EAAEG,EAAE,CAAC,CAAC;AAClD;AAEO,SAASC,SAASA,CAAIC,YAA4B,EAAc;EACnE,IAAIA,YAAY,IAAI,OAAQA,YAAY,CAASC,IAAI,KAAK,UAAU,EAAE;IAClE;IACA,OAAOD,YAAY;EACvB,CAAC,MAAM;IACH,OAAON,OAAO,CAACQ,OAAO,CAACF,YAAY,CAAC;EACxC;AACJ;;AAEA;AACA;AACA;AACO,SAASG,SAASA,CAACC,KAAU,EAAW;EAC3C,IACI,OAAOA,KAAK,KAAK,WAAW,IAC5B,OAAOA,KAAK,CAACH,IAAI,KAAK,UAAU,EAClC;IACE,OAAO,IAAI;EACf;EACA,OAAO,KAAK;AAChB;;AAEA;AACA;AACA;AACA;AACO,IAAMI,oBAAmC,GAAAC,OAAA,CAAAD,oBAAA,GAAGX,OAAO,CAACQ,OAAO,CAAC,IAAI,CAAC;AACjE,IAAMK,qBAAqC,GAAAD,OAAA,CAAAC,qBAAA,GAAGb,OAAO,CAACQ,OAAO,CAAC,KAAK,CAAC;AACpE,IAAMM,oBAAmC,GAAAF,OAAA,CAAAE,oBAAA,GAAGd,OAAO,CAACQ,OAAO,CAAC,IAAI,CAAC;AACjE,IAAMO,oBAAmC,GAAAH,OAAA,CAAAG,oBAAA,GAAGf,OAAO,CAACQ,OAAO,CAAC,CAAC;AAG7D,SAASQ,yBAAyBA;AACrC;AACJ;AACA;AACA;AACA;AACA;AACIC,OAA2B,GAAG,KAAK,EACrC;EACE;AACJ;AACA;AACA;AACA;AACA;AACA;EACI,IACI,OAAOC,mBAAmB,KAAK,UAAU,EAC3C;IACE,OAAO,IAAIlB,OAAO,CAAOC,GAAG,IAAI;MAC5BiB,mBAAmB,CACf,MAAMjB,GAAG,CAAC,CAAC,EACX;QACIgB;MACJ,CACJ,CAAC;IACL,CAAC,CAAC;EACN,CAAC,MAAM;IACH,OAAOd,WAAW,CAAC,CAAC,CAAC;EACzB;AACJ;;AAEA;AACA;AACA;AACA;AACA;AACA,IAAIgB,gBAAgB,GAAGJ,oBAAoB;AACpC,SAASK,kBAAkBA,CAC9BH,OAA2B,GAAGI,SAAS,EACzC;EACEF,gBAAgB,GAAGA,gBAAgB,CAACZ,IAAI,CAAC,MAAM;IAC3C,OAAOS,yBAAyB,CAACC,OAAO,CAAC;EAC7C,CAAC,CAAC;EACF,OAAOE,gBAAgB;AAC3B;;AAGA;AACA;AACA;AACA;AACA;AACO,SAASG,8BAA8BA,CAACC,GAAa,EAAQ;EAChE;AACJ;AACA;AACA;AACA;AACA;AACA;EACI,IACI,OAAOL,mBAAmB,KAAK,UAAU,EAC3C;IACEA,mBAAmB,CAAC,MAAM;MACtBK,GAAG,CAAC,CAAC;IACT,CAAC,CAAC;EACN;AACJ;;AAGA;AACA;AACA;AACA;AACA;AACO,SAASC,aAAaA,CACzBC,KAAiB,EACjBC,OAAa,EACC;EACd,OAAOD,KAAK,CACPE,MAAM,CACH,CAACC,OAAO,EAAEC,IAAI,KAAMD,OAAO,CAASrB,IAAI,CAACsB,IAAI,CAAC,EAC9C7B,OAAO,CAACQ,OAAO,CAACkB,OAAO,CAC3B,CAAC;AACT","ignoreList":[]} 
\ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-regex.js b/dist/cjs/plugins/utils/utils-regex.js deleted file mode 100644 index ed2c0a1fb47..00000000000 --- a/dist/cjs/plugins/utils/utils-regex.js +++ /dev/null @@ -1,9 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.REGEX_ALL_PIPES = exports.REGEX_ALL_DOTS = void 0; -var REGEX_ALL_DOTS = exports.REGEX_ALL_DOTS = /\./g; -var REGEX_ALL_PIPES = exports.REGEX_ALL_PIPES = /\|/g; -//# sourceMappingURL=utils-regex.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-regex.js.map b/dist/cjs/plugins/utils/utils-regex.js.map deleted file mode 100644 index 52c10cbc437..00000000000 --- a/dist/cjs/plugins/utils/utils-regex.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-regex.js","names":["REGEX_ALL_DOTS","exports","REGEX_ALL_PIPES"],"sources":["../../../../src/plugins/utils/utils-regex.ts"],"sourcesContent":["export const REGEX_ALL_DOTS = /\\./g;\nexport const REGEX_ALL_PIPES = /\\|/g;\n"],"mappings":";;;;;;AAAO,IAAMA,cAAc,GAAAC,OAAA,CAAAD,cAAA,GAAG,KAAK;AAC5B,IAAME,eAAe,GAAAD,OAAA,CAAAC,eAAA,GAAG,KAAK","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-revision.js b/dist/cjs/plugins/utils/utils-revision.js deleted file mode 100644 index dc45ceaeb6a..00000000000 --- a/dist/cjs/plugins/utils/utils-revision.js +++ /dev/null @@ -1,50 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.createRevision = createRevision; -exports.getHeightOfRevision = getHeightOfRevision; -exports.parseRevision = parseRevision; -/** - * Parses the full revision. - * Do NOT use this if you only need the revision height, - * then use getHeightOfRevision() instead which is faster. 
- */ -function parseRevision(revision) { - var split = revision.split('-'); - if (split.length !== 2) { - throw new Error('malformatted revision: ' + revision); - } - return { - height: parseInt(split[0], 10), - hash: split[1] - }; -} - -/** - * @hotPath Performance is very important here - * because we need to parse the revision height very often. - * Do not use `parseInt(revision.split('-')[0], 10)` because - * only fetching the start-number chars is faster. - */ -function getHeightOfRevision(revision) { - var useChars = ''; - for (var index = 0; index < revision.length; index++) { - var char = revision[index]; - if (char === '-') { - return parseInt(useChars, 10); - } - useChars += char; - } - throw new Error('malformatted revision: ' + revision); -} - -/** - * Creates the next write revision for a given document. - */ -function createRevision(databaseInstanceToken, previousDocData) { - var newRevisionHeight = !previousDocData ? 1 : getHeightOfRevision(previousDocData._rev) + 1; - return newRevisionHeight + '-' + databaseInstanceToken; -} -//# sourceMappingURL=utils-revision.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-revision.js.map b/dist/cjs/plugins/utils/utils-revision.js.map deleted file mode 100644 index 223c7fbb8aa..00000000000 --- a/dist/cjs/plugins/utils/utils-revision.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-revision.js","names":["parseRevision","revision","split","length","Error","height","parseInt","hash","getHeightOfRevision","useChars","index","char","createRevision","databaseInstanceToken","previousDocData","newRevisionHeight","_rev"],"sources":["../../../../src/plugins/utils/utils-revision.ts"],"sourcesContent":["import type {\n RxDocumentData\n} from '../../types/index.d.ts';\n\n/**\n * Parses the full revision.\n * Do NOT use this if you only need the revision height,\n * then use getHeightOfRevision() instead which is faster.\n */\nexport function parseRevision(revision: string): { height: 
number; hash: string; } {\n const split = revision.split('-');\n if (split.length !== 2) {\n throw new Error('malformatted revision: ' + revision);\n }\n return {\n height: parseInt(split[0], 10),\n hash: split[1]\n };\n}\n\n/**\n * @hotPath Performance is very important here\n * because we need to parse the revision height very often.\n * Do not use `parseInt(revision.split('-')[0], 10)` because\n * only fetching the start-number chars is faster.\n */\nexport function getHeightOfRevision(revision: string): number {\n let useChars = '';\n for (let index = 0; index < revision.length; index++) {\n const char = revision[index];\n if (char === '-') {\n return parseInt(useChars, 10);\n }\n useChars += char;\n }\n throw new Error('malformatted revision: ' + revision);\n}\n\n\n/**\n * Creates the next write revision for a given document.\n */\nexport function createRevision(\n databaseInstanceToken: string,\n previousDocData?: RxDocumentData\n): string {\n const newRevisionHeight = !previousDocData ? 
1 : getHeightOfRevision(previousDocData._rev) + 1\n return newRevisionHeight + '-' + databaseInstanceToken;\n}\n\n"],"mappings":";;;;;;;;AAIA;AACA;AACA;AACA;AACA;AACO,SAASA,aAAaA,CAACC,QAAgB,EAAqC;EAC/E,IAAMC,KAAK,GAAGD,QAAQ,CAACC,KAAK,CAAC,GAAG,CAAC;EACjC,IAAIA,KAAK,CAACC,MAAM,KAAK,CAAC,EAAE;IACpB,MAAM,IAAIC,KAAK,CAAC,yBAAyB,GAAGH,QAAQ,CAAC;EACzD;EACA,OAAO;IACHI,MAAM,EAAEC,QAAQ,CAACJ,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC;IAC9BK,IAAI,EAAEL,KAAK,CAAC,CAAC;EACjB,CAAC;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACO,SAASM,mBAAmBA,CAACP,QAAgB,EAAU;EAC1D,IAAIQ,QAAQ,GAAG,EAAE;EACjB,KAAK,IAAIC,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGT,QAAQ,CAACE,MAAM,EAAEO,KAAK,EAAE,EAAE;IAClD,IAAMC,IAAI,GAAGV,QAAQ,CAACS,KAAK,CAAC;IAC5B,IAAIC,IAAI,KAAK,GAAG,EAAE;MACd,OAAOL,QAAQ,CAACG,QAAQ,EAAE,EAAE,CAAC;IACjC;IACAA,QAAQ,IAAIE,IAAI;EACpB;EACA,MAAM,IAAIP,KAAK,CAAC,yBAAyB,GAAGH,QAAQ,CAAC;AACzD;;AAGA;AACA;AACA;AACO,SAASW,cAAcA,CAC1BC,qBAA6B,EAC7BC,eAA2C,EACrC;EACN,IAAMC,iBAAiB,GAAG,CAACD,eAAe,GAAG,CAAC,GAAGN,mBAAmB,CAACM,eAAe,CAACE,IAAI,CAAC,GAAG,CAAC;EAC9F,OAAOD,iBAAiB,GAAG,GAAG,GAAGF,qBAAqB;AAC1D","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-rxdb-version.js b/dist/cjs/plugins/utils/utils-rxdb-version.js deleted file mode 100644 index f0ab7808b22..00000000000 --- a/dist/cjs/plugins/utils/utils-rxdb-version.js +++ /dev/null @@ -1,11 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RXDB_VERSION = void 0; -/** - * This file is replaced in the 'npm run build:version' script. 
- */ -var RXDB_VERSION = exports.RXDB_VERSION = '15.24.0'; -//# sourceMappingURL=utils-rxdb-version.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-rxdb-version.js.map b/dist/cjs/plugins/utils/utils-rxdb-version.js.map deleted file mode 100644 index 650c0688f04..00000000000 --- a/dist/cjs/plugins/utils/utils-rxdb-version.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-rxdb-version.js","names":["RXDB_VERSION","exports"],"sources":["../../../../src/plugins/utils/utils-rxdb-version.ts"],"sourcesContent":["/**\n * This file is replaced in the 'npm run build:version' script.\n */\nexport const RXDB_VERSION = '15.24.0';\n"],"mappings":";;;;;;AAAA;AACA;AACA;AACO,IAAMA,YAAY,GAAAC,OAAA,CAAAD,YAAA,GAAG,SAAS","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-rxdb-version.template.js b/dist/cjs/plugins/utils/utils-rxdb-version.template.js deleted file mode 100644 index 709265a0d66..00000000000 --- a/dist/cjs/plugins/utils/utils-rxdb-version.template.js +++ /dev/null @@ -1,11 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RXDB_VERSION = void 0; -/** - * This file is replaced in the 'npm run build:version' script. 
- */ -var RXDB_VERSION = exports.RXDB_VERSION = '|PLACEHOLDER|'; -//# sourceMappingURL=utils-rxdb-version.template.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-rxdb-version.template.js.map b/dist/cjs/plugins/utils/utils-rxdb-version.template.js.map deleted file mode 100644 index 64044837bf9..00000000000 --- a/dist/cjs/plugins/utils/utils-rxdb-version.template.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-rxdb-version.template.js","names":["RXDB_VERSION","exports"],"sources":["../../../../src/plugins/utils/utils-rxdb-version.template.ts"],"sourcesContent":["/**\n * This file is replaced in the 'npm run build:version' script.\n */\nexport const RXDB_VERSION = '|PLACEHOLDER|';\n"],"mappings":";;;;;;AAAA;AACA;AACA;AACO,IAAMA,YAAY,GAAAC,OAAA,CAAAD,YAAA,GAAG,eAAe","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-string.js b/dist/cjs/plugins/utils/utils-string.js deleted file mode 100644 index 381d06e7ad4..00000000000 --- a/dist/cjs/plugins/utils/utils-string.js +++ /dev/null @@ -1,105 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RANDOM_STRING = void 0; -exports.arrayBufferToString = arrayBufferToString; -exports.isFolderPath = isFolderPath; -exports.lastCharOfString = lastCharOfString; -exports.normalizeString = normalizeString; -exports.randomCouchString = randomCouchString; -exports.stringToArrayBuffer = stringToArrayBuffer; -exports.trimDots = trimDots; -exports.ucfirst = ucfirst; -var COUCH_NAME_CHARS = 'abcdefghijklmnopqrstuvwxyz'; -/** - * get a random string which can be used with couchdb - * @link http://stackoverflow.com/a/1349426/3443137 - */ -function randomCouchString(length = 10) { - var text = ''; - for (var i = 0; i < length; i++) { - text += COUCH_NAME_CHARS.charAt(Math.floor(Math.random() * COUCH_NAME_CHARS.length)); - } - return text; -} - -/** - * A random string that is never inside of any storage - */ -var 
RANDOM_STRING = exports.RANDOM_STRING = 'Fz7SZXPmYJujkzjY1rpXWvlWBqoGAfAX'; - -/** - * uppercase first char - */ -function ucfirst(str) { - str += ''; - var f = str.charAt(0).toUpperCase(); - return f + str.substr(1); -} - -/** - * removes trailing and ending dots from the string - */ -function trimDots(str) { - // start - while (str.charAt(0) === '.') { - str = str.substr(1); - } - - // end - while (str.slice(-1) === '.') { - str = str.slice(0, -1); - } - return str; -} - -/** - * @link https://stackoverflow.com/a/44950500/3443137 - */ -function lastCharOfString(str) { - return str.charAt(str.length - 1); -} - -/** - * returns true if the given name is likely a folder path - */ -function isFolderPath(name) { - // do not check, if foldername is given - if (name.includes('/') || - // unix - name.includes('\\') // windows - ) { - return true; - } else { - return false; - } -} - -/** - * @link https://gist.github.com/andreburgaud/6f73fd2d690b629346b8 - * @link https://stackoverflow.com/a/76240378/3443137 - */ -function arrayBufferToString(arrayBuffer) { - var chunkSize = 8192; - var str = ''; - var len = arrayBuffer.byteLength; - for (var i = 0; i < len; i += chunkSize) { - var chunk = new Uint8Array(arrayBuffer, i, Math.min(chunkSize, len - i)); - str += String.fromCharCode.apply(null, chunk); - } - return str; -} -function stringToArrayBuffer(str) { - var buf = new ArrayBuffer(str.length); - var bufView = new Uint8Array(buf); - for (var i = 0, strLen = str.length; i < strLen; i++) { - bufView[i] = str.charCodeAt(i); - } - return buf; -} -function normalizeString(str) { - return str.trim().replace(/[\n\s]+/g, ''); -} -//# sourceMappingURL=utils-string.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-string.js.map b/dist/cjs/plugins/utils/utils-string.js.map deleted file mode 100644 index bcc446a9f83..00000000000 --- a/dist/cjs/plugins/utils/utils-string.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"utils-string.js","names":["COUCH_NAME_CHARS","randomCouchString","length","text","i","charAt","Math","floor","random","RANDOM_STRING","exports","ucfirst","str","f","toUpperCase","substr","trimDots","slice","lastCharOfString","isFolderPath","name","includes","arrayBufferToString","arrayBuffer","chunkSize","len","byteLength","chunk","Uint8Array","min","String","fromCharCode","apply","stringToArrayBuffer","buf","ArrayBuffer","bufView","strLen","charCodeAt","normalizeString","trim","replace"],"sources":["../../../../src/plugins/utils/utils-string.ts"],"sourcesContent":["const COUCH_NAME_CHARS = 'abcdefghijklmnopqrstuvwxyz';\n/**\n * get a random string which can be used with couchdb\n * @link http://stackoverflow.com/a/1349426/3443137\n */\nexport function randomCouchString(length: number = 10): string {\n let text = '';\n\n for (let i = 0; i < length; i++) {\n text += COUCH_NAME_CHARS.charAt(Math.floor(Math.random() * COUCH_NAME_CHARS.length));\n }\n\n return text;\n}\n\n\n/**\n * A random string that is never inside of any storage\n */\nexport const RANDOM_STRING = 'Fz7SZXPmYJujkzjY1rpXWvlWBqoGAfAX';\n\n/**\n * uppercase first char\n */\nexport function ucfirst(str: string): string {\n str += '';\n const f = str.charAt(0)\n .toUpperCase();\n return f + str.substr(1);\n}\n\n/**\n * removes trailing and ending dots from the string\n */\nexport function trimDots(str: string): string {\n // start\n while (str.charAt(0) === '.') {\n str = str.substr(1);\n }\n\n // end\n while (str.slice(-1) === '.') {\n str = str.slice(0, -1);\n }\n\n return str;\n}\n\n/**\n * @link https://stackoverflow.com/a/44950500/3443137\n */\nexport function lastCharOfString(str: string): string {\n return str.charAt(str.length - 1);\n}\n\n/**\n * returns true if the given name is likely a folder path\n */\nexport function isFolderPath(name: string) {\n // do not check, if foldername is given\n if (\n name.includes('/') || // unix\n name.includes('\\\\') // windows\n ) {\n 
return true;\n } else {\n return false;\n }\n}\n\n\n/**\n * @link https://gist.github.com/andreburgaud/6f73fd2d690b629346b8\n * @link https://stackoverflow.com/a/76240378/3443137\n */\nexport function arrayBufferToString(arrayBuffer: ArrayBuffer): string {\n const chunkSize = 8192;\n let str = '';\n var len = arrayBuffer.byteLength;\n for (let i = 0; i < len; i += chunkSize) {\n const chunk = new Uint8Array(\n arrayBuffer,\n i,\n Math.min(chunkSize, len - i)\n );\n str += String.fromCharCode.apply(null, chunk as any);\n }\n return str;\n}\n\nexport function stringToArrayBuffer(str: string): ArrayBuffer {\n const buf = new ArrayBuffer(str.length);\n const bufView = new Uint8Array(buf);\n for (let i = 0, strLen = str.length; i < strLen; i++) {\n bufView[i] = str.charCodeAt(i);\n }\n return buf;\n}\n\n\nexport function normalizeString(str: string) : string {\n return str.trim().replace(/[\\n\\s]+/g, '');\n}\n"],"mappings":";;;;;;;;;;;;;;AAAA,IAAMA,gBAAgB,GAAG,4BAA4B;AACrD;AACA;AACA;AACA;AACO,SAASC,iBAAiBA,CAACC,MAAc,GAAG,EAAE,EAAU;EAC3D,IAAIC,IAAI,GAAG,EAAE;EAEb,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGF,MAAM,EAAEE,CAAC,EAAE,EAAE;IAC7BD,IAAI,IAAIH,gBAAgB,CAACK,MAAM,CAACC,IAAI,CAACC,KAAK,CAACD,IAAI,CAACE,MAAM,CAAC,CAAC,GAAGR,gBAAgB,CAACE,MAAM,CAAC,CAAC;EACxF;EAEA,OAAOC,IAAI;AACf;;AAGA;AACA;AACA;AACO,IAAMM,aAAa,GAAAC,OAAA,CAAAD,aAAA,GAAG,kCAAkC;;AAE/D;AACA;AACA;AACO,SAASE,OAAOA,CAACC,GAAW,EAAU;EACzCA,GAAG,IAAI,EAAE;EACT,IAAMC,CAAC,GAAGD,GAAG,CAACP,MAAM,CAAC,CAAC,CAAC,CAClBS,WAAW,CAAC,CAAC;EAClB,OAAOD,CAAC,GAAGD,GAAG,CAACG,MAAM,CAAC,CAAC,CAAC;AAC5B;;AAEA;AACA;AACA;AACO,SAASC,QAAQA,CAACJ,GAAW,EAAU;EAC1C;EACA,OAAOA,GAAG,CAACP,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;IAC1BO,GAAG,GAAGA,GAAG,CAACG,MAAM,CAAC,CAAC,CAAC;EACvB;;EAEA;EACA,OAAOH,GAAG,CAACK,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;IAC1BL,GAAG,GAAGA,GAAG,CAACK,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;EAC1B;EAEA,OAAOL,GAAG;AACd;;AAEA;AACA;AACA;AACO,SAASM,gBAAgBA,CAACN,GAAW,EAAU;EAClD,OAAOA,GAAG,CAACP,MAAM,CAACO,GAAG,CAACV,MAAM,GAAG
,CAAC,CAAC;AACrC;;AAEA;AACA;AACA;AACO,SAASiB,YAAYA,CAACC,IAAY,EAAE;EACvC;EACA,IACIA,IAAI,CAACC,QAAQ,CAAC,GAAG,CAAC;EAAI;EACtBD,IAAI,CAACC,QAAQ,CAAC,IAAI,CAAC,CAAC;EAAA,EACtB;IACE,OAAO,IAAI;EACf,CAAC,MAAM;IACH,OAAO,KAAK;EAChB;AACJ;;AAGA;AACA;AACA;AACA;AACO,SAASC,mBAAmBA,CAACC,WAAwB,EAAU;EAClE,IAAMC,SAAS,GAAG,IAAI;EACtB,IAAIZ,GAAG,GAAG,EAAE;EACZ,IAAIa,GAAG,GAAGF,WAAW,CAACG,UAAU;EAChC,KAAK,IAAItB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGqB,GAAG,EAAErB,CAAC,IAAIoB,SAAS,EAAE;IACrC,IAAMG,KAAK,GAAG,IAAIC,UAAU,CACxBL,WAAW,EACXnB,CAAC,EACDE,IAAI,CAACuB,GAAG,CAACL,SAAS,EAAEC,GAAG,GAAGrB,CAAC,CAC/B,CAAC;IACDQ,GAAG,IAAIkB,MAAM,CAACC,YAAY,CAACC,KAAK,CAAC,IAAI,EAAEL,KAAY,CAAC;EACxD;EACA,OAAOf,GAAG;AACd;AAEO,SAASqB,mBAAmBA,CAACrB,GAAW,EAAe;EAC1D,IAAMsB,GAAG,GAAG,IAAIC,WAAW,CAACvB,GAAG,CAACV,MAAM,CAAC;EACvC,IAAMkC,OAAO,GAAG,IAAIR,UAAU,CAACM,GAAG,CAAC;EACnC,KAAK,IAAI9B,CAAC,GAAG,CAAC,EAAEiC,MAAM,GAAGzB,GAAG,CAACV,MAAM,EAAEE,CAAC,GAAGiC,MAAM,EAAEjC,CAAC,EAAE,EAAE;IAClDgC,OAAO,CAAChC,CAAC,CAAC,GAAGQ,GAAG,CAAC0B,UAAU,CAAClC,CAAC,CAAC;EAClC;EACA,OAAO8B,GAAG;AACd;AAGO,SAASK,eAAeA,CAAC3B,GAAW,EAAW;EAClD,OAAOA,GAAG,CAAC4B,IAAI,CAAC,CAAC,CAACC,OAAO,CAAC,UAAU,EAAE,EAAE,CAAC;AAC7C","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-time.js b/dist/cjs/plugins/utils/utils-time.js deleted file mode 100644 index b04a948e247..00000000000 --- a/dist/cjs/plugins/utils/utils-time.js +++ /dev/null @@ -1,41 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.now = now; -/** - * Returns the current unix time in milliseconds (with two decimals!) - * Because the accuracy of getTime() in javascript is bad, - * and we cannot rely on performance.now() on all platforms, - * this method implements a way to never return the same value twice. - * This ensures that when now() is called often, we do not loose the information - * about which call came first and which came after. 
- * - * We had to move from having no decimals, to having two decimal - * because it turned out that some storages are such fast that - * calling this method too often would return 'the future'. - */ -var _lastNow = 0; -/** - * Returns the current time in milliseconds, - * also ensures to not return the same value twice. - */ -function now() { - var ret = Date.now(); - ret = ret + 0.01; - if (ret <= _lastNow) { - ret = _lastNow + 0.01; - } - - /** - * Strip the returned number to max two decimals. - * In theory we would not need this but - * in practice JavaScript has no such good number precision - * so rounding errors could add another decimal place. - */ - var twoDecimals = parseFloat(ret.toFixed(2)); - _lastNow = twoDecimals; - return twoDecimals; -} -//# sourceMappingURL=utils-time.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/utils/utils-time.js.map b/dist/cjs/plugins/utils/utils-time.js.map deleted file mode 100644 index cf2bf086c3b..00000000000 --- a/dist/cjs/plugins/utils/utils-time.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-time.js","names":["_lastNow","now","ret","Date","twoDecimals","parseFloat","toFixed"],"sources":["../../../../src/plugins/utils/utils-time.ts"],"sourcesContent":["\n/**\n * Returns the current unix time in milliseconds (with two decimals!)\n * Because the accuracy of getTime() in javascript is bad,\n * and we cannot rely on performance.now() on all platforms,\n * this method implements a way to never return the same value twice.\n * This ensures that when now() is called often, we do not loose the information\n * about which call came first and which came after.\n *\n * We had to move from having no decimals, to having two decimal\n * because it turned out that some storages are such fast that\n * calling this method too often would return 'the future'.\n */\nlet _lastNow: number = 0;\n/**\n * Returns the current time in milliseconds,\n * also ensures to not return the same value twice.\n 
*/\nexport function now(): number {\n let ret = Date.now();\n ret = ret + 0.01;\n if (ret <= _lastNow) {\n ret = _lastNow + 0.01;\n }\n\n /**\n * Strip the returned number to max two decimals.\n * In theory we would not need this but\n * in practice JavaScript has no such good number precision\n * so rounding errors could add another decimal place.\n */\n const twoDecimals = parseFloat(ret.toFixed(2));\n\n _lastNow = twoDecimals;\n return twoDecimals;\n}\n"],"mappings":";;;;;;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAIA,QAAgB,GAAG,CAAC;AACxB;AACA;AACA;AACA;AACO,SAASC,GAAGA,CAAA,EAAW;EAC1B,IAAIC,GAAG,GAAGC,IAAI,CAACF,GAAG,CAAC,CAAC;EACpBC,GAAG,GAAGA,GAAG,GAAG,IAAI;EAChB,IAAIA,GAAG,IAAIF,QAAQ,EAAE;IACjBE,GAAG,GAAGF,QAAQ,GAAG,IAAI;EACzB;;EAEA;AACJ;AACA;AACA;AACA;AACA;EACI,IAAMI,WAAW,GAAGC,UAAU,CAACH,GAAG,CAACI,OAAO,CAAC,CAAC,CAAC,CAAC;EAE9CN,QAAQ,GAAGI,WAAW;EACtB,OAAOA,WAAW;AACtB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/validate-ajv/index.js b/dist/cjs/plugins/validate-ajv/index.js deleted file mode 100644 index 1ea86f72055..00000000000 --- a/dist/cjs/plugins/validate-ajv/index.js +++ /dev/null @@ -1,33 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.getValidator = getValidator; -exports.wrappedValidateAjvStorage = void 0; -var _ajv = _interopRequireDefault(require("ajv")); -var _pluginHelpers = require("../../plugin-helpers.js"); -/** - * this plugin validates documents before they can be inserted into the RxCollection. 
- * It's using ajv as jsonschema-validator - * @link https://github.com/epoberezkin/ajv - * @link https://github.com/ajv-validator/ajv/issues/2132#issuecomment-1537224620 - */ - -var ajv = new _ajv.default({ - strict: false -}); -function getValidator(schema) { - var validator = ajv.compile(schema); - return docData => { - var isValid = validator(docData); - if (isValid) { - return []; - } else { - return validator.errors; - } - }; -} -var wrappedValidateAjvStorage = exports.wrappedValidateAjvStorage = (0, _pluginHelpers.wrappedValidateStorageFactory)(getValidator, 'ajv'); -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/validate-ajv/index.js.map b/dist/cjs/plugins/validate-ajv/index.js.map deleted file mode 100644 index f7017ba8780..00000000000 --- a/dist/cjs/plugins/validate-ajv/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_ajv","_interopRequireDefault","require","_pluginHelpers","ajv","Ajv","strict","getValidator","schema","validator","compile","docData","isValid","errors","wrappedValidateAjvStorage","exports","wrappedValidateStorageFactory"],"sources":["../../../../src/plugins/validate-ajv/index.ts"],"sourcesContent":["/**\n * this plugin validates documents before they can be inserted into the RxCollection.\n * It's using ajv as jsonschema-validator\n * @link https://github.com/epoberezkin/ajv\n * @link https://github.com/ajv-validator/ajv/issues/2132#issuecomment-1537224620\n */\nimport Ajv from 'ajv';\nimport type {\n RxDocumentData,\n RxJsonSchema\n} from '../../types/index.d.ts';\nimport { wrappedValidateStorageFactory } from '../../plugin-helpers.ts';\n\n\nconst ajv = new Ajv({\n strict: false\n});\n\n\nexport function getValidator(\n schema: RxJsonSchema\n) {\n const validator = ajv.compile(schema);\n return (docData: RxDocumentData) => {\n const isValid = validator(docData);\n if (isValid) {\n return [];\n } else {\n return validator.errors as any;\n }\n };\n}\n\nexport 
const wrappedValidateAjvStorage = wrappedValidateStorageFactory(\n getValidator,\n 'ajv'\n);\n"],"mappings":";;;;;;;;AAMA,IAAAA,IAAA,GAAAC,sBAAA,CAAAC,OAAA;AAKA,IAAAC,cAAA,GAAAD,OAAA;AAXA;AACA;AACA;AACA;AACA;AACA;;AASA,IAAME,GAAG,GAAG,IAAIC,YAAG,CAAC;EAChBC,MAAM,EAAE;AACZ,CAAC,CAAC;AAGK,SAASC,YAAYA,CACxBC,MAAyB,EAC3B;EACE,IAAMC,SAAS,GAAGL,GAAG,CAACM,OAAO,CAACF,MAAM,CAAC;EACrC,OAAQG,OAA4B,IAAK;IACrC,IAAMC,OAAO,GAAGH,SAAS,CAACE,OAAO,CAAC;IAClC,IAAIC,OAAO,EAAE;MACT,OAAO,EAAE;IACb,CAAC,MAAM;MACH,OAAOH,SAAS,CAACI,MAAM;IAC3B;EACJ,CAAC;AACL;AAEO,IAAMC,yBAAyB,GAAAC,OAAA,CAAAD,yBAAA,GAAG,IAAAE,4CAA6B,EAClET,YAAY,EACZ,KACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/validate-is-my-json-valid/index.js b/dist/cjs/plugins/validate-is-my-json-valid/index.js deleted file mode 100644 index 55cd21cbb0c..00000000000 --- a/dist/cjs/plugins/validate-is-my-json-valid/index.js +++ /dev/null @@ -1,29 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.getValidator = getValidator; -exports.wrappedValidateIsMyJsonValidStorage = void 0; -var _isMyJsonValid = _interopRequireDefault(require("is-my-json-valid")); -var _pluginHelpers = require("../../plugin-helpers.js"); -/** - * this plugin validates documents before they can be inserted into the RxCollection. 
- * It's using is-my-json-valid as jsonschema-validator - * @link https://github.com/mafintosh/is-my-json-valid - */ - -function getValidator(schema) { - var validator = (0, _isMyJsonValid.default)(schema); - return docData => { - var isValid = validator(docData); - if (isValid) { - return []; - } else { - return validator.errors; - } - }; -} -var wrappedValidateIsMyJsonValidStorage = exports.wrappedValidateIsMyJsonValidStorage = (0, _pluginHelpers.wrappedValidateStorageFactory)(getValidator, 'is-my-json-valid'); -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/validate-is-my-json-valid/index.js.map b/dist/cjs/plugins/validate-is-my-json-valid/index.js.map deleted file mode 100644 index 5ce0496547e..00000000000 --- a/dist/cjs/plugins/validate-is-my-json-valid/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_isMyJsonValid","_interopRequireDefault","require","_pluginHelpers","getValidator","schema","validator","isMyJsonValid","docData","isValid","errors","wrappedValidateIsMyJsonValidStorage","exports","wrappedValidateStorageFactory"],"sources":["../../../../src/plugins/validate-is-my-json-valid/index.ts"],"sourcesContent":["/**\n * this plugin validates documents before they can be inserted into the RxCollection.\n * It's using is-my-json-valid as jsonschema-validator\n * @link https://github.com/mafintosh/is-my-json-valid\n */\nimport isMyJsonValid from 'is-my-json-valid';\nimport type {\n RxJsonSchema\n} from '../../types/index.d.ts';\nimport { wrappedValidateStorageFactory } from '../../plugin-helpers.ts';\n\n\nexport function getValidator(\n schema: RxJsonSchema\n) {\n const validator = isMyJsonValid(schema as any);\n return (docData: any) => {\n const isValid = validator(docData);\n if (isValid) {\n return [];\n } else {\n return validator.errors as any;\n }\n };\n}\n\nexport const wrappedValidateIsMyJsonValidStorage = wrappedValidateStorageFactory(\n getValidator,\n 
'is-my-json-valid'\n);\n"],"mappings":";;;;;;;;AAKA,IAAAA,cAAA,GAAAC,sBAAA,CAAAC,OAAA;AAIA,IAAAC,cAAA,GAAAD,OAAA;AATA;AACA;AACA;AACA;AACA;;AAQO,SAASE,YAAYA,CACxBC,MAAyB,EAC3B;EACE,IAAMC,SAAS,GAAG,IAAAC,sBAAa,EAACF,MAAa,CAAC;EAC9C,OAAQG,OAAY,IAAK;IACrB,IAAMC,OAAO,GAAGH,SAAS,CAACE,OAAO,CAAC;IAClC,IAAIC,OAAO,EAAE;MACT,OAAO,EAAE;IACb,CAAC,MAAM;MACH,OAAOH,SAAS,CAACI,MAAM;IAC3B;EACJ,CAAC;AACL;AAEO,IAAMC,mCAAmC,GAAAC,OAAA,CAAAD,mCAAA,GAAG,IAAAE,4CAA6B,EAC5ET,YAAY,EACZ,kBACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/plugins/validate-z-schema/index.js b/dist/cjs/plugins/validate-z-schema/index.js deleted file mode 100644 index 00a615f1b69..00000000000 --- a/dist/cjs/plugins/validate-z-schema/index.js +++ /dev/null @@ -1,48 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.getValidator = getValidator; -exports.wrappedValidateZSchemaStorage = void 0; -var _zSchema = _interopRequireDefault(require("z-schema")); -var _pluginHelpers = require("../../plugin-helpers.js"); -/** - * this plugin validates documents before they can be inserted into the RxCollection. 
- * It's using z-schema as jsonschema-validator - * @link https://github.com/zaggino/z-schema - */ - -function getValidator(schema) { - var validatorInstance = new _zSchema.default(); - var validator = obj => { - validatorInstance.validate(obj, schema); - return validatorInstance; - }; - return docData => { - var useValidator = validator(docData); - if (useValidator === true) { - return; - } - var errors = useValidator.getLastErrors(); - if (errors) { - var formattedZSchemaErrors = errors.map(({ - title, - description, - message, - path - }) => ({ - title, - description, - message, - path - })); - return formattedZSchemaErrors; - } else { - return []; - } - }; -} -var wrappedValidateZSchemaStorage = exports.wrappedValidateZSchemaStorage = (0, _pluginHelpers.wrappedValidateStorageFactory)(getValidator, 'z-schema'); -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/plugins/validate-z-schema/index.js.map b/dist/cjs/plugins/validate-z-schema/index.js.map deleted file mode 100644 index f90252a18e0..00000000000 --- a/dist/cjs/plugins/validate-z-schema/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["_zSchema","_interopRequireDefault","require","_pluginHelpers","getValidator","schema","validatorInstance","ZSchema","validator","obj","validate","docData","useValidator","errors","getLastErrors","formattedZSchemaErrors","map","title","description","message","path","wrappedValidateZSchemaStorage","exports","wrappedValidateStorageFactory"],"sources":["../../../../src/plugins/validate-z-schema/index.ts"],"sourcesContent":["/**\n * this plugin validates documents before they can be inserted into the RxCollection.\n * It's using z-schema as jsonschema-validator\n * @link https://github.com/zaggino/z-schema\n */\nimport ZSchema from 'z-schema';\nimport type { RxJsonSchema } from '../../types/index.d.ts';\nimport { wrappedValidateStorageFactory } from '../../plugin-helpers.ts';\n\n\nexport function getValidator(\n 
schema: RxJsonSchema\n) {\n const validatorInstance = new (ZSchema as any)();\n const validator = (obj: any) => {\n validatorInstance.validate(obj, schema);\n return validatorInstance;\n };\n return (docData: any) => {\n const useValidator = validator(docData);\n if (useValidator === true) {\n return;\n }\n const errors: ZSchema.SchemaErrorDetail[] = (useValidator as any).getLastErrors();\n if (errors) {\n const formattedZSchemaErrors = (errors as any).map(({\n title,\n description,\n message,\n path\n }: any) => ({\n title,\n description,\n message,\n path\n }));\n return formattedZSchemaErrors;\n } else {\n return [];\n }\n };\n}\n\nexport const wrappedValidateZSchemaStorage = wrappedValidateStorageFactory(\n getValidator,\n 'z-schema'\n);\n"],"mappings":";;;;;;;;AAKA,IAAAA,QAAA,GAAAC,sBAAA,CAAAC,OAAA;AAEA,IAAAC,cAAA,GAAAD,OAAA;AAPA;AACA;AACA;AACA;AACA;;AAMO,SAASE,YAAYA,CACxBC,MAAyB,EAC3B;EACE,IAAMC,iBAAiB,GAAG,IAAKC,gBAAO,CAAS,CAAC;EAChD,IAAMC,SAAS,GAAIC,GAAQ,IAAK;IAC5BH,iBAAiB,CAACI,QAAQ,CAACD,GAAG,EAAEJ,MAAM,CAAC;IACvC,OAAOC,iBAAiB;EAC5B,CAAC;EACD,OAAQK,OAAY,IAAK;IACrB,IAAMC,YAAY,GAAGJ,SAAS,CAACG,OAAO,CAAC;IACvC,IAAIC,YAAY,KAAK,IAAI,EAAE;MACvB;IACJ;IACA,IAAMC,MAAmC,GAAID,YAAY,CAASE,aAAa,CAAC,CAAC;IACjF,IAAID,MAAM,EAAE;MACR,IAAME,sBAAsB,GAAIF,MAAM,CAASG,GAAG,CAAC,CAAC;QAChDC,KAAK;QACLC,WAAW;QACXC,OAAO;QACPC;MACC,CAAC,MAAM;QACRH,KAAK;QACLC,WAAW;QACXC,OAAO;QACPC;MACJ,CAAC,CAAC,CAAC;MACH,OAAOL,sBAAsB;IACjC,CAAC,MAAM;MACH,OAAO,EAAE;IACb;EACJ,CAAC;AACL;AAEO,IAAMM,6BAA6B,GAAAC,OAAA,CAAAD,6BAAA,GAAG,IAAAE,4CAA6B,EACtEnB,YAAY,EACZ,UACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/query-cache.js b/dist/cjs/query-cache.js deleted file mode 100644 index dced9fa3e94..00000000000 --- a/dist/cjs/query-cache.js +++ /dev/null @@ -1,109 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.QueryCache = exports.DEFAULT_UNEXECUTED_LIFETIME = exports.DEFAULT_TRY_TO_KEEP_MAX = 
exports.COLLECTIONS_WITH_RUNNING_CLEANUP = void 0; -exports.countRxQuerySubscribers = countRxQuerySubscribers; -exports.createQueryCache = createQueryCache; -exports.defaultCacheReplacementPolicyMonad = exports.defaultCacheReplacementPolicy = void 0; -exports.triggerCacheReplacement = triggerCacheReplacement; -exports.uncacheRxQuery = uncacheRxQuery; -var _index = require("./plugins/utils/index.js"); -/** - * the query-cache makes sure that on every query-state, exactly one instance can exist - * if you use the same mango-query more then once, it will reuse the first RxQuery - */ -var QueryCache = exports.QueryCache = /*#__PURE__*/function () { - function QueryCache() { - this._map = new Map(); - } - var _proto = QueryCache.prototype; - /** - * check if an equal query is in the cache, - * if true, return the cached one, - * if false, save the given one and return it - */ - _proto.getByQuery = function getByQuery(rxQuery) { - var stringRep = rxQuery.toString(); - return (0, _index.getFromMapOrCreate)(this._map, stringRep, () => rxQuery); - }; - return QueryCache; -}(); -function createQueryCache() { - return new QueryCache(); -} -function uncacheRxQuery(queryCache, rxQuery) { - rxQuery.uncached = true; - var stringRep = rxQuery.toString(); - queryCache._map.delete(stringRep); -} -function countRxQuerySubscribers(rxQuery) { - return rxQuery.refCount$.observers.length; -} -var DEFAULT_TRY_TO_KEEP_MAX = exports.DEFAULT_TRY_TO_KEEP_MAX = 100; -var DEFAULT_UNEXECUTED_LIFETIME = exports.DEFAULT_UNEXECUTED_LIFETIME = 30 * 1000; - -/** - * The default cache replacement policy - * See docs-src/query-cache.md to learn how it should work. 
- * Notice that this runs often and should block the cpu as less as possible - * This is a monad which makes it easier to unit test - */ -var defaultCacheReplacementPolicyMonad = (tryToKeepMax, unExecutedLifetime) => (_collection, queryCache) => { - if (queryCache._map.size < tryToKeepMax) { - return; - } - var minUnExecutedLifetime = (0, _index.now)() - unExecutedLifetime; - var maybeUncache = []; - var queriesInCache = Array.from(queryCache._map.values()); - for (var rxQuery of queriesInCache) { - // filter out queries with subscribers - if (countRxQuerySubscribers(rxQuery) > 0) { - continue; - } - // directly uncache queries that never executed and are older then unExecutedLifetime - if (rxQuery._lastEnsureEqual === 0 && rxQuery._creationTime < minUnExecutedLifetime) { - uncacheRxQuery(queryCache, rxQuery); - continue; - } - maybeUncache.push(rxQuery); - } - var mustUncache = maybeUncache.length - tryToKeepMax; - if (mustUncache <= 0) { - return; - } - var sortedByLastUsage = maybeUncache.sort((a, b) => a._lastEnsureEqual - b._lastEnsureEqual); - var toRemove = sortedByLastUsage.slice(0, mustUncache); - toRemove.forEach(rxQuery => uncacheRxQuery(queryCache, rxQuery)); -}; -exports.defaultCacheReplacementPolicyMonad = defaultCacheReplacementPolicyMonad; -var defaultCacheReplacementPolicy = exports.defaultCacheReplacementPolicy = defaultCacheReplacementPolicyMonad(DEFAULT_TRY_TO_KEEP_MAX, DEFAULT_UNEXECUTED_LIFETIME); -var COLLECTIONS_WITH_RUNNING_CLEANUP = exports.COLLECTIONS_WITH_RUNNING_CLEANUP = new WeakSet(); - -/** - * Triggers the cache replacement policy after waitTime has passed. - * We do not run this directly because at exactly the time a query is created, - * we need all CPU to minimize latency. - * Also this should not be triggered multiple times when waitTime is still waiting. 
- */ -function triggerCacheReplacement(rxCollection) { - if (COLLECTIONS_WITH_RUNNING_CLEANUP.has(rxCollection)) { - // already started - return; - } - COLLECTIONS_WITH_RUNNING_CLEANUP.add(rxCollection); - - /** - * Do not run directly to not reduce result latency of a new query - */ - (0, _index.nextTick)() // wait at least one tick - .then(() => (0, _index.requestIdlePromise)(200)) // and then wait for the CPU to be idle - .then(() => { - if (!rxCollection.destroyed) { - rxCollection.cacheReplacementPolicy(rxCollection, rxCollection._queryCache); - } - COLLECTIONS_WITH_RUNNING_CLEANUP.delete(rxCollection); - }); -} -//# sourceMappingURL=query-cache.js.map \ No newline at end of file diff --git a/dist/cjs/query-cache.js.map b/dist/cjs/query-cache.js.map deleted file mode 100644 index 4b4d89d17d4..00000000000 --- a/dist/cjs/query-cache.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"query-cache.js","names":["_index","require","QueryCache","exports","_map","Map","_proto","prototype","getByQuery","rxQuery","stringRep","toString","getFromMapOrCreate","createQueryCache","uncacheRxQuery","queryCache","uncached","delete","countRxQuerySubscribers","refCount$","observers","length","DEFAULT_TRY_TO_KEEP_MAX","DEFAULT_UNEXECUTED_LIFETIME","defaultCacheReplacementPolicyMonad","tryToKeepMax","unExecutedLifetime","_collection","size","minUnExecutedLifetime","now","maybeUncache","queriesInCache","Array","from","values","_lastEnsureEqual","_creationTime","push","mustUncache","sortedByLastUsage","sort","a","b","toRemove","slice","forEach","defaultCacheReplacementPolicy","COLLECTIONS_WITH_RUNNING_CLEANUP","WeakSet","triggerCacheReplacement","rxCollection","has","add","nextTick","then","requestIdlePromise","destroyed","cacheReplacementPolicy","_queryCache"],"sources":["../../src/query-cache.ts"],"sourcesContent":["/**\n * the query-cache makes sure that on every query-state, exactly one instance can exist\n * if you use the same mango-query more then once, it will reuse the 
first RxQuery\n */\nimport type {\n RxQuery,\n RxCacheReplacementPolicy,\n RxCollection\n} from './types/index.d.ts';\nimport {\n getFromMapOrCreate,\n nextTick,\n now,\n requestIdlePromise\n} from './plugins/utils/index.ts';\n\nexport class QueryCache {\n public _map: Map = new Map();\n\n /**\n * check if an equal query is in the cache,\n * if true, return the cached one,\n * if false, save the given one and return it\n */\n getByQuery(rxQuery: RxQuery): RxQuery {\n const stringRep = rxQuery.toString();\n return getFromMapOrCreate(\n this._map,\n stringRep,\n () => rxQuery\n );\n }\n}\n\nexport function createQueryCache() {\n return new QueryCache();\n}\n\n\nexport function uncacheRxQuery(queryCache: QueryCache, rxQuery: RxQuery) {\n rxQuery.uncached = true;\n const stringRep = rxQuery.toString();\n queryCache._map.delete(stringRep);\n\n}\n\n\nexport function countRxQuerySubscribers(rxQuery: RxQuery): number {\n return rxQuery.refCount$.observers.length;\n}\n\n\nexport const DEFAULT_TRY_TO_KEEP_MAX = 100;\nexport const DEFAULT_UNEXECUTED_LIFETIME = 30 * 1000;\n\n/**\n * The default cache replacement policy\n * See docs-src/query-cache.md to learn how it should work.\n * Notice that this runs often and should block the cpu as less as possible\n * This is a monad which makes it easier to unit test\n */\nexport const defaultCacheReplacementPolicyMonad: (\n tryToKeepMax: number,\n unExecutedLifetime: number\n) => RxCacheReplacementPolicy = (\n tryToKeepMax,\n unExecutedLifetime\n) => (\n _collection: RxCollection,\n queryCache: QueryCache\n) => {\n if (queryCache._map.size < tryToKeepMax) {\n return;\n }\n\n const minUnExecutedLifetime = now() - unExecutedLifetime;\n const maybeUncache: RxQuery[] = [];\n\n const queriesInCache = Array.from(queryCache._map.values());\n for (const rxQuery of queriesInCache) {\n // filter out queries with subscribers\n if (countRxQuerySubscribers(rxQuery) > 0) {\n continue;\n }\n // directly uncache queries that never executed and are 
older then unExecutedLifetime\n if (rxQuery._lastEnsureEqual === 0 && rxQuery._creationTime < minUnExecutedLifetime) {\n uncacheRxQuery(queryCache, rxQuery);\n continue;\n }\n maybeUncache.push(rxQuery);\n }\n\n const mustUncache = maybeUncache.length - tryToKeepMax;\n if (mustUncache <= 0) {\n return;\n }\n\n const sortedByLastUsage = maybeUncache.sort((a, b) => a._lastEnsureEqual - b._lastEnsureEqual);\n const toRemove = sortedByLastUsage.slice(0, mustUncache);\n toRemove.forEach(rxQuery => uncacheRxQuery(queryCache, rxQuery));\n };\n\n\nexport const defaultCacheReplacementPolicy: RxCacheReplacementPolicy = defaultCacheReplacementPolicyMonad(\n DEFAULT_TRY_TO_KEEP_MAX,\n DEFAULT_UNEXECUTED_LIFETIME\n);\n\nexport const COLLECTIONS_WITH_RUNNING_CLEANUP: WeakSet = new WeakSet();\n\n/**\n * Triggers the cache replacement policy after waitTime has passed.\n * We do not run this directly because at exactly the time a query is created,\n * we need all CPU to minimize latency.\n * Also this should not be triggered multiple times when waitTime is still waiting.\n */\nexport function triggerCacheReplacement(\n rxCollection: RxCollection\n) {\n if (COLLECTIONS_WITH_RUNNING_CLEANUP.has(rxCollection)) {\n // already started\n return;\n }\n\n COLLECTIONS_WITH_RUNNING_CLEANUP.add(rxCollection);\n\n /**\n * Do not run directly to not reduce result latency of a new query\n */\n nextTick() // wait at least one tick\n .then(() => requestIdlePromise(200)) // and then wait for the CPU to be idle\n .then(() => {\n if (!rxCollection.destroyed) {\n rxCollection.cacheReplacementPolicy(rxCollection, rxCollection._queryCache);\n }\n COLLECTIONS_WITH_RUNNING_CLEANUP.delete(rxCollection);\n 
});\n}\n"],"mappings":";;;;;;;;;;;AASA,IAAAA,MAAA,GAAAC,OAAA;AATA;AACA;AACA;AACA;AAHA,IAgBaC,UAAU,GAAAC,OAAA,CAAAD,UAAA;EAAA,SAAAA,WAAA;IAAA,KACZE,IAAI,GAAyB,IAAIC,GAAG,CAAC,CAAC;EAAA;EAAA,IAAAC,MAAA,GAAAJ,UAAA,CAAAK,SAAA;EAE7C;AACJ;AACA;AACA;AACA;EAJID,MAAA,CAKAE,UAAU,GAAV,SAAAA,WAAWC,OAAgB,EAAW;IAClC,IAAMC,SAAS,GAAGD,OAAO,CAACE,QAAQ,CAAC,CAAC;IACpC,OAAO,IAAAC,yBAAkB,EACrB,IAAI,CAACR,IAAI,EACTM,SAAS,EACT,MAAMD,OACV,CAAC;EACL,CAAC;EAAA,OAAAP,UAAA;AAAA;AAGE,SAASW,gBAAgBA,CAAA,EAAG;EAC/B,OAAO,IAAIX,UAAU,CAAC,CAAC;AAC3B;AAGO,SAASY,cAAcA,CAACC,UAAsB,EAAEN,OAAgB,EAAE;EACrEA,OAAO,CAACO,QAAQ,GAAG,IAAI;EACvB,IAAMN,SAAS,GAAGD,OAAO,CAACE,QAAQ,CAAC,CAAC;EACpCI,UAAU,CAACX,IAAI,CAACa,MAAM,CAACP,SAAS,CAAC;AAErC;AAGO,SAASQ,uBAAuBA,CAACT,OAAgB,EAAU;EAC9D,OAAOA,OAAO,CAACU,SAAS,CAACC,SAAS,CAACC,MAAM;AAC7C;AAGO,IAAMC,uBAAuB,GAAAnB,OAAA,CAAAmB,uBAAA,GAAG,GAAG;AACnC,IAAMC,2BAA2B,GAAApB,OAAA,CAAAoB,2BAAA,GAAG,EAAE,GAAG,IAAI;;AAEpD;AACA;AACA;AACA;AACA;AACA;AACO,IAAMC,kCAGgB,GAAGA,CAC5BC,YAAY,EACZC,kBAAkB,KACjB,CACDC,WAAyB,EACzBZ,UAAsB,KACrB;EACO,IAAIA,UAAU,CAACX,IAAI,CAACwB,IAAI,GAAGH,YAAY,EAAE;IACrC;EACJ;EAEA,IAAMI,qBAAqB,GAAG,IAAAC,UAAG,EAAC,CAAC,GAAGJ,kBAAkB;EACxD,IAAMK,YAAuB,GAAG,EAAE;EAElC,IAAMC,cAAc,GAAGC,KAAK,CAACC,IAAI,CAACnB,UAAU,CAACX,IAAI,CAAC+B,MAAM,CAAC,CAAC,CAAC;EAC3D,KAAK,IAAM1B,OAAO,IAAIuB,cAAc,EAAE;IAClC;IACA,IAAId,uBAAuB,CAACT,OAAO,CAAC,GAAG,CAAC,EAAE;MACtC;IACJ;IACA;IACA,IAAIA,OAAO,CAAC2B,gBAAgB,KAAK,CAAC,IAAI3B,OAAO,CAAC4B,aAAa,GAAGR,qBAAqB,EAAE;MACjFf,cAAc,CAACC,UAAU,EAAEN,OAAO,CAAC;MACnC;IACJ;IACAsB,YAAY,CAACO,IAAI,CAAC7B,OAAO,CAAC;EAC9B;EAEA,IAAM8B,WAAW,GAAGR,YAAY,CAACV,MAAM,GAAGI,YAAY;EACtD,IAAIc,WAAW,IAAI,CAAC,EAAE;IAClB;EACJ;EAEA,IAAMC,iBAAiB,GAAGT,YAAY,CAACU,IAAI,CAAC,CAACC,CAAC,EAAEC,CAAC,KAAKD,CAAC,CAACN,gBAAgB,GAAGO,CAAC,CAACP,gBAAgB,CAAC;EAC9F,IAAMQ,QAAQ,GAAGJ,iBAAiB,CAACK,KAAK,CAAC,CAAC,EAAEN,WAAW,CAAC;EACxDK,QAAQ,CAACE,OAAO,CAACrC,OAAO,IAAIK,cAAc,CAACC,UAAU,EAAEN,OAAO,CAAC,CAAC;AACpE,CAAC;AAACN,OAAA,CAAAqB,kCAAA,GAAAA,kCAAA;AAGH,IAAMuB,6BAAuD,GAAA5C,OAAA,C
AAA4C,6BAAA,GAAGvB,kCAAkC,CACrGF,uBAAuB,EACvBC,2BACJ,CAAC;AAEM,IAAMyB,gCAAuD,GAAA7C,OAAA,CAAA6C,gCAAA,GAAG,IAAIC,OAAO,CAAC,CAAC;;AAEpF;AACA;AACA;AACA;AACA;AACA;AACO,SAASC,uBAAuBA,CACnCC,YAA0B,EAC5B;EACE,IAAIH,gCAAgC,CAACI,GAAG,CAACD,YAAY,CAAC,EAAE;IACpD;IACA;EACJ;EAEAH,gCAAgC,CAACK,GAAG,CAACF,YAAY,CAAC;;EAElD;AACJ;AACA;EACI,IAAAG,eAAQ,EAAC,CAAC,CAAC;EAAA,CACNC,IAAI,CAAC,MAAM,IAAAC,yBAAkB,EAAC,GAAG,CAAC,CAAC,CAAC;EAAA,CACpCD,IAAI,CAAC,MAAM;IACR,IAAI,CAACJ,YAAY,CAACM,SAAS,EAAE;MACzBN,YAAY,CAACO,sBAAsB,CAACP,YAAY,EAAEA,YAAY,CAACQ,WAAW,CAAC;IAC/E;IACAX,gCAAgC,CAAC/B,MAAM,CAACkC,YAAY,CAAC;EACzD,CAAC,CAAC;AACV","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/query-planner.js b/dist/cjs/query-planner.js deleted file mode 100644 index 5bd3cf73092..00000000000 --- a/dist/cjs/query-planner.js +++ /dev/null @@ -1,306 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.UPPER_BOUND_LOGICAL_OPERATORS = exports.LOWER_BOUND_LOGICAL_OPERATORS = exports.LOGICAL_OPERATORS = exports.INDEX_MIN = exports.INDEX_MAX = void 0; -exports.getMatcherQueryOpts = getMatcherQueryOpts; -exports.getQueryPlan = getQueryPlan; -exports.isSelectorSatisfiedByIndex = isSelectorSatisfiedByIndex; -exports.rateQueryPlan = rateQueryPlan; -var _index = require("./plugins/utils/index.js"); -var _rxError = require("./rx-error.js"); -var _rxSchemaHelper = require("./rx-schema-helper.js"); -var INDEX_MAX = exports.INDEX_MAX = String.fromCharCode(65535); - -/** - * Do not use -Infinity here because it would be - * transformed to null on JSON.stringify() which can break things - * when the query plan is send to the storage as json. - * @link https://stackoverflow.com/a/16644751 - * Notice that for IndexedDB IDBKeyRange we have - * to transform the value back to -Infinity - * before we can use it in IDBKeyRange.bound. 
- */ -var INDEX_MIN = exports.INDEX_MIN = Number.MIN_SAFE_INTEGER; - -/** - * Returns the query plan which contains - * information about how to run the query - * and which indexes to use. - * - * This is used in some storage like Memory, dexie.js and IndexedDB. - */ -function getQueryPlan(schema, query) { - var selector = query.selector; - var indexes = schema.indexes ? schema.indexes.slice(0) : []; - if (query.index) { - indexes = [query.index]; - } - - /** - * Most storages do not support descending indexes - * so having a 'desc' in the sorting, means we always have to re-sort the results. - */ - var hasDescSorting = !!query.sort.find(sortField => Object.values(sortField)[0] === 'desc'); - - /** - * Some fields can be part of the selector while not being relevant for sorting - * because their selector operators specify that in all cases all matching docs - * would have the same value. - * For example the boolean field _deleted. - * TODO similar thing could be done for enums. - */ - var sortIrrelevevantFields = new Set(); - Object.keys(selector).forEach(fieldName => { - var schemaPart = (0, _rxSchemaHelper.getSchemaByObjectPath)(schema, fieldName); - if (schemaPart && schemaPart.type === 'boolean' && Object.prototype.hasOwnProperty.call(selector[fieldName], '$eq')) { - sortIrrelevevantFields.add(fieldName); - } - }); - var optimalSortIndex = query.sort.map(sortField => Object.keys(sortField)[0]); - var optimalSortIndexCompareString = optimalSortIndex.filter(f => !sortIrrelevevantFields.has(f)).join(','); - var currentBestQuality = -1; - var currentBestQueryPlan; - - /** - * Calculate one query plan for each index - * and then test which of the plans is best. - */ - indexes.forEach(index => { - var inclusiveEnd = true; - var inclusiveStart = true; - var opts = index.map(indexField => { - var matcher = selector[indexField]; - var operators = matcher ? 
Object.keys(matcher) : []; - var matcherOpts = {}; - if (!matcher || !operators.length) { - var startKey = inclusiveStart ? INDEX_MIN : INDEX_MAX; - matcherOpts = { - startKey, - endKey: inclusiveEnd ? INDEX_MAX : INDEX_MIN, - inclusiveStart: true, - inclusiveEnd: true - }; - } else { - operators.forEach(operator => { - if (LOGICAL_OPERATORS.has(operator)) { - var operatorValue = matcher[operator]; - var partialOpts = getMatcherQueryOpts(operator, operatorValue); - matcherOpts = Object.assign(matcherOpts, partialOpts); - } - }); - } - - // fill missing attributes - if (typeof matcherOpts.startKey === 'undefined') { - matcherOpts.startKey = INDEX_MIN; - } - if (typeof matcherOpts.endKey === 'undefined') { - matcherOpts.endKey = INDEX_MAX; - } - if (typeof matcherOpts.inclusiveStart === 'undefined') { - matcherOpts.inclusiveStart = true; - } - if (typeof matcherOpts.inclusiveEnd === 'undefined') { - matcherOpts.inclusiveEnd = true; - } - if (inclusiveStart && !matcherOpts.inclusiveStart) { - inclusiveStart = false; - } - if (inclusiveEnd && !matcherOpts.inclusiveEnd) { - inclusiveEnd = false; - } - return matcherOpts; - }); - var startKeys = opts.map(opt => opt.startKey); - var endKeys = opts.map(opt => opt.endKey); - var queryPlan = { - index, - startKeys, - endKeys, - inclusiveEnd, - inclusiveStart, - sortSatisfiedByIndex: !hasDescSorting && optimalSortIndexCompareString === index.filter(f => !sortIrrelevevantFields.has(f)).join(','), - selectorSatisfiedByIndex: isSelectorSatisfiedByIndex(index, query.selector, startKeys, endKeys) - }; - var quality = rateQueryPlan(schema, query, queryPlan); - if (quality >= currentBestQuality || query.index) { - currentBestQuality = quality; - currentBestQueryPlan = queryPlan; - } - }); - - /** - * In all cases and index must be found - */ - if (!currentBestQueryPlan) { - throw (0, _rxError.newRxError)('SNH', { - query - }); - } - return currentBestQueryPlan; -} -var LOGICAL_OPERATORS = exports.LOGICAL_OPERATORS = new Set(['$eq', 
'$gt', '$gte', '$lt', '$lte']); -var LOWER_BOUND_LOGICAL_OPERATORS = exports.LOWER_BOUND_LOGICAL_OPERATORS = new Set(['$eq', '$gt', '$gte']); -var UPPER_BOUND_LOGICAL_OPERATORS = exports.UPPER_BOUND_LOGICAL_OPERATORS = new Set(['$eq', '$lt', '$lte']); -function isSelectorSatisfiedByIndex(index, selector, startKeys, endKeys) { - /** - * Not satisfied if one or more operators are non-logical - * operators that can never be satisfied by an index. - */ - var selectorEntries = Object.entries(selector); - var hasNonMatchingOperator = selectorEntries.find(([fieldName, operation]) => { - if (!index.includes(fieldName)) { - return true; - } - var hasNonLogicOperator = Object.entries(operation).find(([op, _value]) => !LOGICAL_OPERATORS.has(op)); - return hasNonLogicOperator; - }); - if (hasNonMatchingOperator) { - return false; - } - - /** - * Not satisfied if contains $and or $or operations. - */ - if (selector.$and || selector.$or) { - return false; - } - - // ensure all lower bound in index - var satisfieldLowerBound = []; - var lowerOperatorFieldNames = new Set(); - for (var [fieldName, operation] of Object.entries(selector)) { - if (!index.includes(fieldName)) { - return false; - } - - // If more then one logic op on the same field, we have to selector-match. 
- var lowerLogicOps = Object.keys(operation).filter(key => LOWER_BOUND_LOGICAL_OPERATORS.has(key)); - if (lowerLogicOps.length > 1) { - return false; - } - var hasLowerLogicOp = lowerLogicOps[0]; - if (hasLowerLogicOp) { - lowerOperatorFieldNames.add(fieldName); - } - if (hasLowerLogicOp !== '$eq') { - if (satisfieldLowerBound.length > 0) { - return false; - } else { - satisfieldLowerBound.push(hasLowerLogicOp); - } - } - } - - // ensure all upper bound in index - var satisfieldUpperBound = []; - var upperOperatorFieldNames = new Set(); - for (var [_fieldName, _operation] of Object.entries(selector)) { - if (!index.includes(_fieldName)) { - return false; - } - - // If more then one logic op on the same field, we have to selector-match. - var upperLogicOps = Object.keys(_operation).filter(key => UPPER_BOUND_LOGICAL_OPERATORS.has(key)); - if (upperLogicOps.length > 1) { - return false; - } - var hasUperLogicOp = upperLogicOps[0]; - if (hasUperLogicOp) { - upperOperatorFieldNames.add(_fieldName); - } - if (hasUperLogicOp !== '$eq') { - if (satisfieldUpperBound.length > 0) { - return false; - } else { - satisfieldUpperBound.push(hasUperLogicOp); - } - } - } - - /** - * If the index contains a non-relevant field between - * the relevant fields, then the index is not satisfying. 
- */ - var i = 0; - for (var _fieldName2 of index) { - for (var set of [lowerOperatorFieldNames, upperOperatorFieldNames]) { - if (!set.has(_fieldName2) && set.size > 0) { - return false; - } - set.delete(_fieldName2); - } - var startKey = startKeys[i]; - var endKey = endKeys[i]; - if (startKey !== endKey && lowerOperatorFieldNames.size > 0 && upperOperatorFieldNames.size > 0) { - return false; - } - i++; - } - return true; -} -function getMatcherQueryOpts(operator, operatorValue) { - switch (operator) { - case '$eq': - return { - startKey: operatorValue, - endKey: operatorValue, - inclusiveEnd: true, - inclusiveStart: true - }; - case '$lte': - return { - endKey: operatorValue, - inclusiveEnd: true - }; - case '$gte': - return { - startKey: operatorValue, - inclusiveStart: true - }; - case '$lt': - return { - endKey: operatorValue, - inclusiveEnd: false - }; - case '$gt': - return { - startKey: operatorValue, - inclusiveStart: false - }; - default: - throw new Error('SNH'); - } -} - -/** - * Returns a number that determines the quality of the query plan. - * Higher number means better query plan. 
- */ -function rateQueryPlan(schema, query, queryPlan) { - var quality = 0; - var addQuality = value => { - if (value > 0) { - quality = quality + value; - } - }; - var pointsPerMatchingKey = 10; - var nonMinKeyCount = (0, _index.countUntilNotMatching)(queryPlan.startKeys, keyValue => keyValue !== INDEX_MIN && keyValue !== INDEX_MAX); - addQuality(nonMinKeyCount * pointsPerMatchingKey); - var nonMaxKeyCount = (0, _index.countUntilNotMatching)(queryPlan.startKeys, keyValue => keyValue !== INDEX_MAX && keyValue !== INDEX_MIN); - addQuality(nonMaxKeyCount * pointsPerMatchingKey); - var equalKeyCount = (0, _index.countUntilNotMatching)(queryPlan.startKeys, (keyValue, idx) => { - if (keyValue === queryPlan.endKeys[idx]) { - return true; - } else { - return false; - } - }); - addQuality(equalKeyCount * pointsPerMatchingKey * 1.5); - var pointsIfNoReSortMustBeDone = queryPlan.sortSatisfiedByIndex ? 5 : 0; - addQuality(pointsIfNoReSortMustBeDone); - return quality; -} -//# sourceMappingURL=query-planner.js.map \ No newline at end of file diff --git a/dist/cjs/query-planner.js.map b/dist/cjs/query-planner.js.map deleted file mode 100644 index 243e5d4a734..00000000000 --- a/dist/cjs/query-planner.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"query-planner.js","names":["_index","require","_rxError","_rxSchemaHelper","INDEX_MAX","exports","String","fromCharCode","INDEX_MIN","Number","MIN_SAFE_INTEGER","getQueryPlan","schema","query","selector","indexes","slice","index","hasDescSorting","sort","find","sortField","Object","values","sortIrrelevevantFields","Set","keys","forEach","fieldName","schemaPart","getSchemaByObjectPath","type","prototype","hasOwnProperty","call","add","optimalSortIndex","map","optimalSortIndexCompareString","filter","f","has","join","currentBestQuality","currentBestQueryPlan","inclusiveEnd","inclusiveStart","opts","indexField","matcher","operators","matcherOpts","length","startKey","endKey","operator","LOGICAL_OPERATORS","operatorValue","partialOpts","getMatcherQueryOpts","assign","startKeys","opt","endKeys","queryPlan","sortSatisfiedByIndex","selectorSatisfiedByIndex","isSelectorSatisfiedByIndex","quality","rateQueryPlan","newRxError","LOWER_BOUND_LOGICAL_OPERATORS","UPPER_BOUND_LOGICAL_OPERATORS","selectorEntries","entries","hasNonMatchingOperator","operation","includes","hasNonLogicOperator","op","_value","$and","$or","satisfieldLowerBound","lowerOperatorFieldNames","lowerLogicOps","key","hasLowerLogicOp","push","satisfieldUpperBound","upperOperatorFieldNames","upperLogicOps","hasUperLogicOp","i","set","size","delete","Error","addQuality","value","pointsPerMatchingKey","nonMinKeyCount","countUntilNotMatching","keyValue","nonMaxKeyCount","equalKeyCount","idx","pointsIfNoReSortMustBeDone"],"sources":["../../src/query-planner.ts"],"sourcesContent":["import { countUntilNotMatching } from './plugins/utils/index.ts';\nimport { newRxError } from './rx-error.ts';\nimport { getSchemaByObjectPath } from './rx-schema-helper.ts';\nimport type {\n FilledMangoQuery,\n MangoQuerySelector,\n RxDocumentData,\n RxJsonSchema,\n RxQueryPlan,\n RxQueryPlanKey,\n RxQueryPlanerOpts\n} from './types/index.d.ts';\n\nexport const INDEX_MAX = String.fromCharCode(65535);\n\n/**\n * Do 
not use -Infinity here because it would be\n * transformed to null on JSON.stringify() which can break things\n * when the query plan is send to the storage as json.\n * @link https://stackoverflow.com/a/16644751\n * Notice that for IndexedDB IDBKeyRange we have\n * to transform the value back to -Infinity\n * before we can use it in IDBKeyRange.bound.\n */\nexport const INDEX_MIN = Number.MIN_SAFE_INTEGER;\n\n/**\n * Returns the query plan which contains\n * information about how to run the query\n * and which indexes to use.\n *\n * This is used in some storage like Memory, dexie.js and IndexedDB.\n */\nexport function getQueryPlan(\n schema: RxJsonSchema>,\n query: FilledMangoQuery\n): RxQueryPlan {\n const selector = query.selector;\n\n let indexes: string[][] = schema.indexes ? schema.indexes.slice(0) as any : [];\n if (query.index) {\n indexes = [query.index];\n }\n\n /**\n * Most storages do not support descending indexes\n * so having a 'desc' in the sorting, means we always have to re-sort the results.\n */\n const hasDescSorting = !!query.sort.find(sortField => Object.values(sortField)[0] === 'desc');\n\n /**\n * Some fields can be part of the selector while not being relevant for sorting\n * because their selector operators specify that in all cases all matching docs\n * would have the same value.\n * For example the boolean field _deleted.\n * TODO similar thing could be done for enums.\n */\n const sortIrrelevevantFields = new Set();\n Object.keys(selector).forEach(fieldName => {\n const schemaPart = getSchemaByObjectPath(schema, fieldName);\n if (\n schemaPart &&\n schemaPart.type === 'boolean' &&\n Object.prototype.hasOwnProperty.call((selector as any)[fieldName], '$eq')\n ) {\n sortIrrelevevantFields.add(fieldName);\n }\n });\n\n\n const optimalSortIndex = query.sort.map(sortField => Object.keys(sortField)[0]);\n const optimalSortIndexCompareString = optimalSortIndex\n .filter(f => !sortIrrelevevantFields.has(f))\n .join(',');\n\n let 
currentBestQuality = -1;\n let currentBestQueryPlan: RxQueryPlan | undefined;\n\n /**\n * Calculate one query plan for each index\n * and then test which of the plans is best.\n */\n indexes.forEach((index) => {\n let inclusiveEnd = true;\n let inclusiveStart = true;\n const opts: RxQueryPlanerOpts[] = index.map(indexField => {\n const matcher = (selector as any)[indexField];\n const operators = matcher ? Object.keys(matcher) : [];\n\n let matcherOpts: RxQueryPlanerOpts = {} as any;\n if (\n !matcher ||\n !operators.length\n ) {\n const startKey = inclusiveStart ? INDEX_MIN : INDEX_MAX;\n matcherOpts = {\n startKey,\n endKey: inclusiveEnd ? INDEX_MAX : INDEX_MIN,\n inclusiveStart: true,\n inclusiveEnd: true\n };\n } else {\n operators.forEach(operator => {\n if (LOGICAL_OPERATORS.has(operator)) {\n const operatorValue = matcher[operator];\n const partialOpts = getMatcherQueryOpts(operator, operatorValue);\n matcherOpts = Object.assign(matcherOpts, partialOpts);\n }\n });\n }\n\n // fill missing attributes\n if (typeof matcherOpts.startKey === 'undefined') {\n matcherOpts.startKey = INDEX_MIN;\n }\n if (typeof matcherOpts.endKey === 'undefined') {\n matcherOpts.endKey = INDEX_MAX;\n }\n if (typeof matcherOpts.inclusiveStart === 'undefined') {\n matcherOpts.inclusiveStart = true;\n }\n if (typeof matcherOpts.inclusiveEnd === 'undefined') {\n matcherOpts.inclusiveEnd = true;\n }\n\n if (inclusiveStart && !matcherOpts.inclusiveStart) {\n inclusiveStart = false;\n }\n if (inclusiveEnd && !matcherOpts.inclusiveEnd) {\n inclusiveEnd = false;\n }\n\n return matcherOpts;\n });\n\n\n const startKeys = opts.map(opt => opt.startKey);\n const endKeys = opts.map(opt => opt.endKey);\n const queryPlan: RxQueryPlan = {\n index,\n startKeys,\n endKeys,\n inclusiveEnd,\n inclusiveStart,\n sortSatisfiedByIndex: !hasDescSorting && optimalSortIndexCompareString === index.filter(f => !sortIrrelevevantFields.has(f)).join(','),\n selectorSatisfiedByIndex: isSelectorSatisfiedByIndex(index, 
query.selector, startKeys, endKeys)\n };\n const quality = rateQueryPlan(\n schema,\n query,\n queryPlan\n );\n if (\n (\n quality >= currentBestQuality\n ) ||\n query.index\n ) {\n currentBestQuality = quality;\n currentBestQueryPlan = queryPlan;\n }\n });\n\n /**\n * In all cases and index must be found\n */\n if (!currentBestQueryPlan) {\n throw newRxError('SNH', {\n query\n });\n }\n\n return currentBestQueryPlan;\n}\n\nexport const LOGICAL_OPERATORS = new Set(['$eq', '$gt', '$gte', '$lt', '$lte']);\nexport const LOWER_BOUND_LOGICAL_OPERATORS = new Set(['$eq', '$gt', '$gte']);\nexport const UPPER_BOUND_LOGICAL_OPERATORS = new Set(['$eq', '$lt', '$lte']);\n\n\nexport function isSelectorSatisfiedByIndex(\n index: string[],\n selector: MangoQuerySelector,\n startKeys: RxQueryPlanKey[],\n endKeys: RxQueryPlanKey[]\n): boolean {\n\n\n /**\n * Not satisfied if one or more operators are non-logical\n * operators that can never be satisfied by an index.\n */\n const selectorEntries = Object.entries(selector);\n const hasNonMatchingOperator = selectorEntries\n .find(([fieldName, operation]) => {\n if (!index.includes(fieldName)) {\n return true;\n }\n const hasNonLogicOperator = Object.entries(operation as any)\n .find(([op, _value]) => !LOGICAL_OPERATORS.has(op));\n return hasNonLogicOperator;\n });\n\n if (hasNonMatchingOperator) {\n return false;\n }\n\n /**\n * Not satisfied if contains $and or $or operations.\n */\n if (selector.$and || selector.$or) {\n return false;\n }\n\n\n\n // ensure all lower bound in index\n const satisfieldLowerBound: string[] = [];\n const lowerOperatorFieldNames = new Set();\n for (const [fieldName, operation] of Object.entries(selector)) {\n if (!index.includes(fieldName)) {\n return false;\n }\n\n // If more then one logic op on the same field, we have to selector-match.\n const lowerLogicOps = Object.keys(operation as any).filter(key => LOWER_BOUND_LOGICAL_OPERATORS.has(key));\n if (lowerLogicOps.length > 1) {\n return false;\n }\n\n 
const hasLowerLogicOp = lowerLogicOps[0];\n if (hasLowerLogicOp) {\n lowerOperatorFieldNames.add(fieldName);\n }\n if (hasLowerLogicOp !== '$eq') {\n if (satisfieldLowerBound.length > 0) {\n return false;\n } else {\n satisfieldLowerBound.push(hasLowerLogicOp);\n }\n }\n }\n\n // ensure all upper bound in index\n const satisfieldUpperBound: string[] = [];\n const upperOperatorFieldNames = new Set();\n for (const [fieldName, operation] of Object.entries(selector)) {\n if (!index.includes(fieldName)) {\n return false;\n }\n\n // If more then one logic op on the same field, we have to selector-match.\n const upperLogicOps = Object.keys(operation as any).filter(key => UPPER_BOUND_LOGICAL_OPERATORS.has(key));\n if (upperLogicOps.length > 1) {\n return false;\n }\n\n const hasUperLogicOp = upperLogicOps[0];\n if (hasUperLogicOp) {\n upperOperatorFieldNames.add(fieldName);\n }\n if (hasUperLogicOp !== '$eq') {\n if (satisfieldUpperBound.length > 0) {\n return false;\n } else {\n satisfieldUpperBound.push(hasUperLogicOp);\n }\n }\n }\n\n\n /**\n * If the index contains a non-relevant field between\n * the relevant fields, then the index is not satisfying.\n */\n let i = 0;\n for (const fieldName of index) {\n for (const set of [\n lowerOperatorFieldNames,\n upperOperatorFieldNames\n ]) {\n if (\n !set.has(fieldName) &&\n set.size > 0\n ) {\n return false;\n }\n set.delete(fieldName);\n }\n\n const startKey = startKeys[i];\n const endKey = endKeys[i];\n\n if (\n startKey !== endKey && (\n lowerOperatorFieldNames.size > 0 &&\n upperOperatorFieldNames.size > 0\n )\n ) {\n return false;\n }\n\n i++;\n }\n\n return true;\n}\n\nexport function getMatcherQueryOpts(\n operator: string,\n operatorValue: any\n): Partial {\n switch (operator) {\n case '$eq':\n return {\n startKey: operatorValue,\n endKey: operatorValue,\n inclusiveEnd: true,\n inclusiveStart: true\n };\n case '$lte':\n return {\n endKey: operatorValue,\n inclusiveEnd: true\n };\n case '$gte':\n return {\n startKey: 
operatorValue,\n inclusiveStart: true\n };\n case '$lt':\n return {\n endKey: operatorValue,\n inclusiveEnd: false\n };\n case '$gt':\n return {\n startKey: operatorValue,\n inclusiveStart: false\n };\n default:\n throw new Error('SNH');\n }\n}\n\n\n/**\n * Returns a number that determines the quality of the query plan.\n * Higher number means better query plan.\n */\nexport function rateQueryPlan(\n schema: RxJsonSchema>,\n query: FilledMangoQuery,\n queryPlan: RxQueryPlan\n): number {\n let quality: number = 0;\n const addQuality = (value: number) => {\n if (value > 0) {\n quality = quality + value;\n }\n };\n\n const pointsPerMatchingKey = 10;\n\n const nonMinKeyCount = countUntilNotMatching(queryPlan.startKeys, keyValue => keyValue !== INDEX_MIN && keyValue !== INDEX_MAX);\n addQuality(nonMinKeyCount * pointsPerMatchingKey);\n\n const nonMaxKeyCount = countUntilNotMatching(queryPlan.startKeys, keyValue => keyValue !== INDEX_MAX && keyValue !== INDEX_MIN);\n addQuality(nonMaxKeyCount * pointsPerMatchingKey);\n\n const equalKeyCount = countUntilNotMatching(queryPlan.startKeys, (keyValue, idx) => {\n if (keyValue === queryPlan.endKeys[idx]) {\n return true;\n } else {\n return false;\n }\n });\n addQuality(equalKeyCount * pointsPerMatchingKey * 1.5);\n\n const pointsIfNoReSortMustBeDone = queryPlan.sortSatisfiedByIndex ? 
5 : 0;\n addQuality(pointsIfNoReSortMustBeDone);\n\n return quality;\n}\n"],"mappings":";;;;;;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AACA,IAAAC,QAAA,GAAAD,OAAA;AACA,IAAAE,eAAA,GAAAF,OAAA;AAWO,IAAMG,SAAS,GAAAC,OAAA,CAAAD,SAAA,GAAGE,MAAM,CAACC,YAAY,CAAC,KAAK,CAAC;;AAEnD;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACO,IAAMC,SAAS,GAAAH,OAAA,CAAAG,SAAA,GAAGC,MAAM,CAACC,gBAAgB;;AAEhD;AACA;AACA;AACA;AACA;AACA;AACA;AACO,SAASC,YAAYA,CACxBC,MAA+C,EAC/CC,KAAkC,EACvB;EACX,IAAMC,QAAQ,GAAGD,KAAK,CAACC,QAAQ;EAE/B,IAAIC,OAAmB,GAAGH,MAAM,CAACG,OAAO,GAAGH,MAAM,CAACG,OAAO,CAACC,KAAK,CAAC,CAAC,CAAC,GAAU,EAAE;EAC9E,IAAIH,KAAK,CAACI,KAAK,EAAE;IACbF,OAAO,GAAG,CAACF,KAAK,CAACI,KAAK,CAAC;EAC3B;;EAEA;AACJ;AACA;AACA;EACI,IAAMC,cAAc,GAAG,CAAC,CAACL,KAAK,CAACM,IAAI,CAACC,IAAI,CAACC,SAAS,IAAIC,MAAM,CAACC,MAAM,CAACF,SAAS,CAAC,CAAC,CAAC,CAAC,KAAK,MAAM,CAAC;;EAE7F;AACJ;AACA;AACA;AACA;AACA;AACA;EACI,IAAMG,sBAAsB,GAAG,IAAIC,GAAG,CAAC,CAAC;EACxCH,MAAM,CAACI,IAAI,CAACZ,QAAQ,CAAC,CAACa,OAAO,CAACC,SAAS,IAAI;IACvC,IAAMC,UAAU,GAAG,IAAAC,qCAAqB,EAAClB,MAAM,EAAEgB,SAAS,CAAC;IAC3D,IACIC,UAAU,IACVA,UAAU,CAACE,IAAI,KAAK,SAAS,IAC7BT,MAAM,CAACU,SAAS,CAACC,cAAc,CAACC,IAAI,CAAEpB,QAAQ,CAASc,SAAS,CAAC,EAAE,KAAK,CAAC,EAC3E;MACEJ,sBAAsB,CAACW,GAAG,CAACP,SAAS,CAAC;IACzC;EACJ,CAAC,CAAC;EAGF,IAAMQ,gBAAgB,GAAGvB,KAAK,CAACM,IAAI,CAACkB,GAAG,CAAChB,SAAS,IAAIC,MAAM,CAACI,IAAI,CAACL,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;EAC/E,IAAMiB,6BAA6B,GAAGF,gBAAgB,CACjDG,MAAM,CAACC,CAAC,IAAI,CAAChB,sBAAsB,CAACiB,GAAG,CAACD,CAAC,CAAC,CAAC,CAC3CE,IAAI,CAAC,GAAG,CAAC;EAEd,IAAIC,kBAAkB,GAAG,CAAC,CAAC;EAC3B,IAAIC,oBAA6C;;EAEjD;AACJ;AACA;AACA;EACI7B,OAAO,CAACY,OAAO,CAAEV,KAAK,IAAK;IACvB,IAAI4B,YAAY,GAAG,IAAI;IACvB,IAAIC,cAAc,GAAG,IAAI;IACzB,IAAMC,IAAyB,GAAG9B,KAAK,CAACoB,GAAG,CAACW,UAAU,IAAI;MACtD,IAAMC,OAAO,GAAInC,QAAQ,CAASkC,UAAU,CAAC;MAC7C,IAAME,SAAS,GAAGD,OAAO,GAAG3B,MAAM,CAACI,IAAI,CAACuB,OAAO,CAAC,GAAG,EAAE;MAErD,IAAIE,WAA8B,GAAG,CAAC,CAAQ;MAC9C,IACI,CAACF,OAAO,IACR,CAACC,SAAS,CAACE,MAAM,EACnB;QACE,IAAMC,QAAQ,GAAGP,cAAc,GAAGtC,SAAS,GAAGJ,SAAS;QACvD+C,WAAW,
GAAG;UACVE,QAAQ;UACRC,MAAM,EAAET,YAAY,GAAGzC,SAAS,GAAGI,SAAS;UAC5CsC,cAAc,EAAE,IAAI;UACpBD,YAAY,EAAE;QAClB,CAAC;MACL,CAAC,MAAM;QACHK,SAAS,CAACvB,OAAO,CAAC4B,QAAQ,IAAI;UAC1B,IAAIC,iBAAiB,CAACf,GAAG,CAACc,QAAQ,CAAC,EAAE;YACjC,IAAME,aAAa,GAAGR,OAAO,CAACM,QAAQ,CAAC;YACvC,IAAMG,WAAW,GAAGC,mBAAmB,CAACJ,QAAQ,EAAEE,aAAa,CAAC;YAChEN,WAAW,GAAG7B,MAAM,CAACsC,MAAM,CAACT,WAAW,EAAEO,WAAW,CAAC;UACzD;QACJ,CAAC,CAAC;MACN;;MAEA;MACA,IAAI,OAAOP,WAAW,CAACE,QAAQ,KAAK,WAAW,EAAE;QAC7CF,WAAW,CAACE,QAAQ,GAAG7C,SAAS;MACpC;MACA,IAAI,OAAO2C,WAAW,CAACG,MAAM,KAAK,WAAW,EAAE;QAC3CH,WAAW,CAACG,MAAM,GAAGlD,SAAS;MAClC;MACA,IAAI,OAAO+C,WAAW,CAACL,cAAc,KAAK,WAAW,EAAE;QACnDK,WAAW,CAACL,cAAc,GAAG,IAAI;MACrC;MACA,IAAI,OAAOK,WAAW,CAACN,YAAY,KAAK,WAAW,EAAE;QACjDM,WAAW,CAACN,YAAY,GAAG,IAAI;MACnC;MAEA,IAAIC,cAAc,IAAI,CAACK,WAAW,CAACL,cAAc,EAAE;QAC/CA,cAAc,GAAG,KAAK;MAC1B;MACA,IAAID,YAAY,IAAI,CAACM,WAAW,CAACN,YAAY,EAAE;QAC3CA,YAAY,GAAG,KAAK;MACxB;MAEA,OAAOM,WAAW;IACtB,CAAC,CAAC;IAGF,IAAMU,SAAS,GAAGd,IAAI,CAACV,GAAG,CAACyB,GAAG,IAAIA,GAAG,CAACT,QAAQ,CAAC;IAC/C,IAAMU,OAAO,GAAGhB,IAAI,CAACV,GAAG,CAACyB,GAAG,IAAIA,GAAG,CAACR,MAAM,CAAC;IAC3C,IAAMU,SAAsB,GAAG;MAC3B/C,KAAK;MACL4C,SAAS;MACTE,OAAO;MACPlB,YAAY;MACZC,cAAc;MACdmB,oBAAoB,EAAE,CAAC/C,cAAc,IAAIoB,6BAA6B,KAAKrB,KAAK,CAACsB,MAAM,CAACC,CAAC,IAAI,CAAChB,sBAAsB,CAACiB,GAAG,CAACD,CAAC,CAAC,CAAC,CAACE,IAAI,CAAC,GAAG,CAAC;MACtIwB,wBAAwB,EAAEC,0BAA0B,CAAClD,KAAK,EAAEJ,KAAK,CAACC,QAAQ,EAAE+C,SAAS,EAAEE,OAAO;IAClG,CAAC;IACD,IAAMK,OAAO,GAAGC,aAAa,CACzBzD,MAAM,EACNC,KAAK,EACLmD,SACJ,CAAC;IACD,IAEQI,OAAO,IAAIzB,kBAAkB,IAEjC9B,KAAK,CAACI,KAAK,EACb;MACE0B,kBAAkB,GAAGyB,OAAO;MAC5BxB,oBAAoB,GAAGoB,SAAS;IACpC;EACJ,CAAC,CAAC;;EAEF;AACJ;AACA;EACI,IAAI,CAACpB,oBAAoB,EAAE;IACvB,MAAM,IAAA0B,mBAAU,EAAC,KAAK,EAAE;MACpBzD;IACJ,CAAC,CAAC;EACN;EAEA,OAAO+B,oBAAoB;AAC/B;AAEO,IAAMY,iBAAiB,GAAAnD,OAAA,CAAAmD,iBAAA,GAAG,IAAI/B,GAAG,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;AACxE,IAAM8C,6BAA6B,GAAAlE,OAAA,CAAAkE,6BAAA,GAAG,IAAI9C,GAAG,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,
CAAC,CAAC;AACrE,IAAM+C,6BAA6B,GAAAnE,OAAA,CAAAmE,6BAAA,GAAG,IAAI/C,GAAG,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;AAGrE,SAAS0C,0BAA0BA,CACtClD,KAAe,EACfH,QAAiC,EACjC+C,SAA2B,EAC3BE,OAAyB,EAClB;EAGP;AACJ;AACA;AACA;EACI,IAAMU,eAAe,GAAGnD,MAAM,CAACoD,OAAO,CAAC5D,QAAQ,CAAC;EAChD,IAAM6D,sBAAsB,GAAGF,eAAe,CACzCrD,IAAI,CAAC,CAAC,CAACQ,SAAS,EAAEgD,SAAS,CAAC,KAAK;IAC9B,IAAI,CAAC3D,KAAK,CAAC4D,QAAQ,CAACjD,SAAS,CAAC,EAAE;MAC5B,OAAO,IAAI;IACf;IACA,IAAMkD,mBAAmB,GAAGxD,MAAM,CAACoD,OAAO,CAACE,SAAgB,CAAC,CACvDxD,IAAI,CAAC,CAAC,CAAC2D,EAAE,EAAEC,MAAM,CAAC,KAAK,CAACxB,iBAAiB,CAACf,GAAG,CAACsC,EAAE,CAAC,CAAC;IACvD,OAAOD,mBAAmB;EAC9B,CAAC,CAAC;EAEN,IAAIH,sBAAsB,EAAE;IACxB,OAAO,KAAK;EAChB;;EAEA;AACJ;AACA;EACI,IAAI7D,QAAQ,CAACmE,IAAI,IAAInE,QAAQ,CAACoE,GAAG,EAAE;IAC/B,OAAO,KAAK;EAChB;;EAIA;EACA,IAAMC,oBAA8B,GAAG,EAAE;EACzC,IAAMC,uBAAuB,GAAG,IAAI3D,GAAG,CAAS,CAAC;EACjD,KAAK,IAAM,CAACG,SAAS,EAAEgD,SAAS,CAAC,IAAItD,MAAM,CAACoD,OAAO,CAAC5D,QAAQ,CAAC,EAAE;IAC3D,IAAI,CAACG,KAAK,CAAC4D,QAAQ,CAACjD,SAAS,CAAC,EAAE;MAC5B,OAAO,KAAK;IAChB;;IAEA;IACA,IAAMyD,aAAa,GAAG/D,MAAM,CAACI,IAAI,CAACkD,SAAgB,CAAC,CAACrC,MAAM,CAAC+C,GAAG,IAAIf,6BAA6B,CAAC9B,GAAG,CAAC6C,GAAG,CAAC,CAAC;IACzG,IAAID,aAAa,CAACjC,MAAM,GAAG,CAAC,EAAE;MAC1B,OAAO,KAAK;IAChB;IAEA,IAAMmC,eAAe,GAAGF,aAAa,CAAC,CAAC,CAAC;IACxC,IAAIE,eAAe,EAAE;MACjBH,uBAAuB,CAACjD,GAAG,CAACP,SAAS,CAAC;IAC1C;IACA,IAAI2D,eAAe,KAAK,KAAK,EAAE;MAC3B,IAAIJ,oBAAoB,CAAC/B,MAAM,GAAG,CAAC,EAAE;QACjC,OAAO,KAAK;MAChB,CAAC,MAAM;QACH+B,oBAAoB,CAACK,IAAI,CAACD,eAAe,CAAC;MAC9C;IACJ;EACJ;;EAEA;EACA,IAAME,oBAA8B,GAAG,EAAE;EACzC,IAAMC,uBAAuB,GAAG,IAAIjE,GAAG,CAAS,CAAC;EACjD,KAAK,IAAM,CAACG,UAAS,EAAEgD,UAAS,CAAC,IAAItD,MAAM,CAACoD,OAAO,CAAC5D,QAAQ,CAAC,EAAE;IAC3D,IAAI,CAACG,KAAK,CAAC4D,QAAQ,CAACjD,UAAS,CAAC,EAAE;MAC5B,OAAO,KAAK;IAChB;;IAEA;IACA,IAAM+D,aAAa,GAAGrE,MAAM,CAACI,IAAI,CAACkD,UAAgB,CAAC,CAACrC,MAAM,CAAC+C,GAAG,IAAId,6BAA6B,CAAC/B,GAAG,CAAC6C,GAAG,CAAC,CAAC;IACzG,IAAIK,aAAa,CAACvC,MAAM,GAAG,CAAC,EAAE;MAC1B,OAAO,KAAK;IAChB;IAEA,IAAMwC,cAAc,GAAGD,aAAa,CAAC,CAAC,CAAC;IA
CvC,IAAIC,cAAc,EAAE;MAChBF,uBAAuB,CAACvD,GAAG,CAACP,UAAS,CAAC;IAC1C;IACA,IAAIgE,cAAc,KAAK,KAAK,EAAE;MAC1B,IAAIH,oBAAoB,CAACrC,MAAM,GAAG,CAAC,EAAE;QACjC,OAAO,KAAK;MAChB,CAAC,MAAM;QACHqC,oBAAoB,CAACD,IAAI,CAACI,cAAc,CAAC;MAC7C;IACJ;EACJ;;EAGA;AACJ;AACA;AACA;EACI,IAAIC,CAAC,GAAG,CAAC;EACT,KAAK,IAAMjE,WAAS,IAAIX,KAAK,EAAE;IAC3B,KAAK,IAAM6E,GAAG,IAAI,CACdV,uBAAuB,EACvBM,uBAAuB,CAC1B,EAAE;MACC,IACI,CAACI,GAAG,CAACrD,GAAG,CAACb,WAAS,CAAC,IACnBkE,GAAG,CAACC,IAAI,GAAG,CAAC,EACd;QACE,OAAO,KAAK;MAChB;MACAD,GAAG,CAACE,MAAM,CAACpE,WAAS,CAAC;IACzB;IAEA,IAAMyB,QAAQ,GAAGQ,SAAS,CAACgC,CAAC,CAAC;IAC7B,IAAMvC,MAAM,GAAGS,OAAO,CAAC8B,CAAC,CAAC;IAEzB,IACIxC,QAAQ,KAAKC,MAAM,IACf8B,uBAAuB,CAACW,IAAI,GAAG,CAAC,IAChCL,uBAAuB,CAACK,IAAI,GAAG,CAClC,EACH;MACE,OAAO,KAAK;IAChB;IAEAF,CAAC,EAAE;EACP;EAEA,OAAO,IAAI;AACf;AAEO,SAASlC,mBAAmBA,CAC/BJ,QAAgB,EAChBE,aAAkB,EACQ;EAC1B,QAAQF,QAAQ;IACZ,KAAK,KAAK;MACN,OAAO;QACHF,QAAQ,EAAEI,aAAa;QACvBH,MAAM,EAAEG,aAAa;QACrBZ,YAAY,EAAE,IAAI;QAClBC,cAAc,EAAE;MACpB,CAAC;IACL,KAAK,MAAM;MACP,OAAO;QACHQ,MAAM,EAAEG,aAAa;QACrBZ,YAAY,EAAE;MAClB,CAAC;IACL,KAAK,MAAM;MACP,OAAO;QACHQ,QAAQ,EAAEI,aAAa;QACvBX,cAAc,EAAE;MACpB,CAAC;IACL,KAAK,KAAK;MACN,OAAO;QACHQ,MAAM,EAAEG,aAAa;QACrBZ,YAAY,EAAE;MAClB,CAAC;IACL,KAAK,KAAK;MACN,OAAO;QACHQ,QAAQ,EAAEI,aAAa;QACvBX,cAAc,EAAE;MACpB,CAAC;IACL;MACI,MAAM,IAAImD,KAAK,CAAC,KAAK,CAAC;EAC9B;AACJ;;AAGA;AACA;AACA;AACA;AACO,SAAS5B,aAAaA,CACzBzD,MAA+C,EAC/CC,KAAkC,EAClCmD,SAAsB,EAChB;EACN,IAAII,OAAe,GAAG,CAAC;EACvB,IAAM8B,UAAU,GAAIC,KAAa,IAAK;IAClC,IAAIA,KAAK,GAAG,CAAC,EAAE;MACX/B,OAAO,GAAGA,OAAO,GAAG+B,KAAK;IAC7B;EACJ,CAAC;EAED,IAAMC,oBAAoB,GAAG,EAAE;EAE/B,IAAMC,cAAc,GAAG,IAAAC,4BAAqB,EAACtC,SAAS,CAACH,SAAS,EAAE0C,QAAQ,IAAIA,QAAQ,KAAK/F,SAAS,IAAI+F,QAAQ,KAAKnG,SAAS,CAAC;EAC/H8F,UAAU,CAACG,cAAc,GAAGD,oBAAoB,CAAC;EAEjD,IAAMI,cAAc,GAAG,IAAAF,4BAAqB,EAACtC,SAAS,CAACH,SAAS,EAAE0C,QAAQ,IAAIA,QAAQ,KAAKnG,SAAS,IAAImG,QAAQ,KAAK/F,SAAS,CAAC;EAC/H0F,UAAU,CAACM,cAAc,GAAGJ,oBAAoB,CAAC;EAEjD,IAAMK,aAAa,GAAG,IAAAH,4BAAqB,EAACtC,SAAS,CAACH,SAAS,EAAE,CAAC0C,QAAQ,E
AAEG,GAAG,KAAK;IAChF,IAAIH,QAAQ,KAAKvC,SAAS,CAACD,OAAO,CAAC2C,GAAG,CAAC,EAAE;MACrC,OAAO,IAAI;IACf,CAAC,MAAM;MACH,OAAO,KAAK;IAChB;EACJ,CAAC,CAAC;EACFR,UAAU,CAACO,aAAa,GAAGL,oBAAoB,GAAG,GAAG,CAAC;EAEtD,IAAMO,0BAA0B,GAAG3C,SAAS,CAACC,oBAAoB,GAAG,CAAC,GAAG,CAAC;EACzEiC,UAAU,CAACS,0BAA0B,CAAC;EAEtC,OAAOvC,OAAO;AAClB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/replication-protocol/checkpoint.js b/dist/cjs/replication-protocol/checkpoint.js deleted file mode 100644 index 1d5fccaaeb1..00000000000 --- a/dist/cjs/replication-protocol/checkpoint.js +++ /dev/null @@ -1,102 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.getCheckpointKey = getCheckpointKey; -exports.getLastCheckpointDoc = getLastCheckpointDoc; -exports.setCheckpoint = setCheckpoint; -var _rxSchemaHelper = require("../rx-schema-helper.js"); -var _rxStorageHelper = require("../rx-storage-helper.js"); -var _index = require("../plugins/utils/index.js"); -async function getLastCheckpointDoc(state, direction) { - var checkpointDocId = (0, _rxSchemaHelper.getComposedPrimaryKeyOfDocumentData)(state.input.metaInstance.schema, { - isCheckpoint: '1', - itemId: direction - }); - var checkpointResult = await state.input.metaInstance.findDocumentsById([checkpointDocId], false); - var checkpointDoc = checkpointResult[0]; - state.lastCheckpointDoc[direction] = checkpointDoc; - if (checkpointDoc) { - return checkpointDoc.checkpointData; - } else { - return undefined; - } -} - -/** - * Sets the checkpoint, - * automatically resolves conflicts that appear. 
- */ -async function setCheckpoint(state, direction, checkpoint) { - state.checkpointQueue = state.checkpointQueue.then(async () => { - var previousCheckpointDoc = state.lastCheckpointDoc[direction]; - if (checkpoint && - /** - * If the replication is already canceled, - * we do not write a checkpoint - * because that could mean we write a checkpoint - * for data that has been fetched from the master - * but not been written to the child. - */ - !state.events.canceled.getValue() && ( - /** - * Only write checkpoint if it is different from before - * to have less writes to the storage. - */ - - !previousCheckpointDoc || JSON.stringify(previousCheckpointDoc.checkpointData) !== JSON.stringify(checkpoint))) { - var newDoc = { - id: '', - isCheckpoint: '1', - itemId: direction, - _deleted: false, - _attachments: {}, - checkpointData: checkpoint, - _meta: (0, _index.getDefaultRxDocumentMeta)(), - _rev: (0, _index.getDefaultRevision)() - }; - newDoc.id = (0, _rxSchemaHelper.getComposedPrimaryKeyOfDocumentData)(state.input.metaInstance.schema, newDoc); - while (!state.events.canceled.getValue()) { - /** - * Instead of just storing the new checkpoint, - * we have to stack up the checkpoint with the previous one. - * This is required for plugins like the sharding RxStorage - * where the changeStream events only contain a Partial of the - * checkpoint. 
- */ - if (previousCheckpointDoc) { - newDoc.checkpointData = (0, _rxStorageHelper.stackCheckpoints)([previousCheckpointDoc.checkpointData, newDoc.checkpointData]); - } - newDoc._meta.lwt = (0, _index.now)(); - newDoc._rev = (0, _index.createRevision)(await state.checkpointKey, previousCheckpointDoc); - if (state.events.canceled.getValue()) { - return; - } - var result = await state.input.metaInstance.bulkWrite([{ - previous: previousCheckpointDoc, - document: newDoc - }], 'replication-set-checkpoint'); - var sucessDoc = result.success[0]; - if (sucessDoc) { - state.lastCheckpointDoc[direction] = sucessDoc; - return; - } else { - var error = result.error[0]; - if (error.status !== 409) { - throw error; - } else { - previousCheckpointDoc = (0, _index.ensureNotFalsy)(error.documentInDb); - newDoc._rev = (0, _index.createRevision)(await state.checkpointKey, previousCheckpointDoc); - } - } - } - } - }); - await state.checkpointQueue; -} -async function getCheckpointKey(input) { - var hash = await input.hashFunction([input.identifier, input.forkInstance.databaseName, input.forkInstance.collectionName].join('||')); - return 'rx_storage_replication_' + hash; -} -//# sourceMappingURL=checkpoint.js.map \ No newline at end of file diff --git a/dist/cjs/replication-protocol/checkpoint.js.map b/dist/cjs/replication-protocol/checkpoint.js.map deleted file mode 100644 index 28cff33234b..00000000000 --- a/dist/cjs/replication-protocol/checkpoint.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"checkpoint.js","names":["_rxSchemaHelper","require","_rxStorageHelper","_index","getLastCheckpointDoc","state","direction","checkpointDocId","getComposedPrimaryKeyOfDocumentData","input","metaInstance","schema","isCheckpoint","itemId","checkpointResult","findDocumentsById","checkpointDoc","lastCheckpointDoc","checkpointData","undefined","setCheckpoint","checkpoint","checkpointQueue","then","previousCheckpointDoc","events","canceled","getValue","JSON","stringify","newDoc","id","_deleted","_attachments","_meta","getDefaultRxDocumentMeta","_rev","getDefaultRevision","stackCheckpoints","lwt","now","createRevision","checkpointKey","result","bulkWrite","previous","document","sucessDoc","success","error","status","ensureNotFalsy","documentInDb","getCheckpointKey","hash","hashFunction","identifier","forkInstance","databaseName","collectionName","join"],"sources":["../../../src/replication-protocol/checkpoint.ts"],"sourcesContent":["import { getComposedPrimaryKeyOfDocumentData } from '../rx-schema-helper.ts';\nimport { stackCheckpoints } from '../rx-storage-helper.ts';\nimport type {\n RxDocumentData,\n RxStorageInstanceReplicationInput,\n RxStorageInstanceReplicationState,\n RxStorageReplicationDirection,\n RxStorageReplicationMeta\n} from '../types/index.d.ts';\nimport {\n createRevision,\n ensureNotFalsy,\n getDefaultRevision,\n getDefaultRxDocumentMeta,\n now\n} from '../plugins/utils/index.ts';\n\nexport async function getLastCheckpointDoc(\n state: RxStorageInstanceReplicationState,\n direction: RxStorageReplicationDirection\n): Promise {\n const checkpointDocId = getComposedPrimaryKeyOfDocumentData(\n state.input.metaInstance.schema,\n {\n isCheckpoint: '1',\n itemId: direction\n }\n );\n const checkpointResult = await state.input.metaInstance.findDocumentsById(\n [\n checkpointDocId\n ],\n false\n );\n\n const checkpointDoc = checkpointResult[0];\n state.lastCheckpointDoc[direction] = checkpointDoc;\n if (checkpointDoc) {\n return 
checkpointDoc.checkpointData;\n } else {\n return undefined;\n }\n}\n\n\n/**\n * Sets the checkpoint,\n * automatically resolves conflicts that appear.\n */\nexport async function setCheckpoint(\n state: RxStorageInstanceReplicationState,\n direction: RxStorageReplicationDirection,\n checkpoint: CheckpointType\n) {\n state.checkpointQueue = state.checkpointQueue.then(async () => {\n let previousCheckpointDoc = state.lastCheckpointDoc[direction];\n if (\n checkpoint &&\n /**\n * If the replication is already canceled,\n * we do not write a checkpoint\n * because that could mean we write a checkpoint\n * for data that has been fetched from the master\n * but not been written to the child.\n */\n !state.events.canceled.getValue() &&\n /**\n * Only write checkpoint if it is different from before\n * to have less writes to the storage.\n */\n (\n !previousCheckpointDoc ||\n JSON.stringify(previousCheckpointDoc.checkpointData) !== JSON.stringify(checkpoint)\n )\n ) {\n const newDoc: RxDocumentData> = {\n id: '',\n isCheckpoint: '1',\n itemId: direction,\n _deleted: false,\n _attachments: {},\n checkpointData: checkpoint,\n _meta: getDefaultRxDocumentMeta(),\n _rev: getDefaultRevision()\n };\n newDoc.id = getComposedPrimaryKeyOfDocumentData(\n state.input.metaInstance.schema,\n newDoc\n );\n while (!state.events.canceled.getValue()) {\n /**\n * Instead of just storing the new checkpoint,\n * we have to stack up the checkpoint with the previous one.\n * This is required for plugins like the sharding RxStorage\n * where the changeStream events only contain a Partial of the\n * checkpoint.\n */\n if (previousCheckpointDoc) {\n newDoc.checkpointData = stackCheckpoints([\n previousCheckpointDoc.checkpointData,\n newDoc.checkpointData\n ]);\n }\n newDoc._meta.lwt = now();\n newDoc._rev = createRevision(\n await state.checkpointKey,\n previousCheckpointDoc\n );\n\n if (state.events.canceled.getValue()) {\n return;\n }\n\n const result = await 
state.input.metaInstance.bulkWrite([{\n previous: previousCheckpointDoc,\n document: newDoc\n }], 'replication-set-checkpoint');\n\n const sucessDoc = result.success[0];\n if (sucessDoc) {\n state.lastCheckpointDoc[direction] = sucessDoc;\n return;\n } else {\n const error = result.error[0];\n if (error.status !== 409) {\n throw error;\n } else {\n previousCheckpointDoc = ensureNotFalsy(error.documentInDb);\n newDoc._rev = createRevision(\n await state.checkpointKey,\n previousCheckpointDoc\n );\n }\n }\n }\n }\n });\n await state.checkpointQueue;\n}\n\nexport async function getCheckpointKey(\n input: RxStorageInstanceReplicationInput\n): Promise {\n const hash = await input.hashFunction([\n input.identifier,\n input.forkInstance.databaseName,\n input.forkInstance.collectionName\n ].join('||'));\n return 'rx_storage_replication_' + hash;\n}\n"],"mappings":";;;;;;;;AAAA,IAAAA,eAAA,GAAAC,OAAA;AACA,IAAAC,gBAAA,GAAAD,OAAA;AAQA,IAAAE,MAAA,GAAAF,OAAA;AAQO,eAAeG,oBAAoBA,CACtCC,KAAmD,EACnDC,SAAwC,EACL;EACnC,IAAMC,eAAe,GAAG,IAAAC,mDAAmC,EACvDH,KAAK,CAACI,KAAK,CAACC,YAAY,CAACC,MAAM,EAC/B;IACIC,YAAY,EAAE,GAAG;IACjBC,MAAM,EAAEP;EACZ,CACJ,CAAC;EACD,IAAMQ,gBAAgB,GAAG,MAAMT,KAAK,CAACI,KAAK,CAACC,YAAY,CAACK,iBAAiB,CACrE,CACIR,eAAe,CAClB,EACD,KACJ,CAAC;EAED,IAAMS,aAAa,GAAGF,gBAAgB,CAAC,CAAC,CAAC;EACzCT,KAAK,CAACY,iBAAiB,CAACX,SAAS,CAAC,GAAGU,aAAa;EAClD,IAAIA,aAAa,EAAE;IACf,OAAOA,aAAa,CAACE,cAAc;EACvC,CAAC,MAAM;IACH,OAAOC,SAAS;EACpB;AACJ;;AAGA;AACA;AACA;AACA;AACO,eAAeC,aAAaA,CAC/Bf,KAAmD,EACnDC,SAAwC,EACxCe,UAA0B,EAC5B;EACEhB,KAAK,CAACiB,eAAe,GAAGjB,KAAK,CAACiB,eAAe,CAACC,IAAI,CAAC,YAAY;IAC3D,IAAIC,qBAAqB,GAAGnB,KAAK,CAACY,iBAAiB,CAACX,SAAS,CAAC;IAC9D,IACIe,UAAU;IACV;AACZ;AACA;AACA;AACA;AACA;AACA;IACY,CAAChB,KAAK,CAACoB,MAAM,CAACC,QAAQ,CAACC,QAAQ,CAAC,CAAC;IACjC;AACZ;AACA;AACA;;IAEgB,CAACH,qBAAqB,IACtBI,IAAI,CAACC,SAAS,CAACL,qBAAqB,CAACN,cAAc,CAAC,KAAKU,IAAI,CAACC,SAAS,CAACR,UAAU,CAAC,CACtF,EACH;MACE,IAAMS,MAA2E,GAAG;QAChFC,EAAE,EAAE,EAAE;QACNnB,YAAY,EAAE,GAAG;QACjBC,MAAM,EAAEP,SAAS;
QACjB0B,QAAQ,EAAE,KAAK;QACfC,YAAY,EAAE,CAAC,CAAC;QAChBf,cAAc,EAAEG,UAAU;QAC1Ba,KAAK,EAAE,IAAAC,+BAAwB,EAAC,CAAC;QACjCC,IAAI,EAAE,IAAAC,yBAAkB,EAAC;MAC7B,CAAC;MACDP,MAAM,CAACC,EAAE,GAAG,IAAAvB,mDAAmC,EAC3CH,KAAK,CAACI,KAAK,CAACC,YAAY,CAACC,MAAM,EAC/BmB,MACJ,CAAC;MACD,OAAO,CAACzB,KAAK,CAACoB,MAAM,CAACC,QAAQ,CAACC,QAAQ,CAAC,CAAC,EAAE;QACtC;AAChB;AACA;AACA;AACA;AACA;AACA;QACgB,IAAIH,qBAAqB,EAAE;UACvBM,MAAM,CAACZ,cAAc,GAAG,IAAAoB,iCAAgB,EAAC,CACrCd,qBAAqB,CAACN,cAAc,EACpCY,MAAM,CAACZ,cAAc,CACxB,CAAC;QACN;QACAY,MAAM,CAACI,KAAK,CAACK,GAAG,GAAG,IAAAC,UAAG,EAAC,CAAC;QACxBV,MAAM,CAACM,IAAI,GAAG,IAAAK,qBAAc,EACxB,MAAMpC,KAAK,CAACqC,aAAa,EACzBlB,qBACJ,CAAC;QAED,IAAInB,KAAK,CAACoB,MAAM,CAACC,QAAQ,CAACC,QAAQ,CAAC,CAAC,EAAE;UAClC;QACJ;QAEA,IAAMgB,MAAM,GAAG,MAAMtC,KAAK,CAACI,KAAK,CAACC,YAAY,CAACkC,SAAS,CAAC,CAAC;UACrDC,QAAQ,EAAErB,qBAAqB;UAC/BsB,QAAQ,EAAEhB;QACd,CAAC,CAAC,EAAE,4BAA4B,CAAC;QAEjC,IAAMiB,SAAS,GAAGJ,MAAM,CAACK,OAAO,CAAC,CAAC,CAAC;QACnC,IAAID,SAAS,EAAE;UACX1C,KAAK,CAACY,iBAAiB,CAACX,SAAS,CAAC,GAAGyC,SAAS;UAC9C;QACJ,CAAC,MAAM;UACH,IAAME,KAAK,GAAGN,MAAM,CAACM,KAAK,CAAC,CAAC,CAAC;UAC7B,IAAIA,KAAK,CAACC,MAAM,KAAK,GAAG,EAAE;YACtB,MAAMD,KAAK;UACf,CAAC,MAAM;YACHzB,qBAAqB,GAAG,IAAA2B,qBAAc,EAACF,KAAK,CAACG,YAAY,CAAC;YAC1DtB,MAAM,CAACM,IAAI,GAAG,IAAAK,qBAAc,EACxB,MAAMpC,KAAK,CAACqC,aAAa,EACzBlB,qBACJ,CAAC;UACL;QACJ;MACJ;IACJ;EACJ,CAAC,CAAC;EACF,MAAMnB,KAAK,CAACiB,eAAe;AAC/B;AAEO,eAAe+B,gBAAgBA,CAClC5C,KAAmD,EACpC;EACf,IAAM6C,IAAI,GAAG,MAAM7C,KAAK,CAAC8C,YAAY,CAAC,CAClC9C,KAAK,CAAC+C,UAAU,EAChB/C,KAAK,CAACgD,YAAY,CAACC,YAAY,EAC/BjD,KAAK,CAACgD,YAAY,CAACE,cAAc,CACpC,CAACC,IAAI,CAAC,IAAI,CAAC,CAAC;EACb,OAAO,yBAAyB,GAAGN,IAAI;AAC3C","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/replication-protocol/conflicts.js b/dist/cjs/replication-protocol/conflicts.js deleted file mode 100644 index 4fc04d9a04b..00000000000 --- a/dist/cjs/replication-protocol/conflicts.js +++ /dev/null @@ -1,77 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: 
true -}); -exports.defaultConflictHandler = void 0; -exports.resolveConflictError = resolveConflictError; -var _index = require("../plugins/utils/index.js"); -var _rxStorageHelper = require("../rx-storage-helper.js"); -var defaultConflictHandler = function (i, _context) { - var newDocumentState = (0, _rxStorageHelper.stripAttachmentsDataFromDocument)(i.newDocumentState); - var realMasterState = (0, _rxStorageHelper.stripAttachmentsDataFromDocument)(i.realMasterState); - - /** - * If the documents are deep equal, - * we have no conflict. - * On your custom conflict handler you might only - * check some properties, like the updatedAt time, - * for better performance, because deepEqual is expensive. - */ - if ((0, _index.deepEqual)(newDocumentState, realMasterState)) { - return Promise.resolve({ - isEqual: true - }); - } - - /** - * The default conflict handler will always - * drop the fork state and use the master state instead. - */ - return Promise.resolve({ - isEqual: false, - documentData: i.realMasterState - }); -}; - -/** - * Resolves a conflict error or determines that the given document states are equal. - * Returns the resolved document that must be written to the fork. - * Then the new document state can be pushed upstream. - * If document is not in conflict, returns undefined. - * If error is non-409, it throws an error. - * Conflicts are only solved in the upstream, never in the downstream. - */ -exports.defaultConflictHandler = defaultConflictHandler; -async function resolveConflictError(state, input, forkState) { - var conflictHandler = state.input.conflictHandler; - var conflictHandlerOutput = await conflictHandler(input, 'replication-resolve-conflict'); - if (conflictHandlerOutput.isEqual) { - /** - * Documents are equal, - * so this is not a conflict -> do nothing. - */ - return undefined; - } else { - /** - * We have a resolved conflict, - * use the resolved document data. 
- */ - var resolvedDoc = Object.assign({}, conflictHandlerOutput.documentData, { - /** - * Because the resolved conflict is written to the fork, - * we have to keep/update the forks _meta data, not the masters. - */ - _meta: (0, _index.flatClone)(forkState._meta), - _rev: (0, _index.getDefaultRevision)(), - _attachments: (0, _index.flatClone)(forkState._attachments) - }); - resolvedDoc._meta.lwt = (0, _index.now)(); - resolvedDoc._rev = (0, _index.createRevision)(await state.checkpointKey, forkState); - return { - resolvedDoc, - output: conflictHandlerOutput - }; - } -} -//# sourceMappingURL=conflicts.js.map \ No newline at end of file diff --git a/dist/cjs/replication-protocol/conflicts.js.map b/dist/cjs/replication-protocol/conflicts.js.map deleted file mode 100644 index 50a0e71bf42..00000000000 --- a/dist/cjs/replication-protocol/conflicts.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"conflicts.js","names":["_index","require","_rxStorageHelper","defaultConflictHandler","i","_context","newDocumentState","stripAttachmentsDataFromDocument","realMasterState","deepEqual","Promise","resolve","isEqual","documentData","exports","resolveConflictError","state","input","forkState","conflictHandler","conflictHandlerOutput","undefined","resolvedDoc","Object","assign","_meta","flatClone","_rev","getDefaultRevision","_attachments","lwt","now","createRevision","checkpointKey","output"],"sources":["../../../src/replication-protocol/conflicts.ts"],"sourcesContent":["import type {\n RxConflictHandler,\n RxConflictHandlerInput,\n RxConflictHandlerOutput,\n RxDocumentData,\n RxStorageInstanceReplicationState\n} from '../types/index.d.ts';\nimport {\n getDefaultRevision,\n createRevision,\n now,\n flatClone,\n deepEqual\n} from '../plugins/utils/index.ts';\nimport { stripAttachmentsDataFromDocument } from '../rx-storage-helper.ts';\n\nexport const defaultConflictHandler: RxConflictHandler = function (\n i: RxConflictHandlerInput,\n _context: string\n): Promise> {\n const 
newDocumentState = stripAttachmentsDataFromDocument(i.newDocumentState);\n const realMasterState = stripAttachmentsDataFromDocument(i.realMasterState);\n\n /**\n * If the documents are deep equal,\n * we have no conflict.\n * On your custom conflict handler you might only\n * check some properties, like the updatedAt time,\n * for better performance, because deepEqual is expensive.\n */\n if (deepEqual(\n newDocumentState,\n realMasterState\n )) {\n return Promise.resolve({\n isEqual: true\n });\n }\n\n /**\n * The default conflict handler will always\n * drop the fork state and use the master state instead.\n */\n return Promise.resolve({\n isEqual: false,\n documentData: i.realMasterState\n });\n};\n\n\n/**\n * Resolves a conflict error or determines that the given document states are equal.\n * Returns the resolved document that must be written to the fork.\n * Then the new document state can be pushed upstream.\n * If document is not in conflict, returns undefined.\n * If error is non-409, it throws an error.\n * Conflicts are only solved in the upstream, never in the downstream.\n */\nexport async function resolveConflictError(\n state: RxStorageInstanceReplicationState,\n input: RxConflictHandlerInput,\n forkState: RxDocumentData\n): Promise<{\n resolvedDoc: RxDocumentData;\n output: RxConflictHandlerOutput;\n} | undefined> {\n const conflictHandler: RxConflictHandler = state.input.conflictHandler;\n const conflictHandlerOutput = await conflictHandler(input, 'replication-resolve-conflict');\n\n if (conflictHandlerOutput.isEqual) {\n /**\n * Documents are equal,\n * so this is not a conflict -> do nothing.\n */\n return undefined;\n } else {\n /**\n * We have a resolved conflict,\n * use the resolved document data.\n */\n const resolvedDoc: RxDocumentData = Object.assign(\n {},\n conflictHandlerOutput.documentData,\n {\n /**\n * Because the resolved conflict is written to the fork,\n * we have to keep/update the forks _meta data, not the masters.\n */\n _meta: 
flatClone(forkState._meta),\n _rev: getDefaultRevision(),\n _attachments: flatClone(forkState._attachments)\n }\n ) as any;\n resolvedDoc._meta.lwt = now();\n resolvedDoc._rev = createRevision(\n await state.checkpointKey,\n forkState\n );\n return {\n resolvedDoc,\n output: conflictHandlerOutput\n };\n }\n}\n"],"mappings":";;;;;;;AAOA,IAAAA,MAAA,GAAAC,OAAA;AAOA,IAAAC,gBAAA,GAAAD,OAAA;AAEO,IAAME,sBAA8C,GAAG,SAAAA,CAC1DC,CAA8B,EAC9BC,QAAgB,EACqB;EACrC,IAAMC,gBAAgB,GAAG,IAAAC,iDAAgC,EAACH,CAAC,CAACE,gBAAgB,CAAC;EAC7E,IAAME,eAAe,GAAG,IAAAD,iDAAgC,EAACH,CAAC,CAACI,eAAe,CAAC;;EAE3E;AACJ;AACA;AACA;AACA;AACA;AACA;EACI,IAAI,IAAAC,gBAAS,EACTH,gBAAgB,EAChBE,eACJ,CAAC,EAAE;IACC,OAAOE,OAAO,CAACC,OAAO,CAAC;MACnBC,OAAO,EAAE;IACb,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA;EACI,OAAOF,OAAO,CAACC,OAAO,CAAC;IACnBC,OAAO,EAAE,KAAK;IACdC,YAAY,EAAET,CAAC,CAACI;EACpB,CAAC,CAAC;AACN,CAAC;;AAGD;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAPAM,OAAA,CAAAX,sBAAA,GAAAA,sBAAA;AAQO,eAAeY,oBAAoBA,CACtCC,KAAmD,EACnDC,KAAwC,EACxCC,SAAoC,EAIzB;EACX,IAAMC,eAA6C,GAAGH,KAAK,CAACC,KAAK,CAACE,eAAe;EACjF,IAAMC,qBAAqB,GAAG,MAAMD,eAAe,CAACF,KAAK,EAAE,8BAA8B,CAAC;EAE1F,IAAIG,qBAAqB,CAACR,OAAO,EAAE;IAC/B;AACR;AACA;AACA;IACQ,OAAOS,SAAS;EACpB,CAAC,MAAM;IACH;AACR;AACA;AACA;IACQ,IAAMC,WAAsC,GAAGC,MAAM,CAACC,MAAM,CACxD,CAAC,CAAC,EACFJ,qBAAqB,CAACP,YAAY,EAClC;MACI;AAChB;AACA;AACA;MACgBY,KAAK,EAAE,IAAAC,gBAAS,EAACR,SAAS,CAACO,KAAK,CAAC;MACjCE,IAAI,EAAE,IAAAC,yBAAkB,EAAC,CAAC;MAC1BC,YAAY,EAAE,IAAAH,gBAAS,EAACR,SAAS,CAACW,YAAY;IAClD,CACJ,CAAQ;IACRP,WAAW,CAACG,KAAK,CAACK,GAAG,GAAG,IAAAC,UAAG,EAAC,CAAC;IAC7BT,WAAW,CAACK,IAAI,GAAG,IAAAK,qBAAc,EAC7B,MAAMhB,KAAK,CAACiB,aAAa,EACzBf,SACJ,CAAC;IACD,OAAO;MACHI,WAAW;MACXY,MAAM,EAAEd;IACZ,CAAC;EACL;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/replication-protocol/downstream.js b/dist/cjs/replication-protocol/downstream.js deleted file mode 100644 index 26fb440809e..00000000000 --- a/dist/cjs/replication-protocol/downstream.js +++ /dev/null @@ -1,330 +0,0 @@ -"use 
strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.startReplicationDownstream = startReplicationDownstream; -var _rxjs = require("rxjs"); -var _rxError = require("../rx-error.js"); -var _rxStorageHelper = require("../rx-storage-helper.js"); -var _index = require("../plugins/utils/index.js"); -var _checkpoint = require("./checkpoint.js"); -var _helper = require("./helper.js"); -var _metaInstance = require("./meta-instance.js"); -/** - * Writes all documents from the master to the fork. - * The downstream has two operation modes - * - Sync by iterating over the checkpoints via downstreamResyncOnce() - * - Sync by listening to the changestream via downstreamProcessChanges() - * We need this to be able to do initial syncs - * and still can have fast event based sync when the client is not offline. - */ -async function startReplicationDownstream(state) { - if (state.input.initialCheckpoint && state.input.initialCheckpoint.downstream) { - var checkpointDoc = await (0, _checkpoint.getLastCheckpointDoc)(state, 'down'); - if (!checkpointDoc) { - await (0, _checkpoint.setCheckpoint)(state, 'down', state.input.initialCheckpoint.downstream); - } - } - var identifierHash = await state.input.hashFunction(state.input.identifier); - var replicationHandler = state.input.replicationHandler; - - // used to detect which tasks etc can in it at which order. - var timer = 0; - var openTasks = []; - function addNewTask(task) { - state.stats.down.addNewTask = state.stats.down.addNewTask + 1; - var taskWithTime = { - time: timer++, - task - }; - openTasks.push(taskWithTime); - state.streamQueue.down = state.streamQueue.down.then(() => { - var useTasks = []; - while (openTasks.length > 0) { - state.events.active.down.next(true); - var innerTaskWithTime = (0, _index.ensureNotFalsy)(openTasks.shift()); - - /** - * If the task came in before the last time we started the pull - * from the master, then we can drop the task. 
- */ - if (innerTaskWithTime.time < lastTimeMasterChangesRequested) { - continue; - } - if (innerTaskWithTime.task === 'RESYNC') { - if (useTasks.length === 0) { - useTasks.push(innerTaskWithTime.task); - break; - } else { - break; - } - } - useTasks.push(innerTaskWithTime.task); - } - if (useTasks.length === 0) { - return; - } - if (useTasks[0] === 'RESYNC') { - return downstreamResyncOnce(); - } else { - return downstreamProcessChanges(useTasks); - } - }).then(() => { - state.events.active.down.next(false); - if (!state.firstSyncDone.down.getValue() && !state.events.canceled.getValue()) { - state.firstSyncDone.down.next(true); - } - }); - } - addNewTask('RESYNC'); - - /** - * If a write on the master happens, we have to trigger the downstream. - * Only do this if not canceled yet, otherwise firstValueFrom errors - * when running on a completed observable. - */ - if (!state.events.canceled.getValue()) { - var sub = replicationHandler.masterChangeStream$.pipe((0, _rxjs.mergeMap)(async ev => { - /** - * While a push is running, we have to delay all incoming - * events from the server to not mix up the replication state. - */ - await (0, _rxjs.firstValueFrom)(state.events.active.up.pipe((0, _rxjs.filter)(s => !s))); - return ev; - })).subscribe(task => { - state.stats.down.masterChangeStreamEmit = state.stats.down.masterChangeStreamEmit + 1; - addNewTask(task); - }); - (0, _rxjs.firstValueFrom)(state.events.canceled.pipe((0, _rxjs.filter)(canceled => !!canceled))).then(() => sub.unsubscribe()); - } - - /** - * For faster performance, we directly start each write - * and then await all writes at the end. 
- */ - var lastTimeMasterChangesRequested = -1; - async function downstreamResyncOnce() { - state.stats.down.downstreamResyncOnce = state.stats.down.downstreamResyncOnce + 1; - if (state.events.canceled.getValue()) { - return; - } - state.checkpointQueue = state.checkpointQueue.then(() => (0, _checkpoint.getLastCheckpointDoc)(state, 'down')); - var lastCheckpoint = await state.checkpointQueue; - var promises = []; - while (!state.events.canceled.getValue()) { - lastTimeMasterChangesRequested = timer++; - var downResult = await replicationHandler.masterChangesSince(lastCheckpoint, state.input.pullBatchSize); - if (downResult.documents.length === 0) { - break; - } - lastCheckpoint = (0, _rxStorageHelper.stackCheckpoints)([lastCheckpoint, downResult.checkpoint]); - promises.push(persistFromMaster(downResult.documents, lastCheckpoint)); - - /** - * By definition we stop pull when the pulled documents - * do not fill up the pullBatchSize because we - * can assume that the remote has no more documents. - */ - if (downResult.documents.length < state.input.pullBatchSize) { - break; - } - } - await Promise.all(promises); - } - function downstreamProcessChanges(tasks) { - state.stats.down.downstreamProcessChanges = state.stats.down.downstreamProcessChanges + 1; - var docsOfAllTasks = []; - var lastCheckpoint = null; - tasks.forEach(task => { - if (task === 'RESYNC') { - throw new Error('SNH'); - } - (0, _index.appendToArray)(docsOfAllTasks, task.documents); - lastCheckpoint = (0, _rxStorageHelper.stackCheckpoints)([lastCheckpoint, task.checkpoint]); - }); - return persistFromMaster(docsOfAllTasks, (0, _index.ensureNotFalsy)(lastCheckpoint)); - } - - /** - * It can happen that the calls to masterChangesSince() or the changeStream() - * are way faster then how fast the documents can be persisted. - * Therefore we merge all incoming downResults into the nonPersistedFromMaster object - * and process them together if possible. 
- * This often bundles up single writes and improves performance - * by processing the documents in bulks. - */ - var persistenceQueue = _index.PROMISE_RESOLVE_VOID; - var nonPersistedFromMaster = { - docs: {} - }; - function persistFromMaster(docs, checkpoint) { - var primaryPath = state.primaryPath; - state.stats.down.persistFromMaster = state.stats.down.persistFromMaster + 1; - - /** - * Add the new docs to the non-persistent list - */ - docs.forEach(docData => { - var docId = docData[primaryPath]; - nonPersistedFromMaster.docs[docId] = docData; - }); - nonPersistedFromMaster.checkpoint = checkpoint; - - /** - * Run in the queue - * with all open documents from nonPersistedFromMaster. - */ - persistenceQueue = persistenceQueue.then(() => { - var downDocsById = nonPersistedFromMaster.docs; - nonPersistedFromMaster.docs = {}; - var useCheckpoint = nonPersistedFromMaster.checkpoint; - var docIds = Object.keys(downDocsById); - if (state.events.canceled.getValue() || docIds.length === 0) { - return _index.PROMISE_RESOLVE_VOID; - } - var writeRowsToFork = []; - var writeRowsToForkById = {}; - var writeRowsToMeta = {}; - var useMetaWriteRows = []; - return Promise.all([state.input.forkInstance.findDocumentsById(docIds, true), (0, _metaInstance.getAssumedMasterState)(state, docIds)]).then(([currentForkStateList, assumedMasterState]) => { - var currentForkState = new Map(); - currentForkStateList.forEach(doc => currentForkState.set(doc[primaryPath], doc)); - return Promise.all(docIds.map(async docId => { - var forkStateFullDoc = currentForkState.get(docId); - var forkStateDocData = forkStateFullDoc ? 
(0, _helper.writeDocToDocState)(forkStateFullDoc, state.hasAttachments, false) : undefined; - var masterState = downDocsById[docId]; - var assumedMaster = assumedMasterState[docId]; - if (assumedMaster && forkStateFullDoc && assumedMaster.metaDocument.isResolvedConflict === forkStateFullDoc._rev) { - /** - * The current fork state represents a resolved conflict - * that first must be send to the master in the upstream. - * All conflicts are resolved by the upstream. - */ - // return PROMISE_RESOLVE_VOID; - await state.streamQueue.up; - } - var isAssumedMasterEqualToForkState = !assumedMaster || !forkStateDocData ? false : await state.input.conflictHandler({ - realMasterState: assumedMaster.docData, - newDocumentState: forkStateDocData - }, 'downstream-check-if-equal-0').then(r => r.isEqual); - if (!isAssumedMasterEqualToForkState && assumedMaster && assumedMaster.docData._rev && forkStateFullDoc && forkStateFullDoc._meta[state.input.identifier] && (0, _index.getHeightOfRevision)(forkStateFullDoc._rev) === forkStateFullDoc._meta[state.input.identifier]) { - isAssumedMasterEqualToForkState = true; - } - if (forkStateFullDoc && assumedMaster && isAssumedMasterEqualToForkState === false || forkStateFullDoc && !assumedMaster) { - /** - * We have a non-upstream-replicated - * local write to the fork. - * This means we ignore the downstream of this document - * because anyway the upstream will first resolve the conflict. - */ - return _index.PROMISE_RESOLVE_VOID; - } - var areStatesExactlyEqual = !forkStateDocData ? false : await state.input.conflictHandler({ - realMasterState: masterState, - newDocumentState: forkStateDocData - }, 'downstream-check-if-equal-1').then(r => r.isEqual); - if (forkStateDocData && areStatesExactlyEqual) { - /** - * Document states are exactly equal. - * This can happen when the replication is shut down - * unexpected like when the user goes offline. 
- * - * Only when the assumedMaster is different from the forkState, - * we have to patch the document in the meta instance. - */ - if (!assumedMaster || isAssumedMasterEqualToForkState === false) { - useMetaWriteRows.push(await (0, _metaInstance.getMetaWriteRow)(state, forkStateDocData, assumedMaster ? assumedMaster.metaDocument : undefined)); - } - return _index.PROMISE_RESOLVE_VOID; - } - - /** - * All other master states need to be written to the forkInstance - * and metaInstance. - */ - var newForkState = Object.assign({}, masterState, forkStateFullDoc ? { - _meta: (0, _index.flatClone)(forkStateFullDoc._meta), - _attachments: state.hasAttachments && masterState._attachments ? masterState._attachments : {}, - _rev: (0, _index.getDefaultRevision)() - } : { - _meta: { - lwt: (0, _index.now)() - }, - _rev: (0, _index.getDefaultRevision)(), - _attachments: state.hasAttachments && masterState._attachments ? masterState._attachments : {} - }); - /** - * If the remote works with revisions, - * we store the height of the next fork-state revision - * inside of the documents meta data. - * By doing so we can filter it out in the upstream - * and detect the document as being equal to master or not. - * This is used for example in the CouchDB replication plugin. - */ - if (masterState._rev) { - var nextRevisionHeight = !forkStateFullDoc ? 1 : (0, _index.getHeightOfRevision)(forkStateFullDoc._rev) + 1; - newForkState._meta[state.input.identifier] = nextRevisionHeight; - if (state.input.keepMeta) { - newForkState._rev = masterState._rev; - } - } - if (state.input.keepMeta && masterState._meta) { - newForkState._meta = masterState._meta; - } - var forkWriteRow = { - previous: forkStateFullDoc, - document: newForkState - }; - forkWriteRow.document._rev = forkWriteRow.document._rev ? 
forkWriteRow.document._rev : (0, _index.createRevision)(identifierHash, forkWriteRow.previous); - writeRowsToFork.push(forkWriteRow); - writeRowsToForkById[docId] = forkWriteRow; - writeRowsToMeta[docId] = await (0, _metaInstance.getMetaWriteRow)(state, masterState, assumedMaster ? assumedMaster.metaDocument : undefined); - })); - }).then(async () => { - if (writeRowsToFork.length > 0) { - return state.input.forkInstance.bulkWrite(writeRowsToFork, await state.downstreamBulkWriteFlag).then(forkWriteResult => { - forkWriteResult.success.forEach(doc => { - var docId = doc[primaryPath]; - state.events.processed.down.next(writeRowsToForkById[docId]); - useMetaWriteRows.push(writeRowsToMeta[docId]); - }); - forkWriteResult.error.forEach(error => { - /** - * We do not have to care about downstream conflict errors here - * because on conflict, it will be solved locally and result in another write. - */ - if (error.status === 409) { - return; - } - // other non-conflict errors must be handled - state.events.error.next((0, _rxError.newRxError)('RC_PULL', { - writeError: error - })); - }); - }); - } - }).then(() => { - if (useMetaWriteRows.length > 0) { - return state.input.metaInstance.bulkWrite((0, _helper.stripAttachmentsDataFromMetaWriteRows)(state, useMetaWriteRows), 'replication-down-write-meta').then(metaWriteResult => { - metaWriteResult.error.forEach(writeError => { - state.events.error.next((0, _rxError.newRxError)('RC_PULL', { - id: writeError.documentId, - writeError - })); - }); - }); - } - }).then(() => { - /** - * For better performance we do not await checkpoint writes, - * but to ensure order on parallel checkpoint writes, - * we have to use a queue. 
- */ - (0, _checkpoint.setCheckpoint)(state, 'down', useCheckpoint); - }); - }).catch(unhandledError => state.events.error.next(unhandledError)); - return persistenceQueue; - } -} -//# sourceMappingURL=downstream.js.map \ No newline at end of file diff --git a/dist/cjs/replication-protocol/downstream.js.map b/dist/cjs/replication-protocol/downstream.js.map deleted file mode 100644 index 1cbd98e6b8f..00000000000 --- a/dist/cjs/replication-protocol/downstream.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"downstream.js","names":["_rxjs","require","_rxError","_rxStorageHelper","_index","_checkpoint","_helper","_metaInstance","startReplicationDownstream","state","input","initialCheckpoint","downstream","checkpointDoc","getLastCheckpointDoc","setCheckpoint","identifierHash","hashFunction","identifier","replicationHandler","timer","openTasks","addNewTask","task","stats","down","taskWithTime","time","push","streamQueue","then","useTasks","length","events","active","next","innerTaskWithTime","ensureNotFalsy","shift","lastTimeMasterChangesRequested","downstreamResyncOnce","downstreamProcessChanges","firstSyncDone","getValue","canceled","sub","masterChangeStream$","pipe","mergeMap","ev","firstValueFrom","up","filter","s","subscribe","masterChangeStreamEmit","unsubscribe","checkpointQueue","lastCheckpoint","promises","downResult","masterChangesSince","pullBatchSize","documents","stackCheckpoints","checkpoint","persistFromMaster","Promise","all","tasks","docsOfAllTasks","forEach","Error","appendToArray","persistenceQueue","PROMISE_RESOLVE_VOID","nonPersistedFromMaster","docs","primaryPath","docData","docId","downDocsById","useCheckpoint","docIds","Object","keys","writeRowsToFork","writeRowsToForkById","writeRowsToMeta","useMetaWriteRows","forkInstance","findDocumentsById","getAssumedMasterState","currentForkStateList","assumedMasterState","currentForkState","Map","doc","set","map","forkStateFullDoc","get","forkStateDocData","writeDocToDocState","hasAttachments","undef
ined","masterState","assumedMaster","metaDocument","isResolvedConflict","_rev","isAssumedMasterEqualToForkState","conflictHandler","realMasterState","newDocumentState","r","isEqual","_meta","getHeightOfRevision","areStatesExactlyEqual","getMetaWriteRow","newForkState","assign","flatClone","_attachments","getDefaultRevision","lwt","now","nextRevisionHeight","keepMeta","forkWriteRow","previous","document","createRevision","bulkWrite","downstreamBulkWriteFlag","forkWriteResult","success","processed","error","status","newRxError","writeError","metaInstance","stripAttachmentsDataFromMetaWriteRows","metaWriteResult","id","documentId","catch","unhandledError"],"sources":["../../../src/replication-protocol/downstream.ts"],"sourcesContent":["import {\n firstValueFrom,\n filter,\n mergeMap\n} from 'rxjs';\nimport { newRxError } from '../rx-error.ts';\nimport { stackCheckpoints } from '../rx-storage-helper.ts';\nimport type {\n RxStorageInstanceReplicationState,\n BulkWriteRow,\n BulkWriteRowById,\n RxStorageReplicationMeta,\n RxDocumentData,\n ById,\n WithDeleted,\n DocumentsWithCheckpoint,\n WithDeletedAndAttachments\n} from '../types/index.d.ts';\nimport {\n appendToArray,\n createRevision,\n ensureNotFalsy,\n flatClone,\n getDefaultRevision,\n getHeightOfRevision,\n now,\n PROMISE_RESOLVE_VOID\n} from '../plugins/utils/index.ts';\nimport {\n getLastCheckpointDoc,\n setCheckpoint\n} from './checkpoint.ts';\nimport {\n stripAttachmentsDataFromMetaWriteRows,\n writeDocToDocState\n} from './helper.ts';\nimport {\n getAssumedMasterState,\n getMetaWriteRow\n} from './meta-instance.ts';\n\n/**\n * Writes all documents from the master to the fork.\n * The downstream has two operation modes\n * - Sync by iterating over the checkpoints via downstreamResyncOnce()\n * - Sync by listening to the changestream via downstreamProcessChanges()\n * We need this to be able to do initial syncs\n * and still can have fast event based sync when the client is not offline.\n */\nexport async 
function startReplicationDownstream(\n state: RxStorageInstanceReplicationState\n) {\n if (\n state.input.initialCheckpoint &&\n state.input.initialCheckpoint.downstream\n ) {\n const checkpointDoc = await getLastCheckpointDoc(state, 'down');\n if (!checkpointDoc) {\n await setCheckpoint(\n state,\n 'down',\n state.input.initialCheckpoint.downstream\n );\n }\n }\n\n const identifierHash = await state.input.hashFunction(state.input.identifier);\n const replicationHandler = state.input.replicationHandler;\n\n // used to detect which tasks etc can in it at which order.\n let timer = 0;\n\n\n type Task = DocumentsWithCheckpoint | 'RESYNC';\n type TaskWithTime = {\n time: number;\n task: Task;\n };\n const openTasks: TaskWithTime[] = [];\n\n\n function addNewTask(task: Task): void {\n state.stats.down.addNewTask = state.stats.down.addNewTask + 1;\n const taskWithTime = {\n time: timer++,\n task\n };\n openTasks.push(taskWithTime);\n state.streamQueue.down = state.streamQueue.down\n .then(() => {\n const useTasks: Task[] = [];\n while (openTasks.length > 0) {\n state.events.active.down.next(true);\n const innerTaskWithTime = ensureNotFalsy(openTasks.shift());\n\n /**\n * If the task came in before the last time we started the pull\n * from the master, then we can drop the task.\n */\n if (innerTaskWithTime.time < lastTimeMasterChangesRequested) {\n continue;\n }\n\n if (innerTaskWithTime.task === 'RESYNC') {\n if (useTasks.length === 0) {\n useTasks.push(innerTaskWithTime.task);\n break;\n } else {\n break;\n }\n }\n\n useTasks.push(innerTaskWithTime.task);\n }\n if (useTasks.length === 0) {\n return;\n }\n\n if (useTasks[0] === 'RESYNC') {\n return downstreamResyncOnce();\n } else {\n return downstreamProcessChanges(useTasks);\n }\n }).then(() => {\n state.events.active.down.next(false);\n if (\n !state.firstSyncDone.down.getValue() &&\n !state.events.canceled.getValue()\n ) {\n state.firstSyncDone.down.next(true);\n }\n });\n }\n addNewTask('RESYNC');\n\n /**\n * If a 
write on the master happens, we have to trigger the downstream.\n * Only do this if not canceled yet, otherwise firstValueFrom errors\n * when running on a completed observable.\n */\n if (!state.events.canceled.getValue()) {\n const sub = replicationHandler\n .masterChangeStream$\n .pipe(\n mergeMap(async (ev) => {\n /**\n * While a push is running, we have to delay all incoming\n * events from the server to not mix up the replication state.\n */\n await firstValueFrom(\n state.events.active.up.pipe(filter(s => !s))\n );\n return ev;\n })\n )\n .subscribe((task: Task) => {\n state.stats.down.masterChangeStreamEmit = state.stats.down.masterChangeStreamEmit + 1;\n addNewTask(task);\n });\n firstValueFrom(\n state.events.canceled.pipe(\n filter(canceled => !!canceled)\n )\n ).then(() => sub.unsubscribe());\n }\n\n\n /**\n * For faster performance, we directly start each write\n * and then await all writes at the end.\n */\n let lastTimeMasterChangesRequested: number = -1;\n async function downstreamResyncOnce() {\n state.stats.down.downstreamResyncOnce = state.stats.down.downstreamResyncOnce + 1;\n if (state.events.canceled.getValue()) {\n return;\n }\n\n state.checkpointQueue = state.checkpointQueue.then(() => getLastCheckpointDoc(state, 'down'));\n let lastCheckpoint: CheckpointType = await state.checkpointQueue;\n\n\n const promises: Promise[] = [];\n while (!state.events.canceled.getValue()) {\n lastTimeMasterChangesRequested = timer++;\n const downResult = await replicationHandler.masterChangesSince(\n lastCheckpoint,\n state.input.pullBatchSize\n );\n\n if (downResult.documents.length === 0) {\n break;\n }\n\n lastCheckpoint = stackCheckpoints([lastCheckpoint, downResult.checkpoint]);\n\n promises.push(\n persistFromMaster(\n downResult.documents,\n lastCheckpoint\n )\n );\n\n /**\n * By definition we stop pull when the pulled documents\n * do not fill up the pullBatchSize because we\n * can assume that the remote has no more documents.\n */\n if 
(downResult.documents.length < state.input.pullBatchSize) {\n break;\n }\n\n }\n await Promise.all(promises);\n }\n\n\n function downstreamProcessChanges(tasks: Task[]) {\n state.stats.down.downstreamProcessChanges = state.stats.down.downstreamProcessChanges + 1;\n const docsOfAllTasks: WithDeleted[] = [];\n let lastCheckpoint: CheckpointType | undefined = null as any;\n\n tasks.forEach(task => {\n if (task === 'RESYNC') {\n throw new Error('SNH');\n }\n appendToArray(docsOfAllTasks, task.documents);\n lastCheckpoint = stackCheckpoints([lastCheckpoint, task.checkpoint]);\n });\n return persistFromMaster(\n docsOfAllTasks,\n ensureNotFalsy(lastCheckpoint)\n );\n }\n\n\n /**\n * It can happen that the calls to masterChangesSince() or the changeStream()\n * are way faster then how fast the documents can be persisted.\n * Therefore we merge all incoming downResults into the nonPersistedFromMaster object\n * and process them together if possible.\n * This often bundles up single writes and improves performance\n * by processing the documents in bulks.\n */\n let persistenceQueue = PROMISE_RESOLVE_VOID;\n const nonPersistedFromMaster: {\n checkpoint?: CheckpointType;\n docs: ById>;\n } = {\n docs: {}\n };\n\n function persistFromMaster(\n docs: WithDeleted[],\n checkpoint: CheckpointType\n ): Promise {\n const primaryPath = state.primaryPath;\n state.stats.down.persistFromMaster = state.stats.down.persistFromMaster + 1;\n\n /**\n * Add the new docs to the non-persistent list\n */\n docs.forEach(docData => {\n const docId: string = (docData as any)[primaryPath];\n nonPersistedFromMaster.docs[docId] = docData;\n });\n nonPersistedFromMaster.checkpoint = checkpoint;\n\n /**\n * Run in the queue\n * with all open documents from nonPersistedFromMaster.\n */\n persistenceQueue = persistenceQueue.then(() => {\n\n const downDocsById: ById> = nonPersistedFromMaster.docs;\n nonPersistedFromMaster.docs = {};\n const useCheckpoint = nonPersistedFromMaster.checkpoint;\n const docIds 
= Object.keys(downDocsById);\n\n if (\n state.events.canceled.getValue() ||\n docIds.length === 0\n ) {\n return PROMISE_RESOLVE_VOID;\n }\n\n const writeRowsToFork: BulkWriteRow[] = [];\n const writeRowsToForkById: ById> = {};\n const writeRowsToMeta: BulkWriteRowById> = {};\n const useMetaWriteRows: BulkWriteRow>[] = [];\n\n return Promise.all([\n state.input.forkInstance.findDocumentsById(docIds, true),\n getAssumedMasterState(\n state,\n docIds\n )\n ]).then(([\n currentForkStateList,\n assumedMasterState\n ]) => {\n const currentForkState = new Map>();\n currentForkStateList.forEach(doc => currentForkState.set((doc as any)[primaryPath], doc));\n return Promise.all(\n docIds.map(async (docId) => {\n const forkStateFullDoc: RxDocumentData | undefined = currentForkState.get(docId);\n const forkStateDocData: WithDeletedAndAttachments | undefined = forkStateFullDoc\n ? writeDocToDocState(forkStateFullDoc, state.hasAttachments, false)\n : undefined\n ;\n const masterState = downDocsById[docId];\n const assumedMaster = assumedMasterState[docId];\n\n if (\n assumedMaster &&\n forkStateFullDoc &&\n assumedMaster.metaDocument.isResolvedConflict === forkStateFullDoc._rev\n ) {\n /**\n * The current fork state represents a resolved conflict\n * that first must be send to the master in the upstream.\n * All conflicts are resolved by the upstream.\n */\n // return PROMISE_RESOLVE_VOID;\n await state.streamQueue.up;\n }\n\n let isAssumedMasterEqualToForkState = !assumedMaster || !forkStateDocData ?\n false :\n await state.input.conflictHandler({\n realMasterState: assumedMaster.docData,\n newDocumentState: forkStateDocData\n }, 'downstream-check-if-equal-0').then(r => r.isEqual);\n if (\n !isAssumedMasterEqualToForkState &&\n (\n assumedMaster &&\n (assumedMaster.docData as any)._rev &&\n forkStateFullDoc &&\n forkStateFullDoc._meta[state.input.identifier] &&\n getHeightOfRevision(forkStateFullDoc._rev) === forkStateFullDoc._meta[state.input.identifier]\n )\n ) {\n 
isAssumedMasterEqualToForkState = true;\n }\n if (\n (\n forkStateFullDoc &&\n assumedMaster &&\n isAssumedMasterEqualToForkState === false\n ) ||\n (\n forkStateFullDoc && !assumedMaster\n )\n ) {\n /**\n * We have a non-upstream-replicated\n * local write to the fork.\n * This means we ignore the downstream of this document\n * because anyway the upstream will first resolve the conflict.\n */\n return PROMISE_RESOLVE_VOID;\n }\n\n const areStatesExactlyEqual = !forkStateDocData\n ? false\n : await state.input.conflictHandler(\n {\n realMasterState: masterState,\n newDocumentState: forkStateDocData\n },\n 'downstream-check-if-equal-1'\n ).then(r => r.isEqual);\n if (\n forkStateDocData &&\n areStatesExactlyEqual\n ) {\n /**\n * Document states are exactly equal.\n * This can happen when the replication is shut down\n * unexpected like when the user goes offline.\n *\n * Only when the assumedMaster is different from the forkState,\n * we have to patch the document in the meta instance.\n */\n if (\n !assumedMaster ||\n isAssumedMasterEqualToForkState === false\n ) {\n useMetaWriteRows.push(\n await getMetaWriteRow(\n state,\n forkStateDocData,\n assumedMaster ? assumedMaster.metaDocument : undefined\n )\n );\n }\n return PROMISE_RESOLVE_VOID;\n }\n\n /**\n * All other master states need to be written to the forkInstance\n * and metaInstance.\n */\n const newForkState = Object.assign(\n {},\n masterState,\n forkStateFullDoc ? {\n _meta: flatClone(forkStateFullDoc._meta),\n _attachments: state.hasAttachments && masterState._attachments ? masterState._attachments : {},\n _rev: getDefaultRevision()\n } : {\n _meta: {\n lwt: now()\n },\n _rev: getDefaultRevision(),\n _attachments: state.hasAttachments && masterState._attachments ? 
masterState._attachments : {}\n }\n );\n /**\n * If the remote works with revisions,\n * we store the height of the next fork-state revision\n * inside of the documents meta data.\n * By doing so we can filter it out in the upstream\n * and detect the document as being equal to master or not.\n * This is used for example in the CouchDB replication plugin.\n */\n if ((masterState as any)._rev) {\n const nextRevisionHeight = !forkStateFullDoc ? 1 : getHeightOfRevision(forkStateFullDoc._rev) + 1;\n newForkState._meta[state.input.identifier] = nextRevisionHeight;\n if (state.input.keepMeta) {\n newForkState._rev = (masterState as any)._rev;\n }\n }\n if (\n state.input.keepMeta &&\n (masterState as any)._meta\n ) {\n newForkState._meta = (masterState as any)._meta;\n }\n\n const forkWriteRow = {\n previous: forkStateFullDoc,\n document: newForkState\n };\n\n forkWriteRow.document._rev = forkWriteRow.document._rev ? forkWriteRow.document._rev : createRevision(\n identifierHash,\n forkWriteRow.previous\n );\n writeRowsToFork.push(forkWriteRow);\n writeRowsToForkById[docId] = forkWriteRow;\n writeRowsToMeta[docId] = await getMetaWriteRow(\n state,\n masterState,\n assumedMaster ? 
assumedMaster.metaDocument : undefined\n );\n })\n );\n }).then(async () => {\n if (writeRowsToFork.length > 0) {\n return state.input.forkInstance.bulkWrite(\n writeRowsToFork,\n await state.downstreamBulkWriteFlag\n ).then((forkWriteResult) => {\n forkWriteResult.success.forEach(doc => {\n const docId = (doc as any)[primaryPath];\n state.events.processed.down.next(writeRowsToForkById[docId]);\n useMetaWriteRows.push(writeRowsToMeta[docId]);\n });\n forkWriteResult.error.forEach(error => {\n /**\n * We do not have to care about downstream conflict errors here\n * because on conflict, it will be solved locally and result in another write.\n */\n if (error.status === 409) {\n return;\n }\n // other non-conflict errors must be handled\n state.events.error.next(newRxError('RC_PULL', {\n writeError: error\n }));\n });\n });\n }\n }).then(() => {\n if (useMetaWriteRows.length > 0) {\n return state.input.metaInstance.bulkWrite(\n stripAttachmentsDataFromMetaWriteRows(state, useMetaWriteRows),\n 'replication-down-write-meta'\n ).then(metaWriteResult => {\n metaWriteResult.error\n .forEach(writeError => {\n state.events.error.next(newRxError('RC_PULL', {\n id: writeError.documentId,\n writeError\n }));\n });\n });\n }\n }).then(() => {\n /**\n * For better performance we do not await checkpoint writes,\n * but to ensure order on parallel checkpoint writes,\n * we have to use a queue.\n */\n setCheckpoint(\n state,\n 'down',\n useCheckpoint\n );\n });\n }).catch(unhandledError => state.events.error.next(unhandledError));\n return persistenceQueue;\n 
}\n}\n"],"mappings":";;;;;;AAAA,IAAAA,KAAA,GAAAC,OAAA;AAKA,IAAAC,QAAA,GAAAD,OAAA;AACA,IAAAE,gBAAA,GAAAF,OAAA;AAYA,IAAAG,MAAA,GAAAH,OAAA;AAUA,IAAAI,WAAA,GAAAJ,OAAA;AAIA,IAAAK,OAAA,GAAAL,OAAA;AAIA,IAAAM,aAAA,GAAAN,OAAA;AAKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACO,eAAeO,0BAA0BA,CAC5CC,KAAmD,EACrD;EACE,IACIA,KAAK,CAACC,KAAK,CAACC,iBAAiB,IAC7BF,KAAK,CAACC,KAAK,CAACC,iBAAiB,CAACC,UAAU,EAC1C;IACE,IAAMC,aAAa,GAAG,MAAM,IAAAC,gCAAoB,EAACL,KAAK,EAAE,MAAM,CAAC;IAC/D,IAAI,CAACI,aAAa,EAAE;MAChB,MAAM,IAAAE,yBAAa,EACfN,KAAK,EACL,MAAM,EACNA,KAAK,CAACC,KAAK,CAACC,iBAAiB,CAACC,UAClC,CAAC;IACL;EACJ;EAEA,IAAMI,cAAc,GAAG,MAAMP,KAAK,CAACC,KAAK,CAACO,YAAY,CAACR,KAAK,CAACC,KAAK,CAACQ,UAAU,CAAC;EAC7E,IAAMC,kBAAkB,GAAGV,KAAK,CAACC,KAAK,CAACS,kBAAkB;;EAEzD;EACA,IAAIC,KAAK,GAAG,CAAC;EAQb,IAAMC,SAAyB,GAAG,EAAE;EAGpC,SAASC,UAAUA,CAACC,IAAU,EAAQ;IAClCd,KAAK,CAACe,KAAK,CAACC,IAAI,CAACH,UAAU,GAAGb,KAAK,CAACe,KAAK,CAACC,IAAI,CAACH,UAAU,GAAG,CAAC;IAC7D,IAAMI,YAAY,GAAG;MACjBC,IAAI,EAAEP,KAAK,EAAE;MACbG;IACJ,CAAC;IACDF,SAAS,CAACO,IAAI,CAACF,YAAY,CAAC;IAC5BjB,KAAK,CAACoB,WAAW,CAACJ,IAAI,GAAGhB,KAAK,CAACoB,WAAW,CAACJ,IAAI,CAC1CK,IAAI,CAAC,MAAM;MACR,IAAMC,QAAgB,GAAG,EAAE;MAC3B,OAAOV,SAAS,CAACW,MAAM,GAAG,CAAC,EAAE;QACzBvB,KAAK,CAACwB,MAAM,CAACC,MAAM,CAACT,IAAI,CAACU,IAAI,CAAC,IAAI,CAAC;QACnC,IAAMC,iBAAiB,GAAG,IAAAC,qBAAc,EAAChB,SAAS,CAACiB,KAAK,CAAC,CAAC,CAAC;;QAE3D;AACpB;AACA;AACA;QACoB,IAAIF,iBAAiB,CAACT,IAAI,GAAGY,8BAA8B,EAAE;UACzD;QACJ;QAEA,IAAIH,iBAAiB,CAACb,IAAI,KAAK,QAAQ,EAAE;UACrC,IAAIQ,QAAQ,CAACC,MAAM,KAAK,CAAC,EAAE;YACvBD,QAAQ,CAACH,IAAI,CAACQ,iBAAiB,CAACb,IAAI,CAAC;YACrC;UACJ,CAAC,MAAM;YACH;UACJ;QACJ;QAEAQ,QAAQ,CAACH,IAAI,CAACQ,iBAAiB,CAACb,IAAI,CAAC;MACzC;MACA,IAAIQ,QAAQ,CAACC,MAAM,KAAK,CAAC,EAAE;QACvB;MACJ;MAEA,IAAID,QAAQ,CAAC,CAAC,CAAC,KAAK,QAAQ,EAAE;QAC1B,OAAOS,oBAAoB,CAAC,CAAC;MACjC,CAAC,MAAM;QACH,OAAOC,wBAAwB,CAACV,QAAQ,CAAC;MAC7C;IACJ,CAAC,CAAC,CAACD,IAAI,CAAC,MAAM;MACVrB,KAAK,CAACwB,MAAM,CAACC,MAAM,CAACT,IAAI,CAACU,IAAI,CAAC,KAAK,CAAC;MACpC,IACI,CAAC1B,KAAK,CAACiC,aAAa,CAACjB,IAAI,CAACkB,
QAAQ,CAAC,CAAC,IACpC,CAAClC,KAAK,CAACwB,MAAM,CAACW,QAAQ,CAACD,QAAQ,CAAC,CAAC,EACnC;QACElC,KAAK,CAACiC,aAAa,CAACjB,IAAI,CAACU,IAAI,CAAC,IAAI,CAAC;MACvC;IACJ,CAAC,CAAC;EACV;EACAb,UAAU,CAAC,QAAQ,CAAC;;EAEpB;AACJ;AACA;AACA;AACA;EACI,IAAI,CAACb,KAAK,CAACwB,MAAM,CAACW,QAAQ,CAACD,QAAQ,CAAC,CAAC,EAAE;IACnC,IAAME,GAAG,GAAG1B,kBAAkB,CACzB2B,mBAAmB,CACnBC,IAAI,CACD,IAAAC,cAAQ,EAAC,MAAOC,EAAE,IAAK;MACnB;AACpB;AACA;AACA;MACoB,MAAM,IAAAC,oBAAc,EAChBzC,KAAK,CAACwB,MAAM,CAACC,MAAM,CAACiB,EAAE,CAACJ,IAAI,CAAC,IAAAK,YAAM,EAACC,CAAC,IAAI,CAACA,CAAC,CAAC,CAC/C,CAAC;MACD,OAAOJ,EAAE;IACb,CAAC,CACL,CAAC,CACAK,SAAS,CAAE/B,IAAU,IAAK;MACvBd,KAAK,CAACe,KAAK,CAACC,IAAI,CAAC8B,sBAAsB,GAAG9C,KAAK,CAACe,KAAK,CAACC,IAAI,CAAC8B,sBAAsB,GAAG,CAAC;MACrFjC,UAAU,CAACC,IAAI,CAAC;IACpB,CAAC,CAAC;IACN,IAAA2B,oBAAc,EACVzC,KAAK,CAACwB,MAAM,CAACW,QAAQ,CAACG,IAAI,CACtB,IAAAK,YAAM,EAACR,QAAQ,IAAI,CAAC,CAACA,QAAQ,CACjC,CACJ,CAAC,CAACd,IAAI,CAAC,MAAMe,GAAG,CAACW,WAAW,CAAC,CAAC,CAAC;EACnC;;EAGA;AACJ;AACA;AACA;EACI,IAAIjB,8BAAsC,GAAG,CAAC,CAAC;EAC/C,eAAeC,oBAAoBA,CAAA,EAAG;IAClC/B,KAAK,CAACe,KAAK,CAACC,IAAI,CAACe,oBAAoB,GAAG/B,KAAK,CAACe,KAAK,CAACC,IAAI,CAACe,oBAAoB,GAAG,CAAC;IACjF,IAAI/B,KAAK,CAACwB,MAAM,CAACW,QAAQ,CAACD,QAAQ,CAAC,CAAC,EAAE;MAClC;IACJ;IAEAlC,KAAK,CAACgD,eAAe,GAAGhD,KAAK,CAACgD,eAAe,CAAC3B,IAAI,CAAC,MAAM,IAAAhB,gCAAoB,EAACL,KAAK,EAAE,MAAM,CAAC,CAAC;IAC7F,IAAIiD,cAA8B,GAAG,MAAMjD,KAAK,CAACgD,eAAe;IAGhE,IAAME,QAAwB,GAAG,EAAE;IACnC,OAAO,CAAClD,KAAK,CAACwB,MAAM,CAACW,QAAQ,CAACD,QAAQ,CAAC,CAAC,EAAE;MACtCJ,8BAA8B,GAAGnB,KAAK,EAAE;MACxC,IAAMwC,UAAU,GAAG,MAAMzC,kBAAkB,CAAC0C,kBAAkB,CAC1DH,cAAc,EACdjD,KAAK,CAACC,KAAK,CAACoD,aAChB,CAAC;MAED,IAAIF,UAAU,CAACG,SAAS,CAAC/B,MAAM,KAAK,CAAC,EAAE;QACnC;MACJ;MAEA0B,cAAc,GAAG,IAAAM,iCAAgB,EAAC,CAACN,cAAc,EAAEE,UAAU,CAACK,UAAU,CAAC,CAAC;MAE1EN,QAAQ,CAAC/B,IAAI,CACTsC,iBAAiB,CACbN,UAAU,CAACG,SAAS,EACpBL,cACJ,CACJ,CAAC;;MAED;AACZ;AACA;AACA;AACA;MACY,IAAIE,UAAU,CAACG,SAAS,CAAC/B,MAAM,GAAGvB,KAAK,CAACC,KAAK,CAACoD,aAAa,EAAE;QACzD;MACJ;IAEJ;IACA,MAAMK,OAAO,CAACC,GAAG,CAACT,
QAAQ,CAAC;EAC/B;EAGA,SAASlB,wBAAwBA,CAAC4B,KAAa,EAAE;IAC7C5D,KAAK,CAACe,KAAK,CAACC,IAAI,CAACgB,wBAAwB,GAAGhC,KAAK,CAACe,KAAK,CAACC,IAAI,CAACgB,wBAAwB,GAAG,CAAC;IACzF,IAAM6B,cAAwC,GAAG,EAAE;IACnD,IAAIZ,cAA0C,GAAG,IAAW;IAE5DW,KAAK,CAACE,OAAO,CAAChD,IAAI,IAAI;MAClB,IAAIA,IAAI,KAAK,QAAQ,EAAE;QACnB,MAAM,IAAIiD,KAAK,CAAC,KAAK,CAAC;MAC1B;MACA,IAAAC,oBAAa,EAACH,cAAc,EAAE/C,IAAI,CAACwC,SAAS,CAAC;MAC7CL,cAAc,GAAG,IAAAM,iCAAgB,EAAC,CAACN,cAAc,EAAEnC,IAAI,CAAC0C,UAAU,CAAC,CAAC;IACxE,CAAC,CAAC;IACF,OAAOC,iBAAiB,CACpBI,cAAc,EACd,IAAAjC,qBAAc,EAACqB,cAAc,CACjC,CAAC;EACL;;EAGA;AACJ;AACA;AACA;AACA;AACA;AACA;AACA;EACI,IAAIgB,gBAAgB,GAAGC,2BAAoB;EAC3C,IAAMC,sBAGL,GAAG;IACAC,IAAI,EAAE,CAAC;EACX,CAAC;EAED,SAASX,iBAAiBA,CACtBW,IAA8B,EAC9BZ,UAA0B,EACb;IACb,IAAMa,WAAW,GAAGrE,KAAK,CAACqE,WAAW;IACrCrE,KAAK,CAACe,KAAK,CAACC,IAAI,CAACyC,iBAAiB,GAAGzD,KAAK,CAACe,KAAK,CAACC,IAAI,CAACyC,iBAAiB,GAAG,CAAC;;IAE3E;AACR;AACA;IACQW,IAAI,CAACN,OAAO,CAACQ,OAAO,IAAI;MACpB,IAAMC,KAAa,GAAID,OAAO,CAASD,WAAW,CAAC;MACnDF,sBAAsB,CAACC,IAAI,CAACG,KAAK,CAAC,GAAGD,OAAO;IAChD,CAAC,CAAC;IACFH,sBAAsB,CAACX,UAAU,GAAGA,UAAU;;IAE9C;AACR;AACA;AACA;IACQS,gBAAgB,GAAGA,gBAAgB,CAAC5C,IAAI,CAAC,MAAM;MAE3C,IAAMmD,YAAwD,GAAGL,sBAAsB,CAACC,IAAI;MAC5FD,sBAAsB,CAACC,IAAI,GAAG,CAAC,CAAC;MAChC,IAAMK,aAAa,GAAGN,sBAAsB,CAACX,UAAU;MACvD,IAAMkB,MAAM,GAAGC,MAAM,CAACC,IAAI,CAACJ,YAAY,CAAC;MAExC,IACIxE,KAAK,CAACwB,MAAM,CAACW,QAAQ,CAACD,QAAQ,CAAC,CAAC,IAChCwC,MAAM,CAACnD,MAAM,KAAK,CAAC,EACrB;QACE,OAAO2C,2BAAoB;MAC/B;MAEA,IAAMW,eAA0C,GAAG,EAAE;MACrD,IAAMC,mBAAkD,GAAG,CAAC,CAAC;MAC7D,IAAMC,eAAsF,GAAG,CAAC,CAAC;MACjG,IAAMC,gBAAqF,GAAG,EAAE;MAEhG,OAAOtB,OAAO,CAACC,GAAG,CAAC,CACf3D,KAAK,CAACC,KAAK,CAACgF,YAAY,CAACC,iBAAiB,CAACR,MAAM,EAAE,IAAI,CAAC,EACxD,IAAAS,mCAAqB,EACjBnF,KAAK,EACL0E,MACJ,CAAC,CACJ,CAAC,CAACrD,IAAI,CAAC,CAAC,CACL+D,oBAAoB,EACpBC,kBAAkB,CACrB,KAAK;QACF,IAAMC,gBAAgB,GAAG,IAAIC,GAAG,CAAoC,CAAC;QACrEH,oBAAoB,CAACtB,OAAO,CAAC0B,GAAG,IAAIF,gBAAgB,CAACG,GAAG,CAAED,GAAG,CAASnB,WAAW,CAAC,EAAEmB,GAAG,CAAC,CAAC;QACzF,OAAO9B,OAAO,CAACC,GA
AG,CACde,MAAM,CAACgB,GAAG,CAAC,MAAOnB,KAAK,IAAK;UACxB,IAAMoB,gBAAuD,GAAGL,gBAAgB,CAACM,GAAG,CAACrB,KAAK,CAAC;UAC3F,IAAMsB,gBAAkE,GAAGF,gBAAgB,GACrF,IAAAG,0BAAkB,EAACH,gBAAgB,EAAE3F,KAAK,CAAC+F,cAAc,EAAE,KAAK,CAAC,GACjEC,SAAS;UAEf,IAAMC,WAAW,GAAGzB,YAAY,CAACD,KAAK,CAAC;UACvC,IAAM2B,aAAa,GAAGb,kBAAkB,CAACd,KAAK,CAAC;UAE/C,IACI2B,aAAa,IACbP,gBAAgB,IAChBO,aAAa,CAACC,YAAY,CAACC,kBAAkB,KAAKT,gBAAgB,CAACU,IAAI,EACzE;YACE;AAC5B;AACA;AACA;AACA;YAC4B;YACA,MAAMrG,KAAK,CAACoB,WAAW,CAACsB,EAAE;UAC9B;UAEA,IAAI4D,+BAA+B,GAAG,CAACJ,aAAa,IAAI,CAACL,gBAAgB,GACrE,KAAK,GACL,MAAM7F,KAAK,CAACC,KAAK,CAACsG,eAAe,CAAC;YAC9BC,eAAe,EAAEN,aAAa,CAAC5B,OAAO;YACtCmC,gBAAgB,EAAEZ;UACtB,CAAC,EAAE,6BAA6B,CAAC,CAACxE,IAAI,CAACqF,CAAC,IAAIA,CAAC,CAACC,OAAO,CAAC;UAC1D,IACI,CAACL,+BAA+B,IAE5BJ,aAAa,IACZA,aAAa,CAAC5B,OAAO,CAAS+B,IAAI,IACnCV,gBAAgB,IAChBA,gBAAgB,CAACiB,KAAK,CAAC5G,KAAK,CAACC,KAAK,CAACQ,UAAU,CAAC,IAC9C,IAAAoG,0BAAmB,EAAClB,gBAAgB,CAACU,IAAI,CAAC,KAAKV,gBAAgB,CAACiB,KAAK,CAAC5G,KAAK,CAACC,KAAK,CAACQ,UAAU,CAC/F,EACH;YACE6F,+BAA+B,GAAG,IAAI;UAC1C;UACA,IAEQX,gBAAgB,IAChBO,aAAa,IACbI,+BAA+B,KAAK,KAAK,IAGzCX,gBAAgB,IAAI,CAACO,aACxB,EACH;YACE;AAC5B;AACA;AACA;AACA;AACA;YAC4B,OAAOhC,2BAAoB;UAC/B;UAEA,IAAM4C,qBAAqB,GAAG,CAACjB,gBAAgB,GACzC,KAAK,GACL,MAAM7F,KAAK,CAACC,KAAK,CAACsG,eAAe,CAC/B;YACIC,eAAe,EAAEP,WAAW;YAC5BQ,gBAAgB,EAAEZ;UACtB,CAAC,EACD,6BACJ,CAAC,CAACxE,IAAI,CAACqF,CAAC,IAAIA,CAAC,CAACC,OAAO,CAAC;UAC1B,IACId,gBAAgB,IAChBiB,qBAAqB,EACvB;YACE;AAC5B;AACA;AACA;AACA;AACA;AACA;AACA;YAC4B,IACI,CAACZ,aAAa,IACdI,+BAA+B,KAAK,KAAK,EAC3C;cACEtB,gBAAgB,CAAC7D,IAAI,CACjB,MAAM,IAAA4F,6BAAe,EACjB/G,KAAK,EACL6F,gBAAgB,EAChBK,aAAa,GAAGA,aAAa,CAACC,YAAY,GAAGH,SACjD,CACJ,CAAC;YACL;YACA,OAAO9B,2BAAoB;UAC/B;;UAEA;AACxB;AACA;AACA;UACwB,IAAM8C,YAAY,GAAGrC,MAAM,CAACsC,MAAM,CAC9B,CAAC,CAAC,EACFhB,WAAW,EACXN,gBAAgB,GAAG;YACfiB,KAAK,EAAE,IAAAM,gBAAS,EAACvB,gBAAgB,CAACiB,KAAK,CAAC;YACxCO,YAAY,EAAEnH,KAAK,CAAC+F,cAAc,IAAIE,WAAW,CAACkB,YAAY,GAAGlB,WAAW,CAACkB,YAAY,GAAG,CAAC,CAAC;YAC9Fd,IAAI,EAAE,IAAAe,yBAAkB,EAAC;UAC7B
,CAAC,GAAG;YACAR,KAAK,EAAE;cACHS,GAAG,EAAE,IAAAC,UAAG,EAAC;YACb,CAAC;YACDjB,IAAI,EAAE,IAAAe,yBAAkB,EAAC,CAAC;YAC1BD,YAAY,EAAEnH,KAAK,CAAC+F,cAAc,IAAIE,WAAW,CAACkB,YAAY,GAAGlB,WAAW,CAACkB,YAAY,GAAG,CAAC;UACjG,CACJ,CAAC;UACD;AACxB;AACA;AACA;AACA;AACA;AACA;AACA;UACwB,IAAKlB,WAAW,CAASI,IAAI,EAAE;YAC3B,IAAMkB,kBAAkB,GAAG,CAAC5B,gBAAgB,GAAG,CAAC,GAAG,IAAAkB,0BAAmB,EAAClB,gBAAgB,CAACU,IAAI,CAAC,GAAG,CAAC;YACjGW,YAAY,CAACJ,KAAK,CAAC5G,KAAK,CAACC,KAAK,CAACQ,UAAU,CAAC,GAAG8G,kBAAkB;YAC/D,IAAIvH,KAAK,CAACC,KAAK,CAACuH,QAAQ,EAAE;cACtBR,YAAY,CAACX,IAAI,GAAIJ,WAAW,CAASI,IAAI;YACjD;UACJ;UACA,IACIrG,KAAK,CAACC,KAAK,CAACuH,QAAQ,IACnBvB,WAAW,CAASW,KAAK,EAC5B;YACEI,YAAY,CAACJ,KAAK,GAAIX,WAAW,CAASW,KAAK;UACnD;UAEA,IAAMa,YAAY,GAAG;YACjBC,QAAQ,EAAE/B,gBAAgB;YAC1BgC,QAAQ,EAAEX;UACd,CAAC;UAEDS,YAAY,CAACE,QAAQ,CAACtB,IAAI,GAAGoB,YAAY,CAACE,QAAQ,CAACtB,IAAI,GAAGoB,YAAY,CAACE,QAAQ,CAACtB,IAAI,GAAG,IAAAuB,qBAAc,EACjGrH,cAAc,EACdkH,YAAY,CAACC,QACjB,CAAC;UACD7C,eAAe,CAAC1D,IAAI,CAACsG,YAAY,CAAC;UAClC3C,mBAAmB,CAACP,KAAK,CAAC,GAAGkD,YAAY;UACzC1C,eAAe,CAACR,KAAK,CAAC,GAAG,MAAM,IAAAwC,6BAAe,EAC1C/G,KAAK,EACLiG,WAAW,EACXC,aAAa,GAAGA,aAAa,CAACC,YAAY,GAAGH,SACjD,CAAC;QACL,CAAC,CACL,CAAC;MACL,CAAC,CAAC,CAAC3E,IAAI,CAAC,YAAY;QAChB,IAAIwD,eAAe,CAACtD,MAAM,GAAG,CAAC,EAAE;UAC5B,OAAOvB,KAAK,CAACC,KAAK,CAACgF,YAAY,CAAC4C,SAAS,CACrChD,eAAe,EACf,MAAM7E,KAAK,CAAC8H,uBAChB,CAAC,CAACzG,IAAI,CAAE0G,eAAe,IAAK;YACxBA,eAAe,CAACC,OAAO,CAAClE,OAAO,CAAC0B,GAAG,IAAI;cACnC,IAAMjB,KAAK,GAAIiB,GAAG,CAASnB,WAAW,CAAC;cACvCrE,KAAK,CAACwB,MAAM,CAACyG,SAAS,CAACjH,IAAI,CAACU,IAAI,CAACoD,mBAAmB,CAACP,KAAK,CAAC,CAAC;cAC5DS,gBAAgB,CAAC7D,IAAI,CAAC4D,eAAe,CAACR,KAAK,CAAC,CAAC;YACjD,CAAC,CAAC;YACFwD,eAAe,CAACG,KAAK,CAACpE,OAAO,CAACoE,KAAK,IAAI;cACnC;AAC5B;AACA;AACA;cAC4B,IAAIA,KAAK,CAACC,MAAM,KAAK,GAAG,EAAE;gBACtB;cACJ;cACA;cACAnI,KAAK,CAACwB,MAAM,CAAC0G,KAAK,CAACxG,IAAI,CAAC,IAAA0G,mBAAU,EAAC,SAAS,EAAE;gBAC1CC,UAAU,EAAEH;cAChB,CAAC,CAAC,CAAC;YACP,CAAC,CAAC;UACN,CAAC,CAAC;QACN;MACJ,CAAC,CAAC,CAAC7G,IAAI,CAAC,MAAM;QACV,IAAI2D,gBAAgB,CA
ACzD,MAAM,GAAG,CAAC,EAAE;UAC7B,OAAOvB,KAAK,CAACC,KAAK,CAACqI,YAAY,CAACT,SAAS,CACrC,IAAAU,6CAAqC,EAACvI,KAAK,EAAEgF,gBAAgB,CAAC,EAC9D,6BACJ,CAAC,CAAC3D,IAAI,CAACmH,eAAe,IAAI;YACtBA,eAAe,CAACN,KAAK,CAChBpE,OAAO,CAACuE,UAAU,IAAI;cACnBrI,KAAK,CAACwB,MAAM,CAAC0G,KAAK,CAACxG,IAAI,CAAC,IAAA0G,mBAAU,EAAC,SAAS,EAAE;gBAC1CK,EAAE,EAAEJ,UAAU,CAACK,UAAU;gBACzBL;cACJ,CAAC,CAAC,CAAC;YACP,CAAC,CAAC;UACV,CAAC,CAAC;QACN;MACJ,CAAC,CAAC,CAAChH,IAAI,CAAC,MAAM;QACV;AAChB;AACA;AACA;AACA;QACgB,IAAAf,yBAAa,EACTN,KAAK,EACL,MAAM,EACNyE,aACJ,CAAC;MACL,CAAC,CAAC;IACN,CAAC,CAAC,CAACkE,KAAK,CAACC,cAAc,IAAI5I,KAAK,CAACwB,MAAM,CAAC0G,KAAK,CAACxG,IAAI,CAACkH,cAAc,CAAC,CAAC;IACnE,OAAO3E,gBAAgB;EAC3B;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/replication-protocol/helper.js b/dist/cjs/replication-protocol/helper.js deleted file mode 100644 index 3554865c9a7..00000000000 --- a/dist/cjs/replication-protocol/helper.js +++ /dev/null @@ -1,58 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.docStateToWriteDoc = docStateToWriteDoc; -exports.getUnderlyingPersistentStorage = getUnderlyingPersistentStorage; -exports.stripAttachmentsDataFromMetaWriteRows = stripAttachmentsDataFromMetaWriteRows; -exports.writeDocToDocState = writeDocToDocState; -var _index = require("../plugins/utils/index.js"); -var _rxStorageHelper = require("../rx-storage-helper.js"); -function docStateToWriteDoc(databaseInstanceToken, hasAttachments, keepMeta, docState, previous) { - var docData = Object.assign({}, docState, { - _attachments: hasAttachments && docState._attachments ? docState._attachments : {}, - _meta: keepMeta ? docState._meta : Object.assign({}, previous ? previous._meta : {}, { - lwt: (0, _index.now)() - }), - _rev: keepMeta ? 
docState._rev : (0, _index.getDefaultRevision)() - }); - if (!docData._rev) { - docData._rev = (0, _index.createRevision)(databaseInstanceToken, previous); - } - return docData; -} -function writeDocToDocState(writeDoc, keepAttachments, keepMeta) { - var ret = (0, _index.flatClone)(writeDoc); - if (!keepAttachments) { - delete ret._attachments; - } - if (!keepMeta) { - delete ret._meta; - delete ret._rev; - } - return ret; -} -function stripAttachmentsDataFromMetaWriteRows(state, rows) { - if (!state.hasAttachments) { - return rows; - } - return rows.map(row => { - var document = (0, _index.clone)(row.document); - document.docData = (0, _rxStorageHelper.stripAttachmentsDataFromDocument)(document.docData); - return { - document, - previous: row.previous - }; - }); -} -function getUnderlyingPersistentStorage(instance) { - while (true) { - if (instance.underlyingPersistentStorage) { - instance = instance.underlyingPersistentStorage; - } else { - return instance; - } - } -} -//# sourceMappingURL=helper.js.map \ No newline at end of file diff --git a/dist/cjs/replication-protocol/helper.js.map b/dist/cjs/replication-protocol/helper.js.map deleted file mode 100644 index bf02f9ce31b..00000000000 --- a/dist/cjs/replication-protocol/helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"helper.js","names":["_index","require","_rxStorageHelper","docStateToWriteDoc","databaseInstanceToken","hasAttachments","keepMeta","docState","previous","docData","Object","assign","_attachments","_meta","lwt","now","_rev","getDefaultRevision","createRevision","writeDocToDocState","writeDoc","keepAttachments","ret","flatClone","stripAttachmentsDataFromMetaWriteRows","state","rows","map","row","document","clone","stripAttachmentsDataFromDocument","getUnderlyingPersistentStorage","instance","underlyingPersistentStorage"],"sources":["../../../src/replication-protocol/helper.ts"],"sourcesContent":["import type {\n BulkWriteRow,\n RxDocumentData,\n RxDocumentWriteData,\n 
RxStorageInstance,\n RxStorageInstanceReplicationState,\n RxStorageReplicationMeta,\n WithDeletedAndAttachments\n} from '../types/index.d.ts';\nimport {\n clone,\n createRevision,\n flatClone,\n getDefaultRevision,\n now\n} from '../plugins/utils/index.ts';\nimport { stripAttachmentsDataFromDocument } from '../rx-storage-helper.ts';\n\nexport function docStateToWriteDoc(\n databaseInstanceToken: string,\n hasAttachments: boolean,\n keepMeta: boolean,\n docState: WithDeletedAndAttachments,\n previous?: RxDocumentData\n): RxDocumentWriteData {\n const docData: RxDocumentWriteData = Object.assign(\n {},\n docState,\n {\n _attachments: hasAttachments && docState._attachments ? docState._attachments : {},\n _meta: keepMeta ? (docState as any)._meta : Object.assign(\n {},\n previous ? previous._meta : {},\n {\n lwt: now()\n }\n ),\n _rev: keepMeta ? (docState as any)._rev : getDefaultRevision()\n }\n );\n if (!docData._rev) {\n docData._rev = createRevision(\n databaseInstanceToken,\n previous\n );\n }\n\n return docData;\n}\n\nexport function writeDocToDocState(\n writeDoc: RxDocumentData,\n keepAttachments: boolean,\n keepMeta: boolean\n): WithDeletedAndAttachments {\n const ret = flatClone(writeDoc);\n\n if (!keepAttachments) {\n delete (ret as any)._attachments;\n }\n if (!keepMeta) {\n delete (ret as any)._meta;\n delete (ret as any)._rev;\n }\n return ret;\n}\n\n\nexport function stripAttachmentsDataFromMetaWriteRows(\n state: RxStorageInstanceReplicationState,\n rows: BulkWriteRow>[]\n): BulkWriteRow>[] {\n if (!state.hasAttachments) {\n return rows;\n }\n return rows.map(row => {\n const document = clone(row.document);\n document.docData = stripAttachmentsDataFromDocument(document.docData);\n return {\n document,\n previous: row.previous\n };\n });\n}\n\nexport function getUnderlyingPersistentStorage(\n instance: RxStorageInstance\n): RxStorageInstance {\n while (true) {\n if (instance.underlyingPersistentStorage) {\n instance = 
instance.underlyingPersistentStorage;\n } else {\n return instance;\n }\n }\n}\n"],"mappings":";;;;;;;;;AASA,IAAAA,MAAA,GAAAC,OAAA;AAOA,IAAAC,gBAAA,GAAAD,OAAA;AAEO,SAASE,kBAAkBA,CAC9BC,qBAA6B,EAC7BC,cAAuB,EACvBC,QAAiB,EACjBC,QAA8C,EAC9CC,QAAoC,EACN;EAC9B,IAAMC,OAAuC,GAAGC,MAAM,CAACC,MAAM,CACzD,CAAC,CAAC,EACFJ,QAAQ,EACR;IACIK,YAAY,EAAEP,cAAc,IAAIE,QAAQ,CAACK,YAAY,GAAGL,QAAQ,CAACK,YAAY,GAAG,CAAC,CAAC;IAClFC,KAAK,EAAEP,QAAQ,GAAIC,QAAQ,CAASM,KAAK,GAAGH,MAAM,CAACC,MAAM,CACrD,CAAC,CAAC,EACFH,QAAQ,GAAGA,QAAQ,CAACK,KAAK,GAAG,CAAC,CAAC,EAC9B;MACIC,GAAG,EAAE,IAAAC,UAAG,EAAC;IACb,CACJ,CAAC;IACDC,IAAI,EAAEV,QAAQ,GAAIC,QAAQ,CAASS,IAAI,GAAG,IAAAC,yBAAkB,EAAC;EACjE,CACJ,CAAC;EACD,IAAI,CAACR,OAAO,CAACO,IAAI,EAAE;IACfP,OAAO,CAACO,IAAI,GAAG,IAAAE,qBAAc,EACzBd,qBAAqB,EACrBI,QACJ,CAAC;EACL;EAEA,OAAOC,OAAO;AAClB;AAEO,SAASU,kBAAkBA,CAC9BC,QAAmC,EACnCC,eAAwB,EACxBf,QAAiB,EACmB;EACpC,IAAMgB,GAAG,GAAG,IAAAC,gBAAS,EAACH,QAAQ,CAAC;EAE/B,IAAI,CAACC,eAAe,EAAE;IAClB,OAAQC,GAAG,CAASV,YAAY;EACpC;EACA,IAAI,CAACN,QAAQ,EAAE;IACX,OAAQgB,GAAG,CAAST,KAAK;IACzB,OAAQS,GAAG,CAASN,IAAI;EAC5B;EACA,OAAOM,GAAG;AACd;AAGO,SAASE,qCAAqCA,CACjDC,KAA6C,EAC7CC,IAA8D,EACN;EACxD,IAAI,CAACD,KAAK,CAACpB,cAAc,EAAE;IACvB,OAAOqB,IAAI;EACf;EACA,OAAOA,IAAI,CAACC,GAAG,CAACC,GAAG,IAAI;IACnB,IAAMC,QAAQ,GAAG,IAAAC,YAAK,EAACF,GAAG,CAACC,QAAQ,CAAC;IACpCA,QAAQ,CAACpB,OAAO,GAAG,IAAAsB,iDAAgC,EAACF,QAAQ,CAACpB,OAAO,CAAC;IACrE,OAAO;MACHoB,QAAQ;MACRrB,QAAQ,EAAEoB,GAAG,CAACpB;IAClB,CAAC;EACL,CAAC,CAAC;AACN;AAEO,SAASwB,8BAA8BA,CAC1CC,QAAqD,EACV;EAC3C,OAAO,IAAI,EAAE;IACT,IAAIA,QAAQ,CAACC,2BAA2B,EAAE;MACtCD,QAAQ,GAAGA,QAAQ,CAACC,2BAA2B;IACnD,CAAC,MAAM;MACH,OAAOD,QAAQ;IACnB;EACJ;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/replication-protocol/index.js b/dist/cjs/replication-protocol/index.js deleted file mode 100644 index 9b7bb51fc24..00000000000 --- a/dist/cjs/replication-protocol/index.js +++ /dev/null @@ -1,290 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -var 
_exportNames = { - replicateRxStorageInstance: true, - awaitRxStorageReplicationFirstInSync: true, - awaitRxStorageReplicationInSync: true, - awaitRxStorageReplicationIdle: true, - rxStorageInstanceToReplicationHandler: true, - cancelRxStorageReplication: true -}; -exports.awaitRxStorageReplicationFirstInSync = awaitRxStorageReplicationFirstInSync; -exports.awaitRxStorageReplicationIdle = awaitRxStorageReplicationIdle; -exports.awaitRxStorageReplicationInSync = awaitRxStorageReplicationInSync; -exports.cancelRxStorageReplication = cancelRxStorageReplication; -exports.replicateRxStorageInstance = replicateRxStorageInstance; -exports.rxStorageInstanceToReplicationHandler = rxStorageInstanceToReplicationHandler; -var _rxjs = require("rxjs"); -var _rxSchemaHelper = require("../rx-schema-helper.js"); -var _index = require("../plugins/utils/index.js"); -var _checkpoint = require("./checkpoint.js"); -Object.keys(_checkpoint).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _checkpoint[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _checkpoint[key]; - } - }); -}); -var _downstream = require("./downstream.js"); -Object.keys(_downstream).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _downstream[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _downstream[key]; - } - }); -}); -var _helper = require("./helper.js"); -Object.keys(_helper).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _helper[key]) return; - 
Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _helper[key]; - } - }); -}); -var _upstream = require("./upstream.js"); -Object.keys(_upstream).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _upstream[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _upstream[key]; - } - }); -}); -var _index2 = require("../plugins/attachments/index.js"); -var _rxStorageHelper = require("../rx-storage-helper.js"); -var _metaInstance = require("./meta-instance.js"); -Object.keys(_metaInstance).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _metaInstance[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _metaInstance[key]; - } - }); -}); -var _conflicts = require("./conflicts.js"); -Object.keys(_conflicts).forEach(function (key) { - if (key === "default" || key === "__esModule") return; - if (Object.prototype.hasOwnProperty.call(_exportNames, key)) return; - if (key in exports && exports[key] === _conflicts[key]) return; - Object.defineProperty(exports, key, { - enumerable: true, - get: function () { - return _conflicts[key]; - } - }); -}); -/** - * These files contain the replication protocol. - * It can be used to replicated RxStorageInstances or RxCollections - * or even to do a client(s)-server replication. 
- */ - -function replicateRxStorageInstance(input) { - input = (0, _index.flatClone)(input); - input.forkInstance = (0, _helper.getUnderlyingPersistentStorage)(input.forkInstance); - input.metaInstance = (0, _helper.getUnderlyingPersistentStorage)(input.metaInstance); - var checkpointKeyPromise = (0, _checkpoint.getCheckpointKey)(input); - var state = { - primaryPath: (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(input.forkInstance.schema.primaryKey), - hasAttachments: !!input.forkInstance.schema.attachments, - input, - checkpointKey: checkpointKeyPromise, - downstreamBulkWriteFlag: checkpointKeyPromise.then(checkpointKey => 'replication-downstream-' + checkpointKey), - events: { - canceled: new _rxjs.BehaviorSubject(false), - active: { - down: new _rxjs.BehaviorSubject(true), - up: new _rxjs.BehaviorSubject(true) - }, - processed: { - down: new _rxjs.Subject(), - up: new _rxjs.Subject() - }, - resolvedConflicts: new _rxjs.Subject(), - error: new _rxjs.Subject() - }, - stats: { - down: { - addNewTask: 0, - downstreamProcessChanges: 0, - downstreamResyncOnce: 0, - masterChangeStreamEmit: 0, - persistFromMaster: 0 - }, - up: { - forkChangeStreamEmit: 0, - persistToMaster: 0, - persistToMasterConflictWrites: 0, - persistToMasterHadConflicts: 0, - processTasks: 0, - upstreamInitialSync: 0 - } - }, - firstSyncDone: { - down: new _rxjs.BehaviorSubject(false), - up: new _rxjs.BehaviorSubject(false) - }, - streamQueue: { - down: _index.PROMISE_RESOLVE_VOID, - up: _index.PROMISE_RESOLVE_VOID - }, - checkpointQueue: _index.PROMISE_RESOLVE_VOID, - lastCheckpointDoc: {} - }; - (0, _downstream.startReplicationDownstream)(state); - (0, _upstream.startReplicationUpstream)(state); - return state; -} -function awaitRxStorageReplicationFirstInSync(state) { - return (0, _rxjs.firstValueFrom)((0, _rxjs.combineLatest)([state.firstSyncDone.down.pipe((0, _rxjs.filter)(v => !!v)), state.firstSyncDone.up.pipe((0, _rxjs.filter)(v => !!v))])).then(() => {}); -} -function 
awaitRxStorageReplicationInSync(replicationState) { - return Promise.all([replicationState.streamQueue.up, replicationState.streamQueue.down, replicationState.checkpointQueue]); -} -async function awaitRxStorageReplicationIdle(state) { - await awaitRxStorageReplicationFirstInSync(state); - while (true) { - var { - down, - up - } = state.streamQueue; - await Promise.all([up, down]); - /** - * If the Promises have not been reassigned - * after awaiting them, we know that the replication - * is in idle state at this point in time. - */ - if (down === state.streamQueue.down && up === state.streamQueue.up) { - return; - } - } -} -function rxStorageInstanceToReplicationHandler(instance, conflictHandler, databaseInstanceToken, -/** - * If set to true, - * the _meta.lwt from the pushed documents is kept. - * (Used in the migration to ensure checkpoints are still valid) - */ -keepMeta = false) { - instance = (0, _helper.getUnderlyingPersistentStorage)(instance); - var hasAttachments = !!instance.schema.attachments; - var primaryPath = (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(instance.schema.primaryKey); - var replicationHandler = { - masterChangeStream$: instance.changeStream().pipe((0, _rxjs.mergeMap)(async eventBulk => { - var ret = { - checkpoint: eventBulk.checkpoint, - documents: await Promise.all(eventBulk.events.map(async event => { - var docData = (0, _helper.writeDocToDocState)(event.documentData, hasAttachments, keepMeta); - if (hasAttachments) { - docData = await (0, _index2.fillWriteDataForAttachmentsChange)(primaryPath, instance, (0, _index.clone)(docData), - /** - * Notice that the master never knows - * the client state of the document. - * Therefore we always send all attachments data. 
- */ - undefined); - } - return docData; - })) - }; - return ret; - })), - masterChangesSince(checkpoint, batchSize) { - return (0, _rxStorageHelper.getChangedDocumentsSince)(instance, batchSize, checkpoint).then(async result => { - return { - checkpoint: result.documents.length > 0 ? result.checkpoint : checkpoint, - documents: await Promise.all(result.documents.map(async plainDocumentData => { - var docData = (0, _helper.writeDocToDocState)(plainDocumentData, hasAttachments, keepMeta); - if (hasAttachments) { - docData = await (0, _index2.fillWriteDataForAttachmentsChange)(primaryPath, instance, (0, _index.clone)(docData), - /** - * Notice the the master never knows - * the client state of the document. - * Therefore we always send all attachments data. - */ - undefined); - } - return docData; - })) - }; - }); - }, - async masterWrite(rows) { - var rowById = {}; - rows.forEach(row => { - var docId = row.newDocumentState[primaryPath]; - rowById[docId] = row; - }); - var ids = Object.keys(rowById); - var masterDocsStateList = await instance.findDocumentsById(ids, true); - var masterDocsState = new Map(); - masterDocsStateList.forEach(doc => masterDocsState.set(doc[primaryPath], doc)); - var conflicts = []; - var writeRows = []; - await Promise.all(Object.entries(rowById).map(async ([id, row]) => { - var masterState = masterDocsState.get(id); - if (!masterState) { - writeRows.push({ - document: (0, _helper.docStateToWriteDoc)(databaseInstanceToken, hasAttachments, keepMeta, row.newDocumentState) - }); - } else if (masterState && !row.assumedMasterState) { - conflicts.push((0, _helper.writeDocToDocState)(masterState, hasAttachments, keepMeta)); - } else if ((await conflictHandler({ - realMasterState: (0, _helper.writeDocToDocState)(masterState, hasAttachments, keepMeta), - newDocumentState: (0, _index.ensureNotFalsy)(row.assumedMasterState) - }, 'rxStorageInstanceToReplicationHandler-masterWrite')).isEqual === true) { - writeRows.push({ - previous: masterState, - 
document: (0, _helper.docStateToWriteDoc)(databaseInstanceToken, hasAttachments, keepMeta, row.newDocumentState, masterState) - }); - } else { - conflicts.push((0, _helper.writeDocToDocState)(masterState, hasAttachments, keepMeta)); - } - })); - if (writeRows.length > 0) { - var result = await instance.bulkWrite(writeRows, 'replication-master-write'); - result.error.forEach(err => { - if (err.status !== 409) { - throw new Error('non conflict error'); - } else { - conflicts.push((0, _helper.writeDocToDocState)((0, _index.ensureNotFalsy)(err.documentInDb), hasAttachments, keepMeta)); - } - }); - } - return conflicts; - } - }; - return replicationHandler; -} -async function cancelRxStorageReplication(replicationState) { - replicationState.events.canceled.next(true); - replicationState.events.active.up.complete(); - replicationState.events.active.down.complete(); - replicationState.events.processed.up.complete(); - replicationState.events.processed.down.complete(); - replicationState.events.resolvedConflicts.complete(); - replicationState.events.canceled.complete(); - await replicationState.checkpointQueue; -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/cjs/replication-protocol/index.js.map b/dist/cjs/replication-protocol/index.js.map deleted file mode 100644 index d35d7c6046f..00000000000 --- a/dist/cjs/replication-protocol/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","names":["_rxjs","require","_rxSchemaHelper","_index","_checkpoint","Object","keys","forEach","key","prototype","hasOwnProperty","call","_exportNames","exports","defineProperty","enumerable","get","_downstream","_helper","_upstream","_index2","_rxStorageHelper","_metaInstance","_conflicts","replicateRxStorageInstance","input","flatClone","forkInstance","getUnderlyingPersistentStorage","metaInstance","checkpointKeyPromise","getCheckpointKey","state","primaryPath","getPrimaryFieldOfPrimaryKey","schema","primaryKey","hasAttachments","attachments","checkpointKey","downstreamBulkWriteFlag","then","events","canceled","BehaviorSubject","active","down","up","processed","Subject","resolvedConflicts","error","stats","addNewTask","downstreamProcessChanges","downstreamResyncOnce","masterChangeStreamEmit","persistFromMaster","forkChangeStreamEmit","persistToMaster","persistToMasterConflictWrites","persistToMasterHadConflicts","processTasks","upstreamInitialSync","firstSyncDone","streamQueue","PROMISE_RESOLVE_VOID","checkpointQueue","lastCheckpointDoc","startReplicationDownstream","startReplicationUpstream","awaitRxStorageReplicationFirstInSync","firstValueFrom","combineLatest","pipe","filter","v","awaitRxStorageReplicationInSync","replicationState","Promise","all","awaitRxStorageReplicationIdle","rxStorageInstanceToReplicationHandler","instance","conflictHandler","databaseInstanceToken","keepMeta","replicationHandler","masterChangeStream$","changeStream","mergeMap","eventBulk","ret","checkpoint","documents","map","event","docData","writeDocToDocState","documentData","fillWriteDataForAttachmentsChange","clone","undefined","masterChangesSince","batchSize","getChangedDocumentsSince","result","length","plainDocumentData","masterWrite","rows","rowById","row","docId","newDocumentState","ids","masterDocsStateList","findDocumentsById","masterDocsState","Map","doc","set","conflicts","writeRows","entries","id","masterState","push","document","docStateToWrit
eDoc","assumedMasterState","realMasterState","ensureNotFalsy","isEqual","previous","bulkWrite","err","status","Error","documentInDb","cancelRxStorageReplication","next","complete"],"sources":["../../../src/replication-protocol/index.ts"],"sourcesContent":["/**\n * These files contain the replication protocol.\n * It can be used to replicated RxStorageInstances or RxCollections\n * or even to do a client(s)-server replication.\n */\n\n\nimport {\n BehaviorSubject,\n combineLatest,\n filter,\n firstValueFrom,\n mergeMap,\n Subject\n} from 'rxjs';\nimport {\n getPrimaryFieldOfPrimaryKey\n} from '../rx-schema-helper.ts';\nimport type {\n BulkWriteRow,\n ById,\n DocumentsWithCheckpoint,\n RxConflictHandler,\n RxDocumentData,\n RxReplicationHandler,\n RxReplicationWriteToMasterRow,\n RxStorageInstance,\n RxStorageInstanceReplicationInput,\n RxStorageInstanceReplicationState,\n WithDeleted\n} from '../types/index.d.ts';\nimport {\n clone,\n ensureNotFalsy,\n flatClone,\n PROMISE_RESOLVE_VOID\n} from '../plugins/utils/index.ts';\nimport {\n getCheckpointKey\n} from './checkpoint.ts';\nimport { startReplicationDownstream } from './downstream.ts';\nimport { docStateToWriteDoc, getUnderlyingPersistentStorage, writeDocToDocState } from './helper.ts';\nimport { startReplicationUpstream } from './upstream.ts';\nimport { fillWriteDataForAttachmentsChange } from '../plugins/attachments/index.ts';\nimport { getChangedDocumentsSince } from '../rx-storage-helper.ts';\n\n\nexport * from './checkpoint.ts';\nexport * from './downstream.ts';\nexport * from './upstream.ts';\nexport * from './meta-instance.ts';\nexport * from './conflicts.ts';\nexport * from './helper.ts';\n\n\nexport function replicateRxStorageInstance(\n input: RxStorageInstanceReplicationInput\n): RxStorageInstanceReplicationState {\n input = flatClone(input);\n input.forkInstance = getUnderlyingPersistentStorage(input.forkInstance);\n input.metaInstance = getUnderlyingPersistentStorage(input.metaInstance);\n const 
checkpointKeyPromise = getCheckpointKey(input);\n const state: RxStorageInstanceReplicationState = {\n primaryPath: getPrimaryFieldOfPrimaryKey(input.forkInstance.schema.primaryKey),\n hasAttachments: !!input.forkInstance.schema.attachments,\n input,\n checkpointKey: checkpointKeyPromise,\n downstreamBulkWriteFlag: checkpointKeyPromise.then(checkpointKey => 'replication-downstream-' + checkpointKey),\n events: {\n canceled: new BehaviorSubject(false),\n active: {\n down: new BehaviorSubject(true),\n up: new BehaviorSubject(true)\n },\n processed: {\n down: new Subject(),\n up: new Subject()\n },\n resolvedConflicts: new Subject(),\n error: new Subject()\n },\n stats: {\n down: {\n addNewTask: 0,\n downstreamProcessChanges: 0,\n downstreamResyncOnce: 0,\n masterChangeStreamEmit: 0,\n persistFromMaster: 0\n },\n up: {\n forkChangeStreamEmit: 0,\n persistToMaster: 0,\n persistToMasterConflictWrites: 0,\n persistToMasterHadConflicts: 0,\n processTasks: 0,\n upstreamInitialSync: 0\n }\n },\n firstSyncDone: {\n down: new BehaviorSubject(false),\n up: new BehaviorSubject(false)\n },\n streamQueue: {\n down: PROMISE_RESOLVE_VOID,\n up: PROMISE_RESOLVE_VOID\n },\n checkpointQueue: PROMISE_RESOLVE_VOID,\n lastCheckpointDoc: {}\n };\n\n startReplicationDownstream(state);\n startReplicationUpstream(state);\n return state;\n}\n\nexport function awaitRxStorageReplicationFirstInSync(\n state: RxStorageInstanceReplicationState\n): Promise {\n return firstValueFrom(\n combineLatest([\n state.firstSyncDone.down.pipe(\n filter(v => !!v)\n ),\n state.firstSyncDone.up.pipe(\n filter(v => !!v)\n )\n ])\n ).then(() => { });\n}\n\nexport function awaitRxStorageReplicationInSync(\n replicationState: RxStorageInstanceReplicationState\n) {\n return Promise.all([\n replicationState.streamQueue.up,\n replicationState.streamQueue.down,\n replicationState.checkpointQueue\n ]);\n}\n\n\nexport async function awaitRxStorageReplicationIdle(\n state: RxStorageInstanceReplicationState\n) {\n await 
awaitRxStorageReplicationFirstInSync(state);\n while (true) {\n const { down, up } = state.streamQueue;\n await Promise.all([\n up,\n down\n ]);\n /**\n * If the Promises have not been reassigned\n * after awaiting them, we know that the replication\n * is in idle state at this point in time.\n */\n if (\n down === state.streamQueue.down &&\n up === state.streamQueue.up\n ) {\n return;\n }\n }\n}\n\n\nexport function rxStorageInstanceToReplicationHandler(\n instance: RxStorageInstance,\n conflictHandler: RxConflictHandler,\n databaseInstanceToken: string,\n /**\n * If set to true,\n * the _meta.lwt from the pushed documents is kept.\n * (Used in the migration to ensure checkpoints are still valid)\n */\n keepMeta: boolean = false\n): RxReplicationHandler {\n instance = getUnderlyingPersistentStorage(instance);\n\n const hasAttachments = !!instance.schema.attachments;\n const primaryPath = getPrimaryFieldOfPrimaryKey(instance.schema.primaryKey);\n const replicationHandler: RxReplicationHandler = {\n masterChangeStream$: instance.changeStream().pipe(\n mergeMap(async (eventBulk) => {\n const ret: DocumentsWithCheckpoint = {\n checkpoint: eventBulk.checkpoint,\n documents: await Promise.all(\n eventBulk.events.map(async (event) => {\n let docData = writeDocToDocState(event.documentData, hasAttachments, keepMeta);\n if (hasAttachments) {\n docData = await fillWriteDataForAttachmentsChange(\n primaryPath,\n instance,\n clone(docData),\n /**\n * Notice that the master never knows\n * the client state of the document.\n * Therefore we always send all attachments data.\n */\n undefined\n );\n }\n\n return docData;\n })\n )\n };\n return ret;\n })\n ),\n masterChangesSince(\n checkpoint,\n batchSize\n ) {\n return getChangedDocumentsSince(\n instance,\n batchSize,\n checkpoint\n ).then(async (result) => {\n return {\n checkpoint: result.documents.length > 0 ? 
result.checkpoint : checkpoint,\n documents: await Promise.all(\n result.documents.map(async (plainDocumentData) => {\n let docData = writeDocToDocState(plainDocumentData, hasAttachments, keepMeta);\n if (hasAttachments) {\n docData = await fillWriteDataForAttachmentsChange(\n primaryPath,\n instance,\n clone(docData),\n /**\n * Notice the the master never knows\n * the client state of the document.\n * Therefore we always send all attachments data.\n */\n undefined\n );\n }\n return docData;\n })\n )\n };\n });\n },\n async masterWrite(\n rows\n ) {\n const rowById: ById> = {};\n rows.forEach(row => {\n const docId: string = (row.newDocumentState as any)[primaryPath];\n rowById[docId] = row;\n });\n const ids = Object.keys(rowById);\n\n const masterDocsStateList = await instance.findDocumentsById(\n ids,\n true\n );\n const masterDocsState = new Map>();\n masterDocsStateList.forEach(doc => masterDocsState.set((doc as any)[primaryPath], doc));\n const conflicts: WithDeleted[] = [];\n const writeRows: BulkWriteRow[] = [];\n await Promise.all(\n Object.entries(rowById)\n .map(async ([id, row]) => {\n const masterState = masterDocsState.get(id);\n if (!masterState) {\n writeRows.push({\n document: docStateToWriteDoc(databaseInstanceToken, hasAttachments, keepMeta, row.newDocumentState)\n });\n } else if (\n masterState &&\n !row.assumedMasterState\n ) {\n conflicts.push(writeDocToDocState(masterState, hasAttachments, keepMeta));\n } else if (\n (await conflictHandler({\n realMasterState: writeDocToDocState(masterState, hasAttachments, keepMeta),\n newDocumentState: ensureNotFalsy(row.assumedMasterState)\n }, 'rxStorageInstanceToReplicationHandler-masterWrite')).isEqual === true\n ) {\n writeRows.push({\n previous: masterState,\n document: docStateToWriteDoc(databaseInstanceToken, hasAttachments, keepMeta, row.newDocumentState, masterState)\n });\n } else {\n conflicts.push(writeDocToDocState(masterState, hasAttachments, keepMeta));\n }\n })\n );\n\n if 
(writeRows.length > 0) {\n const result = await instance.bulkWrite(\n writeRows,\n 'replication-master-write'\n );\n\n result.error.forEach(err => {\n if (err.status !== 409) {\n throw new Error('non conflict error');\n } else {\n conflicts.push(\n writeDocToDocState(ensureNotFalsy(err.documentInDb), hasAttachments, keepMeta)\n );\n }\n });\n }\n return conflicts;\n }\n };\n\n return replicationHandler;\n}\n\n\nexport async function cancelRxStorageReplication(\n replicationState: RxStorageInstanceReplicationState\n) {\n replicationState.events.canceled.next(true);\n replicationState.events.active.up.complete();\n replicationState.events.active.down.complete();\n replicationState.events.processed.up.complete();\n replicationState.events.processed.down.complete();\n replicationState.events.resolvedConflicts.complete();\n replicationState.events.canceled.complete();\n await replicationState.checkpointQueue;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;AAOA,IAAAA,KAAA,GAAAC,OAAA;AAQA,IAAAC,eAAA,GAAAD,OAAA;AAgBA,IAAAE,MAAA,GAAAF,OAAA;AAMA,IAAAG,WAAA,GAAAH,OAAA;AAUAI,MAAA,CAAAC,IAAA,CAAAF,WAAA,EAAAG,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAJ,WAAA,CAAAI,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAZ,WAAA,CAAAI,GAAA;IAAA;EAAA;AAAA;AAPA,IAAAS,WAAA,GAAAhB,OAAA;AAQAI,MAAA,CAAAC,IAAA,CAAAW,WAAA,EAAAV,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAS,WAAA,CAAAT,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAC,WAAA,CAAAT,GAAA;IAAA;EAAA;AAAA;AAPA,IAAAU,OAAA,GAAAjB,OAAA;AAWAI,MAAA,CAAAC,IAAA,CAAAY,OAAA,EAAAX,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA
,CAAAL,GAAA,MAAAU,OAAA,CAAAV,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAE,OAAA,CAAAV,GAAA;IAAA;EAAA;AAAA;AAVA,IAAAW,SAAA,GAAAlB,OAAA;AAOAI,MAAA,CAAAC,IAAA,CAAAa,SAAA,EAAAZ,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAW,SAAA,CAAAX,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAG,SAAA,CAAAX,GAAA;IAAA;EAAA;AAAA;AANA,IAAAY,OAAA,GAAAnB,OAAA;AACA,IAAAoB,gBAAA,GAAApB,OAAA;AAMA,IAAAqB,aAAA,GAAArB,OAAA;AAAAI,MAAA,CAAAC,IAAA,CAAAgB,aAAA,EAAAf,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAc,aAAA,CAAAd,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAM,aAAA,CAAAd,GAAA;IAAA;EAAA;AAAA;AACA,IAAAe,UAAA,GAAAtB,OAAA;AAAAI,MAAA,CAAAC,IAAA,CAAAiB,UAAA,EAAAhB,OAAA,WAAAC,GAAA;EAAA,IAAAA,GAAA,kBAAAA,GAAA;EAAA,IAAAH,MAAA,CAAAI,SAAA,CAAAC,cAAA,CAAAC,IAAA,CAAAC,YAAA,EAAAJ,GAAA;EAAA,IAAAA,GAAA,IAAAK,OAAA,IAAAA,OAAA,CAAAL,GAAA,MAAAe,UAAA,CAAAf,GAAA;EAAAH,MAAA,CAAAS,cAAA,CAAAD,OAAA,EAAAL,GAAA;IAAAO,UAAA;IAAAC,GAAA,WAAAA,CAAA;MAAA,OAAAO,UAAA,CAAAf,GAAA;IAAA;EAAA;AAAA;AAnDA;AACA;AACA;AACA;AACA;;AAmDO,SAASgB,0BAA0BA,CACtCC,KAAmD,EACP;EAC5CA,KAAK,GAAG,IAAAC,gBAAS,EAACD,KAAK,CAAC;EACxBA,KAAK,CAACE,YAAY,GAAG,IAAAC,sCAA8B,EAACH,KAAK,CAACE,YAAY,CAAC;EACvEF,KAAK,CAACI,YAAY,GAAG,IAAAD,sCAA8B,EAACH,KAAK,CAACI,YAAY,CAAC;EACvE,IAAMC,oBAAoB,GAAG,IAAAC,4BAAgB,EAACN,KAAK,CAAC;EACpD,IAAMO,KAAmD,GAAG;IACxDC,WAAW,EAAE,IAAAC,2CAA2B,EAACT,KAAK,CAACE,YAAY,CAACQ,MAAM,CAACC,UAAU,CAAC;IAC9EC,cAAc,EAAE,CAAC,CAACZ,KAAK,CAACE,YAAY,CAACQ,MAAM,CAACG,WAAW;IACvDb,KAAK;IACLc,aAAa,EAAET,oBAAoB;IACnCU,uBAAuB,EAAEV,oBAAoB,CAACW,IAAI,CAACF,aAAa,IAAI,yBAAyB,GAAGA,aAAa,CAAC;IAC9GG,MAAM,EAAE;MACJC,QAAQ,EAAE,IAAIC,qBAAe,CAAU,KAAK,CAAC;MAC7CC,MAAM,EAAE;QACJC,IAAI,
EAAE,IAAIF,qBAAe,CAAU,IAAI,CAAC;QACxCG,EAAE,EAAE,IAAIH,qBAAe,CAAU,IAAI;MACzC,CAAC;MACDI,SAAS,EAAE;QACPF,IAAI,EAAE,IAAIG,aAAO,CAAC,CAAC;QACnBF,EAAE,EAAE,IAAIE,aAAO,CAAC;MACpB,CAAC;MACDC,iBAAiB,EAAE,IAAID,aAAO,CAAC,CAAC;MAChCE,KAAK,EAAE,IAAIF,aAAO,CAAC;IACvB,CAAC;IACDG,KAAK,EAAE;MACHN,IAAI,EAAE;QACFO,UAAU,EAAE,CAAC;QACbC,wBAAwB,EAAE,CAAC;QAC3BC,oBAAoB,EAAE,CAAC;QACvBC,sBAAsB,EAAE,CAAC;QACzBC,iBAAiB,EAAE;MACvB,CAAC;MACDV,EAAE,EAAE;QACAW,oBAAoB,EAAE,CAAC;QACvBC,eAAe,EAAE,CAAC;QAClBC,6BAA6B,EAAE,CAAC;QAChCC,2BAA2B,EAAE,CAAC;QAC9BC,YAAY,EAAE,CAAC;QACfC,mBAAmB,EAAE;MACzB;IACJ,CAAC;IACDC,aAAa,EAAE;MACXlB,IAAI,EAAE,IAAIF,qBAAe,CAAU,KAAK,CAAC;MACzCG,EAAE,EAAE,IAAIH,qBAAe,CAAU,KAAK;IAC1C,CAAC;IACDqB,WAAW,EAAE;MACTnB,IAAI,EAAEoB,2BAAoB;MAC1BnB,EAAE,EAAEmB;IACR,CAAC;IACDC,eAAe,EAAED,2BAAoB;IACrCE,iBAAiB,EAAE,CAAC;EACxB,CAAC;EAED,IAAAC,sCAA0B,EAACrC,KAAK,CAAC;EACjC,IAAAsC,kCAAwB,EAACtC,KAAK,CAAC;EAC/B,OAAOA,KAAK;AAChB;AAEO,SAASuC,oCAAoCA,CAChDvC,KAA6C,EAChC;EACb,OAAO,IAAAwC,oBAAc,EACjB,IAAAC,mBAAa,EAAC,CACVzC,KAAK,CAACgC,aAAa,CAAClB,IAAI,CAAC4B,IAAI,CACzB,IAAAC,YAAM,EAACC,CAAC,IAAI,CAAC,CAACA,CAAC,CACnB,CAAC,EACD5C,KAAK,CAACgC,aAAa,CAACjB,EAAE,CAAC2B,IAAI,CACvB,IAAAC,YAAM,EAACC,CAAC,IAAI,CAAC,CAACA,CAAC,CACnB,CAAC,CACJ,CACL,CAAC,CAACnC,IAAI,CAAC,MAAM,CAAE,CAAC,CAAC;AACrB;AAEO,SAASoC,+BAA+BA,CAC3CC,gBAAwD,EAC1D;EACE,OAAOC,OAAO,CAACC,GAAG,CAAC,CACfF,gBAAgB,CAACb,WAAW,CAAClB,EAAE,EAC/B+B,gBAAgB,CAACb,WAAW,CAACnB,IAAI,EACjCgC,gBAAgB,CAACX,eAAe,CACnC,CAAC;AACN;AAGO,eAAec,6BAA6BA,CAC/CjD,KAA6C,EAC/C;EACE,MAAMuC,oCAAoC,CAACvC,KAAK,CAAC;EACjD,OAAO,IAAI,EAAE;IACT,IAAM;MAAEc,IAAI;MAAEC;IAAG,CAAC,GAAGf,KAAK,CAACiC,WAAW;IACtC,MAAMc,OAAO,CAACC,GAAG,CAAC,CACdjC,EAAE,EACFD,IAAI,CACP,CAAC;IACF;AACR;AACA;AACA;AACA;IACQ,IACIA,IAAI,KAAKd,KAAK,CAACiC,WAAW,CAACnB,IAAI,IAC/BC,EAAE,KAAKf,KAAK,CAACiC,WAAW,CAAClB,EAAE,EAC7B;MACE;IACJ;EACJ;AACJ;AAGO,SAASmC,qCAAqCA,CACjDC,QAAsE,EACtEC,eAA6C,EAC7CC,qBAA6B;AAC7B;AACJ;AACA;AACA;AACA;AACIC,QAAiB,GAAG,KAAK,EAC4B;EACrDH,QAAQ,GAAG,IAAAvD,sCAA8B,EAACuD,QAAQ,CAAC;E
AEnD,IAAM9C,cAAc,GAAG,CAAC,CAAC8C,QAAQ,CAAChD,MAAM,CAACG,WAAW;EACpD,IAAML,WAAW,GAAG,IAAAC,2CAA2B,EAACiD,QAAQ,CAAChD,MAAM,CAACC,UAAU,CAAC;EAC3E,IAAMmD,kBAAyE,GAAG;IAC9EC,mBAAmB,EAAEL,QAAQ,CAACM,YAAY,CAAC,CAAC,CAACf,IAAI,CAC7C,IAAAgB,cAAQ,EAAC,MAAOC,SAAS,IAAK;MAC1B,IAAMC,GAA6D,GAAG;QAClEC,UAAU,EAAEF,SAAS,CAACE,UAAU;QAChCC,SAAS,EAAE,MAAMf,OAAO,CAACC,GAAG,CACxBW,SAAS,CAACjD,MAAM,CAACqD,GAAG,CAAC,MAAOC,KAAK,IAAK;UAClC,IAAIC,OAAO,GAAG,IAAAC,0BAAkB,EAACF,KAAK,CAACG,YAAY,EAAE9D,cAAc,EAAEiD,QAAQ,CAAC;UAC9E,IAAIjD,cAAc,EAAE;YAChB4D,OAAO,GAAG,MAAM,IAAAG,yCAAiC,EAC7CnE,WAAW,EACXkD,QAAQ,EACR,IAAAkB,YAAK,EAACJ,OAAO,CAAC;YACd;AACpC;AACA;AACA;AACA;YACoCK,SACJ,CAAC;UACL;UAEA,OAAOL,OAAO;QAClB,CAAC,CACL;MACJ,CAAC;MACD,OAAOL,GAAG;IACd,CAAC,CACL,CAAC;IACDW,kBAAkBA,CACdV,UAAU,EACVW,SAAS,EACX;MACE,OAAO,IAAAC,yCAAwB,EAC3BtB,QAAQ,EACRqB,SAAS,EACTX,UACJ,CAAC,CAACpD,IAAI,CAAC,MAAOiE,MAAM,IAAK;QACrB,OAAO;UACHb,UAAU,EAAEa,MAAM,CAACZ,SAAS,CAACa,MAAM,GAAG,CAAC,GAAGD,MAAM,CAACb,UAAU,GAAGA,UAAU;UACxEC,SAAS,EAAE,MAAMf,OAAO,CAACC,GAAG,CACxB0B,MAAM,CAACZ,SAAS,CAACC,GAAG,CAAC,MAAOa,iBAAiB,IAAK;YAC9C,IAAIX,OAAO,GAAG,IAAAC,0BAAkB,EAACU,iBAAiB,EAAEvE,cAAc,EAAEiD,QAAQ,CAAC;YAC7E,IAAIjD,cAAc,EAAE;cAChB4D,OAAO,GAAG,MAAM,IAAAG,yCAAiC,EAC7CnE,WAAW,EACXkD,QAAQ,EACR,IAAAkB,YAAK,EAACJ,OAAO,CAAC;cACd;AACpC;AACA;AACA;AACA;cACoCK,SACJ,CAAC;YACL;YACA,OAAOL,OAAO;UAClB,CAAC,CACL;QACJ,CAAC;MACL,CAAC,CAAC;IACN,CAAC;IACD,MAAMY,WAAWA,CACbC,IAAI,EACN;MACE,IAAMC,OAAuD,GAAG,CAAC,CAAC;MAClED,IAAI,CAACvG,OAAO,CAACyG,GAAG,IAAI;QAChB,IAAMC,KAAa,GAAID,GAAG,CAACE,gBAAgB,CAASjF,WAAW,CAAC;QAChE8E,OAAO,CAACE,KAAK,CAAC,GAAGD,GAAG;MACxB,CAAC,CAAC;MACF,IAAMG,GAAG,GAAG9G,MAAM,CAACC,IAAI,CAACyG,OAAO,CAAC;MAEhC,IAAMK,mBAAmB,GAAG,MAAMjC,QAAQ,CAACkC,iBAAiB,CACxDF,GAAG,EACH,IACJ,CAAC;MACD,IAAMG,eAAe,GAAG,IAAIC,GAAG,CAAoC,CAAC;MACpEH,mBAAmB,CAAC7G,OAAO,CAACiH,GAAG,IAAIF,eAAe,CAACG,GAAG,CAAED,GAAG,CAASvF,WAAW,CAAC,EAAEuF,GAAG,CAAC,CAAC;MACvF,IAAME,SAAmC,GAAG,EAAE;MAC9C,IAAMC,SAAoC,GAAG,EAAE;MAC/C,MAAM5C,OAAO,CAACC,GAAG,CACb3E,MAAM,CAACuH,OAAO,CA
ACb,OAAO,CAAC,CAClBhB,GAAG,CAAC,OAAO,CAAC8B,EAAE,EAAEb,GAAG,CAAC,KAAK;QACtB,IAAMc,WAAW,GAAGR,eAAe,CAACtG,GAAG,CAAC6G,EAAE,CAAC;QAC3C,IAAI,CAACC,WAAW,EAAE;UACdH,SAAS,CAACI,IAAI,CAAC;YACXC,QAAQ,EAAE,IAAAC,0BAAkB,EAAC5C,qBAAqB,EAAEhD,cAAc,EAAEiD,QAAQ,EAAE0B,GAAG,CAACE,gBAAgB;UACtG,CAAC,CAAC;QACN,CAAC,MAAM,IACHY,WAAW,IACX,CAACd,GAAG,CAACkB,kBAAkB,EACzB;UACER,SAAS,CAACK,IAAI,CAAC,IAAA7B,0BAAkB,EAAC4B,WAAW,EAAEzF,cAAc,EAAEiD,QAAQ,CAAC,CAAC;QAC7E,CAAC,MAAM,IACH,CAAC,MAAMF,eAAe,CAAC;UACnB+C,eAAe,EAAE,IAAAjC,0BAAkB,EAAC4B,WAAW,EAAEzF,cAAc,EAAEiD,QAAQ,CAAC;UAC1E4B,gBAAgB,EAAE,IAAAkB,qBAAc,EAACpB,GAAG,CAACkB,kBAAkB;QAC3D,CAAC,EAAE,mDAAmD,CAAC,EAAEG,OAAO,KAAK,IAAI,EAC3E;UACEV,SAAS,CAACI,IAAI,CAAC;YACXO,QAAQ,EAAER,WAAW;YACrBE,QAAQ,EAAE,IAAAC,0BAAkB,EAAC5C,qBAAqB,EAAEhD,cAAc,EAAEiD,QAAQ,EAAE0B,GAAG,CAACE,gBAAgB,EAAEY,WAAW;UACnH,CAAC,CAAC;QACN,CAAC,MAAM;UACHJ,SAAS,CAACK,IAAI,CAAC,IAAA7B,0BAAkB,EAAC4B,WAAW,EAAEzF,cAAc,EAAEiD,QAAQ,CAAC,CAAC;QAC7E;MACJ,CAAC,CACT,CAAC;MAED,IAAIqC,SAAS,CAAChB,MAAM,GAAG,CAAC,EAAE;QACtB,IAAMD,MAAM,GAAG,MAAMvB,QAAQ,CAACoD,SAAS,CACnCZ,SAAS,EACT,0BACJ,CAAC;QAEDjB,MAAM,CAACvD,KAAK,CAAC5C,OAAO,CAACiI,GAAG,IAAI;UACxB,IAAIA,GAAG,CAACC,MAAM,KAAK,GAAG,EAAE;YACpB,MAAM,IAAIC,KAAK,CAAC,oBAAoB,CAAC;UACzC,CAAC,MAAM;YACHhB,SAAS,CAACK,IAAI,CACV,IAAA7B,0BAAkB,EAAC,IAAAkC,qBAAc,EAACI,GAAG,CAACG,YAAY,CAAC,EAAEtG,cAAc,EAAEiD,QAAQ,CACjF,CAAC;UACL;QACJ,CAAC,CAAC;MACN;MACA,OAAOoC,SAAS;IACpB;EACJ,CAAC;EAED,OAAOnC,kBAAkB;AAC7B;AAGO,eAAeqD,0BAA0BA,CAC5C9D,gBAAwD,EAC1D;EACEA,gBAAgB,CAACpC,MAAM,CAACC,QAAQ,CAACkG,IAAI,CAAC,IAAI,CAAC;EAC3C/D,gBAAgB,CAACpC,MAAM,CAACG,MAAM,CAACE,EAAE,CAAC+F,QAAQ,CAAC,CAAC;EAC5ChE,gBAAgB,CAACpC,MAAM,CAACG,MAAM,CAACC,IAAI,CAACgG,QAAQ,CAAC,CAAC;EAC9ChE,gBAAgB,CAACpC,MAAM,CAACM,SAAS,CAACD,EAAE,CAAC+F,QAAQ,CAAC,CAAC;EAC/ChE,gBAAgB,CAACpC,MAAM,CAACM,SAAS,CAACF,IAAI,CAACgG,QAAQ,CAAC,CAAC;EACjDhE,gBAAgB,CAACpC,MAAM,CAACQ,iBAAiB,CAAC4F,QAAQ,CAAC,CAAC;EACpDhE,gBAAgB,CAACpC,MAAM,CAACC,QAAQ,CAACmG,QAAQ,CAAC,CAAC;EAC3C,MAAMhE,gBAAgB,CAACX,eAAe;AAC1C","ignoreList":[
]} \ No newline at end of file diff --git a/dist/cjs/replication-protocol/meta-instance.js b/dist/cjs/replication-protocol/meta-instance.js deleted file mode 100644 index 61f619f2860..00000000000 --- a/dist/cjs/replication-protocol/meta-instance.js +++ /dev/null @@ -1,124 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.META_INSTANCE_SCHEMA_TITLE = void 0; -exports.getAssumedMasterState = getAssumedMasterState; -exports.getMetaWriteRow = getMetaWriteRow; -exports.getRxReplicationMetaInstanceSchema = getRxReplicationMetaInstanceSchema; -var _rxSchemaHelper = require("../rx-schema-helper.js"); -var _rxStorageHelper = require("../rx-storage-helper.js"); -var _index = require("../plugins/utils/index.js"); -var META_INSTANCE_SCHEMA_TITLE = exports.META_INSTANCE_SCHEMA_TITLE = 'RxReplicationProtocolMetaData'; -function getRxReplicationMetaInstanceSchema(replicatedDocumentsSchema, encrypted) { - var parentPrimaryKeyLength = (0, _rxSchemaHelper.getLengthOfPrimaryKey)(replicatedDocumentsSchema); - var baseSchema = { - title: META_INSTANCE_SCHEMA_TITLE, - primaryKey: { - key: 'id', - fields: ['itemId', 'isCheckpoint'], - separator: '|' - }, - type: 'object', - version: replicatedDocumentsSchema.version, - additionalProperties: false, - properties: { - id: { - type: 'string', - minLength: 1, - // add +1 for the '|' and +1 for the 'isCheckpoint' flag - maxLength: parentPrimaryKeyLength + 2 - }, - isCheckpoint: { - type: 'string', - enum: ['0', '1'], - minLength: 1, - maxLength: 1 - }, - itemId: { - type: 'string', - /** - * ensure that all values of RxStorageReplicationDirection ('DOWN' has 4 chars) fit into it - * because checkpoints use the itemId field for that. - */ - maxLength: parentPrimaryKeyLength > 4 ? 
parentPrimaryKeyLength : 4 - }, - checkpointData: { - type: 'object', - additionalProperties: true - }, - docData: { - type: 'object', - properties: replicatedDocumentsSchema.properties - }, - isResolvedConflict: { - type: 'string' - } - }, - keyCompression: replicatedDocumentsSchema.keyCompression, - required: ['id', 'isCheckpoint', 'itemId'] - }; - if (encrypted) { - baseSchema.encrypted = ['docData']; - } - var metaInstanceSchema = (0, _rxSchemaHelper.fillWithDefaultSettings)(baseSchema); - return metaInstanceSchema; -} - -/** - * Returns the document states of what the fork instance - * assumes to be the latest state on the master instance. - */ -function getAssumedMasterState(state, docIds) { - return state.input.metaInstance.findDocumentsById(docIds.map(docId => { - var useId = (0, _rxSchemaHelper.getComposedPrimaryKeyOfDocumentData)(state.input.metaInstance.schema, { - itemId: docId, - isCheckpoint: '0' - }); - return useId; - }), true).then(metaDocs => { - var ret = {}; - Object.values(metaDocs).forEach(metaDoc => { - ret[metaDoc.itemId] = { - docData: metaDoc.docData, - metaDocument: metaDoc - }; - }); - return ret; - }); -} -async function getMetaWriteRow(state, newMasterDocState, previous, isResolvedConflict) { - var docId = newMasterDocState[state.primaryPath]; - var newMeta = previous ? (0, _rxStorageHelper.flatCloneDocWithMeta)(previous) : { - id: '', - isCheckpoint: '0', - itemId: docId, - docData: newMasterDocState, - _attachments: {}, - _deleted: false, - _rev: (0, _index.getDefaultRevision)(), - _meta: { - lwt: 0 - } - }; - newMeta.docData = newMasterDocState; - - /** - * Sending isResolvedConflict with the value undefined - * will throw a schema validation error because it must be either - * not set or have a string. 
- */ - if (isResolvedConflict) { - newMeta.isResolvedConflict = isResolvedConflict; - } - newMeta._meta.lwt = (0, _index.now)(); - newMeta.id = (0, _rxSchemaHelper.getComposedPrimaryKeyOfDocumentData)(state.input.metaInstance.schema, newMeta); - newMeta._rev = (0, _index.createRevision)(await state.checkpointKey, previous); - var ret = { - previous, - document: newMeta - }; - return ret; -} -//# sourceMappingURL=meta-instance.js.map \ No newline at end of file diff --git a/dist/cjs/replication-protocol/meta-instance.js.map b/dist/cjs/replication-protocol/meta-instance.js.map deleted file mode 100644 index 59fda3f7f79..00000000000 --- a/dist/cjs/replication-protocol/meta-instance.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"meta-instance.js","names":["_rxSchemaHelper","require","_rxStorageHelper","_index","META_INSTANCE_SCHEMA_TITLE","exports","getRxReplicationMetaInstanceSchema","replicatedDocumentsSchema","encrypted","parentPrimaryKeyLength","getLengthOfPrimaryKey","baseSchema","title","primaryKey","key","fields","separator","type","version","additionalProperties","properties","id","minLength","maxLength","isCheckpoint","enum","itemId","checkpointData","docData","isResolvedConflict","keyCompression","required","metaInstanceSchema","fillWithDefaultSettings","getAssumedMasterState","state","docIds","input","metaInstance","findDocumentsById","map","docId","useId","getComposedPrimaryKeyOfDocumentData","schema","then","metaDocs","ret","Object","values","forEach","metaDoc","metaDocument","getMetaWriteRow","newMasterDocState","previous","primaryPath","newMeta","flatCloneDocWithMeta","_attachments","_deleted","_rev","getDefaultRevision","_meta","lwt","now","createRevision","checkpointKey","document"],"sources":["../../../src/replication-protocol/meta-instance.ts"],"sourcesContent":["import {\n fillWithDefaultSettings,\n getComposedPrimaryKeyOfDocumentData,\n getLengthOfPrimaryKey\n} from '../rx-schema-helper.ts';\nimport { flatCloneDocWithMeta } from 
'../rx-storage-helper.ts';\nimport type {\n BulkWriteRow,\n ById,\n RxDocumentData,\n RxJsonSchema,\n RxStorageInstanceReplicationState,\n RxStorageReplicationMeta,\n WithDeleted\n} from '../types/index.d.ts';\nimport {\n getDefaultRevision,\n createRevision,\n now\n} from '../plugins/utils/index.ts';\n\n\nexport const META_INSTANCE_SCHEMA_TITLE = 'RxReplicationProtocolMetaData';\n\nexport function getRxReplicationMetaInstanceSchema(\n replicatedDocumentsSchema: RxJsonSchema>,\n encrypted: boolean\n): RxJsonSchema>> {\n const parentPrimaryKeyLength = getLengthOfPrimaryKey(replicatedDocumentsSchema);\n\n const baseSchema: RxJsonSchema> = {\n title: META_INSTANCE_SCHEMA_TITLE,\n primaryKey: {\n key: 'id',\n fields: [\n 'itemId',\n 'isCheckpoint'\n ],\n separator: '|'\n },\n type: 'object',\n version: replicatedDocumentsSchema.version,\n additionalProperties: false,\n properties: {\n id: {\n type: 'string',\n minLength: 1,\n // add +1 for the '|' and +1 for the 'isCheckpoint' flag\n maxLength: parentPrimaryKeyLength + 2\n },\n isCheckpoint: {\n type: 'string',\n enum: [\n '0',\n '1'\n ],\n minLength: 1,\n maxLength: 1\n },\n itemId: {\n type: 'string',\n /**\n * ensure that all values of RxStorageReplicationDirection ('DOWN' has 4 chars) fit into it\n * because checkpoints use the itemId field for that.\n */\n maxLength: parentPrimaryKeyLength > 4 ? 
parentPrimaryKeyLength : 4\n },\n checkpointData: {\n type: 'object',\n additionalProperties: true\n },\n docData: {\n type: 'object',\n properties: replicatedDocumentsSchema.properties\n },\n isResolvedConflict: {\n type: 'string'\n }\n },\n keyCompression: replicatedDocumentsSchema.keyCompression,\n required: [\n 'id',\n 'isCheckpoint',\n 'itemId'\n ]\n };\n if (encrypted) {\n baseSchema.encrypted = ['docData'];\n }\n const metaInstanceSchema: RxJsonSchema>> = fillWithDefaultSettings(baseSchema);\n return metaInstanceSchema;\n}\n\n\n\n/**\n * Returns the document states of what the fork instance\n * assumes to be the latest state on the master instance.\n */\nexport function getAssumedMasterState(\n state: RxStorageInstanceReplicationState,\n docIds: string[]\n): Promise;\n metaDocument: RxDocumentData>;\n}>> {\n return state.input.metaInstance.findDocumentsById(\n docIds.map(docId => {\n const useId = getComposedPrimaryKeyOfDocumentData(\n state.input.metaInstance.schema,\n {\n itemId: docId,\n isCheckpoint: '0'\n }\n );\n return useId;\n }),\n true\n ).then(metaDocs => {\n const ret: {\n [docId: string]: {\n docData: RxDocumentData;\n metaDocument: RxDocumentData>;\n };\n } = {};\n Object\n .values(metaDocs)\n .forEach((metaDoc) => {\n ret[metaDoc.itemId] = {\n docData: metaDoc.docData,\n metaDocument: metaDoc\n };\n });\n\n return ret;\n });\n}\n\n\nexport async function getMetaWriteRow(\n state: RxStorageInstanceReplicationState,\n newMasterDocState: WithDeleted,\n previous?: RxDocumentData>,\n isResolvedConflict?: string\n): Promise>> {\n const docId: string = (newMasterDocState as any)[state.primaryPath];\n const newMeta: RxDocumentData> = previous ? 
flatCloneDocWithMeta(\n previous\n ) : {\n id: '',\n isCheckpoint: '0',\n itemId: docId,\n docData: newMasterDocState,\n _attachments: {},\n _deleted: false,\n _rev: getDefaultRevision(),\n _meta: {\n lwt: 0\n }\n };\n newMeta.docData = newMasterDocState;\n\n /**\n * Sending isResolvedConflict with the value undefined\n * will throw a schema validation error because it must be either\n * not set or have a string.\n */\n if (isResolvedConflict) {\n newMeta.isResolvedConflict = isResolvedConflict;\n }\n\n newMeta._meta.lwt = now();\n newMeta.id = getComposedPrimaryKeyOfDocumentData(\n state.input.metaInstance.schema,\n newMeta\n );\n newMeta._rev = createRevision(\n await state.checkpointKey,\n previous\n );\n\n const ret = {\n previous,\n document: newMeta\n };\n\n return ret;\n}\n"],"mappings":";;;;;;;;;AAAA,IAAAA,eAAA,GAAAC,OAAA;AAKA,IAAAC,gBAAA,GAAAD,OAAA;AAUA,IAAAE,MAAA,GAAAF,OAAA;AAOO,IAAMG,0BAA0B,GAAAC,OAAA,CAAAD,0BAAA,GAAG,+BAA+B;AAElE,SAASE,kCAAkCA,CAC9CC,yBAAkE,EAClEC,SAAkB,EAC+D;EACjF,IAAMC,sBAAsB,GAAG,IAAAC,qCAAqB,EAACH,yBAAyB,CAAC;EAE/E,IAAMI,UAA6E,GAAG;IAClFC,KAAK,EAAER,0BAA0B;IACjCS,UAAU,EAAE;MACRC,GAAG,EAAE,IAAI;MACTC,MAAM,EAAE,CACJ,QAAQ,EACR,cAAc,CACjB;MACDC,SAAS,EAAE;IACf,CAAC;IACDC,IAAI,EAAE,QAAQ;IACdC,OAAO,EAAEX,yBAAyB,CAACW,OAAO;IAC1CC,oBAAoB,EAAE,KAAK;IAC3BC,UAAU,EAAE;MACRC,EAAE,EAAE;QACAJ,IAAI,EAAE,QAAQ;QACdK,SAAS,EAAE,CAAC;QACZ;QACAC,SAAS,EAAEd,sBAAsB,GAAG;MACxC,CAAC;MACDe,YAAY,EAAE;QACVP,IAAI,EAAE,QAAQ;QACdQ,IAAI,EAAE,CACF,GAAG,EACH,GAAG,CACN;QACDH,SAAS,EAAE,CAAC;QACZC,SAAS,EAAE;MACf,CAAC;MACDG,MAAM,EAAE;QACJT,IAAI,EAAE,QAAQ;QACd;AAChB;AACA;AACA;QACgBM,SAAS,EAAEd,sBAAsB,GAAG,CAAC,GAAGA,sBAAsB,GAAG;MACrE,CAAC;MACDkB,cAAc,EAAE;QACZV,IAAI,EAAE,QAAQ;QACdE,oBAAoB,EAAE;MAC1B,CAAC;MACDS,OAAO,EAAE;QACLX,IAAI,EAAE,QAAQ;QACdG,UAAU,EAAEb,yBAAyB,CAACa;MAC1C,CAAC;MACDS,kBAAkB,EAAE;QAChBZ,IAAI,EAAE;MACV;IACJ,CAAC;IACDa,cAAc,EAAEvB,yBAAyB,CAACuB,cAAc;IACxDC,QAAQ,EAAE,CACN,IAAI,EACJ,cAAc,EACd,QAAQ;EAEhB,CAAC;EACD,IAAIvB,SAAS,EAAE;IACXG,UAAU,CAACH,SAAS,GAAG,CA
AC,SAAS,CAAC;EACtC;EACA,IAAMwB,kBAAqG,GAAG,IAAAC,uCAAuB,EAACtB,UAAU,CAAC;EACjJ,OAAOqB,kBAAkB;AAC7B;;AAIA;AACA;AACA;AACA;AACO,SAASE,qBAAqBA,CACjCC,KAAmD,EACnDC,MAAgB,EAIhB;EACA,OAAOD,KAAK,CAACE,KAAK,CAACC,YAAY,CAACC,iBAAiB,CAC7CH,MAAM,CAACI,GAAG,CAACC,KAAK,IAAI;IAChB,IAAMC,KAAK,GAAG,IAAAC,mDAAmC,EAC7CR,KAAK,CAACE,KAAK,CAACC,YAAY,CAACM,MAAM,EAC/B;MACIlB,MAAM,EAAEe,KAAK;MACbjB,YAAY,EAAE;IAClB,CACJ,CAAC;IACD,OAAOkB,KAAK;EAChB,CAAC,CAAC,EACF,IACJ,CAAC,CAACG,IAAI,CAACC,QAAQ,IAAI;IACf,IAAMC,GAKL,GAAG,CAAC,CAAC;IACNC,MAAM,CACDC,MAAM,CAACH,QAAQ,CAAC,CAChBI,OAAO,CAAEC,OAAO,IAAK;MAClBJ,GAAG,CAACI,OAAO,CAACzB,MAAM,CAAC,GAAG;QAClBE,OAAO,EAAEuB,OAAO,CAACvB,OAAO;QACxBwB,YAAY,EAAED;MAClB,CAAC;IACL,CAAC,CAAC;IAEN,OAAOJ,GAAG;EACd,CAAC,CAAC;AACN;AAGO,eAAeM,eAAeA,CACjClB,KAAmD,EACnDmB,iBAAyC,EACzCC,QAAmE,EACnE1B,kBAA2B,EACoC;EAC/D,IAAMY,KAAa,GAAIa,iBAAiB,CAASnB,KAAK,CAACqB,WAAW,CAAC;EACnE,IAAMC,OAAiE,GAAGF,QAAQ,GAAG,IAAAG,qCAAoB,EACrGH,QACJ,CAAC,GAAG;IACAlC,EAAE,EAAE,EAAE;IACNG,YAAY,EAAE,GAAG;IACjBE,MAAM,EAAEe,KAAK;IACbb,OAAO,EAAE0B,iBAAiB;IAC1BK,YAAY,EAAE,CAAC,CAAC;IAChBC,QAAQ,EAAE,KAAK;IACfC,IAAI,EAAE,IAAAC,yBAAkB,EAAC,CAAC;IAC1BC,KAAK,EAAE;MACHC,GAAG,EAAE;IACT;EACJ,CAAC;EACDP,OAAO,CAAC7B,OAAO,GAAG0B,iBAAiB;;EAEnC;AACJ;AACA;AACA;AACA;EACI,IAAIzB,kBAAkB,EAAE;IACpB4B,OAAO,CAAC5B,kBAAkB,GAAGA,kBAAkB;EACnD;EAEA4B,OAAO,CAACM,KAAK,CAACC,GAAG,GAAG,IAAAC,UAAG,EAAC,CAAC;EACzBR,OAAO,CAACpC,EAAE,GAAG,IAAAsB,mDAAmC,EAC5CR,KAAK,CAACE,KAAK,CAACC,YAAY,CAACM,MAAM,EAC/Ba,OACJ,CAAC;EACDA,OAAO,CAACI,IAAI,GAAG,IAAAK,qBAAc,EACzB,MAAM/B,KAAK,CAACgC,aAAa,EACzBZ,QACJ,CAAC;EAED,IAAMR,GAAG,GAAG;IACRQ,QAAQ;IACRa,QAAQ,EAAEX;EACd,CAAC;EAED,OAAOV,GAAG;AACd","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/replication-protocol/upstream.js b/dist/cjs/replication-protocol/upstream.js deleted file mode 100644 index 50d33d0441b..00000000000 --- a/dist/cjs/replication-protocol/upstream.js +++ /dev/null @@ -1,331 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true 
-}); -exports.startReplicationUpstream = startReplicationUpstream; -var _rxjs = require("rxjs"); -var _rxStorageHelper = require("../rx-storage-helper.js"); -var _index = require("../plugins/utils/index.js"); -var _checkpoint = require("./checkpoint.js"); -var _conflicts = require("./conflicts.js"); -var _helper = require("./helper.js"); -var _metaInstance = require("./meta-instance.js"); -var _index2 = require("../plugins/attachments/index.js"); -/** - * Writes all document changes from the fork to the master. - * The upstream runs on two modes: - * - For initial replication, a checkpoint-iteration is used - * - For ongoing local writes, we just subscribe to the changeStream of the fork. - * In contrast to the master, the fork can be assumed to never loose connection, - * so we do not have to prepare for missed out events. - */ -async function startReplicationUpstream(state) { - if (state.input.initialCheckpoint && state.input.initialCheckpoint.upstream) { - var checkpointDoc = await (0, _checkpoint.getLastCheckpointDoc)(state, 'up'); - if (!checkpointDoc) { - await (0, _checkpoint.setCheckpoint)(state, 'up', state.input.initialCheckpoint.upstream); - } - } - var replicationHandler = state.input.replicationHandler; - state.streamQueue.up = state.streamQueue.up.then(() => { - return upstreamInitialSync().then(() => { - processTasks(); - }); - }); - - // used to detect which tasks etc can in it at which order. 
- var timer = 0; - var initialSyncStartTime = -1; - var openTasks = []; - var persistenceQueue = _index.PROMISE_RESOLVE_FALSE; - var nonPersistedFromMaster = { - docs: {} - }; - var sub = state.input.forkInstance.changeStream().subscribe(async eventBulk => { - // ignore writes that came from the downstream - if (eventBulk.context === (await state.downstreamBulkWriteFlag)) { - return; - } - state.stats.up.forkChangeStreamEmit = state.stats.up.forkChangeStreamEmit + 1; - openTasks.push({ - task: eventBulk, - time: timer++ - }); - if (!state.events.active.up.getValue()) { - state.events.active.up.next(true); - } - if (state.input.waitBeforePersist) { - return state.input.waitBeforePersist().then(() => processTasks()); - } else { - return processTasks(); - } - }); - (0, _rxjs.firstValueFrom)(state.events.canceled.pipe((0, _rxjs.filter)(canceled => !!canceled))).then(() => sub.unsubscribe()); - async function upstreamInitialSync() { - state.stats.up.upstreamInitialSync = state.stats.up.upstreamInitialSync + 1; - if (state.events.canceled.getValue()) { - return; - } - state.checkpointQueue = state.checkpointQueue.then(() => (0, _checkpoint.getLastCheckpointDoc)(state, 'up')); - var lastCheckpoint = await state.checkpointQueue; - var promises = new Set(); - var _loop = async function () { - initialSyncStartTime = timer++; - - /** - * Throttle the calls to - * forkInstance.getChangedDocumentsSince() so that - * if the pushing to the remote is slower compared to the - * pulling out of forkInstance, we do not block the UI too much - * and have a big memory spike with all forkInstance documents. 
- */ - if (promises.size > 3) { - await Promise.race(Array.from(promises)); - } - var upResult = await (0, _rxStorageHelper.getChangedDocumentsSince)(state.input.forkInstance, state.input.pushBatchSize, lastCheckpoint); - if (upResult.documents.length === 0) { - return 1; // break - } - lastCheckpoint = (0, _rxStorageHelper.stackCheckpoints)([lastCheckpoint, upResult.checkpoint]); - var promise = persistToMaster(upResult.documents, (0, _index.ensureNotFalsy)(lastCheckpoint)); - promises.add(promise); - promise.catch().then(() => promises.delete(promise)); - }; - while (!state.events.canceled.getValue()) { - if (await _loop()) break; - } - - /** - * If we had conflicts during the initial sync, - * it means that we likely have new writes to the fork - * and so we have to run the initial sync again to upstream these new writes. - */ - var resolvedPromises = await Promise.all(promises); - var hadConflicts = resolvedPromises.find(r => !!r); - if (hadConflicts) { - await upstreamInitialSync(); - } else if (!state.firstSyncDone.up.getValue() && !state.events.canceled.getValue()) { - state.firstSyncDone.up.next(true); - } - } - - /** - * Takes all open tasks an processes them at once. - */ - function processTasks() { - if (state.events.canceled.getValue() || openTasks.length === 0) { - state.events.active.up.next(false); - return; - } - state.stats.up.processTasks = state.stats.up.processTasks + 1; - state.events.active.up.next(true); - state.streamQueue.up = state.streamQueue.up.then(() => { - /** - * Merge/filter all open tasks - */ - var docs = []; - var checkpoint = {}; - while (openTasks.length > 0) { - var taskWithTime = (0, _index.ensureNotFalsy)(openTasks.shift()); - /** - * If the task came in before the last time the initial sync fetching - * has run, we can ignore the task because the initial sync already processed - * these documents. 
- */ - if (taskWithTime.time < initialSyncStartTime) { - continue; - } - (0, _index.appendToArray)(docs, taskWithTime.task.events.map(r => { - return r.documentData; - })); - checkpoint = (0, _rxStorageHelper.stackCheckpoints)([checkpoint, taskWithTime.task.checkpoint]); - } - var promise = docs.length === 0 ? _index.PROMISE_RESOLVE_FALSE : persistToMaster(docs, checkpoint); - return promise.then(() => { - if (openTasks.length === 0) { - state.events.active.up.next(false); - } else { - processTasks(); - } - }); - }); - } - - /** - * Returns true if had conflicts, - * false if not. - */ - function persistToMaster(docs, checkpoint) { - state.stats.up.persistToMaster = state.stats.up.persistToMaster + 1; - - /** - * Add the new docs to the non-persistent list - */ - docs.forEach(docData => { - var docId = docData[state.primaryPath]; - nonPersistedFromMaster.docs[docId] = docData; - }); - nonPersistedFromMaster.checkpoint = checkpoint; - persistenceQueue = persistenceQueue.then(async () => { - if (state.events.canceled.getValue()) { - return false; - } - var upDocsById = nonPersistedFromMaster.docs; - nonPersistedFromMaster.docs = {}; - var useCheckpoint = nonPersistedFromMaster.checkpoint; - var docIds = Object.keys(upDocsById); - if (docIds.length === 0) { - return false; - } - var assumedMasterState = await (0, _metaInstance.getAssumedMasterState)(state, docIds); - var writeRowsToMaster = {}; - var writeRowsToMasterIds = []; - var writeRowsToMeta = {}; - var forkStateById = {}; - await Promise.all(docIds.map(async docId => { - var fullDocData = upDocsById[docId]; - forkStateById[docId] = fullDocData; - var docData = (0, _helper.writeDocToDocState)(fullDocData, state.hasAttachments, !!state.input.keepMeta); - var assumedMasterDoc = assumedMasterState[docId]; - - /** - * If the master state is equal to the - * fork state, we can assume that the document state is already - * replicated. 
- */ - if (assumedMasterDoc && - // if the isResolvedConflict is correct, we do not have to compare the documents. - assumedMasterDoc.metaDocument.isResolvedConflict !== fullDocData._rev && (await state.input.conflictHandler({ - realMasterState: assumedMasterDoc.docData, - newDocumentState: docData - }, 'upstream-check-if-equal')).isEqual || ( - /** - * If the master works with _rev fields, - * we use that to check if our current doc state - * is different from the assumedMasterDoc. - */ - - assumedMasterDoc && assumedMasterDoc.docData._rev && (0, _index.getHeightOfRevision)(fullDocData._rev) === fullDocData._meta[state.input.identifier])) { - return; - } - writeRowsToMasterIds.push(docId); - writeRowsToMaster[docId] = { - assumedMasterState: assumedMasterDoc ? assumedMasterDoc.docData : undefined, - newDocumentState: docData - }; - writeRowsToMeta[docId] = await (0, _metaInstance.getMetaWriteRow)(state, docData, assumedMasterDoc ? assumedMasterDoc.metaDocument : undefined); - })); - if (writeRowsToMasterIds.length === 0) { - return false; - } - var writeRowsArray = Object.values(writeRowsToMaster); - var conflictIds = new Set(); - var conflictsById = {}; - - /** - * To always respect the push.batchSize, - * we have to split the write rows into batches - * to ensure that replicationHandler.masterWrite() is never - * called with more documents than what the batchSize limits. 
- */ - var writeBatches = (0, _index.batchArray)(writeRowsArray, state.input.pushBatchSize); - await Promise.all(writeBatches.map(async writeBatch => { - // enhance docs with attachments - if (state.hasAttachments) { - await Promise.all(writeBatch.map(async row => { - row.newDocumentState = await (0, _index2.fillWriteDataForAttachmentsChange)(state.primaryPath, state.input.forkInstance, (0, _index.clone)(row.newDocumentState), row.assumedMasterState); - })); - } - var masterWriteResult = await replicationHandler.masterWrite(writeBatch); - masterWriteResult.forEach(conflictDoc => { - var id = conflictDoc[state.primaryPath]; - conflictIds.add(id); - conflictsById[id] = conflictDoc; - }); - })); - var useWriteRowsToMeta = []; - writeRowsToMasterIds.forEach(docId => { - if (!conflictIds.has(docId)) { - state.events.processed.up.next(writeRowsToMaster[docId]); - useWriteRowsToMeta.push(writeRowsToMeta[docId]); - } - }); - if (state.events.canceled.getValue()) { - return false; - } - if (useWriteRowsToMeta.length > 0) { - await state.input.metaInstance.bulkWrite((0, _helper.stripAttachmentsDataFromMetaWriteRows)(state, useWriteRowsToMeta), 'replication-up-write-meta'); - // TODO what happens when we have conflicts here? - } - - /** - * Resolve conflicts by writing a new document - * state to the fork instance and the 'real' master state - * to the meta instance. 
- * Non-409 errors will be detected by resolveConflictError() - */ - var hadConflictWrites = false; - if (conflictIds.size > 0) { - state.stats.up.persistToMasterHadConflicts = state.stats.up.persistToMasterHadConflicts + 1; - var conflictWriteFork = []; - var conflictWriteMeta = {}; - await Promise.all(Object.entries(conflictsById).map(([docId, realMasterState]) => { - var writeToMasterRow = writeRowsToMaster[docId]; - var input = { - newDocumentState: writeToMasterRow.newDocumentState, - assumedMasterState: writeToMasterRow.assumedMasterState, - realMasterState - }; - return (0, _conflicts.resolveConflictError)(state, input, forkStateById[docId]).then(async resolved => { - if (resolved) { - state.events.resolvedConflicts.next({ - input, - output: resolved.output - }); - conflictWriteFork.push({ - previous: forkStateById[docId], - document: resolved.resolvedDoc - }); - var assumedMasterDoc = assumedMasterState[docId]; - conflictWriteMeta[docId] = await (0, _metaInstance.getMetaWriteRow)(state, (0, _index.ensureNotFalsy)(realMasterState), assumedMasterDoc ? assumedMasterDoc.metaDocument : undefined, resolved.resolvedDoc._rev); - } - }); - })); - if (conflictWriteFork.length > 0) { - hadConflictWrites = true; - state.stats.up.persistToMasterConflictWrites = state.stats.up.persistToMasterConflictWrites + 1; - var forkWriteResult = await state.input.forkInstance.bulkWrite(conflictWriteFork, 'replication-up-write-conflict'); - /** - * Errors in the forkWriteResult must not be handled - * because they have been caused by a write to the forkInstance - * in between which will anyway trigger a new upstream cycle - * that will then resolved the conflict again. 
- */ - var useMetaWrites = []; - forkWriteResult.success.forEach(docData => { - var docId = docData[state.primaryPath]; - useMetaWrites.push(conflictWriteMeta[docId]); - }); - if (useMetaWrites.length > 0) { - await state.input.metaInstance.bulkWrite((0, _helper.stripAttachmentsDataFromMetaWriteRows)(state, useMetaWrites), 'replication-up-write-conflict-meta'); - } - // TODO what to do with conflicts while writing to the metaInstance? - } - } - - /** - * For better performance we do not await checkpoint writes, - * but to ensure order on parallel checkpoint writes, - * we have to use a queue. - */ - (0, _checkpoint.setCheckpoint)(state, 'up', useCheckpoint); - return hadConflictWrites; - }).catch(unhandledError => { - state.events.error.next(unhandledError); - return false; - }); - return persistenceQueue; - } -} -//# sourceMappingURL=upstream.js.map \ No newline at end of file diff --git a/dist/cjs/replication-protocol/upstream.js.map b/dist/cjs/replication-protocol/upstream.js.map deleted file mode 100644 index e2bca641e02..00000000000 --- a/dist/cjs/replication-protocol/upstream.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"upstream.js","names":["_rxjs","require","_rxStorageHelper","_index","_checkpoint","_conflicts","_helper","_metaInstance","_index2","startReplicationUpstream","state","input","initialCheckpoint","upstream","checkpointDoc","getLastCheckpointDoc","setCheckpoint","replicationHandler","streamQueue","up","then","upstreamInitialSync","processTasks","timer","initialSyncStartTime","openTasks","persistenceQueue","PROMISE_RESOLVE_FALSE","nonPersistedFromMaster","docs","sub","forkInstance","changeStream","subscribe","eventBulk","context","downstreamBulkWriteFlag","stats","forkChangeStreamEmit","push","task","time","events","active","getValue","next","waitBeforePersist","firstValueFrom","canceled","pipe","filter","unsubscribe","checkpointQueue","lastCheckpoint","promises","Set","_loop","size","Promise","race","Array","from","upResult","getChangedDocumentsSince","pushBatchSize","documents","length","stackCheckpoints","checkpoint","promise","persistToMaster","ensureNotFalsy","add","catch","delete","resolvedPromises","all","hadConflicts","find","r","firstSyncDone","taskWithTime","shift","appendToArray","map","documentData","forEach","docData","docId","primaryPath","upDocsById","useCheckpoint","docIds","Object","keys","assumedMasterState","getAssumedMasterState","writeRowsToMaster","writeRowsToMasterIds","writeRowsToMeta","forkStateById","fullDocData","writeDocToDocState","hasAttachments","keepMeta","assumedMasterDoc","metaDocument","isResolvedConflict","_rev","conflictHandler","realMasterState","newDocumentState","isEqual","getHeightOfRevision","_meta","identifier","undefined","getMetaWriteRow","writeRowsArray","values","conflictIds","conflictsById","writeBatches","batchArray","writeBatch","row","fillWriteDataForAttachmentsChange","clone","masterWriteResult","masterWrite","conflictDoc","id","useWriteRowsToMeta","has","processed","metaInstance","bulkWrite","stripAttachmentsDataFromMetaWriteRows","hadConflictWrites","persistToMasterHadConflicts","conflictWriteFo
rk","conflictWriteMeta","entries","writeToMasterRow","resolveConflictError","resolved","resolvedConflicts","output","previous","document","resolvedDoc","persistToMasterConflictWrites","forkWriteResult","useMetaWrites","success","unhandledError","error"],"sources":["../../../src/replication-protocol/upstream.ts"],"sourcesContent":["import { firstValueFrom, filter } from 'rxjs';\nimport {\n getChangedDocumentsSince,\n stackCheckpoints\n} from '../rx-storage-helper.ts';\nimport type {\n BulkWriteRow,\n BulkWriteRowById,\n ById,\n EventBulk,\n RxDocumentData,\n RxReplicationWriteToMasterRow,\n RxStorageChangeEvent,\n RxStorageInstanceReplicationState,\n RxStorageReplicationMeta,\n WithDeleted\n} from '../types/index.d.ts';\nimport {\n appendToArray,\n batchArray,\n clone,\n ensureNotFalsy,\n getHeightOfRevision,\n PROMISE_RESOLVE_FALSE\n} from '../plugins/utils/index.ts';\nimport {\n getLastCheckpointDoc,\n setCheckpoint\n} from './checkpoint.ts';\nimport {\n resolveConflictError\n} from './conflicts.ts';\nimport {\n stripAttachmentsDataFromMetaWriteRows,\n writeDocToDocState\n} from './helper.ts';\nimport {\n getAssumedMasterState,\n getMetaWriteRow\n} from './meta-instance.ts';\nimport { fillWriteDataForAttachmentsChange } from '../plugins/attachments/index.ts';\n\n/**\n * Writes all document changes from the fork to the master.\n * The upstream runs on two modes:\n * - For initial replication, a checkpoint-iteration is used\n * - For ongoing local writes, we just subscribe to the changeStream of the fork.\n * In contrast to the master, the fork can be assumed to never loose connection,\n * so we do not have to prepare for missed out events.\n */\nexport async function startReplicationUpstream(\n state: RxStorageInstanceReplicationState\n) {\n if (\n state.input.initialCheckpoint &&\n state.input.initialCheckpoint.upstream\n ) {\n const checkpointDoc = await getLastCheckpointDoc(state, 'up');\n if (!checkpointDoc) {\n await setCheckpoint(\n state,\n 'up',\n 
state.input.initialCheckpoint.upstream\n );\n }\n }\n\n const replicationHandler = state.input.replicationHandler;\n state.streamQueue.up = state.streamQueue.up.then(() => {\n return upstreamInitialSync().then(() => {\n processTasks();\n });\n });\n\n // used to detect which tasks etc can in it at which order.\n let timer = 0;\n let initialSyncStartTime = -1;\n\n type Task = EventBulk, any>;\n type TaskWithTime = {\n task: Task;\n time: number;\n };\n const openTasks: TaskWithTime[] = [];\n let persistenceQueue: Promise = PROMISE_RESOLVE_FALSE;\n const nonPersistedFromMaster: {\n checkpoint?: CheckpointType;\n docs: ById>;\n } = {\n docs: {}\n };\n\n const sub = state.input.forkInstance.changeStream()\n .subscribe(async (eventBulk) => {\n // ignore writes that came from the downstream\n if (eventBulk.context === await state.downstreamBulkWriteFlag) {\n return;\n }\n\n state.stats.up.forkChangeStreamEmit = state.stats.up.forkChangeStreamEmit + 1;\n openTasks.push({\n task: eventBulk,\n time: timer++\n });\n if (!state.events.active.up.getValue()) {\n state.events.active.up.next(true);\n }\n if (state.input.waitBeforePersist) {\n return state.input.waitBeforePersist()\n .then(() => processTasks());\n } else {\n return processTasks();\n }\n });\n firstValueFrom(\n state.events.canceled.pipe(\n filter(canceled => !!canceled)\n )\n ).then(() => sub.unsubscribe());\n\n\n async function upstreamInitialSync() {\n state.stats.up.upstreamInitialSync = state.stats.up.upstreamInitialSync + 1;\n if (state.events.canceled.getValue()) {\n return;\n }\n\n state.checkpointQueue = state.checkpointQueue.then(() => getLastCheckpointDoc(state, 'up'));\n let lastCheckpoint: CheckpointType = await state.checkpointQueue;\n\n const promises: Set> = new Set();\n\n while (!state.events.canceled.getValue()) {\n initialSyncStartTime = timer++;\n\n /**\n * Throttle the calls to\n * forkInstance.getChangedDocumentsSince() so that\n * if the pushing to the remote is slower compared to the\n * 
pulling out of forkInstance, we do not block the UI too much\n * and have a big memory spike with all forkInstance documents.\n */\n if (promises.size > 3) {\n await Promise.race(Array.from(promises));\n }\n\n const upResult = await getChangedDocumentsSince(\n state.input.forkInstance,\n state.input.pushBatchSize,\n lastCheckpoint\n );\n if (upResult.documents.length === 0) {\n break;\n }\n\n lastCheckpoint = stackCheckpoints([lastCheckpoint, upResult.checkpoint]);\n\n const promise = persistToMaster(\n upResult.documents,\n ensureNotFalsy(lastCheckpoint)\n );\n promises.add(promise);\n promise.catch().then(() => promises.delete(promise));\n }\n\n /**\n * If we had conflicts during the initial sync,\n * it means that we likely have new writes to the fork\n * and so we have to run the initial sync again to upstream these new writes.\n */\n const resolvedPromises = await Promise.all(promises);\n const hadConflicts = resolvedPromises.find(r => !!r);\n if (hadConflicts) {\n await upstreamInitialSync();\n } else if (\n !state.firstSyncDone.up.getValue() &&\n !state.events.canceled.getValue()\n ) {\n state.firstSyncDone.up.next(true);\n }\n }\n\n\n /**\n * Takes all open tasks an processes them at once.\n */\n function processTasks() {\n if (\n state.events.canceled.getValue() ||\n openTasks.length === 0\n ) {\n state.events.active.up.next(false);\n return;\n }\n state.stats.up.processTasks = state.stats.up.processTasks + 1;\n state.events.active.up.next(true);\n state.streamQueue.up = state.streamQueue.up.then(() => {\n /**\n * Merge/filter all open tasks\n */\n const docs: RxDocumentData[] = [];\n let checkpoint: CheckpointType = {} as any;\n while (openTasks.length > 0) {\n const taskWithTime = ensureNotFalsy(openTasks.shift());\n /**\n * If the task came in before the last time the initial sync fetching\n * has run, we can ignore the task because the initial sync already processed\n * these documents.\n */\n if (taskWithTime.time < initialSyncStartTime) {\n 
continue;\n }\n appendToArray(\n docs,\n taskWithTime.task.events.map(r => {\n return r.documentData as any;\n })\n );\n checkpoint = stackCheckpoints([checkpoint, taskWithTime.task.checkpoint]);\n }\n\n const promise = docs.length === 0 ? PROMISE_RESOLVE_FALSE : persistToMaster(\n docs,\n checkpoint\n );\n return promise.then(() => {\n if (openTasks.length === 0) {\n state.events.active.up.next(false);\n } else {\n processTasks();\n }\n });\n });\n }\n\n /**\n * Returns true if had conflicts,\n * false if not.\n */\n function persistToMaster(\n docs: RxDocumentData[],\n checkpoint: CheckpointType\n ): Promise {\n state.stats.up.persistToMaster = state.stats.up.persistToMaster + 1;\n\n /**\n * Add the new docs to the non-persistent list\n */\n docs.forEach(docData => {\n const docId: string = (docData as any)[state.primaryPath];\n nonPersistedFromMaster.docs[docId] = docData;\n });\n nonPersistedFromMaster.checkpoint = checkpoint;\n\n persistenceQueue = persistenceQueue.then(async () => {\n if (state.events.canceled.getValue()) {\n return false;\n }\n\n const upDocsById: ById> = nonPersistedFromMaster.docs;\n nonPersistedFromMaster.docs = {};\n const useCheckpoint = nonPersistedFromMaster.checkpoint;\n const docIds = Object.keys(upDocsById);\n if (docIds.length === 0) {\n return false;\n }\n\n const assumedMasterState = await getAssumedMasterState(\n state,\n docIds\n );\n\n const writeRowsToMaster: ById> = {};\n const writeRowsToMasterIds: string[] = [];\n const writeRowsToMeta: BulkWriteRowById> = {};\n const forkStateById: ById> = {};\n\n await Promise.all(\n docIds.map(async (docId) => {\n const fullDocData: RxDocumentData = upDocsById[docId];\n forkStateById[docId] = fullDocData;\n const docData: WithDeleted = writeDocToDocState(fullDocData, state.hasAttachments, !!state.input.keepMeta);\n const assumedMasterDoc = assumedMasterState[docId];\n\n /**\n * If the master state is equal to the\n * fork state, we can assume that the document state is already\n * 
replicated.\n */\n if (\n (\n assumedMasterDoc &&\n // if the isResolvedConflict is correct, we do not have to compare the documents.\n assumedMasterDoc.metaDocument.isResolvedConflict !== fullDocData._rev\n &&\n (await state.input.conflictHandler({\n realMasterState: assumedMasterDoc.docData,\n newDocumentState: docData\n }, 'upstream-check-if-equal')).isEqual\n )\n ||\n /**\n * If the master works with _rev fields,\n * we use that to check if our current doc state\n * is different from the assumedMasterDoc.\n */\n (\n assumedMasterDoc &&\n (assumedMasterDoc.docData as any)._rev &&\n getHeightOfRevision(fullDocData._rev) === fullDocData._meta[state.input.identifier]\n )\n ) {\n return;\n }\n\n writeRowsToMasterIds.push(docId);\n\n writeRowsToMaster[docId] = {\n assumedMasterState: assumedMasterDoc ? assumedMasterDoc.docData : undefined,\n newDocumentState: docData\n };\n writeRowsToMeta[docId] = await getMetaWriteRow(\n state,\n docData,\n assumedMasterDoc ? assumedMasterDoc.metaDocument : undefined\n );\n })\n );\n\n if (writeRowsToMasterIds.length === 0) {\n return false;\n }\n\n\n const writeRowsArray = Object.values(writeRowsToMaster);\n const conflictIds: Set = new Set();\n const conflictsById: ById> = {};\n\n /**\n * To always respect the push.batchSize,\n * we have to split the write rows into batches\n * to ensure that replicationHandler.masterWrite() is never\n * called with more documents than what the batchSize limits.\n */\n const writeBatches = batchArray(writeRowsArray, state.input.pushBatchSize);\n await Promise.all(\n writeBatches.map(async (writeBatch) => {\n\n // enhance docs with attachments\n if (state.hasAttachments) {\n await Promise.all(\n writeBatch.map(async (row) => {\n row.newDocumentState = await fillWriteDataForAttachmentsChange(\n state.primaryPath,\n state.input.forkInstance,\n clone(row.newDocumentState),\n row.assumedMasterState\n );\n })\n );\n }\n const masterWriteResult = await replicationHandler.masterWrite(writeBatch);\n 
masterWriteResult.forEach(conflictDoc => {\n const id = (conflictDoc as any)[state.primaryPath];\n conflictIds.add(id);\n conflictsById[id] = conflictDoc;\n });\n })\n );\n\n const useWriteRowsToMeta: BulkWriteRow>[] = [];\n\n writeRowsToMasterIds.forEach(docId => {\n if (!conflictIds.has(docId)) {\n state.events.processed.up.next(writeRowsToMaster[docId]);\n useWriteRowsToMeta.push(writeRowsToMeta[docId]);\n }\n });\n\n if (state.events.canceled.getValue()) {\n return false;\n }\n\n if (useWriteRowsToMeta.length > 0) {\n await state.input.metaInstance.bulkWrite(\n stripAttachmentsDataFromMetaWriteRows(state, useWriteRowsToMeta),\n 'replication-up-write-meta'\n );\n // TODO what happens when we have conflicts here?\n }\n\n /**\n * Resolve conflicts by writing a new document\n * state to the fork instance and the 'real' master state\n * to the meta instance.\n * Non-409 errors will be detected by resolveConflictError()\n */\n let hadConflictWrites = false;\n if (conflictIds.size > 0) {\n state.stats.up.persistToMasterHadConflicts = state.stats.up.persistToMasterHadConflicts + 1;\n const conflictWriteFork: BulkWriteRow[] = [];\n const conflictWriteMeta: BulkWriteRowById> = {};\n await Promise.all(\n Object\n .entries(conflictsById)\n .map(([docId, realMasterState]) => {\n const writeToMasterRow = writeRowsToMaster[docId];\n const input = {\n newDocumentState: writeToMasterRow.newDocumentState,\n assumedMasterState: writeToMasterRow.assumedMasterState,\n realMasterState\n };\n return resolveConflictError(\n state,\n input,\n forkStateById[docId]\n ).then(async (resolved) => {\n if (resolved) {\n state.events.resolvedConflicts.next({\n input,\n output: resolved.output\n });\n conflictWriteFork.push({\n previous: forkStateById[docId],\n document: resolved.resolvedDoc\n });\n const assumedMasterDoc = assumedMasterState[docId];\n conflictWriteMeta[docId] = await getMetaWriteRow(\n state,\n ensureNotFalsy(realMasterState),\n assumedMasterDoc ? 
assumedMasterDoc.metaDocument : undefined,\n resolved.resolvedDoc._rev\n );\n }\n });\n })\n );\n\n if (conflictWriteFork.length > 0) {\n hadConflictWrites = true;\n\n state.stats.up.persistToMasterConflictWrites = state.stats.up.persistToMasterConflictWrites + 1;\n const forkWriteResult = await state.input.forkInstance.bulkWrite(\n conflictWriteFork,\n 'replication-up-write-conflict'\n );\n /**\n * Errors in the forkWriteResult must not be handled\n * because they have been caused by a write to the forkInstance\n * in between which will anyway trigger a new upstream cycle\n * that will then resolved the conflict again.\n */\n const useMetaWrites: BulkWriteRow>[] = [];\n forkWriteResult.success\n .forEach(docData => {\n const docId = (docData as any)[state.primaryPath];\n useMetaWrites.push(\n conflictWriteMeta[docId]\n );\n });\n if (useMetaWrites.length > 0) {\n await state.input.metaInstance.bulkWrite(\n stripAttachmentsDataFromMetaWriteRows(state, useMetaWrites),\n 'replication-up-write-conflict-meta'\n );\n }\n // TODO what to do with conflicts while writing to the metaInstance?\n }\n }\n\n /**\n * For better performance we do not await checkpoint writes,\n * but to ensure order on parallel checkpoint writes,\n * we have to use a queue.\n */\n setCheckpoint(\n state,\n 'up',\n useCheckpoint\n );\n\n return hadConflictWrites;\n }).catch(unhandledError => {\n state.events.error.next(unhandledError);\n return false;\n });\n\n return persistenceQueue;\n 
}\n}\n"],"mappings":";;;;;;AAAA,IAAAA,KAAA,GAAAC,OAAA;AACA,IAAAC,gBAAA,GAAAD,OAAA;AAgBA,IAAAE,MAAA,GAAAF,OAAA;AAQA,IAAAG,WAAA,GAAAH,OAAA;AAIA,IAAAI,UAAA,GAAAJ,OAAA;AAGA,IAAAK,OAAA,GAAAL,OAAA;AAIA,IAAAM,aAAA,GAAAN,OAAA;AAIA,IAAAO,OAAA,GAAAP,OAAA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACO,eAAeQ,wBAAwBA,CAC1CC,KAAmD,EACrD;EACE,IACIA,KAAK,CAACC,KAAK,CAACC,iBAAiB,IAC7BF,KAAK,CAACC,KAAK,CAACC,iBAAiB,CAACC,QAAQ,EACxC;IACE,IAAMC,aAAa,GAAG,MAAM,IAAAC,gCAAoB,EAACL,KAAK,EAAE,IAAI,CAAC;IAC7D,IAAI,CAACI,aAAa,EAAE;MAChB,MAAM,IAAAE,yBAAa,EACfN,KAAK,EACL,IAAI,EACJA,KAAK,CAACC,KAAK,CAACC,iBAAiB,CAACC,QAClC,CAAC;IACL;EACJ;EAEA,IAAMI,kBAAkB,GAAGP,KAAK,CAACC,KAAK,CAACM,kBAAkB;EACzDP,KAAK,CAACQ,WAAW,CAACC,EAAE,GAAGT,KAAK,CAACQ,WAAW,CAACC,EAAE,CAACC,IAAI,CAAC,MAAM;IACnD,OAAOC,mBAAmB,CAAC,CAAC,CAACD,IAAI,CAAC,MAAM;MACpCE,YAAY,CAAC,CAAC;IAClB,CAAC,CAAC;EACN,CAAC,CAAC;;EAEF;EACA,IAAIC,KAAK,GAAG,CAAC;EACb,IAAIC,oBAAoB,GAAG,CAAC,CAAC;EAO7B,IAAMC,SAAyB,GAAG,EAAE;EACpC,IAAIC,gBAAkC,GAAGC,4BAAqB;EAC9D,IAAMC,sBAGL,GAAG;IACAC,IAAI,EAAE,CAAC;EACX,CAAC;EAED,IAAMC,GAAG,GAAGpB,KAAK,CAACC,KAAK,CAACoB,YAAY,CAACC,YAAY,CAAC,CAAC,CAC9CC,SAAS,CAAC,MAAOC,SAAS,IAAK;IAC5B;IACA,IAAIA,SAAS,CAACC,OAAO,MAAK,MAAMzB,KAAK,CAAC0B,uBAAuB,GAAE;MAC3D;IACJ;IAEA1B,KAAK,CAAC2B,KAAK,CAAClB,EAAE,CAACmB,oBAAoB,GAAG5B,KAAK,CAAC2B,KAAK,CAAClB,EAAE,CAACmB,oBAAoB,GAAG,CAAC;IAC7Eb,SAAS,CAACc,IAAI,CAAC;MACXC,IAAI,EAAEN,SAAS;MACfO,IAAI,EAAElB,KAAK;IACf,CAAC,CAAC;IACF,IAAI,CAACb,KAAK,CAACgC,MAAM,CAACC,MAAM,CAACxB,EAAE,CAACyB,QAAQ,CAAC,CAAC,EAAE;MACpClC,KAAK,CAACgC,MAAM,CAACC,MAAM,CAACxB,EAAE,CAAC0B,IAAI,CAAC,IAAI,CAAC;IACrC;IACA,IAAInC,KAAK,CAACC,KAAK,CAACmC,iBAAiB,EAAE;MAC/B,OAAOpC,KAAK,CAACC,KAAK,CAACmC,iBAAiB,CAAC,CAAC,CACjC1B,IAAI,CAAC,MAAME,YAAY,CAAC,CAAC,CAAC;IACnC,CAAC,MAAM;MACH,OAAOA,YAAY,CAAC,CAAC;IACzB;EACJ,CAAC,CAAC;EACN,IAAAyB,oBAAc,EACVrC,KAAK,CAACgC,MAAM,CAACM,QAAQ,CAACC,IAAI,CACtB,IAAAC,YAAM,EAACF,QAAQ,IAAI,CAAC,CAACA,QAAQ,CACjC,CACJ,CAAC,CAAC5B,IAAI,CAAC,MAAMU,GAAG,CAACqB,WAAW,CAAC,CAAC,CAAC;EAG/B,eAAe9B,mBAAmBA,CAAA
,EAAG;IACjCX,KAAK,CAAC2B,KAAK,CAAClB,EAAE,CAACE,mBAAmB,GAAGX,KAAK,CAAC2B,KAAK,CAAClB,EAAE,CAACE,mBAAmB,GAAG,CAAC;IAC3E,IAAIX,KAAK,CAACgC,MAAM,CAACM,QAAQ,CAACJ,QAAQ,CAAC,CAAC,EAAE;MAClC;IACJ;IAEAlC,KAAK,CAAC0C,eAAe,GAAG1C,KAAK,CAAC0C,eAAe,CAAChC,IAAI,CAAC,MAAM,IAAAL,gCAAoB,EAACL,KAAK,EAAE,IAAI,CAAC,CAAC;IAC3F,IAAI2C,cAA8B,GAAG,MAAM3C,KAAK,CAAC0C,eAAe;IAEhE,IAAME,QAA2B,GAAG,IAAIC,GAAG,CAAC,CAAC;IAAC,IAAAC,KAAA,kBAAAA,CAAA,EAEJ;MACtChC,oBAAoB,GAAGD,KAAK,EAAE;;MAE9B;AACZ;AACA;AACA;AACA;AACA;AACA;MACY,IAAI+B,QAAQ,CAACG,IAAI,GAAG,CAAC,EAAE;QACnB,MAAMC,OAAO,CAACC,IAAI,CAACC,KAAK,CAACC,IAAI,CAACP,QAAQ,CAAC,CAAC;MAC5C;MAEA,IAAMQ,QAAQ,GAAG,MAAM,IAAAC,yCAAwB,EAC3CrD,KAAK,CAACC,KAAK,CAACoB,YAAY,EACxBrB,KAAK,CAACC,KAAK,CAACqD,aAAa,EACzBX,cACJ,CAAC;MACD,IAAIS,QAAQ,CAACG,SAAS,CAACC,MAAM,KAAK,CAAC,EAAE;QAAA;MAErC;MAEAb,cAAc,GAAG,IAAAc,iCAAgB,EAAC,CAACd,cAAc,EAAES,QAAQ,CAACM,UAAU,CAAC,CAAC;MAExE,IAAMC,OAAO,GAAGC,eAAe,CAC3BR,QAAQ,CAACG,SAAS,EAClB,IAAAM,qBAAc,EAAClB,cAAc,CACjC,CAAC;MACDC,QAAQ,CAACkB,GAAG,CAACH,OAAO,CAAC;MACrBA,OAAO,CAACI,KAAK,CAAC,CAAC,CAACrD,IAAI,CAAC,MAAMkC,QAAQ,CAACoB,MAAM,CAACL,OAAO,CAAC,CAAC;IACxD,CAAC;IA/BD,OAAO,CAAC3D,KAAK,CAACgC,MAAM,CAACM,QAAQ,CAACJ,QAAQ,CAAC,CAAC;MAAA,UAAAY,KAAA,IAoBhC;IAAM;;IAad;AACR;AACA;AACA;AACA;IACQ,IAAMmB,gBAAgB,GAAG,MAAMjB,OAAO,CAACkB,GAAG,CAACtB,QAAQ,CAAC;IACpD,IAAMuB,YAAY,GAAGF,gBAAgB,CAACG,IAAI,CAACC,CAAC,IAAI,CAAC,CAACA,CAAC,CAAC;IACpD,IAAIF,YAAY,EAAE;MACd,MAAMxD,mBAAmB,CAAC,CAAC;IAC/B,CAAC,MAAM,IACH,CAACX,KAAK,CAACsE,aAAa,CAAC7D,EAAE,CAACyB,QAAQ,CAAC,CAAC,IAClC,CAAClC,KAAK,CAACgC,MAAM,CAACM,QAAQ,CAACJ,QAAQ,CAAC,CAAC,EACnC;MACElC,KAAK,CAACsE,aAAa,CAAC7D,EAAE,CAAC0B,IAAI,CAAC,IAAI,CAAC;IACrC;EACJ;;EAGA;AACJ;AACA;EACI,SAASvB,YAAYA,CAAA,EAAG;IACpB,IACIZ,KAAK,CAACgC,MAAM,CAACM,QAAQ,CAACJ,QAAQ,CAAC,CAAC,IAChCnB,SAAS,CAACyC,MAAM,KAAK,CAAC,EACxB;MACExD,KAAK,CAACgC,MAAM,CAACC,MAAM,CAACxB,EAAE,CAAC0B,IAAI,CAAC,KAAK,CAAC;MAClC;IACJ;IACAnC,KAAK,CAAC2B,KAAK,CAAClB,EAAE,CAACG,YAAY,GAAGZ,KAAK,CAAC2B,KAAK,CAAClB,EAAE,CAACG,YAAY,GAAG,CAAC;IAC7DZ,
KAAK,CAACgC,MAAM,CAACC,MAAM,CAACxB,EAAE,CAAC0B,IAAI,CAAC,IAAI,CAAC;IACjCnC,KAAK,CAACQ,WAAW,CAACC,EAAE,GAAGT,KAAK,CAACQ,WAAW,CAACC,EAAE,CAACC,IAAI,CAAC,MAAM;MACnD;AACZ;AACA;MACY,IAAMS,IAAiC,GAAG,EAAE;MAC5C,IAAIuC,UAA0B,GAAG,CAAC,CAAQ;MAC1C,OAAO3C,SAAS,CAACyC,MAAM,GAAG,CAAC,EAAE;QACzB,IAAMe,YAAY,GAAG,IAAAV,qBAAc,EAAC9C,SAAS,CAACyD,KAAK,CAAC,CAAC,CAAC;QACtD;AAChB;AACA;AACA;AACA;QACgB,IAAID,YAAY,CAACxC,IAAI,GAAGjB,oBAAoB,EAAE;UAC1C;QACJ;QACA,IAAA2D,oBAAa,EACTtD,IAAI,EACJoD,YAAY,CAACzC,IAAI,CAACE,MAAM,CAAC0C,GAAG,CAACL,CAAC,IAAI;UAC9B,OAAOA,CAAC,CAACM,YAAY;QACzB,CAAC,CACL,CAAC;QACDjB,UAAU,GAAG,IAAAD,iCAAgB,EAAC,CAACC,UAAU,EAAEa,YAAY,CAACzC,IAAI,CAAC4B,UAAU,CAAC,CAAC;MAC7E;MAEA,IAAMC,OAAO,GAAGxC,IAAI,CAACqC,MAAM,KAAK,CAAC,GAAGvC,4BAAqB,GAAG2C,eAAe,CACvEzC,IAAI,EACJuC,UACJ,CAAC;MACD,OAAOC,OAAO,CAACjD,IAAI,CAAC,MAAM;QACtB,IAAIK,SAAS,CAACyC,MAAM,KAAK,CAAC,EAAE;UACxBxD,KAAK,CAACgC,MAAM,CAACC,MAAM,CAACxB,EAAE,CAAC0B,IAAI,CAAC,KAAK,CAAC;QACtC,CAAC,MAAM;UACHvB,YAAY,CAAC,CAAC;QAClB;MACJ,CAAC,CAAC;IACN,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA;EACI,SAASgD,eAAeA,CACpBzC,IAAiC,EACjCuC,UAA0B,EACV;IAChB1D,KAAK,CAAC2B,KAAK,CAAClB,EAAE,CAACmD,eAAe,GAAG5D,KAAK,CAAC2B,KAAK,CAAClB,EAAE,CAACmD,eAAe,GAAG,CAAC;;IAEnE;AACR;AACA;IACQzC,IAAI,CAACyD,OAAO,CAACC,OAAO,IAAI;MACpB,IAAMC,KAAa,GAAID,OAAO,CAAS7E,KAAK,CAAC+E,WAAW,CAAC;MACzD7D,sBAAsB,CAACC,IAAI,CAAC2D,KAAK,CAAC,GAAGD,OAAO;IAChD,CAAC,CAAC;IACF3D,sBAAsB,CAACwC,UAAU,GAAGA,UAAU;IAE9C1C,gBAAgB,GAAGA,gBAAgB,CAACN,IAAI,CAAC,YAAY;MACjD,IAAIV,KAAK,CAACgC,MAAM,CAACM,QAAQ,CAACJ,QAAQ,CAAC,CAAC,EAAE;QAClC,OAAO,KAAK;MAChB;MAEA,IAAM8C,UAA2C,GAAG9D,sBAAsB,CAACC,IAAI;MAC/ED,sBAAsB,CAACC,IAAI,GAAG,CAAC,CAAC;MAChC,IAAM8D,aAAa,GAAG/D,sBAAsB,CAACwC,UAAU;MACvD,IAAMwB,MAAM,GAAGC,MAAM,CAACC,IAAI,CAACJ,UAAU,CAAC;MACtC,IAAIE,MAAM,CAAC1B,MAAM,KAAK,CAAC,EAAE;QACrB,OAAO,KAAK;MAChB;MAEA,IAAM6B,kBAAkB,GAAG,MAAM,IAAAC,mCAAqB,EAClDtF,KAAK,EACLkF,MACJ,CAAC;MAED,IAAMK,iBAAiE,GAAG,CAAC,CAAC;MAC5E,IAAMC,oBAA8B,GAAG,EAAE;MACzC,IAAMC,eAA2E,GAAG,CAAC,CAAC;MACtF,IAAMC,aAA8C,GAAG,CAAC,C
AAC;MAEzD,MAAM1C,OAAO,CAACkB,GAAG,CACbgB,MAAM,CAACR,GAAG,CAAC,MAAOI,KAAK,IAAK;QACxB,IAAMa,WAAsC,GAAGX,UAAU,CAACF,KAAK,CAAC;QAChEY,aAAa,CAACZ,KAAK,CAAC,GAAGa,WAAW;QAClC,IAAMd,OAA+B,GAAG,IAAAe,0BAAkB,EAACD,WAAW,EAAE3F,KAAK,CAAC6F,cAAc,EAAE,CAAC,CAAC7F,KAAK,CAACC,KAAK,CAAC6F,QAAQ,CAAC;QACrH,IAAMC,gBAAgB,GAAGV,kBAAkB,CAACP,KAAK,CAAC;;QAElD;AACpB;AACA;AACA;AACA;QACoB,IAEQiB,gBAAgB;QAChB;QACAA,gBAAgB,CAACC,YAAY,CAACC,kBAAkB,KAAKN,WAAW,CAACO,IAAI,IAErE,CAAC,MAAMlG,KAAK,CAACC,KAAK,CAACkG,eAAe,CAAC;UAC/BC,eAAe,EAAEL,gBAAgB,CAAClB,OAAO;UACzCwB,gBAAgB,EAAExB;QACtB,CAAC,EAAE,yBAAyB,CAAC,EAAEyB,OAAO;QAG1C;AACxB;AACA;AACA;AACA;;QAE4BP,gBAAgB,IACfA,gBAAgB,CAAClB,OAAO,CAASqB,IAAI,IACtC,IAAAK,0BAAmB,EAACZ,WAAW,CAACO,IAAI,CAAC,KAAKP,WAAW,CAACa,KAAK,CAACxG,KAAK,CAACC,KAAK,CAACwG,UAAU,CAAC,CACtF,EACH;UACE;QACJ;QAEAjB,oBAAoB,CAAC3D,IAAI,CAACiD,KAAK,CAAC;QAEhCS,iBAAiB,CAACT,KAAK,CAAC,GAAG;UACvBO,kBAAkB,EAAEU,gBAAgB,GAAGA,gBAAgB,CAAClB,OAAO,GAAG6B,SAAS;UAC3EL,gBAAgB,EAAExB;QACtB,CAAC;QACDY,eAAe,CAACX,KAAK,CAAC,GAAG,MAAM,IAAA6B,6BAAe,EAC1C3G,KAAK,EACL6E,OAAO,EACPkB,gBAAgB,GAAGA,gBAAgB,CAACC,YAAY,GAAGU,SACvD,CAAC;MACL,CAAC,CACL,CAAC;MAED,IAAIlB,oBAAoB,CAAChC,MAAM,KAAK,CAAC,EAAE;QACnC,OAAO,KAAK;MAChB;MAGA,IAAMoD,cAAc,GAAGzB,MAAM,CAAC0B,MAAM,CAACtB,iBAAiB,CAAC;MACvD,IAAMuB,WAAwB,GAAG,IAAIjE,GAAG,CAAC,CAAC;MAC1C,IAAMkE,aAA2C,GAAG,CAAC,CAAC;;MAEtD;AACZ;AACA;AACA;AACA;AACA;MACY,IAAMC,YAAY,GAAG,IAAAC,iBAAU,EAACL,cAAc,EAAE5G,KAAK,CAACC,KAAK,CAACqD,aAAa,CAAC;MAC1E,MAAMN,OAAO,CAACkB,GAAG,CACb8C,YAAY,CAACtC,GAAG,CAAC,MAAOwC,UAAU,IAAK;QAEnC;QACA,IAAIlH,KAAK,CAAC6F,cAAc,EAAE;UACtB,MAAM7C,OAAO,CAACkB,GAAG,CACbgD,UAAU,CAACxC,GAAG,CAAC,MAAOyC,GAAG,IAAK;YAC1BA,GAAG,CAACd,gBAAgB,GAAG,MAAM,IAAAe,yCAAiC,EAC1DpH,KAAK,CAAC+E,WAAW,EACjB/E,KAAK,CAACC,KAAK,CAACoB,YAAY,EACxB,IAAAgG,YAAK,EAACF,GAAG,CAACd,gBAAgB,CAAC,EAC3Bc,GAAG,CAAC9B,kBACR,CAAC;UACL,CAAC,CACL,CAAC;QACL;QACA,IAAMiC,iBAAiB,GAAG,MAAM/G,kBAAkB,CAACgH,WAAW,CAACL,UAAU,CAAC;QAC1EI,iBAAiB,CAAC1C,OAAO,CAAC4C,WAAW,IAAI;UACrC,IAAMC,EAAE,GAAID,WAAW,CAASxH,KAAK
,CAAC+E,WAAW,CAAC;UAClD+B,WAAW,CAAChD,GAAG,CAAC2D,EAAE,CAAC;UACnBV,aAAa,CAACU,EAAE,CAAC,GAAGD,WAAW;QACnC,CAAC,CAAC;MACN,CAAC,CACL,CAAC;MAED,IAAME,kBAA4E,GAAG,EAAE;MAEvFlC,oBAAoB,CAACZ,OAAO,CAACE,KAAK,IAAI;QAClC,IAAI,CAACgC,WAAW,CAACa,GAAG,CAAC7C,KAAK,CAAC,EAAE;UACzB9E,KAAK,CAACgC,MAAM,CAAC4F,SAAS,CAACnH,EAAE,CAAC0B,IAAI,CAACoD,iBAAiB,CAACT,KAAK,CAAC,CAAC;UACxD4C,kBAAkB,CAAC7F,IAAI,CAAC4D,eAAe,CAACX,KAAK,CAAC,CAAC;QACnD;MACJ,CAAC,CAAC;MAEF,IAAI9E,KAAK,CAACgC,MAAM,CAACM,QAAQ,CAACJ,QAAQ,CAAC,CAAC,EAAE;QAClC,OAAO,KAAK;MAChB;MAEA,IAAIwF,kBAAkB,CAAClE,MAAM,GAAG,CAAC,EAAE;QAC/B,MAAMxD,KAAK,CAACC,KAAK,CAAC4H,YAAY,CAACC,SAAS,CACpC,IAAAC,6CAAqC,EAAC/H,KAAK,EAAE0H,kBAAkB,CAAC,EAChE,2BACJ,CAAC;QACD;MACJ;;MAEA;AACZ;AACA;AACA;AACA;AACA;MACY,IAAIM,iBAAiB,GAAG,KAAK;MAC7B,IAAIlB,WAAW,CAAC/D,IAAI,GAAG,CAAC,EAAE;QACtB/C,KAAK,CAAC2B,KAAK,CAAClB,EAAE,CAACwH,2BAA2B,GAAGjI,KAAK,CAAC2B,KAAK,CAAClB,EAAE,CAACwH,2BAA2B,GAAG,CAAC;QAC3F,IAAMC,iBAA4C,GAAG,EAAE;QACvD,IAAMC,iBAA6E,GAAG,CAAC,CAAC;QACxF,MAAMnF,OAAO,CAACkB,GAAG,CACbiB,MAAM,CACDiD,OAAO,CAACrB,aAAa,CAAC,CACtBrC,GAAG,CAAC,CAAC,CAACI,KAAK,EAAEsB,eAAe,CAAC,KAAK;UAC/B,IAAMiC,gBAAgB,GAAG9C,iBAAiB,CAACT,KAAK,CAAC;UACjD,IAAM7E,KAAK,GAAG;YACVoG,gBAAgB,EAAEgC,gBAAgB,CAAChC,gBAAgB;YACnDhB,kBAAkB,EAAEgD,gBAAgB,CAAChD,kBAAkB;YACvDe;UACJ,CAAC;UACD,OAAO,IAAAkC,+BAAoB,EACvBtI,KAAK,EACLC,KAAK,EACLyF,aAAa,CAACZ,KAAK,CACvB,CAAC,CAACpE,IAAI,CAAC,MAAO6H,QAAQ,IAAK;YACvB,IAAIA,QAAQ,EAAE;cACVvI,KAAK,CAACgC,MAAM,CAACwG,iBAAiB,CAACrG,IAAI,CAAC;gBAChClC,KAAK;gBACLwI,MAAM,EAAEF,QAAQ,CAACE;cACrB,CAAC,CAAC;cACFP,iBAAiB,CAACrG,IAAI,CAAC;gBACnB6G,QAAQ,EAAEhD,aAAa,CAACZ,KAAK,CAAC;gBAC9B6D,QAAQ,EAAEJ,QAAQ,CAACK;cACvB,CAAC,CAAC;cACF,IAAM7C,gBAAgB,GAAGV,kBAAkB,CAACP,KAAK,CAAC;cAClDqD,iBAAiB,CAACrD,KAAK,CAAC,GAAG,MAAM,IAAA6B,6BAAe,EAC5C3G,KAAK,EACL,IAAA6D,qBAAc,EAACuC,eAAe,CAAC,EAC/BL,gBAAgB,GAAGA,gBAAgB,CAACC,YAAY,GAAGU,SAAS,EAC5D6B,QAAQ,CAACK,WAAW,CAAC1C,IACzB,CAAC;YACL;UACJ,CAAC,CAAC;QACN,CAAC,CACT,CAAC;QAED,IAAIgC,iBAAiB,CAAC1E,MAAM,GAAG,CAAC,EAAE;UAC9BwE,iBAAiB,GAAG
,IAAI;UAExBhI,KAAK,CAAC2B,KAAK,CAAClB,EAAE,CAACoI,6BAA6B,GAAG7I,KAAK,CAAC2B,KAAK,CAAClB,EAAE,CAACoI,6BAA6B,GAAG,CAAC;UAC/F,IAAMC,eAAe,GAAG,MAAM9I,KAAK,CAACC,KAAK,CAACoB,YAAY,CAACyG,SAAS,CAC5DI,iBAAiB,EACjB,+BACJ,CAAC;UACD;AACpB;AACA;AACA;AACA;AACA;UACoB,IAAMa,aAAuE,GAAG,EAAE;UAClFD,eAAe,CAACE,OAAO,CAClBpE,OAAO,CAACC,OAAO,IAAI;YAChB,IAAMC,KAAK,GAAID,OAAO,CAAS7E,KAAK,CAAC+E,WAAW,CAAC;YACjDgE,aAAa,CAAClH,IAAI,CACdsG,iBAAiB,CAACrD,KAAK,CAC3B,CAAC;UACL,CAAC,CAAC;UACN,IAAIiE,aAAa,CAACvF,MAAM,GAAG,CAAC,EAAE;YAC1B,MAAMxD,KAAK,CAACC,KAAK,CAAC4H,YAAY,CAACC,SAAS,CACpC,IAAAC,6CAAqC,EAAC/H,KAAK,EAAE+I,aAAa,CAAC,EAC3D,oCACJ,CAAC;UACL;UACA;QACJ;MACJ;;MAEA;AACZ;AACA;AACA;AACA;MACY,IAAAzI,yBAAa,EACTN,KAAK,EACL,IAAI,EACJiF,aACJ,CAAC;MAED,OAAO+C,iBAAiB;IAC5B,CAAC,CAAC,CAACjE,KAAK,CAACkF,cAAc,IAAI;MACvBjJ,KAAK,CAACgC,MAAM,CAACkH,KAAK,CAAC/G,IAAI,CAAC8G,cAAc,CAAC;MACvC,OAAO,KAAK;IAChB,CAAC,CAAC;IAEF,OAAOjI,gBAAgB;EAC3B;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/rx-change-event.js b/dist/cjs/rx-change-event.js deleted file mode 100644 index bf62b8960af..00000000000 --- a/dist/cjs/rx-change-event.js +++ /dev/null @@ -1,89 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.flattenEvents = flattenEvents; -exports.getDocumentDataOfRxChangeEvent = getDocumentDataOfRxChangeEvent; -exports.rxChangeEventToEventReduceChangeEvent = rxChangeEventToEventReduceChangeEvent; -var _overwritable = require("./overwritable.js"); -var _index = require("./plugins/utils/index.js"); -/** - * RxChangeEvents a emitted when something in the database changes - * they can be grabbed by the observables of database, collection and document - */ - -function getDocumentDataOfRxChangeEvent(rxChangeEvent) { - if (rxChangeEvent.documentData) { - return rxChangeEvent.documentData; - } else { - return rxChangeEvent.previousDocumentData; - } -} - -/** - * Might return null which means an - * already deleted document got modified but still is 
deleted. - * These kind of events are not relevant for the event-reduce algorithm - * and must be filtered out. - */ -function rxChangeEventToEventReduceChangeEvent(rxChangeEvent) { - switch (rxChangeEvent.operation) { - case 'INSERT': - return { - operation: rxChangeEvent.operation, - id: rxChangeEvent.documentId, - doc: rxChangeEvent.documentData, - previous: null - }; - case 'UPDATE': - return { - operation: rxChangeEvent.operation, - id: rxChangeEvent.documentId, - doc: _overwritable.overwritable.deepFreezeWhenDevMode(rxChangeEvent.documentData), - previous: rxChangeEvent.previousDocumentData ? rxChangeEvent.previousDocumentData : 'UNKNOWN' - }; - case 'DELETE': - return { - operation: rxChangeEvent.operation, - id: rxChangeEvent.documentId, - doc: null, - previous: rxChangeEvent.previousDocumentData - }; - } -} - -/** - * Flattens the given events into a single array of events. - * Used mostly in tests. - */ -function flattenEvents(input) { - var output = []; - if (Array.isArray(input)) { - input.forEach(inputItem => { - var add = flattenEvents(inputItem); - (0, _index.appendToArray)(output, add); - }); - } else { - if (input.id && input.events) { - // is bulk - input.events.forEach(ev => output.push(ev)); - } else { - output.push(input); - } - } - var usedIds = new Set(); - var nonDuplicate = []; - function getEventId(ev) { - return [ev.documentId, ev.documentData ? ev.documentData._rev : '', ev.previousDocumentData ? 
ev.previousDocumentData._rev : ''].join('|'); - } - output.forEach(ev => { - var eventId = getEventId(ev); - if (!usedIds.has(eventId)) { - usedIds.add(eventId); - nonDuplicate.push(ev); - } - }); - return nonDuplicate; -} -//# sourceMappingURL=rx-change-event.js.map \ No newline at end of file diff --git a/dist/cjs/rx-change-event.js.map b/dist/cjs/rx-change-event.js.map deleted file mode 100644 index 6d1887a5cfc..00000000000 --- a/dist/cjs/rx-change-event.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-change-event.js","names":["_overwritable","require","_index","getDocumentDataOfRxChangeEvent","rxChangeEvent","documentData","previousDocumentData","rxChangeEventToEventReduceChangeEvent","operation","id","documentId","doc","previous","overwritable","deepFreezeWhenDevMode","flattenEvents","input","output","Array","isArray","forEach","inputItem","add","appendToArray","events","ev","push","usedIds","Set","nonDuplicate","getEventId","_rev","join","eventId","has"],"sources":["../../src/rx-change-event.ts"],"sourcesContent":["/**\n * RxChangeEvents a emitted when something in the database changes\n * they can be grabbed by the observables of database, collection and document\n */\n\nimport type {\n ChangeEvent as EventReduceChangeEvent,\n} from 'event-reduce-js';\nimport { overwritable } from './overwritable.ts';\n\nimport type {\n EventBulk,\n RxChangeEvent,\n RxDocumentData\n} from './types/index.d.ts';\nimport { appendToArray } from './plugins/utils/index.ts';\n\nexport function getDocumentDataOfRxChangeEvent(\n rxChangeEvent: RxChangeEvent\n): RxDocumentData {\n if ((rxChangeEvent as any).documentData) {\n return (rxChangeEvent as any).documentData;\n } else {\n return (rxChangeEvent as any).previousDocumentData;\n }\n}\n\n/**\n * Might return null which means an\n * already deleted document got modified but still is deleted.\n * These kind of events are not relevant for the event-reduce algorithm\n * and must be filtered out.\n */\nexport function 
rxChangeEventToEventReduceChangeEvent(\n rxChangeEvent: RxChangeEvent\n): EventReduceChangeEvent | null {\n switch (rxChangeEvent.operation) {\n case 'INSERT':\n return {\n operation: rxChangeEvent.operation,\n id: rxChangeEvent.documentId,\n doc: rxChangeEvent.documentData as any,\n previous: null\n };\n case 'UPDATE':\n return {\n operation: rxChangeEvent.operation,\n id: rxChangeEvent.documentId,\n doc: overwritable.deepFreezeWhenDevMode(rxChangeEvent.documentData) as any,\n previous: rxChangeEvent.previousDocumentData ? rxChangeEvent.previousDocumentData as any : 'UNKNOWN'\n };\n case 'DELETE':\n return {\n operation: rxChangeEvent.operation,\n id: rxChangeEvent.documentId,\n doc: null,\n previous: rxChangeEvent.previousDocumentData as DocType\n };\n }\n}\n\n/**\n * Flattens the given events into a single array of events.\n * Used mostly in tests.\n */\nexport function flattenEvents(\n input: EventBulk | EventBulk[] | EventType | EventType[]\n): EventType[] {\n const output: EventType[] = [];\n if (Array.isArray(input)) {\n input.forEach(inputItem => {\n const add = flattenEvents(inputItem);\n appendToArray(output, add);\n });\n } else {\n if ((input as any).id && (input as any).events) {\n // is bulk\n (input as EventBulk)\n .events\n .forEach(ev => output.push(ev));\n } else {\n output.push(input as any);\n }\n }\n\n const usedIds = new Set();\n const nonDuplicate: EventType[] = [];\n\n function getEventId(ev: any): string {\n return [\n ev.documentId,\n ev.documentData ? ev.documentData._rev : '',\n ev.previousDocumentData ? 
ev.previousDocumentData._rev : ''\n ].join('|');\n }\n\n output.forEach(ev => {\n const eventId = getEventId(ev);\n if (!usedIds.has(eventId)) {\n usedIds.add(eventId);\n nonDuplicate.push(ev);\n }\n });\n\n return nonDuplicate;\n}\n"],"mappings":";;;;;;;;AAQA,IAAAA,aAAA,GAAAC,OAAA;AAOA,IAAAC,MAAA,GAAAD,OAAA;AAfA;AACA;AACA;AACA;;AAcO,SAASE,8BAA8BA,CAC1CC,aAA+B,EACd;EACjB,IAAKA,aAAa,CAASC,YAAY,EAAE;IACrC,OAAQD,aAAa,CAASC,YAAY;EAC9C,CAAC,MAAM;IACH,OAAQD,aAAa,CAASE,oBAAoB;EACtD;AACJ;;AAEA;AACA;AACA;AACA;AACA;AACA;AACO,SAASC,qCAAqCA,CACjDH,aAAqC,EACC;EACtC,QAAQA,aAAa,CAACI,SAAS;IAC3B,KAAK,QAAQ;MACT,OAAO;QACHA,SAAS,EAAEJ,aAAa,CAACI,SAAS;QAClCC,EAAE,EAAEL,aAAa,CAACM,UAAU;QAC5BC,GAAG,EAAEP,aAAa,CAACC,YAAmB;QACtCO,QAAQ,EAAE;MACd,CAAC;IACL,KAAK,QAAQ;MACT,OAAO;QACHJ,SAAS,EAAEJ,aAAa,CAACI,SAAS;QAClCC,EAAE,EAAEL,aAAa,CAACM,UAAU;QAC5BC,GAAG,EAAEE,0BAAY,CAACC,qBAAqB,CAACV,aAAa,CAACC,YAAY,CAAQ;QAC1EO,QAAQ,EAAER,aAAa,CAACE,oBAAoB,GAAGF,aAAa,CAACE,oBAAoB,GAAU;MAC/F,CAAC;IACL,KAAK,QAAQ;MACT,OAAO;QACHE,SAAS,EAAEJ,aAAa,CAACI,SAAS;QAClCC,EAAE,EAAEL,aAAa,CAACM,UAAU;QAC5BC,GAAG,EAAE,IAAI;QACTC,QAAQ,EAAER,aAAa,CAACE;MAC5B,CAAC;EACT;AACJ;;AAEA;AACA;AACA;AACA;AACO,SAASS,aAAaA,CACzBC,KAAwF,EAC7E;EACX,IAAMC,MAAmB,GAAG,EAAE;EAC9B,IAAIC,KAAK,CAACC,OAAO,CAACH,KAAK,CAAC,EAAE;IACtBA,KAAK,CAACI,OAAO,CAACC,SAAS,IAAI;MACvB,IAAMC,GAAG,GAAGP,aAAa,CAACM,SAAS,CAAC;MACpC,IAAAE,oBAAa,EAACN,MAAM,EAAEK,GAAG,CAAC;IAC9B,CAAC,CAAC;EACN,CAAC,MAAM;IACH,IAAKN,KAAK,CAASP,EAAE,IAAKO,KAAK,CAASQ,MAAM,EAAE;MAC5C;MACCR,KAAK,CACDQ,MAAM,CACNJ,OAAO,CAACK,EAAE,IAAIR,MAAM,CAACS,IAAI,CAACD,EAAE,CAAC,CAAC;IACvC,CAAC,MAAM;MACHR,MAAM,CAACS,IAAI,CAACV,KAAY,CAAC;IAC7B;EACJ;EAEA,IAAMW,OAAO,GAAG,IAAIC,GAAG,CAAS,CAAC;EACjC,IAAMC,YAAyB,GAAG,EAAE;EAEpC,SAASC,UAAUA,CAACL,EAAO,EAAU;IACjC,OAAO,CACHA,EAAE,CAACf,UAAU,EACbe,EAAE,CAACpB,YAAY,GAAGoB,EAAE,CAACpB,YAAY,CAAC0B,IAAI,GAAG,EAAE,EAC3CN,EAAE,CAACnB,oBAAoB,GAAGmB,EAAE,CAACnB,oBAAoB,CAACyB,IAAI,GAAG,EAAE,CAC9D,CAACC,IAAI,CAAC,GAAG,CAAC;EACf;EAEAf,MAAM,CAACG,OAAO,CAACK,EAAE,IAAI;IACjB,IAAM
Q,OAAO,GAAGH,UAAU,CAACL,EAAE,CAAC;IAC9B,IAAI,CAACE,OAAO,CAACO,GAAG,CAACD,OAAO,CAAC,EAAE;MACvBN,OAAO,CAACL,GAAG,CAACW,OAAO,CAAC;MACpBJ,YAAY,CAACH,IAAI,CAACD,EAAE,CAAC;IACzB;EACJ,CAAC,CAAC;EAEF,OAAOI,YAAY;AACvB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/rx-collection-helper.js b/dist/cjs/rx-collection-helper.js deleted file mode 100644 index 9c98837e1af..00000000000 --- a/dist/cjs/rx-collection-helper.js +++ /dev/null @@ -1,131 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.createRxCollectionStorageInstance = createRxCollectionStorageInstance; -exports.ensureRxCollectionIsNotDestroyed = ensureRxCollectionIsNotDestroyed; -exports.fillObjectDataBeforeInsert = fillObjectDataBeforeInsert; -exports.removeCollectionStorages = removeCollectionStorages; -var _index = require("./plugins/utils/index.js"); -var _rxSchemaHelper = require("./rx-schema-helper.js"); -var _hooks = require("./hooks.js"); -var _rxDatabaseInternalStore = require("./rx-database-internal-store.js"); -var _rxStorageHelper = require("./rx-storage-helper.js"); -var _overwritable = require("./overwritable.js"); -var _rxError = require("./rx-error.js"); -/** - * fills in the default data. - * This also clones the data. 
- */ -function fillObjectDataBeforeInsert(schema, data) { - data = (0, _index.flatClone)(data); - data = (0, _rxSchemaHelper.fillObjectWithDefaults)(schema, data); - data = (0, _rxSchemaHelper.fillPrimaryKey)(schema.primaryPath, schema.jsonSchema, data); - data._meta = (0, _index.getDefaultRxDocumentMeta)(); - if (!Object.prototype.hasOwnProperty.call(data, '_deleted')) { - data._deleted = false; - } - if (!Object.prototype.hasOwnProperty.call(data, '_attachments')) { - data._attachments = {}; - } - if (!Object.prototype.hasOwnProperty.call(data, '_rev')) { - data._rev = (0, _index.getDefaultRevision)(); - } - return data; -} - -/** - * Creates the storage instances that are used internally in the collection - */ -async function createRxCollectionStorageInstance(rxDatabase, storageInstanceCreationParams) { - storageInstanceCreationParams.multiInstance = rxDatabase.multiInstance; - var storageInstance = await rxDatabase.storage.createStorageInstance(storageInstanceCreationParams); - return storageInstance; -} - -/** - * Removes the main storage of the collection - * and all connected storages like the ones from the replication meta etc. - */ -async function removeCollectionStorages(storage, databaseInternalStorage, databaseInstanceToken, databaseName, collectionName, password, -/** - * If no hash function is provided, - * we assume that the whole internal store is removed anyway - * so we do not have to delete the meta documents. 
- */ -hashFunction) { - var allCollectionMetaDocs = await (0, _rxDatabaseInternalStore.getAllCollectionDocuments)(databaseInternalStorage); - var relevantCollectionMetaDocs = allCollectionMetaDocs.filter(metaDoc => metaDoc.data.name === collectionName); - var removeStorages = []; - relevantCollectionMetaDocs.forEach(metaDoc => { - removeStorages.push({ - collectionName: metaDoc.data.name, - schema: metaDoc.data.schema, - isCollection: true - }); - metaDoc.data.connectedStorages.forEach(row => removeStorages.push({ - collectionName: row.collectionName, - isCollection: false, - schema: row.schema - })); - }); - - // ensure uniqueness - var alreadyAdded = new Set(); - removeStorages = removeStorages.filter(row => { - var key = row.collectionName + '||' + row.schema.version; - if (alreadyAdded.has(key)) { - return false; - } else { - alreadyAdded.add(key); - return true; - } - }); - - // remove all the storages - await Promise.all(removeStorages.map(async row => { - var storageInstance = await storage.createStorageInstance({ - collectionName: row.collectionName, - databaseInstanceToken, - databaseName, - multiInstance: false, - options: {}, - schema: row.schema, - password, - devMode: _overwritable.overwritable.isDevMode() - }); - await storageInstance.remove(); - if (row.isCollection) { - await (0, _hooks.runAsyncPluginHooks)('postRemoveRxCollection', { - storage, - databaseName: databaseName, - collectionName - }); - } - })); - - // remove the meta documents - if (hashFunction) { - var writeRows = relevantCollectionMetaDocs.map(doc => { - var writeDoc = (0, _rxStorageHelper.flatCloneDocWithMeta)(doc); - writeDoc._deleted = true; - writeDoc._meta.lwt = (0, _index.now)(); - writeDoc._rev = (0, _index.createRevision)(databaseInstanceToken, doc); - return { - previous: doc, - document: writeDoc - }; - }); - await databaseInternalStorage.bulkWrite(writeRows, 'rx-database-remove-collection-all'); - } -} -function ensureRxCollectionIsNotDestroyed(collection) { - if 
(collection.destroyed) { - throw (0, _rxError.newRxError)('COL21', { - collection: collection.name, - version: collection.schema.version - }); - } -} -//# sourceMappingURL=rx-collection-helper.js.map \ No newline at end of file diff --git a/dist/cjs/rx-collection-helper.js.map b/dist/cjs/rx-collection-helper.js.map deleted file mode 100644 index 6299888ff20..00000000000 --- a/dist/cjs/rx-collection-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-collection-helper.js","names":["_index","require","_rxSchemaHelper","_hooks","_rxDatabaseInternalStore","_rxStorageHelper","_overwritable","_rxError","fillObjectDataBeforeInsert","schema","data","flatClone","fillObjectWithDefaults","fillPrimaryKey","primaryPath","jsonSchema","_meta","getDefaultRxDocumentMeta","Object","prototype","hasOwnProperty","call","_deleted","_attachments","_rev","getDefaultRevision","createRxCollectionStorageInstance","rxDatabase","storageInstanceCreationParams","multiInstance","storageInstance","storage","createStorageInstance","removeCollectionStorages","databaseInternalStorage","databaseInstanceToken","databaseName","collectionName","password","hashFunction","allCollectionMetaDocs","getAllCollectionDocuments","relevantCollectionMetaDocs","filter","metaDoc","name","removeStorages","forEach","push","isCollection","connectedStorages","row","alreadyAdded","Set","key","version","has","add","Promise","all","map","options","devMode","overwritable","isDevMode","remove","runAsyncPluginHooks","writeRows","doc","writeDoc","flatCloneDocWithMeta","lwt","now","createRevision","previous","document","bulkWrite","ensureRxCollectionIsNotDestroyed","collection","destroyed","newRxError"],"sources":["../../src/rx-collection-helper.ts"],"sourcesContent":["import type {\n HashFunction,\n InternalStoreDocType,\n RxCollection,\n RxDatabase,\n RxDocumentData,\n RxJsonSchema,\n RxStorage,\n RxStorageInstance,\n RxStorageInstanceCreationParams\n} from './types/index.d.ts';\nimport {\n createRevision,\n 
flatClone,\n getDefaultRevision,\n getDefaultRxDocumentMeta,\n now\n} from './plugins/utils/index.ts';\nimport {\n fillObjectWithDefaults,\n fillPrimaryKey\n} from './rx-schema-helper.ts';\nimport type { RxSchema } from './rx-schema.ts';\nimport { runAsyncPluginHooks } from './hooks.ts';\nimport { getAllCollectionDocuments } from './rx-database-internal-store.ts';\nimport { flatCloneDocWithMeta } from './rx-storage-helper.ts';\nimport { overwritable } from './overwritable.ts';\nimport type { RxCollectionBase } from './rx-collection.ts';\nimport { newRxError } from './rx-error.ts';\n\n/**\n * fills in the default data.\n * This also clones the data.\n */\nexport function fillObjectDataBeforeInsert(\n schema: RxSchema,\n data: Partial> | any\n): RxDocumentData {\n data = flatClone(data);\n data = fillObjectWithDefaults(schema, data);\n data = fillPrimaryKey(\n schema.primaryPath,\n schema.jsonSchema,\n data\n );\n data._meta = getDefaultRxDocumentMeta();\n if (!Object.prototype.hasOwnProperty.call(data, '_deleted')) {\n data._deleted = false;\n }\n if (!Object.prototype.hasOwnProperty.call(data, '_attachments')) {\n data._attachments = {};\n }\n if (!Object.prototype.hasOwnProperty.call(data, '_rev')) {\n data._rev = getDefaultRevision();\n }\n return data;\n}\n\n/**\n * Creates the storage instances that are used internally in the collection\n */\nexport async function createRxCollectionStorageInstance(\n rxDatabase: RxDatabase<{}, Internals, InstanceCreationOptions>,\n storageInstanceCreationParams: RxStorageInstanceCreationParams\n): Promise> {\n storageInstanceCreationParams.multiInstance = rxDatabase.multiInstance;\n const storageInstance = await rxDatabase.storage.createStorageInstance(\n storageInstanceCreationParams\n );\n return storageInstance;\n}\n\n/**\n * Removes the main storage of the collection\n * and all connected storages like the ones from the replication meta etc.\n */\nexport async function removeCollectionStorages(\n storage: RxStorage,\n 
databaseInternalStorage: RxStorageInstance, any, any>,\n databaseInstanceToken: string,\n databaseName: string,\n collectionName: string,\n password?: string,\n /**\n * If no hash function is provided,\n * we assume that the whole internal store is removed anyway\n * so we do not have to delete the meta documents.\n */\n hashFunction?: HashFunction,\n) {\n const allCollectionMetaDocs = await getAllCollectionDocuments(\n databaseInternalStorage\n );\n const relevantCollectionMetaDocs = allCollectionMetaDocs\n .filter(metaDoc => metaDoc.data.name === collectionName);\n\n let removeStorages: {\n collectionName: string;\n schema: RxJsonSchema;\n isCollection: boolean;\n }[] = [];\n relevantCollectionMetaDocs.forEach(metaDoc => {\n removeStorages.push({\n collectionName: metaDoc.data.name,\n schema: metaDoc.data.schema,\n isCollection: true\n });\n metaDoc.data.connectedStorages.forEach(row => removeStorages.push({\n collectionName: row.collectionName,\n isCollection: false,\n schema: row.schema\n }));\n });\n\n // ensure uniqueness\n const alreadyAdded = new Set();\n removeStorages = removeStorages.filter(row => {\n const key = row.collectionName + '||' + row.schema.version;\n if (alreadyAdded.has(key)) {\n return false;\n } else {\n alreadyAdded.add(key);\n return true;\n }\n });\n\n // remove all the storages\n await Promise.all(\n removeStorages\n .map(async (row) => {\n const storageInstance = await storage.createStorageInstance({\n collectionName: row.collectionName,\n databaseInstanceToken,\n databaseName,\n multiInstance: false,\n options: {},\n schema: row.schema,\n password,\n devMode: overwritable.isDevMode()\n });\n await storageInstance.remove();\n if (row.isCollection) {\n await runAsyncPluginHooks('postRemoveRxCollection', {\n storage,\n databaseName: databaseName,\n collectionName\n });\n }\n })\n );\n\n // remove the meta documents\n if (hashFunction) {\n const writeRows = relevantCollectionMetaDocs.map(doc => {\n const writeDoc = 
flatCloneDocWithMeta(doc);\n writeDoc._deleted = true;\n writeDoc._meta.lwt = now();\n writeDoc._rev = createRevision(\n databaseInstanceToken,\n doc\n );\n return {\n previous: doc,\n document: writeDoc\n };\n });\n await databaseInternalStorage.bulkWrite(\n writeRows,\n 'rx-database-remove-collection-all'\n );\n }\n}\n\n\nexport function ensureRxCollectionIsNotDestroyed(\n collection: RxCollection | RxCollectionBase\n) {\n if (collection.destroyed) {\n throw newRxError(\n 'COL21',\n {\n collection: collection.name,\n version: collection.schema.version\n }\n );\n }\n}\n"],"mappings":";;;;;;;;;AAWA,IAAAA,MAAA,GAAAC,OAAA;AAOA,IAAAC,eAAA,GAAAD,OAAA;AAKA,IAAAE,MAAA,GAAAF,OAAA;AACA,IAAAG,wBAAA,GAAAH,OAAA;AACA,IAAAI,gBAAA,GAAAJ,OAAA;AACA,IAAAK,aAAA,GAAAL,OAAA;AAEA,IAAAM,QAAA,GAAAN,OAAA;AAEA;AACA;AACA;AACA;AACO,SAASO,0BAA0BA,CACtCC,MAA2B,EAC3BC,IAA8C,EACrB;EACzBA,IAAI,GAAG,IAAAC,gBAAS,EAACD,IAAI,CAAC;EACtBA,IAAI,GAAG,IAAAE,sCAAsB,EAACH,MAAM,EAAEC,IAAI,CAAC;EAC3CA,IAAI,GAAG,IAAAG,8BAAc,EACjBJ,MAAM,CAACK,WAAW,EAClBL,MAAM,CAACM,UAAU,EACjBL,IACJ,CAAC;EACDA,IAAI,CAACM,KAAK,GAAG,IAAAC,+BAAwB,EAAC,CAAC;EACvC,IAAI,CAACC,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACX,IAAI,EAAE,UAAU,CAAC,EAAE;IACzDA,IAAI,CAACY,QAAQ,GAAG,KAAK;EACzB;EACA,IAAI,CAACJ,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACX,IAAI,EAAE,cAAc,CAAC,EAAE;IAC7DA,IAAI,CAACa,YAAY,GAAG,CAAC,CAAC;EAC1B;EACA,IAAI,CAACL,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACX,IAAI,EAAE,MAAM,CAAC,EAAE;IACrDA,IAAI,CAACc,IAAI,GAAG,IAAAC,yBAAkB,EAAC,CAAC;EACpC;EACA,OAAOf,IAAI;AACf;;AAEA;AACA;AACA;AACO,eAAegB,iCAAiCA,CACnDC,UAA8D,EAC9DC,6BAAuG,EACzB;EAC9EA,6BAA6B,CAACC,aAAa,GAAGF,UAAU,CAACE,aAAa;EACtE,IAAMC,eAAe,GAAG,MAAMH,UAAU,CAACI,OAAO,CAACC,qBAAqB,CAClEJ,6BACJ,CAAC;EACD,OAAOE,eAAe;AAC1B;;AAEA;AACA;AACA;AACA;AACO,eAAeG,wBAAwBA,CAC1CF,OAA4B,EAC5BG,uBAA+E,EAC/EC,qBAA6B,EAC7BC,YAAoB,EACpBC,cAAsB,EACtBC,QAAiB;AACjB;AACJ;AACA;AACA;AACA;AACIC,YAA2B,EAC7B;EACE,IAAMC,qBAAqB,GAAG,MAAM,IAAAC,kDAAyB,EACzDP,uBACJ,CAAC;EACD,IAAMQ,0BAA0B,GAAGF,qBAAqB,CACnDG,MA
AM,CAACC,OAAO,IAAIA,OAAO,CAAClC,IAAI,CAACmC,IAAI,KAAKR,cAAc,CAAC;EAE5D,IAAIS,cAID,GAAG,EAAE;EACRJ,0BAA0B,CAACK,OAAO,CAACH,OAAO,IAAI;IAC1CE,cAAc,CAACE,IAAI,CAAC;MAChBX,cAAc,EAAEO,OAAO,CAAClC,IAAI,CAACmC,IAAI;MACjCpC,MAAM,EAAEmC,OAAO,CAAClC,IAAI,CAACD,MAAM;MAC3BwC,YAAY,EAAE;IAClB,CAAC,CAAC;IACFL,OAAO,CAAClC,IAAI,CAACwC,iBAAiB,CAACH,OAAO,CAACI,GAAG,IAAIL,cAAc,CAACE,IAAI,CAAC;MAC9DX,cAAc,EAAEc,GAAG,CAACd,cAAc;MAClCY,YAAY,EAAE,KAAK;MACnBxC,MAAM,EAAE0C,GAAG,CAAC1C;IAChB,CAAC,CAAC,CAAC;EACP,CAAC,CAAC;;EAEF;EACA,IAAM2C,YAAY,GAAG,IAAIC,GAAG,CAAS,CAAC;EACtCP,cAAc,GAAGA,cAAc,CAACH,MAAM,CAACQ,GAAG,IAAI;IAC1C,IAAMG,GAAG,GAAGH,GAAG,CAACd,cAAc,GAAG,IAAI,GAAGc,GAAG,CAAC1C,MAAM,CAAC8C,OAAO;IAC1D,IAAIH,YAAY,CAACI,GAAG,CAACF,GAAG,CAAC,EAAE;MACvB,OAAO,KAAK;IAChB,CAAC,MAAM;MACHF,YAAY,CAACK,GAAG,CAACH,GAAG,CAAC;MACrB,OAAO,IAAI;IACf;EACJ,CAAC,CAAC;;EAEF;EACA,MAAMI,OAAO,CAACC,GAAG,CACbb,cAAc,CACTc,GAAG,CAAC,MAAOT,GAAG,IAAK;IAChB,IAAMrB,eAAe,GAAG,MAAMC,OAAO,CAACC,qBAAqB,CAAM;MAC7DK,cAAc,EAAEc,GAAG,CAACd,cAAc;MAClCF,qBAAqB;MACrBC,YAAY;MACZP,aAAa,EAAE,KAAK;MACpBgC,OAAO,EAAE,CAAC,CAAC;MACXpD,MAAM,EAAE0C,GAAG,CAAC1C,MAAM;MAClB6B,QAAQ;MACRwB,OAAO,EAAEC,0BAAY,CAACC,SAAS,CAAC;IACpC,CAAC,CAAC;IACF,MAAMlC,eAAe,CAACmC,MAAM,CAAC,CAAC;IAC9B,IAAId,GAAG,CAACF,YAAY,EAAE;MAClB,MAAM,IAAAiB,0BAAmB,EAAC,wBAAwB,EAAE;QAChDnC,OAAO;QACPK,YAAY,EAAEA,YAAY;QAC1BC;MACJ,CAAC,CAAC;IACN;EACJ,CAAC,CACT,CAAC;;EAED;EACA,IAAIE,YAAY,EAAE;IACd,IAAM4B,SAAS,GAAGzB,0BAA0B,CAACkB,GAAG,CAACQ,GAAG,IAAI;MACpD,IAAMC,QAAQ,GAAG,IAAAC,qCAAoB,EAACF,GAAG,CAAC;MAC1CC,QAAQ,CAAC/C,QAAQ,GAAG,IAAI;MACxB+C,QAAQ,CAACrD,KAAK,CAACuD,GAAG,GAAG,IAAAC,UAAG,EAAC,CAAC;MAC1BH,QAAQ,CAAC7C,IAAI,GAAG,IAAAiD,qBAAc,EAC1BtC,qBAAqB,EACrBiC,GACJ,CAAC;MACD,OAAO;QACHM,QAAQ,EAAEN,GAAG;QACbO,QAAQ,EAAEN;MACd,CAAC;IACL,CAAC,CAAC;IACF,MAAMnC,uBAAuB,CAAC0C,SAAS,CACnCT,SAAS,EACT,mCACJ,CAAC;EACL;AACJ;AAGO,SAASU,gCAAgCA,CAC5CC,UAAoE,EACtE;EACE,IAAIA,UAAU,CAACC,SAAS,EAAE;IACtB,MAAM,IAAAC,mBAAU,EACZ,OAAO,EACP;MACIF,UAAU,EAAEA,UAAU,CAACjC,IAAI;MAC3BU,OAAO,EAAEuB,UAAU,CAACrE,
MAAM,CAAC8C;IAC/B,CACJ,CAAC;EACL;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/rx-collection.js b/dist/cjs/rx-collection.js deleted file mode 100644 index da758db1123..00000000000 --- a/dist/cjs/rx-collection.js +++ /dev/null @@ -1,757 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxCollectionBase = void 0; -exports.createRxCollection = createRxCollection; -exports.isRxCollection = isRxCollection; -var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass")); -var _rxjs = require("rxjs"); -var _index = require("./plugins/utils/index.js"); -var _rxCollectionHelper = require("./rx-collection-helper.js"); -var _rxQuery = require("./rx-query.js"); -var _rxError = require("./rx-error.js"); -var _docCache = require("./doc-cache.js"); -var _queryCache = require("./query-cache.js"); -var _changeEventBuffer = require("./change-event-buffer.js"); -var _hooks = require("./hooks.js"); -var _rxDocumentPrototypeMerge = require("./rx-document-prototype-merge.js"); -var _rxStorageHelper = require("./rx-storage-helper.js"); -var _index2 = require("./replication-protocol/index.js"); -var _incrementalWrite = require("./incremental-write.js"); -var _rxDocument = require("./rx-document.js"); -var _overwritable = require("./overwritable.js"); -var HOOKS_WHEN = ['pre', 'post']; -var HOOKS_KEYS = ['insert', 'save', 'remove', 'create']; -var hooksApplied = false; -var RxCollectionBase = exports.RxCollectionBase = /*#__PURE__*/function () { - /** - * Stores all 'normal' documents - */ - - function RxCollectionBase(database, name, schema, internalStorageInstance, instanceCreationOptions = {}, migrationStrategies = {}, methods = {}, attachments = {}, options = {}, cacheReplacementPolicy = _queryCache.defaultCacheReplacementPolicy, statics = {}, conflictHandler = _index2.defaultConflictHandler) { - 
this.storageInstance = {}; - this.timeouts = new Set(); - this.incrementalWriteQueue = {}; - this._incrementalUpsertQueues = new Map(); - this.synced = false; - this.hooks = {}; - this._subs = []; - this._docCache = {}; - this._queryCache = (0, _queryCache.createQueryCache)(); - this.$ = {}; - this.checkpoint$ = {}; - this._changeEventBuffer = {}; - this.onDestroy = []; - this.destroyed = false; - this.onRemove = []; - this.database = database; - this.name = name; - this.schema = schema; - this.internalStorageInstance = internalStorageInstance; - this.instanceCreationOptions = instanceCreationOptions; - this.migrationStrategies = migrationStrategies; - this.methods = methods; - this.attachments = attachments; - this.options = options; - this.cacheReplacementPolicy = cacheReplacementPolicy; - this.statics = statics; - this.conflictHandler = conflictHandler; - _applyHookFunctions(this.asRxCollection); - } - var _proto = RxCollectionBase.prototype; - _proto.prepare = async function prepare() { - this.storageInstance = (0, _rxStorageHelper.getWrappedStorageInstance)(this.database, this.internalStorageInstance, this.schema.jsonSchema); - this.incrementalWriteQueue = new _incrementalWrite.IncrementalWriteQueue(this.storageInstance, this.schema.primaryPath, (newData, oldData) => (0, _rxDocument.beforeDocumentUpdateWrite)(this, newData, oldData), result => this._runHooks('post', 'save', result)); - var collectionEventBulks$ = this.database.eventBulks$.pipe((0, _rxjs.filter)(changeEventBulk => changeEventBulk.collectionName === this.name)); - this.$ = collectionEventBulks$.pipe((0, _rxjs.mergeMap)(changeEventBulk => changeEventBulk.events)); - this.checkpoint$ = collectionEventBulks$.pipe((0, _rxjs.map)(changeEventBulk => changeEventBulk.checkpoint)); - this._changeEventBuffer = (0, _changeEventBuffer.createChangeEventBuffer)(this.asRxCollection); - var documentConstructor; - this._docCache = new _docCache.DocumentCache(this.schema.primaryPath, this.$.pipe((0, 
_rxjs.filter)(cE => !cE.isLocal)), docData => { - if (!documentConstructor) { - documentConstructor = (0, _rxDocumentPrototypeMerge.getRxDocumentConstructor)(this.asRxCollection); - } - return (0, _rxDocumentPrototypeMerge.createNewRxDocument)(this.asRxCollection, documentConstructor, docData); - }); - var listenToRemoveSub = this.database.internalStore.changeStream().pipe((0, _rxjs.filter)(bulk => { - var key = this.name + '-' + this.schema.version; - var found = bulk.events.find(event => { - return event.documentData.context === 'collection' && event.documentData.key === key && event.operation === 'DELETE'; - }); - return !!found; - })).subscribe(async () => { - await this.destroy(); - await Promise.all(this.onRemove.map(fn => fn())); - }); - this._subs.push(listenToRemoveSub); - - /** - * TODO Instead of resolving the EventBulk array here and spit it into - * single events, we should fully work with event bulks internally - * to save performance. - */ - var databaseStorageToken = await this.database.storageToken; - var subDocs = this.storageInstance.changeStream().subscribe(eventBulk => { - var events = new Array(eventBulk.events.length); - var rawEvents = eventBulk.events; - var collectionName = this.name; - var deepFreezeWhenDevMode = _overwritable.overwritable.deepFreezeWhenDevMode; - for (var index = 0; index < rawEvents.length; index++) { - var event = rawEvents[index]; - events[index] = { - documentId: event.documentId, - collectionName, - isLocal: false, - operation: event.operation, - documentData: deepFreezeWhenDevMode(event.documentData), - previousDocumentData: deepFreezeWhenDevMode(event.previousDocumentData) - }; - } - var changeEventBulk = { - id: eventBulk.id, - internal: false, - collectionName: this.name, - storageToken: databaseStorageToken, - events, - databaseToken: this.database.token, - checkpoint: eventBulk.checkpoint, - context: eventBulk.context, - endTime: eventBulk.endTime, - startTime: eventBulk.startTime - }; - 
this.database.$emit(changeEventBulk); - }); - this._subs.push(subDocs); - - /** - * Resolve the conflict tasks - * of the RxStorageInstance - */ - this._subs.push(this.storageInstance.conflictResultionTasks().subscribe(task => { - this.conflictHandler(task.input, task.context).then(output => { - this.storageInstance.resolveConflictResultionTask({ - id: task.id, - output - }); - }); - })); - return _index.PROMISE_RESOLVE_VOID; - } - - /** - * Manually call the cleanup function of the storage. - * @link https://rxdb.info/cleanup.html - */; - _proto.cleanup = function cleanup(_minimumDeletedTime) { - (0, _rxCollectionHelper.ensureRxCollectionIsNotDestroyed)(this); - throw (0, _index.pluginMissing)('cleanup'); - } - - // overwritten by migration-plugin - ; - _proto.migrationNeeded = function migrationNeeded() { - throw (0, _index.pluginMissing)('migration-schema'); - }; - _proto.getMigrationState = function getMigrationState() { - throw (0, _index.pluginMissing)('migration-schema'); - }; - _proto.startMigration = function startMigration(batchSize = 10) { - (0, _rxCollectionHelper.ensureRxCollectionIsNotDestroyed)(this); - return this.getMigrationState().startMigration(batchSize); - }; - _proto.migratePromise = function migratePromise(batchSize = 10) { - return this.getMigrationState().migratePromise(batchSize); - }; - _proto.insert = async function insert(json) { - (0, _rxCollectionHelper.ensureRxCollectionIsNotDestroyed)(this); - var writeResult = await this.bulkInsert([json]); - var isError = writeResult.error[0]; - (0, _rxStorageHelper.throwIfIsStorageWriteError)(this, json[this.schema.primaryPath], json, isError); - var insertResult = (0, _index.ensureNotFalsy)(writeResult.success[0]); - return insertResult; - }; - _proto.bulkInsert = async function bulkInsert(docsData) { - (0, _rxCollectionHelper.ensureRxCollectionIsNotDestroyed)(this); - /** - * Optimization shortcut, - * do nothing when called with an empty array - */ - if (docsData.length === 0) { - return { - 
success: [], - error: [] - }; - } - var primaryPath = this.schema.primaryPath; - - /** - * This code is a bit redundant for better performance. - * Instead of iterating multiple times, - * we directly transform the input to a write-row array. - */ - var insertRows; - if (this.hasHooks('pre', 'insert')) { - insertRows = await Promise.all(docsData.map(docData => { - var useDocData = (0, _rxCollectionHelper.fillObjectDataBeforeInsert)(this.schema, docData); - return this._runHooks('pre', 'insert', useDocData).then(() => { - return { - document: useDocData - }; - }); - })); - } else { - insertRows = []; - for (var index = 0; index < docsData.length; index++) { - var docData = docsData[index]; - var useDocData = (0, _rxCollectionHelper.fillObjectDataBeforeInsert)(this.schema, docData); - insertRows[index] = { - document: useDocData - }; - } - } - var results = await this.storageInstance.bulkWrite(insertRows, 'rx-collection-bulk-insert'); - - // create documents - var rxDocuments = (0, _docCache.mapDocumentsDataToCacheDocs)(this._docCache, results.success); - if (this.hasHooks('post', 'insert')) { - var docsMap = new Map(); - insertRows.forEach(row => { - var doc = row.document; - docsMap.set(doc[primaryPath], doc); - }); - await Promise.all(rxDocuments.map(doc => { - return this._runHooks('post', 'insert', docsMap.get(doc.primary), doc); - })); - } - return { - success: rxDocuments, - error: results.error - }; - }; - _proto.bulkRemove = async function bulkRemove(ids) { - (0, _rxCollectionHelper.ensureRxCollectionIsNotDestroyed)(this); - var primaryPath = this.schema.primaryPath; - /** - * Optimization shortcut, - * do nothing when called with an empty array - */ - if (ids.length === 0) { - return { - success: [], - error: [] - }; - } - var rxDocumentMap = await this.findByIds(ids).exec(); - var docsData = []; - var docsMap = new Map(); - Array.from(rxDocumentMap.values()).forEach(rxDocument => { - var data = rxDocument.toMutableJSON(true); - docsData.push(data); - 
docsMap.set(rxDocument.primary, data); - }); - await Promise.all(docsData.map(doc => { - var primary = doc[this.schema.primaryPath]; - return this._runHooks('pre', 'remove', doc, rxDocumentMap.get(primary)); - })); - var removeDocs = docsData.map(doc => { - var writeDoc = (0, _index.flatClone)(doc); - writeDoc._deleted = true; - return { - previous: doc, - document: writeDoc - }; - }); - var results = await this.storageInstance.bulkWrite(removeDocs, 'rx-collection-bulk-remove'); - var successIds = results.success.map(d => d[primaryPath]); - - // run hooks - await Promise.all(successIds.map(id => { - return this._runHooks('post', 'remove', docsMap.get(id), rxDocumentMap.get(id)); - })); - var rxDocuments = successIds.map(id => (0, _index.getFromMapOrThrow)(rxDocumentMap, id)); - return { - success: rxDocuments, - error: results.error - }; - } - - /** - * same as bulkInsert but overwrites existing document with same primary - */; - _proto.bulkUpsert = async function bulkUpsert(docsData) { - (0, _rxCollectionHelper.ensureRxCollectionIsNotDestroyed)(this); - var insertData = []; - var useJsonByDocId = new Map(); - docsData.forEach(docData => { - var useJson = (0, _rxCollectionHelper.fillObjectDataBeforeInsert)(this.schema, docData); - var primary = useJson[this.schema.primaryPath]; - if (!primary) { - throw (0, _rxError.newRxError)('COL3', { - primaryPath: this.schema.primaryPath, - data: useJson, - schema: this.schema.jsonSchema - }); - } - useJsonByDocId.set(primary, useJson); - insertData.push(useJson); - }); - var insertResult = await this.bulkInsert(insertData); - var success = insertResult.success.slice(0); - var error = []; - - // update the ones that existed already - await Promise.all(insertResult.error.map(async err => { - if (err.status !== 409) { - error.push(err); - } else { - var id = err.documentId; - var writeData = (0, _index.getFromMapOrThrow)(useJsonByDocId, id); - var docDataInDb = (0, _index.ensureNotFalsy)(err.documentInDb); - var doc = 
this._docCache.getCachedRxDocuments([docDataInDb])[0]; - var newDoc = await doc.incrementalModify(() => writeData); - success.push(newDoc); - } - })); - return { - error, - success - }; - } - - /** - * same as insert but overwrites existing document with same primary - */; - _proto.upsert = async function upsert(json) { - (0, _rxCollectionHelper.ensureRxCollectionIsNotDestroyed)(this); - var bulkResult = await this.bulkUpsert([json]); - (0, _rxStorageHelper.throwIfIsStorageWriteError)(this.asRxCollection, json[this.schema.primaryPath], json, bulkResult.error[0]); - return bulkResult.success[0]; - } - - /** - * upserts to a RxDocument, uses incrementalModify if document already exists - */; - _proto.incrementalUpsert = function incrementalUpsert(json) { - (0, _rxCollectionHelper.ensureRxCollectionIsNotDestroyed)(this); - var useJson = (0, _rxCollectionHelper.fillObjectDataBeforeInsert)(this.schema, json); - var primary = useJson[this.schema.primaryPath]; - if (!primary) { - throw (0, _rxError.newRxError)('COL4', { - data: json - }); - } - - // ensure that it won't try 2 parallel runs - var queue = this._incrementalUpsertQueues.get(primary); - if (!queue) { - queue = _index.PROMISE_RESOLVE_VOID; - } - queue = queue.then(() => _incrementalUpsertEnsureRxDocumentExists(this, primary, useJson)).then(wasInserted => { - if (!wasInserted.inserted) { - return _incrementalUpsertUpdate(wasInserted.doc, useJson); - } else { - return wasInserted.doc; - } - }); - this._incrementalUpsertQueues.set(primary, queue); - return queue; - }; - _proto.find = function find(queryObj) { - (0, _rxCollectionHelper.ensureRxCollectionIsNotDestroyed)(this); - if (typeof queryObj === 'string') { - throw (0, _rxError.newRxError)('COL5', { - queryObj - }); - } - if (!queryObj) { - queryObj = (0, _rxQuery._getDefaultQuery)(); - } - var query = (0, _rxQuery.createRxQuery)('find', queryObj, this); - return query; - }; - _proto.findOne = function findOne(queryObj) { - (0, 
_rxCollectionHelper.ensureRxCollectionIsNotDestroyed)(this); - - // TODO move this check to dev-mode plugin - if (typeof queryObj === 'number' || Array.isArray(queryObj)) { - throw (0, _rxError.newRxTypeError)('COL6', { - queryObj - }); - } - var query; - if (typeof queryObj === 'string') { - query = (0, _rxQuery.createRxQuery)('findOne', { - selector: { - [this.schema.primaryPath]: queryObj - }, - limit: 1 - }, this); - } else { - if (!queryObj) { - queryObj = (0, _rxQuery._getDefaultQuery)(); - } - - // cannot have limit on findOne queries because it will be overwritten - if (queryObj.limit) { - throw (0, _rxError.newRxError)('QU6'); - } - queryObj = (0, _index.flatClone)(queryObj); - queryObj.limit = 1; - query = (0, _rxQuery.createRxQuery)('findOne', queryObj, this); - } - return query; - }; - _proto.count = function count(queryObj) { - (0, _rxCollectionHelper.ensureRxCollectionIsNotDestroyed)(this); - if (!queryObj) { - queryObj = (0, _rxQuery._getDefaultQuery)(); - } - var query = (0, _rxQuery.createRxQuery)('count', queryObj, this); - return query; - } - - /** - * find a list documents by their primary key - * has way better performance then running multiple findOne() or a find() with a complex $or-selected - */; - _proto.findByIds = function findByIds(ids) { - (0, _rxCollectionHelper.ensureRxCollectionIsNotDestroyed)(this); - var mangoQuery = { - selector: { - [this.schema.primaryPath]: { - $in: ids.slice(0) - } - } - }; - var query = (0, _rxQuery.createRxQuery)('findByIds', mangoQuery, this); - return query; - } - - /** - * Export collection to a JSON friendly format. - */; - _proto.exportJSON = function exportJSON() { - throw (0, _index.pluginMissing)('json-dump'); - } - - /** - * Import the parsed JSON export into the collection. - * @param _exportedJSON The previously exported data from the `.exportJSON()` method. 
- */; - _proto.importJSON = function importJSON(_exportedJSON) { - throw (0, _index.pluginMissing)('json-dump'); - }; - _proto.insertCRDT = function insertCRDT(_updateObj) { - throw (0, _index.pluginMissing)('crdt'); - } - - /** - * HOOKS - */; - _proto.addHook = function addHook(when, key, fun, parallel = false) { - if (typeof fun !== 'function') { - throw (0, _rxError.newRxTypeError)('COL7', { - key, - when - }); - } - if (!HOOKS_WHEN.includes(when)) { - throw (0, _rxError.newRxTypeError)('COL8', { - key, - when - }); - } - if (!HOOKS_KEYS.includes(key)) { - throw (0, _rxError.newRxError)('COL9', { - key - }); - } - if (when === 'post' && key === 'create' && parallel === true) { - throw (0, _rxError.newRxError)('COL10', { - when, - key, - parallel - }); - } - - // bind this-scope to hook-function - var boundFun = fun.bind(this); - var runName = parallel ? 'parallel' : 'series'; - this.hooks[key] = this.hooks[key] || {}; - this.hooks[key][when] = this.hooks[key][when] || { - series: [], - parallel: [] - }; - this.hooks[key][when][runName].push(boundFun); - }; - _proto.getHooks = function getHooks(when, key) { - if (!this.hooks[key] || !this.hooks[key][when]) { - return { - series: [], - parallel: [] - }; - } - return this.hooks[key][when]; - }; - _proto.hasHooks = function hasHooks(when, key) { - /** - * Performance shortcut - * so that we not have to build the empty object. 
- */ - if (!this.hooks[key] || !this.hooks[key][when]) { - return false; - } - var hooks = this.getHooks(when, key); - if (!hooks) { - return false; - } - return hooks.series.length > 0 || hooks.parallel.length > 0; - }; - _proto._runHooks = function _runHooks(when, key, data, instance) { - var hooks = this.getHooks(when, key); - if (!hooks) { - return _index.PROMISE_RESOLVE_VOID; - } - - // run parallel: false - var tasks = hooks.series.map(hook => () => hook(data, instance)); - return (0, _index.promiseSeries)(tasks) - // run parallel: true - .then(() => Promise.all(hooks.parallel.map(hook => hook(data, instance)))); - } - - /** - * does the same as ._runHooks() but with non-async-functions - */; - _proto._runHooksSync = function _runHooksSync(when, key, data, instance) { - if (!this.hasHooks(when, key)) { - return; - } - var hooks = this.getHooks(when, key); - if (!hooks) return; - hooks.series.forEach(hook => hook(data, instance)); - } - - /** - * Returns a promise that resolves after the given time. - * Ensures that is properly cleans up when the collection is destroyed - * so that no running timeouts prevent the exit of the JavaScript process. - */; - _proto.promiseWait = function promiseWait(time) { - var ret = new Promise(res => { - var timeout = setTimeout(() => { - this.timeouts.delete(timeout); - res(); - }, time); - this.timeouts.add(timeout); - }); - return ret; - }; - _proto.destroy = async function destroy() { - if (this.destroyed) { - return _index.PROMISE_RESOLVE_FALSE; - } - await Promise.all(this.onDestroy.map(fn => fn())); - - /** - * Settings destroyed = true - * must be the first thing to do, - * so for example the replication can directly stop - * instead of sending requests to a closed storage. - */ - this.destroyed = true; - Array.from(this.timeouts).forEach(timeout => clearTimeout(timeout)); - if (this._changeEventBuffer) { - this._changeEventBuffer.destroy(); - } - /** - * First wait until the whole database is idle. 
- * This ensures that the storage does not get closed - * while some operation is running. - * It is important that we do not intercept a running call - * because it might lead to undefined behavior like when a doc is written - * but the change is not added to the changes collection. - */ - return this.database.requestIdlePromise().then(() => this.storageInstance.close()).then(() => { - /** - * Unsubscribing must be done AFTER the storageInstance.close() - * Because the conflict handling is part of the subscriptions and - * otherwise there might be open conflicts to be resolved which - * will then stuck and never resolve. - */ - this._subs.forEach(sub => sub.unsubscribe()); - delete this.database.collections[this.name]; - return (0, _hooks.runAsyncPluginHooks)('postDestroyRxCollection', this).then(() => true); - }); - } - - /** - * remove all data of the collection - */; - _proto.remove = async function remove() { - await this.destroy(); - await Promise.all(this.onRemove.map(fn => fn())); - await (0, _rxCollectionHelper.removeCollectionStorages)(this.database.storage, this.database.internalStore, this.database.token, this.database.name, this.name, this.database.password, this.database.hashFunction); - }; - return (0, _createClass2.default)(RxCollectionBase, [{ - key: "insert$", - get: function () { - return this.$.pipe((0, _rxjs.filter)(cE => cE.operation === 'INSERT')); - } - }, { - key: "update$", - get: function () { - return this.$.pipe((0, _rxjs.filter)(cE => cE.operation === 'UPDATE')); - } - }, { - key: "remove$", - get: function () { - return this.$.pipe((0, _rxjs.filter)(cE => cE.operation === 'DELETE')); - } - - // defaults - - /** - * When the collection is destroyed, - * these functions will be called an awaited. - * Used to automatically clean up stuff that - * belongs to this collection. 
- */ - }, { - key: "asRxCollection", - get: function () { - return this; - } - }]); -}(); -/** - * adds the hook-functions to the collections prototype - * this runs only once - */ -function _applyHookFunctions(collection) { - if (hooksApplied) return; // already run - hooksApplied = true; - var colProto = Object.getPrototypeOf(collection); - HOOKS_KEYS.forEach(key => { - HOOKS_WHEN.map(when => { - var fnName = when + (0, _index.ucfirst)(key); - colProto[fnName] = function (fun, parallel) { - return this.addHook(when, key, fun, parallel); - }; - }); - }); -} -function _incrementalUpsertUpdate(doc, json) { - return doc.incrementalModify(_innerDoc => { - return json; - }); -} - -/** - * ensures that the given document exists - * @return promise that resolves with new doc and flag if inserted - */ -function _incrementalUpsertEnsureRxDocumentExists(rxCollection, primary, json) { - /** - * Optimisation shortcut, - * first try to find the document in the doc-cache - */ - var docDataFromCache = rxCollection._docCache.getLatestDocumentDataIfExists(primary); - if (docDataFromCache) { - return Promise.resolve({ - doc: rxCollection._docCache.getCachedRxDocuments([docDataFromCache])[0], - inserted: false - }); - } - return rxCollection.findOne(primary).exec().then(doc => { - if (!doc) { - return rxCollection.insert(json).then(newDoc => ({ - doc: newDoc, - inserted: true - })); - } else { - return { - doc, - inserted: false - }; - } - }); -} - -/** - * creates and prepares a new collection - */ -function createRxCollection({ - database, - name, - schema, - instanceCreationOptions = {}, - migrationStrategies = {}, - autoMigrate = true, - statics = {}, - methods = {}, - attachments = {}, - options = {}, - localDocuments = false, - cacheReplacementPolicy = _queryCache.defaultCacheReplacementPolicy, - conflictHandler = _index2.defaultConflictHandler -}) { - var storageInstanceCreationParams = { - databaseInstanceToken: database.token, - databaseName: database.name, - 
collectionName: name, - schema: schema.jsonSchema, - options: instanceCreationOptions, - multiInstance: database.multiInstance, - password: database.password, - devMode: _overwritable.overwritable.isDevMode() - }; - (0, _hooks.runPluginHooks)('preCreateRxStorageInstance', storageInstanceCreationParams); - return (0, _rxCollectionHelper.createRxCollectionStorageInstance)(database, storageInstanceCreationParams).then(storageInstance => { - var collection = new RxCollectionBase(database, name, schema, storageInstance, instanceCreationOptions, migrationStrategies, methods, attachments, options, cacheReplacementPolicy, statics, conflictHandler); - return collection.prepare().then(() => { - // ORM add statics - Object.entries(statics).forEach(([funName, fun]) => { - Object.defineProperty(collection, funName, { - get: () => fun.bind(collection) - }); - }); - var ret = _index.PROMISE_RESOLVE_VOID; - if (autoMigrate && collection.schema.version !== 0) { - ret = collection.migratePromise(); - } - return ret; - }).then(() => { - (0, _hooks.runPluginHooks)('createRxCollection', { - collection, - creator: { - name, - schema, - storageInstance, - instanceCreationOptions, - migrationStrategies, - methods, - attachments, - options, - cacheReplacementPolicy, - localDocuments, - statics - } - }); - return collection; - }) - /** - * If the collection creation fails, - * we yet have to close the storage instances. 
- */.catch(err => { - return storageInstance.close().then(() => Promise.reject(err)); - }); - }); -} -function isRxCollection(obj) { - return obj instanceof RxCollectionBase; -} -//# sourceMappingURL=rx-collection.js.map \ No newline at end of file diff --git a/dist/cjs/rx-collection.js.map b/dist/cjs/rx-collection.js.map deleted file mode 100644 index f2e9eb6f43b..00000000000 --- a/dist/cjs/rx-collection.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-collection.js","names":["_rxjs","require","_index","_rxCollectionHelper","_rxQuery","_rxError","_docCache","_queryCache","_changeEventBuffer","_hooks","_rxDocumentPrototypeMerge","_rxStorageHelper","_index2","_incrementalWrite","_rxDocument","_overwritable","HOOKS_WHEN","HOOKS_KEYS","hooksApplied","RxCollectionBase","exports","database","name","schema","internalStorageInstance","instanceCreationOptions","migrationStrategies","methods","attachments","options","cacheReplacementPolicy","defaultCacheReplacementPolicy","statics","conflictHandler","defaultConflictHandler","storageInstance","timeouts","Set","incrementalWriteQueue","_incrementalUpsertQueues","Map","synced","hooks","_subs","createQueryCache","$","checkpoint$","onDestroy","destroyed","onRemove","_applyHookFunctions","asRxCollection","_proto","prototype","prepare","getWrappedStorageInstance","jsonSchema","IncrementalWriteQueue","primaryPath","newData","oldData","beforeDocumentUpdateWrite","result","_runHooks","collectionEventBulks$","eventBulks$","pipe","filter","changeEventBulk","collectionName","mergeMap","events","map","checkpoint","createChangeEventBuffer","documentConstructor","DocumentCache","cE","isLocal","docData","getRxDocumentConstructor","createNewRxDocument","listenToRemoveSub","internalStore","changeStream","bulk","key","version","found","find","event","documentData","context","operation","subscribe","destroy","Promise","all","fn","push","databaseStorageToken","storageToken","subDocs","eventBulk","Array","length","rawEvents","deepFreezeW
henDevMode","overwritable","index","documentId","previousDocumentData","id","internal","databaseToken","token","endTime","startTime","$emit","conflictResultionTasks","task","input","then","output","resolveConflictResultionTask","PROMISE_RESOLVE_VOID","cleanup","_minimumDeletedTime","ensureRxCollectionIsNotDestroyed","pluginMissing","migrationNeeded","getMigrationState","startMigration","batchSize","migratePromise","insert","json","writeResult","bulkInsert","isError","error","throwIfIsStorageWriteError","insertResult","ensureNotFalsy","success","docsData","insertRows","hasHooks","useDocData","fillObjectDataBeforeInsert","document","results","bulkWrite","rxDocuments","mapDocumentsDataToCacheDocs","docsMap","forEach","row","doc","set","get","primary","bulkRemove","ids","rxDocumentMap","findByIds","exec","from","values","rxDocument","data","toMutableJSON","removeDocs","writeDoc","flatClone","_deleted","previous","successIds","d","getFromMapOrThrow","bulkUpsert","insertData","useJsonByDocId","useJson","newRxError","slice","err","status","writeData","docDataInDb","documentInDb","getCachedRxDocuments","newDoc","incrementalModify","upsert","bulkResult","incrementalUpsert","queue","_incrementalUpsertEnsureRxDocumentExists","wasInserted","inserted","_incrementalUpsertUpdate","queryObj","_getDefaultQuery","query","createRxQuery","findOne","isArray","newRxTypeError","selector","limit","count","mangoQuery","$in","exportJSON","importJSON","_exportedJSON","insertCRDT","_updateObj","addHook","when","fun","parallel","includes","boundFun","bind","runName","series","getHooks","instance","tasks","hook","promiseSeries","_runHooksSync","promiseWait","time","ret","res","timeout","setTimeout","delete","add","PROMISE_RESOLVE_FALSE","clearTimeout","requestIdlePromise","close","sub","unsubscribe","collections","runAsyncPluginHooks","remove","removeCollectionStorages","storage","password","hashFunction","_createClass2","default","collection","colProto","Object","getPrototypeOf","fnName","ucfir
st","_innerDoc","rxCollection","docDataFromCache","getLatestDocumentDataIfExists","resolve","createRxCollection","autoMigrate","localDocuments","storageInstanceCreationParams","databaseInstanceToken","databaseName","multiInstance","devMode","isDevMode","runPluginHooks","createRxCollectionStorageInstance","entries","funName","defineProperty","creator","catch","reject","isRxCollection","obj"],"sources":["../../src/rx-collection.ts"],"sourcesContent":["import {\n filter,\n map,\n mergeMap\n} from 'rxjs';\n\nimport {\n ucfirst,\n flatClone,\n promiseSeries,\n pluginMissing,\n ensureNotFalsy,\n getFromMapOrThrow,\n PROMISE_RESOLVE_FALSE,\n PROMISE_RESOLVE_VOID\n} from './plugins/utils/index.ts';\nimport {\n fillObjectDataBeforeInsert,\n createRxCollectionStorageInstance,\n removeCollectionStorages,\n ensureRxCollectionIsNotDestroyed\n} from './rx-collection-helper.ts';\nimport {\n createRxQuery,\n _getDefaultQuery\n} from './rx-query.ts';\nimport {\n newRxError,\n newRxTypeError\n} from './rx-error.ts';\nimport type {\n RxMigrationState\n} from './plugins/migration-schema/index.ts';\nimport {\n DocumentCache,\n mapDocumentsDataToCacheDocs\n} from './doc-cache.ts';\nimport {\n QueryCache,\n createQueryCache,\n defaultCacheReplacementPolicy\n} from './query-cache.ts';\nimport {\n ChangeEventBuffer,\n createChangeEventBuffer\n} from './change-event-buffer.ts';\nimport {\n runAsyncPluginHooks,\n runPluginHooks\n} from './hooks.ts';\n\nimport {\n Subscription,\n Observable\n} from 'rxjs';\n\nimport type {\n KeyFunctionMap,\n RxCollection,\n RxDatabase,\n RxQuery,\n RxDocument,\n RxDumpCollection,\n RxDumpCollectionAny,\n MangoQuery,\n MangoQueryNoLimit,\n RxCacheReplacementPolicy,\n RxStorageWriteError,\n RxDocumentData,\n RxStorageInstanceCreationParams,\n BulkWriteRow,\n RxChangeEvent,\n RxChangeEventInsert,\n RxChangeEventUpdate,\n RxChangeEventDelete,\n RxStorageInstance,\n CollectionsOfDatabase,\n RxChangeEventBulk,\n RxLocalDocumentData,\n RxDocumentBase,\n 
RxConflictHandler,\n MaybePromise,\n CRDTEntry,\n MangoQuerySelectorAndIndex,\n MigrationStrategies\n} from './types/index.d.ts';\n\nimport {\n RxSchema\n} from './rx-schema.ts';\n\nimport {\n createNewRxDocument,\n getRxDocumentConstructor\n} from './rx-document-prototype-merge.ts';\nimport {\n getWrappedStorageInstance,\n throwIfIsStorageWriteError,\n WrappedRxStorageInstance\n} from './rx-storage-helper.ts';\nimport { defaultConflictHandler } from './replication-protocol/index.ts';\nimport { IncrementalWriteQueue } from './incremental-write.ts';\nimport { beforeDocumentUpdateWrite } from './rx-document.ts';\nimport { overwritable } from './overwritable.ts';\n\nconst HOOKS_WHEN = ['pre', 'post'] as const;\ntype HookWhenType = typeof HOOKS_WHEN[number];\nconst HOOKS_KEYS = ['insert', 'save', 'remove', 'create'] as const;\ntype HookKeyType = typeof HOOKS_KEYS[number];\nlet hooksApplied = false;\n\nexport class RxCollectionBase<\n InstanceCreationOptions,\n RxDocumentType = { [prop: string]: any; },\n OrmMethods = {},\n StaticMethods = { [key: string]: any; },\n Reactivity = any\n> {\n\n\n /**\n * Stores all 'normal' documents\n */\n public storageInstance: WrappedRxStorageInstance = {} as any;\n public readonly timeouts: Set> = new Set();\n public incrementalWriteQueue: IncrementalWriteQueue = {} as any;\n\n constructor(\n public database: RxDatabase,\n public name: string,\n public schema: RxSchema,\n public internalStorageInstance: RxStorageInstance,\n public instanceCreationOptions: InstanceCreationOptions = {} as any,\n public migrationStrategies: MigrationStrategies = {},\n public methods: KeyFunctionMap = {},\n public attachments: KeyFunctionMap = {},\n public options: any = {},\n public cacheReplacementPolicy: RxCacheReplacementPolicy = defaultCacheReplacementPolicy,\n public statics: KeyFunctionMap = {},\n public conflictHandler: RxConflictHandler = defaultConflictHandler\n ) {\n _applyHookFunctions(this.asRxCollection);\n }\n\n get insert$(): Observable> 
{\n return this.$.pipe(\n filter(cE => cE.operation === 'INSERT')\n ) as any;\n }\n get update$(): Observable> {\n return this.$.pipe(\n filter(cE => cE.operation === 'UPDATE')\n ) as any;\n }\n get remove$(): Observable> {\n return this.$.pipe(\n filter(cE => cE.operation === 'DELETE')\n ) as any;\n }\n\n public _incrementalUpsertQueues: Map> = new Map();\n // defaults\n public synced: boolean = false;\n public hooks: {\n [key in HookKeyType]: {\n [when in HookWhenType]: {\n series: Function[];\n parallel: Function[];\n };\n }\n } = {} as any;\n public _subs: Subscription[] = [];\n\n public _docCache: DocumentCache = {} as any;\n\n public _queryCache: QueryCache = createQueryCache();\n public $: Observable> = {} as any;\n public checkpoint$: Observable = {} as any;\n public _changeEventBuffer: ChangeEventBuffer = {} as ChangeEventBuffer;\n\n\n\n /**\n * When the collection is destroyed,\n * these functions will be called an awaited.\n * Used to automatically clean up stuff that\n * belongs to this collection.\n */\n public onDestroy: (() => MaybePromise)[] = [];\n public destroyed = false;\n\n public onRemove: (() => MaybePromise)[] = [];\n\n public async prepare(): Promise {\n this.storageInstance = getWrappedStorageInstance(\n this.database,\n this.internalStorageInstance,\n this.schema.jsonSchema\n );\n this.incrementalWriteQueue = new IncrementalWriteQueue(\n this.storageInstance,\n this.schema.primaryPath,\n (newData, oldData) => beforeDocumentUpdateWrite(this as any, newData, oldData),\n result => this._runHooks('post', 'save', result)\n );\n\n const collectionEventBulks$ = this.database.eventBulks$.pipe(\n filter(changeEventBulk => changeEventBulk.collectionName === this.name),\n );\n this.$ = collectionEventBulks$.pipe(\n mergeMap(changeEventBulk => changeEventBulk.events),\n );\n this.checkpoint$ = collectionEventBulks$.pipe(\n map(changeEventBulk => changeEventBulk.checkpoint),\n );\n\n this._changeEventBuffer = 
createChangeEventBuffer(this.asRxCollection);\n let documentConstructor: any;\n this._docCache = new DocumentCache(\n this.schema.primaryPath,\n this.$.pipe(filter(cE => !cE.isLocal)),\n docData => {\n if (!documentConstructor) {\n documentConstructor = getRxDocumentConstructor(this.asRxCollection);\n }\n return createNewRxDocument(this.asRxCollection, documentConstructor, docData);\n }\n );\n\n\n const listenToRemoveSub = this.database.internalStore.changeStream().pipe(\n filter(bulk => {\n const key = this.name + '-' + this.schema.version;\n const found = bulk.events.find(event => {\n return (\n event.documentData.context === 'collection' &&\n event.documentData.key === key &&\n event.operation === 'DELETE'\n );\n });\n return !!found;\n })\n ).subscribe(async () => {\n await this.destroy();\n await Promise.all(this.onRemove.map(fn => fn()));\n });\n this._subs.push(listenToRemoveSub);\n\n\n /**\n * TODO Instead of resolving the EventBulk array here and spit it into\n * single events, we should fully work with event bulks internally\n * to save performance.\n */\n const databaseStorageToken = await this.database.storageToken;\n const subDocs = this.storageInstance.changeStream().subscribe(eventBulk => {\n const events = new Array(eventBulk.events.length);\n const rawEvents = eventBulk.events;\n const collectionName = this.name;\n const deepFreezeWhenDevMode = overwritable.deepFreezeWhenDevMode;\n for (let index = 0; index < rawEvents.length; index++) {\n const event = rawEvents[index];\n events[index] = {\n documentId: event.documentId,\n collectionName,\n isLocal: false,\n operation: event.operation,\n documentData: deepFreezeWhenDevMode(event.documentData) as any,\n previousDocumentData: deepFreezeWhenDevMode(event.previousDocumentData) as any\n };\n }\n const changeEventBulk: RxChangeEventBulk = {\n id: eventBulk.id,\n internal: false,\n collectionName: this.name,\n storageToken: databaseStorageToken,\n events,\n databaseToken: this.database.token,\n 
checkpoint: eventBulk.checkpoint,\n context: eventBulk.context,\n endTime: eventBulk.endTime,\n startTime: eventBulk.startTime\n };\n this.database.$emit(changeEventBulk);\n });\n this._subs.push(subDocs);\n\n /**\n * Resolve the conflict tasks\n * of the RxStorageInstance\n */\n this._subs.push(\n this.storageInstance\n .conflictResultionTasks()\n .subscribe(task => {\n this\n .conflictHandler(task.input, task.context)\n .then(output => {\n this.storageInstance.resolveConflictResultionTask({\n id: task.id,\n output\n });\n });\n })\n );\n\n return PROMISE_RESOLVE_VOID;\n }\n\n\n /**\n * Manually call the cleanup function of the storage.\n * @link https://rxdb.info/cleanup.html\n */\n cleanup(_minimumDeletedTime?: number): Promise {\n ensureRxCollectionIsNotDestroyed(this);\n throw pluginMissing('cleanup');\n }\n\n // overwritten by migration-plugin\n migrationNeeded(): Promise {\n throw pluginMissing('migration-schema');\n }\n getMigrationState(): RxMigrationState {\n throw pluginMissing('migration-schema');\n }\n startMigration(batchSize: number = 10): Promise {\n ensureRxCollectionIsNotDestroyed(this);\n return this.getMigrationState().startMigration(batchSize);\n }\n migratePromise(batchSize: number = 10): Promise {\n return this.getMigrationState().migratePromise(batchSize);\n }\n\n async insert(\n json: RxDocumentType | RxDocument\n ): Promise> {\n ensureRxCollectionIsNotDestroyed(this);\n const writeResult = await this.bulkInsert([json as any]);\n\n const isError = writeResult.error[0];\n throwIfIsStorageWriteError(this as any, (json as any)[this.schema.primaryPath] as any, json, isError);\n const insertResult = ensureNotFalsy(writeResult.success[0]);\n return insertResult;\n }\n\n async bulkInsert(\n docsData: RxDocumentType[]\n ): Promise<{\n success: RxDocument[];\n error: RxStorageWriteError[];\n }> {\n ensureRxCollectionIsNotDestroyed(this);\n /**\n * Optimization shortcut,\n * do nothing when called with an empty array\n */\n if (docsData.length === 0) 
{\n return {\n success: [],\n error: []\n };\n }\n\n const primaryPath = this.schema.primaryPath;\n\n\n /**\n * This code is a bit redundant for better performance.\n * Instead of iterating multiple times,\n * we directly transform the input to a write-row array.\n */\n let insertRows: BulkWriteRow[];\n if (this.hasHooks('pre', 'insert')) {\n insertRows = await Promise.all(\n docsData.map(docData => {\n const useDocData = fillObjectDataBeforeInsert(this.schema, docData);\n return this._runHooks('pre', 'insert', useDocData)\n .then(() => {\n return { document: useDocData };\n });\n })\n );\n } else {\n insertRows = [];\n for (let index = 0; index < docsData.length; index++) {\n const docData = docsData[index];\n const useDocData = fillObjectDataBeforeInsert(this.schema, docData);\n insertRows[index] = { document: useDocData };\n }\n }\n\n const results = await this.storageInstance.bulkWrite(\n insertRows,\n 'rx-collection-bulk-insert'\n );\n\n // create documents\n const rxDocuments = mapDocumentsDataToCacheDocs(this._docCache, results.success);\n\n if (this.hasHooks('post', 'insert')) {\n const docsMap: Map = new Map();\n insertRows.forEach(row => {\n const doc = row.document;\n docsMap.set((doc as any)[primaryPath] as any, doc);\n });\n await Promise.all(\n rxDocuments.map(doc => {\n return this._runHooks(\n 'post', 'insert',\n docsMap.get(doc.primary),\n doc\n );\n })\n );\n }\n\n return {\n success: rxDocuments,\n error: results.error\n };\n }\n\n async bulkRemove(\n ids: string[]\n ): Promise<{\n success: RxDocument[];\n error: RxStorageWriteError[];\n }> {\n ensureRxCollectionIsNotDestroyed(this);\n const primaryPath = this.schema.primaryPath;\n /**\n * Optimization shortcut,\n * do nothing when called with an empty array\n */\n if (ids.length === 0) {\n return {\n success: [],\n error: []\n };\n }\n\n const rxDocumentMap = await this.findByIds(ids).exec();\n const docsData: RxDocumentData[] = [];\n const docsMap: Map> = new Map();\n 
Array.from(rxDocumentMap.values()).forEach(rxDocument => {\n const data: RxDocumentData = rxDocument.toMutableJSON(true) as any;\n docsData.push(data);\n docsMap.set(rxDocument.primary, data);\n });\n\n await Promise.all(\n docsData.map(doc => {\n const primary = (doc as any)[this.schema.primaryPath];\n return this._runHooks('pre', 'remove', doc, rxDocumentMap.get(primary));\n })\n );\n const removeDocs: BulkWriteRow[] = docsData.map(doc => {\n const writeDoc = flatClone(doc);\n writeDoc._deleted = true;\n return {\n previous: doc,\n document: writeDoc\n };\n });\n const results = await this.storageInstance.bulkWrite(\n removeDocs,\n 'rx-collection-bulk-remove'\n );\n\n const successIds: string[] = results.success.map(d => d[primaryPath] as string);\n\n // run hooks\n await Promise.all(\n successIds.map(id => {\n return this._runHooks(\n 'post',\n 'remove',\n docsMap.get(id),\n rxDocumentMap.get(id)\n );\n })\n );\n\n const rxDocuments = successIds.map(id => getFromMapOrThrow(rxDocumentMap, id));\n\n return {\n success: rxDocuments,\n error: results.error\n };\n }\n\n /**\n * same as bulkInsert but overwrites existing document with same primary\n */\n async bulkUpsert(docsData: Partial[]): Promise<{\n success: RxDocument[];\n error: RxStorageWriteError[];\n }> {\n ensureRxCollectionIsNotDestroyed(this);\n const insertData: RxDocumentType[] = [];\n const useJsonByDocId: Map = new Map();\n docsData.forEach(docData => {\n const useJson = fillObjectDataBeforeInsert(this.schema, docData);\n const primary: string = useJson[this.schema.primaryPath] as any;\n if (!primary) {\n throw newRxError('COL3', {\n primaryPath: this.schema.primaryPath as string,\n data: useJson,\n schema: this.schema.jsonSchema\n });\n }\n useJsonByDocId.set(primary, useJson);\n insertData.push(useJson);\n });\n\n const insertResult = await this.bulkInsert(insertData);\n const success = insertResult.success.slice(0);\n const error: RxStorageWriteError[] = [];\n\n // update the ones that existed 
already\n await Promise.all(\n insertResult.error.map(async (err) => {\n if (err.status !== 409) {\n error.push(err);\n } else {\n const id = err.documentId;\n const writeData = getFromMapOrThrow(useJsonByDocId, id);\n const docDataInDb = ensureNotFalsy(err.documentInDb);\n const doc = this._docCache.getCachedRxDocuments([docDataInDb])[0];\n const newDoc = await doc.incrementalModify(() => writeData);\n success.push(newDoc);\n }\n })\n );\n return {\n error,\n success\n };\n }\n\n /**\n * same as insert but overwrites existing document with same primary\n */\n async upsert(json: Partial): Promise> {\n ensureRxCollectionIsNotDestroyed(this);\n const bulkResult = await this.bulkUpsert([json]);\n throwIfIsStorageWriteError(\n this.asRxCollection,\n (json as any)[this.schema.primaryPath],\n json as any,\n bulkResult.error[0]\n );\n return bulkResult.success[0];\n }\n\n /**\n * upserts to a RxDocument, uses incrementalModify if document already exists\n */\n incrementalUpsert(json: Partial): Promise> {\n ensureRxCollectionIsNotDestroyed(this);\n const useJson = fillObjectDataBeforeInsert(this.schema, json);\n const primary: string = useJson[this.schema.primaryPath] as any;\n if (!primary) {\n throw newRxError('COL4', {\n data: json\n });\n }\n\n // ensure that it won't try 2 parallel runs\n let queue = this._incrementalUpsertQueues.get(primary);\n if (!queue) {\n queue = PROMISE_RESOLVE_VOID;\n }\n queue = queue\n .then(() => _incrementalUpsertEnsureRxDocumentExists(this as any, primary as any, useJson))\n .then((wasInserted) => {\n if (!wasInserted.inserted) {\n return _incrementalUpsertUpdate(wasInserted.doc, useJson);\n } else {\n return wasInserted.doc;\n }\n });\n this._incrementalUpsertQueues.set(primary, queue);\n return queue;\n }\n\n find(queryObj?: MangoQuery): RxQuery<\n RxDocumentType,\n RxDocument[],\n OrmMethods,\n Reactivity\n > {\n ensureRxCollectionIsNotDestroyed(this);\n if (typeof queryObj === 'string') {\n throw newRxError('COL5', {\n queryObj\n 
});\n }\n\n if (!queryObj) {\n queryObj = _getDefaultQuery();\n }\n\n const query = createRxQuery('find', queryObj, this as any);\n return query as any;\n }\n\n findOne(\n queryObj?: MangoQueryNoLimit | string\n ): RxQuery<\n RxDocumentType,\n RxDocument | null,\n OrmMethods,\n Reactivity\n > {\n ensureRxCollectionIsNotDestroyed(this);\n\n // TODO move this check to dev-mode plugin\n if (\n typeof queryObj === 'number' ||\n Array.isArray(queryObj)\n ) {\n throw newRxTypeError('COL6', {\n queryObj\n });\n }\n\n let query;\n\n if (typeof queryObj === 'string') {\n query = createRxQuery('findOne', {\n selector: {\n [this.schema.primaryPath]: queryObj\n },\n limit: 1\n }, this as any);\n } else {\n if (!queryObj) {\n queryObj = _getDefaultQuery();\n }\n\n\n // cannot have limit on findOne queries because it will be overwritten\n if ((queryObj as MangoQuery).limit) {\n throw newRxError('QU6');\n }\n\n queryObj = flatClone(queryObj);\n (queryObj as any).limit = 1;\n query = createRxQuery('findOne', queryObj, this as any);\n }\n\n\n return query as any;\n }\n\n count(queryObj?: MangoQuerySelectorAndIndex): RxQuery<\n RxDocumentType,\n number,\n OrmMethods,\n Reactivity\n > {\n ensureRxCollectionIsNotDestroyed(this);\n if (!queryObj) {\n queryObj = _getDefaultQuery();\n }\n const query = createRxQuery('count', queryObj, this as any);\n return query as any;\n }\n\n /**\n * find a list documents by their primary key\n * has way better performance then running multiple findOne() or a find() with a complex $or-selected\n */\n findByIds(\n ids: string[]\n ): RxQuery<\n RxDocumentType,\n Map>,\n OrmMethods,\n Reactivity\n > {\n ensureRxCollectionIsNotDestroyed(this);\n const mangoQuery: MangoQuery = {\n selector: {\n [this.schema.primaryPath]: {\n $in: ids.slice(0)\n }\n } as any\n };\n const query = createRxQuery('findByIds', mangoQuery, this as any);\n return query as any;\n }\n\n /**\n * Export collection to a JSON friendly format.\n */\n exportJSON(): Promise>;\n 
exportJSON(): Promise>;\n exportJSON(): Promise {\n throw pluginMissing('json-dump');\n }\n\n /**\n * Import the parsed JSON export into the collection.\n * @param _exportedJSON The previously exported data from the `.exportJSON()` method.\n */\n importJSON(_exportedJSON: RxDumpCollectionAny): Promise {\n throw pluginMissing('json-dump');\n }\n\n insertCRDT(_updateObj: CRDTEntry | CRDTEntry[]): RxDocument {\n throw pluginMissing('crdt');\n }\n\n /**\n * HOOKS\n */\n addHook(when: HookWhenType, key: HookKeyType, fun: any, parallel = false) {\n if (typeof fun !== 'function') {\n throw newRxTypeError('COL7', {\n key,\n when\n });\n }\n\n if (!HOOKS_WHEN.includes(when)) {\n throw newRxTypeError('COL8', {\n key,\n when\n });\n }\n\n if (!HOOKS_KEYS.includes(key)) {\n throw newRxError('COL9', {\n key\n });\n }\n\n if (when === 'post' && key === 'create' && parallel === true) {\n throw newRxError('COL10', {\n when,\n key,\n parallel\n });\n }\n\n // bind this-scope to hook-function\n const boundFun = fun.bind(this);\n\n const runName = parallel ? 
'parallel' : 'series';\n\n this.hooks[key] = this.hooks[key] || {};\n this.hooks[key][when] = this.hooks[key][when] || {\n series: [],\n parallel: []\n };\n this.hooks[key][when][runName].push(boundFun);\n }\n\n getHooks(when: HookWhenType, key: HookKeyType) {\n if (\n !this.hooks[key] ||\n !this.hooks[key][when]\n ) {\n return {\n series: [],\n parallel: []\n };\n }\n return this.hooks[key][when];\n }\n\n hasHooks(when: HookWhenType, key: HookKeyType) {\n /**\n * Performance shortcut\n * so that we not have to build the empty object.\n */\n if (\n !this.hooks[key] ||\n !this.hooks[key][when]\n ) {\n return false;\n }\n\n const hooks = this.getHooks(when, key);\n if (!hooks) {\n return false;\n }\n return hooks.series.length > 0 || hooks.parallel.length > 0;\n }\n\n _runHooks(when: HookWhenType, key: HookKeyType, data: any, instance?: any): Promise {\n const hooks = this.getHooks(when, key);\n\n if (!hooks) {\n return PROMISE_RESOLVE_VOID;\n }\n\n // run parallel: false\n const tasks = hooks.series.map((hook: any) => () => hook(data, instance));\n return promiseSeries(tasks)\n // run parallel: true\n .then(() => Promise.all(\n hooks.parallel\n .map((hook: any) => hook(data, instance))\n ));\n }\n\n /**\n * does the same as ._runHooks() but with non-async-functions\n */\n _runHooksSync(when: HookWhenType, key: HookKeyType, data: any, instance: any) {\n if (!this.hasHooks(when, key)) {\n return;\n }\n const hooks = this.getHooks(when, key);\n if (!hooks) return;\n hooks.series.forEach((hook: any) => hook(data, instance));\n }\n\n /**\n * Returns a promise that resolves after the given time.\n * Ensures that is properly cleans up when the collection is destroyed\n * so that no running timeouts prevent the exit of the JavaScript process.\n */\n promiseWait(time: number): Promise {\n const ret = new Promise(res => {\n const timeout = setTimeout(() => {\n this.timeouts.delete(timeout);\n res();\n }, time);\n this.timeouts.add(timeout);\n });\n return ret;\n }\n\n async 
destroy(): Promise {\n if (this.destroyed) {\n return PROMISE_RESOLVE_FALSE;\n }\n\n\n await Promise.all(this.onDestroy.map(fn => fn()));\n\n /**\n * Settings destroyed = true\n * must be the first thing to do,\n * so for example the replication can directly stop\n * instead of sending requests to a closed storage.\n */\n this.destroyed = true;\n\n\n Array.from(this.timeouts).forEach(timeout => clearTimeout(timeout));\n if (this._changeEventBuffer) {\n this._changeEventBuffer.destroy();\n }\n /**\n * First wait until the whole database is idle.\n * This ensures that the storage does not get closed\n * while some operation is running.\n * It is important that we do not intercept a running call\n * because it might lead to undefined behavior like when a doc is written\n * but the change is not added to the changes collection.\n */\n return this.database.requestIdlePromise()\n .then(() => this.storageInstance.close())\n .then(() => {\n /**\n * Unsubscribing must be done AFTER the storageInstance.close()\n * Because the conflict handling is part of the subscriptions and\n * otherwise there might be open conflicts to be resolved which\n * will then stuck and never resolve.\n */\n this._subs.forEach(sub => sub.unsubscribe());\n\n delete this.database.collections[this.name];\n return runAsyncPluginHooks('postDestroyRxCollection', this).then(() => true);\n });\n }\n\n /**\n * remove all data of the collection\n */\n async remove(): Promise {\n await this.destroy();\n await Promise.all(this.onRemove.map(fn => fn()));\n await removeCollectionStorages(\n this.database.storage,\n this.database.internalStore,\n this.database.token,\n this.database.name,\n this.name,\n this.database.password,\n this.database.hashFunction\n );\n }\n\n get asRxCollection(): RxCollection {\n return this as any;\n }\n}\n\n/**\n * adds the hook-functions to the collections prototype\n * this runs only once\n */\nfunction _applyHookFunctions(\n collection: RxCollection\n) {\n if (hooksApplied) return; 
// already run\n hooksApplied = true;\n const colProto = Object.getPrototypeOf(collection);\n HOOKS_KEYS.forEach(key => {\n HOOKS_WHEN.map(when => {\n const fnName = when + ucfirst(key);\n colProto[fnName] = function (fun: string, parallel: boolean) {\n return this.addHook(when, key, fun, parallel);\n };\n });\n });\n}\n\nfunction _incrementalUpsertUpdate(\n doc: RxDocumentBase,\n json: RxDocumentData\n): Promise> {\n return doc.incrementalModify((_innerDoc) => {\n return json;\n });\n}\n\n/**\n * ensures that the given document exists\n * @return promise that resolves with new doc and flag if inserted\n */\nfunction _incrementalUpsertEnsureRxDocumentExists(\n rxCollection: RxCollection,\n primary: string,\n json: any\n): Promise<\n {\n doc: RxDocument;\n inserted: boolean;\n }\n> {\n /**\n * Optimisation shortcut,\n * first try to find the document in the doc-cache\n */\n const docDataFromCache = rxCollection._docCache.getLatestDocumentDataIfExists(primary);\n if (docDataFromCache) {\n return Promise.resolve({\n doc: rxCollection._docCache.getCachedRxDocuments([docDataFromCache])[0],\n inserted: false\n });\n }\n return rxCollection.findOne(primary).exec()\n .then(doc => {\n if (!doc) {\n return rxCollection.insert(json).then(newDoc => ({\n doc: newDoc,\n inserted: true\n }));\n } else {\n return {\n doc,\n inserted: false\n };\n }\n });\n}\n\n/**\n * creates and prepares a new collection\n */\nexport function createRxCollection(\n {\n database,\n name,\n schema,\n instanceCreationOptions = {},\n migrationStrategies = {},\n autoMigrate = true,\n statics = {},\n methods = {},\n attachments = {},\n options = {},\n localDocuments = false,\n cacheReplacementPolicy = defaultCacheReplacementPolicy,\n conflictHandler = defaultConflictHandler\n }: any\n): Promise {\n const storageInstanceCreationParams: RxStorageInstanceCreationParams = {\n databaseInstanceToken: database.token,\n databaseName: database.name,\n collectionName: name,\n schema: schema.jsonSchema,\n options: 
instanceCreationOptions,\n multiInstance: database.multiInstance,\n password: database.password,\n devMode: overwritable.isDevMode()\n };\n\n runPluginHooks(\n 'preCreateRxStorageInstance',\n storageInstanceCreationParams\n );\n\n return createRxCollectionStorageInstance(\n database,\n storageInstanceCreationParams\n ).then(storageInstance => {\n const collection = new RxCollectionBase(\n database,\n name,\n schema,\n storageInstance,\n instanceCreationOptions,\n migrationStrategies,\n methods,\n attachments,\n options,\n cacheReplacementPolicy,\n statics,\n conflictHandler\n );\n\n return collection\n .prepare()\n .then(() => {\n // ORM add statics\n Object\n .entries(statics)\n .forEach(([funName, fun]) => {\n Object.defineProperty(collection, funName, {\n get: () => (fun as any).bind(collection)\n });\n });\n\n let ret = PROMISE_RESOLVE_VOID;\n if (autoMigrate && collection.schema.version !== 0) {\n ret = collection.migratePromise();\n }\n return ret;\n })\n .then(() => {\n runPluginHooks('createRxCollection', {\n collection,\n creator: {\n name,\n schema,\n storageInstance,\n instanceCreationOptions,\n migrationStrategies,\n methods,\n attachments,\n options,\n cacheReplacementPolicy,\n localDocuments,\n statics\n }\n });\n return collection as any;\n })\n /**\n * If the collection creation fails,\n * we yet have to close the storage instances.\n */\n .catch(err => {\n return storageInstance.close()\n .then(() => Promise.reject(err));\n });\n });\n}\n\nexport function isRxCollection(obj: any): boolean {\n return obj instanceof 
RxCollectionBase;\n}\n"],"mappings":";;;;;;;;;;AAAA,IAAAA,KAAA,GAAAC,OAAA;AAMA,IAAAC,MAAA,GAAAD,OAAA;AAUA,IAAAE,mBAAA,GAAAF,OAAA;AAMA,IAAAG,QAAA,GAAAH,OAAA;AAIA,IAAAI,QAAA,GAAAJ,OAAA;AAOA,IAAAK,SAAA,GAAAL,OAAA;AAIA,IAAAM,WAAA,GAAAN,OAAA;AAKA,IAAAO,kBAAA,GAAAP,OAAA;AAIA,IAAAQ,MAAA,GAAAR,OAAA;AA6CA,IAAAS,yBAAA,GAAAT,OAAA;AAIA,IAAAU,gBAAA,GAAAV,OAAA;AAKA,IAAAW,OAAA,GAAAX,OAAA;AACA,IAAAY,iBAAA,GAAAZ,OAAA;AACA,IAAAa,WAAA,GAAAb,OAAA;AACA,IAAAc,aAAA,GAAAd,OAAA;AAEA,IAAMe,UAAU,GAAG,CAAC,KAAK,EAAE,MAAM,CAAU;AAE3C,IAAMC,UAAU,GAAG,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,EAAE,QAAQ,CAAU;AAElE,IAAIC,YAAY,GAAG,KAAK;AAAC,IAEZC,gBAAgB,GAAAC,OAAA,CAAAD,gBAAA;EASzB;AACJ;AACA;;EAKI,SAAAA,iBACWE,QAAqF,EACrFC,IAAY,EACZC,MAAgC,EAChCC,uBAAwF,EACxFC,uBAAgD,GAAG,CAAC,CAAQ,EAC5DC,mBAAwC,GAAG,CAAC,CAAC,EAC7CC,OAAuB,GAAG,CAAC,CAAC,EAC5BC,WAA2B,GAAG,CAAC,CAAC,EAChCC,OAAY,GAAG,CAAC,CAAC,EACjBC,sBAAgD,GAAGC,yCAA6B,EAChFC,OAAuB,GAAG,CAAC,CAAC,EAC5BC,eAAkD,GAAGC,8BAAsB,EACpF;IAAA,KAjBKC,eAAe,GAA2E,CAAC,CAAC;IAAA,KACnFC,QAAQ,GAAuC,IAAIC,GAAG,CAAC,CAAC;IAAA,KACjEC,qBAAqB,GAA0C,CAAC,CAAC;IAAA,KAmCjEC,wBAAwB,GAA8B,IAAIC,GAAG,CAAC,CAAC;IAAA,KAE/DC,MAAM,GAAY,KAAK;IAAA,KACvBC,KAAK,GAOR,CAAC,CAAC;IAAA,KACCC,KAAK,GAAmB,EAAE;IAAA,KAE1BrC,SAAS,GAA8C,CAAC,CAAC;IAAA,KAEzDC,WAAW,GAAe,IAAAqC,4BAAgB,EAAC,CAAC;IAAA,KAC5CC,CAAC,GAA8C,CAAC,CAAC;IAAA,KACjDC,WAAW,GAAoB,CAAC,CAAC;IAAA,KACjCtC,kBAAkB,GAAsC,CAAC,CAAC;IAAA,KAU1DuC,SAAS,GAAgC,EAAE;IAAA,KAC3CC,SAAS,GAAG,KAAK;IAAA,KAEjBC,QAAQ,GAAgC,EAAE;IAAA,KA/DtC5B,QAAqF,GAArFA,QAAqF;IAAA,KACrFC,IAAY,GAAZA,IAAY;IAAA,KACZC,MAAgC,GAAhCA,MAAgC;IAAA,KAChCC,uBAAwF,GAAxFA,uBAAwF;IAAA,KACxFC,uBAAgD,GAAhDA,uBAAgD;IAAA,KAChDC,mBAAwC,GAAxCA,mBAAwC;IAAA,KACxCC,OAAuB,GAAvBA,OAAuB;IAAA,KACvBC,WAA2B,GAA3BA,WAA2B;IAAA,KAC3BC,OAAY,GAAZA,OAAY;IAAA,KACZC,sBAAgD,GAAhDA,sBAAgD;IAAA,KAChDE,OAAuB,GAAvBA,OAAuB;IAAA,KACvBC,eAAkD,GAAlDA,eAAkD;IAEzDiB,mBAAmB,CAAC,IAAI,CAACC,cAAc,CAAC;EAC5C;EAAC,IAAAC,MAAA,GAAAjC,gBAAA,CAAAkC,SAAA;EAAAD,MAAA,CAmDYE,OAAO,GAApB,eAAAA,QAAA,EAAsC;IAClC,IAAI,CAACnB,eAAe,GAAG,IAAAoB,
0CAAyB,EAC5C,IAAI,CAAClC,QAAQ,EACb,IAAI,CAACG,uBAAuB,EAC5B,IAAI,CAACD,MAAM,CAACiC,UAChB,CAAC;IACD,IAAI,CAAClB,qBAAqB,GAAG,IAAImB,uCAAqB,CAClD,IAAI,CAACtB,eAAe,EACpB,IAAI,CAACZ,MAAM,CAACmC,WAAW,EACvB,CAACC,OAAO,EAAEC,OAAO,KAAK,IAAAC,qCAAyB,EAAC,IAAI,EAASF,OAAO,EAAEC,OAAO,CAAC,EAC9EE,MAAM,IAAI,IAAI,CAACC,SAAS,CAAC,MAAM,EAAE,MAAM,EAAED,MAAM,CACnD,CAAC;IAED,IAAME,qBAAqB,GAAG,IAAI,CAAC3C,QAAQ,CAAC4C,WAAW,CAACC,IAAI,CACxD,IAAAC,YAAM,EAACC,eAAe,IAAIA,eAAe,CAACC,cAAc,KAAK,IAAI,CAAC/C,IAAI,CAC1E,CAAC;IACD,IAAI,CAACuB,CAAC,GAAGmB,qBAAqB,CAACE,IAAI,CAC/B,IAAAI,cAAQ,EAACF,eAAe,IAAIA,eAAe,CAACG,MAAM,CACtD,CAAC;IACD,IAAI,CAACzB,WAAW,GAAGkB,qBAAqB,CAACE,IAAI,CACzC,IAAAM,SAAG,EAACJ,eAAe,IAAIA,eAAe,CAACK,UAAU,CACrD,CAAC;IAED,IAAI,CAACjE,kBAAkB,GAAG,IAAAkE,0CAAuB,EAAiB,IAAI,CAACvB,cAAc,CAAC;IACtF,IAAIwB,mBAAwB;IAC5B,IAAI,CAACrE,SAAS,GAAG,IAAIsE,uBAAa,CAC9B,IAAI,CAACrD,MAAM,CAACmC,WAAW,EACvB,IAAI,CAACb,CAAC,CAACqB,IAAI,CAAC,IAAAC,YAAM,EAACU,EAAE,IAAI,CAACA,EAAE,CAACC,OAAO,CAAC,CAAC,EACtCC,OAAO,IAAI;MACP,IAAI,CAACJ,mBAAmB,EAAE;QACtBA,mBAAmB,GAAG,IAAAK,kDAAwB,EAAC,IAAI,CAAC7B,cAAc,CAAC;MACvE;MACA,OAAO,IAAA8B,6CAAmB,EAAC,IAAI,CAAC9B,cAAc,EAAEwB,mBAAmB,EAAEI,OAAO,CAAC;IACjF,CACJ,CAAC;IAGD,IAAMG,iBAAiB,GAAG,IAAI,CAAC7D,QAAQ,CAAC8D,aAAa,CAACC,YAAY,CAAC,CAAC,CAAClB,IAAI,CACrE,IAAAC,YAAM,EAACkB,IAAI,IAAI;MACX,IAAMC,GAAG,GAAG,IAAI,CAAChE,IAAI,GAAG,GAAG,GAAG,IAAI,CAACC,MAAM,CAACgE,OAAO;MACjD,IAAMC,KAAK,GAAGH,IAAI,CAACd,MAAM,CAACkB,IAAI,CAACC,KAAK,IAAI;QACpC,OACIA,KAAK,CAACC,YAAY,CAACC,OAAO,KAAK,YAAY,IAC3CF,KAAK,CAACC,YAAY,CAACL,GAAG,KAAKA,GAAG,IAC9BI,KAAK,CAACG,SAAS,KAAK,QAAQ;MAEpC,CAAC,CAAC;MACF,OAAO,CAAC,CAACL,KAAK;IAClB,CAAC,CACL,CAAC,CAACM,SAAS,CAAC,YAAY;MACpB,MAAM,IAAI,CAACC,OAAO,CAAC,CAAC;MACpB,MAAMC,OAAO,CAACC,GAAG,CAAC,IAAI,CAAChD,QAAQ,CAACuB,GAAG,CAAC0B,EAAE,IAAIA,EAAE,CAAC,CAAC,CAAC,CAAC;IACpD,CAAC,CAAC;IACF,IAAI,CAACvD,KAAK,CAACwD,IAAI,CAACjB,iBAAiB,CAAC;;IAGlC;AACR;AACA;AACA;AACA;IACQ,IAAMkB,oBAAoB,GAAG,MAAM,IAAI,CAAC/E,QAAQ,CAACgF,YAAY;IAC7D,IAAMC,OAAO,GAAG,IAAI,CAACnE,eAAe,CAACiD,
YAAY,CAAC,CAAC,CAACU,SAAS,CAACS,SAAS,IAAI;MACvE,IAAMhC,MAAM,GAAG,IAAIiC,KAAK,CAACD,SAAS,CAAChC,MAAM,CAACkC,MAAM,CAAC;MACjD,IAAMC,SAAS,GAAGH,SAAS,CAAChC,MAAM;MAClC,IAAMF,cAAc,GAAG,IAAI,CAAC/C,IAAI;MAChC,IAAMqF,qBAAqB,GAAGC,0BAAY,CAACD,qBAAqB;MAChE,KAAK,IAAIE,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGH,SAAS,CAACD,MAAM,EAAEI,KAAK,EAAE,EAAE;QACnD,IAAMnB,KAAK,GAAGgB,SAAS,CAACG,KAAK,CAAC;QAC9BtC,MAAM,CAACsC,KAAK,CAAC,GAAG;UACZC,UAAU,EAAEpB,KAAK,CAACoB,UAAU;UAC5BzC,cAAc;UACdS,OAAO,EAAE,KAAK;UACde,SAAS,EAAEH,KAAK,CAACG,SAAS;UAC1BF,YAAY,EAAEgB,qBAAqB,CAACjB,KAAK,CAACC,YAAY,CAAQ;UAC9DoB,oBAAoB,EAAEJ,qBAAqB,CAACjB,KAAK,CAACqB,oBAAoB;QAC1E,CAAC;MACL;MACA,IAAM3C,eAAwE,GAAG;QAC7E4C,EAAE,EAAET,SAAS,CAACS,EAAE;QAChBC,QAAQ,EAAE,KAAK;QACf5C,cAAc,EAAE,IAAI,CAAC/C,IAAI;QACzB+E,YAAY,EAAED,oBAAoB;QAClC7B,MAAM;QACN2C,aAAa,EAAE,IAAI,CAAC7F,QAAQ,CAAC8F,KAAK;QAClC1C,UAAU,EAAE8B,SAAS,CAAC9B,UAAU;QAChCmB,OAAO,EAAEW,SAAS,CAACX,OAAO;QAC1BwB,OAAO,EAAEb,SAAS,CAACa,OAAO;QAC1BC,SAAS,EAAEd,SAAS,CAACc;MACzB,CAAC;MACD,IAAI,CAAChG,QAAQ,CAACiG,KAAK,CAAClD,eAAe,CAAC;IACxC,CAAC,CAAC;IACF,IAAI,CAACzB,KAAK,CAACwD,IAAI,CAACG,OAAO,CAAC;;IAExB;AACR;AACA;AACA;IACQ,IAAI,CAAC3D,KAAK,CAACwD,IAAI,CACX,IAAI,CAAChE,eAAe,CACfoF,sBAAsB,CAAC,CAAC,CACxBzB,SAAS,CAAC0B,IAAI,IAAI;MACf,IAAI,CACCvF,eAAe,CAACuF,IAAI,CAACC,KAAK,EAAED,IAAI,CAAC5B,OAAO,CAAC,CACzC8B,IAAI,CAACC,MAAM,IAAI;QACZ,IAAI,CAACxF,eAAe,CAACyF,4BAA4B,CAAC;UAC9CZ,EAAE,EAAEQ,IAAI,CAACR,EAAE;UACXW;QACJ,CAAC,CAAC;MACN,CAAC,CAAC;IACV,CAAC,CACT,CAAC;IAED,OAAOE,2BAAoB;EAC/B;;EAGA;AACJ;AACA;AACA,KAHI;EAAAzE,MAAA,CAIA0E,OAAO,GAAP,SAAAA,QAAQC,mBAA4B,EAAoB;IACpD,IAAAC,oDAAgC,EAAC,IAAI,CAAC;IACtC,MAAM,IAAAC,oBAAa,EAAC,SAAS,CAAC;EAClC;;EAEA;EAAA;EAAA7E,MAAA,CACA8E,eAAe,GAAf,SAAAA,gBAAA,EAAoC;IAChC,MAAM,IAAAD,oBAAa,EAAC,kBAAkB,CAAC;EAC3C,CAAC;EAAA7E,MAAA,CACD+E,iBAAiB,GAAjB,SAAAA,kBAAA,EAAsC;IAClC,MAAM,IAAAF,oBAAa,EAAC,kBAAkB,CAAC;EAC3C,CAAC;EAAA7E,MAAA,CACDgF,cAAc,GAAd,SAAAA,eAAeC,SAAiB,GAAG,EAAE,EAAiB;IAClD,IAAAL,oDAAgC,EAAC,IAAI,CAAC;IACtC,OAAO,IAAI,CAACG,iBAAiB,CAAC,CAAC,CAACC,cAAc
,CAACC,SAAS,CAAC;EAC7D,CAAC;EAAAjF,MAAA,CACDkF,cAAc,GAAd,SAAAA,eAAeD,SAAiB,GAAG,EAAE,EAAgB;IACjD,OAAO,IAAI,CAACF,iBAAiB,CAAC,CAAC,CAACG,cAAc,CAACD,SAAS,CAAC;EAC7D,CAAC;EAAAjF,MAAA,CAEKmF,MAAM,GAAZ,eAAAA,OACIC,IAAiC,EACc;IAC/C,IAAAR,oDAAgC,EAAC,IAAI,CAAC;IACtC,IAAMS,WAAW,GAAG,MAAM,IAAI,CAACC,UAAU,CAAC,CAACF,IAAI,CAAQ,CAAC;IAExD,IAAMG,OAAO,GAAGF,WAAW,CAACG,KAAK,CAAC,CAAC,CAAC;IACpC,IAAAC,2CAA0B,EAAC,IAAI,EAAUL,IAAI,CAAS,IAAI,CAACjH,MAAM,CAACmC,WAAW,CAAC,EAAS8E,IAAI,EAAEG,OAAO,CAAC;IACrG,IAAMG,YAAY,GAAG,IAAAC,qBAAc,EAACN,WAAW,CAACO,OAAO,CAAC,CAAC,CAAC,CAAC;IAC3D,OAAOF,YAAY;EACvB,CAAC;EAAA1F,MAAA,CAEKsF,UAAU,GAAhB,eAAAA,WACIO,QAA0B,EAI3B;IACC,IAAAjB,oDAAgC,EAAC,IAAI,CAAC;IACtC;AACR;AACA;AACA;IACQ,IAAIiB,QAAQ,CAACxC,MAAM,KAAK,CAAC,EAAE;MACvB,OAAO;QACHuC,OAAO,EAAE,EAAE;QACXJ,KAAK,EAAE;MACX,CAAC;IACL;IAEA,IAAMlF,WAAW,GAAG,IAAI,CAACnC,MAAM,CAACmC,WAAW;;IAG3C;AACR;AACA;AACA;AACA;IACQ,IAAIwF,UAA0C;IAC9C,IAAI,IAAI,CAACC,QAAQ,CAAC,KAAK,EAAE,QAAQ,CAAC,EAAE;MAChCD,UAAU,GAAG,MAAMlD,OAAO,CAACC,GAAG,CAC1BgD,QAAQ,CAACzE,GAAG,CAACO,OAAO,IAAI;QACpB,IAAMqE,UAAU,GAAG,IAAAC,8CAA0B,EAAC,IAAI,CAAC9H,MAAM,EAAEwD,OAAO,CAAC;QACnE,OAAO,IAAI,CAAChB,SAAS,CAAC,KAAK,EAAE,QAAQ,EAAEqF,UAAU,CAAC,CAC7C1B,IAAI,CAAC,MAAM;UACR,OAAO;YAAE4B,QAAQ,EAAEF;UAAW,CAAC;QACnC,CAAC,CAAC;MACV,CAAC,CACL,CAAC;IACL,CAAC,MAAM;MACHF,UAAU,GAAG,EAAE;MACf,KAAK,IAAIrC,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGoC,QAAQ,CAACxC,MAAM,EAAEI,KAAK,EAAE,EAAE;QAClD,IAAM9B,OAAO,GAAGkE,QAAQ,CAACpC,KAAK,CAAC;QAC/B,IAAMuC,UAAU,GAAG,IAAAC,8CAA0B,EAAC,IAAI,CAAC9H,MAAM,EAAEwD,OAAO,CAAC;QACnEmE,UAAU,CAACrC,KAAK,CAAC,GAAG;UAAEyC,QAAQ,EAAEF;QAAW,CAAC;MAChD;IACJ;IAEA,IAAMG,OAAO,GAAG,MAAM,IAAI,CAACpH,eAAe,CAACqH,SAAS,CAChDN,UAAU,EACV,2BACJ,CAAC;;IAED;IACA,IAAMO,WAAW,GAAG,IAAAC,qCAA2B,EAA6B,IAAI,CAACpJ,SAAS,EAAEiJ,OAAO,CAACP,OAAO,CAAC;IAE5G,IAAI,IAAI,CAACG,QAAQ,CAAC,MAAM,EAAE,QAAQ,CAAC,EAAE;MACjC,IAAMQ,OAAoC,GAAG,IAAInH,GAAG,CAAC,CAAC;MACtD0G,UAAU,CAACU,OAAO,CAACC,GAAG,IAAI;QACtB,IAAMC,GAAG,GAAGD,GAAG,CAACP,QAAQ;QACxBK,OAAO,CAACI,GAAG,CAAED,GAAG,CAASpG,WAAW,CAA
C,EAASoG,GAAG,CAAC;MACtD,CAAC,CAAC;MACF,MAAM9D,OAAO,CAACC,GAAG,CACbwD,WAAW,CAACjF,GAAG,CAACsF,GAAG,IAAI;QACnB,OAAO,IAAI,CAAC/F,SAAS,CACjB,MAAM,EAAE,QAAQ,EAChB4F,OAAO,CAACK,GAAG,CAACF,GAAG,CAACG,OAAO,CAAC,EACxBH,GACJ,CAAC;MACL,CAAC,CACL,CAAC;IACL;IAEA,OAAO;MACHd,OAAO,EAAES,WAAW;MACpBb,KAAK,EAAEW,OAAO,CAACX;IACnB,CAAC;EACL,CAAC;EAAAxF,MAAA,CAEK8G,UAAU,GAAhB,eAAAA,WACIC,GAAa,EAId;IACC,IAAAnC,oDAAgC,EAAC,IAAI,CAAC;IACtC,IAAMtE,WAAW,GAAG,IAAI,CAACnC,MAAM,CAACmC,WAAW;IAC3C;AACR;AACA;AACA;IACQ,IAAIyG,GAAG,CAAC1D,MAAM,KAAK,CAAC,EAAE;MAClB,OAAO;QACHuC,OAAO,EAAE,EAAE;QACXJ,KAAK,EAAE;MACX,CAAC;IACL;IAEA,IAAMwB,aAAa,GAAG,MAAM,IAAI,CAACC,SAAS,CAACF,GAAG,CAAC,CAACG,IAAI,CAAC,CAAC;IACtD,IAAMrB,QAA0C,GAAG,EAAE;IACrD,IAAMU,OAAoD,GAAG,IAAInH,GAAG,CAAC,CAAC;IACtEgE,KAAK,CAAC+D,IAAI,CAACH,aAAa,CAACI,MAAM,CAAC,CAAC,CAAC,CAACZ,OAAO,CAACa,UAAU,IAAI;MACrD,IAAMC,IAAoC,GAAGD,UAAU,CAACE,aAAa,CAAC,IAAI,CAAQ;MAClF1B,QAAQ,CAAC9C,IAAI,CAACuE,IAAI,CAAC;MACnBf,OAAO,CAACI,GAAG,CAACU,UAAU,CAACR,OAAO,EAAES,IAAI,CAAC;IACzC,CAAC,CAAC;IAEF,MAAM1E,OAAO,CAACC,GAAG,CACbgD,QAAQ,CAACzE,GAAG,CAACsF,GAAG,IAAI;MAChB,IAAMG,OAAO,GAAIH,GAAG,CAAS,IAAI,CAACvI,MAAM,CAACmC,WAAW,CAAC;MACrD,OAAO,IAAI,CAACK,SAAS,CAAC,KAAK,EAAE,QAAQ,EAAE+F,GAAG,EAAEM,aAAa,CAACJ,GAAG,CAACC,OAAO,CAAC,CAAC;IAC3E,CAAC,CACL,CAAC;IACD,IAAMW,UAA0C,GAAG3B,QAAQ,CAACzE,GAAG,CAACsF,GAAG,IAAI;MACnE,IAAMe,QAAQ,GAAG,IAAAC,gBAAS,EAAChB,GAAG,CAAC;MAC/Be,QAAQ,CAACE,QAAQ,GAAG,IAAI;MACxB,OAAO;QACHC,QAAQ,EAAElB,GAAG;QACbR,QAAQ,EAAEuB;MACd,CAAC;IACL,CAAC,CAAC;IACF,IAAMtB,OAAO,GAAG,MAAM,IAAI,CAACpH,eAAe,CAACqH,SAAS,CAChDoB,UAAU,EACV,2BACJ,CAAC;IAED,IAAMK,UAAoB,GAAG1B,OAAO,CAACP,OAAO,CAACxE,GAAG,CAAC0G,CAAC,IAAIA,CAAC,CAACxH,WAAW,CAAW,CAAC;;IAE/E;IACA,MAAMsC,OAAO,CAACC,GAAG,CACbgF,UAAU,CAACzG,GAAG,CAACwC,EAAE,IAAI;MACjB,OAAO,IAAI,CAACjD,SAAS,CACjB,MAAM,EACN,QAAQ,EACR4F,OAAO,CAACK,GAAG,CAAChD,EAAE,CAAC,EACfoD,aAAa,CAACJ,GAAG,CAAChD,EAAE,CACxB,CAAC;IACL,CAAC,CACL,CAAC;IAED,IAAMyC,WAAW,GAAGwB,UAAU,CAACzG,GAAG,CAACwC,EAAE,IAAI,IAAAmE,wBAAiB,EAACf,aAAa,EAAEpD,EAAE,CA
AC,CAAC;IAE9E,OAAO;MACHgC,OAAO,EAAES,WAAW;MACpBb,KAAK,EAAEW,OAAO,CAACX;IACnB,CAAC;EACL;;EAEA;AACJ;AACA,KAFI;EAAAxF,MAAA,CAGMgI,UAAU,GAAhB,eAAAA,WAAiBnC,QAAmC,EAGjD;IACC,IAAAjB,oDAAgC,EAAC,IAAI,CAAC;IACtC,IAAMqD,UAA4B,GAAG,EAAE;IACvC,IAAMC,cAA2C,GAAG,IAAI9I,GAAG,CAAC,CAAC;IAC7DyG,QAAQ,CAACW,OAAO,CAAC7E,OAAO,IAAI;MACxB,IAAMwG,OAAO,GAAG,IAAAlC,8CAA0B,EAAC,IAAI,CAAC9H,MAAM,EAAEwD,OAAO,CAAC;MAChE,IAAMkF,OAAe,GAAGsB,OAAO,CAAC,IAAI,CAAChK,MAAM,CAACmC,WAAW,CAAQ;MAC/D,IAAI,CAACuG,OAAO,EAAE;QACV,MAAM,IAAAuB,mBAAU,EAAC,MAAM,EAAE;UACrB9H,WAAW,EAAE,IAAI,CAACnC,MAAM,CAACmC,WAAqB;UAC9CgH,IAAI,EAAEa,OAAO;UACbhK,MAAM,EAAE,IAAI,CAACA,MAAM,CAACiC;QACxB,CAAC,CAAC;MACN;MACA8H,cAAc,CAACvB,GAAG,CAACE,OAAO,EAAEsB,OAAO,CAAC;MACpCF,UAAU,CAAClF,IAAI,CAACoF,OAAO,CAAC;IAC5B,CAAC,CAAC;IAEF,IAAMzC,YAAY,GAAG,MAAM,IAAI,CAACJ,UAAU,CAAC2C,UAAU,CAAC;IACtD,IAAMrC,OAAO,GAAGF,YAAY,CAACE,OAAO,CAACyC,KAAK,CAAC,CAAC,CAAC;IAC7C,IAAM7C,KAA4C,GAAG,EAAE;;IAEvD;IACA,MAAM5C,OAAO,CAACC,GAAG,CACb6C,YAAY,CAACF,KAAK,CAACpE,GAAG,CAAC,MAAOkH,GAAG,IAAK;MAClC,IAAIA,GAAG,CAACC,MAAM,KAAK,GAAG,EAAE;QACpB/C,KAAK,CAACzC,IAAI,CAACuF,GAAG,CAAC;MACnB,CAAC,MAAM;QACH,IAAM1E,EAAE,GAAG0E,GAAG,CAAC5E,UAAU;QACzB,IAAM8E,SAAS,GAAG,IAAAT,wBAAiB,EAACG,cAAc,EAAEtE,EAAE,CAAC;QACvD,IAAM6E,WAAW,GAAG,IAAA9C,qBAAc,EAAC2C,GAAG,CAACI,YAAY,CAAC;QACpD,IAAMhC,GAAG,GAAG,IAAI,CAACxJ,SAAS,CAACyL,oBAAoB,CAAC,CAACF,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;QACjE,IAAMG,MAAM,GAAG,MAAMlC,GAAG,CAACmC,iBAAiB,CAAC,MAAML,SAAS,CAAC;QAC3D5C,OAAO,CAAC7C,IAAI,CAAC6F,MAAM,CAAC;MACxB;IACJ,CAAC,CACL,CAAC;IACD,OAAO;MACHpD,KAAK;MACLI;IACJ,CAAC;EACL;;EAEA;AACJ;AACA,KAFI;EAAA5F,MAAA,CAGM8I,MAAM,GAAZ,eAAAA,OAAa1D,IAA6B,EAAmD;IACzF,IAAAR,oDAAgC,EAAC,IAAI,CAAC;IACtC,IAAMmE,UAAU,GAAG,MAAM,IAAI,CAACf,UAAU,CAAC,CAAC5C,IAAI,CAAC,CAAC;IAChD,IAAAK,2CAA0B,EACtB,IAAI,CAAC1F,cAAc,EAClBqF,IAAI,CAAS,IAAI,CAACjH,MAAM,CAACmC,WAAW,CAAC,EACtC8E,IAAI,EACJ2D,UAAU,CAACvD,KAAK,CAAC,CAAC,CACtB,CAAC;IACD,OAAOuD,UAAU,CAACnD,OAAO,CAAC,CAAC,CAAC;EAChC;;EAEA;AACJ;AACA,KAFI;EAAA5F,MAAA,CAGAgJ,iBAAiB,GAAjB,SAAAA,
kBAAkB5D,IAA6B,EAAmD;IAC9F,IAAAR,oDAAgC,EAAC,IAAI,CAAC;IACtC,IAAMuD,OAAO,GAAG,IAAAlC,8CAA0B,EAAC,IAAI,CAAC9H,MAAM,EAAEiH,IAAI,CAAC;IAC7D,IAAMyB,OAAe,GAAGsB,OAAO,CAAC,IAAI,CAAChK,MAAM,CAACmC,WAAW,CAAQ;IAC/D,IAAI,CAACuG,OAAO,EAAE;MACV,MAAM,IAAAuB,mBAAU,EAAC,MAAM,EAAE;QACrBd,IAAI,EAAElC;MACV,CAAC,CAAC;IACN;;IAEA;IACA,IAAI6D,KAAK,GAAG,IAAI,CAAC9J,wBAAwB,CAACyH,GAAG,CAACC,OAAO,CAAC;IACtD,IAAI,CAACoC,KAAK,EAAE;MACRA,KAAK,GAAGxE,2BAAoB;IAChC;IACAwE,KAAK,GAAGA,KAAK,CACR3E,IAAI,CAAC,MAAM4E,wCAAwC,CAAC,IAAI,EAASrC,OAAO,EAASsB,OAAO,CAAC,CAAC,CAC1F7D,IAAI,CAAE6E,WAAW,IAAK;MACnB,IAAI,CAACA,WAAW,CAACC,QAAQ,EAAE;QACvB,OAAOC,wBAAwB,CAACF,WAAW,CAACzC,GAAG,EAAEyB,OAAO,CAAC;MAC7D,CAAC,MAAM;QACH,OAAOgB,WAAW,CAACzC,GAAG;MAC1B;IACJ,CAAC,CAAC;IACN,IAAI,CAACvH,wBAAwB,CAACwH,GAAG,CAACE,OAAO,EAAEoC,KAAK,CAAC;IACjD,OAAOA,KAAK;EAChB,CAAC;EAAAjJ,MAAA,CAEDqC,IAAI,GAAJ,SAAAA,KAAKiH,QAAqC,EAKxC;IACE,IAAA1E,oDAAgC,EAAC,IAAI,CAAC;IACtC,IAAI,OAAO0E,QAAQ,KAAK,QAAQ,EAAE;MAC9B,MAAM,IAAAlB,mBAAU,EAAC,MAAM,EAAE;QACrBkB;MACJ,CAAC,CAAC;IACN;IAEA,IAAI,CAACA,QAAQ,EAAE;MACXA,QAAQ,GAAG,IAAAC,yBAAgB,EAAC,CAAC;IACjC;IAEA,IAAMC,KAAK,GAAG,IAAAC,sBAAa,EAAC,MAAM,EAAEH,QAAQ,EAAE,IAAW,CAAC;IAC1D,OAAOE,KAAK;EAChB,CAAC;EAAAxJ,MAAA,CAED0J,OAAO,GAAP,SAAAA,QACIJ,QAAqD,EAMvD;IACE,IAAA1E,oDAAgC,EAAC,IAAI,CAAC;;IAEtC;IACA,IACI,OAAO0E,QAAQ,KAAK,QAAQ,IAC5BlG,KAAK,CAACuG,OAAO,CAACL,QAAQ,CAAC,EACzB;MACE,MAAM,IAAAM,uBAAc,EAAC,MAAM,EAAE;QACzBN;MACJ,CAAC,CAAC;IACN;IAEA,IAAIE,KAAK;IAET,IAAI,OAAOF,QAAQ,KAAK,QAAQ,EAAE;MAC9BE,KAAK,GAAG,IAAAC,sBAAa,EAAC,SAAS,EAAE;QAC7BI,QAAQ,EAAE;UACN,CAAC,IAAI,CAAC1L,MAAM,CAACmC,WAAW,GAAGgJ;QAC/B,CAAC;QACDQ,KAAK,EAAE;MACX,CAAC,EAAE,IAAW,CAAC;IACnB,CAAC,MAAM;MACH,IAAI,CAACR,QAAQ,EAAE;QACXA,QAAQ,GAAG,IAAAC,yBAAgB,EAAC,CAAC;MACjC;;MAGA;MACA,IAAKD,QAAQ,CAAgBQ,KAAK,EAAE;QAChC,MAAM,IAAA1B,mBAAU,EAAC,KAAK,CAAC;MAC3B;MAEAkB,QAAQ,GAAG,IAAA5B,gBAAS,EAAC4B,QAAQ,CAAC;MAC7BA,QAAQ,CAASQ,KAAK,GAAG,CAAC;MAC3BN,KAAK,GAAG,IAAAC,sBAAa,EAAiB,SAAS,EAAEH,QAAQ,EAAE,IAAW,CAAC;IAC3E;IAGA,OAAOE,KAAK;EAChB,CAAC;EAAAxJ,MAAA,CAED+J,
KAAK,GAAL,SAAAA,MAAMT,QAAqD,EAKzD;IACE,IAAA1E,oDAAgC,EAAC,IAAI,CAAC;IACtC,IAAI,CAAC0E,QAAQ,EAAE;MACXA,QAAQ,GAAG,IAAAC,yBAAgB,EAAC,CAAC;IACjC;IACA,IAAMC,KAAK,GAAG,IAAAC,sBAAa,EAAC,OAAO,EAAEH,QAAQ,EAAE,IAAW,CAAC;IAC3D,OAAOE,KAAK;EAChB;;EAEA;AACJ;AACA;AACA,KAHI;EAAAxJ,MAAA,CAIAiH,SAAS,GAAT,SAAAA,UACIF,GAAa,EAMf;IACE,IAAAnC,oDAAgC,EAAC,IAAI,CAAC;IACtC,IAAMoF,UAAsC,GAAG;MAC3CH,QAAQ,EAAE;QACN,CAAC,IAAI,CAAC1L,MAAM,CAACmC,WAAW,GAAG;UACvB2J,GAAG,EAAElD,GAAG,CAACsB,KAAK,CAAC,CAAC;QACpB;MACJ;IACJ,CAAC;IACD,IAAMmB,KAAK,GAAG,IAAAC,sBAAa,EAAC,WAAW,EAAEO,UAAU,EAAE,IAAW,CAAC;IACjE,OAAOR,KAAK;EAChB;;EAEA;AACJ;AACA,KAFI;EAAAxJ,MAAA,CAKAkK,UAAU,GAAV,SAAAA,WAAA,EAA2B;IACvB,MAAM,IAAArF,oBAAa,EAAC,WAAW,CAAC;EACpC;;EAEA;AACJ;AACA;AACA,KAHI;EAAA7E,MAAA,CAIAmK,UAAU,GAAV,SAAAA,WAAWC,aAAkD,EAAiB;IAC1E,MAAM,IAAAvF,oBAAa,EAAC,WAAW,CAAC;EACpC,CAAC;EAAA7E,MAAA,CAEDqK,UAAU,GAAV,SAAAA,WAAWC,UAA6C,EAA0C;IAC9F,MAAM,IAAAzF,oBAAa,EAAC,MAAM,CAAC;EAC/B;;EAEA;AACJ;AACA,KAFI;EAAA7E,MAAA,CAGAuK,OAAO,GAAP,SAAAA,QAAQC,IAAkB,EAAEtI,GAAgB,EAAEuI,GAAQ,EAAEC,QAAQ,GAAG,KAAK,EAAE;IACtE,IAAI,OAAOD,GAAG,KAAK,UAAU,EAAE;MAC3B,MAAM,IAAAb,uBAAc,EAAC,MAAM,EAAE;QACzB1H,GAAG;QACHsI;MACJ,CAAC,CAAC;IACN;IAEA,IAAI,CAAC5M,UAAU,CAAC+M,QAAQ,CAACH,IAAI,CAAC,EAAE;MAC5B,MAAM,IAAAZ,uBAAc,EAAC,MAAM,EAAE;QACzB1H,GAAG;QACHsI;MACJ,CAAC,CAAC;IACN;IAEA,IAAI,CAAC3M,UAAU,CAAC8M,QAAQ,CAACzI,GAAG,CAAC,EAAE;MAC3B,MAAM,IAAAkG,mBAAU,EAAC,MAAM,EAAE;QACrBlG;MACJ,CAAC,CAAC;IACN;IAEA,IAAIsI,IAAI,KAAK,MAAM,IAAItI,GAAG,KAAK,QAAQ,IAAIwI,QAAQ,KAAK,IAAI,EAAE;MAC1D,MAAM,IAAAtC,mBAAU,EAAC,OAAO,EAAE;QACtBoC,IAAI;QACJtI,GAAG;QACHwI;MACJ,CAAC,CAAC;IACN;;IAEA;IACA,IAAME,QAAQ,GAAGH,GAAG,CAACI,IAAI,CAAC,IAAI,CAAC;IAE/B,IAAMC,OAAO,GAAGJ,QAAQ,GAAG,UAAU,GAAG,QAAQ;IAEhD,IAAI,CAACpL,KAAK,CAAC4C,GAAG,CAAC,GAAG,IAAI,CAAC5C,KAAK,CAAC4C,GAAG,CAAC,IAAI,CAAC,CAAC;IACvC,IAAI,CAAC5C,KAAK,CAAC4C,GAAG,CAAC,CAACsI,IAAI,CAAC,GAAG,IAAI,CAAClL,KAAK,CAAC4C,GAAG,CAAC,CAACsI,IAAI,CAAC,IAAI;MAC7CO,MAAM,EAAE,EAAE;MACVL,QAAQ,EAAE;IACd,CAAC;IACD,IAAI,CAACpL,KAAK,CAAC4C,GAAG,CAAC,CAACsI,I
AAI,CAAC,CAACM,OAAO,CAAC,CAAC/H,IAAI,CAAC6H,QAAQ,CAAC;EACjD,CAAC;EAAA5K,MAAA,CAEDgL,QAAQ,GAAR,SAAAA,SAASR,IAAkB,EAAEtI,GAAgB,EAAE;IAC3C,IACI,CAAC,IAAI,CAAC5C,KAAK,CAAC4C,GAAG,CAAC,IAChB,CAAC,IAAI,CAAC5C,KAAK,CAAC4C,GAAG,CAAC,CAACsI,IAAI,CAAC,EACxB;MACE,OAAO;QACHO,MAAM,EAAE,EAAE;QACVL,QAAQ,EAAE;MACd,CAAC;IACL;IACA,OAAO,IAAI,CAACpL,KAAK,CAAC4C,GAAG,CAAC,CAACsI,IAAI,CAAC;EAChC,CAAC;EAAAxK,MAAA,CAED+F,QAAQ,GAAR,SAAAA,SAASyE,IAAkB,EAAEtI,GAAgB,EAAE;IAC3C;AACR;AACA;AACA;IACQ,IACI,CAAC,IAAI,CAAC5C,KAAK,CAAC4C,GAAG,CAAC,IAChB,CAAC,IAAI,CAAC5C,KAAK,CAAC4C,GAAG,CAAC,CAACsI,IAAI,CAAC,EACxB;MACE,OAAO,KAAK;IAChB;IAEA,IAAMlL,KAAK,GAAG,IAAI,CAAC0L,QAAQ,CAACR,IAAI,EAAEtI,GAAG,CAAC;IACtC,IAAI,CAAC5C,KAAK,EAAE;MACR,OAAO,KAAK;IAChB;IACA,OAAOA,KAAK,CAACyL,MAAM,CAAC1H,MAAM,GAAG,CAAC,IAAI/D,KAAK,CAACoL,QAAQ,CAACrH,MAAM,GAAG,CAAC;EAC/D,CAAC;EAAArD,MAAA,CAEDW,SAAS,GAAT,SAAAA,UAAU6J,IAAkB,EAAEtI,GAAgB,EAAEoF,IAAS,EAAE2D,QAAc,EAAgB;IACrF,IAAM3L,KAAK,GAAG,IAAI,CAAC0L,QAAQ,CAACR,IAAI,EAAEtI,GAAG,CAAC;IAEtC,IAAI,CAAC5C,KAAK,EAAE;MACR,OAAOmF,2BAAoB;IAC/B;;IAEA;IACA,IAAMyG,KAAK,GAAG5L,KAAK,CAACyL,MAAM,CAAC3J,GAAG,CAAE+J,IAAS,IAAK,MAAMA,IAAI,CAAC7D,IAAI,EAAE2D,QAAQ,CAAC,CAAC;IACzE,OAAO,IAAAG,oBAAa,EAACF,KAAK;IACtB;IAAA,CACC5G,IAAI,CAAC,MAAM1B,OAAO,CAACC,GAAG,CACnBvD,KAAK,CAACoL,QAAQ,CACTtJ,GAAG,CAAE+J,IAAS,IAAKA,IAAI,CAAC7D,IAAI,EAAE2D,QAAQ,CAAC,CAChD,CAAC,CAAC;EACV;;EAEA;AACJ;AACA,KAFI;EAAAjL,MAAA,CAGAqL,aAAa,GAAb,SAAAA,cAAcb,IAAkB,EAAEtI,GAAgB,EAAEoF,IAAS,EAAE2D,QAAa,EAAE;IAC1E,IAAI,CAAC,IAAI,CAAClF,QAAQ,CAACyE,IAAI,EAAEtI,GAAG,CAAC,EAAE;MAC3B;IACJ;IACA,IAAM5C,KAAK,GAAG,IAAI,CAAC0L,QAAQ,CAACR,IAAI,EAAEtI,GAAG,CAAC;IACtC,IAAI,CAAC5C,KAAK,EAAE;IACZA,KAAK,CAACyL,MAAM,CAACvE,OAAO,CAAE2E,IAAS,IAAKA,IAAI,CAAC7D,IAAI,EAAE2D,QAAQ,CAAC,CAAC;EAC7D;;EAEA;AACJ;AACA;AACA;AACA,KAJI;EAAAjL,MAAA,CAKAsL,WAAW,GAAX,SAAAA,YAAYC,IAAY,EAAiB;IACrC,IAAMC,GAAG,GAAG,IAAI5I,OAAO,CAAO6I,GAAG,IAAI;MACjC,IAAMC,OAAO,GAAGC,UAAU,CAAC,MAAM;QAC7B,IAAI,CAAC3M,QAAQ,CAAC4M,MAAM,CAACF,OAAO,CAAC;QAC7BD,GAAG,CAAC,CAAC;MACT,CAAC,EAAEF,I
AAI,CAAC;MACR,IAAI,CAACvM,QAAQ,CAAC6M,GAAG,CAACH,OAAO,CAAC;IAC9B,CAAC,CAAC;IACF,OAAOF,GAAG;EACd,CAAC;EAAAxL,MAAA,CAEK2C,OAAO,GAAb,eAAAA,QAAA,EAAkC;IAC9B,IAAI,IAAI,CAAC/C,SAAS,EAAE;MAChB,OAAOkM,4BAAqB;IAChC;IAGA,MAAMlJ,OAAO,CAACC,GAAG,CAAC,IAAI,CAAClD,SAAS,CAACyB,GAAG,CAAC0B,EAAE,IAAIA,EAAE,CAAC,CAAC,CAAC,CAAC;;IAEjD;AACR;AACA;AACA;AACA;AACA;IACQ,IAAI,CAAClD,SAAS,GAAG,IAAI;IAGrBwD,KAAK,CAAC+D,IAAI,CAAC,IAAI,CAACnI,QAAQ,CAAC,CAACwH,OAAO,CAACkF,OAAO,IAAIK,YAAY,CAACL,OAAO,CAAC,CAAC;IACnE,IAAI,IAAI,CAACtO,kBAAkB,EAAE;MACzB,IAAI,CAACA,kBAAkB,CAACuF,OAAO,CAAC,CAAC;IACrC;IACA;AACR;AACA;AACA;AACA;AACA;AACA;AACA;IACQ,OAAO,IAAI,CAAC1E,QAAQ,CAAC+N,kBAAkB,CAAC,CAAC,CACpC1H,IAAI,CAAC,MAAM,IAAI,CAACvF,eAAe,CAACkN,KAAK,CAAC,CAAC,CAAC,CACxC3H,IAAI,CAAC,MAAM;MACR;AAChB;AACA;AACA;AACA;AACA;MACgB,IAAI,CAAC/E,KAAK,CAACiH,OAAO,CAAC0F,GAAG,IAAIA,GAAG,CAACC,WAAW,CAAC,CAAC,CAAC;MAE5C,OAAO,IAAI,CAAClO,QAAQ,CAACmO,WAAW,CAAC,IAAI,CAAClO,IAAI,CAAC;MAC3C,OAAO,IAAAmO,0BAAmB,EAAC,yBAAyB,EAAE,IAAI,CAAC,CAAC/H,IAAI,CAAC,MAAM,IAAI,CAAC;IAChF,CAAC,CAAC;EACV;;EAEA;AACJ;AACA,KAFI;EAAAtE,MAAA,CAGMsM,MAAM,GAAZ,eAAAA,OAAA,EAA6B;IACzB,MAAM,IAAI,CAAC3J,OAAO,CAAC,CAAC;IACpB,MAAMC,OAAO,CAACC,GAAG,CAAC,IAAI,CAAChD,QAAQ,CAACuB,GAAG,CAAC0B,EAAE,IAAIA,EAAE,CAAC,CAAC,CAAC,CAAC;IAChD,MAAM,IAAAyJ,4CAAwB,EAC1B,IAAI,CAACtO,QAAQ,CAACuO,OAAO,EACrB,IAAI,CAACvO,QAAQ,CAAC8D,aAAa,EAC3B,IAAI,CAAC9D,QAAQ,CAAC8F,KAAK,EACnB,IAAI,CAAC9F,QAAQ,CAACC,IAAI,EAClB,IAAI,CAACA,IAAI,EACT,IAAI,CAACD,QAAQ,CAACwO,QAAQ,EACtB,IAAI,CAACxO,QAAQ,CAACyO,YAClB,CAAC;EACL,CAAC;EAAA,WAAAC,aAAA,CAAAC,OAAA,EAAA7O,gBAAA;IAAAmE,GAAA;IAAA0E,GAAA,EAlvBD,SAAAA,CAAA,EAA+D;MAC3D,OAAO,IAAI,CAACnH,CAAC,CAACqB,IAAI,CACd,IAAAC,YAAM,EAACU,EAAE,IAAIA,EAAE,CAACgB,SAAS,KAAK,QAAQ,CAC1C,CAAC;IACL;EAAC;IAAAP,GAAA;IAAA0E,GAAA,EACD,SAAAA,CAAA,EAA+D;MAC3D,OAAO,IAAI,CAACnH,CAAC,CAACqB,IAAI,CACd,IAAAC,YAAM,EAACU,EAAE,IAAIA,EAAE,CAACgB,SAAS,KAAK,QAAQ,CAC1C,CAAC;IACL;EAAC;IAAAP,GAAA;IAAA0E,GAAA,EACD,SAAAA,CAAA,EAA+D;MAC3D,OAAO,IAAI,CAACnH,CAAC,CAACqB,IAAI,CACd,IAAAC,YAAM,EAACU,
EAAE,IAAIA,EAAE,CAACgB,SAAS,KAAK,QAAQ,CAC1C,CAAC;IACL;;IAGA;;IAqBA;AACJ;AACA;AACA;AACA;AACA;EALI;IAAAP,GAAA;IAAA0E,GAAA,EA8sBA,SAAAA,CAAA,EAA+F;MAC3F,OAAO,IAAI;IACf;EAAC;AAAA;AAGL;AACA;AACA;AACA;AACA,SAAS9G,mBAAmBA,CACxB+M,UAAkC,EACpC;EACE,IAAI/O,YAAY,EAAE,OAAO,CAAC;EAC1BA,YAAY,GAAG,IAAI;EACnB,IAAMgP,QAAQ,GAAGC,MAAM,CAACC,cAAc,CAACH,UAAU,CAAC;EAClDhP,UAAU,CAAC2I,OAAO,CAACtE,GAAG,IAAI;IACtBtE,UAAU,CAACwD,GAAG,CAACoJ,IAAI,IAAI;MACnB,IAAMyC,MAAM,GAAGzC,IAAI,GAAG,IAAA0C,cAAO,EAAChL,GAAG,CAAC;MAClC4K,QAAQ,CAACG,MAAM,CAAC,GAAG,UAAUxC,GAAW,EAAEC,QAAiB,EAAE;QACzD,OAAO,IAAI,CAACH,OAAO,CAACC,IAAI,EAAEtI,GAAG,EAAEuI,GAAG,EAAEC,QAAQ,CAAC;MACjD,CAAC;IACL,CAAC,CAAC;EACN,CAAC,CAAC;AACN;AAEA,SAASrB,wBAAwBA,CAC7B3C,GAA8B,EAC9BtB,IAA+B,EACG;EAClC,OAAOsB,GAAG,CAACmC,iBAAiB,CAAEsE,SAAS,IAAK;IACxC,OAAO/H,IAAI;EACf,CAAC,CAAC;AACN;;AAEA;AACA;AACA;AACA;AACA,SAAS8D,wCAAwCA,CAC7CkE,YAAqC,EACrCvG,OAAe,EACfzB,IAAS,EAMX;EACE;AACJ;AACA;AACA;EACI,IAAMiI,gBAAgB,GAAGD,YAAY,CAAClQ,SAAS,CAACoQ,6BAA6B,CAACzG,OAAO,CAAC;EACtF,IAAIwG,gBAAgB,EAAE;IAClB,OAAOzK,OAAO,CAAC2K,OAAO,CAAC;MACnB7G,GAAG,EAAE0G,YAAY,CAAClQ,SAAS,CAACyL,oBAAoB,CAAC,CAAC0E,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC;MACvEjE,QAAQ,EAAE;IACd,CAAC,CAAC;EACN;EACA,OAAOgE,YAAY,CAAC1D,OAAO,CAAC7C,OAAO,CAAC,CAACK,IAAI,CAAC,CAAC,CACtC5C,IAAI,CAACoC,GAAG,IAAI;IACT,IAAI,CAACA,GAAG,EAAE;MACN,OAAO0G,YAAY,CAACjI,MAAM,CAACC,IAAI,CAAC,CAACd,IAAI,CAACsE,MAAM,KAAK;QAC7ClC,GAAG,EAAEkC,MAAM;QACXQ,QAAQ,EAAE;MACd,CAAC,CAAC,CAAC;IACP,CAAC,MAAM;MACH,OAAO;QACH1C,GAAG;QACH0C,QAAQ,EAAE;MACd,CAAC;IACL;EACJ,CAAC,CAAC;AACV;;AAEA;AACA;AACA;AACO,SAASoE,kBAAkBA,CAC9B;EACIvP,QAAQ;EACRC,IAAI;EACJC,MAAM;EACNE,uBAAuB,GAAG,CAAC,CAAC;EAC5BC,mBAAmB,GAAG,CAAC,CAAC;EACxBmP,WAAW,GAAG,IAAI;EAClB7O,OAAO,GAAG,CAAC,CAAC;EACZL,OAAO,GAAG,CAAC,CAAC;EACZC,WAAW,GAAG,CAAC,CAAC;EAChBC,OAAO,GAAG,CAAC,CAAC;EACZiP,cAAc,GAAG,KAAK;EACtBhP,sBAAsB,GAAGC,yCAA6B;EACtDE,eAAe,GAAGC;AACjB,CAAC,EACe;EACrB,IAAM6O,6BAAwE,GAAG;IAC7EC,qBAAqB,EAAE3P,QAAQ,CAAC8F,KAAK;IACrC8J,YAAY,EAAE5P,QAAQ,CAACC,IAAI;IAC3B+C,cAAc,EAAE
/C,IAAI;IACpBC,MAAM,EAAEA,MAAM,CAACiC,UAAU;IACzB3B,OAAO,EAAEJ,uBAAuB;IAChCyP,aAAa,EAAE7P,QAAQ,CAAC6P,aAAa;IACrCrB,QAAQ,EAAExO,QAAQ,CAACwO,QAAQ;IAC3BsB,OAAO,EAAEvK,0BAAY,CAACwK,SAAS,CAAC;EACpC,CAAC;EAED,IAAAC,qBAAc,EACV,4BAA4B,EAC5BN,6BACJ,CAAC;EAED,OAAO,IAAAO,qDAAiC,EACpCjQ,QAAQ,EACR0P,6BACJ,CAAC,CAACrJ,IAAI,CAACvF,eAAe,IAAI;IACtB,IAAM8N,UAAU,GAAG,IAAI9O,gBAAgB,CACnCE,QAAQ,EACRC,IAAI,EACJC,MAAM,EACNY,eAAe,EACfV,uBAAuB,EACvBC,mBAAmB,EACnBC,OAAO,EACPC,WAAW,EACXC,OAAO,EACPC,sBAAsB,EACtBE,OAAO,EACPC,eACJ,CAAC;IAED,OAAOgO,UAAU,CACZ3M,OAAO,CAAC,CAAC,CACToE,IAAI,CAAC,MAAM;MACR;MACAyI,MAAM,CACDoB,OAAO,CAACvP,OAAO,CAAC,CAChB4H,OAAO,CAAC,CAAC,CAAC4H,OAAO,EAAE3D,GAAG,CAAC,KAAK;QACzBsC,MAAM,CAACsB,cAAc,CAACxB,UAAU,EAAEuB,OAAO,EAAE;UACvCxH,GAAG,EAAEA,CAAA,KAAO6D,GAAG,CAASI,IAAI,CAACgC,UAAU;QAC3C,CAAC,CAAC;MACN,CAAC,CAAC;MAEN,IAAIrB,GAAG,GAAG/G,2BAAoB;MAC9B,IAAIgJ,WAAW,IAAIZ,UAAU,CAAC1O,MAAM,CAACgE,OAAO,KAAK,CAAC,EAAE;QAChDqJ,GAAG,GAAGqB,UAAU,CAAC3H,cAAc,CAAC,CAAC;MACrC;MACA,OAAOsG,GAAG;IACd,CAAC,CAAC,CACDlH,IAAI,CAAC,MAAM;MACR,IAAA2J,qBAAc,EAAC,oBAAoB,EAAE;QACjCpB,UAAU;QACVyB,OAAO,EAAE;UACLpQ,IAAI;UACJC,MAAM;UACNY,eAAe;UACfV,uBAAuB;UACvBC,mBAAmB;UACnBC,OAAO;UACPC,WAAW;UACXC,OAAO;UACPC,sBAAsB;UACtBgP,cAAc;UACd9O;QACJ;MACJ,CAAC,CAAC;MACF,OAAOiO,UAAU;IACrB,CAAC;IACD;AACZ;AACA;AACA,OAHY,CAIC0B,KAAK,CAACjG,GAAG,IAAI;MACV,OAAOvJ,eAAe,CAACkN,KAAK,CAAC,CAAC,CACzB3H,IAAI,CAAC,MAAM1B,OAAO,CAAC4L,MAAM,CAAClG,GAAG,CAAC,CAAC;IACxC,CAAC,CAAC;EACV,CAAC,CAAC;AACN;AAEO,SAASmG,cAAcA,CAACC,GAAQ,EAAW;EAC9C,OAAOA,GAAG,YAAY3Q,gBAAgB;AAC1C","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/rx-database-internal-store.js b/dist/cjs/rx-database-internal-store.js deleted file mode 100644 index 83726bc213d..00000000000 --- a/dist/cjs/rx-database-internal-store.js +++ /dev/null @@ -1,277 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.STORAGE_TOKEN_DOCUMENT_KEY = exports.STORAGE_TOKEN_DOCUMENT_ID = exports.INTERNAL_STORE_SCHEMA_TITLE = 
exports.INTERNAL_STORE_SCHEMA = exports.INTERNAL_CONTEXT_STORAGE_TOKEN = exports.INTERNAL_CONTEXT_MIGRATION_STATUS = exports.INTERNAL_CONTEXT_COLLECTION = void 0; -exports._collectionNamePrimary = _collectionNamePrimary; -exports.addConnectedStorageToCollection = addConnectedStorageToCollection; -exports.ensureStorageTokenDocumentExists = ensureStorageTokenDocumentExists; -exports.getAllCollectionDocuments = getAllCollectionDocuments; -exports.getPrimaryKeyOfInternalDocument = getPrimaryKeyOfInternalDocument; -exports.isDatabaseStateVersionCompatibleWithDatabaseCode = isDatabaseStateVersionCompatibleWithDatabaseCode; -exports.removeConnectedStorageFromCollection = removeConnectedStorageFromCollection; -var _rxError = require("./rx-error.js"); -var _rxSchemaHelper = require("./rx-schema-helper.js"); -var _rxStorageHelper = require("./rx-storage-helper.js"); -var _index = require("./plugins/utils/index.js"); -var _rxQuery = require("./rx-query.js"); -var INTERNAL_CONTEXT_COLLECTION = exports.INTERNAL_CONTEXT_COLLECTION = 'collection'; -var INTERNAL_CONTEXT_STORAGE_TOKEN = exports.INTERNAL_CONTEXT_STORAGE_TOKEN = 'storage-token'; -var INTERNAL_CONTEXT_MIGRATION_STATUS = exports.INTERNAL_CONTEXT_MIGRATION_STATUS = 'rx-migration-status'; - -/** - * Do not change the title, - * we have to flag the internal schema so that - * some RxStorage implementations are able - * to detect if the created RxStorageInstance - * is from the internals or not, - * to do some optimizations in some cases. 
- */ -var INTERNAL_STORE_SCHEMA_TITLE = exports.INTERNAL_STORE_SCHEMA_TITLE = 'RxInternalDocument'; -var INTERNAL_STORE_SCHEMA = exports.INTERNAL_STORE_SCHEMA = (0, _rxSchemaHelper.fillWithDefaultSettings)({ - version: 0, - title: INTERNAL_STORE_SCHEMA_TITLE, - primaryKey: { - key: 'id', - fields: ['context', 'key'], - separator: '|' - }, - type: 'object', - properties: { - id: { - type: 'string', - maxLength: 200 - }, - key: { - type: 'string' - }, - context: { - type: 'string', - enum: [INTERNAL_CONTEXT_COLLECTION, INTERNAL_CONTEXT_STORAGE_TOKEN, INTERNAL_CONTEXT_MIGRATION_STATUS, 'OTHER'] - }, - data: { - type: 'object', - additionalProperties: true - } - }, - indexes: [], - required: ['key', 'context', 'data'], - additionalProperties: false, - /** - * If the sharding plugin is used, - * it must not shard on the internal RxStorageInstance - * because that one anyway has only a small amount of documents - * and also its creation is in the hot path of the initial page load, - * so we should spend less time creating multiple RxStorageInstances. 
- */ - sharding: { - shards: 1, - mode: 'collection' - } -}); -function getPrimaryKeyOfInternalDocument(key, context) { - return (0, _rxSchemaHelper.getComposedPrimaryKeyOfDocumentData)(INTERNAL_STORE_SCHEMA, { - key, - context - }); -} - -/** - * Returns all internal documents - * with context 'collection' - */ -async function getAllCollectionDocuments(storageInstance) { - var getAllQueryPrepared = (0, _rxQuery.prepareQuery)(storageInstance.schema, { - selector: { - context: INTERNAL_CONTEXT_COLLECTION, - _deleted: { - $eq: false - } - }, - sort: [{ - id: 'asc' - }], - skip: 0 - }); - var queryResult = await storageInstance.query(getAllQueryPrepared); - var allDocs = queryResult.documents; - return allDocs; -} - -/** - * to not confuse multiInstance-messages with other databases that have the same - * name and adapter, but do not share state with this one (for example in-memory-instances), - * we set a storage-token and use it in the broadcast-channel - */ -var STORAGE_TOKEN_DOCUMENT_KEY = exports.STORAGE_TOKEN_DOCUMENT_KEY = 'storageToken'; -var STORAGE_TOKEN_DOCUMENT_ID = exports.STORAGE_TOKEN_DOCUMENT_ID = getPrimaryKeyOfInternalDocument(STORAGE_TOKEN_DOCUMENT_KEY, INTERNAL_CONTEXT_STORAGE_TOKEN); -async function ensureStorageTokenDocumentExists(rxDatabase) { - /** - * To have less read-write cycles, - * we just try to insert a new document - * and only fetch the existing one if a conflict happened. - */ - var storageToken = (0, _index.randomCouchString)(10); - var passwordHash = rxDatabase.password ? 
await rxDatabase.hashFunction(JSON.stringify(rxDatabase.password)) : undefined; - var docData = { - id: STORAGE_TOKEN_DOCUMENT_ID, - context: INTERNAL_CONTEXT_STORAGE_TOKEN, - key: STORAGE_TOKEN_DOCUMENT_KEY, - data: { - rxdbVersion: rxDatabase.rxdbVersion, - token: storageToken, - /** - * We add the instance token here - * to be able to detect if a given RxDatabase instance - * is the first instance that was ever created - * or if databases have existed earlier on that storage - * with the same database name. - */ - instanceToken: rxDatabase.token, - passwordHash - }, - _deleted: false, - _meta: (0, _index.getDefaultRxDocumentMeta)(), - _rev: (0, _index.getDefaultRevision)(), - _attachments: {} - }; - var writeResult = await rxDatabase.internalStore.bulkWrite([{ - document: docData - }], 'internal-add-storage-token'); - if (writeResult.success[0]) { - return writeResult.success[0]; - } - - /** - * If we get a 409 error, - * it means another instance already inserted the storage token. - * So we get that token from the database and return that one. 
- */ - var error = (0, _index.ensureNotFalsy)(writeResult.error[0]); - if (error.isError && (0, _rxError.isBulkWriteConflictError)(error)) { - var conflictError = error; - if (!isDatabaseStateVersionCompatibleWithDatabaseCode(conflictError.documentInDb.data.rxdbVersion, rxDatabase.rxdbVersion)) { - throw (0, _rxError.newRxError)('DM5', { - args: { - database: rxDatabase.name, - databaseStateVersion: conflictError.documentInDb.data.rxdbVersion, - codeVersion: rxDatabase.rxdbVersion - } - }); - } - if (passwordHash && passwordHash !== conflictError.documentInDb.data.passwordHash) { - throw (0, _rxError.newRxError)('DB1', { - passwordHash, - existingPasswordHash: conflictError.documentInDb.data.passwordHash - }); - } - var storageTokenDocInDb = conflictError.documentInDb; - return (0, _index.ensureNotFalsy)(storageTokenDocInDb); - } - throw error; -} -function isDatabaseStateVersionCompatibleWithDatabaseCode(databaseStateVersion, codeVersion) { - if (!databaseStateVersion) { - return false; - } - if (codeVersion.includes('beta') && codeVersion !== databaseStateVersion) { - return false; - } - var stateMajor = databaseStateVersion.split('.')[0]; - var codeMajor = codeVersion.split('.')[0]; - if (stateMajor !== codeMajor) { - return false; - } - return true; -} -async function addConnectedStorageToCollection(collection, storageCollectionName, schema) { - if (collection.schema.version !== schema.version) { - throw (0, _rxError.newRxError)('SNH', { - schema, - version: collection.schema.version, - name: collection.name, - collection, - args: { - storageCollectionName - } - }); - } - var collectionNameWithVersion = _collectionNamePrimary(collection.name, collection.schema.jsonSchema); - var collectionDocId = getPrimaryKeyOfInternalDocument(collectionNameWithVersion, INTERNAL_CONTEXT_COLLECTION); - while (true) { - var collectionDoc = await (0, _rxStorageHelper.getSingleDocument)(collection.database.internalStore, collectionDocId); - var saveData = (0, _index.clone)((0, 
_index.ensureNotFalsy)(collectionDoc)); - - // do nothing if already in array - var alreadyThere = saveData.data.connectedStorages.find(row => row.collectionName === storageCollectionName && row.schema.version === schema.version); - if (alreadyThere) { - return; - } - - // otherwise add to array and save - saveData.data.connectedStorages.push({ - collectionName: storageCollectionName, - schema - }); - try { - await (0, _rxStorageHelper.writeSingle)(collection.database.internalStore, { - previous: (0, _index.ensureNotFalsy)(collectionDoc), - document: saveData - }, 'add-connected-storage-to-collection'); - } catch (err) { - if (!(0, _rxError.isBulkWriteConflictError)(err)) { - throw err; - } - // retry on conflict - } - } -} -async function removeConnectedStorageFromCollection(collection, storageCollectionName, schema) { - if (collection.schema.version !== schema.version) { - throw (0, _rxError.newRxError)('SNH', { - schema, - version: collection.schema.version, - name: collection.name, - collection, - args: { - storageCollectionName - } - }); - } - var collectionNameWithVersion = _collectionNamePrimary(collection.name, collection.schema.jsonSchema); - var collectionDocId = getPrimaryKeyOfInternalDocument(collectionNameWithVersion, INTERNAL_CONTEXT_COLLECTION); - while (true) { - var collectionDoc = await (0, _rxStorageHelper.getSingleDocument)(collection.database.internalStore, collectionDocId); - var saveData = (0, _index.clone)((0, _index.ensureNotFalsy)(collectionDoc)); - - // do nothing if not there - var isThere = saveData.data.connectedStorages.find(row => row.collectionName === storageCollectionName && row.schema.version === schema.version); - if (!isThere) { - return; - } - - // otherwise remove from array and save - saveData.data.connectedStorages = saveData.data.connectedStorages.filter(item => item.collectionName !== storageCollectionName); - try { - await (0, _rxStorageHelper.writeSingle)(collection.database.internalStore, { - previous: (0, 
_index.ensureNotFalsy)(collectionDoc), - document: saveData - }, 'remove-connected-storage-from-collection'); - } catch (err) { - if (!(0, _rxError.isBulkWriteConflictError)(err)) { - throw err; - } - // retry on conflict - } - } -} - -/** - * returns the primary for a given collection-data - * used in the internal store of a RxDatabase - */ -function _collectionNamePrimary(name, schema) { - return name + '-' + schema.version; -} -//# sourceMappingURL=rx-database-internal-store.js.map \ No newline at end of file diff --git a/dist/cjs/rx-database-internal-store.js.map b/dist/cjs/rx-database-internal-store.js.map deleted file mode 100644 index 1a91f370589..00000000000 --- a/dist/cjs/rx-database-internal-store.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-database-internal-store.js","names":["_rxError","require","_rxSchemaHelper","_rxStorageHelper","_index","_rxQuery","INTERNAL_CONTEXT_COLLECTION","exports","INTERNAL_CONTEXT_STORAGE_TOKEN","INTERNAL_CONTEXT_MIGRATION_STATUS","INTERNAL_STORE_SCHEMA_TITLE","INTERNAL_STORE_SCHEMA","fillWithDefaultSettings","version","title","primaryKey","key","fields","separator","type","properties","id","maxLength","context","enum","data","additionalProperties","indexes","required","sharding","shards","mode","getPrimaryKeyOfInternalDocument","getComposedPrimaryKeyOfDocumentData","getAllCollectionDocuments","storageInstance","getAllQueryPrepared","prepareQuery","schema","selector","_deleted","$eq","sort","skip","queryResult","query","allDocs","documents","STORAGE_TOKEN_DOCUMENT_KEY","STORAGE_TOKEN_DOCUMENT_ID","ensureStorageTokenDocumentExists","rxDatabase","storageToken","randomCouchString","passwordHash","password","hashFunction","JSON","stringify","undefined","docData","rxdbVersion","token","instanceToken","_meta","getDefaultRxDocumentMeta","_rev","getDefaultRevision","_attachments","writeResult","internalStore","bulkWrite","document","success","error","ensureNotFalsy","isError","isBulkWriteConflictError","conflictError",
"isDatabaseStateVersionCompatibleWithDatabaseCode","documentInDb","newRxError","args","database","name","databaseStateVersion","codeVersion","existingPasswordHash","storageTokenDocInDb","includes","stateMajor","split","codeMajor","addConnectedStorageToCollection","collection","storageCollectionName","collectionNameWithVersion","_collectionNamePrimary","jsonSchema","collectionDocId","collectionDoc","getSingleDocument","saveData","clone","alreadyThere","connectedStorages","find","row","collectionName","push","writeSingle","previous","err","removeConnectedStorageFromCollection","isThere","filter","item"],"sources":["../../src/rx-database-internal-store.ts"],"sourcesContent":["import {\n isBulkWriteConflictError,\n newRxError\n} from './rx-error.ts';\nimport {\n fillWithDefaultSettings,\n getComposedPrimaryKeyOfDocumentData\n} from './rx-schema-helper.ts';\nimport { getSingleDocument, writeSingle } from './rx-storage-helper.ts';\nimport type {\n CollectionsOfDatabase,\n InternalStoreCollectionDocType,\n InternalStoreDocType,\n InternalStoreStorageTokenDocType,\n RxCollection,\n RxDatabase,\n RxDocumentData,\n RxJsonSchema,\n RxStorageInstance,\n RxStorageWriteErrorConflict\n} from './types/index.d.ts';\nimport {\n clone,\n ensureNotFalsy,\n getDefaultRevision,\n getDefaultRxDocumentMeta,\n randomCouchString\n} from './plugins/utils/index.ts';\nimport { prepareQuery } from './rx-query.ts';\n\nexport const INTERNAL_CONTEXT_COLLECTION = 'collection';\nexport const INTERNAL_CONTEXT_STORAGE_TOKEN = 'storage-token';\nexport const INTERNAL_CONTEXT_MIGRATION_STATUS = 'rx-migration-status';\n\n/**\n * Do not change the title,\n * we have to flag the internal schema so that\n * some RxStorage implementations are able\n * to detect if the created RxStorageInstance\n * is from the internals or not,\n * to do some optimizations in some cases.\n */\nexport const INTERNAL_STORE_SCHEMA_TITLE = 'RxInternalDocument';\n\nexport const INTERNAL_STORE_SCHEMA: RxJsonSchema>> = 
fillWithDefaultSettings({\n version: 0,\n title: INTERNAL_STORE_SCHEMA_TITLE,\n primaryKey: {\n key: 'id',\n fields: [\n 'context',\n 'key'\n ],\n separator: '|'\n },\n type: 'object',\n properties: {\n id: {\n type: 'string',\n maxLength: 200\n },\n key: {\n type: 'string'\n },\n context: {\n type: 'string',\n enum: [\n INTERNAL_CONTEXT_COLLECTION,\n INTERNAL_CONTEXT_STORAGE_TOKEN,\n INTERNAL_CONTEXT_MIGRATION_STATUS,\n 'OTHER'\n ]\n },\n data: {\n type: 'object',\n additionalProperties: true\n }\n },\n indexes: [],\n required: [\n 'key',\n 'context',\n 'data'\n ],\n additionalProperties: false,\n /**\n * If the sharding plugin is used,\n * it must not shard on the internal RxStorageInstance\n * because that one anyway has only a small amount of documents\n * and also its creation is in the hot path of the initial page load,\n * so we should spend less time creating multiple RxStorageInstances.\n */\n sharding: {\n shards: 1,\n mode: 'collection'\n }\n});\n\n\nexport function getPrimaryKeyOfInternalDocument(\n key: string,\n context: string\n): string {\n return getComposedPrimaryKeyOfDocumentData(\n INTERNAL_STORE_SCHEMA,\n {\n key,\n context\n }\n );\n}\n\n/**\n * Returns all internal documents\n * with context 'collection'\n */\nexport async function getAllCollectionDocuments(\n storageInstance: RxStorageInstance, any, any>\n): Promise[]> {\n const getAllQueryPrepared = prepareQuery>(\n storageInstance.schema,\n {\n selector: {\n context: INTERNAL_CONTEXT_COLLECTION,\n _deleted: {\n $eq: false\n }\n },\n sort: [{ id: 'asc' }],\n skip: 0\n }\n );\n const queryResult = await storageInstance.query(getAllQueryPrepared);\n const allDocs = queryResult.documents;\n return allDocs;\n}\n\n/**\n * to not confuse multiInstance-messages with other databases that have the same\n * name and adapter, but do not share state with this one (for example in-memory-instances),\n * we set a storage-token and use it in the broadcast-channel\n */\nexport const 
STORAGE_TOKEN_DOCUMENT_KEY = 'storageToken';\n\nexport const STORAGE_TOKEN_DOCUMENT_ID = getPrimaryKeyOfInternalDocument(\n STORAGE_TOKEN_DOCUMENT_KEY,\n INTERNAL_CONTEXT_STORAGE_TOKEN\n);\n\nexport async function ensureStorageTokenDocumentExists(\n rxDatabase: RxDatabase\n): Promise> {\n\n /**\n * To have less read-write cycles,\n * we just try to insert a new document\n * and only fetch the existing one if a conflict happened.\n */\n const storageToken = randomCouchString(10);\n\n const passwordHash = rxDatabase.password ?\n await rxDatabase.hashFunction(JSON.stringify(rxDatabase.password)) :\n undefined;\n\n const docData: RxDocumentData = {\n id: STORAGE_TOKEN_DOCUMENT_ID,\n context: INTERNAL_CONTEXT_STORAGE_TOKEN,\n key: STORAGE_TOKEN_DOCUMENT_KEY,\n data: {\n rxdbVersion: rxDatabase.rxdbVersion,\n token: storageToken,\n /**\n * We add the instance token here\n * to be able to detect if a given RxDatabase instance\n * is the first instance that was ever created\n * or if databases have existed earlier on that storage\n * with the same database name.\n */\n instanceToken: rxDatabase.token,\n passwordHash\n },\n _deleted: false,\n _meta: getDefaultRxDocumentMeta(),\n _rev: getDefaultRevision(),\n _attachments: {}\n };\n\n const writeResult = await rxDatabase.internalStore.bulkWrite(\n [{ document: docData }],\n 'internal-add-storage-token'\n );\n if (writeResult.success[0]) {\n return writeResult.success[0];\n }\n\n /**\n * If we get a 409 error,\n * it means another instance already inserted the storage token.\n * So we get that token from the database and return that one.\n */\n const error = ensureNotFalsy(writeResult.error[0]);\n if (\n error.isError &&\n isBulkWriteConflictError(error)\n ) {\n const conflictError = (error as RxStorageWriteErrorConflict);\n\n if (\n !isDatabaseStateVersionCompatibleWithDatabaseCode(\n conflictError.documentInDb.data.rxdbVersion,\n rxDatabase.rxdbVersion\n )\n ) {\n throw newRxError('DM5', {\n args: {\n database: 
rxDatabase.name,\n databaseStateVersion: conflictError.documentInDb.data.rxdbVersion,\n codeVersion: rxDatabase.rxdbVersion\n }\n });\n }\n\n if (\n passwordHash &&\n passwordHash !== conflictError.documentInDb.data.passwordHash\n ) {\n throw newRxError('DB1', {\n passwordHash,\n existingPasswordHash: conflictError.documentInDb.data.passwordHash\n });\n }\n\n const storageTokenDocInDb = conflictError.documentInDb;\n return ensureNotFalsy(storageTokenDocInDb);\n }\n throw error;\n}\n\n\nexport function isDatabaseStateVersionCompatibleWithDatabaseCode(\n databaseStateVersion: string,\n codeVersion: string\n): boolean {\n if (!databaseStateVersion) {\n return false;\n }\n\n if (\n codeVersion.includes('beta') &&\n codeVersion !== databaseStateVersion\n ) {\n return false;\n }\n\n const stateMajor = databaseStateVersion.split('.')[0];\n const codeMajor = codeVersion.split('.')[0];\n if (stateMajor !== codeMajor) {\n return false;\n }\n return true;\n}\n\n\n\n\n\nexport async function addConnectedStorageToCollection(\n collection: RxCollection,\n storageCollectionName: string,\n schema: RxJsonSchema\n) {\n\n if (collection.schema.version !== schema.version) {\n throw newRxError('SNH', {\n schema,\n version: collection.schema.version,\n name: collection.name,\n collection,\n args: {\n storageCollectionName\n }\n });\n }\n\n const collectionNameWithVersion = _collectionNamePrimary(collection.name, collection.schema.jsonSchema);\n const collectionDocId = getPrimaryKeyOfInternalDocument(\n collectionNameWithVersion,\n INTERNAL_CONTEXT_COLLECTION\n );\n\n while (true) {\n const collectionDoc = await getSingleDocument(\n collection.database.internalStore,\n collectionDocId\n );\n const saveData: RxDocumentData = clone(ensureNotFalsy(collectionDoc));\n\n // do nothing if already in array\n const alreadyThere = saveData.data.connectedStorages\n .find(row => row.collectionName === storageCollectionName && row.schema.version === schema.version);\n if (alreadyThere) {\n return;\n 
}\n\n // otherwise add to array and save\n saveData.data.connectedStorages.push({\n collectionName: storageCollectionName,\n schema\n });\n try {\n await writeSingle(\n collection.database.internalStore,\n {\n previous: ensureNotFalsy(collectionDoc),\n document: saveData\n },\n 'add-connected-storage-to-collection'\n );\n } catch (err) {\n if (!isBulkWriteConflictError(err)) {\n throw err;\n }\n // retry on conflict\n }\n }\n}\n\nexport async function removeConnectedStorageFromCollection(\n collection: RxCollection,\n storageCollectionName: string,\n schema: RxJsonSchema\n) {\n if (collection.schema.version !== schema.version) {\n throw newRxError('SNH', {\n schema,\n version: collection.schema.version,\n name: collection.name,\n collection,\n args: {\n storageCollectionName\n }\n });\n }\n\n const collectionNameWithVersion = _collectionNamePrimary(collection.name, collection.schema.jsonSchema);\n const collectionDocId = getPrimaryKeyOfInternalDocument(\n collectionNameWithVersion,\n INTERNAL_CONTEXT_COLLECTION\n );\n\n while (true) {\n const collectionDoc = await getSingleDocument(\n collection.database.internalStore,\n collectionDocId\n );\n const saveData: RxDocumentData = clone(ensureNotFalsy(collectionDoc));\n\n // do nothing if not there\n const isThere = saveData.data.connectedStorages\n .find(row => row.collectionName === storageCollectionName && row.schema.version === schema.version);\n if (!isThere) {\n return;\n }\n\n // otherwise remove from array and save\n saveData.data.connectedStorages = saveData.data.connectedStorages.filter(item => item.collectionName !== storageCollectionName);\n try {\n await writeSingle(\n collection.database.internalStore,\n {\n previous: ensureNotFalsy(collectionDoc),\n document: saveData\n },\n 'remove-connected-storage-from-collection'\n );\n } catch (err) {\n if (!isBulkWriteConflictError(err)) {\n throw err;\n }\n // retry on conflict\n }\n }\n}\n\n\n\n/**\n * returns the primary for a given collection-data\n * used in 
the internal store of a RxDatabase\n */\nexport function _collectionNamePrimary(name: string, schema: RxJsonSchema) {\n return name + '-' + schema.version;\n}\n"],"mappings":";;;;;;;;;;;;;AAAA,IAAAA,QAAA,GAAAC,OAAA;AAIA,IAAAC,eAAA,GAAAD,OAAA;AAIA,IAAAE,gBAAA,GAAAF,OAAA;AAaA,IAAAG,MAAA,GAAAH,OAAA;AAOA,IAAAI,QAAA,GAAAJ,OAAA;AAEO,IAAMK,2BAA2B,GAAAC,OAAA,CAAAD,2BAAA,GAAG,YAAY;AAChD,IAAME,8BAA8B,GAAAD,OAAA,CAAAC,8BAAA,GAAG,eAAe;AACtD,IAAMC,iCAAiC,GAAAF,OAAA,CAAAE,iCAAA,GAAG,qBAAqB;;AAEtE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACO,IAAMC,2BAA2B,GAAAH,OAAA,CAAAG,2BAAA,GAAG,oBAAoB;AAExD,IAAMC,qBAA8E,GAAAJ,OAAA,CAAAI,qBAAA,GAAG,IAAAC,uCAAuB,EAAC;EAClHC,OAAO,EAAE,CAAC;EACVC,KAAK,EAAEJ,2BAA2B;EAClCK,UAAU,EAAE;IACRC,GAAG,EAAE,IAAI;IACTC,MAAM,EAAE,CACJ,SAAS,EACT,KAAK,CACR;IACDC,SAAS,EAAE;EACf,CAAC;EACDC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,EAAE,EAAE;MACAF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDN,GAAG,EAAE;MACDG,IAAI,EAAE;IACV,CAAC;IACDI,OAAO,EAAE;MACLJ,IAAI,EAAE,QAAQ;MACdK,IAAI,EAAE,CACFlB,2BAA2B,EAC3BE,8BAA8B,EAC9BC,iCAAiC,EACjC,OAAO;IAEf,CAAC;IACDgB,IAAI,EAAE;MACFN,IAAI,EAAE,QAAQ;MACdO,oBAAoB,EAAE;IAC1B;EACJ,CAAC;EACDC,OAAO,EAAE,EAAE;EACXC,QAAQ,EAAE,CACN,KAAK,EACL,SAAS,EACT,MAAM,CACT;EACDF,oBAAoB,EAAE,KAAK;EAC3B;AACJ;AACA;AACA;AACA;AACA;AACA;EACIG,QAAQ,EAAE;IACNC,MAAM,EAAE,CAAC;IACTC,IAAI,EAAE;EACV;AACJ,CAAC,CAAC;AAGK,SAASC,+BAA+BA,CAC3ChB,GAAW,EACXO,OAAe,EACT;EACN,OAAO,IAAAU,mDAAmC,EACtCtB,qBAAqB,EACrB;IACIK,GAAG;IACHO;EACJ,CACJ,CAAC;AACL;;AAEA;AACA;AACA;AACA;AACO,eAAeW,yBAAyBA,CAC3CC,eAAuE,EACd;EACzD,IAAMC,mBAAmB,GAAG,IAAAC,qBAAY,EACpCF,eAAe,CAACG,MAAM,EACtB;IACIC,QAAQ,EAAE;MACNhB,OAAO,EAAEjB,2BAA2B;MACpCkC,QAAQ,EAAE;QACNC,GAAG,EAAE;MACT;IACJ,CAAC;IACDC,IAAI,EAAE,CAAC;MAAErB,EAAE,EAAE;IAAM,CAAC,CAAC;IACrBsB,IAAI,EAAE;EACV,CACJ,CAAC;EACD,IAAMC,WAAW,GAAG,MAAMT,eAAe,CAACU,KAAK,CAACT,mBAAmB,CAAC;EACpE,IAAMU,OAAO,GAAGF,WAAW,CAACG,SAAS;EACrC,OAAOD,OAAO;AAClB;;AAEA;AACA;AACA;AACA;AACA;AACO,IAAME,0BAA0B,GAAAzC,OAAA,CAAAyC,0BAAA,GAAG,cAAc;AAEjD,IAAMC,yBAAyB,GAAA1C,OAAA,CAAA0C,yBAAA
,GAAGjB,+BAA+B,CACpEgB,0BAA0B,EAC1BxC,8BACJ,CAAC;AAEM,eAAe0C,gCAAgCA,CAClDC,UAAmC,EACsB;EAEzD;AACJ;AACA;AACA;AACA;EACI,IAAMC,YAAY,GAAG,IAAAC,wBAAiB,EAAC,EAAE,CAAC;EAE1C,IAAMC,YAAY,GAAGH,UAAU,CAACI,QAAQ,GACpC,MAAMJ,UAAU,CAACK,YAAY,CAACC,IAAI,CAACC,SAAS,CAACP,UAAU,CAACI,QAAQ,CAAC,CAAC,GAClEI,SAAS;EAEb,IAAMC,OAAyD,GAAG;IAC9DvC,EAAE,EAAE4B,yBAAyB;IAC7B1B,OAAO,EAAEf,8BAA8B;IACvCQ,GAAG,EAAEgC,0BAA0B;IAC/BvB,IAAI,EAAE;MACFoC,WAAW,EAAEV,UAAU,CAACU,WAAW;MACnCC,KAAK,EAAEV,YAAY;MACnB;AACZ;AACA;AACA;AACA;AACA;AACA;MACYW,aAAa,EAAEZ,UAAU,CAACW,KAAK;MAC/BR;IACJ,CAAC;IACDd,QAAQ,EAAE,KAAK;IACfwB,KAAK,EAAE,IAAAC,+BAAwB,EAAC,CAAC;IACjCC,IAAI,EAAE,IAAAC,yBAAkB,EAAC,CAAC;IAC1BC,YAAY,EAAE,CAAC;EACnB,CAAC;EAED,IAAMC,WAAW,GAAG,MAAMlB,UAAU,CAACmB,aAAa,CAACC,SAAS,CACxD,CAAC;IAAEC,QAAQ,EAAEZ;EAAQ,CAAC,CAAC,EACvB,4BACJ,CAAC;EACD,IAAIS,WAAW,CAACI,OAAO,CAAC,CAAC,CAAC,EAAE;IACxB,OAAOJ,WAAW,CAACI,OAAO,CAAC,CAAC,CAAC;EACjC;;EAEA;AACJ;AACA;AACA;AACA;EACI,IAAMC,KAAK,GAAG,IAAAC,qBAAc,EAACN,WAAW,CAACK,KAAK,CAAC,CAAC,CAAC,CAAC;EAClD,IACIA,KAAK,CAACE,OAAO,IACb,IAAAC,iCAAwB,EAACH,KAAK,CAAC,EACjC;IACE,IAAMI,aAAa,GAAIJ,KAAuE;IAE9F,IACI,CAACK,gDAAgD,CAC7CD,aAAa,CAACE,YAAY,CAACvD,IAAI,CAACoC,WAAW,EAC3CV,UAAU,CAACU,WACf,CAAC,EACH;MACE,MAAM,IAAAoB,mBAAU,EAAC,KAAK,EAAE;QACpBC,IAAI,EAAE;UACFC,QAAQ,EAAEhC,UAAU,CAACiC,IAAI;UACzBC,oBAAoB,EAAEP,aAAa,CAACE,YAAY,CAACvD,IAAI,CAACoC,WAAW;UACjEyB,WAAW,EAAEnC,UAAU,CAACU;QAC5B;MACJ,CAAC,CAAC;IACN;IAEA,IACIP,YAAY,IACZA,YAAY,KAAKwB,aAAa,CAACE,YAAY,CAACvD,IAAI,CAAC6B,YAAY,EAC/D;MACE,MAAM,IAAA2B,mBAAU,EAAC,KAAK,EAAE;QACpB3B,YAAY;QACZiC,oBAAoB,EAAET,aAAa,CAACE,YAAY,CAACvD,IAAI,CAAC6B;MAC1D,CAAC,CAAC;IACN;IAEA,IAAMkC,mBAAmB,GAAGV,aAAa,CAACE,YAAY;IACtD,OAAO,IAAAL,qBAAc,EAACa,mBAAmB,CAAC;EAC9C;EACA,MAAMd,KAAK;AACf;AAGO,SAASK,gDAAgDA,CAC5DM,oBAA4B,EAC5BC,WAAmB,EACZ;EACP,IAAI,CAACD,oBAAoB,EAAE;IACvB,OAAO,KAAK;EAChB;EAEA,IACIC,WAAW,CAACG,QAAQ,CAAC,MAAM,CAAC,IAC5BH,WAAW,KAAKD,oBAAoB,EACtC;IACE,OAAO,KAAK;EAChB;EAEA,IAAMK,UAAU,GAAGL,oBAAoB,CAACM,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;EACrD,I
AAMC,SAAS,GAAGN,WAAW,CAACK,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;EAC3C,IAAID,UAAU,KAAKE,SAAS,EAAE;IAC1B,OAAO,KAAK;EAChB;EACA,OAAO,IAAI;AACf;AAMO,eAAeC,+BAA+BA,CACjDC,UAA6B,EAC7BC,qBAA6B,EAC7BzD,MAAyB,EAC3B;EAEE,IAAIwD,UAAU,CAACxD,MAAM,CAACzB,OAAO,KAAKyB,MAAM,CAACzB,OAAO,EAAE;IAC9C,MAAM,IAAAoE,mBAAU,EAAC,KAAK,EAAE;MACpB3C,MAAM;MACNzB,OAAO,EAAEiF,UAAU,CAACxD,MAAM,CAACzB,OAAO;MAClCuE,IAAI,EAAEU,UAAU,CAACV,IAAI;MACrBU,UAAU;MACVZ,IAAI,EAAE;QACFa;MACJ;IACJ,CAAC,CAAC;EACN;EAEA,IAAMC,yBAAyB,GAAGC,sBAAsB,CAACH,UAAU,CAACV,IAAI,EAAEU,UAAU,CAACxD,MAAM,CAAC4D,UAAU,CAAC;EACvG,IAAMC,eAAe,GAAGnE,+BAA+B,CACnDgE,yBAAyB,EACzB1F,2BACJ,CAAC;EAED,OAAO,IAAI,EAAE;IACT,IAAM8F,aAAa,GAAG,MAAM,IAAAC,kCAAiB,EACzCP,UAAU,CAACX,QAAQ,CAACb,aAAa,EACjC6B,eACJ,CAAC;IACD,IAAMG,QAAwD,GAAG,IAAAC,YAAK,EAAC,IAAA5B,qBAAc,EAACyB,aAAa,CAAC,CAAC;;IAErG;IACA,IAAMI,YAAY,GAAGF,QAAQ,CAAC7E,IAAI,CAACgF,iBAAiB,CAC/CC,IAAI,CAACC,GAAG,IAAIA,GAAG,CAACC,cAAc,KAAKb,qBAAqB,IAAIY,GAAG,CAACrE,MAAM,CAACzB,OAAO,KAAKyB,MAAM,CAACzB,OAAO,CAAC;IACvG,IAAI2F,YAAY,EAAE;MACd;IACJ;;IAEA;IACAF,QAAQ,CAAC7E,IAAI,CAACgF,iBAAiB,CAACI,IAAI,CAAC;MACjCD,cAAc,EAAEb,qBAAqB;MACrCzD;IACJ,CAAC,CAAC;IACF,IAAI;MACA,MAAM,IAAAwE,4BAAW,EACbhB,UAAU,CAACX,QAAQ,CAACb,aAAa,EACjC;QACIyC,QAAQ,EAAE,IAAApC,qBAAc,EAACyB,aAAa,CAAC;QACvC5B,QAAQ,EAAE8B;MACd,CAAC,EACD,qCACJ,CAAC;IACL,CAAC,CAAC,OAAOU,GAAG,EAAE;MACV,IAAI,CAAC,IAAAnC,iCAAwB,EAACmC,GAAG,CAAC,EAAE;QAChC,MAAMA,GAAG;MACb;MACA;IACJ;EACJ;AACJ;AAEO,eAAeC,oCAAoCA,CACtDnB,UAA6B,EAC7BC,qBAA6B,EAC7BzD,MAAyB,EAC3B;EACE,IAAIwD,UAAU,CAACxD,MAAM,CAACzB,OAAO,KAAKyB,MAAM,CAACzB,OAAO,EAAE;IAC9C,MAAM,IAAAoE,mBAAU,EAAC,KAAK,EAAE;MACpB3C,MAAM;MACNzB,OAAO,EAAEiF,UAAU,CAACxD,MAAM,CAACzB,OAAO;MAClCuE,IAAI,EAAEU,UAAU,CAACV,IAAI;MACrBU,UAAU;MACVZ,IAAI,EAAE;QACFa;MACJ;IACJ,CAAC,CAAC;EACN;EAEA,IAAMC,yBAAyB,GAAGC,sBAAsB,CAACH,UAAU,CAACV,IAAI,EAAEU,UAAU,CAACxD,MAAM,CAAC4D,UAAU,CAAC;EACvG,IAAMC,eAAe,GAAGnE,+BAA+B,CACnDgE,yBAAyB,EACzB1F,2BACJ,CAAC;EAED,OAAO,IAAI,EAAE;IACT,IAAM8F,aAAa,GAAG,MAAM,IAAAC,kCAAiB,EACzCP,UAAU,CAACX,QAAQ,CAACb,
aAAa,EACjC6B,eACJ,CAAC;IACD,IAAMG,QAAwD,GAAG,IAAAC,YAAK,EAAC,IAAA5B,qBAAc,EAACyB,aAAa,CAAC,CAAC;;IAErG;IACA,IAAMc,OAAO,GAAGZ,QAAQ,CAAC7E,IAAI,CAACgF,iBAAiB,CAC1CC,IAAI,CAACC,GAAG,IAAIA,GAAG,CAACC,cAAc,KAAKb,qBAAqB,IAAIY,GAAG,CAACrE,MAAM,CAACzB,OAAO,KAAKyB,MAAM,CAACzB,OAAO,CAAC;IACvG,IAAI,CAACqG,OAAO,EAAE;MACV;IACJ;;IAEA;IACAZ,QAAQ,CAAC7E,IAAI,CAACgF,iBAAiB,GAAGH,QAAQ,CAAC7E,IAAI,CAACgF,iBAAiB,CAACU,MAAM,CAACC,IAAI,IAAIA,IAAI,CAACR,cAAc,KAAKb,qBAAqB,CAAC;IAC/H,IAAI;MACA,MAAM,IAAAe,4BAAW,EACbhB,UAAU,CAACX,QAAQ,CAACb,aAAa,EACjC;QACIyC,QAAQ,EAAE,IAAApC,qBAAc,EAACyB,aAAa,CAAC;QACvC5B,QAAQ,EAAE8B;MACd,CAAC,EACD,0CACJ,CAAC;IACL,CAAC,CAAC,OAAOU,GAAG,EAAE;MACV,IAAI,CAAC,IAAAnC,iCAAwB,EAACmC,GAAG,CAAC,EAAE;QAChC,MAAMA,GAAG;MACb;MACA;IACJ;EACJ;AACJ;;AAIA;AACA;AACA;AACA;AACO,SAASf,sBAAsBA,CAACb,IAAY,EAAE9C,MAAyB,EAAE;EAC5E,OAAO8C,IAAI,GAAG,GAAG,GAAG9C,MAAM,CAACzB,OAAO;AACtC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/rx-database.js b/dist/cjs/rx-database.js deleted file mode 100644 index cfd94de4aa3..00000000000 --- a/dist/cjs/rx-database.js +++ /dev/null @@ -1,546 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxDatabaseBase = void 0; -exports.createRxDatabase = createRxDatabase; -exports.createRxDatabaseStorageInstance = createRxDatabaseStorageInstance; -exports.dbCount = dbCount; -exports.ensureNoStartupErrors = ensureNoStartupErrors; -exports.isRxDatabase = isRxDatabase; -exports.isRxDatabaseFirstTimeInstantiated = isRxDatabaseFirstTimeInstantiated; -exports.removeRxDatabase = removeRxDatabase; -var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass")); -var _customIdleQueue = require("custom-idle-queue"); -var _index = require("./plugins/utils/index.js"); -var _rxError = require("./rx-error.js"); -var _rxSchema = require("./rx-schema.js"); -var _hooks = require("./hooks.js"); 
-var _rxjs = require("rxjs"); -var _operators = require("rxjs/operators"); -var _rxCollection = require("./rx-collection.js"); -var _rxStorageHelper = require("./rx-storage-helper.js"); -var _obliviousSet = require("oblivious-set"); -var _rxDatabaseInternalStore = require("./rx-database-internal-store.js"); -var _rxCollectionHelper = require("./rx-collection-helper.js"); -var _overwritable = require("./overwritable.js"); -/** - * stores the used database names+storage names - * so we can throw when the same database is created more then once. - */ -var USED_DATABASE_NAMES = new Set(); -var DB_COUNT = 0; -var RxDatabaseBase = exports.RxDatabaseBase = /*#__PURE__*/function () { - /** - * Contains all known non-closed storage instances - * that belong to this database. - * Used in plugins and unit tests. - */ - - function RxDatabaseBase(name, - /** - * Uniquely identifies the instance - * of this RxDatabase. - */ - token, storage, instanceCreationOptions, password, multiInstance, eventReduce = false, options = {}, - /** - * Stores information documents about the collections of the database - */ - internalStore, hashFunction, cleanupPolicy, allowSlowCount, reactivity) { - this.idleQueue = new _customIdleQueue.IdleQueue(); - this.rxdbVersion = _index.RXDB_VERSION; - this.storageInstances = new Set(); - this._subs = []; - this.startupErrors = []; - this.onDestroy = []; - this.destroyed = false; - this.collections = {}; - this.states = {}; - this.eventBulks$ = new _rxjs.Subject(); - this.observable$ = this.eventBulks$.pipe((0, _operators.mergeMap)(changeEventBulk => changeEventBulk.events)); - this.storageToken = _index.PROMISE_RESOLVE_FALSE; - this.storageTokenDocument = _index.PROMISE_RESOLVE_FALSE; - this.emittedEventBulkIds = new _obliviousSet.ObliviousSet(60 * 1000); - this.name = name; - this.token = token; - this.storage = storage; - this.instanceCreationOptions = instanceCreationOptions; - this.password = password; - this.multiInstance = multiInstance; - 
this.eventReduce = eventReduce; - this.options = options; - this.internalStore = internalStore; - this.hashFunction = hashFunction; - this.cleanupPolicy = cleanupPolicy; - this.allowSlowCount = allowSlowCount; - this.reactivity = reactivity; - DB_COUNT++; - - /** - * In the dev-mode, we create a pseudoInstance - * to get all properties of RxDatabase and ensure they do not - * conflict with the collection names etc. - * So only if it is not pseudoInstance, - * we have all values to prepare a real RxDatabase. - * - * TODO this is ugly, we should use a different way in the dev-mode - * so that all non-dev-mode code can be cleaner. - */ - if (this.name !== 'pseudoInstance') { - /** - * Wrap the internal store - * to ensure that calls to it also end up in - * calculation of the idle state and the hooks. - */ - this.internalStore = (0, _rxStorageHelper.getWrappedStorageInstance)(this.asRxDatabase, internalStore, _rxDatabaseInternalStore.INTERNAL_STORE_SCHEMA); - - /** - * Start writing the storage token. - * Do not await the creation because it would run - * in a critical path that increases startup time. - * - * Writing the token takes about 20 milliseconds - * even on a fast adapter, so this is worth it. - */ - this.storageTokenDocument = (0, _rxDatabaseInternalStore.ensureStorageTokenDocumentExists)(this.asRxDatabase).catch(err => this.startupErrors.push(err)); - this.storageToken = this.storageTokenDocument.then(doc => doc.data.token).catch(err => this.startupErrors.push(err)); - } - } - var _proto = RxDatabaseBase.prototype; - _proto.getReactivityFactory = function getReactivityFactory() { - if (!this.reactivity) { - throw (0, _rxError.newRxError)('DB14', { - database: this.name - }); - } - return this.reactivity; - } - - /** - * Because having unhandled exceptions would fail, - * we have to store the async errors of the constructor here - * so we can throw them later. - */ - - /** - * When the database is destroyed, - * these functions will be called an awaited. 
- * Used to automatically clean up stuff that - * belongs to this collection. - */ - - /** - * Unique token that is stored with the data. - * Used to detect if the dataset has been deleted - * and if two RxDatabase instances work on the same dataset or not. - * - * Because reading and writing the storageToken runs in the hot path - * of database creation, we do not await the storageWrites but instead - * work with the promise when we need the value. - */ - - /** - * Stores the whole state of the internal storage token document. - * We need this in some plugins. - */ - - /** - * Contains the ids of all event bulks that have been emitted - * by the database. - * Used to detect duplicates that come in again via BroadcastChannel - * or other streams. - * TODO instead of having this here, we should add a test to ensure each RxStorage - * behaves equal and does never emit duplicate eventBulks. - */; - /** - * This is the main handle-point for all change events - * ChangeEvents created by this instance go: - * RxDocument -> RxCollection -> RxDatabase.$emit -> MultiInstance - * ChangeEvents created by other instances go: - * MultiInstance -> RxDatabase.$emit -> RxCollection -> RxDatabase - */ - _proto.$emit = function $emit(changeEventBulk) { - if (this.emittedEventBulkIds.has(changeEventBulk.id)) { - return; - } - this.emittedEventBulkIds.add(changeEventBulk.id); - - // emit into own stream - this.eventBulks$.next(changeEventBulk); - } - - /** - * removes the collection-doc from the internalStore - */; - _proto.removeCollectionDoc = async function removeCollectionDoc(name, schema) { - var doc = await (0, _rxStorageHelper.getSingleDocument)(this.internalStore, (0, _rxDatabaseInternalStore.getPrimaryKeyOfInternalDocument)((0, _rxDatabaseInternalStore._collectionNamePrimary)(name, schema), _rxDatabaseInternalStore.INTERNAL_CONTEXT_COLLECTION)); - if (!doc) { - throw (0, _rxError.newRxError)('SNH', { - name, - schema - }); - } - var writeDoc = (0, 
_rxStorageHelper.flatCloneDocWithMeta)(doc); - writeDoc._deleted = true; - await this.internalStore.bulkWrite([{ - document: writeDoc, - previous: doc - }], 'rx-database-remove-collection'); - } - - /** - * creates multiple RxCollections at once - * to be much faster by saving db txs and doing stuff in bulk-operations - * This function is not called often, but mostly in the critical path at the initial page load - * So it must be as fast as possible. - */; - _proto.addCollections = async function addCollections(collectionCreators) { - var jsonSchemas = {}; - var schemas = {}; - var bulkPutDocs = []; - var useArgsByCollectionName = {}; - await Promise.all(Object.entries(collectionCreators).map(async ([name, args]) => { - var collectionName = name; - var rxJsonSchema = args.schema; - jsonSchemas[collectionName] = rxJsonSchema; - var schema = (0, _rxSchema.createRxSchema)(rxJsonSchema, this.hashFunction); - schemas[collectionName] = schema; - - // collection already exists - if (this.collections[name]) { - throw (0, _rxError.newRxError)('DB3', { - name - }); - } - var collectionNameWithVersion = (0, _rxDatabaseInternalStore._collectionNamePrimary)(name, rxJsonSchema); - var collectionDocData = { - id: (0, _rxDatabaseInternalStore.getPrimaryKeyOfInternalDocument)(collectionNameWithVersion, _rxDatabaseInternalStore.INTERNAL_CONTEXT_COLLECTION), - key: collectionNameWithVersion, - context: _rxDatabaseInternalStore.INTERNAL_CONTEXT_COLLECTION, - data: { - name: collectionName, - schemaHash: await schema.hash, - schema: schema.jsonSchema, - version: schema.version, - connectedStorages: [] - }, - _deleted: false, - _meta: (0, _index.getDefaultRxDocumentMeta)(), - _rev: (0, _index.getDefaultRevision)(), - _attachments: {} - }; - bulkPutDocs.push({ - document: collectionDocData - }); - var useArgs = Object.assign({}, args, { - name: collectionName, - schema, - database: this - }); - - // run hooks - var hookData = (0, _index.flatClone)(args); - hookData.database = this; - 
hookData.name = name; - (0, _hooks.runPluginHooks)('preCreateRxCollection', hookData); - useArgs.conflictHandler = hookData.conflictHandler; - useArgsByCollectionName[collectionName] = useArgs; - })); - var putDocsResult = await this.internalStore.bulkWrite(bulkPutDocs, 'rx-database-add-collection'); - await ensureNoStartupErrors(this); - await Promise.all(putDocsResult.error.map(async error => { - if (error.status !== 409) { - throw (0, _rxError.newRxError)('DB12', { - database: this.name, - writeError: error - }); - } - var docInDb = (0, _index.ensureNotFalsy)(error.documentInDb); - var collectionName = docInDb.data.name; - var schema = schemas[collectionName]; - // collection already exists but has different schema - if (docInDb.data.schemaHash !== (await schema.hash)) { - throw (0, _rxError.newRxError)('DB6', { - database: this.name, - collection: collectionName, - previousSchemaHash: docInDb.data.schemaHash, - schemaHash: await schema.hash, - previousSchema: docInDb.data.schema, - schema: (0, _index.ensureNotFalsy)(jsonSchemas[collectionName]) - }); - } - })); - var ret = {}; - await Promise.all(Object.keys(collectionCreators).map(async collectionName => { - var useArgs = useArgsByCollectionName[collectionName]; - var collection = await (0, _rxCollection.createRxCollection)(useArgs); - ret[collectionName] = collection; - - // set as getter to the database - this.collections[collectionName] = collection; - if (!this[collectionName]) { - Object.defineProperty(this, collectionName, { - get: () => this.collections[collectionName] - }); - } - })); - return ret; - } - - /** - * runs the given function between idleQueue-locking - */; - _proto.lockedRun = function lockedRun(fn) { - return this.idleQueue.wrapCall(fn); - }; - _proto.requestIdlePromise = function requestIdlePromise() { - return this.idleQueue.requestIdlePromise(); - } - - /** - * Export database to a JSON friendly format. 
- */; - _proto.exportJSON = function exportJSON(_collections) { - throw (0, _index.pluginMissing)('json-dump'); - }; - _proto.addState = function addState(_name) { - throw (0, _index.pluginMissing)('state'); - } - - /** - * Import the parsed JSON export into the collection. - * @param _exportedJSON The previously exported data from the `.exportJSON()` method. - * @note When an interface is loaded in this collection all base properties of the type are typed as `any` - * since data could be encrypted. - */; - _proto.importJSON = function importJSON(_exportedJSON) { - throw (0, _index.pluginMissing)('json-dump'); - }; - _proto.backup = function backup(_options) { - throw (0, _index.pluginMissing)('backup'); - }; - _proto.leaderElector = function leaderElector() { - throw (0, _index.pluginMissing)('leader-election'); - }; - _proto.isLeader = function isLeader() { - throw (0, _index.pluginMissing)('leader-election'); - } - /** - * returns a promise which resolves when the instance becomes leader - */; - _proto.waitForLeadership = function waitForLeadership() { - throw (0, _index.pluginMissing)('leader-election'); - }; - _proto.migrationStates = function migrationStates() { - throw (0, _index.pluginMissing)('migration-schema'); - } - - /** - * destroys the database-instance and all collections - */; - _proto.destroy = async function destroy() { - if (this.destroyed) { - return _index.PROMISE_RESOLVE_FALSE; - } - - // settings destroyed = true must be the first thing to do. - this.destroyed = true; - await (0, _hooks.runAsyncPluginHooks)('preDestroyRxDatabase', this); - /** - * Complete the event stream - * to stop all subscribers who forgot to unsubscribe. - */ - this.eventBulks$.complete(); - DB_COUNT--; - this._subs.map(sub => sub.unsubscribe()); - - /** - * Destroying the pseudo instance will throw - * because stuff is missing - * TODO we should not need the pseudo instance on runtime. - * we should generate the property list on build time. 
- */ - if (this.name === 'pseudoInstance') { - return _index.PROMISE_RESOLVE_FALSE; - } - - /** - * First wait until the database is idle - */ - return this.requestIdlePromise().then(() => Promise.all(this.onDestroy.map(fn => fn()))) - // destroy all collections - .then(() => Promise.all(Object.keys(this.collections).map(key => this.collections[key]).map(col => col.destroy()))) - // destroy internal storage instances - .then(() => this.internalStore.close()) - // remove combination from USED_COMBINATIONS-map - .then(() => USED_DATABASE_NAMES.delete(this.storage.name + '|' + this.name)).then(() => true); - } - - /** - * deletes the database and its stored data. - * Returns the names of all removed collections. - */; - _proto.remove = function remove() { - return this.destroy().then(() => removeRxDatabase(this.name, this.storage, this.password)); - }; - return (0, _createClass2.default)(RxDatabaseBase, [{ - key: "$", - get: function () { - return this.observable$; - } - }, { - key: "asRxDatabase", - get: function () { - return this; - } - }]); -}(); -/** - * checks if an instance with same name and storage already exists - * @throws {RxError} if used - */ -function throwIfDatabaseNameUsed(name, storage) { - var key = storage.name + '|' + name; - if (!USED_DATABASE_NAMES.has(key)) { - return; - } else { - throw (0, _rxError.newRxError)('DB8', { - name, - storage: storage.name, - link: 'https://rxdb.info/rx-database.html#ignoreduplicate' - }); - } -} - -/** - * Creates the storage instances that are used internally in the database - * to store schemas and other configuration stuff. 
- */ -async function createRxDatabaseStorageInstance(databaseInstanceToken, storage, databaseName, options, multiInstance, password) { - var internalStore = await storage.createStorageInstance({ - databaseInstanceToken, - databaseName, - collectionName: _rxStorageHelper.INTERNAL_STORAGE_NAME, - schema: _rxDatabaseInternalStore.INTERNAL_STORE_SCHEMA, - options, - multiInstance, - password, - devMode: _overwritable.overwritable.isDevMode() - }); - return internalStore; -} -function createRxDatabase({ - storage, - instanceCreationOptions, - name, - password, - multiInstance = true, - eventReduce = true, - ignoreDuplicate = false, - options = {}, - cleanupPolicy, - allowSlowCount = false, - localDocuments = false, - hashFunction = _index.defaultHashSha256, - reactivity -}) { - (0, _hooks.runPluginHooks)('preCreateRxDatabase', { - storage, - instanceCreationOptions, - name, - password, - multiInstance, - eventReduce, - ignoreDuplicate, - options, - localDocuments - }); - // check if combination already used - if (!ignoreDuplicate) { - throwIfDatabaseNameUsed(name, storage); - } - USED_DATABASE_NAMES.add(storage.name + '|' + name); - var databaseInstanceToken = (0, _index.randomCouchString)(10); - return createRxDatabaseStorageInstance(databaseInstanceToken, storage, name, instanceCreationOptions, multiInstance, password) - /** - * Creating the internal store might fail - * if some RxStorage wrapper is used that does some checks - * and then throw. - * In that case we have to properly clean up the database. 
- */.catch(err => { - USED_DATABASE_NAMES.delete(storage.name + '|' + name); - throw err; - }).then(storageInstance => { - var rxDatabase = new RxDatabaseBase(name, databaseInstanceToken, storage, instanceCreationOptions, password, multiInstance, eventReduce, options, storageInstance, hashFunction, cleanupPolicy, allowSlowCount, reactivity); - return (0, _hooks.runAsyncPluginHooks)('createRxDatabase', { - database: rxDatabase, - creator: { - storage, - instanceCreationOptions, - name, - password, - multiInstance, - eventReduce, - ignoreDuplicate, - options, - localDocuments - } - }).then(() => rxDatabase); - }); -} - -/** - * Removes the database and all its known data - * with all known collections and all internal meta data. - * - * Returns the names of the removed collections. - */ -async function removeRxDatabase(databaseName, storage, password) { - var databaseInstanceToken = (0, _index.randomCouchString)(10); - var dbInternalsStorageInstance = await createRxDatabaseStorageInstance(databaseInstanceToken, storage, databaseName, {}, false, password); - var collectionDocs = await (0, _rxDatabaseInternalStore.getAllCollectionDocuments)(dbInternalsStorageInstance); - var collectionNames = new Set(); - collectionDocs.forEach(doc => collectionNames.add(doc.data.name)); - var removedCollectionNames = Array.from(collectionNames); - await Promise.all(removedCollectionNames.map(collectionName => (0, _rxCollectionHelper.removeCollectionStorages)(storage, dbInternalsStorageInstance, databaseInstanceToken, databaseName, collectionName, password))); - await (0, _hooks.runAsyncPluginHooks)('postRemoveRxDatabase', { - databaseName, - storage - }); - await dbInternalsStorageInstance.remove(); - return removedCollectionNames; -} -function isRxDatabase(obj) { - return obj instanceof RxDatabaseBase; -} -function dbCount() { - return DB_COUNT; -} - -/** - * Returns true if the given RxDatabase was the first - * instance that was created on the storage with this name. 
- * - * Can be used for some optimizations because on the first instantiation, - * we can assume that no data was written before. - */ -async function isRxDatabaseFirstTimeInstantiated(database) { - var tokenDoc = await database.storageTokenDocument; - return tokenDoc.data.instanceToken === database.token; -} - -/** - * For better performance some tasks run async - * and are awaited later. - * But we still have to ensure that there have been no errors - * on database creation. - */ -async function ensureNoStartupErrors(rxDatabase) { - await rxDatabase.storageToken; - if (rxDatabase.startupErrors[0]) { - throw rxDatabase.startupErrors[0]; - } -} -//# sourceMappingURL=rx-database.js.map \ No newline at end of file diff --git a/dist/cjs/rx-database.js.map b/dist/cjs/rx-database.js.map deleted file mode 100644 index 030a274102e..00000000000 --- a/dist/cjs/rx-database.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-database.js","names":["_customIdleQueue","require","_index","_rxError","_rxSchema","_hooks","_rxjs","_operators","_rxCollection","_rxStorageHelper","_obliviousSet","_rxDatabaseInternalStore","_rxCollectionHelper","_overwritable","USED_DATABASE_NAMES","Set","DB_COUNT","RxDatabaseBase","exports","name","token","storage","instanceCreationOptions","password","multiInstance","eventReduce","options","internalStore","hashFunction","cleanupPolicy","allowSlowCount","reactivity","idleQueue","IdleQueue","rxdbVersion","RXDB_VERSION","storageInstances","_subs","startupErrors","onDestroy","destroyed","collections","states","eventBulks$","Subject","observable$","pipe","mergeMap","changeEventBulk","events","storageToken","PROMISE_RESOLVE_FALSE","storageTokenDocument","emittedEventBulkIds","ObliviousSet","getWrappedStorageInstance","asRxDatabase","INTERNAL_STORE_SCHEMA","ensureStorageTokenDocumentExists","catch","err","push","then","doc","data","_proto","prototype","getReactivityFactory","newRxError","database","$emit","has","id","add","next","removeCollectionDoc",
"schema","getSingleDocument","getPrimaryKeyOfInternalDocument","_collectionNamePrimary","INTERNAL_CONTEXT_COLLECTION","writeDoc","flatCloneDocWithMeta","_deleted","bulkWrite","document","previous","addCollections","collectionCreators","jsonSchemas","schemas","bulkPutDocs","useArgsByCollectionName","Promise","all","Object","entries","map","args","collectionName","rxJsonSchema","createRxSchema","collectionNameWithVersion","collectionDocData","key","context","schemaHash","hash","jsonSchema","version","connectedStorages","_meta","getDefaultRxDocumentMeta","_rev","getDefaultRevision","_attachments","useArgs","assign","hookData","flatClone","runPluginHooks","conflictHandler","putDocsResult","ensureNoStartupErrors","error","status","writeError","docInDb","ensureNotFalsy","documentInDb","collection","previousSchemaHash","previousSchema","ret","keys","createRxCollection","defineProperty","get","lockedRun","fn","wrapCall","requestIdlePromise","exportJSON","_collections","pluginMissing","addState","_name","importJSON","_exportedJSON","backup","_options","leaderElector","isLeader","waitForLeadership","migrationStates","destroy","runAsyncPluginHooks","complete","sub","unsubscribe","col","close","delete","remove","removeRxDatabase","_createClass2","default","throwIfDatabaseNameUsed","link","createRxDatabaseStorageInstance","databaseInstanceToken","databaseName","createStorageInstance","INTERNAL_STORAGE_NAME","devMode","overwritable","isDevMode","createRxDatabase","ignoreDuplicate","localDocuments","defaultHashSha256","randomCouchString","storageInstance","rxDatabase","creator","dbInternalsStorageInstance","collectionDocs","getAllCollectionDocuments","collectionNames","forEach","removedCollectionNames","Array","from","removeCollectionStorages","isRxDatabase","obj","dbCount","isRxDatabaseFirstTimeInstantiated","tokenDoc","instanceToken"],"sources":["../../src/rx-database.ts"],"sourcesContent":["import { IdleQueue } from 'custom-idle-queue';\nimport type {\n LeaderElector\n} from 
'broadcast-channel';\nimport type {\n CollectionsOfDatabase,\n RxDatabase,\n RxCollectionCreator,\n RxJsonSchema,\n RxCollection,\n RxDumpDatabase,\n RxDumpDatabaseAny,\n BackupOptions,\n RxStorage,\n RxStorageInstance,\n BulkWriteRow,\n RxChangeEvent,\n RxDatabaseCreator,\n RxChangeEventBulk,\n RxDocumentData,\n RxCleanupPolicy,\n InternalStoreDocType,\n InternalStoreStorageTokenDocType,\n InternalStoreCollectionDocType,\n RxTypeError,\n RxError,\n HashFunction,\n MaybePromise,\n RxState\n} from './types/index.d.ts';\n\nimport {\n pluginMissing,\n flatClone,\n PROMISE_RESOLVE_FALSE,\n randomCouchString,\n ensureNotFalsy,\n getDefaultRevision,\n getDefaultRxDocumentMeta,\n defaultHashSha256,\n RXDB_VERSION\n} from './plugins/utils/index.ts';\nimport {\n newRxError\n} from './rx-error.ts';\nimport {\n createRxSchema,\n RxSchema\n} from './rx-schema.ts';\nimport {\n runPluginHooks,\n runAsyncPluginHooks\n} from './hooks.ts';\nimport {\n Subject,\n Subscription,\n Observable\n} from 'rxjs';\nimport {\n mergeMap\n} from 'rxjs/operators';\nimport {\n createRxCollection\n} from './rx-collection.ts';\nimport {\n flatCloneDocWithMeta,\n getSingleDocument,\n getWrappedStorageInstance,\n INTERNAL_STORAGE_NAME,\n WrappedRxStorageInstance\n} from './rx-storage-helper.ts';\nimport type { RxBackupState } from './plugins/backup/index.ts';\nimport { ObliviousSet } from 'oblivious-set';\nimport {\n ensureStorageTokenDocumentExists,\n getAllCollectionDocuments,\n getPrimaryKeyOfInternalDocument,\n INTERNAL_CONTEXT_COLLECTION,\n INTERNAL_STORE_SCHEMA,\n _collectionNamePrimary\n} from './rx-database-internal-store.ts';\nimport { removeCollectionStorages } from './rx-collection-helper.ts';\nimport { overwritable } from './overwritable.ts';\nimport type { RxMigrationState } from './plugins/migration-schema/index.ts';\nimport type { RxReactivityFactory } from './types/plugins/reactivity.d.ts';\n\n/**\n * stores the used database names+storage names\n * so we can throw when the same 
database is created more then once.\n */\nconst USED_DATABASE_NAMES: Set = new Set();\n\nlet DB_COUNT = 0;\n\nexport class RxDatabaseBase<\n Internals,\n InstanceCreationOptions,\n Collections = CollectionsOfDatabase,\n Reactivity = unknown\n> {\n\n public readonly idleQueue: IdleQueue = new IdleQueue();\n public readonly rxdbVersion = RXDB_VERSION;\n\n /**\n * Contains all known non-closed storage instances\n * that belong to this database.\n * Used in plugins and unit tests.\n */\n public readonly storageInstances = new Set>();\n\n constructor(\n public readonly name: string,\n /**\n * Uniquely identifies the instance\n * of this RxDatabase.\n */\n public readonly token: string,\n public readonly storage: RxStorage,\n public readonly instanceCreationOptions: InstanceCreationOptions,\n public readonly password: any,\n public readonly multiInstance: boolean,\n public readonly eventReduce: boolean = false,\n public options: any = {},\n /**\n * Stores information documents about the collections of the database\n */\n public readonly internalStore: RxStorageInstance,\n public readonly hashFunction: HashFunction,\n public readonly cleanupPolicy?: Partial,\n public readonly allowSlowCount?: boolean,\n public readonly reactivity?: RxReactivityFactory\n ) {\n DB_COUNT++;\n\n /**\n * In the dev-mode, we create a pseudoInstance\n * to get all properties of RxDatabase and ensure they do not\n * conflict with the collection names etc.\n * So only if it is not pseudoInstance,\n * we have all values to prepare a real RxDatabase.\n *\n * TODO this is ugly, we should use a different way in the dev-mode\n * so that all non-dev-mode code can be cleaner.\n */\n if (this.name !== 'pseudoInstance') {\n /**\n * Wrap the internal store\n * to ensure that calls to it also end up in\n * calculation of the idle state and the hooks.\n */\n this.internalStore = getWrappedStorageInstance(\n this.asRxDatabase,\n internalStore,\n INTERNAL_STORE_SCHEMA\n );\n\n /**\n * Start writing the storage 
token.\n * Do not await the creation because it would run\n * in a critical path that increases startup time.\n *\n * Writing the token takes about 20 milliseconds\n * even on a fast adapter, so this is worth it.\n */\n this.storageTokenDocument = ensureStorageTokenDocumentExists(this.asRxDatabase)\n .catch(err => this.startupErrors.push(err) as any);\n this.storageToken = this.storageTokenDocument\n .then(doc => doc.data.token)\n .catch(err => this.startupErrors.push(err) as any);\n }\n }\n\n get $(): Observable> {\n return this.observable$;\n }\n\n public getReactivityFactory(): RxReactivityFactory {\n if (!this.reactivity) {\n throw newRxError('DB14', { database: this.name });\n }\n return this.reactivity;\n }\n\n public _subs: Subscription[] = [];\n\n /**\n * Because having unhandled exceptions would fail,\n * we have to store the async errors of the constructor here\n * so we can throw them later.\n */\n public startupErrors: (RxError | RxTypeError)[] = [];\n\n /**\n * When the database is destroyed,\n * these functions will be called an awaited.\n * Used to automatically clean up stuff that\n * belongs to this collection.\n */\n public onDestroy: (() => MaybePromise)[] = [];\n public destroyed: boolean = false;\n public collections: Collections = {} as any;\n public states: { [name: string]: RxState; } = {};\n public readonly eventBulks$: Subject> = new Subject();\n private observable$: Observable> = this.eventBulks$\n .pipe(\n mergeMap(changeEventBulk => changeEventBulk.events)\n );\n\n /**\n * Unique token that is stored with the data.\n * Used to detect if the dataset has been deleted\n * and if two RxDatabase instances work on the same dataset or not.\n *\n * Because reading and writing the storageToken runs in the hot path\n * of database creation, we do not await the storageWrites but instead\n * work with the promise when we need the value.\n */\n public storageToken: Promise = PROMISE_RESOLVE_FALSE as any;\n /**\n * Stores the whole state of the 
internal storage token document.\n * We need this in some plugins.\n */\n public storageTokenDocument: Promise> = PROMISE_RESOLVE_FALSE as any;\n\n /**\n * Contains the ids of all event bulks that have been emitted\n * by the database.\n * Used to detect duplicates that come in again via BroadcastChannel\n * or other streams.\n * TODO instead of having this here, we should add a test to ensure each RxStorage\n * behaves equal and does never emit duplicate eventBulks.\n */\n public emittedEventBulkIds: ObliviousSet = new ObliviousSet(60 * 1000);\n\n /**\n * This is the main handle-point for all change events\n * ChangeEvents created by this instance go:\n * RxDocument -> RxCollection -> RxDatabase.$emit -> MultiInstance\n * ChangeEvents created by other instances go:\n * MultiInstance -> RxDatabase.$emit -> RxCollection -> RxDatabase\n */\n $emit(changeEventBulk: RxChangeEventBulk) {\n if (this.emittedEventBulkIds.has(changeEventBulk.id)) {\n return;\n }\n this.emittedEventBulkIds.add(changeEventBulk.id);\n\n // emit into own stream\n this.eventBulks$.next(changeEventBulk);\n }\n\n /**\n * removes the collection-doc from the internalStore\n */\n async removeCollectionDoc(name: string, schema: any): Promise {\n const doc = await getSingleDocument(\n this.internalStore,\n getPrimaryKeyOfInternalDocument(\n _collectionNamePrimary(name, schema),\n INTERNAL_CONTEXT_COLLECTION\n )\n );\n if (!doc) {\n throw newRxError('SNH', { name, schema });\n }\n const writeDoc = flatCloneDocWithMeta(doc);\n writeDoc._deleted = true;\n\n await this.internalStore.bulkWrite([{\n document: writeDoc,\n previous: doc\n }], 'rx-database-remove-collection');\n }\n\n /**\n * creates multiple RxCollections at once\n * to be much faster by saving db txs and doing stuff in bulk-operations\n * This function is not called often, but mostly in the critical path at the initial page load\n * So it must be as fast as possible.\n */\n async addCollections>(collectionCreators: {\n [key in keyof 
CreatedCollections]: RxCollectionCreator\n }): Promise<{ [key in keyof CreatedCollections]: RxCollection }> {\n const jsonSchemas: { [key in keyof CreatedCollections]: RxJsonSchema } = {} as any;\n const schemas: { [key in keyof CreatedCollections]: RxSchema } = {} as any;\n const bulkPutDocs: BulkWriteRow[] = [];\n const useArgsByCollectionName: any = {};\n\n await Promise.all(\n Object.entries(collectionCreators).map(async ([name, args]) => {\n const collectionName: keyof CreatedCollections = name as any;\n const rxJsonSchema = (args as RxCollectionCreator).schema;\n jsonSchemas[collectionName] = rxJsonSchema;\n const schema = createRxSchema(rxJsonSchema, this.hashFunction);\n schemas[collectionName] = schema;\n\n // collection already exists\n if ((this.collections as any)[name]) {\n throw newRxError('DB3', {\n name\n });\n }\n\n const collectionNameWithVersion = _collectionNamePrimary(name, rxJsonSchema);\n const collectionDocData: RxDocumentData = {\n id: getPrimaryKeyOfInternalDocument(\n collectionNameWithVersion,\n INTERNAL_CONTEXT_COLLECTION\n ),\n key: collectionNameWithVersion,\n context: INTERNAL_CONTEXT_COLLECTION,\n data: {\n name: collectionName as any,\n schemaHash: await schema.hash,\n schema: schema.jsonSchema,\n version: schema.version,\n connectedStorages: []\n },\n _deleted: false,\n _meta: getDefaultRxDocumentMeta(),\n _rev: getDefaultRevision(),\n _attachments: {}\n };\n bulkPutDocs.push({\n document: collectionDocData\n });\n\n const useArgs: any = Object.assign(\n {},\n args,\n {\n name: collectionName,\n schema,\n database: this\n }\n );\n\n // run hooks\n const hookData: RxCollectionCreator & { name: string; } = flatClone(args) as any;\n (hookData as any).database = this;\n hookData.name = name;\n runPluginHooks('preCreateRxCollection', hookData);\n useArgs.conflictHandler = hookData.conflictHandler;\n\n useArgsByCollectionName[collectionName] = useArgs;\n })\n );\n\n\n const putDocsResult = await this.internalStore.bulkWrite(\n 
bulkPutDocs,\n 'rx-database-add-collection'\n );\n\n await ensureNoStartupErrors(this);\n\n await Promise.all(\n putDocsResult.error.map(async (error) => {\n if (error.status !== 409) {\n throw newRxError('DB12', {\n database: this.name,\n writeError: error\n });\n }\n const docInDb: RxDocumentData = ensureNotFalsy(error.documentInDb);\n const collectionName = docInDb.data.name;\n const schema = (schemas as any)[collectionName];\n // collection already exists but has different schema\n if (docInDb.data.schemaHash !== await schema.hash) {\n throw newRxError('DB6', {\n database: this.name,\n collection: collectionName,\n previousSchemaHash: docInDb.data.schemaHash,\n schemaHash: await schema.hash,\n previousSchema: docInDb.data.schema,\n schema: ensureNotFalsy((jsonSchemas as any)[collectionName])\n });\n }\n })\n );\n\n const ret: { [key in keyof CreatedCollections]: RxCollection } = {} as any;\n await Promise.all(\n Object.keys(collectionCreators).map(async (collectionName) => {\n const useArgs = useArgsByCollectionName[collectionName];\n const collection = await createRxCollection(useArgs);\n (ret as any)[collectionName] = collection;\n\n // set as getter to the database\n (this.collections as any)[collectionName] = collection;\n if (!(this as any)[collectionName]) {\n Object.defineProperty(this, collectionName, {\n get: () => (this.collections as any)[collectionName]\n });\n }\n })\n );\n\n return ret;\n }\n\n /**\n * runs the given function between idleQueue-locking\n */\n lockedRun(fn: (...args: any[]) => T): T extends Promise ? 
T : Promise {\n return this.idleQueue.wrapCall(fn) as any;\n }\n\n requestIdlePromise() {\n return this.idleQueue.requestIdlePromise();\n }\n\n /**\n * Export database to a JSON friendly format.\n */\n exportJSON(_collections?: string[]): Promise>;\n exportJSON(_collections?: string[]): Promise>;\n exportJSON(_collections?: string[]): Promise {\n throw pluginMissing('json-dump');\n }\n\n addState(_name?: string): Promise> {\n throw pluginMissing('state');\n }\n\n /**\n * Import the parsed JSON export into the collection.\n * @param _exportedJSON The previously exported data from the `.exportJSON()` method.\n * @note When an interface is loaded in this collection all base properties of the type are typed as `any`\n * since data could be encrypted.\n */\n importJSON(_exportedJSON: RxDumpDatabaseAny): Promise {\n throw pluginMissing('json-dump');\n }\n\n backup(_options: BackupOptions): RxBackupState {\n throw pluginMissing('backup');\n }\n\n public leaderElector(): LeaderElector {\n throw pluginMissing('leader-election');\n }\n\n public isLeader(): boolean {\n throw pluginMissing('leader-election');\n }\n /**\n * returns a promise which resolves when the instance becomes leader\n */\n public waitForLeadership(): Promise {\n throw pluginMissing('leader-election');\n }\n\n public migrationStates(): Observable {\n throw pluginMissing('migration-schema');\n }\n\n /**\n * destroys the database-instance and all collections\n */\n public async destroy(): Promise {\n if (this.destroyed) {\n return PROMISE_RESOLVE_FALSE;\n }\n\n // settings destroyed = true must be the first thing to do.\n this.destroyed = true;\n\n await runAsyncPluginHooks('preDestroyRxDatabase', this);\n /**\n * Complete the event stream\n * to stop all subscribers who forgot to unsubscribe.\n */\n this.eventBulks$.complete();\n\n DB_COUNT--;\n this._subs.map(sub => sub.unsubscribe());\n\n /**\n * Destroying the pseudo instance will throw\n * because stuff is missing\n * TODO we should not need the pseudo 
instance on runtime.\n * we should generate the property list on build time.\n */\n if (this.name === 'pseudoInstance') {\n return PROMISE_RESOLVE_FALSE;\n }\n\n /**\n * First wait until the database is idle\n */\n return this.requestIdlePromise()\n .then(() => Promise.all(this.onDestroy.map(fn => fn())))\n // destroy all collections\n .then(() => Promise.all(\n Object.keys(this.collections as any)\n .map(key => (this.collections as any)[key])\n .map(col => col.destroy())\n ))\n // destroy internal storage instances\n .then(() => this.internalStore.close())\n // remove combination from USED_COMBINATIONS-map\n .then(() => USED_DATABASE_NAMES.delete(this.storage.name + '|' + this.name))\n .then(() => true);\n }\n\n /**\n * deletes the database and its stored data.\n * Returns the names of all removed collections.\n */\n remove(): Promise {\n return this\n .destroy()\n .then(() => removeRxDatabase(this.name, this.storage, this.password));\n }\n\n get asRxDatabase(): RxDatabase<\n {},\n Internals,\n InstanceCreationOptions,\n Reactivity\n > {\n return this as any;\n }\n}\n\n/**\n * checks if an instance with same name and storage already exists\n * @throws {RxError} if used\n */\nfunction throwIfDatabaseNameUsed(\n name: string,\n storage: RxStorage\n) {\n const key = storage.name + '|' + name;\n if (!USED_DATABASE_NAMES.has(key)) {\n return;\n } else {\n throw newRxError('DB8', {\n name,\n storage: storage.name,\n link: 'https://rxdb.info/rx-database.html#ignoreduplicate'\n });\n }\n}\n\n/**\n * Creates the storage instances that are used internally in the database\n * to store schemas and other configuration stuff.\n */\nexport async function createRxDatabaseStorageInstance(\n databaseInstanceToken: string,\n storage: RxStorage,\n databaseName: string,\n options: InstanceCreationOptions,\n multiInstance: boolean,\n password?: string\n): Promise> {\n const internalStore = await storage.createStorageInstance(\n {\n databaseInstanceToken,\n databaseName,\n 
collectionName: INTERNAL_STORAGE_NAME,\n schema: INTERNAL_STORE_SCHEMA,\n options,\n multiInstance,\n password,\n devMode: overwritable.isDevMode()\n }\n );\n return internalStore;\n}\n\nexport function createRxDatabase<\n Collections = { [key: string]: RxCollection; },\n Internals = any,\n InstanceCreationOptions = any,\n Reactivity = unknown\n>(\n {\n storage,\n instanceCreationOptions,\n name,\n password,\n multiInstance = true,\n eventReduce = true,\n ignoreDuplicate = false,\n options = {},\n cleanupPolicy,\n allowSlowCount = false,\n localDocuments = false,\n hashFunction = defaultHashSha256,\n reactivity\n }: RxDatabaseCreator\n): Promise<\n RxDatabase\n> {\n runPluginHooks('preCreateRxDatabase', {\n storage,\n instanceCreationOptions,\n name,\n password,\n multiInstance,\n eventReduce,\n ignoreDuplicate,\n options,\n localDocuments\n });\n // check if combination already used\n if (!ignoreDuplicate) {\n throwIfDatabaseNameUsed(name, storage);\n }\n USED_DATABASE_NAMES.add(storage.name + '|' + name);\n\n const databaseInstanceToken = randomCouchString(10);\n\n return createRxDatabaseStorageInstance<\n Internals,\n InstanceCreationOptions\n >(\n databaseInstanceToken,\n storage,\n name,\n instanceCreationOptions as any,\n multiInstance,\n password\n )\n /**\n * Creating the internal store might fail\n * if some RxStorage wrapper is used that does some checks\n * and then throw.\n * In that case we have to properly clean up the database.\n */\n .catch(err => {\n USED_DATABASE_NAMES.delete(storage.name + '|' + name);\n throw err;\n })\n .then(storageInstance => {\n const rxDatabase: RxDatabase = new RxDatabaseBase(\n name,\n databaseInstanceToken,\n storage,\n instanceCreationOptions,\n password,\n multiInstance,\n eventReduce,\n options,\n storageInstance,\n hashFunction,\n cleanupPolicy,\n allowSlowCount,\n reactivity\n ) as any;\n\n return runAsyncPluginHooks('createRxDatabase', {\n database: rxDatabase,\n creator: {\n storage,\n instanceCreationOptions,\n 
name,\n password,\n multiInstance,\n eventReduce,\n ignoreDuplicate,\n options,\n localDocuments\n }\n }).then(() => rxDatabase);\n });\n}\n\n/**\n * Removes the database and all its known data\n * with all known collections and all internal meta data.\n *\n * Returns the names of the removed collections.\n */\nexport async function removeRxDatabase(\n databaseName: string,\n storage: RxStorage,\n password?: string\n): Promise {\n const databaseInstanceToken = randomCouchString(10);\n const dbInternalsStorageInstance = await createRxDatabaseStorageInstance(\n databaseInstanceToken,\n storage,\n databaseName,\n {},\n false,\n password\n );\n const collectionDocs = await getAllCollectionDocuments(dbInternalsStorageInstance);\n const collectionNames = new Set();\n collectionDocs.forEach(doc => collectionNames.add(doc.data.name));\n const removedCollectionNames: string[] = Array.from(collectionNames);\n\n await Promise.all(\n removedCollectionNames.map(collectionName => removeCollectionStorages(\n storage,\n dbInternalsStorageInstance,\n databaseInstanceToken,\n databaseName,\n collectionName,\n password\n ))\n );\n\n await runAsyncPluginHooks('postRemoveRxDatabase', {\n databaseName,\n storage\n });\n\n await dbInternalsStorageInstance.remove();\n return removedCollectionNames;\n}\n\nexport function isRxDatabase(obj: any) {\n return obj instanceof RxDatabaseBase;\n}\n\nexport function dbCount(): number {\n return DB_COUNT;\n}\n\n\n/**\n * Returns true if the given RxDatabase was the first\n * instance that was created on the storage with this name.\n *\n * Can be used for some optimizations because on the first instantiation,\n * we can assume that no data was written before.\n */\nexport async function isRxDatabaseFirstTimeInstantiated(\n database: RxDatabase\n): Promise {\n const tokenDoc = await database.storageTokenDocument;\n return tokenDoc.data.instanceToken === database.token;\n}\n\n\n/**\n * For better performance some tasks run async\n * and are awaited 
later.\n * But we still have to ensure that there have been no errors\n * on database creation.\n */\nexport async function ensureNoStartupErrors(\n rxDatabase: RxDatabaseBase\n) {\n await rxDatabase.storageToken;\n if (rxDatabase.startupErrors[0]) {\n throw rxDatabase.startupErrors[0];\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;AAAA,IAAAA,gBAAA,GAAAC,OAAA;AA+BA,IAAAC,MAAA,GAAAD,OAAA;AAWA,IAAAE,QAAA,GAAAF,OAAA;AAGA,IAAAG,SAAA,GAAAH,OAAA;AAIA,IAAAI,MAAA,GAAAJ,OAAA;AAIA,IAAAK,KAAA,GAAAL,OAAA;AAKA,IAAAM,UAAA,GAAAN,OAAA;AAGA,IAAAO,aAAA,GAAAP,OAAA;AAGA,IAAAQ,gBAAA,GAAAR,OAAA;AAQA,IAAAS,aAAA,GAAAT,OAAA;AACA,IAAAU,wBAAA,GAAAV,OAAA;AAQA,IAAAW,mBAAA,GAAAX,OAAA;AACA,IAAAY,aAAA,GAAAZ,OAAA;AAIA;AACA;AACA;AACA;AACA,IAAMa,mBAAgC,GAAG,IAAIC,GAAG,CAAC,CAAC;AAElD,IAAIC,QAAQ,GAAG,CAAC;AAAC,IAEJC,cAAc,GAAAC,OAAA,CAAAD,cAAA;EAUvB;AACJ;AACA;AACA;AACA;;EAGI,SAAAA,eACoBE,IAAY;EAC5B;AACR;AACA;AACA;EACwBC,KAAa,EACbC,OAAsD,EACtDC,uBAAgD,EAChDC,QAAa,EACbC,aAAsB,EACtBC,WAAoB,GAAG,KAAK,EACrCC,OAAY,GAAG,CAAC,CAAC;EACxB;AACR;AACA;EACwBC,aAA0F,EAC1FC,YAA0B,EAC1BC,aAAwC,EACxCC,cAAwB,EACxBC,UAAqC,EACvD;IAAA,KA/BcC,SAAS,GAAc,IAAIC,0BAAS,CAAC,CAAC;IAAA,KACtCC,WAAW,GAAGC,mBAAY;IAAA,KAO1BC,gBAAgB,GAAG,IAAIrB,GAAG,CAAoE,CAAC;IAAA,KA2ExGsB,KAAK,GAAmB,EAAE;IAAA,KAO1BC,aAAa,GAA8B,EAAE;IAAA,KAQ7CC,SAAS,GAAgC,EAAE;IAAA,KAC3CC,SAAS,GAAY,KAAK;IAAA,KAC1BC,WAAW,GAAgB,CAAC,CAAC;IAAA,KAC7BC,MAAM,GAAkD,CAAC,CAAC;IAAA,KACjDC,WAAW,GAAoC,IAAIC,aAAO,CAAC,CAAC;IAAA,KACpEC,WAAW,GAAmC,IAAI,CAACF,WAAW,CACjEG,IAAI,CACD,IAAAC,mBAAQ,EAACC,eAAe,IAAIA,eAAe,CAACC,MAAM,CACtD,CAAC;IAAA,KAWEC,YAAY,GAAoBC,4BAAqB;IAAA,KAKrDC,oBAAoB,GAA8DD,4BAAqB;IAAA,KAUvGE,mBAAmB,GAAyB,IAAIC,0BAAY,CAAC,EAAE,GAAG,IAAI,CAAC;IAAA,KAzH1DnC,IAAY,GAAZA,IAAY;IAAA,KAKZC,KAAa,GAAbA,KAAa;IAAA,KACbC,OAAsD,GAAtDA,OAAsD;IAAA,KACtDC,uBAAgD,GAAhDA,uBAAgD;IAAA,KAChDC,QAAa,GAAbA,QAAa;IAAA,KACbC,aAAsB,GAAtBA,aAAsB;IAAA,KACtBC,WAAoB,GAApBA,WAAoB;IAAA,KAC7BC,OAAY,GAAZA,OAAY;IAAA,KAIHC,aAA0F,GAA1FA,aAA0F;IAAA,KAC1FC,YAA0B,GAA1BA,YAA0B;IAAA,KAC1BC,aAAwC,GAAxCA,aAAwC;IAAA,KACxC
C,cAAwB,GAAxBA,cAAwB;IAAA,KACxBC,UAAqC,GAArCA,UAAqC;IAErDf,QAAQ,EAAE;;IAEV;AACR;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;IACQ,IAAI,IAAI,CAACG,IAAI,KAAK,gBAAgB,EAAE;MAChC;AACZ;AACA;AACA;AACA;MACY,IAAI,CAACQ,aAAa,GAAG,IAAA4B,0CAAyB,EAC1C,IAAI,CAACC,YAAY,EACjB7B,aAAa,EACb8B,8CACJ,CAAC;;MAED;AACZ;AACA;AACA;AACA;AACA;AACA;AACA;MACY,IAAI,CAACL,oBAAoB,GAAG,IAAAM,yDAAgC,EAAC,IAAI,CAACF,YAAY,CAAC,CAC1EG,KAAK,CAACC,GAAG,IAAI,IAAI,CAACtB,aAAa,CAACuB,IAAI,CAACD,GAAG,CAAQ,CAAC;MACtD,IAAI,CAACV,YAAY,GAAG,IAAI,CAACE,oBAAoB,CACxCU,IAAI,CAACC,GAAG,IAAIA,GAAG,CAACC,IAAI,CAAC5C,KAAK,CAAC,CAC3BuC,KAAK,CAACC,GAAG,IAAI,IAAI,CAACtB,aAAa,CAACuB,IAAI,CAACD,GAAG,CAAQ,CAAC;IAC1D;EACJ;EAAC,IAAAK,MAAA,GAAAhD,cAAA,CAAAiD,SAAA;EAAAD,MAAA,CAMME,oBAAoB,GAA3B,SAAAA,qBAAA,EAA+D;IAC3D,IAAI,CAAC,IAAI,CAACpC,UAAU,EAAE;MAClB,MAAM,IAAAqC,mBAAU,EAAC,MAAM,EAAE;QAAEC,QAAQ,EAAE,IAAI,CAAClD;MAAK,CAAC,CAAC;IACrD;IACA,OAAO,IAAI,CAACY,UAAU;EAC1B;;EAIA;AACJ;AACA;AACA;AACA;;EAGI;AACJ;AACA;AACA;AACA;AACA;;EAWI;AACJ;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;EAEI;AACJ;AACA;AACA;;EAGI;AACJ;AACA;AACA;AACA;AACA;AACA;AACA,KAPI;EAUA;AACJ;AACA;AACA;AACA;AACA;AACA;EANIkC,MAAA,CAOAK,KAAK,GAAL,SAAAA,MAAMtB,eAAuC,EAAE;IAC3C,IAAI,IAAI,CAACK,mBAAmB,CAACkB,GAAG,CAACvB,eAAe,CAACwB,EAAE,CAAC,EAAE;MAClD;IACJ;IACA,IAAI,CAACnB,mBAAmB,CAACoB,GAAG,CAACzB,eAAe,CAACwB,EAAE,CAAC;;IAEhD;IACA,IAAI,CAAC7B,WAAW,CAAC+B,IAAI,CAAC1B,eAAe,CAAC;EAC1C;;EAEA;AACJ;AACA,KAFI;EAAAiB,MAAA,CAGMU,mBAAmB,GAAzB,eAAAA,oBAA0BxD,IAAY,EAAEyD,MAAW,EAAiB;IAChE,IAAMb,GAAG,GAAG,MAAM,IAAAc,kCAAiB,EAC/B,IAAI,CAAClD,aAAa,EAClB,IAAAmD,wDAA+B,EAC3B,IAAAC,+CAAsB,EAAC5D,IAAI,EAAEyD,MAAM,CAAC,EACpCI,oDACJ,CACJ,CAAC;IACD,IAAI,CAACjB,GAAG,EAAE;MACN,MAAM,IAAAK,mBAAU,EAAC,KAAK,EAAE;QAAEjD,IAAI;QAAEyD;MAAO,CAAC,CAAC;IAC7C;IACA,IAAMK,QAAQ,GAAG,IAAAC,qCAAoB,EAACnB,GAAG,CAAC;IAC1CkB,QAAQ,CAACE,QAAQ,GAAG,IAAI;IAExB,MAAM,IAAI,CAACxD,aAAa,CAACyD,SAAS,CAAC,CAAC;MAChCC,QAAQ,EAAEJ,QAAQ;MAClBK,QAAQ,EAAEvB;IACd,CAAC,CAAC,EAAE,+BAA+B,CAAC;EACxC;;EAEA;AACJ;AACA;AACA;AACA;AACA,KALI;EAAAE,MAAA,CAMMsB,
cAAc,GAApB,eAAAA,eAAgEC,kBAE/D,EAA6F;IAC1F,IAAMC,WAAqE,GAAG,CAAC,CAAQ;IACvF,IAAMC,OAA6D,GAAG,CAAC,CAAQ;IAC/E,IAAMC,WAA2D,GAAG,EAAE;IACtE,IAAMC,uBAA4B,GAAG,CAAC,CAAC;IAEvC,MAAMC,OAAO,CAACC,GAAG,CACbC,MAAM,CAACC,OAAO,CAACR,kBAAkB,CAAC,CAACS,GAAG,CAAC,OAAO,CAAC9E,IAAI,EAAE+E,IAAI,CAAC,KAAK;MAC3D,IAAMC,cAAwC,GAAGhF,IAAW;MAC5D,IAAMiF,YAAY,GAAIF,IAAI,CAA8BtB,MAAM;MAC9Da,WAAW,CAACU,cAAc,CAAC,GAAGC,YAAY;MAC1C,IAAMxB,MAAM,GAAG,IAAAyB,wBAAc,EAACD,YAAY,EAAE,IAAI,CAACxE,YAAY,CAAC;MAC9D8D,OAAO,CAACS,cAAc,CAAC,GAAGvB,MAAM;;MAEhC;MACA,IAAK,IAAI,CAACnC,WAAW,CAAStB,IAAI,CAAC,EAAE;QACjC,MAAM,IAAAiD,mBAAU,EAAC,KAAK,EAAE;UACpBjD;QACJ,CAAC,CAAC;MACN;MAEA,IAAMmF,yBAAyB,GAAG,IAAAvB,+CAAsB,EAAC5D,IAAI,EAAEiF,YAAY,CAAC;MAC5E,IAAMG,iBAAiE,GAAG;QACtE/B,EAAE,EAAE,IAAAM,wDAA+B,EAC/BwB,yBAAyB,EACzBtB,oDACJ,CAAC;QACDwB,GAAG,EAAEF,yBAAyB;QAC9BG,OAAO,EAAEzB,oDAA2B;QACpChB,IAAI,EAAE;UACF7C,IAAI,EAAEgF,cAAqB;UAC3BO,UAAU,EAAE,MAAM9B,MAAM,CAAC+B,IAAI;UAC7B/B,MAAM,EAAEA,MAAM,CAACgC,UAAU;UACzBC,OAAO,EAAEjC,MAAM,CAACiC,OAAO;UACvBC,iBAAiB,EAAE;QACvB,CAAC;QACD3B,QAAQ,EAAE,KAAK;QACf4B,KAAK,EAAE,IAAAC,+BAAwB,EAAC,CAAC;QACjCC,IAAI,EAAE,IAAAC,yBAAkB,EAAC,CAAC;QAC1BC,YAAY,EAAE,CAAC;MACnB,CAAC;MACDxB,WAAW,CAAC9B,IAAI,CAAC;QACbwB,QAAQ,EAAEkB;MACd,CAAC,CAAC;MAEF,IAAMa,OAAY,GAAGrB,MAAM,CAACsB,MAAM,CAC9B,CAAC,CAAC,EACFnB,IAAI,EACJ;QACI/E,IAAI,EAAEgF,cAAc;QACpBvB,MAAM;QACNP,QAAQ,EAAE;MACd,CACJ,CAAC;;MAED;MACA,IAAMiD,QAAsD,GAAG,IAAAC,gBAAS,EAACrB,IAAI,CAAQ;MACpFoB,QAAQ,CAASjD,QAAQ,GAAG,IAAI;MACjCiD,QAAQ,CAACnG,IAAI,GAAGA,IAAI;MACpB,IAAAqG,qBAAc,EAAC,uBAAuB,EAAEF,QAAQ,CAAC;MACjDF,OAAO,CAACK,eAAe,GAAGH,QAAQ,CAACG,eAAe;MAElD7B,uBAAuB,CAACO,cAAc,CAAC,GAAGiB,OAAO;IACrD,CAAC,CACL,CAAC;IAGD,IAAMM,aAAa,GAAG,MAAM,IAAI,CAAC/F,aAAa,CAACyD,SAAS,CACpDO,WAAW,EACX,4BACJ,CAAC;IAED,MAAMgC,qBAAqB,CAAC,IAAI,CAAC;IAEjC,MAAM9B,OAAO,CAACC,GAAG,CACb4B,aAAa,CAACE,KAAK,CAAC3B,GAAG,CAAC,MAAO2B,KAAK,IAAK;MACrC,IAAIA,KAAK,CAACC,MAAM,KAAK,GAAG,EAAE;QACtB,MAAM,IAAAzD,mBAAU,EAAC,MAAM,EAAE;UACrBC,QAAQ,EAAE,IAAI,CAAClD,IAAI;UACnB2G,UAAU,EAAEF;QAChB,CA
AC,CAAC;MACN;MACA,IAAMG,OAAuD,GAAG,IAAAC,qBAAc,EAACJ,KAAK,CAACK,YAAY,CAAC;MAClG,IAAM9B,cAAc,GAAG4B,OAAO,CAAC/D,IAAI,CAAC7C,IAAI;MACxC,IAAMyD,MAAM,GAAIc,OAAO,CAASS,cAAc,CAAC;MAC/C;MACA,IAAI4B,OAAO,CAAC/D,IAAI,CAAC0C,UAAU,MAAK,MAAM9B,MAAM,CAAC+B,IAAI,GAAE;QAC/C,MAAM,IAAAvC,mBAAU,EAAC,KAAK,EAAE;UACpBC,QAAQ,EAAE,IAAI,CAAClD,IAAI;UACnB+G,UAAU,EAAE/B,cAAc;UAC1BgC,kBAAkB,EAAEJ,OAAO,CAAC/D,IAAI,CAAC0C,UAAU;UAC3CA,UAAU,EAAE,MAAM9B,MAAM,CAAC+B,IAAI;UAC7ByB,cAAc,EAAEL,OAAO,CAAC/D,IAAI,CAACY,MAAM;UACnCA,MAAM,EAAE,IAAAoD,qBAAc,EAAEvC,WAAW,CAASU,cAAc,CAAC;QAC/D,CAAC,CAAC;MACN;IACJ,CAAC,CACL,CAAC;IAED,IAAMkC,GAAqF,GAAG,CAAC,CAAQ;IACvG,MAAMxC,OAAO,CAACC,GAAG,CACbC,MAAM,CAACuC,IAAI,CAAC9C,kBAAkB,CAAC,CAACS,GAAG,CAAC,MAAOE,cAAc,IAAK;MAC1D,IAAMiB,OAAO,GAAGxB,uBAAuB,CAACO,cAAc,CAAC;MACvD,IAAM+B,UAAU,GAAG,MAAM,IAAAK,gCAAkB,EAACnB,OAAO,CAAC;MACnDiB,GAAG,CAASlC,cAAc,CAAC,GAAG+B,UAAU;;MAEzC;MACC,IAAI,CAACzF,WAAW,CAAS0D,cAAc,CAAC,GAAG+B,UAAU;MACtD,IAAI,CAAE,IAAI,CAAS/B,cAAc,CAAC,EAAE;QAChCJ,MAAM,CAACyC,cAAc,CAAC,IAAI,EAAErC,cAAc,EAAE;UACxCsC,GAAG,EAAEA,CAAA,KAAO,IAAI,CAAChG,WAAW,CAAS0D,cAAc;QACvD,CAAC,CAAC;MACN;IACJ,CAAC,CACL,CAAC;IAED,OAAOkC,GAAG;EACd;;EAEA;AACJ;AACA,KAFI;EAAApE,MAAA,CAGAyE,SAAS,GAAT,SAAAA,UAAaC,EAAyB,EAA2C;IAC7E,OAAO,IAAI,CAAC3G,SAAS,CAAC4G,QAAQ,CAACD,EAAE,CAAC;EACtC,CAAC;EAAA1E,MAAA,CAED4E,kBAAkB,GAAlB,SAAAA,mBAAA,EAAqB;IACjB,OAAO,IAAI,CAAC7G,SAAS,CAAC6G,kBAAkB,CAAC,CAAC;EAC9C;;EAEA;AACJ;AACA,KAFI;EAAA5E,MAAA,CAKA6E,UAAU,GAAV,SAAAA,WAAWC,YAAuB,EAAgB;IAC9C,MAAM,IAAAC,oBAAa,EAAC,WAAW,CAAC;EACpC,CAAC;EAAA/E,MAAA,CAEDgF,QAAQ,GAAR,SAAAA,SAAkBC,KAAc,EAAmC;IAC/D,MAAM,IAAAF,oBAAa,EAAC,OAAO,CAAC;EAChC;;EAEA;AACJ;AACA;AACA;AACA;AACA,KALI;EAAA/E,MAAA,CAMAkF,UAAU,GAAV,SAAAA,WAAWC,aAA6C,EAAiB;IACrE,MAAM,IAAAJ,oBAAa,EAAC,WAAW,CAAC;EACpC,CAAC;EAAA/E,MAAA,CAEDoF,MAAM,GAAN,SAAAA,OAAOC,QAAuB,EAAiB;IAC3C,MAAM,IAAAN,oBAAa,EAAC,QAAQ,CAAC;EACjC,CAAC;EAAA/E,MAAA,CAEMsF,aAAa,GAApB,SAAAA,cAAA,EAAsC;IAClC,MAAM,IAAAP,oBAAa,EAAC,iBAAiB,CAAC;EAC1C,CAAC;EAAA/E,MAAA,CAEMuF,QAAQ,GAAf,SAAAA,SAAA,EAA2B;I
ACvB,MAAM,IAAAR,oBAAa,EAAC,iBAAiB,CAAC;EAC1C;EACA;AACJ;AACA,KAFI;EAAA/E,MAAA,CAGOwF,iBAAiB,GAAxB,SAAAA,kBAAA,EAA6C;IACzC,MAAM,IAAAT,oBAAa,EAAC,iBAAiB,CAAC;EAC1C,CAAC;EAAA/E,MAAA,CAEMyF,eAAe,GAAtB,SAAAA,gBAAA,EAAyD;IACrD,MAAM,IAAAV,oBAAa,EAAC,kBAAkB,CAAC;EAC3C;;EAEA;AACJ;AACA,KAFI;EAAA/E,MAAA,CAGa0F,OAAO,GAApB,eAAAA,QAAA,EAAyC;IACrC,IAAI,IAAI,CAACnH,SAAS,EAAE;MAChB,OAAOW,4BAAqB;IAChC;;IAEA;IACA,IAAI,CAACX,SAAS,GAAG,IAAI;IAErB,MAAM,IAAAoH,0BAAmB,EAAC,sBAAsB,EAAE,IAAI,CAAC;IACvD;AACR;AACA;AACA;IACQ,IAAI,CAACjH,WAAW,CAACkH,QAAQ,CAAC,CAAC;IAE3B7I,QAAQ,EAAE;IACV,IAAI,CAACqB,KAAK,CAAC4D,GAAG,CAAC6D,GAAG,IAAIA,GAAG,CAACC,WAAW,CAAC,CAAC,CAAC;;IAExC;AACR;AACA;AACA;AACA;AACA;IACQ,IAAI,IAAI,CAAC5I,IAAI,KAAK,gBAAgB,EAAE;MAChC,OAAOgC,4BAAqB;IAChC;;IAEA;AACR;AACA;IACQ,OAAO,IAAI,CAAC0F,kBAAkB,CAAC,CAAC,CAC3B/E,IAAI,CAAC,MAAM+B,OAAO,CAACC,GAAG,CAAC,IAAI,CAACvD,SAAS,CAAC0D,GAAG,CAAC0C,EAAE,IAAIA,EAAE,CAAC,CAAC,CAAC,CAAC;IACvD;IAAA,CACC7E,IAAI,CAAC,MAAM+B,OAAO,CAACC,GAAG,CACnBC,MAAM,CAACuC,IAAI,CAAC,IAAI,CAAC7F,WAAkB,CAAC,CAC/BwD,GAAG,CAACO,GAAG,IAAK,IAAI,CAAC/D,WAAW,CAAS+D,GAAG,CAAC,CAAC,CAC1CP,GAAG,CAAC+D,GAAG,IAAIA,GAAG,CAACL,OAAO,CAAC,CAAC,CACjC,CAAC;IACD;IAAA,CACC7F,IAAI,CAAC,MAAM,IAAI,CAACnC,aAAa,CAACsI,KAAK,CAAC,CAAC;IACtC;IAAA,CACCnG,IAAI,CAAC,MAAMhD,mBAAmB,CAACoJ,MAAM,CAAC,IAAI,CAAC7I,OAAO,CAACF,IAAI,GAAG,GAAG,GAAG,IAAI,CAACA,IAAI,CAAC,CAAC,CAC3E2C,IAAI,CAAC,MAAM,IAAI,CAAC;EACzB;;EAEA;AACJ;AACA;AACA,KAHI;EAAAG,MAAA,CAIAkG,MAAM,GAAN,SAAAA,OAAA,EAA4B;IACxB,OAAO,IAAI,CACNR,OAAO,CAAC,CAAC,CACT7F,IAAI,CAAC,MAAMsG,gBAAgB,CAAC,IAAI,CAACjJ,IAAI,EAAE,IAAI,CAACE,OAAO,EAAE,IAAI,CAACE,QAAQ,CAAC,CAAC;EAC7E,CAAC;EAAA,WAAA8I,aAAA,CAAAC,OAAA,EAAArJ,cAAA;IAAAuF,GAAA;IAAAiC,GAAA,EAtVD,SAAAA,CAAA,EAAwC;MACpC,OAAO,IAAI,CAAC5F,WAAW;IAC3B;EAAC;IAAA2D,GAAA;IAAAiC,GAAA,EAsVD,SAAAA,CAAA,EAKE;MACE,OAAO,IAAI;IACf;EAAC;AAAA;AAGL;AACA;AACA;AACA;AACA,SAAS8B,uBAAuBA,CAC5BpJ,IAAY,EACZE,OAA4B,EAC9B;EACE,IAAMmF,GAAG,GAAGnF,OAAO,CAACF,IAAI,GAAG,GAAG,GAAGA,IAAI;EACrC,IAAI,CAACL,mBAAmB,CAACyD,GAAG,CAACiC,GAAG,CA
AC,EAAE;IAC/B;EACJ,CAAC,MAAM;IACH,MAAM,IAAApC,mBAAU,EAAC,KAAK,EAAE;MACpBjD,IAAI;MACJE,OAAO,EAAEA,OAAO,CAACF,IAAI;MACrBqJ,IAAI,EAAE;IACV,CAAC,CAAC;EACN;AACJ;;AAEA;AACA;AACA;AACA;AACO,eAAeC,+BAA+BA,CACjDC,qBAA6B,EAC7BrJ,OAAsD,EACtDsJ,YAAoB,EACpBjJ,OAAgC,EAChCF,aAAsB,EACtBD,QAAiB,EACmE;EACpF,IAAMI,aAAa,GAAG,MAAMN,OAAO,CAACuJ,qBAAqB,CACrD;IACIF,qBAAqB;IACrBC,YAAY;IACZxE,cAAc,EAAE0E,sCAAqB;IACrCjG,MAAM,EAAEnB,8CAAqB;IAC7B/B,OAAO;IACPF,aAAa;IACbD,QAAQ;IACRuJ,OAAO,EAAEC,0BAAY,CAACC,SAAS,CAAC;EACpC,CACJ,CAAC;EACD,OAAOrJ,aAAa;AACxB;AAEO,SAASsJ,gBAAgBA,CAM5B;EACI5J,OAAO;EACPC,uBAAuB;EACvBH,IAAI;EACJI,QAAQ;EACRC,aAAa,GAAG,IAAI;EACpBC,WAAW,GAAG,IAAI;EAClByJ,eAAe,GAAG,KAAK;EACvBxJ,OAAO,GAAG,CAAC,CAAC;EACZG,aAAa;EACbC,cAAc,GAAG,KAAK;EACtBqJ,cAAc,GAAG,KAAK;EACtBvJ,YAAY,GAAGwJ,wBAAiB;EAChCrJ;AAC+D,CAAC,EAGtE;EACE,IAAAyF,qBAAc,EAAC,qBAAqB,EAAE;IAClCnG,OAAO;IACPC,uBAAuB;IACvBH,IAAI;IACJI,QAAQ;IACRC,aAAa;IACbC,WAAW;IACXyJ,eAAe;IACfxJ,OAAO;IACPyJ;EACJ,CAAC,CAAC;EACF;EACA,IAAI,CAACD,eAAe,EAAE;IAClBX,uBAAuB,CAACpJ,IAAI,EAAEE,OAAO,CAAC;EAC1C;EACAP,mBAAmB,CAAC2D,GAAG,CAACpD,OAAO,CAACF,IAAI,GAAG,GAAG,GAAGA,IAAI,CAAC;EAElD,IAAMuJ,qBAAqB,GAAG,IAAAW,wBAAiB,EAAC,EAAE,CAAC;EAEnD,OAAOZ,+BAA+B,CAIlCC,qBAAqB,EACrBrJ,OAAO,EACPF,IAAI,EACJG,uBAAuB,EACvBE,aAAa,EACbD,QACJ;EACI;AACR;AACA;AACA;AACA;AACA,KALQ,CAMCoC,KAAK,CAACC,GAAG,IAAI;IACV9C,mBAAmB,CAACoJ,MAAM,CAAC7I,OAAO,CAACF,IAAI,GAAG,GAAG,GAAGA,IAAI,CAAC;IACrD,MAAMyC,GAAG;EACb,CAAC,CAAC,CACDE,IAAI,CAACwH,eAAe,IAAI;IACrB,IAAMC,UAAmC,GAAG,IAAItK,cAAc,CAC1DE,IAAI,EACJuJ,qBAAqB,EACrBrJ,OAAO,EACPC,uBAAuB,EACvBC,QAAQ,EACRC,aAAa,EACbC,WAAW,EACXC,OAAO,EACP4J,eAAe,EACf1J,YAAY,EACZC,aAAa,EACbC,cAAc,EACdC,UACJ,CAAQ;IAER,OAAO,IAAA6H,0BAAmB,EAAC,kBAAkB,EAAE;MAC3CvF,QAAQ,EAAEkH,UAAU;MACpBC,OAAO,EAAE;QACLnK,OAAO;QACPC,uBAAuB;QACvBH,IAAI;QACJI,QAAQ;QACRC,aAAa;QACbC,WAAW;QACXyJ,eAAe;QACfxJ,OAAO;QACPyJ;MACJ;IACJ,CAAC,CAAC,CAACrH,IAAI,CAAC,MAAMyH,UAAU,CAAC;EAC7B,CAAC,CAAC;AACV;;AAEA;AACA;AACA;AACA;AACA;AACA;AACO,eAAenB,gBAAgBA,CAClCO,YAAoB,EACpBtJ,OAA4B,EAC5BE,QAAiB,EA
CA;EACjB,IAAMmJ,qBAAqB,GAAG,IAAAW,wBAAiB,EAAC,EAAE,CAAC;EACnD,IAAMI,0BAA0B,GAAG,MAAMhB,+BAA+B,CACpEC,qBAAqB,EACrBrJ,OAAO,EACPsJ,YAAY,EACZ,CAAC,CAAC,EACF,KAAK,EACLpJ,QACJ,CAAC;EACD,IAAMmK,cAAc,GAAG,MAAM,IAAAC,kDAAyB,EAACF,0BAA0B,CAAC;EAClF,IAAMG,eAAe,GAAG,IAAI7K,GAAG,CAAS,CAAC;EACzC2K,cAAc,CAACG,OAAO,CAAC9H,GAAG,IAAI6H,eAAe,CAACnH,GAAG,CAACV,GAAG,CAACC,IAAI,CAAC7C,IAAI,CAAC,CAAC;EACjE,IAAM2K,sBAAgC,GAAGC,KAAK,CAACC,IAAI,CAACJ,eAAe,CAAC;EAEpE,MAAM/F,OAAO,CAACC,GAAG,CACbgG,sBAAsB,CAAC7F,GAAG,CAACE,cAAc,IAAI,IAAA8F,4CAAwB,EACjE5K,OAAO,EACPoK,0BAA0B,EAC1Bf,qBAAqB,EACrBC,YAAY,EACZxE,cAAc,EACd5E,QACJ,CAAC,CACL,CAAC;EAED,MAAM,IAAAqI,0BAAmB,EAAC,sBAAsB,EAAE;IAC9Ce,YAAY;IACZtJ;EACJ,CAAC,CAAC;EAEF,MAAMoK,0BAA0B,CAACtB,MAAM,CAAC,CAAC;EACzC,OAAO2B,sBAAsB;AACjC;AAEO,SAASI,YAAYA,CAACC,GAAQ,EAAE;EACnC,OAAOA,GAAG,YAAYlL,cAAc;AACxC;AAEO,SAASmL,OAAOA,CAAA,EAAW;EAC9B,OAAOpL,QAAQ;AACnB;;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACO,eAAeqL,iCAAiCA,CACnDhI,QAAoB,EACJ;EAChB,IAAMiI,QAAQ,GAAG,MAAMjI,QAAQ,CAACjB,oBAAoB;EACpD,OAAOkJ,QAAQ,CAACtI,IAAI,CAACuI,aAAa,KAAKlI,QAAQ,CAACjD,KAAK;AACzD;;AAGA;AACA;AACA;AACA;AACA;AACA;AACO,eAAeuG,qBAAqBA,CACvC4D,UAA8C,EAChD;EACE,MAAMA,UAAU,CAACrI,YAAY;EAC7B,IAAIqI,UAAU,CAACjJ,aAAa,CAAC,CAAC,CAAC,EAAE;IAC7B,MAAMiJ,UAAU,CAACjJ,aAAa,CAAC,CAAC,CAAC;EACrC;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/rx-document-prototype-merge.js b/dist/cjs/rx-document-prototype-merge.js deleted file mode 100644 index 5ccbb31e72f..00000000000 --- a/dist/cjs/rx-document-prototype-merge.js +++ /dev/null @@ -1,87 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.createNewRxDocument = createNewRxDocument; -exports.getDocumentOrmPrototype = getDocumentOrmPrototype; -exports.getDocumentPrototype = getDocumentPrototype; -exports.getRxDocumentConstructor = getRxDocumentConstructor; -var _rxDocument = require("./rx-document.js"); -var _hooks = require("./hooks.js"); -var _overwritable = 
require("./overwritable.js"); -var _index = require("./plugins/utils/index.js"); -/** - * For the ORM capabilities, - * we have to merge the document prototype - * with the ORM functions and the data - * We do this iterating over the properties and - * adding them to a new object. - * In the future we should do this by chaining the __proto__ objects - */ - -var constructorForCollection = new WeakMap(); -function getDocumentPrototype(rxCollection) { - var schemaProto = rxCollection.schema.getDocumentPrototype(); - var ormProto = getDocumentOrmPrototype(rxCollection); - var baseProto = _rxDocument.basePrototype; - var proto = {}; - [schemaProto, ormProto, baseProto].forEach(obj => { - var props = Object.getOwnPropertyNames(obj); - props.forEach(key => { - var desc = Object.getOwnPropertyDescriptor(obj, key); - /** - * When enumerable is true, it will show on console dir(instance) - * To not pollute the output, only getters and methods are enumerable - */ - var enumerable = true; - if (key.startsWith('_') || key.endsWith('_') || key.startsWith('$') || key.endsWith('$')) enumerable = false; - if (typeof desc.value === 'function') { - // when getting a function, we automatically do a .bind(this) - Object.defineProperty(proto, key, { - get() { - return desc.value.bind(this); - }, - enumerable, - configurable: false - }); - } else { - desc.enumerable = enumerable; - desc.configurable = false; - if (desc.writable) desc.writable = false; - Object.defineProperty(proto, key, desc); - } - }); - }); - return proto; -} -function getRxDocumentConstructor(rxCollection) { - return (0, _index.getFromMapOrCreate)(constructorForCollection, rxCollection, () => (0, _rxDocument.createRxDocumentConstructor)(getDocumentPrototype(rxCollection))); -} - -/** - * Create a RxDocument-instance from the jsonData - * and the prototype merge. - * You should never call this method directly, - * instead you should get the document from collection._docCache.getCachedRxDocument(). 
- */ -function createNewRxDocument(rxCollection, documentConstructor, docData) { - var doc = (0, _rxDocument.createWithConstructor)(documentConstructor, rxCollection, _overwritable.overwritable.deepFreezeWhenDevMode(docData)); - rxCollection._runHooksSync('post', 'create', docData, doc); - (0, _hooks.runPluginHooks)('postCreateRxDocument', doc); - return doc; -} - -/** - * returns the prototype-object - * that contains the orm-methods, - * used in the proto-merge - */ -function getDocumentOrmPrototype(rxCollection) { - var proto = {}; - Object.entries(rxCollection.methods).forEach(([k, v]) => { - proto[k] = v; - }); - return proto; -} -//# sourceMappingURL=rx-document-prototype-merge.js.map \ No newline at end of file diff --git a/dist/cjs/rx-document-prototype-merge.js.map b/dist/cjs/rx-document-prototype-merge.js.map deleted file mode 100644 index fab03bf744d..00000000000 --- a/dist/cjs/rx-document-prototype-merge.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-document-prototype-merge.js","names":["_rxDocument","require","_hooks","_overwritable","_index","constructorForCollection","WeakMap","getDocumentPrototype","rxCollection","schemaProto","schema","ormProto","getDocumentOrmPrototype","baseProto","basePrototype","proto","forEach","obj","props","Object","getOwnPropertyNames","key","desc","getOwnPropertyDescriptor","enumerable","startsWith","endsWith","value","defineProperty","get","bind","configurable","writable","getRxDocumentConstructor","getFromMapOrCreate","createRxDocumentConstructor","createNewRxDocument","documentConstructor","docData","doc","createRxDocumentWithConstructor","overwritable","deepFreezeWhenDevMode","_runHooksSync","runPluginHooks","entries","methods","k","v"],"sources":["../../src/rx-document-prototype-merge.ts"],"sourcesContent":["/**\n * For the ORM capabilities,\n * we have to merge the document prototype\n * with the ORM functions and the data\n * We do this iterating over the properties and\n * adding them to a new 
object.\n * In the future we should do this by chaining the __proto__ objects\n */\n\nimport type {\n RxCollection,\n RxDocument,\n RxDocumentData\n} from './types/index.d.ts';\nimport {\n createRxDocumentConstructor,\n basePrototype,\n createWithConstructor as createRxDocumentWithConstructor\n} from './rx-document.ts';\nimport {\n runPluginHooks\n} from './hooks.ts';\nimport { overwritable } from './overwritable.ts';\nimport { getFromMapOrCreate } from './plugins/utils/index.ts';\n\nconst constructorForCollection = new WeakMap();\n\nexport function getDocumentPrototype(\n rxCollection: RxCollection\n): any {\n const schemaProto = rxCollection.schema.getDocumentPrototype();\n const ormProto = getDocumentOrmPrototype(rxCollection);\n const baseProto = basePrototype;\n const proto = {};\n [\n schemaProto,\n ormProto,\n baseProto\n ].forEach(obj => {\n const props = Object.getOwnPropertyNames(obj);\n props.forEach(key => {\n const desc: any = Object.getOwnPropertyDescriptor(obj, key);\n /**\n * When enumerable is true, it will show on console dir(instance)\n * To not pollute the output, only getters and methods are enumerable\n */\n let enumerable = true;\n if (\n key.startsWith('_') ||\n key.endsWith('_') ||\n key.startsWith('$') ||\n key.endsWith('$')\n ) enumerable = false;\n\n if (typeof desc.value === 'function') {\n // when getting a function, we automatically do a .bind(this)\n Object.defineProperty(proto, key, {\n get() {\n return desc.value.bind(this);\n },\n enumerable,\n configurable: false\n });\n\n } else {\n desc.enumerable = enumerable;\n desc.configurable = false;\n if (desc.writable)\n desc.writable = false;\n Object.defineProperty(proto, key, desc);\n }\n });\n });\n return proto;\n}\n\nexport function getRxDocumentConstructor(\n rxCollection: RxCollection\n) {\n return getFromMapOrCreate(\n constructorForCollection,\n rxCollection,\n () => createRxDocumentConstructor(\n getDocumentPrototype(rxCollection as any)\n )\n );\n}\n\n/**\n * Create a 
RxDocument-instance from the jsonData\n * and the prototype merge.\n * You should never call this method directly,\n * instead you should get the document from collection._docCache.getCachedRxDocument().\n */\nexport function createNewRxDocument(\n rxCollection: RxCollection,\n documentConstructor: any,\n docData: RxDocumentData\n): RxDocument {\n const doc = createRxDocumentWithConstructor(\n documentConstructor,\n rxCollection as any,\n overwritable.deepFreezeWhenDevMode(docData as any)\n );\n rxCollection._runHooksSync('post', 'create', docData, doc);\n runPluginHooks('postCreateRxDocument', doc);\n return doc as any;\n}\n\n\n/**\n * returns the prototype-object\n * that contains the orm-methods,\n * used in the proto-merge\n */\nexport function getDocumentOrmPrototype(rxCollection: RxCollection): any {\n const proto: any = {};\n Object\n .entries(rxCollection.methods)\n .forEach(([k, v]) => {\n proto[k] = v;\n });\n return proto;\n}\n"],"mappings":";;;;;;;;;AAcA,IAAAA,WAAA,GAAAC,OAAA;AAKA,IAAAC,MAAA,GAAAD,OAAA;AAGA,IAAAE,aAAA,GAAAF,OAAA;AACA,IAAAG,MAAA,GAAAH,OAAA;AAvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAkBA,IAAMI,wBAAwB,GAAG,IAAIC,OAAO,CAAC,CAAC;AAEvC,SAASC,oBAAoBA,CAChCC,YAA0B,EACvB;EACH,IAAMC,WAAW,GAAGD,YAAY,CAACE,MAAM,CAACH,oBAAoB,CAAC,CAAC;EAC9D,IAAMI,QAAQ,GAAGC,uBAAuB,CAACJ,YAAY,CAAC;EACtD,IAAMK,SAAS,GAAGC,yBAAa;EAC/B,IAAMC,KAAK,GAAG,CAAC,CAAC;EAChB,CACIN,WAAW,EACXE,QAAQ,EACRE,SAAS,CACZ,CAACG,OAAO,CAACC,GAAG,IAAI;IACb,IAAMC,KAAK,GAAGC,MAAM,CAACC,mBAAmB,CAACH,GAAG,CAAC;IAC7CC,KAAK,CAACF,OAAO,CAACK,GAAG,IAAI;MACjB,IAAMC,IAAS,GAAGH,MAAM,CAACI,wBAAwB,CAACN,GAAG,EAAEI,GAAG,CAAC;MAC3D;AACZ;AACA;AACA;MACY,IAAIG,UAAU,GAAG,IAAI;MACrB,IACIH,GAAG,CAACI,UAAU,CAAC,GAAG,CAAC,IACnBJ,GAAG,CAACK,QAAQ,CAAC,GAAG,CAAC,IACjBL,GAAG,CAACI,UAAU,CAAC,GAAG,CAAC,IACnBJ,GAAG,CAACK,QAAQ,CAAC,GAAG,CAAC,EACnBF,UAAU,GAAG,KAAK;MAEpB,IAAI,OAAOF,IAAI,CAACK,KAAK,KAAK,UAAU,EAAE;QAClC;QACAR,MAAM,CAACS,cAAc,CAACb,KAAK,EAAEM,GAAG,EAAE;UAC9BQ,GAAGA,CAAA,EAAG;YACF,OAAOP,IAAI,CAACK,KAAK,CAACG,IAA
I,CAAC,IAAI,CAAC;UAChC,CAAC;UACDN,UAAU;UACVO,YAAY,EAAE;QAClB,CAAC,CAAC;MAEN,CAAC,MAAM;QACHT,IAAI,CAACE,UAAU,GAAGA,UAAU;QAC5BF,IAAI,CAACS,YAAY,GAAG,KAAK;QACzB,IAAIT,IAAI,CAACU,QAAQ,EACbV,IAAI,CAACU,QAAQ,GAAG,KAAK;QACzBb,MAAM,CAACS,cAAc,CAACb,KAAK,EAAEM,GAAG,EAAEC,IAAI,CAAC;MAC3C;IACJ,CAAC,CAAC;EACN,CAAC,CAAC;EACF,OAAOP,KAAK;AAChB;AAEO,SAASkB,wBAAwBA,CACpCzB,YAA0C,EAC5C;EACE,OAAO,IAAA0B,yBAAkB,EACrB7B,wBAAwB,EACxBG,YAAY,EACZ,MAAM,IAAA2B,uCAA2B,EAC7B5B,oBAAoB,CAACC,YAAmB,CAC5C,CACJ,CAAC;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACO,SAAS4B,mBAAmBA,CAC/B5B,YAA8D,EAC9D6B,mBAAwB,EACxBC,OAAkC,EACI;EACtC,IAAMC,GAAG,GAAG,IAAAC,iCAA+B,EACvCH,mBAAmB,EACnB7B,YAAY,EACZiC,0BAAY,CAACC,qBAAqB,CAACJ,OAAc,CACrD,CAAC;EACD9B,YAAY,CAACmC,aAAa,CAAC,MAAM,EAAE,QAAQ,EAAEL,OAAO,EAAEC,GAAG,CAAC;EAC1D,IAAAK,qBAAc,EAAC,sBAAsB,EAAEL,GAAG,CAAC;EAC3C,OAAOA,GAAG;AACd;;AAGA;AACA;AACA;AACA;AACA;AACO,SAAS3B,uBAAuBA,CAACJ,YAA0B,EAAO;EACrE,IAAMO,KAAU,GAAG,CAAC,CAAC;EACrBI,MAAM,CACD0B,OAAO,CAACrC,YAAY,CAACsC,OAAO,CAAC,CAC7B9B,OAAO,CAAC,CAAC,CAAC+B,CAAC,EAAEC,CAAC,CAAC,KAAK;IACjBjC,KAAK,CAACgC,CAAC,CAAC,GAAGC,CAAC;EAChB,CAAC,CAAC;EACN,OAAOjC,KAAK;AAChB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/rx-document.js b/dist/cjs/rx-document.js deleted file mode 100644 index 5c32b97b3c1..00000000000 --- a/dist/cjs/rx-document.js +++ /dev/null @@ -1,376 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.basePrototype = void 0; -exports.beforeDocumentUpdateWrite = beforeDocumentUpdateWrite; -exports.createRxDocumentConstructor = createRxDocumentConstructor; -exports.createWithConstructor = createWithConstructor; -exports.isRxDocument = isRxDocument; -var _operators = require("rxjs/operators"); -var _index = require("./plugins/utils/index.js"); -var _rxError = require("./rx-error.js"); -var _hooks = require("./hooks.js"); -var _rxChangeEvent = require("./rx-change-event.js"); -var _overwritable = require("./overwritable.js"); -var _rxSchemaHelper = 
require("./rx-schema-helper.js"); -var _rxStorageHelper = require("./rx-storage-helper.js"); -var _incrementalWrite = require("./incremental-write.js"); -var basePrototype = exports.basePrototype = { - get primaryPath() { - var _this = this; - if (!_this.isInstanceOfRxDocument) { - return undefined; - } - return _this.collection.schema.primaryPath; - }, - get primary() { - var _this = this; - if (!_this.isInstanceOfRxDocument) { - return undefined; - } - return _this._data[_this.primaryPath]; - }, - get revision() { - var _this = this; - if (!_this.isInstanceOfRxDocument) { - return undefined; - } - return _this._data._rev; - }, - get deleted$() { - var _this = this; - if (!_this.isInstanceOfRxDocument) { - return undefined; - } - return _this.$.pipe((0, _operators.map)(d => d._data._deleted)); - }, - get deleted$$() { - var _this = this; - var reactivity = _this.collection.database.getReactivityFactory(); - return reactivity.fromObservable(_this.deleted$, _this.getLatest().deleted, _this.collection.database); - }, - get deleted() { - var _this = this; - if (!_this.isInstanceOfRxDocument) { - return undefined; - } - return _this._data._deleted; - }, - getLatest() { - var latestDocData = this.collection._docCache.getLatestDocumentData(this.primary); - return this.collection._docCache.getCachedRxDocument(latestDocData); - }, - /** - * returns the observable which emits the plain-data of this document - */ - get $() { - var _this = this; - return _this.collection.$.pipe((0, _operators.filter)(changeEvent => !changeEvent.isLocal), (0, _operators.filter)(changeEvent => changeEvent.documentId === this.primary), (0, _operators.map)(changeEvent => (0, _rxChangeEvent.getDocumentDataOfRxChangeEvent)(changeEvent)), (0, _operators.startWith)(_this.collection._docCache.getLatestDocumentData(this.primary)), (0, _operators.distinctUntilChanged)((prev, curr) => prev._rev === curr._rev), (0, _operators.map)(docData => this.collection._docCache.getCachedRxDocument(docData)), (0, 
_operators.shareReplay)(_index.RXJS_SHARE_REPLAY_DEFAULTS)); - }, - get $$() { - var _this = this; - var reactivity = _this.collection.database.getReactivityFactory(); - return reactivity.fromObservable(_this.$, _this.getLatest()._data, _this.collection.database); - }, - /** - * returns observable of the value of the given path - */ - get$(path) { - if (_overwritable.overwritable.isDevMode()) { - if (path.includes('.item.')) { - throw (0, _rxError.newRxError)('DOC1', { - path - }); - } - if (path === this.primaryPath) { - throw (0, _rxError.newRxError)('DOC2'); - } - - // final fields cannot be modified and so also not observed - if (this.collection.schema.finalFields.includes(path)) { - throw (0, _rxError.newRxError)('DOC3', { - path - }); - } - var schemaObj = (0, _rxSchemaHelper.getSchemaByObjectPath)(this.collection.schema.jsonSchema, path); - if (!schemaObj) { - throw (0, _rxError.newRxError)('DOC4', { - path - }); - } - } - return this.$.pipe((0, _operators.map)(data => (0, _index.getProperty)(data, path)), (0, _operators.distinctUntilChanged)()); - }, - get$$(path) { - var obs = this.get$(path); - var reactivity = this.collection.database.getReactivityFactory(); - return reactivity.fromObservable(obs, this.getLatest().get(path), this.collection.database); - }, - /** - * populate the given path - */ - populate(path) { - var schemaObj = (0, _rxSchemaHelper.getSchemaByObjectPath)(this.collection.schema.jsonSchema, path); - var value = this.get(path); - if (!value) { - return _index.PROMISE_RESOLVE_NULL; - } - if (!schemaObj) { - throw (0, _rxError.newRxError)('DOC5', { - path - }); - } - if (!schemaObj.ref) { - throw (0, _rxError.newRxError)('DOC6', { - path, - schemaObj - }); - } - var refCollection = this.collection.database.collections[schemaObj.ref]; - if (!refCollection) { - throw (0, _rxError.newRxError)('DOC7', { - ref: schemaObj.ref, - path, - schemaObj - }); - } - if (schemaObj.type === 'array') { - return refCollection.findByIds(value).exec().then(res 
=> { - var valuesIterator = res.values(); - return Array.from(valuesIterator); - }); - } else { - return refCollection.findOne(value).exec(); - } - }, - /** - * get data by objectPath - * @hotPath Performance here is really important, - * run some tests before changing anything. - */ - get(objPath) { - return (0, _index.getFromMapOrCreate)(this._propertyCache, objPath, () => { - var valueObj = (0, _index.getProperty)(this._data, objPath); - - // direct return if array or non-object - if (typeof valueObj !== 'object' || valueObj === null || Array.isArray(valueObj)) { - return _overwritable.overwritable.deepFreezeWhenDevMode(valueObj); - } - var _this = this; - var proxy = new Proxy( - /** - * In dev-mode, the _data is deep-frozen - * so we have to flat clone here so that - * the proxy can work. - */ - (0, _index.flatClone)(valueObj), { - get(target, property) { - if (typeof property !== 'string') { - return target[property]; - } - var lastChar = property.charAt(property.length - 1); - if (property.endsWith('$$')) { - var key = property.slice(0, -2); - return _this.get$$((0, _index.trimDots)(objPath + '.' + key)); - } else if (lastChar === '$') { - var _key = property.slice(0, -1); - return _this.get$((0, _index.trimDots)(objPath + '.' + _key)); - } else if (lastChar === '_') { - var _key2 = property.slice(0, -1); - return _this.populate((0, _index.trimDots)(objPath + '.' + _key2)); - } else { - return _this.get((0, _index.trimDots)(objPath + '.' 
+ property)); - } - } - }); - return proxy; - }); - }, - toJSON(withMetaFields = false) { - if (!withMetaFields) { - var data = (0, _index.flatClone)(this._data); - delete data._rev; - delete data._attachments; - delete data._deleted; - delete data._meta; - return _overwritable.overwritable.deepFreezeWhenDevMode(data); - } else { - return _overwritable.overwritable.deepFreezeWhenDevMode(this._data); - } - }, - toMutableJSON(withMetaFields = false) { - return (0, _index.clone)(this.toJSON(withMetaFields)); - }, - /** - * updates document - * @overwritten by plugin (optional) - * @param updateObj mongodb-like syntax - */ - update(_updateObj) { - throw (0, _index.pluginMissing)('update'); - }, - incrementalUpdate(_updateObj) { - throw (0, _index.pluginMissing)('update'); - }, - updateCRDT(_updateObj) { - throw (0, _index.pluginMissing)('crdt'); - }, - putAttachment() { - throw (0, _index.pluginMissing)('attachments'); - }, - getAttachment() { - throw (0, _index.pluginMissing)('attachments'); - }, - allAttachments() { - throw (0, _index.pluginMissing)('attachments'); - }, - get allAttachments$() { - throw (0, _index.pluginMissing)('attachments'); - }, - async modify(mutationFunction, - // used by some plugins that wrap the method - _context) { - var oldData = this._data; - var newData = await (0, _incrementalWrite.modifierFromPublicToInternal)(mutationFunction)(oldData); - return this._saveData(newData, oldData); - }, - /** - * runs an incremental update over the document - * @param function that takes the document-data and returns a new data-object - */ - incrementalModify(mutationFunction, - // used by some plugins that wrap the method - _context) { - return this.collection.incrementalWriteQueue.addWrite(this._data, (0, _incrementalWrite.modifierFromPublicToInternal)(mutationFunction)).then(result => this.collection._docCache.getCachedRxDocument(result)); - }, - patch(patch) { - var oldData = this._data; - var newData = (0, _index.clone)(oldData); - 
Object.entries(patch).forEach(([k, v]) => { - newData[k] = v; - }); - return this._saveData(newData, oldData); - }, - /** - * patches the given properties - */ - incrementalPatch(patch) { - return this.incrementalModify(docData => { - Object.entries(patch).forEach(([k, v]) => { - docData[k] = v; - }); - return docData; - }); - }, - /** - * saves the new document-data - * and handles the events - */ - async _saveData(newData, oldData) { - newData = (0, _index.flatClone)(newData); - - // deleted documents cannot be changed - if (this._data._deleted) { - throw (0, _rxError.newRxError)('DOC11', { - id: this.primary, - document: this - }); - } - await beforeDocumentUpdateWrite(this.collection, newData, oldData); - var writeResult = await this.collection.storageInstance.bulkWrite([{ - previous: oldData, - document: newData - }], 'rx-document-save-data'); - var isError = writeResult.error[0]; - (0, _rxStorageHelper.throwIfIsStorageWriteError)(this.collection, this.primary, newData, isError); - await this.collection._runHooks('post', 'save', newData, this); - return this.collection._docCache.getCachedRxDocument(writeResult.success[0]); - }, - /** - * Remove the document. - * Notice that there is no hard delete, - * instead deleted documents get flagged with _deleted=true. 
- */ - remove() { - var collection = this.collection; - if (this.deleted) { - return Promise.reject((0, _rxError.newRxError)('DOC13', { - document: this, - id: this.primary - })); - } - var deletedData = (0, _index.flatClone)(this._data); - var removedDocData; - return collection._runHooks('pre', 'remove', deletedData, this).then(async () => { - deletedData._deleted = true; - var writeResult = await collection.storageInstance.bulkWrite([{ - previous: this._data, - document: deletedData - }], 'rx-document-remove'); - var isError = writeResult.error[0]; - (0, _rxStorageHelper.throwIfIsStorageWriteError)(collection, this.primary, deletedData, isError); - return writeResult.success[0]; - }).then(removed => { - removedDocData = removed; - return this.collection._runHooks('post', 'remove', deletedData, this); - }).then(() => { - return this.collection._docCache.getCachedRxDocument(removedDocData); - }); - }, - incrementalRemove() { - return this.incrementalModify(async docData => { - await this.collection._runHooks('pre', 'remove', docData, this); - docData._deleted = true; - return docData; - }).then(async newDoc => { - await this.collection._runHooks('post', 'remove', newDoc._data, newDoc); - return newDoc; - }); - }, - destroy() { - throw (0, _rxError.newRxError)('DOC14'); - } -}; -function createRxDocumentConstructor(proto = basePrototype) { - var constructor = function RxDocumentConstructor(collection, docData) { - this.collection = collection; - - // assume that this is always equal to the doc-data in the database - this._data = docData; - this._propertyCache = new Map(); - - /** - * because of the prototype-merge, - * we can not use the native instanceof operator - */ - this.isInstanceOfRxDocument = true; - }; - constructor.prototype = proto; - return constructor; -} -function createWithConstructor(constructor, collection, jsonData) { - var doc = new constructor(collection, jsonData); - (0, _hooks.runPluginHooks)('createRxDocument', doc); - return doc; -} 
-function isRxDocument(obj) { - return typeof obj === 'object' && obj !== null && 'isInstanceOfRxDocument' in obj; -} -function beforeDocumentUpdateWrite(collection, newData, oldData) { - /** - * Meta values must always be merged - * instead of overwritten. - * This ensures that different plugins do not overwrite - * each others meta properties. - */ - newData._meta = Object.assign({}, oldData._meta, newData._meta); - - // ensure modifications are ok - if (_overwritable.overwritable.isDevMode()) { - collection.schema.validateChange(oldData, newData); - } - return collection._runHooks('pre', 'save', newData, oldData); -} -//# sourceMappingURL=rx-document.js.map \ No newline at end of file diff --git a/dist/cjs/rx-document.js.map b/dist/cjs/rx-document.js.map deleted file mode 100644 index d177a6e99e8..00000000000 --- a/dist/cjs/rx-document.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-document.js","names":["_operators","require","_index","_rxError","_hooks","_rxChangeEvent","_overwritable","_rxSchemaHelper","_rxStorageHelper","_incrementalWrite","basePrototype","exports","primaryPath","_this","isInstanceOfRxDocument","undefined","collection","schema","primary","_data","revision","_rev","deleted$","$","pipe","map","d","_deleted","deleted$$","reactivity","database","getReactivityFactory","fromObservable","getLatest","deleted","latestDocData","_docCache","getLatestDocumentData","getCachedRxDocument","filter","changeEvent","isLocal","documentId","getDocumentDataOfRxChangeEvent","startWith","distinctUntilChanged","prev","curr","docData","shareReplay","RXJS_SHARE_REPLAY_DEFAULTS","$$","get$","path","overwritable","isDevMode","includes","newRxError","finalFields","schemaObj","getSchemaByObjectPath","jsonSchema","data","getProperty","get$$","obs","get","populate","value","PROMISE_RESOLVE_NULL","ref","refCollection","collections","type","findByIds","exec","then","res","valuesIterator","values","Array","from","findOne","objPath","getFromMapOrCreate","_propertyCac
he","valueObj","isArray","deepFreezeWhenDevMode","proxy","Proxy","flatClone","target","property","lastChar","charAt","length","endsWith","key","slice","trimDots","toJSON","withMetaFields","_attachments","_meta","toMutableJSON","clone","update","_updateObj","pluginMissing","incrementalUpdate","updateCRDT","putAttachment","getAttachment","allAttachments","allAttachments$","modify","mutationFunction","_context","oldData","newData","modifierFromPublicToInternal","_saveData","incrementalModify","incrementalWriteQueue","addWrite","result","patch","Object","entries","forEach","k","v","incrementalPatch","id","document","beforeDocumentUpdateWrite","writeResult","storageInstance","bulkWrite","previous","isError","error","throwIfIsStorageWriteError","_runHooks","success","remove","Promise","reject","deletedData","removedDocData","removed","incrementalRemove","newDoc","destroy","createRxDocumentConstructor","proto","constructor","RxDocumentConstructor","Map","prototype","createWithConstructor","jsonData","doc","runPluginHooks","isRxDocument","obj","assign","validateChange"],"sources":["../../src/rx-document.ts"],"sourcesContent":["import {\n Observable\n} from 'rxjs';\nimport {\n distinctUntilChanged,\n filter,\n map,\n shareReplay,\n startWith\n} from 'rxjs/operators';\nimport {\n clone,\n trimDots,\n pluginMissing,\n flatClone,\n PROMISE_RESOLVE_NULL,\n RXJS_SHARE_REPLAY_DEFAULTS,\n getProperty,\n getFromMapOrCreate\n} from './plugins/utils/index.ts';\nimport {\n newRxError\n} from './rx-error.ts';\nimport {\n runPluginHooks\n} from './hooks.ts';\n\nimport type {\n RxDocument,\n RxCollection,\n RxDocumentData,\n RxDocumentWriteData,\n UpdateQuery,\n CRDTEntry,\n ModifyFunction\n} from './types/index.d.ts';\nimport { getDocumentDataOfRxChangeEvent } from './rx-change-event.ts';\nimport { overwritable } from './overwritable.ts';\nimport { getSchemaByObjectPath } from './rx-schema-helper.ts';\nimport { throwIfIsStorageWriteError } from './rx-storage-helper.ts';\nimport { 
modifierFromPublicToInternal } from './incremental-write.ts';\n\nexport const basePrototype = {\n get primaryPath() {\n const _this: RxDocument = this as any;\n if (!_this.isInstanceOfRxDocument) {\n return undefined;\n }\n return _this.collection.schema.primaryPath;\n },\n get primary() {\n const _this: RxDocument = this as any;\n if (!_this.isInstanceOfRxDocument) {\n return undefined;\n }\n return (_this._data as any)[_this.primaryPath];\n },\n get revision() {\n const _this: RxDocument = this as any;\n if (!_this.isInstanceOfRxDocument) {\n return undefined;\n }\n return _this._data._rev;\n },\n get deleted$() {\n const _this: RxDocument = this as any;\n if (!_this.isInstanceOfRxDocument) {\n return undefined;\n }\n return _this.$.pipe(\n map((d: any) => d._data._deleted)\n );\n },\n get deleted$$() {\n const _this: RxDocument = this as any;\n const reactivity = _this.collection.database.getReactivityFactory();\n return reactivity.fromObservable(\n _this.deleted$,\n _this.getLatest().deleted,\n _this.collection.database\n );\n },\n get deleted() {\n const _this: RxDocument = this as any;\n if (!_this.isInstanceOfRxDocument) {\n return undefined;\n }\n return _this._data._deleted;\n },\n\n getLatest(this: RxDocument): RxDocument {\n const latestDocData = this.collection._docCache.getLatestDocumentData(this.primary);\n return this.collection._docCache.getCachedRxDocument(latestDocData);\n },\n\n /**\n * returns the observable which emits the plain-data of this document\n */\n get $(): Observable> {\n const _this: RxDocument<{}, {}, {}> = this as any;\n return _this.collection.$.pipe(\n filter(changeEvent => !changeEvent.isLocal),\n filter(changeEvent => changeEvent.documentId === this.primary),\n map(changeEvent => getDocumentDataOfRxChangeEvent(changeEvent)),\n startWith(_this.collection._docCache.getLatestDocumentData(this.primary)),\n distinctUntilChanged((prev, curr) => prev._rev === curr._rev),\n map(docData => (this as 
RxDocument).collection._docCache.getCachedRxDocument(docData)),\n shareReplay(RXJS_SHARE_REPLAY_DEFAULTS)\n );\n },\n get $$(): any {\n const _this: RxDocument = this as any;\n const reactivity = _this.collection.database.getReactivityFactory();\n return reactivity.fromObservable(\n _this.$,\n _this.getLatest()._data,\n _this.collection.database\n );\n },\n\n /**\n * returns observable of the value of the given path\n */\n get$(this: RxDocument, path: string): Observable {\n if (overwritable.isDevMode()) {\n if (path.includes('.item.')) {\n throw newRxError('DOC1', {\n path\n });\n }\n\n if (path === this.primaryPath) {\n throw newRxError('DOC2');\n }\n\n // final fields cannot be modified and so also not observed\n if (this.collection.schema.finalFields.includes(path)) {\n throw newRxError('DOC3', {\n path\n });\n }\n\n const schemaObj = getSchemaByObjectPath(\n this.collection.schema.jsonSchema,\n path\n );\n\n if (!schemaObj) {\n throw newRxError('DOC4', {\n path\n });\n }\n }\n\n return this.$\n .pipe(\n map(data => getProperty(data, path)),\n distinctUntilChanged()\n );\n },\n get$$(this: RxDocument, path: string) {\n const obs = this.get$(path);\n const reactivity = this.collection.database.getReactivityFactory();\n return reactivity.fromObservable(\n obs,\n this.getLatest().get(path),\n this.collection.database\n );\n },\n\n /**\n * populate the given path\n */\n populate(this: RxDocument, path: string): Promise {\n const schemaObj = getSchemaByObjectPath(\n this.collection.schema.jsonSchema,\n path\n );\n const value = this.get(path);\n if (!value) {\n return PROMISE_RESOLVE_NULL;\n }\n if (!schemaObj) {\n throw newRxError('DOC5', {\n path\n });\n }\n if (!schemaObj.ref) {\n throw newRxError('DOC6', {\n path,\n schemaObj\n });\n }\n\n const refCollection: RxCollection = this.collection.database.collections[schemaObj.ref];\n if (!refCollection) {\n throw newRxError('DOC7', {\n ref: schemaObj.ref,\n path,\n schemaObj\n });\n }\n\n if (schemaObj.type === 
'array') {\n return refCollection.findByIds(value).exec().then(res => {\n const valuesIterator = res.values();\n return Array.from(valuesIterator) as any;\n });\n } else {\n return refCollection.findOne(value).exec();\n }\n },\n /**\n * get data by objectPath\n * @hotPath Performance here is really important,\n * run some tests before changing anything.\n */\n get(this: RxDocument, objPath: string): any | null {\n return getFromMapOrCreate(\n this._propertyCache,\n objPath,\n () => {\n const valueObj = getProperty(this._data, objPath);\n\n // direct return if array or non-object\n if (\n typeof valueObj !== 'object' ||\n valueObj === null ||\n Array.isArray(valueObj)\n ) {\n return overwritable.deepFreezeWhenDevMode(valueObj);\n }\n const _this = this;\n const proxy = new Proxy(\n /**\n * In dev-mode, the _data is deep-frozen\n * so we have to flat clone here so that\n * the proxy can work.\n */\n flatClone(valueObj),\n {\n get(target, property: any) {\n if (typeof property !== 'string') {\n return target[property];\n }\n const lastChar = property.charAt(property.length - 1);\n if (property.endsWith('$$')) {\n const key = property.slice(0, -2);\n return _this.get$$(trimDots(objPath + '.' + key));\n } else if (lastChar === '$') {\n const key = property.slice(0, -1);\n return _this.get$(trimDots(objPath + '.' + key));\n } else if (lastChar === '_') {\n const key = property.slice(0, -1);\n return _this.populate(trimDots(objPath + '.' + key));\n } else {\n return _this.get(trimDots(objPath + '.' 
+ property));\n }\n }\n });\n return proxy;\n }\n );\n\n },\n\n toJSON(this: RxDocument, withMetaFields = false) {\n if (!withMetaFields) {\n const data = flatClone(this._data);\n delete (data as any)._rev;\n delete (data as any)._attachments;\n delete (data as any)._deleted;\n delete (data as any)._meta;\n return overwritable.deepFreezeWhenDevMode(data);\n } else {\n return overwritable.deepFreezeWhenDevMode(this._data);\n }\n },\n toMutableJSON(this: RxDocument, withMetaFields = false) {\n return clone(this.toJSON(withMetaFields as any));\n },\n\n /**\n * updates document\n * @overwritten by plugin (optional)\n * @param updateObj mongodb-like syntax\n */\n update(_updateObj: UpdateQuery) {\n throw pluginMissing('update');\n },\n incrementalUpdate(_updateObj: UpdateQuery) {\n throw pluginMissing('update');\n },\n updateCRDT(_updateObj: CRDTEntry | CRDTEntry[]) {\n throw pluginMissing('crdt');\n },\n putAttachment() {\n throw pluginMissing('attachments');\n },\n getAttachment() {\n throw pluginMissing('attachments');\n },\n allAttachments() {\n throw pluginMissing('attachments');\n },\n get allAttachments$() {\n throw pluginMissing('attachments');\n },\n\n async modify(\n this: RxDocument,\n mutationFunction: ModifyFunction,\n // used by some plugins that wrap the method\n _context?: string\n ): Promise {\n const oldData = this._data;\n const newData: RxDocumentData = await modifierFromPublicToInternal(mutationFunction)(oldData) as any;\n return this._saveData(newData, oldData) as any;\n },\n\n /**\n * runs an incremental update over the document\n * @param function that takes the document-data and returns a new data-object\n */\n incrementalModify(\n this: RxDocument,\n mutationFunction: ModifyFunction,\n // used by some plugins that wrap the method\n _context?: string\n ): Promise {\n return this.collection.incrementalWriteQueue.addWrite(\n this._data,\n modifierFromPublicToInternal(mutationFunction)\n ).then(result => 
this.collection._docCache.getCachedRxDocument(result));\n },\n\n patch(\n this: RxDocument,\n patch: Partial\n ) {\n const oldData = this._data;\n const newData = clone(oldData);\n Object\n .entries(patch)\n .forEach(([k, v]) => {\n (newData as any)[k] = v;\n });\n return this._saveData(newData, oldData);\n },\n\n /**\n * patches the given properties\n */\n incrementalPatch(\n this: RxDocument,\n patch: Partial\n ): Promise> {\n return this.incrementalModify((docData) => {\n Object\n .entries(patch)\n .forEach(([k, v]) => {\n (docData as any)[k] = v;\n });\n return docData;\n });\n },\n\n /**\n * saves the new document-data\n * and handles the events\n */\n async _saveData(\n this: RxDocument,\n newData: RxDocumentWriteData,\n oldData: RxDocumentData\n ): Promise> {\n newData = flatClone(newData);\n\n // deleted documents cannot be changed\n if (this._data._deleted) {\n throw newRxError('DOC11', {\n id: this.primary,\n document: this\n });\n }\n await beforeDocumentUpdateWrite(this.collection, newData, oldData);\n const writeResult = await this.collection.storageInstance.bulkWrite([{\n previous: oldData,\n document: newData\n }], 'rx-document-save-data');\n\n const isError = writeResult.error[0];\n throwIfIsStorageWriteError(this.collection, this.primary, newData, isError);\n\n await this.collection._runHooks('post', 'save', newData, this);\n return this.collection._docCache.getCachedRxDocument(\n writeResult.success[0]\n );\n },\n\n /**\n * Remove the document.\n * Notice that there is no hard delete,\n * instead deleted documents get flagged with _deleted=true.\n */\n remove(this: RxDocument): Promise {\n const collection = this.collection;\n if (this.deleted) {\n return Promise.reject(newRxError('DOC13', {\n document: this,\n id: this.primary\n }));\n }\n\n const deletedData = flatClone(this._data);\n let removedDocData: RxDocumentData;\n return collection._runHooks('pre', 'remove', deletedData, this)\n .then(async () => {\n deletedData._deleted = true;\n const 
writeResult = await collection.storageInstance.bulkWrite([{\n previous: this._data,\n document: deletedData\n }], 'rx-document-remove');\n const isError = writeResult.error[0];\n throwIfIsStorageWriteError(collection, this.primary, deletedData, isError);\n return writeResult.success[0];\n })\n .then((removed) => {\n removedDocData = removed;\n return this.collection._runHooks('post', 'remove', deletedData, this);\n })\n .then(() => {\n return this.collection._docCache.getCachedRxDocument(removedDocData);\n });\n },\n incrementalRemove(this: RxDocument): Promise {\n return this.incrementalModify(async (docData) => {\n await this.collection._runHooks('pre', 'remove', docData, this);\n docData._deleted = true;\n return docData;\n }).then(async (newDoc) => {\n await this.collection._runHooks('post', 'remove', newDoc._data, newDoc);\n return newDoc;\n });\n },\n destroy() {\n throw newRxError('DOC14');\n }\n};\n\nexport function createRxDocumentConstructor(proto = basePrototype) {\n const constructor = function RxDocumentConstructor(\n this: RxDocument,\n collection: RxCollection,\n docData: RxDocumentData\n ) {\n this.collection = collection;\n\n // assume that this is always equal to the doc-data in the database\n this._data = docData;\n this._propertyCache = new Map();\n\n /**\n * because of the prototype-merge,\n * we can not use the native instanceof operator\n */\n this.isInstanceOfRxDocument = true;\n };\n constructor.prototype = proto;\n return constructor;\n}\n\nexport function createWithConstructor(\n constructor: any,\n collection: RxCollection,\n jsonData: RxDocumentData\n): RxDocument | null {\n const doc = new constructor(collection, jsonData);\n runPluginHooks('createRxDocument', doc);\n return doc;\n}\n\nexport function isRxDocument(obj: any): boolean {\n return typeof obj === 'object' && obj !== null && 'isInstanceOfRxDocument' in obj;\n}\n\n\nexport function beforeDocumentUpdateWrite(\n collection: RxCollection,\n newData: RxDocumentWriteData,\n 
oldData: RxDocumentData\n): Promise {\n /**\n * Meta values must always be merged\n * instead of overwritten.\n * This ensures that different plugins do not overwrite\n * each others meta properties.\n */\n newData._meta = Object.assign(\n {},\n oldData._meta,\n newData._meta\n );\n\n // ensure modifications are ok\n if (overwritable.isDevMode()) {\n collection.schema.validateChange(oldData, newData);\n }\n return collection._runHooks('pre', 'save', newData, oldData);\n}\n\n"],"mappings":";;;;;;;;;;AAGA,IAAAA,UAAA,GAAAC,OAAA;AAOA,IAAAC,MAAA,GAAAD,OAAA;AAUA,IAAAE,QAAA,GAAAF,OAAA;AAGA,IAAAG,MAAA,GAAAH,OAAA;AAaA,IAAAI,cAAA,GAAAJ,OAAA;AACA,IAAAK,aAAA,GAAAL,OAAA;AACA,IAAAM,eAAA,GAAAN,OAAA;AACA,IAAAO,gBAAA,GAAAP,OAAA;AACA,IAAAQ,iBAAA,GAAAR,OAAA;AAEO,IAAMS,aAAa,GAAAC,OAAA,CAAAD,aAAA,GAAG;EACzB,IAAIE,WAAWA,CAAA,EAAG;IACd,IAAMC,KAAiB,GAAG,IAAW;IACrC,IAAI,CAACA,KAAK,CAACC,sBAAsB,EAAE;MAC/B,OAAOC,SAAS;IACpB;IACA,OAAOF,KAAK,CAACG,UAAU,CAACC,MAAM,CAACL,WAAW;EAC9C,CAAC;EACD,IAAIM,OAAOA,CAAA,EAAG;IACV,IAAML,KAAiB,GAAG,IAAW;IACrC,IAAI,CAACA,KAAK,CAACC,sBAAsB,EAAE;MAC/B,OAAOC,SAAS;IACpB;IACA,OAAQF,KAAK,CAACM,KAAK,CAASN,KAAK,CAACD,WAAW,CAAC;EAClD,CAAC;EACD,IAAIQ,QAAQA,CAAA,EAAG;IACX,IAAMP,KAAiB,GAAG,IAAW;IACrC,IAAI,CAACA,KAAK,CAACC,sBAAsB,EAAE;MAC/B,OAAOC,SAAS;IACpB;IACA,OAAOF,KAAK,CAACM,KAAK,CAACE,IAAI;EAC3B,CAAC;EACD,IAAIC,QAAQA,CAAA,EAAG;IACX,IAAMT,KAAsB,GAAG,IAAW;IAC1C,IAAI,CAACA,KAAK,CAACC,sBAAsB,EAAE;MAC/B,OAAOC,SAAS;IACpB;IACA,OAAOF,KAAK,CAACU,CAAC,CAACC,IAAI,CACf,IAAAC,cAAG,EAAEC,CAAM,IAAKA,CAAC,CAACP,KAAK,CAACQ,QAAQ,CACpC,CAAC;EACL,CAAC;EACD,IAAIC,SAASA,CAAA,EAAG;IACZ,IAAMf,KAAiB,GAAG,IAAW;IACrC,IAAMgB,UAAU,GAAGhB,KAAK,CAACG,UAAU,CAACc,QAAQ,CAACC,oBAAoB,CAAC,CAAC;IACnE,OAAOF,UAAU,CAACG,cAAc,CAC5BnB,KAAK,CAACS,QAAQ,EACdT,KAAK,CAACoB,SAAS,CAAC,CAAC,CAACC,OAAO,EACzBrB,KAAK,CAACG,UAAU,CAACc,QACrB,CAAC;EACL,CAAC;EACD,IAAII,OAAOA,CAAA,EAAG;IACV,IAAMrB,KAAiB,GAAG,IAAW;IACrC,IAAI,CAACA,KAAK,CAACC,sBAAsB,EAAE;MAC/B,OAAOC,SAAS;IACpB;IACA,OAAOF,KAAK,CAACM,KAAK,CAACQ,QAAQ;EAC/B,CAAC;EAE
DM,SAASA,CAAA,EAA+B;IACpC,IAAME,aAAa,GAAG,IAAI,CAACnB,UAAU,CAACoB,SAAS,CAACC,qBAAqB,CAAC,IAAI,CAACnB,OAAO,CAAC;IACnF,OAAO,IAAI,CAACF,UAAU,CAACoB,SAAS,CAACE,mBAAmB,CAACH,aAAa,CAAC;EACvE,CAAC;EAED;AACJ;AACA;EACI,IAAIZ,CAACA,CAAA,EAAoC;IACrC,IAAMV,KAA6B,GAAG,IAAW;IACjD,OAAOA,KAAK,CAACG,UAAU,CAACO,CAAC,CAACC,IAAI,CAC1B,IAAAe,iBAAM,EAACC,WAAW,IAAI,CAACA,WAAW,CAACC,OAAO,CAAC,EAC3C,IAAAF,iBAAM,EAACC,WAAW,IAAIA,WAAW,CAACE,UAAU,KAAK,IAAI,CAACxB,OAAO,CAAC,EAC9D,IAAAO,cAAG,EAACe,WAAW,IAAI,IAAAG,6CAA8B,EAACH,WAAW,CAAC,CAAC,EAC/D,IAAAI,oBAAS,EAAC/B,KAAK,CAACG,UAAU,CAACoB,SAAS,CAACC,qBAAqB,CAAC,IAAI,CAACnB,OAAO,CAAC,CAAC,EACzE,IAAA2B,+BAAoB,EAAC,CAACC,IAAI,EAAEC,IAAI,KAAKD,IAAI,CAACzB,IAAI,KAAK0B,IAAI,CAAC1B,IAAI,CAAC,EAC7D,IAAAI,cAAG,EAACuB,OAAO,IAAK,IAAI,CAAqBhC,UAAU,CAACoB,SAAS,CAACE,mBAAmB,CAACU,OAAO,CAAC,CAAC,EAC3F,IAAAC,sBAAW,EAACC,iCAA0B,CAC1C,CAAC;EACL,CAAC;EACD,IAAIC,EAAEA,CAAA,EAAQ;IACV,IAAMtC,KAAiB,GAAG,IAAW;IACrC,IAAMgB,UAAU,GAAGhB,KAAK,CAACG,UAAU,CAACc,QAAQ,CAACC,oBAAoB,CAAC,CAAC;IACnE,OAAOF,UAAU,CAACG,cAAc,CAC5BnB,KAAK,CAACU,CAAC,EACPV,KAAK,CAACoB,SAAS,CAAC,CAAC,CAACd,KAAK,EACvBN,KAAK,CAACG,UAAU,CAACc,QACrB,CAAC;EACL,CAAC;EAED;AACJ;AACA;EACIsB,IAAIA,CAAmBC,IAAY,EAAmB;IAClD,IAAIC,0BAAY,CAACC,SAAS,CAAC,CAAC,EAAE;MAC1B,IAAIF,IAAI,CAACG,QAAQ,CAAC,QAAQ,CAAC,EAAE;QACzB,MAAM,IAAAC,mBAAU,EAAC,MAAM,EAAE;UACrBJ;QACJ,CAAC,CAAC;MACN;MAEA,IAAIA,IAAI,KAAK,IAAI,CAACzC,WAAW,EAAE;QAC3B,MAAM,IAAA6C,mBAAU,EAAC,MAAM,CAAC;MAC5B;;MAEA;MACA,IAAI,IAAI,CAACzC,UAAU,CAACC,MAAM,CAACyC,WAAW,CAACF,QAAQ,CAACH,IAAI,CAAC,EAAE;QACnD,MAAM,IAAAI,mBAAU,EAAC,MAAM,EAAE;UACrBJ;QACJ,CAAC,CAAC;MACN;MAEA,IAAMM,SAAS,GAAG,IAAAC,qCAAqB,EACnC,IAAI,CAAC5C,UAAU,CAACC,MAAM,CAAC4C,UAAU,EACjCR,IACJ,CAAC;MAED,IAAI,CAACM,SAAS,EAAE;QACZ,MAAM,IAAAF,mBAAU,EAAC,MAAM,EAAE;UACrBJ;QACJ,CAAC,CAAC;MACN;IACJ;IAEA,OAAO,IAAI,CAAC9B,CAAC,CACRC,IAAI,CACD,IAAAC,cAAG,EAACqC,IAAI,IAAI,IAAAC,kBAAW,EAACD,IAAI,EAAET,IAAI,CAAC,CAAC,EACpC,IAAAR,+BAAoB,EAAC,CACzB,CAAC;EACT,CAAC;EACDmB,KAAKA,CAAmBX,IAAY,EAAE;IAClC,IAAMY,GAAG,GAAG,IAAI,CAACb,I
AAI,CAACC,IAAI,CAAC;IAC3B,IAAMxB,UAAU,GAAG,IAAI,CAACb,UAAU,CAACc,QAAQ,CAACC,oBAAoB,CAAC,CAAC;IAClE,OAAOF,UAAU,CAACG,cAAc,CAC5BiC,GAAG,EACH,IAAI,CAAChC,SAAS,CAAC,CAAC,CAACiC,GAAG,CAACb,IAAI,CAAC,EAC1B,IAAI,CAACrC,UAAU,CAACc,QACpB,CAAC;EACL,CAAC;EAED;AACJ;AACA;EACIqC,QAAQA,CAAmBd,IAAY,EAA8B;IACjE,IAAMM,SAAS,GAAG,IAAAC,qCAAqB,EACnC,IAAI,CAAC5C,UAAU,CAACC,MAAM,CAAC4C,UAAU,EACjCR,IACJ,CAAC;IACD,IAAMe,KAAK,GAAG,IAAI,CAACF,GAAG,CAACb,IAAI,CAAC;IAC5B,IAAI,CAACe,KAAK,EAAE;MACR,OAAOC,2BAAoB;IAC/B;IACA,IAAI,CAACV,SAAS,EAAE;MACZ,MAAM,IAAAF,mBAAU,EAAC,MAAM,EAAE;QACrBJ;MACJ,CAAC,CAAC;IACN;IACA,IAAI,CAACM,SAAS,CAACW,GAAG,EAAE;MAChB,MAAM,IAAAb,mBAAU,EAAC,MAAM,EAAE;QACrBJ,IAAI;QACJM;MACJ,CAAC,CAAC;IACN;IAEA,IAAMY,aAA2B,GAAG,IAAI,CAACvD,UAAU,CAACc,QAAQ,CAAC0C,WAAW,CAACb,SAAS,CAACW,GAAG,CAAC;IACvF,IAAI,CAACC,aAAa,EAAE;MAChB,MAAM,IAAAd,mBAAU,EAAC,MAAM,EAAE;QACrBa,GAAG,EAAEX,SAAS,CAACW,GAAG;QAClBjB,IAAI;QACJM;MACJ,CAAC,CAAC;IACN;IAEA,IAAIA,SAAS,CAACc,IAAI,KAAK,OAAO,EAAE;MAC5B,OAAOF,aAAa,CAACG,SAAS,CAACN,KAAK,CAAC,CAACO,IAAI,CAAC,CAAC,CAACC,IAAI,CAACC,GAAG,IAAI;QACrD,IAAMC,cAAc,GAAGD,GAAG,CAACE,MAAM,CAAC,CAAC;QACnC,OAAOC,KAAK,CAACC,IAAI,CAACH,cAAc,CAAC;MACrC,CAAC,CAAC;IACN,CAAC,MAAM;MACH,OAAOP,aAAa,CAACW,OAAO,CAACd,KAAK,CAAC,CAACO,IAAI,CAAC,CAAC;IAC9C;EACJ,CAAC;EACD;AACJ;AACA;AACA;AACA;EACIT,GAAGA,CAAmBiB,OAAe,EAAc;IAC/C,OAAO,IAAAC,yBAAkB,EACrB,IAAI,CAACC,cAAc,EACnBF,OAAO,EACP,MAAM;MACF,IAAMG,QAAQ,GAAG,IAAAvB,kBAAW,EAAC,IAAI,CAAC5C,KAAK,EAAEgE,OAAO,CAAC;;MAEjD;MACA,IACI,OAAOG,QAAQ,KAAK,QAAQ,IAC5BA,QAAQ,KAAK,IAAI,IACjBN,KAAK,CAACO,OAAO,CAACD,QAAQ,CAAC,EACzB;QACE,OAAOhC,0BAAY,CAACkC,qBAAqB,CAACF,QAAQ,CAAC;MACvD;MACA,IAAMzE,KAAK,GAAG,IAAI;MAClB,IAAM4E,KAAK,GAAG,IAAIC,KAAK;MACnB;AACpB;AACA;AACA;AACA;MACoB,IAAAC,gBAAS,EAACL,QAAQ,CAAC,EACnB;QACIpB,GAAGA,CAAC0B,MAAM,EAAEC,QAAa,EAAE;UACvB,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;YAC9B,OAAOD,MAAM,CAACC,QAAQ,CAAC;UAC3B;UACA,IAAMC,QAAQ,GAAGD,QAAQ,CAACE,MAAM,CAACF,QAAQ,CAACG,MAAM,GAAG,CAAC,CAAC;UACrD,IAAIH,QAAQ,CAACI,QAAQ,CAAC,IAAI,CAAC,EAAE;YACzB,IAAMC,GAAG,
GAAGL,QAAQ,CAACM,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;YACjC,OAAOtF,KAAK,CAACmD,KAAK,CAAC,IAAAoC,eAAQ,EAACjB,OAAO,GAAG,GAAG,GAAGe,GAAG,CAAC,CAAC;UACrD,CAAC,MAAM,IAAIJ,QAAQ,KAAK,GAAG,EAAE;YACzB,IAAMI,IAAG,GAAGL,QAAQ,CAACM,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;YACjC,OAAOtF,KAAK,CAACuC,IAAI,CAAC,IAAAgD,eAAQ,EAACjB,OAAO,GAAG,GAAG,GAAGe,IAAG,CAAC,CAAC;UACpD,CAAC,MAAM,IAAIJ,QAAQ,KAAK,GAAG,EAAE;YACzB,IAAMI,KAAG,GAAGL,QAAQ,CAACM,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;YACjC,OAAOtF,KAAK,CAACsD,QAAQ,CAAC,IAAAiC,eAAQ,EAACjB,OAAO,GAAG,GAAG,GAAGe,KAAG,CAAC,CAAC;UACxD,CAAC,MAAM;YACH,OAAOrF,KAAK,CAACqD,GAAG,CAAC,IAAAkC,eAAQ,EAACjB,OAAO,GAAG,GAAG,GAAGU,QAAQ,CAAC,CAAC;UACxD;QACJ;MACJ,CAAC,CAAC;MACN,OAAOJ,KAAK;IAChB,CACJ,CAAC;EAEL,CAAC;EAEDY,MAAMA,CAAmBC,cAAc,GAAG,KAAK,EAAE;IAC7C,IAAI,CAACA,cAAc,EAAE;MACjB,IAAMxC,IAAI,GAAG,IAAA6B,gBAAS,EAAC,IAAI,CAACxE,KAAK,CAAC;MAClC,OAAQ2C,IAAI,CAASzC,IAAI;MACzB,OAAQyC,IAAI,CAASyC,YAAY;MACjC,OAAQzC,IAAI,CAASnC,QAAQ;MAC7B,OAAQmC,IAAI,CAAS0C,KAAK;MAC1B,OAAOlD,0BAAY,CAACkC,qBAAqB,CAAC1B,IAAI,CAAC;IACnD,CAAC,MAAM;MACH,OAAOR,0BAAY,CAACkC,qBAAqB,CAAC,IAAI,CAACrE,KAAK,CAAC;IACzD;EACJ,CAAC;EACDsF,aAAaA,CAAmBH,cAAc,GAAG,KAAK,EAAE;IACpD,OAAO,IAAAI,YAAK,EAAC,IAAI,CAACL,MAAM,CAACC,cAAqB,CAAC,CAAC;EACpD,CAAC;EAED;AACJ;AACA;AACA;AACA;EACIK,MAAMA,CAACC,UAA4B,EAAE;IACjC,MAAM,IAAAC,oBAAa,EAAC,QAAQ,CAAC;EACjC,CAAC;EACDC,iBAAiBA,CAACF,UAA4B,EAAE;IAC5C,MAAM,IAAAC,oBAAa,EAAC,QAAQ,CAAC;EACjC,CAAC;EACDE,UAAUA,CAACH,UAA6C,EAAE;IACtD,MAAM,IAAAC,oBAAa,EAAC,MAAM,CAAC;EAC/B,CAAC;EACDG,aAAaA,CAAA,EAAG;IACZ,MAAM,IAAAH,oBAAa,EAAC,aAAa,CAAC;EACtC,CAAC;EACDI,aAAaA,CAAA,EAAG;IACZ,MAAM,IAAAJ,oBAAa,EAAC,aAAa,CAAC;EACtC,CAAC;EACDK,cAAcA,CAAA,EAAG;IACb,MAAM,IAAAL,oBAAa,EAAC,aAAa,CAAC;EACtC,CAAC;EACD,IAAIM,eAAeA,CAAA,EAAG;IAClB,MAAM,IAAAN,oBAAa,EAAC,aAAa,CAAC;EACtC,CAAC;EAED,MAAMO,MAAMA,CAERC,gBAA2C;EAC3C;EACAC,QAAiB,EACE;IACnB,IAAMC,OAAO,GAAG,IAAI,CAACpG,KAAK;IAC1B,IAAMqG,OAAkC,GAAG,MAAM,IAAAC,8CAA4B,EAAYJ,gBAAgB,CAAC,CAACE,OAAO,CAAQ;IAC1H,OAAO,IAAI,CAACG,SAAS,CAACF,OAAO,EAAED,OAAO,CAAC;EAC3C,CAA
C;EAED;AACJ;AACA;AACA;EACII,iBAAiBA,CAEbN,gBAAqC;EACrC;EACAC,QAAiB,EACE;IACnB,OAAO,IAAI,CAACtG,UAAU,CAAC4G,qBAAqB,CAACC,QAAQ,CACjD,IAAI,CAAC1G,KAAK,EACV,IAAAsG,8CAA4B,EAACJ,gBAAgB,CACjD,CAAC,CAACzC,IAAI,CAACkD,MAAM,IAAI,IAAI,CAAC9G,UAAU,CAACoB,SAAS,CAACE,mBAAmB,CAACwF,MAAM,CAAC,CAAC;EAC3E,CAAC;EAEDC,KAAKA,CAEDA,KAAyB,EAC3B;IACE,IAAMR,OAAO,GAAG,IAAI,CAACpG,KAAK;IAC1B,IAAMqG,OAAO,GAAG,IAAAd,YAAK,EAACa,OAAO,CAAC;IAC9BS,MAAM,CACDC,OAAO,CAACF,KAAK,CAAC,CACdG,OAAO,CAAC,CAAC,CAACC,CAAC,EAAEC,CAAC,CAAC,KAAK;MAChBZ,OAAO,CAASW,CAAC,CAAC,GAAGC,CAAC;IAC3B,CAAC,CAAC;IACN,OAAO,IAAI,CAACV,SAAS,CAACF,OAAO,EAAED,OAAO,CAAC;EAC3C,CAAC;EAED;AACJ;AACA;EACIc,gBAAgBA,CAEZN,KAA8B,EACK;IACnC,OAAO,IAAI,CAACJ,iBAAiB,CAAE3E,OAAO,IAAK;MACvCgF,MAAM,CACDC,OAAO,CAACF,KAAK,CAAC,CACdG,OAAO,CAAC,CAAC,CAACC,CAAC,EAAEC,CAAC,CAAC,KAAK;QAChBpF,OAAO,CAASmF,CAAC,CAAC,GAAGC,CAAC;MAC3B,CAAC,CAAC;MACN,OAAOpF,OAAO;IAClB,CAAC,CAAC;EACN,CAAC;EAED;AACJ;AACA;AACA;EACI,MAAM0E,SAASA,CAEXF,OAAuC,EACvCD,OAAkC,EACJ;IAC9BC,OAAO,GAAG,IAAA7B,gBAAS,EAAC6B,OAAO,CAAC;;IAE5B;IACA,IAAI,IAAI,CAACrG,KAAK,CAACQ,QAAQ,EAAE;MACrB,MAAM,IAAA8B,mBAAU,EAAC,OAAO,EAAE;QACtB6E,EAAE,EAAE,IAAI,CAACpH,OAAO;QAChBqH,QAAQ,EAAE;MACd,CAAC,CAAC;IACN;IACA,MAAMC,yBAAyB,CAAC,IAAI,CAACxH,UAAU,EAAEwG,OAAO,EAAED,OAAO,CAAC;IAClE,IAAMkB,WAAW,GAAG,MAAM,IAAI,CAACzH,UAAU,CAAC0H,eAAe,CAACC,SAAS,CAAC,CAAC;MACjEC,QAAQ,EAAErB,OAAO;MACjBgB,QAAQ,EAAEf;IACd,CAAC,CAAC,EAAE,uBAAuB,CAAC;IAE5B,IAAMqB,OAAO,GAAGJ,WAAW,CAACK,KAAK,CAAC,CAAC,CAAC;IACpC,IAAAC,2CAA0B,EAAC,IAAI,CAAC/H,UAAU,EAAE,IAAI,CAACE,OAAO,EAAEsG,OAAO,EAAEqB,OAAO,CAAC;IAE3E,MAAM,IAAI,CAAC7H,UAAU,CAACgI,SAAS,CAAC,MAAM,EAAE,MAAM,EAAExB,OAAO,EAAE,IAAI,CAAC;IAC9D,OAAO,IAAI,CAACxG,UAAU,CAACoB,SAAS,CAACE,mBAAmB,CAChDmG,WAAW,CAACQ,OAAO,CAAC,CAAC,CACzB,CAAC;EACL,CAAC;EAED;AACJ;AACA;AACA;AACA;EACIC,MAAMA,CAAA,EAAwC;IAC1C,IAAMlI,UAAU,GAAG,IAAI,CAACA,UAAU;IAClC,IAAI,IAAI,CAACkB,OAAO,EAAE;MACd,OAAOiH,OAAO,CAACC,MAAM,CAAC,IAAA3F,mBAAU,EAAC,OAAO,EAAE;QACtC8E,QAAQ,EAAE,IAAI;QACdD,EAAE,EAAE,IAAI,CAACpH;MACb,CAAC,CAAC,CAAC;
IACP;IAEA,IAAMmI,WAAW,GAAG,IAAA1D,gBAAS,EAAC,IAAI,CAACxE,KAAK,CAAC;IACzC,IAAImI,cAAmC;IACvC,OAAOtI,UAAU,CAACgI,SAAS,CAAC,KAAK,EAAE,QAAQ,EAAEK,WAAW,EAAE,IAAI,CAAC,CAC1DzE,IAAI,CAAC,YAAY;MACdyE,WAAW,CAAC1H,QAAQ,GAAG,IAAI;MAC3B,IAAM8G,WAAW,GAAG,MAAMzH,UAAU,CAAC0H,eAAe,CAACC,SAAS,CAAC,CAAC;QAC5DC,QAAQ,EAAE,IAAI,CAACzH,KAAK;QACpBoH,QAAQ,EAAEc;MACd,CAAC,CAAC,EAAE,oBAAoB,CAAC;MACzB,IAAMR,OAAO,GAAGJ,WAAW,CAACK,KAAK,CAAC,CAAC,CAAC;MACpC,IAAAC,2CAA0B,EAAC/H,UAAU,EAAE,IAAI,CAACE,OAAO,EAAEmI,WAAW,EAAER,OAAO,CAAC;MAC1E,OAAOJ,WAAW,CAACQ,OAAO,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,CACDrE,IAAI,CAAE2E,OAAO,IAAK;MACfD,cAAc,GAAGC,OAAO;MACxB,OAAO,IAAI,CAACvI,UAAU,CAACgI,SAAS,CAAC,MAAM,EAAE,QAAQ,EAAEK,WAAW,EAAE,IAAI,CAAC;IACzE,CAAC,CAAC,CACDzE,IAAI,CAAC,MAAM;MACR,OAAO,IAAI,CAAC5D,UAAU,CAACoB,SAAS,CAACE,mBAAmB,CAACgH,cAAc,CAAC;IACxE,CAAC,CAAC;EACV,CAAC;EACDE,iBAAiBA,CAAA,EAAwC;IACrD,OAAO,IAAI,CAAC7B,iBAAiB,CAAC,MAAO3E,OAAO,IAAK;MAC7C,MAAM,IAAI,CAAChC,UAAU,CAACgI,SAAS,CAAC,KAAK,EAAE,QAAQ,EAAEhG,OAAO,EAAE,IAAI,CAAC;MAC/DA,OAAO,CAACrB,QAAQ,GAAG,IAAI;MACvB,OAAOqB,OAAO;IAClB,CAAC,CAAC,CAAC4B,IAAI,CAAC,MAAO6E,MAAM,IAAK;MACtB,MAAM,IAAI,CAACzI,UAAU,CAACgI,SAAS,CAAC,MAAM,EAAE,QAAQ,EAAES,MAAM,CAACtI,KAAK,EAAEsI,MAAM,CAAC;MACvE,OAAOA,MAAM;IACjB,CAAC,CAAC;EACN,CAAC;EACDC,OAAOA,CAAA,EAAG;IACN,MAAM,IAAAjG,mBAAU,EAAC,OAAO,CAAC;EAC7B;AACJ,CAAC;AAEM,SAASkG,2BAA2BA,CAACC,KAAK,GAAGlJ,aAAa,EAAE;EAC/D,IAAMmJ,WAAW,GAAG,SAASC,qBAAqBA,CAE9C9I,UAAwB,EACxBgC,OAA4B,EAC9B;IACE,IAAI,CAAChC,UAAU,GAAGA,UAAU;;IAE5B;IACA,IAAI,CAACG,KAAK,GAAG6B,OAAO;IACpB,IAAI,CAACqC,cAAc,GAAG,IAAI0E,GAAG,CAAc,CAAC;;IAE5C;AACR;AACA;AACA;IACQ,IAAI,CAACjJ,sBAAsB,GAAG,IAAI;EACtC,CAAC;EACD+I,WAAW,CAACG,SAAS,GAAGJ,KAAK;EAC7B,OAAOC,WAAW;AACtB;AAEO,SAASI,qBAAqBA,CACjCJ,WAAgB,EAChB7I,UAAmC,EACnCkJ,QAAmC,EACP;EAC5B,IAAMC,GAAG,GAAG,IAAIN,WAAW,CAAC7I,UAAU,EAAEkJ,QAAQ,CAAC;EACjD,IAAAE,qBAAc,EAAC,kBAAkB,EAAED,GAAG,CAAC;EACvC,OAAOA,GAAG;AACd;AAEO,SAASE,YAAYA,CAACC,GAAQ,EAAW;EAC5C,OAAO,OAAOA,GAAG,KAAK,QAAQ,IAAIA,GAAG,KAAK,IAAI,IAAI,wBAAwB,IAAIA,GAAG;AACrF;AAGO
,SAAS9B,yBAAyBA,CACrCxH,UAAmC,EACnCwG,OAAuC,EACvCD,OAAkC,EACtB;EACZ;AACJ;AACA;AACA;AACA;AACA;EACIC,OAAO,CAAChB,KAAK,GAAGwB,MAAM,CAACuC,MAAM,CACzB,CAAC,CAAC,EACFhD,OAAO,CAACf,KAAK,EACbgB,OAAO,CAAChB,KACZ,CAAC;;EAED;EACA,IAAIlD,0BAAY,CAACC,SAAS,CAAC,CAAC,EAAE;IAC1BvC,UAAU,CAACC,MAAM,CAACuJ,cAAc,CAACjD,OAAO,EAAEC,OAAO,CAAC;EACtD;EACA,OAAOxG,UAAU,CAACgI,SAAS,CAAC,KAAK,EAAE,MAAM,EAAExB,OAAO,EAAED,OAAO,CAAC;AAChE","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/rx-error.js b/dist/cjs/rx-error.js deleted file mode 100644 index 1e3b06f699d..00000000000 --- a/dist/cjs/rx-error.js +++ /dev/null @@ -1,146 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxTypeError = exports.RxError = void 0; -exports.errorUrlHint = errorUrlHint; -exports.getErrorUrl = getErrorUrl; -exports.isBulkWriteConflictError = isBulkWriteConflictError; -exports.newRxError = newRxError; -exports.newRxTypeError = newRxTypeError; -exports.rxStorageWriteErrorToRxError = rxStorageWriteErrorToRxError; -var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass")); -var _inheritsLoose2 = _interopRequireDefault(require("@babel/runtime/helpers/inheritsLoose")); -var _wrapNativeSuper2 = _interopRequireDefault(require("@babel/runtime/helpers/wrapNativeSuper")); -var _overwritable = require("./overwritable.js"); -/** - * here we use custom errors with the additional field 'parameters' - */ - -/** - * transform an object of parameters to a presentable string - */ -function parametersToString(parameters) { - var ret = ''; - if (Object.keys(parameters).length === 0) return ret; - ret += 'Given parameters: {\n'; - ret += Object.keys(parameters).map(k => { - var paramStr = '[object Object]'; - try { - if (k === 'errors') { - paramStr = parameters[k].map(err => JSON.stringify(err, Object.getOwnPropertyNames(err))); - } else { - paramStr 
= JSON.stringify(parameters[k], function (_k, v) { - return v === undefined ? null : v; - }, 2); - } - } catch (e) {} - return k + ':' + paramStr; - }).join('\n'); - ret += '}'; - return ret; -} -function messageForError(message, code, parameters) { - return 'RxError (' + code + '):' + '\n' + message + '\n' + parametersToString(parameters); -} -var RxError = exports.RxError = /*#__PURE__*/function (_Error) { - // always true, use this to detect if its an rxdb-error - - function RxError(code, message, parameters = {}) { - var _this; - var mes = messageForError(message, code, parameters); - _this = _Error.call(this, mes) || this; - _this.code = code; - _this.message = mes; - _this.url = getErrorUrl(code); - _this.parameters = parameters; - _this.rxdb = true; // tag them as internal - return _this; - } - (0, _inheritsLoose2.default)(RxError, _Error); - var _proto = RxError.prototype; - _proto.toString = function toString() { - return this.message; - }; - return (0, _createClass2.default)(RxError, [{ - key: "name", - get: function () { - return 'RxError (' + this.code + ')'; - } - }, { - key: "typeError", - get: function () { - return false; - } - }]); -}( /*#__PURE__*/(0, _wrapNativeSuper2.default)(Error)); -var RxTypeError = exports.RxTypeError = /*#__PURE__*/function (_TypeError) { - // always true, use this to detect if its an rxdb-error - - function RxTypeError(code, message, parameters = {}) { - var _this2; - var mes = messageForError(message, code, parameters); - _this2 = _TypeError.call(this, mes) || this; - _this2.code = code; - _this2.message = mes; - _this2.url = getErrorUrl(code); - _this2.parameters = parameters; - _this2.rxdb = true; // tag them as internal - return _this2; - } - (0, _inheritsLoose2.default)(RxTypeError, _TypeError); - var _proto2 = RxTypeError.prototype; - _proto2.toString = function toString() { - return this.message; - }; - return (0, _createClass2.default)(RxTypeError, [{ - key: "name", - get: function () { - return 'RxTypeError (' + 
this.code + ')'; - } - }, { - key: "typeError", - get: function () { - return true; - } - }]); -}( /*#__PURE__*/(0, _wrapNativeSuper2.default)(TypeError)); -function getErrorUrl(code) { - return 'https://rxdb.info/errors.html?console=errors#' + code; -} -function errorUrlHint(code) { - return '\n You can find out more about this error here: ' + getErrorUrl(code) + ' '; -} -function newRxError(code, parameters) { - return new RxError(code, _overwritable.overwritable.tunnelErrorMessage(code) + errorUrlHint(code), parameters); -} -function newRxTypeError(code, parameters) { - return new RxTypeError(code, _overwritable.overwritable.tunnelErrorMessage(code) + errorUrlHint(code), parameters); -} - -/** - * Returns the error if it is a 409 conflict, - * return false if it is another error. - */ -function isBulkWriteConflictError(err) { - if (err && err.status === 409) { - return err; - } else { - return false; - } -} -var STORAGE_WRITE_ERROR_CODE_TO_MESSAGE = { - 409: 'document write conflict', - 422: 'schema validation error', - 510: 'attachment data missing' -}; -function rxStorageWriteErrorToRxError(err) { - return newRxError('COL20', { - name: STORAGE_WRITE_ERROR_CODE_TO_MESSAGE[err.status], - document: err.documentId, - writeError: err - }); -} -//# sourceMappingURL=rx-error.js.map \ No newline at end of file diff --git a/dist/cjs/rx-error.js.map b/dist/cjs/rx-error.js.map deleted file mode 100644 index b4c433db30f..00000000000 --- a/dist/cjs/rx-error.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"rx-error.js","names":["_overwritable","require","parametersToString","parameters","ret","Object","keys","length","map","k","paramStr","err","JSON","stringify","getOwnPropertyNames","_k","v","undefined","e","join","messageForError","message","code","RxError","exports","_Error","_this","mes","call","url","getErrorUrl","rxdb","_inheritsLoose2","default","_proto","prototype","toString","_createClass2","key","get","_wrapNativeSuper2","Error","RxTypeError","_TypeError","_this2","_proto2","TypeError","errorUrlHint","newRxError","overwritable","tunnelErrorMessage","newRxTypeError","isBulkWriteConflictError","status","STORAGE_WRITE_ERROR_CODE_TO_MESSAGE","rxStorageWriteErrorToRxError","name","document","documentId","writeError"],"sources":["../../src/rx-error.ts"],"sourcesContent":["/**\n * here we use custom errors with the additional field 'parameters'\n */\n\nimport { overwritable } from './overwritable.ts';\nimport type {\n RxErrorParameters,\n RxErrorKey,\n RxStorageWriteError,\n RxStorageWriteErrorConflict\n} from './types/index.d.ts';\n\n/**\n * transform an object of parameters to a presentable string\n */\nfunction parametersToString(parameters: any): string {\n let ret = '';\n if (Object.keys(parameters).length === 0)\n return ret;\n ret += 'Given parameters: {\\n';\n ret += Object.keys(parameters)\n .map(k => {\n let paramStr = '[object Object]';\n try {\n if (k === 'errors') {\n paramStr = parameters[k].map((err: any) => JSON.stringify(err, Object.getOwnPropertyNames(err)));\n } else {\n paramStr = JSON.stringify(parameters[k], function (_k, v) {\n return v === undefined ? 
null : v;\n }, 2);\n }\n } catch (e) { }\n return k + ':' + paramStr;\n })\n .join('\\n');\n ret += '}';\n return ret;\n}\n\nfunction messageForError(\n message: string,\n code: string,\n parameters: any\n): string {\n return 'RxError (' + code + '):' + '\\n' +\n message + '\\n' +\n parametersToString(parameters);\n}\n\nexport class RxError extends Error {\n public code: RxErrorKey;\n public message: string;\n public url: string;\n public parameters: RxErrorParameters;\n // always true, use this to detect if its an rxdb-error\n public rxdb: true;\n constructor(\n code: RxErrorKey,\n message: string,\n parameters: RxErrorParameters = {}\n ) {\n const mes = messageForError(message, code, parameters);\n super(mes);\n this.code = code;\n this.message = mes;\n this.url = getErrorUrl(code);\n this.parameters = parameters;\n this.rxdb = true; // tag them as internal\n }\n get name(): string {\n return 'RxError (' + this.code + ')';\n }\n toString(): string {\n return this.message;\n }\n get typeError(): boolean {\n return false;\n }\n}\n\nexport class RxTypeError extends TypeError {\n public code: RxErrorKey;\n public message: string;\n public url: string;\n public parameters: RxErrorParameters;\n // always true, use this to detect if its an rxdb-error\n public rxdb: true;\n constructor(\n code: RxErrorKey,\n message: string,\n parameters: RxErrorParameters = {}\n ) {\n const mes = messageForError(message, code, parameters);\n super(mes);\n this.code = code;\n this.message = mes;\n this.url = getErrorUrl(code);\n this.parameters = parameters;\n this.rxdb = true; // tag them as internal\n }\n get name(): string {\n return 'RxTypeError (' + this.code + ')';\n }\n toString(): string {\n return this.message;\n }\n get typeError(): boolean {\n return true;\n }\n}\n\n\nexport function getErrorUrl(code: RxErrorKey) {\n return 'https://rxdb.info/errors.html?console=errors#' + code;\n}\n\nexport function errorUrlHint(code: RxErrorKey) {\n return '\\n You can find out more about 
this error here: ' + getErrorUrl(code) + ' ';\n}\n\nexport function newRxError(\n code: RxErrorKey,\n parameters?: RxErrorParameters\n): RxError {\n return new RxError(\n code,\n overwritable.tunnelErrorMessage(code) + errorUrlHint(code),\n parameters\n );\n}\n\nexport function newRxTypeError(\n code: RxErrorKey,\n parameters?: RxErrorParameters\n): RxTypeError {\n return new RxTypeError(\n code,\n overwritable.tunnelErrorMessage(code) + errorUrlHint(code),\n parameters\n );\n}\n\n\n/**\n * Returns the error if it is a 409 conflict,\n * return false if it is another error.\n */\nexport function isBulkWriteConflictError(\n err?: RxStorageWriteError | any\n): RxStorageWriteErrorConflict | false {\n if (\n err &&\n err.status === 409\n ) {\n return err;\n } else {\n return false;\n }\n}\n\n\nconst STORAGE_WRITE_ERROR_CODE_TO_MESSAGE: { [k: number]: string; } = {\n 409: 'document write conflict',\n 422: 'schema validation error',\n 510: 'attachment data missing'\n};\n\nexport function rxStorageWriteErrorToRxError(err: RxStorageWriteError): RxError {\n return newRxError('COL20', {\n name: STORAGE_WRITE_ERROR_CODE_TO_MESSAGE[err.status],\n document: err.documentId,\n writeError: err\n 
});\n}\n"],"mappings":";;;;;;;;;;;;;;;;AAIA,IAAAA,aAAA,GAAAC,OAAA;AAJA;AACA;AACA;;AAUA;AACA;AACA;AACA,SAASC,kBAAkBA,CAACC,UAAe,EAAU;EACjD,IAAIC,GAAG,GAAG,EAAE;EACZ,IAAIC,MAAM,CAACC,IAAI,CAACH,UAAU,CAAC,CAACI,MAAM,KAAK,CAAC,EACpC,OAAOH,GAAG;EACdA,GAAG,IAAI,uBAAuB;EAC9BA,GAAG,IAAIC,MAAM,CAACC,IAAI,CAACH,UAAU,CAAC,CACzBK,GAAG,CAACC,CAAC,IAAI;IACN,IAAIC,QAAQ,GAAG,iBAAiB;IAChC,IAAI;MACA,IAAID,CAAC,KAAK,QAAQ,EAAE;QAChBC,QAAQ,GAAGP,UAAU,CAACM,CAAC,CAAC,CAACD,GAAG,CAAEG,GAAQ,IAAKC,IAAI,CAACC,SAAS,CAACF,GAAG,EAAEN,MAAM,CAACS,mBAAmB,CAACH,GAAG,CAAC,CAAC,CAAC;MACpG,CAAC,MAAM;QACHD,QAAQ,GAAGE,IAAI,CAACC,SAAS,CAACV,UAAU,CAACM,CAAC,CAAC,EAAE,UAAUM,EAAE,EAAEC,CAAC,EAAE;UACtD,OAAOA,CAAC,KAAKC,SAAS,GAAG,IAAI,GAAGD,CAAC;QACrC,CAAC,EAAE,CAAC,CAAC;MACT;IACJ,CAAC,CAAC,OAAOE,CAAC,EAAE,CAAE;IACd,OAAOT,CAAC,GAAG,GAAG,GAAGC,QAAQ;EAC7B,CAAC,CAAC,CACDS,IAAI,CAAC,IAAI,CAAC;EACff,GAAG,IAAI,GAAG;EACV,OAAOA,GAAG;AACd;AAEA,SAASgB,eAAeA,CACpBC,OAAe,EACfC,IAAY,EACZnB,UAAe,EACT;EACN,OAAO,WAAW,GAAGmB,IAAI,GAAG,IAAI,GAAG,IAAI,GACnCD,OAAO,GAAG,IAAI,GACdnB,kBAAkB,CAACC,UAAU,CAAC;AACtC;AAAC,IAEYoB,OAAO,GAAAC,OAAA,CAAAD,OAAA,0BAAAE,MAAA;EAKhB;;EAEA,SAAAF,QACID,IAAgB,EAChBD,OAAe,EACflB,UAA6B,GAAG,CAAC,CAAC,EACpC;IAAA,IAAAuB,KAAA;IACE,IAAMC,GAAG,GAAGP,eAAe,CAACC,OAAO,EAAEC,IAAI,EAAEnB,UAAU,CAAC;IACtDuB,KAAA,GAAAD,MAAA,CAAAG,IAAA,OAAMD,GAAG,CAAC;IACVD,KAAA,CAAKJ,IAAI,GAAGA,IAAI;IAChBI,KAAA,CAAKL,OAAO,GAAGM,GAAG;IAClBD,KAAA,CAAKG,GAAG,GAAGC,WAAW,CAACR,IAAI,CAAC;IAC5BI,KAAA,CAAKvB,UAAU,GAAGA,UAAU;IAC5BuB,KAAA,CAAKK,IAAI,GAAG,IAAI,CAAC,CAAC;IAAA,OAAAL,KAAA;EACtB;EAAC,IAAAM,eAAA,CAAAC,OAAA,EAAAV,OAAA,EAAAE,MAAA;EAAA,IAAAS,MAAA,GAAAX,OAAA,CAAAY,SAAA;EAAAD,MAAA,CAIDE,QAAQ,GAAR,SAAAA,SAAA,EAAmB;IACf,OAAO,IAAI,CAACf,OAAO;EACvB,CAAC;EAAA,WAAAgB,aAAA,CAAAJ,OAAA,EAAAV,OAAA;IAAAe,GAAA;IAAAC,GAAA,EALD,SAAAA,CAAA,EAAmB;MACf,OAAO,WAAW,GAAG,IAAI,CAACjB,IAAI,GAAG,GAAG;IACxC;EAAC;IAAAgB,GAAA;IAAAC,GAAA,EAID,SAAAA,CAAA,EAAyB;MACrB,OAAO,KAAK;IAChB;EAAC;AAAA,oBAAAC,iBAAA,CAAAP,OAAA,EA5BwBQ,KAAK;AAAA,IA+BrBC,WAAW,GAAAlB,OAAA,CAAAk
B,WAAA,0BAAAC,UAAA;EAKpB;;EAEA,SAAAD,YACIpB,IAAgB,EAChBD,OAAe,EACflB,UAA6B,GAAG,CAAC,CAAC,EACpC;IAAA,IAAAyC,MAAA;IACE,IAAMjB,GAAG,GAAGP,eAAe,CAACC,OAAO,EAAEC,IAAI,EAAEnB,UAAU,CAAC;IACtDyC,MAAA,GAAAD,UAAA,CAAAf,IAAA,OAAMD,GAAG,CAAC;IACViB,MAAA,CAAKtB,IAAI,GAAGA,IAAI;IAChBsB,MAAA,CAAKvB,OAAO,GAAGM,GAAG;IAClBiB,MAAA,CAAKf,GAAG,GAAGC,WAAW,CAACR,IAAI,CAAC;IAC5BsB,MAAA,CAAKzC,UAAU,GAAGA,UAAU;IAC5ByC,MAAA,CAAKb,IAAI,GAAG,IAAI,CAAC,CAAC;IAAA,OAAAa,MAAA;EACtB;EAAC,IAAAZ,eAAA,CAAAC,OAAA,EAAAS,WAAA,EAAAC,UAAA;EAAA,IAAAE,OAAA,GAAAH,WAAA,CAAAP,SAAA;EAAAU,OAAA,CAIDT,QAAQ,GAAR,SAAAA,SAAA,EAAmB;IACf,OAAO,IAAI,CAACf,OAAO;EACvB,CAAC;EAAA,WAAAgB,aAAA,CAAAJ,OAAA,EAAAS,WAAA;IAAAJ,GAAA;IAAAC,GAAA,EALD,SAAAA,CAAA,EAAmB;MACf,OAAO,eAAe,GAAG,IAAI,CAACjB,IAAI,GAAG,GAAG;IAC5C;EAAC;IAAAgB,GAAA;IAAAC,GAAA,EAID,SAAAA,CAAA,EAAyB;MACrB,OAAO,IAAI;IACf;EAAC;AAAA,oBAAAC,iBAAA,CAAAP,OAAA,EA5B4Ba,SAAS;AAgCnC,SAAShB,WAAWA,CAACR,IAAgB,EAAE;EAC1C,OAAO,+CAA+C,GAAGA,IAAI;AACjE;AAEO,SAASyB,YAAYA,CAACzB,IAAgB,EAAE;EAC3C,OAAO,kDAAkD,GAAGQ,WAAW,CAACR,IAAI,CAAC,GAAG,GAAG;AACvF;AAEO,SAAS0B,UAAUA,CACtB1B,IAAgB,EAChBnB,UAA8B,EACvB;EACP,OAAO,IAAIoB,OAAO,CACdD,IAAI,EACJ2B,0BAAY,CAACC,kBAAkB,CAAC5B,IAAI,CAAC,GAAGyB,YAAY,CAACzB,IAAI,CAAC,EAC1DnB,UACJ,CAAC;AACL;AAEO,SAASgD,cAAcA,CAC1B7B,IAAgB,EAChBnB,UAA8B,EACnB;EACX,OAAO,IAAIuC,WAAW,CAClBpB,IAAI,EACJ2B,0BAAY,CAACC,kBAAkB,CAAC5B,IAAI,CAAC,GAAGyB,YAAY,CAACzB,IAAI,CAAC,EAC1DnB,UACJ,CAAC;AACL;;AAGA;AACA;AACA;AACA;AACO,SAASiD,wBAAwBA,CACpCzC,GAA0C,EACI;EAC9C,IACIA,GAAG,IACHA,GAAG,CAAC0C,MAAM,KAAK,GAAG,EACpB;IACE,OAAO1C,GAAG;EACd,CAAC,MAAM;IACH,OAAO,KAAK;EAChB;AACJ;AAGA,IAAM2C,mCAA6D,GAAG;EAClE,GAAG,EAAE,yBAAyB;EAC9B,GAAG,EAAE,yBAAyB;EAC9B,GAAG,EAAE;AACT,CAAC;AAEM,SAASC,4BAA4BA,CAAC5C,GAA6B,EAAW;EACjF,OAAOqC,UAAU,CAAC,OAAO,EAAE;IACvBQ,IAAI,EAAEF,mCAAmC,CAAC3C,GAAG,CAAC0C,MAAM,CAAC;IACrDI,QAAQ,EAAE9C,GAAG,CAAC+C,UAAU;IACxBC,UAAU,EAAEhD;EAChB,CAAC,CAAC;AACN","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/rx-query-helper.js b/dist/cjs/rx-query-helper.js 
deleted file mode 100644 index ec81163c57a..00000000000 --- a/dist/cjs/rx-query-helper.js +++ /dev/null @@ -1,214 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.getQueryMatcher = getQueryMatcher; -exports.getSortComparator = getSortComparator; -exports.normalizeMangoQuery = normalizeMangoQuery; -exports.runQueryUpdateFunction = runQueryUpdateFunction; -var _queryPlanner = require("./query-planner.js"); -var _rxSchemaHelper = require("./rx-schema-helper.js"); -var _index = require("./plugins/utils/index.js"); -var _util = require("mingo/util"); -var _rxError = require("./rx-error.js"); -var _rxQueryMingo = require("./rx-query-mingo.js"); -/** - * Normalize the query to ensure we have all fields set - * and queries that represent the same query logic are detected as equal by the caching. - */ -function normalizeMangoQuery(schema, mangoQuery) { - var primaryKey = (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(schema.primaryKey); - mangoQuery = (0, _index.flatClone)(mangoQuery); - var normalizedMangoQuery = (0, _index.clone)(mangoQuery); - if (typeof normalizedMangoQuery.skip !== 'number') { - normalizedMangoQuery.skip = 0; - } - if (!normalizedMangoQuery.selector) { - normalizedMangoQuery.selector = {}; - } else { - normalizedMangoQuery.selector = normalizedMangoQuery.selector; - /** - * In mango query, it is possible to have an - * equals comparison by directly assigning a value - * to a property, without the '$eq' operator. - * Like: - * selector: { - * foo: 'bar' - * } - * For normalization, we have to normalize this - * so our checks can perform properly. - * - * - * TODO this must work recursive with nested queries that - * contain multiple selectors via $and or $or etc. 
- */ - Object.entries(normalizedMangoQuery.selector).forEach(([field, matcher]) => { - if (typeof matcher !== 'object' || matcher === null) { - normalizedMangoQuery.selector[field] = { - $eq: matcher - }; - } - }); - } - - /** - * Ensure that if an index is specified, - * the primaryKey is inside of it. - */ - if (normalizedMangoQuery.index) { - var indexAr = (0, _index.toArray)(normalizedMangoQuery.index); - if (!indexAr.includes(primaryKey)) { - indexAr.push(primaryKey); - } - normalizedMangoQuery.index = indexAr; - } - - /** - * To ensure a deterministic sorting, - * we have to ensure the primary key is always part - * of the sort query. - * Primary sorting is added as last sort parameter, - * similar to how we add the primary key to indexes that do not have it. - * - */ - if (!normalizedMangoQuery.sort) { - /** - * If no sort is given at all, - * we can assume that the user does not care about sort order at al. - * - * we cannot just use the primary key as sort parameter - * because it would likely cause the query to run over the primary key index - * which has a bad performance in most cases. - */ - if (normalizedMangoQuery.index) { - normalizedMangoQuery.sort = normalizedMangoQuery.index.map(field => { - return { - [field]: 'asc' - }; - }); - } else { - /** - * Find the index that best matches the fields with the logical operators - */ - if (schema.indexes) { - var fieldsWithLogicalOperator = new Set(); - Object.entries(normalizedMangoQuery.selector).forEach(([field, matcher]) => { - var hasLogical = false; - if (typeof matcher === 'object' && matcher !== null) { - hasLogical = !!Object.keys(matcher).find(operator => _queryPlanner.LOGICAL_OPERATORS.has(operator)); - } else { - hasLogical = true; - } - if (hasLogical) { - fieldsWithLogicalOperator.add(field); - } - }); - var currentFieldsAmount = -1; - var currentBestIndexForSort; - schema.indexes.forEach(index => { - var useIndex = (0, _index.isMaybeReadonlyArray)(index) ? 
index : [index]; - var firstWrongIndex = useIndex.findIndex(indexField => !fieldsWithLogicalOperator.has(indexField)); - if (firstWrongIndex > 0 && firstWrongIndex > currentFieldsAmount) { - currentFieldsAmount = firstWrongIndex; - currentBestIndexForSort = useIndex; - } - }); - if (currentBestIndexForSort) { - normalizedMangoQuery.sort = currentBestIndexForSort.map(field => { - return { - [field]: 'asc' - }; - }); - } - } - - /** - * Fall back to the primary key as sort order - * if no better one has been found - */ - if (!normalizedMangoQuery.sort) { - normalizedMangoQuery.sort = [{ - [primaryKey]: 'asc' - }]; - } - } - } else { - var isPrimaryInSort = normalizedMangoQuery.sort.find(p => (0, _index.firstPropertyNameOfObject)(p) === primaryKey); - if (!isPrimaryInSort) { - normalizedMangoQuery.sort = normalizedMangoQuery.sort.slice(0); - normalizedMangoQuery.sort.push({ - [primaryKey]: 'asc' - }); - } - } - return normalizedMangoQuery; -} - -/** - * Returns the sort-comparator, - * which is able to sort documents in the same way - * a query over the db would do. - */ -function getSortComparator(schema, query) { - if (!query.sort) { - throw (0, _rxError.newRxError)('SNH', { - query - }); - } - var sortParts = []; - query.sort.forEach(sortBlock => { - var key = Object.keys(sortBlock)[0]; - var direction = Object.values(sortBlock)[0]; - sortParts.push({ - key, - direction, - getValueFn: (0, _index.objectPathMonad)(key) - }); - }); - var fun = (a, b) => { - for (var i = 0; i < sortParts.length; ++i) { - var sortPart = sortParts[i]; - var valueA = sortPart.getValueFn(a); - var valueB = sortPart.getValueFn(b); - if (valueA !== valueB) { - var ret = sortPart.direction === 'asc' ? (0, _util.compare)(valueA, valueB) : (0, _util.compare)(valueB, valueA); - return ret; - } - } - }; - return fun; -} - -/** - * Returns a function - * that can be used to check if a document - * matches the query. 
- */ -function getQueryMatcher(_schema, query) { - if (!query.sort) { - throw (0, _rxError.newRxError)('SNH', { - query - }); - } - var mingoQuery = (0, _rxQueryMingo.getMingoQuery)(query.selector); - var fun = doc => { - return mingoQuery.test(doc); - }; - return fun; -} -async function runQueryUpdateFunction(rxQuery, fn) { - var docs = await rxQuery.exec(); - if (!docs) { - // only findOne() queries can return null - return null; - } - if (Array.isArray(docs)) { - return Promise.all(docs.map(doc => fn(doc))); - } else { - // via findOne() - var result = await fn(docs); - return result; - } -} -//# sourceMappingURL=rx-query-helper.js.map \ No newline at end of file diff --git a/dist/cjs/rx-query-helper.js.map b/dist/cjs/rx-query-helper.js.map deleted file mode 100644 index ba9f842f3c1..00000000000 --- a/dist/cjs/rx-query-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-query-helper.js","names":["_queryPlanner","require","_rxSchemaHelper","_index","_util","_rxError","_rxQueryMingo","normalizeMangoQuery","schema","mangoQuery","primaryKey","getPrimaryFieldOfPrimaryKey","flatClone","normalizedMangoQuery","clone","skip","selector","Object","entries","forEach","field","matcher","$eq","index","indexAr","toArray","includes","push","sort","map","indexes","fieldsWithLogicalOperator","Set","hasLogical","keys","find","operator","LOGICAL_OPERATORS","has","add","currentFieldsAmount","currentBestIndexForSort","useIndex","isMaybeReadonlyArray","firstWrongIndex","findIndex","indexField","isPrimaryInSort","p","firstPropertyNameOfObject","slice","getSortComparator","query","newRxError","sortParts","sortBlock","key","direction","values","getValueFn","objectPathMonad","fun","a","b","i","length","sortPart","valueA","valueB","ret","mingoSortComparator","getQueryMatcher","_schema","mingoQuery","getMingoQuery","doc","test","runQueryUpdateFunction","rxQuery","fn","docs","exec","Array","isArray","Promise","all","result"],"sources":["../../src/rx-query-helper.ts"],"sourcesCo
ntent":["import { LOGICAL_OPERATORS } from './query-planner.ts';\nimport { getPrimaryFieldOfPrimaryKey } from './rx-schema-helper.ts';\nimport type {\n DeepReadonly,\n DeterministicSortComparator,\n FilledMangoQuery,\n MangoQuery,\n MangoQuerySortDirection,\n QueryMatcher,\n RxDocument,\n RxDocumentData,\n RxJsonSchema,\n RxQuery\n} from './types/index.d.ts';\nimport {\n clone,\n firstPropertyNameOfObject,\n toArray,\n isMaybeReadonlyArray,\n flatClone,\n objectPathMonad,\n ObjectPathMonadFunction\n} from './plugins/utils/index.ts';\nimport {\n compare as mingoSortComparator\n} from 'mingo/util';\nimport { newRxError } from './rx-error.ts';\nimport { getMingoQuery } from './rx-query-mingo.ts';\n\n/**\n * Normalize the query to ensure we have all fields set\n * and queries that represent the same query logic are detected as equal by the caching.\n */\nexport function normalizeMangoQuery(\n schema: RxJsonSchema>,\n mangoQuery: MangoQuery\n): FilledMangoQuery {\n const primaryKey: string = getPrimaryFieldOfPrimaryKey(schema.primaryKey);\n mangoQuery = flatClone(mangoQuery);\n\n const normalizedMangoQuery: FilledMangoQuery = clone(mangoQuery) as any;\n if (typeof normalizedMangoQuery.skip !== 'number') {\n normalizedMangoQuery.skip = 0;\n }\n\n if (!normalizedMangoQuery.selector) {\n normalizedMangoQuery.selector = {};\n } else {\n normalizedMangoQuery.selector = normalizedMangoQuery.selector;\n /**\n * In mango query, it is possible to have an\n * equals comparison by directly assigning a value\n * to a property, without the '$eq' operator.\n * Like:\n * selector: {\n * foo: 'bar'\n * }\n * For normalization, we have to normalize this\n * so our checks can perform properly.\n *\n *\n * TODO this must work recursive with nested queries that\n * contain multiple selectors via $and or $or etc.\n */\n Object\n .entries(normalizedMangoQuery.selector)\n .forEach(([field, matcher]) => {\n if (typeof matcher !== 'object' || matcher === null) {\n (normalizedMangoQuery as 
any).selector[field] = {\n $eq: matcher\n };\n }\n });\n }\n\n /**\n * Ensure that if an index is specified,\n * the primaryKey is inside of it.\n */\n if (normalizedMangoQuery.index) {\n const indexAr = toArray(normalizedMangoQuery.index);\n if (!indexAr.includes(primaryKey)) {\n indexAr.push(primaryKey);\n }\n normalizedMangoQuery.index = indexAr;\n }\n\n /**\n * To ensure a deterministic sorting,\n * we have to ensure the primary key is always part\n * of the sort query.\n * Primary sorting is added as last sort parameter,\n * similar to how we add the primary key to indexes that do not have it.\n *\n */\n if (!normalizedMangoQuery.sort) {\n /**\n * If no sort is given at all,\n * we can assume that the user does not care about sort order at al.\n *\n * we cannot just use the primary key as sort parameter\n * because it would likely cause the query to run over the primary key index\n * which has a bad performance in most cases.\n */\n if (normalizedMangoQuery.index) {\n normalizedMangoQuery.sort = normalizedMangoQuery.index.map((field: string) => {\n return { [field as any]: 'asc' } as any;\n });\n } else {\n /**\n * Find the index that best matches the fields with the logical operators\n */\n if (schema.indexes) {\n const fieldsWithLogicalOperator: Set = new Set();\n Object.entries(normalizedMangoQuery.selector).forEach(([field, matcher]) => {\n let hasLogical = false;\n if (typeof matcher === 'object' && matcher !== null) {\n hasLogical = !!Object.keys(matcher).find(operator => LOGICAL_OPERATORS.has(operator));\n } else {\n hasLogical = true;\n }\n if (hasLogical) {\n fieldsWithLogicalOperator.add(field);\n }\n });\n\n\n let currentFieldsAmount = -1;\n let currentBestIndexForSort: string[] | readonly string[] | undefined;\n schema.indexes.forEach(index => {\n const useIndex = isMaybeReadonlyArray(index) ? 
index : [index];\n const firstWrongIndex = useIndex.findIndex(indexField => !fieldsWithLogicalOperator.has(indexField));\n if (\n firstWrongIndex > 0 &&\n firstWrongIndex > currentFieldsAmount\n ) {\n currentFieldsAmount = firstWrongIndex;\n currentBestIndexForSort = useIndex;\n }\n });\n if (currentBestIndexForSort) {\n normalizedMangoQuery.sort = currentBestIndexForSort.map((field: string) => {\n return { [field as any]: 'asc' } as any;\n });\n }\n\n }\n\n /**\n * Fall back to the primary key as sort order\n * if no better one has been found\n */\n if (!normalizedMangoQuery.sort) {\n normalizedMangoQuery.sort = [{ [primaryKey]: 'asc' }] as any;\n }\n }\n } else {\n const isPrimaryInSort = normalizedMangoQuery.sort\n .find(p => firstPropertyNameOfObject(p) === primaryKey);\n if (!isPrimaryInSort) {\n normalizedMangoQuery.sort = normalizedMangoQuery.sort.slice(0);\n normalizedMangoQuery.sort.push({ [primaryKey]: 'asc' } as any);\n }\n }\n\n return normalizedMangoQuery;\n}\n\n/**\n * Returns the sort-comparator,\n * which is able to sort documents in the same way\n * a query over the db would do.\n */\nexport function getSortComparator(\n schema: RxJsonSchema>,\n query: FilledMangoQuery\n): DeterministicSortComparator {\n if (!query.sort) {\n throw newRxError('SNH', { query });\n }\n const sortParts: {\n key: string;\n direction: MangoQuerySortDirection;\n getValueFn: ObjectPathMonadFunction;\n }[] = [];\n query.sort.forEach(sortBlock => {\n const key = Object.keys(sortBlock)[0];\n const direction = Object.values(sortBlock)[0];\n sortParts.push({\n key,\n direction,\n getValueFn: objectPathMonad(key)\n });\n });\n const fun: DeterministicSortComparator = (a: RxDocType, b: RxDocType) => {\n for (let i = 0; i < sortParts.length; ++i) {\n const sortPart = sortParts[i];\n const valueA = sortPart.getValueFn(a);\n const valueB = sortPart.getValueFn(b);\n if (valueA !== valueB) {\n const ret = sortPart.direction === 'asc' ? 
mingoSortComparator(valueA, valueB) : mingoSortComparator(valueB, valueA);\n return ret as any;\n }\n }\n };\n\n return fun;\n}\n\n\n/**\n * Returns a function\n * that can be used to check if a document\n * matches the query.\n */\nexport function getQueryMatcher(\n _schema: RxJsonSchema | RxJsonSchema>,\n query: FilledMangoQuery\n): QueryMatcher> {\n if (!query.sort) {\n throw newRxError('SNH', { query });\n }\n\n const mingoQuery = getMingoQuery(query.selector as any);\n const fun: QueryMatcher> = (doc: RxDocumentData | DeepReadonly>) => {\n return mingoQuery.test(doc);\n };\n return fun;\n}\n\n\nexport async function runQueryUpdateFunction(\n rxQuery: RxQuery,\n fn: (doc: RxDocument) => Promise>\n): Promise {\n const docs = await rxQuery.exec();\n if (!docs) {\n // only findOne() queries can return null\n return null as any;\n }\n if (Array.isArray(docs)) {\n return Promise.all(\n docs.map(doc => fn(doc))\n ) as any;\n } else {\n // via findOne()\n const result = await fn(docs as any);\n return result as any;\n 
}\n}\n"],"mappings":";;;;;;;;;AAAA,IAAAA,aAAA,GAAAC,OAAA;AACA,IAAAC,eAAA,GAAAD,OAAA;AAaA,IAAAE,MAAA,GAAAF,OAAA;AASA,IAAAG,KAAA,GAAAH,OAAA;AAGA,IAAAI,QAAA,GAAAJ,OAAA;AACA,IAAAK,aAAA,GAAAL,OAAA;AAEA;AACA;AACA;AACA;AACO,SAASM,mBAAmBA,CAC/BC,MAA+C,EAC/CC,UAAiC,EACN;EAC3B,IAAMC,UAAkB,GAAG,IAAAC,2CAA2B,EAACH,MAAM,CAACE,UAAU,CAAC;EACzED,UAAU,GAAG,IAAAG,gBAAS,EAACH,UAAU,CAAC;EAElC,IAAMI,oBAAiD,GAAG,IAAAC,YAAK,EAACL,UAAU,CAAQ;EAClF,IAAI,OAAOI,oBAAoB,CAACE,IAAI,KAAK,QAAQ,EAAE;IAC/CF,oBAAoB,CAACE,IAAI,GAAG,CAAC;EACjC;EAEA,IAAI,CAACF,oBAAoB,CAACG,QAAQ,EAAE;IAChCH,oBAAoB,CAACG,QAAQ,GAAG,CAAC,CAAC;EACtC,CAAC,MAAM;IACHH,oBAAoB,CAACG,QAAQ,GAAGH,oBAAoB,CAACG,QAAQ;IAC7D;AACR;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;IACQC,MAAM,CACDC,OAAO,CAACL,oBAAoB,CAACG,QAAQ,CAAC,CACtCG,OAAO,CAAC,CAAC,CAACC,KAAK,EAAEC,OAAO,CAAC,KAAK;MAC3B,IAAI,OAAOA,OAAO,KAAK,QAAQ,IAAIA,OAAO,KAAK,IAAI,EAAE;QAChDR,oBAAoB,CAASG,QAAQ,CAACI,KAAK,CAAC,GAAG;UAC5CE,GAAG,EAAED;QACT,CAAC;MACL;IACJ,CAAC,CAAC;EACV;;EAEA;AACJ;AACA;AACA;EACI,IAAIR,oBAAoB,CAACU,KAAK,EAAE;IAC5B,IAAMC,OAAO,GAAG,IAAAC,cAAO,EAACZ,oBAAoB,CAACU,KAAK,CAAC;IACnD,IAAI,CAACC,OAAO,CAACE,QAAQ,CAAChB,UAAU,CAAC,EAAE;MAC/Bc,OAAO,CAACG,IAAI,CAACjB,UAAU,CAAC;IAC5B;IACAG,oBAAoB,CAACU,KAAK,GAAGC,OAAO;EACxC;;EAEA;AACJ;AACA;AACA;AACA;AACA;AACA;AACA;EACI,IAAI,CAACX,oBAAoB,CAACe,IAAI,EAAE;IAC5B;AACR;AACA;AACA;AACA;AACA;AACA;AACA;IACQ,IAAIf,oBAAoB,CAACU,KAAK,EAAE;MAC5BV,oBAAoB,CAACe,IAAI,GAAGf,oBAAoB,CAACU,KAAK,CAACM,GAAG,CAAET,KAAa,IAAK;QAC1E,OAAO;UAAE,CAACA,KAAK,GAAU;QAAM,CAAC;MACpC,CAAC,CAAC;IACN,CAAC,MAAM;MACH;AACZ;AACA;MACY,IAAIZ,MAAM,CAACsB,OAAO,EAAE;QAChB,IAAMC,yBAAsC,GAAG,IAAIC,GAAG,CAAC,CAAC;QACxDf,MAAM,CAACC,OAAO,CAACL,oBAAoB,CAACG,QAAQ,CAAC,CAACG,OAAO,CAAC,CAAC,CAACC,KAAK,EAAEC,OAAO,CAAC,KAAK;UACxE,IAAIY,UAAU,GAAG,KAAK;UACtB,IAAI,OAAOZ,OAAO,KAAK,QAAQ,IAAIA,OAAO,KAAK,IAAI,EAAE;YACjDY,UAAU,GAAG,CAAC,CAAChB,MAAM,CAACiB,IAAI,CAACb,OAAO,CAAC,CAACc,IAAI,CAACC,QAAQ,IAAIC,+BAAiB,CAACC,GAAG,CAACF,QAAQ,CAAC,CAAC;UACzF,CAAC,MAAM;YACHH,UAAU,GAAG,
IAAI;UACrB;UACA,IAAIA,UAAU,EAAE;YACZF,yBAAyB,CAACQ,GAAG,CAACnB,KAAK,CAAC;UACxC;QACJ,CAAC,CAAC;QAGF,IAAIoB,mBAAmB,GAAG,CAAC,CAAC;QAC5B,IAAIC,uBAAiE;QACrEjC,MAAM,CAACsB,OAAO,CAACX,OAAO,CAACI,KAAK,IAAI;UAC5B,IAAMmB,QAAQ,GAAG,IAAAC,2BAAoB,EAACpB,KAAK,CAAC,GAAGA,KAAK,GAAG,CAACA,KAAK,CAAC;UAC9D,IAAMqB,eAAe,GAAGF,QAAQ,CAACG,SAAS,CAACC,UAAU,IAAI,CAACf,yBAAyB,CAACO,GAAG,CAACQ,UAAU,CAAC,CAAC;UACpG,IACIF,eAAe,GAAG,CAAC,IACnBA,eAAe,GAAGJ,mBAAmB,EACvC;YACEA,mBAAmB,GAAGI,eAAe;YACrCH,uBAAuB,GAAGC,QAAQ;UACtC;QACJ,CAAC,CAAC;QACF,IAAID,uBAAuB,EAAE;UACzB5B,oBAAoB,CAACe,IAAI,GAAGa,uBAAuB,CAACZ,GAAG,CAAET,KAAa,IAAK;YACvE,OAAO;cAAE,CAACA,KAAK,GAAU;YAAM,CAAC;UACpC,CAAC,CAAC;QACN;MAEJ;;MAEA;AACZ;AACA;AACA;MACY,IAAI,CAACP,oBAAoB,CAACe,IAAI,EAAE;QAC5Bf,oBAAoB,CAACe,IAAI,GAAG,CAAC;UAAE,CAAClB,UAAU,GAAG;QAAM,CAAC,CAAQ;MAChE;IACJ;EACJ,CAAC,MAAM;IACH,IAAMqC,eAAe,GAAGlC,oBAAoB,CAACe,IAAI,CAC5CO,IAAI,CAACa,CAAC,IAAI,IAAAC,gCAAyB,EAACD,CAAC,CAAC,KAAKtC,UAAU,CAAC;IAC3D,IAAI,CAACqC,eAAe,EAAE;MAClBlC,oBAAoB,CAACe,IAAI,GAAGf,oBAAoB,CAACe,IAAI,CAACsB,KAAK,CAAC,CAAC,CAAC;MAC9DrC,oBAAoB,CAACe,IAAI,CAACD,IAAI,CAAC;QAAE,CAACjB,UAAU,GAAG;MAAM,CAAQ,CAAC;IAClE;EACJ;EAEA,OAAOG,oBAAoB;AAC/B;;AAEA;AACA;AACA;AACA;AACA;AACO,SAASsC,iBAAiBA,CAC7B3C,MAA+C,EAC/C4C,KAAkC,EACI;EACtC,IAAI,CAACA,KAAK,CAACxB,IAAI,EAAE;IACb,MAAM,IAAAyB,mBAAU,EAAC,KAAK,EAAE;MAAED;IAAM,CAAC,CAAC;EACtC;EACA,IAAME,SAIH,GAAG,EAAE;EACRF,KAAK,CAACxB,IAAI,CAACT,OAAO,CAACoC,SAAS,IAAI;IAC5B,IAAMC,GAAG,GAAGvC,MAAM,CAACiB,IAAI,CAACqB,SAAS,CAAC,CAAC,CAAC,CAAC;IACrC,IAAME,SAAS,GAAGxC,MAAM,CAACyC,MAAM,CAACH,SAAS,CAAC,CAAC,CAAC,CAAC;IAC7CD,SAAS,CAAC3B,IAAI,CAAC;MACX6B,GAAG;MACHC,SAAS;MACTE,UAAU,EAAE,IAAAC,sBAAe,EAACJ,GAAG;IACnC,CAAC,CAAC;EACN,CAAC,CAAC;EACF,IAAMK,GAA2C,GAAGA,CAACC,CAAY,EAAEC,CAAY,KAAK;IAChF,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGV,SAAS,CAACW,MAAM,EAAE,EAAED,CAAC,EAAE;MACvC,IAAME,QAAQ,GAAGZ,SAAS,CAACU,CAAC,CAAC;MAC7B,IAAMG,MAAM,GAAGD,QAAQ,CAACP,UAAU,CAACG,CAAC,CAAC;MACrC,IAAMM,MAAM,GAAGF,QAAQ,CAACP,UAAU,CAACI,CAAC,CAAC;MACrC,IAAII,MAAM,KAA
KC,MAAM,EAAE;QACnB,IAAMC,GAAG,GAAGH,QAAQ,CAACT,SAAS,KAAK,KAAK,GAAG,IAAAa,aAAmB,EAACH,MAAM,EAAEC,MAAM,CAAC,GAAG,IAAAE,aAAmB,EAACF,MAAM,EAAED,MAAM,CAAC;QACpH,OAAOE,GAAG;MACd;IACJ;EACJ,CAAC;EAED,OAAOR,GAAG;AACd;;AAGA;AACA;AACA;AACA;AACA;AACO,SAASU,eAAeA,CAC3BC,OAA0E,EAC1EpB,KAAkC,EACK;EACvC,IAAI,CAACA,KAAK,CAACxB,IAAI,EAAE;IACb,MAAM,IAAAyB,mBAAU,EAAC,KAAK,EAAE;MAAED;IAAM,CAAC,CAAC;EACtC;EAEA,IAAMqB,UAAU,GAAG,IAAAC,2BAAa,EAACtB,KAAK,CAACpC,QAAe,CAAC;EACvD,IAAM6C,GAA4C,GAAIc,GAAwE,IAAK;IAC/H,OAAOF,UAAU,CAACG,IAAI,CAACD,GAAG,CAAC;EAC/B,CAAC;EACD,OAAOd,GAAG;AACd;AAGO,eAAegB,sBAAsBA,CACxCC,OAA0C,EAC1CC,EAAkE,EAC5C;EACtB,IAAMC,IAAI,GAAG,MAAMF,OAAO,CAACG,IAAI,CAAC,CAAC;EACjC,IAAI,CAACD,IAAI,EAAE;IACP;IACA,OAAO,IAAI;EACf;EACA,IAAIE,KAAK,CAACC,OAAO,CAACH,IAAI,CAAC,EAAE;IACrB,OAAOI,OAAO,CAACC,GAAG,CACdL,IAAI,CAACnD,GAAG,CAAC8C,GAAG,IAAII,EAAE,CAACJ,GAAG,CAAC,CAC3B,CAAC;EACL,CAAC,MAAM;IACH;IACA,IAAMW,MAAM,GAAG,MAAMP,EAAE,CAACC,IAAW,CAAC;IACpC,OAAOM,MAAM;EACjB;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/rx-query-mingo.js b/dist/cjs/rx-query-mingo.js deleted file mode 100644 index a4a8541e5cc..00000000000 --- a/dist/cjs/rx-query-mingo.js +++ /dev/null @@ -1,54 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.getMingoQuery = getMingoQuery; -var _core = require("mingo/core"); -var _query = require("mingo/query"); -var _pipeline = require("mingo/operators/pipeline"); -var _logical = require("mingo/operators/query/logical"); -var _comparison = require("mingo/operators/query/comparison"); -var _evaluation = require("mingo/operators/query/evaluation"); -var _array = require("mingo/operators/query/array"); -var _element = require("mingo/operators/query/element"); -var mingoInitDone = false; - -/** - * The MongoDB query library is huge and we do not need all the operators. 
- * If you add an operator here, make sure that you properly add a test in - * the file /test/unit/rx-storage-query-correctness.test.ts - * - * @link https://github.com/kofrasa/mingo#es6 - */ -function getMingoQuery(selector) { - if (!mingoInitDone) { - (0, _core.useOperators)(_core.OperatorType.PIPELINE, { - $sort: _pipeline.$sort, - $project: _pipeline.$project - }); - (0, _core.useOperators)(_core.OperatorType.QUERY, { - $and: _logical.$and, - $eq: _comparison.$eq, - $elemMatch: _array.$elemMatch, - $exists: _element.$exists, - $gt: _comparison.$gt, - $gte: _comparison.$gte, - $in: _comparison.$in, - $lt: _comparison.$lt, - $lte: _comparison.$lte, - $ne: _comparison.$ne, - $nin: _comparison.$nin, - $mod: _evaluation.$mod, - $nor: _logical.$nor, - $not: _logical.$not, - $or: _logical.$or, - $regex: _evaluation.$regex, - $size: _array.$size, - $type: _element.$type - }); - mingoInitDone = true; - } - return new _query.Query(selector); -} -//# sourceMappingURL=rx-query-mingo.js.map \ No newline at end of file diff --git a/dist/cjs/rx-query-mingo.js.map b/dist/cjs/rx-query-mingo.js.map deleted file mode 100644 index 3bea2049720..00000000000 --- a/dist/cjs/rx-query-mingo.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-query-mingo.js","names":["_core","require","_query","_pipeline","_logical","_comparison","_evaluation","_array","_element","mingoInitDone","getMingoQuery","selector","useOperators","OperatorType","PIPELINE","$sort","$project","QUERY","$and","$eq","$elemMatch","$exists","$gt","$gte","$in","$lt","$lte","$ne","$nin","$mod","$nor","$not","$or","$regex","$size","$type","Query"],"sources":["../../src/rx-query-mingo.ts"],"sourcesContent":["import { useOperators, OperatorType } from 'mingo/core';\nimport { Query } from 'mingo/query';\nimport type { MangoQuerySelector } from './types/index.d.ts';\nimport {\n $project,\n $sort\n} from 'mingo/operators/pipeline';\nimport {\n $and,\n $not,\n $or,\n $nor\n} from 'mingo/operators/query/logical';\nimport 
{\n $eq,\n $ne,\n $gt,\n $gte,\n $lt,\n $lte,\n $nin,\n $in\n} from 'mingo/operators/query/comparison';\nimport {\n $regex,\n $mod\n} from 'mingo/operators/query/evaluation';\nimport {\n $elemMatch,\n $size\n} from 'mingo/operators/query/array';\nimport {\n $exists,\n $type\n} from 'mingo/operators/query/element';\n\nlet mingoInitDone = false;\n\n\n/**\n * The MongoDB query library is huge and we do not need all the operators.\n * If you add an operator here, make sure that you properly add a test in\n * the file /test/unit/rx-storage-query-correctness.test.ts\n *\n * @link https://github.com/kofrasa/mingo#es6\n */\nexport function getMingoQuery(\n selector?: MangoQuerySelector\n) {\n if (!mingoInitDone) {\n useOperators(OperatorType.PIPELINE, {\n $sort,\n $project\n } as any);\n useOperators(OperatorType.QUERY, {\n $and,\n $eq,\n $elemMatch,\n $exists,\n $gt,\n $gte,\n $in,\n $lt,\n $lte,\n $ne,\n $nin,\n $mod,\n $nor,\n $not,\n $or,\n $regex,\n $size,\n $type,\n } as any);\n mingoInitDone = true;\n }\n return new Query(selector as 
any);\n}\n"],"mappings":";;;;;;AAAA,IAAAA,KAAA,GAAAC,OAAA;AACA,IAAAC,MAAA,GAAAD,OAAA;AAEA,IAAAE,SAAA,GAAAF,OAAA;AAIA,IAAAG,QAAA,GAAAH,OAAA;AAMA,IAAAI,WAAA,GAAAJ,OAAA;AAUA,IAAAK,WAAA,GAAAL,OAAA;AAIA,IAAAM,MAAA,GAAAN,OAAA;AAIA,IAAAO,QAAA,GAAAP,OAAA;AAKA,IAAIQ,aAAa,GAAG,KAAK;;AAGzB;AACA;AACA;AACA;AACA;AACA;AACA;AACO,SAASC,aAAaA,CACzBC,QAAwC,EAC1C;EACE,IAAI,CAACF,aAAa,EAAE;IAChB,IAAAG,kBAAY,EAACC,kBAAY,CAACC,QAAQ,EAAE;MAChCC,KAAK,EAALA,eAAK;MACLC,QAAQ,EAARA;IACJ,CAAQ,CAAC;IACT,IAAAJ,kBAAY,EAACC,kBAAY,CAACI,KAAK,EAAE;MAC7BC,IAAI,EAAJA,aAAI;MACJC,GAAG,EAAHA,eAAG;MACHC,UAAU,EAAVA,iBAAU;MACVC,OAAO,EAAPA,gBAAO;MACPC,GAAG,EAAHA,eAAG;MACHC,IAAI,EAAJA,gBAAI;MACJC,GAAG,EAAHA,eAAG;MACHC,GAAG,EAAHA,eAAG;MACHC,IAAI,EAAJA,gBAAI;MACJC,GAAG,EAAHA,eAAG;MACHC,IAAI,EAAJA,gBAAI;MACJC,IAAI,EAAJA,gBAAI;MACJC,IAAI,EAAJA,aAAI;MACJC,IAAI,EAAJA,aAAI;MACJC,GAAG,EAAHA,YAAG;MACHC,MAAM,EAANA,kBAAM;MACNC,KAAK,EAALA,YAAK;MACLC,KAAK,EAALA;IACJ,CAAQ,CAAC;IACT1B,aAAa,GAAG,IAAI;EACxB;EACA,OAAO,IAAI2B,YAAK,CAACzB,QAAe,CAAC;AACrC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/rx-query-single-result.js b/dist/cjs/rx-query-single-result.js deleted file mode 100644 index 40a088fc813..00000000000 --- a/dist/cjs/rx-query-single-result.js +++ /dev/null @@ -1,71 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxQuerySingleResult = void 0; -var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass")); -var _docCache = require("./doc-cache.js"); -var _index = require("./plugins/utils/index.js"); -/** - * RxDB needs the query results in multiple formats. - * Sometimes as a Map or an array with only the documentData. 
- * For better performance we work with this class - * that initializes stuff lazily so that - * we can directly work with the query results after RxQuery.exec() - */ -var RxQuerySingleResult = exports.RxQuerySingleResult = /*#__PURE__*/function () { - /** - * Time at which the current _result state was created. - * Used to determine if the result set has changed since X - * so that we do not emit the same result multiple times on subscription. - */ - - function RxQuerySingleResult(collection, - // only used internally, do not use outside, use this.docsData instead - docsDataFromStorageInstance, - // can be overwritten for count-queries - count) { - this.time = (0, _index.now)(); - this.collection = collection; - this.count = count; - this.documents = (0, _docCache.mapDocumentsDataToCacheDocs)(this.collection._docCache, docsDataFromStorageInstance); - } - - /** - * Instead of using the newResultData in the result cache, - * we directly use the objects that are stored in the RxDocument - * to ensure we do not store the same data twice and fill up the memory. - * @overwrites itself with the actual value - */ - return (0, _createClass2.default)(RxQuerySingleResult, [{ - key: "docsData", - get: function () { - return (0, _index.overwriteGetterForCaching)(this, 'docsData', this.documents.map(d => d._data)); - } - - // A key->document map, used in the event reduce optimization. 
- }, { - key: "docsDataMap", - get: function () { - var map = new Map(); - this.documents.forEach(d => { - map.set(d.primary, d._data); - }); - return (0, _index.overwriteGetterForCaching)(this, 'docsDataMap', map); - } - }, { - key: "docsMap", - get: function () { - var map = new Map(); - var documents = this.documents; - for (var i = 0; i < documents.length; i++) { - var doc = documents[i]; - map.set(doc.primary, doc); - } - return (0, _index.overwriteGetterForCaching)(this, 'docsMap', map); - } - }]); -}(); -//# sourceMappingURL=rx-query-single-result.js.map \ No newline at end of file diff --git a/dist/cjs/rx-query-single-result.js.map b/dist/cjs/rx-query-single-result.js.map deleted file mode 100644 index cedfe3a30af..00000000000 --- a/dist/cjs/rx-query-single-result.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-query-single-result.js","names":["_docCache","require","_index","RxQuerySingleResult","exports","collection","docsDataFromStorageInstance","count","time","now","documents","mapDocumentsDataToCacheDocs","_createClass2","default","key","get","overwriteGetterForCaching","map","d","_data","Map","forEach","set","primary","i","length","doc"],"sources":["../../src/rx-query-single-result.ts"],"sourcesContent":["import { mapDocumentsDataToCacheDocs } from './doc-cache.ts';\nimport { now, overwriteGetterForCaching } from './plugins/utils/index.ts';\nimport type {\n RxCollection,\n RxDocument,\n RxDocumentData\n} from './types';\n\n/**\n * RxDB needs the query results in multiple formats.\n * Sometimes as a Map or an array with only the documentData.\n * For better performance we work with this class\n * that initializes stuff lazily so that\n * we can directly work with the query results after RxQuery.exec()\n */\nexport class RxQuerySingleResult{\n /**\n * Time at which the current _result state was created.\n * Used to determine if the result set has changed since X\n * so that we do not emit the same result multiple times on subscription.\n */\n 
public readonly time = now();\n public readonly documents: RxDocument[];\n constructor(\n public readonly collection: RxCollection,\n // only used internally, do not use outside, use this.docsData instead\n docsDataFromStorageInstance: RxDocumentData[],\n // can be overwritten for count-queries\n public readonly count: number,\n ) {\n this.documents = mapDocumentsDataToCacheDocs(this.collection._docCache, docsDataFromStorageInstance);\n }\n\n\n /**\n * Instead of using the newResultData in the result cache,\n * we directly use the objects that are stored in the RxDocument\n * to ensure we do not store the same data twice and fill up the memory.\n * @overwrites itself with the actual value\n */\n get docsData(): RxDocumentData[] {\n return overwriteGetterForCaching(\n this,\n 'docsData',\n this.documents.map(d => d._data)\n );\n }\n\n\n // A key->document map, used in the event reduce optimization.\n get docsDataMap(): Map> {\n const map = new Map>();\n this.documents.forEach(d => {\n map.set(d.primary, d._data);\n });\n return overwriteGetterForCaching(\n this,\n 'docsDataMap',\n map\n );\n }\n\n get docsMap(): Map> {\n const map = new Map>();\n const documents = this.documents;\n for (let i = 0; i < documents.length; i++) {\n const doc = documents[i];\n map.set(doc.primary, doc);\n }\n return overwriteGetterForCaching(\n this,\n 'docsMap',\n map\n );\n 
}\n}\n"],"mappings":";;;;;;;;AAAA,IAAAA,SAAA,GAAAC,OAAA;AACA,IAAAC,MAAA,GAAAD,OAAA;AAOA;AACA;AACA;AACA;AACA;AACA;AACA;AANA,IAOaE,mBAAmB,GAAAC,OAAA,CAAAD,mBAAA;EAC5B;AACJ;AACA;AACA;AACA;;EAGI,SAAAA,oBACoBE,UAAmC;EACnD;EACAC,2BAAwD;EACxD;EACgBC,KAAa,EAC/B;IAAA,KARcC,IAAI,GAAG,IAAAC,UAAG,EAAC,CAAC;IAAA,KAGRJ,UAAmC,GAAnCA,UAAmC;IAAA,KAInCE,KAAa,GAAbA,KAAa;IAE7B,IAAI,CAACG,SAAS,GAAG,IAAAC,qCAA2B,EAAiB,IAAI,CAACN,UAAU,CAACL,SAAS,EAAEM,2BAA2B,CAAC;EACxH;;EAGA;AACJ;AACA;AACA;AACA;AACA;EALI,WAAAM,aAAA,CAAAC,OAAA,EAAAV,mBAAA;IAAAW,GAAA;IAAAC,GAAA,EAMA,SAAAA,CAAA,EAA4C;MACxC,OAAO,IAAAC,gCAAyB,EAC5B,IAAI,EACJ,UAAU,EACV,IAAI,CAACN,SAAS,CAACO,GAAG,CAACC,CAAC,IAAIA,CAAC,CAACC,KAAK,CACnC,CAAC;IACL;;IAGA;EAAA;IAAAL,GAAA;IAAAC,GAAA,EACA,SAAAA,CAAA,EAA0D;MACtD,IAAME,GAAG,GAAG,IAAIG,GAAG,CAAoC,CAAC;MACxD,IAAI,CAACV,SAAS,CAACW,OAAO,CAACH,CAAC,IAAI;QACxBD,GAAG,CAACK,GAAG,CAACJ,CAAC,CAACK,OAAO,EAAEL,CAAC,CAACC,KAAK,CAAC;MAC/B,CAAC,CAAC;MACF,OAAO,IAAAH,gCAAyB,EAC5B,IAAI,EACJ,aAAa,EACbC,GACJ,CAAC;IACL;EAAC;IAAAH,GAAA;IAAAC,GAAA,EAED,SAAAA,CAAA,EAAkD;MAC9C,IAAME,GAAG,GAAG,IAAIG,GAAG,CAAgC,CAAC;MACpD,IAAMV,SAAS,GAAG,IAAI,CAACA,SAAS;MAChC,KAAK,IAAIc,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGd,SAAS,CAACe,MAAM,EAAED,CAAC,EAAE,EAAE;QACvC,IAAME,GAAG,GAAGhB,SAAS,CAACc,CAAC,CAAC;QACxBP,GAAG,CAACK,GAAG,CAACI,GAAG,CAACH,OAAO,EAAEG,GAAG,CAAC;MAC7B;MACA,OAAO,IAAAV,gCAAyB,EAC5B,IAAI,EACJ,SAAS,EACTC,GACJ,CAAC;IACL;EAAC;AAAA","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/rx-query.js b/dist/cjs/rx-query.js deleted file mode 100644 index 2452ac5436e..00000000000 --- a/dist/cjs/rx-query.js +++ /dev/null @@ -1,634 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxQueryBase = void 0; -exports._getDefaultQuery = _getDefaultQuery; -exports.createRxQuery = createRxQuery; -exports.isFindOneByIdQuery = isFindOneByIdQuery; -exports.isRxQuery = isRxQuery; 
-exports.prepareQuery = prepareQuery; -exports.queryCollection = queryCollection; -exports.tunnelQueryCache = tunnelQueryCache; -var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass")); -var _rxjs = require("rxjs"); -var _operators = require("rxjs/operators"); -var _index = require("./plugins/utils/index.js"); -var _rxError = require("./rx-error.js"); -var _hooks = require("./hooks.js"); -var _eventReduce = require("./event-reduce.js"); -var _queryCache = require("./query-cache.js"); -var _rxQueryHelper = require("./rx-query-helper.js"); -var _rxQuerySingleResult = require("./rx-query-single-result.js"); -var _queryPlanner = require("./query-planner.js"); -var _queryCount = 0; -var newQueryID = function () { - return ++_queryCount; -}; -var RxQueryBase = exports.RxQueryBase = /*#__PURE__*/function () { - /** - * Some stats then are used for debugging and cache replacement policies - */ - - // used in the query-cache to determine if the RxQuery can be cleaned up. - - // used to count the subscribers to the query - - /** - * Contains the current result state - * or null if query has not run yet. 
- */ - - function RxQueryBase(op, mangoQuery, collection, - // used by some plugins - other = {}) { - this.id = newQueryID(); - this._execOverDatabaseCount = 0; - this._creationTime = (0, _index.now)(); - this._lastEnsureEqual = 0; - this.uncached = false; - this.refCount$ = new _rxjs.BehaviorSubject(null); - this._result = null; - this._latestChangeEvent = -1; - this._lastExecStart = 0; - this._lastExecEnd = 0; - this._ensureEqualQueue = _index.PROMISE_RESOLVE_FALSE; - this.op = op; - this.mangoQuery = mangoQuery; - this.collection = collection; - this.other = other; - if (!mangoQuery) { - this.mangoQuery = _getDefaultQuery(); - } - this.isFindOneByIdQuery = isFindOneByIdQuery(this.collection.schema.primaryPath, mangoQuery); - } - var _proto = RxQueryBase.prototype; - /** - * Returns an observable that emits the results - * This should behave like an rxjs-BehaviorSubject which means: - * - Emit the current result-set on subscribe - * - Emit the new result-set when an RxChangeEvent comes in - * - Do not emit anything before the first result-set was created (no null) - */ - /** - * set the new result-data as result-docs of the query - * @param newResultData json-docs that were received from the storage - */ - _proto._setResultData = function _setResultData(newResultData) { - if (typeof newResultData === 'number') { - this._result = new _rxQuerySingleResult.RxQuerySingleResult(this.collection, [], newResultData); - return; - } else if (newResultData instanceof Map) { - newResultData = Array.from(newResultData.values()); - } - var newQueryResult = new _rxQuerySingleResult.RxQuerySingleResult(this.collection, newResultData, newResultData.length); - this._result = newQueryResult; - } - - /** - * executes the query on the database - * @return results-array with document-data - */; - _proto._execOverDatabase = async function _execOverDatabase() { - this._execOverDatabaseCount = this._execOverDatabaseCount + 1; - this._lastExecStart = (0, _index.now)(); - if (this.op === 
'count') { - var preparedQuery = this.getPreparedQuery(); - var result = await this.collection.storageInstance.count(preparedQuery); - if (result.mode === 'slow' && !this.collection.database.allowSlowCount) { - throw (0, _rxError.newRxError)('QU14', { - collection: this.collection, - queryObj: this.mangoQuery - }); - } else { - return result.count; - } - } - if (this.op === 'findByIds') { - var ids = (0, _index.ensureNotFalsy)(this.mangoQuery.selector)[this.collection.schema.primaryPath].$in; - var ret = new Map(); - var mustBeQueried = []; - // first try to fill from docCache - ids.forEach(id => { - var docData = this.collection._docCache.getLatestDocumentDataIfExists(id); - if (docData) { - if (!docData._deleted) { - var doc = this.collection._docCache.getCachedRxDocument(docData); - ret.set(id, doc); - } - } else { - mustBeQueried.push(id); - } - }); - // everything which was not in docCache must be fetched from the storage - if (mustBeQueried.length > 0) { - var docs = await this.collection.storageInstance.findDocumentsById(mustBeQueried, false); - docs.forEach(docData => { - var doc = this.collection._docCache.getCachedRxDocument(docData); - ret.set(doc.primary, doc); - }); - } - return ret; - } - var docsPromise = queryCollection(this); - return docsPromise.then(docs => { - this._lastExecEnd = (0, _index.now)(); - return docs; - }); - } - - /** - * Execute the query - * To have an easier implementations, - * just subscribe and use the first result - */; - _proto.exec = function exec(throwIfMissing) { - if (throwIfMissing && this.op !== 'findOne') { - throw (0, _rxError.newRxError)('QU9', { - collection: this.collection.name, - query: this.mangoQuery, - op: this.op - }); - } - - /** - * run _ensureEqual() here, - * this will make sure that errors in the query which throw inside of the RxStorage, - * will be thrown at this execution context and not in the background. 
- */ - return _ensureEqual(this).then(() => (0, _rxjs.firstValueFrom)(this.$)).then(result => { - if (!result && throwIfMissing) { - throw (0, _rxError.newRxError)('QU10', { - collection: this.collection.name, - query: this.mangoQuery, - op: this.op - }); - } else { - return result; - } - }); - } - - /** - * cached call to get the queryMatcher - * @overwrites itself with the actual value - */; - /** - * returns a string that is used for equal-comparisons - * @overwrites itself with the actual value - */ - _proto.toString = function toString() { - var stringObj = (0, _index.sortObject)({ - op: this.op, - query: this.mangoQuery, - other: this.other - }, true); - var value = JSON.stringify(stringObj); - this.toString = () => value; - return value; - } - - /** - * returns the prepared query - * which can be send to the storage instance to query for documents. - * @overwrites itself with the actual value. - */; - _proto.getPreparedQuery = function getPreparedQuery() { - var hookInput = { - rxQuery: this, - // can be mutated by the hooks so we have to deep clone first. 
- mangoQuery: (0, _rxQueryHelper.normalizeMangoQuery)(this.collection.schema.jsonSchema, this.mangoQuery) - }; - hookInput.mangoQuery.selector._deleted = { - $eq: false - }; - if (hookInput.mangoQuery.index) { - hookInput.mangoQuery.index.unshift('_deleted'); - } - (0, _hooks.runPluginHooks)('prePrepareQuery', hookInput); - var value = prepareQuery(this.collection.schema.jsonSchema, hookInput.mangoQuery); - this.getPreparedQuery = () => value; - return value; - } - - /** - * returns true if the document matches the query, - * does not use the 'skip' and 'limit' - */; - _proto.doesDocumentDataMatch = function doesDocumentDataMatch(docData) { - // if doc is deleted, it cannot match - if (docData._deleted) { - return false; - } - return this.queryMatcher(docData); - } - - /** - * deletes all found documents - * @return promise with deleted documents - */; - _proto.remove = function remove() { - return this.exec().then(docs => { - if (Array.isArray(docs)) { - // TODO use a bulk operation instead of running .remove() on each document - return Promise.all(docs.map(doc => doc.remove())); - } else { - return docs.remove(); - } - }); - }; - _proto.incrementalRemove = function incrementalRemove() { - return (0, _rxQueryHelper.runQueryUpdateFunction)(this.asRxQuery, doc => doc.incrementalRemove()); - } - - /** - * helper function to transform RxQueryBase to RxQuery type - */; - /** - * updates all found documents - * @overwritten by plugin (optional) - */ - _proto.update = function update(_updateObj) { - throw (0, _index.pluginMissing)('update'); - }; - _proto.patch = function patch(_patch) { - return (0, _rxQueryHelper.runQueryUpdateFunction)(this.asRxQuery, doc => doc.patch(_patch)); - }; - _proto.incrementalPatch = function incrementalPatch(patch) { - return (0, _rxQueryHelper.runQueryUpdateFunction)(this.asRxQuery, doc => doc.incrementalPatch(patch)); - }; - _proto.modify = function modify(mutationFunction) { - return (0, 
_rxQueryHelper.runQueryUpdateFunction)(this.asRxQuery, doc => doc.modify(mutationFunction)); - }; - _proto.incrementalModify = function incrementalModify(mutationFunction) { - return (0, _rxQueryHelper.runQueryUpdateFunction)(this.asRxQuery, doc => doc.incrementalModify(mutationFunction)); - } - - // we only set some methods of query-builder here - // because the others depend on these ones - ; - _proto.where = function where(_queryObj) { - throw (0, _index.pluginMissing)('query-builder'); - }; - _proto.sort = function sort(_params) { - throw (0, _index.pluginMissing)('query-builder'); - }; - _proto.skip = function skip(_amount) { - throw (0, _index.pluginMissing)('query-builder'); - }; - _proto.limit = function limit(_amount) { - throw (0, _index.pluginMissing)('query-builder'); - }; - return (0, _createClass2.default)(RxQueryBase, [{ - key: "$", - get: function () { - if (!this._$) { - var results$ = this.collection.$.pipe( - /** - * Performance shortcut. - * Changes to local documents are not relevant for the query. - */ - (0, _operators.filter)(changeEvent => !changeEvent.isLocal), - /** - * Start once to ensure the querying also starts - * when there where no changes. - */ - (0, _operators.startWith)(null), - // ensure query results are up to date. - (0, _operators.mergeMap)(() => _ensureEqual(this)), - // use the current result set, written by _ensureEqual(). - (0, _operators.map)(() => this._result), - // do not run stuff above for each new subscriber, only once. - (0, _operators.shareReplay)(_index.RXJS_SHARE_REPLAY_DEFAULTS), - // do not proceed if result set has not changed. 
- (0, _operators.distinctUntilChanged)((prev, curr) => { - if (prev && prev.time === (0, _index.ensureNotFalsy)(curr).time) { - return true; - } else { - return false; - } - }), (0, _operators.filter)(result => !!result), - /** - * Map the result set to a single RxDocument or an array, - * depending on query type - */ - (0, _operators.map)(result => { - var useResult = (0, _index.ensureNotFalsy)(result); - if (this.op === 'count') { - return useResult.count; - } else if (this.op === 'findOne') { - // findOne()-queries emit RxDocument or null - return useResult.documents.length === 0 ? null : useResult.documents[0]; - } else if (this.op === 'findByIds') { - return useResult.docsMap; - } else { - // find()-queries emit RxDocument[] - // Flat copy the array so it won't matter if the user modifies it. - return useResult.documents.slice(0); - } - })); - this._$ = (0, _rxjs.merge)(results$, - /** - * Also add the refCount$ to the query observable - * to allow us to count the amount of subscribers. 
- */ - this.refCount$.pipe((0, _operators.filter)(() => false))); - } - return this._$; - } - }, { - key: "$$", - get: function () { - var reactivity = this.collection.database.getReactivityFactory(); - return reactivity.fromObservable(this.$, undefined, this.collection.database); - } - - // stores the changeEvent-number of the last handled change-event - - // time stamps on when the last full exec over the database has run - // used to properly handle events that happen while the find-query is running - - /** - * ensures that the exec-runs - * are not run in parallel - */ - }, { - key: "queryMatcher", - get: function () { - var schema = this.collection.schema.jsonSchema; - var normalizedQuery = (0, _rxQueryHelper.normalizeMangoQuery)(this.collection.schema.jsonSchema, this.mangoQuery); - return (0, _index.overwriteGetterForCaching)(this, 'queryMatcher', (0, _rxQueryHelper.getQueryMatcher)(schema, normalizedQuery)); - } - }, { - key: "asRxQuery", - get: function () { - return this; - } - }]); -}(); -function _getDefaultQuery() { - return { - selector: {} - }; -} - -/** - * run this query through the QueryCache - */ -function tunnelQueryCache(rxQuery) { - return rxQuery.collection._queryCache.getByQuery(rxQuery); -} -function createRxQuery(op, queryObj, collection, other) { - (0, _hooks.runPluginHooks)('preCreateRxQuery', { - op, - queryObj, - collection, - other - }); - var ret = new RxQueryBase(op, queryObj, collection, other); - - // ensure when created with same params, only one is created - ret = tunnelQueryCache(ret); - (0, _queryCache.triggerCacheReplacement)(collection); - return ret; -} - -/** - * Check if the current results-state is in sync with the database - * which means that no write event happened since the last run. 
- * @return false if not which means it should re-execute - */ -function _isResultsInSync(rxQuery) { - var currentLatestEventNumber = rxQuery.asRxQuery.collection._changeEventBuffer.counter; - if (rxQuery._latestChangeEvent >= currentLatestEventNumber) { - return true; - } else { - return false; - } -} - -/** - * wraps __ensureEqual() - * to ensure it does not run in parallel - * @return true if has changed, false if not - */ -function _ensureEqual(rxQuery) { - // Optimisation shortcut - if (rxQuery.collection.database.destroyed || _isResultsInSync(rxQuery)) { - return _index.PROMISE_RESOLVE_FALSE; - } - rxQuery._ensureEqualQueue = rxQuery._ensureEqualQueue.then(() => __ensureEqual(rxQuery)); - return rxQuery._ensureEqualQueue; -} - -/** - * ensures that the results of this query is equal to the results which a query over the database would give - * @return true if results have changed - */ -function __ensureEqual(rxQuery) { - rxQuery._lastEnsureEqual = (0, _index.now)(); - - /** - * Optimisation shortcuts - */ - if ( - // db is closed - rxQuery.collection.database.destroyed || - // nothing happened since last run - _isResultsInSync(rxQuery)) { - return _index.PROMISE_RESOLVE_FALSE; - } - var ret = false; - var mustReExec = false; // if this becomes true, a whole execution over the database is made - if (rxQuery._latestChangeEvent === -1) { - // have not executed yet -> must run - mustReExec = true; - } - - /** - * try to use EventReduce to calculate the new results - */ - if (!mustReExec) { - var missedChangeEvents = rxQuery.asRxQuery.collection._changeEventBuffer.getFrom(rxQuery._latestChangeEvent + 1); - if (missedChangeEvents === null) { - // changeEventBuffer is of bounds -> we must re-execute over the database - mustReExec = true; - } else { - rxQuery._latestChangeEvent = rxQuery.asRxQuery.collection._changeEventBuffer.counter; - var runChangeEvents = rxQuery.asRxQuery.collection._changeEventBuffer.reduceByLastOfDoc(missedChangeEvents); - if (rxQuery.op === 
'count') { - // 'count' query - var previousCount = (0, _index.ensureNotFalsy)(rxQuery._result).count; - var newCount = previousCount; - runChangeEvents.forEach(cE => { - var didMatchBefore = cE.previousDocumentData && rxQuery.doesDocumentDataMatch(cE.previousDocumentData); - var doesMatchNow = rxQuery.doesDocumentDataMatch(cE.documentData); - if (!didMatchBefore && doesMatchNow) { - newCount++; - } - if (didMatchBefore && !doesMatchNow) { - newCount--; - } - }); - if (newCount !== previousCount) { - ret = true; // true because results changed - rxQuery._setResultData(newCount); - } - } else { - // 'find' or 'findOne' query - var eventReduceResult = (0, _eventReduce.calculateNewResults)(rxQuery, runChangeEvents); - if (eventReduceResult.runFullQueryAgain) { - // could not calculate the new results, execute must be done - mustReExec = true; - } else if (eventReduceResult.changed) { - // we got the new results, we do not have to re-execute, mustReExec stays false - ret = true; // true because results changed - rxQuery._setResultData(eventReduceResult.newResults); - } - } - } - } - - // oh no we have to re-execute the whole query over the database - if (mustReExec) { - return rxQuery._execOverDatabase().then(newResultData => { - /** - * The RxStorage is defined to always first emit events and then return - * on bulkWrite() calls. So here we have to use the counter AFTER the execOverDatabase() - * has been run, not the one from before. - */ - rxQuery._latestChangeEvent = rxQuery.collection._changeEventBuffer.counter; - - // A count query needs a different has-changed check. 
- if (typeof newResultData === 'number') { - if (!rxQuery._result || newResultData !== rxQuery._result.count) { - ret = true; - rxQuery._setResultData(newResultData); - } - return ret; - } - if (!rxQuery._result || !(0, _index.areRxDocumentArraysEqual)(rxQuery.collection.schema.primaryPath, newResultData, rxQuery._result.docsData)) { - ret = true; // true because results changed - rxQuery._setResultData(newResultData); - } - return ret; - }); - } - return Promise.resolve(ret); // true if results have changed -} - -/** - * @returns a format of the query that can be used with the storage - * when calling RxStorageInstance().query() - */ -function prepareQuery(schema, mutateableQuery) { - if (!mutateableQuery.sort) { - throw (0, _rxError.newRxError)('SNH', { - query: mutateableQuery - }); - } - - /** - * Store the query plan together with the - * prepared query to save performance. - */ - var queryPlan = (0, _queryPlanner.getQueryPlan)(schema, mutateableQuery); - return { - query: mutateableQuery, - queryPlan - }; -} - -/** - * Runs the query over the storage instance - * of the collection. - * Does some optimizations to ensure findById is used - * when specific queries are used. - */ -async function queryCollection(rxQuery) { - var docs = []; - var collection = rxQuery.collection; - - /** - * Optimizations shortcut. 
- * If query is find-one-document-by-id, - * then we do not have to use the slow query() method - * but instead can use findDocumentsById() - */ - if (rxQuery.isFindOneByIdQuery) { - if (Array.isArray(rxQuery.isFindOneByIdQuery)) { - var docIds = rxQuery.isFindOneByIdQuery; - docIds = docIds.filter(docId => { - // first try to fill from docCache - var docData = rxQuery.collection._docCache.getLatestDocumentDataIfExists(docId); - if (docData) { - if (!docData._deleted) { - docs.push(docData); - } - return false; - } else { - return true; - } - }); - // otherwise get from storage - if (docIds.length > 0) { - var docsFromStorage = await collection.storageInstance.findDocumentsById(docIds, false); - (0, _index.appendToArray)(docs, docsFromStorage); - } - } else { - var docId = rxQuery.isFindOneByIdQuery; - - // first try to fill from docCache - var docData = rxQuery.collection._docCache.getLatestDocumentDataIfExists(docId); - if (!docData) { - // otherwise get from storage - var fromStorageList = await collection.storageInstance.findDocumentsById([docId], false); - if (fromStorageList[0]) { - docData = fromStorageList[0]; - } - } - if (docData && !docData._deleted) { - docs.push(docData); - } - } - } else { - var preparedQuery = rxQuery.getPreparedQuery(); - var queryResult = await collection.storageInstance.query(preparedQuery); - docs = queryResult.documents; - } - return docs; -} - -/** - * Returns true if the given query - * selects exactly one document by its id. - * Used to optimize performance because these kind of - * queries do not have to run over an index and can use get-by-id instead. - * Returns false if no query of that kind. - * Returns the document id otherwise. 
- */ -function isFindOneByIdQuery(primaryPath, query) { - // must have exactly one operator which must be $eq || $in - if (!query.skip && query.selector && Object.keys(query.selector).length === 1 && query.selector[primaryPath]) { - var value = query.selector[primaryPath]; - if (typeof value === 'string') { - return value; - } else if (Object.keys(value).length === 1 && typeof value.$eq === 'string') { - return value.$eq; - } - - // same with $in string arrays - if (Object.keys(value).length === 1 && Array.isArray(value.$eq) && - // must only contain strings - !value.$eq.find(r => typeof r !== 'string')) { - return value.$eq; - } - } - return false; -} -function isRxQuery(obj) { - return obj instanceof RxQueryBase; -} -//# sourceMappingURL=rx-query.js.map \ No newline at end of file diff --git a/dist/cjs/rx-query.js.map b/dist/cjs/rx-query.js.map deleted file mode 100644 index 661f708900f..00000000000 --- a/dist/cjs/rx-query.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-query.js","names":["_rxjs","require","_operators","_index","_rxError","_hooks","_eventReduce","_queryCache","_rxQueryHelper","_rxQuerySingleResult","_queryPlanner","_queryCount","newQueryID","RxQueryBase","exports","op","mangoQuery","collection","other","id","_execOverDatabaseCount","_creationTime","now","_lastEnsureEqual","uncached","refCount$","BehaviorSubject","_result","_latestChangeEvent","_lastExecStart","_lastExecEnd","_ensureEqualQueue","PROMISE_RESOLVE_FALSE","_getDefaultQuery","isFindOneByIdQuery","schema","primaryPath","_proto","prototype","_setResultData","newResultData","RxQuerySingleResult","Map","Array","from","values","newQueryResult","length","_execOverDatabase","preparedQuery","getPreparedQuery","result","storageInstance","count","mode","database","allowSlowCount","newRxError","queryObj","ids","ensureNotFalsy","selector","$in","ret","mustBeQueried","forEach","docData","_docCache","getLatestDocumentDataIfExists","_deleted","doc","getCachedRxDocument","set","push","docs"
,"findDocumentsById","primary","docsPromise","queryCollection","then","exec","throwIfMissing","name","query","_ensureEqual","firstValueFrom","$","toString","stringObj","sortObject","value","JSON","stringify","hookInput","rxQuery","normalizeMangoQuery","jsonSchema","$eq","index","unshift","runPluginHooks","prepareQuery","doesDocumentDataMatch","queryMatcher","remove","isArray","Promise","all","map","incrementalRemove","runQueryUpdateFunction","asRxQuery","update","_updateObj","pluginMissing","patch","incrementalPatch","modify","mutationFunction","incrementalModify","where","_queryObj","sort","_params","skip","_amount","limit","_createClass2","default","key","get","_$","results$","pipe","filter","changeEvent","isLocal","startWith","mergeMap","shareReplay","RXJS_SHARE_REPLAY_DEFAULTS","distinctUntilChanged","prev","curr","time","useResult","documents","docsMap","slice","merge","reactivity","getReactivityFactory","fromObservable","undefined","normalizedQuery","overwriteGetterForCaching","getQueryMatcher","tunnelQueryCache","getByQuery","createRxQuery","triggerCacheReplacement","_isResultsInSync","currentLatestEventNumber","_changeEventBuffer","counter","destroyed","__ensureEqual","mustReExec","missedChangeEvents","getFrom","runChangeEvents","reduceByLastOfDoc","previousCount","newCount","cE","didMatchBefore","previousDocumentData","doesMatchNow","documentData","eventReduceResult","calculateNewResults","runFullQueryAgain","changed","newResults","areRxDocumentArraysEqual","docsData","resolve","mutateableQuery","queryPlan","getQueryPlan","docIds","docId","docsFromStorage","appendToArray","fromStorageList","queryResult","Object","keys","find","r","isRxQuery","obj"],"sources":["../../src/rx-query.ts"],"sourcesContent":["import {\n BehaviorSubject,\n firstValueFrom,\n Observable,\n merge\n} from 'rxjs';\nimport {\n mergeMap,\n filter,\n map,\n startWith,\n distinctUntilChanged,\n shareReplay\n} from 'rxjs/operators';\nimport {\n sortObject,\n pluginMissing,\n 
overwriteGetterForCaching,\n now,\n PROMISE_RESOLVE_FALSE,\n RXJS_SHARE_REPLAY_DEFAULTS,\n ensureNotFalsy,\n areRxDocumentArraysEqual,\n appendToArray\n} from './plugins/utils/index.ts';\nimport {\n newRxError\n} from './rx-error.ts';\nimport {\n runPluginHooks\n} from './hooks.ts';\nimport type {\n RxCollection,\n RxDocument,\n RxQueryOP,\n RxQuery,\n MangoQuery,\n MangoQuerySortPart,\n MangoQuerySelector,\n PreparedQuery,\n RxChangeEvent,\n RxDocumentWriteData,\n RxDocumentData,\n QueryMatcher,\n RxJsonSchema,\n FilledMangoQuery,\n ModifyFunction\n} from './types/index.d.ts';\nimport { calculateNewResults } from './event-reduce.ts';\nimport { triggerCacheReplacement } from './query-cache.ts';\nimport { getQueryMatcher, normalizeMangoQuery, runQueryUpdateFunction } from './rx-query-helper.ts';\nimport { RxQuerySingleResult } from './rx-query-single-result.ts';\nimport { getQueryPlan } from './query-planner.ts';\n\nlet _queryCount = 0;\nconst newQueryID = function (): number {\n return ++_queryCount;\n};\n\nexport class RxQueryBase<\n RxDocType,\n RxQueryResult,\n OrmMethods = {},\n Reactivity = unknown,\n> {\n\n public id: number = newQueryID();\n\n /**\n * Some stats then are used for debugging and cache replacement policies\n */\n public _execOverDatabaseCount: number = 0;\n public _creationTime = now();\n\n // used in the query-cache to determine if the RxQuery can be cleaned up.\n public _lastEnsureEqual = 0;\n\n public uncached = false;\n\n // used to count the subscribers to the query\n public refCount$ = new BehaviorSubject(null);\n\n public isFindOneByIdQuery: false | string | string[];\n\n\n /**\n * Contains the current result state\n * or null if query has not run yet.\n */\n public _result: RxQuerySingleResult | null = null;\n\n\n constructor(\n public op: RxQueryOP,\n public mangoQuery: Readonly>,\n public collection: RxCollection,\n // used by some plugins\n public other: any = {}\n ) {\n if (!mangoQuery) {\n this.mangoQuery = _getDefaultQuery();\n 
}\n\n this.isFindOneByIdQuery = isFindOneByIdQuery(\n this.collection.schema.primaryPath as string,\n mangoQuery\n );\n }\n get $(): BehaviorSubject {\n if (!this._$) {\n\n const results$ = this.collection.$.pipe(\n /**\n * Performance shortcut.\n * Changes to local documents are not relevant for the query.\n */\n filter(changeEvent => !changeEvent.isLocal),\n /**\n * Start once to ensure the querying also starts\n * when there where no changes.\n */\n startWith(null),\n // ensure query results are up to date.\n mergeMap(() => _ensureEqual(this as any)),\n // use the current result set, written by _ensureEqual().\n map(() => this._result),\n // do not run stuff above for each new subscriber, only once.\n shareReplay(RXJS_SHARE_REPLAY_DEFAULTS),\n // do not proceed if result set has not changed.\n distinctUntilChanged((prev, curr) => {\n if (prev && prev.time === ensureNotFalsy(curr).time) {\n return true;\n } else {\n return false;\n }\n }),\n filter(result => !!result),\n /**\n * Map the result set to a single RxDocument or an array,\n * depending on query type\n */\n map((result) => {\n const useResult = ensureNotFalsy(result);\n if (this.op === 'count') {\n return useResult.count;\n } else if (this.op === 'findOne') {\n // findOne()-queries emit RxDocument or null\n return useResult.documents.length === 0 ? 
null : useResult.documents[0];\n } else if (this.op === 'findByIds') {\n return useResult.docsMap;\n } else {\n // find()-queries emit RxDocument[]\n // Flat copy the array so it won't matter if the user modifies it.\n return useResult.documents.slice(0);\n }\n })\n );\n\n this._$ = merge(\n results$,\n /**\n * Also add the refCount$ to the query observable\n * to allow us to count the amount of subscribers.\n */\n this.refCount$.pipe(\n filter(() => false)\n )\n );\n }\n return this._$ as any;\n }\n\n get $$(): Reactivity {\n const reactivity = this.collection.database.getReactivityFactory();\n return reactivity.fromObservable(\n this.$,\n undefined,\n this.collection.database\n );\n }\n\n // stores the changeEvent-number of the last handled change-event\n public _latestChangeEvent: -1 | number = -1;\n\n // time stamps on when the last full exec over the database has run\n // used to properly handle events that happen while the find-query is running\n public _lastExecStart: number = 0;\n public _lastExecEnd: number = 0;\n\n /**\n * ensures that the exec-runs\n * are not run in parallel\n */\n public _ensureEqualQueue: Promise = PROMISE_RESOLVE_FALSE;\n\n /**\n * Returns an observable that emits the results\n * This should behave like an rxjs-BehaviorSubject which means:\n * - Emit the current result-set on subscribe\n * - Emit the new result-set when an RxChangeEvent comes in\n * - Do not emit anything before the first result-set was created (no null)\n */\n public _$?: Observable;\n\n /**\n * set the new result-data as result-docs of the query\n * @param newResultData json-docs that were received from the storage\n */\n _setResultData(newResultData: RxDocumentData[] | number | Map>): void {\n if (typeof newResultData === 'number') {\n this._result = new RxQuerySingleResult(\n this.collection,\n [],\n newResultData\n );\n return;\n } else if (newResultData instanceof Map) {\n newResultData = Array.from((newResultData as Map>).values());\n }\n\n const 
newQueryResult = new RxQuerySingleResult(\n this.collection,\n newResultData,\n newResultData.length\n );\n this._result = newQueryResult;\n }\n\n /**\n * executes the query on the database\n * @return results-array with document-data\n */\n async _execOverDatabase(): Promise[] | number> {\n this._execOverDatabaseCount = this._execOverDatabaseCount + 1;\n this._lastExecStart = now();\n\n\n if (this.op === 'count') {\n const preparedQuery = this.getPreparedQuery();\n const result = await this.collection.storageInstance.count(preparedQuery);\n if (result.mode === 'slow' && !this.collection.database.allowSlowCount) {\n throw newRxError('QU14', {\n collection: this.collection,\n queryObj: this.mangoQuery\n });\n } else {\n return result.count;\n }\n }\n\n if (this.op === 'findByIds') {\n const ids: string[] = ensureNotFalsy(this.mangoQuery.selector as any)[this.collection.schema.primaryPath].$in;\n const ret = new Map>();\n const mustBeQueried: string[] = [];\n // first try to fill from docCache\n ids.forEach(id => {\n const docData = this.collection._docCache.getLatestDocumentDataIfExists(id);\n if (docData) {\n if (!docData._deleted) {\n const doc = this.collection._docCache.getCachedRxDocument(docData);\n ret.set(id, doc);\n }\n } else {\n mustBeQueried.push(id);\n }\n });\n // everything which was not in docCache must be fetched from the storage\n if (mustBeQueried.length > 0) {\n const docs = await this.collection.storageInstance.findDocumentsById(mustBeQueried, false);\n docs.forEach(docData => {\n const doc = this.collection._docCache.getCachedRxDocument(docData);\n ret.set(doc.primary, doc);\n });\n }\n return ret as any;\n }\n\n\n const docsPromise = queryCollection(this as any);\n return docsPromise.then(docs => {\n this._lastExecEnd = now();\n return docs;\n });\n }\n\n /**\n * Execute the query\n * To have an easier implementations,\n * just subscribe and use the first result\n */\n public exec(throwIfMissing: true): Promise>;\n public exec(): Promise;\n 
public exec(throwIfMissing?: boolean): Promise {\n if (throwIfMissing && this.op !== 'findOne') {\n throw newRxError('QU9', {\n collection: this.collection.name,\n query: this.mangoQuery,\n op: this.op\n });\n }\n\n\n /**\n * run _ensureEqual() here,\n * this will make sure that errors in the query which throw inside of the RxStorage,\n * will be thrown at this execution context and not in the background.\n */\n return _ensureEqual(this as any)\n .then(() => firstValueFrom(this.$))\n .then(result => {\n if (!result && throwIfMissing) {\n throw newRxError('QU10', {\n collection: this.collection.name,\n query: this.mangoQuery,\n op: this.op\n });\n } else {\n return result;\n }\n });\n }\n\n\n\n /**\n * cached call to get the queryMatcher\n * @overwrites itself with the actual value\n */\n get queryMatcher(): QueryMatcher> {\n const schema = this.collection.schema.jsonSchema;\n const normalizedQuery = normalizeMangoQuery(\n this.collection.schema.jsonSchema,\n this.mangoQuery\n );\n return overwriteGetterForCaching(\n this,\n 'queryMatcher',\n getQueryMatcher(\n schema,\n normalizedQuery\n ) as any\n );\n }\n\n /**\n * returns a string that is used for equal-comparisons\n * @overwrites itself with the actual value\n */\n toString(): string {\n const stringObj = sortObject({\n op: this.op,\n query: this.mangoQuery,\n other: this.other\n }, true);\n const value = JSON.stringify(stringObj);\n this.toString = () => value;\n return value;\n }\n\n /**\n * returns the prepared query\n * which can be send to the storage instance to query for documents.\n * @overwrites itself with the actual value.\n */\n getPreparedQuery(): PreparedQuery {\n const hookInput = {\n rxQuery: this,\n // can be mutated by the hooks so we have to deep clone first.\n mangoQuery: normalizeMangoQuery(\n this.collection.schema.jsonSchema,\n this.mangoQuery\n )\n };\n (hookInput.mangoQuery.selector as any)._deleted = { $eq: false };\n if (hookInput.mangoQuery.index) {\n 
hookInput.mangoQuery.index.unshift('_deleted');\n }\n runPluginHooks('prePrepareQuery', hookInput);\n\n const value = prepareQuery(\n this.collection.schema.jsonSchema,\n hookInput.mangoQuery as any\n );\n\n this.getPreparedQuery = () => value;\n return value;\n }\n\n /**\n * returns true if the document matches the query,\n * does not use the 'skip' and 'limit'\n */\n doesDocumentDataMatch(docData: RxDocType | any): boolean {\n // if doc is deleted, it cannot match\n if (docData._deleted) {\n return false;\n }\n\n return this.queryMatcher(docData);\n }\n\n /**\n * deletes all found documents\n * @return promise with deleted documents\n */\n remove(): Promise {\n return this\n .exec()\n .then(docs => {\n if (Array.isArray(docs)) {\n // TODO use a bulk operation instead of running .remove() on each document\n return Promise.all(docs.map(doc => doc.remove()));\n } else {\n return (docs as any).remove();\n }\n });\n }\n incrementalRemove(): Promise {\n return runQueryUpdateFunction(\n this.asRxQuery,\n (doc) => doc.incrementalRemove(),\n );\n }\n\n\n /**\n * helper function to transform RxQueryBase to RxQuery type\n */\n get asRxQuery(): RxQuery {\n return this as any;\n }\n\n /**\n * updates all found documents\n * @overwritten by plugin (optional)\n */\n update(_updateObj: any): Promise {\n throw pluginMissing('update');\n }\n\n patch(patch: Partial): Promise {\n return runQueryUpdateFunction(\n this.asRxQuery,\n (doc) => doc.patch(patch),\n );\n }\n incrementalPatch(patch: Partial): Promise {\n return runQueryUpdateFunction(\n this.asRxQuery,\n (doc) => doc.incrementalPatch(patch),\n );\n }\n modify(mutationFunction: ModifyFunction): Promise {\n return runQueryUpdateFunction(\n this.asRxQuery,\n (doc) => doc.modify(mutationFunction),\n );\n }\n incrementalModify(mutationFunction: ModifyFunction): Promise {\n return runQueryUpdateFunction(\n this.asRxQuery,\n (doc) => doc.incrementalModify(mutationFunction),\n );\n }\n\n\n // we only set some methods of 
query-builder here\n // because the others depend on these ones\n where(_queryObj: MangoQuerySelector | keyof RxDocType | string): RxQuery {\n throw pluginMissing('query-builder');\n }\n sort(_params: string | MangoQuerySortPart): RxQuery {\n throw pluginMissing('query-builder');\n }\n skip(_amount: number | null): RxQuery {\n throw pluginMissing('query-builder');\n }\n limit(_amount: number | null): RxQuery {\n throw pluginMissing('query-builder');\n }\n}\n\nexport function _getDefaultQuery(): MangoQuery {\n return {\n selector: {}\n };\n}\n\n/**\n * run this query through the QueryCache\n */\nexport function tunnelQueryCache(\n rxQuery: RxQueryBase\n): RxQuery {\n return rxQuery.collection._queryCache.getByQuery(rxQuery as any);\n}\n\nexport function createRxQuery(\n op: RxQueryOP,\n queryObj: MangoQuery,\n collection: RxCollection,\n other?: any\n) {\n runPluginHooks('preCreateRxQuery', {\n op,\n queryObj,\n collection,\n other\n });\n\n let ret = new RxQueryBase(op, queryObj, collection, other);\n\n // ensure when created with same params, only one is created\n ret = tunnelQueryCache(ret);\n triggerCacheReplacement(collection);\n\n return ret;\n}\n\n/**\n * Check if the current results-state is in sync with the database\n * which means that no write event happened since the last run.\n * @return false if not which means it should re-execute\n */\nfunction _isResultsInSync(rxQuery: RxQueryBase): boolean {\n const currentLatestEventNumber = rxQuery.asRxQuery.collection._changeEventBuffer.counter;\n if (rxQuery._latestChangeEvent >= currentLatestEventNumber) {\n return true;\n } else {\n return false;\n }\n}\n\n\n/**\n * wraps __ensureEqual()\n * to ensure it does not run in parallel\n * @return true if has changed, false if not\n */\nfunction _ensureEqual(rxQuery: RxQueryBase): Promise {\n // Optimisation shortcut\n if (\n rxQuery.collection.database.destroyed ||\n _isResultsInSync(rxQuery)\n ) {\n return PROMISE_RESOLVE_FALSE;\n }\n\n rxQuery._ensureEqualQueue = 
rxQuery._ensureEqualQueue\n .then(() => __ensureEqual(rxQuery));\n return rxQuery._ensureEqualQueue;\n}\n\n/**\n * ensures that the results of this query is equal to the results which a query over the database would give\n * @return true if results have changed\n */\nfunction __ensureEqual(rxQuery: RxQueryBase): Promise {\n rxQuery._lastEnsureEqual = now();\n\n /**\n * Optimisation shortcuts\n */\n if (\n // db is closed\n rxQuery.collection.database.destroyed ||\n // nothing happened since last run\n _isResultsInSync(rxQuery)\n ) {\n return PROMISE_RESOLVE_FALSE;\n }\n\n let ret = false;\n let mustReExec = false; // if this becomes true, a whole execution over the database is made\n if (rxQuery._latestChangeEvent === -1) {\n // have not executed yet -> must run\n mustReExec = true;\n }\n\n /**\n * try to use EventReduce to calculate the new results\n */\n if (!mustReExec) {\n const missedChangeEvents = rxQuery.asRxQuery.collection._changeEventBuffer.getFrom(rxQuery._latestChangeEvent + 1);\n if (missedChangeEvents === null) {\n // changeEventBuffer is of bounds -> we must re-execute over the database\n mustReExec = true;\n } else {\n rxQuery._latestChangeEvent = rxQuery.asRxQuery.collection._changeEventBuffer.counter;\n\n const runChangeEvents: RxChangeEvent[] = rxQuery.asRxQuery.collection\n ._changeEventBuffer\n .reduceByLastOfDoc(missedChangeEvents);\n\n if (rxQuery.op === 'count') {\n // 'count' query\n const previousCount = ensureNotFalsy(rxQuery._result).count;\n let newCount = previousCount;\n runChangeEvents.forEach(cE => {\n const didMatchBefore = cE.previousDocumentData && rxQuery.doesDocumentDataMatch(cE.previousDocumentData);\n const doesMatchNow = rxQuery.doesDocumentDataMatch(cE.documentData);\n\n if (!didMatchBefore && doesMatchNow) {\n newCount++;\n }\n if (didMatchBefore && !doesMatchNow) {\n newCount--;\n }\n });\n if (newCount !== previousCount) {\n ret = true; // true because results changed\n rxQuery._setResultData(newCount as any);\n }\n } 
else {\n // 'find' or 'findOne' query\n const eventReduceResult = calculateNewResults(\n rxQuery as any,\n runChangeEvents\n );\n if (eventReduceResult.runFullQueryAgain) {\n // could not calculate the new results, execute must be done\n mustReExec = true;\n } else if (eventReduceResult.changed) {\n // we got the new results, we do not have to re-execute, mustReExec stays false\n ret = true; // true because results changed\n rxQuery._setResultData(eventReduceResult.newResults as any);\n }\n }\n }\n }\n\n // oh no we have to re-execute the whole query over the database\n if (mustReExec) {\n return rxQuery._execOverDatabase()\n .then(newResultData => {\n\n /**\n * The RxStorage is defined to always first emit events and then return\n * on bulkWrite() calls. So here we have to use the counter AFTER the execOverDatabase()\n * has been run, not the one from before.\n */\n rxQuery._latestChangeEvent = rxQuery.collection._changeEventBuffer.counter;\n\n // A count query needs a different has-changed check.\n if (typeof newResultData === 'number') {\n if (\n !rxQuery._result ||\n newResultData !== rxQuery._result.count\n ) {\n ret = true;\n rxQuery._setResultData(newResultData as any);\n }\n return ret;\n }\n if (\n !rxQuery._result ||\n !areRxDocumentArraysEqual(\n rxQuery.collection.schema.primaryPath,\n newResultData,\n rxQuery._result.docsData\n )\n ) {\n ret = true; // true because results changed\n rxQuery._setResultData(newResultData as any);\n }\n return ret;\n });\n }\n return Promise.resolve(ret); // true if results have changed\n}\n\n/**\n * @returns a format of the query that can be used with the storage\n * when calling RxStorageInstance().query()\n */\nexport function prepareQuery(\n schema: RxJsonSchema>,\n mutateableQuery: FilledMangoQuery\n): PreparedQuery {\n if (!mutateableQuery.sort) {\n throw newRxError('SNH', {\n query: mutateableQuery\n });\n }\n\n /**\n * Store the query plan together with the\n * prepared query to save performance.\n */\n const 
queryPlan = getQueryPlan(\n schema,\n mutateableQuery\n );\n\n return {\n query: mutateableQuery,\n queryPlan\n };\n}\n\n/**\n * Runs the query over the storage instance\n * of the collection.\n * Does some optimizations to ensure findById is used\n * when specific queries are used.\n */\nexport async function queryCollection(\n rxQuery: RxQuery | RxQueryBase\n): Promise[]> {\n let docs: RxDocumentData[] = [];\n const collection = rxQuery.collection;\n\n /**\n * Optimizations shortcut.\n * If query is find-one-document-by-id,\n * then we do not have to use the slow query() method\n * but instead can use findDocumentsById()\n */\n if (rxQuery.isFindOneByIdQuery) {\n if (Array.isArray(rxQuery.isFindOneByIdQuery)) {\n let docIds = rxQuery.isFindOneByIdQuery;\n docIds = docIds.filter(docId => {\n // first try to fill from docCache\n const docData = rxQuery.collection._docCache.getLatestDocumentDataIfExists(docId);\n if (docData) {\n if (!docData._deleted) {\n docs.push(docData);\n }\n return false;\n } else {\n return true;\n }\n });\n // otherwise get from storage\n if (docIds.length > 0) {\n const docsFromStorage = await collection.storageInstance.findDocumentsById(docIds, false);\n appendToArray(docs, docsFromStorage);\n }\n } else {\n const docId = rxQuery.isFindOneByIdQuery;\n\n // first try to fill from docCache\n let docData = rxQuery.collection._docCache.getLatestDocumentDataIfExists(docId);\n if (!docData) {\n // otherwise get from storage\n const fromStorageList = await collection.storageInstance.findDocumentsById([docId], false);\n if (fromStorageList[0]) {\n docData = fromStorageList[0];\n }\n }\n if (docData && !docData._deleted) {\n docs.push(docData);\n }\n }\n } else {\n const preparedQuery = rxQuery.getPreparedQuery();\n const queryResult = await collection.storageInstance.query(preparedQuery);\n docs = queryResult.documents;\n }\n return docs;\n\n}\n\n/**\n * Returns true if the given query\n * selects exactly one document by its id.\n * Used to 
optimize performance because these kind of\n * queries do not have to run over an index and can use get-by-id instead.\n * Returns false if no query of that kind.\n * Returns the document id otherwise.\n */\nexport function isFindOneByIdQuery(\n primaryPath: string,\n query: MangoQuery\n): false | string | string[] {\n // must have exactly one operator which must be $eq || $in\n if (\n !query.skip &&\n query.selector &&\n Object.keys(query.selector).length === 1 &&\n query.selector[primaryPath]\n ) {\n const value: any = query.selector[primaryPath];\n if (typeof value === 'string') {\n return value;\n } else if (\n Object.keys(value).length === 1 &&\n typeof value.$eq === 'string'\n ) {\n return value.$eq;\n }\n\n // same with $in string arrays\n if (\n Object.keys(value).length === 1 &&\n Array.isArray(value.$eq) &&\n // must only contain strings\n !(value.$eq as any[]).find(r => typeof r !== 'string')\n ) {\n return value.$eq;\n }\n }\n return false;\n}\n\n\n\nexport function isRxQuery(obj: any): boolean {\n return obj instanceof 
RxQueryBase;\n}\n"],"mappings":";;;;;;;;;;;;;;;AAAA,IAAAA,KAAA,GAAAC,OAAA;AAMA,IAAAC,UAAA,GAAAD,OAAA;AAQA,IAAAE,MAAA,GAAAF,OAAA;AAWA,IAAAG,QAAA,GAAAH,OAAA;AAGA,IAAAI,MAAA,GAAAJ,OAAA;AAoBA,IAAAK,YAAA,GAAAL,OAAA;AACA,IAAAM,WAAA,GAAAN,OAAA;AACA,IAAAO,cAAA,GAAAP,OAAA;AACA,IAAAQ,oBAAA,GAAAR,OAAA;AACA,IAAAS,aAAA,GAAAT,OAAA;AAEA,IAAIU,WAAW,GAAG,CAAC;AACnB,IAAMC,UAAU,GAAG,SAAAA,CAAA,EAAoB;EACnC,OAAO,EAAED,WAAW;AACxB,CAAC;AAAC,IAEWE,WAAW,GAAAC,OAAA,CAAAD,WAAA;EASpB;AACJ;AACA;;EAII;;EAKA;;EAMA;AACJ;AACA;AACA;;EAII,SAAAA,YACWE,EAAa,EACbC,UAA2C,EAC3CC,UAAmC;EAC1C;EACOC,KAAU,GAAG,CAAC,CAAC,EACxB;IAAA,KAhCKC,EAAE,GAAWP,UAAU,CAAC,CAAC;IAAA,KAKzBQ,sBAAsB,GAAW,CAAC;IAAA,KAClCC,aAAa,GAAG,IAAAC,UAAG,EAAC,CAAC;IAAA,KAGrBC,gBAAgB,GAAG,CAAC;IAAA,KAEpBC,QAAQ,GAAG,KAAK;IAAA,KAGhBC,SAAS,GAAG,IAAIC,qBAAe,CAAC,IAAI,CAAC;IAAA,KASrCC,OAAO,GAA0C,IAAI;IAAA,KA6FrDC,kBAAkB,GAAgB,CAAC,CAAC;IAAA,KAIpCC,cAAc,GAAW,CAAC;IAAA,KAC1BC,YAAY,GAAW,CAAC;IAAA,KAMxBC,iBAAiB,GAAqBC,4BAAqB;IAAA,KApGvDjB,EAAa,GAAbA,EAAa;IAAA,KACbC,UAA2C,GAA3CA,UAA2C;IAAA,KAC3CC,UAAmC,GAAnCA,UAAmC;IAAA,KAEnCC,KAAU,GAAVA,KAAU;IAEjB,IAAI,CAACF,UAAU,EAAE;MACb,IAAI,CAACA,UAAU,GAAGiB,gBAAgB,CAAC,CAAC;IACxC;IAEA,IAAI,CAACC,kBAAkB,GAAGA,kBAAkB,CACxC,IAAI,CAACjB,UAAU,CAACkB,MAAM,CAACC,WAAW,EAClCpB,UACJ,CAAC;EACL;EAAC,IAAAqB,MAAA,GAAAxB,WAAA,CAAAyB,SAAA;EAwFD;AACJ;AACA;AACA;AACA;AACA;AACA;EAGI;AACJ;AACA;AACA;EAHID,MAAA,CAIAE,cAAc,GAAd,SAAAA,eAAeC,aAA4F,EAAQ;IAC/G,IAAI,OAAOA,aAAa,KAAK,QAAQ,EAAE;MACnC,IAAI,CAACb,OAAO,GAAG,IAAIc,wCAAmB,CAClC,IAAI,CAACxB,UAAU,EACf,EAAE,EACFuB,aACJ,CAAC;MACD;IACJ,CAAC,MAAM,IAAIA,aAAa,YAAYE,GAAG,EAAE;MACrCF,aAAa,GAAGG,KAAK,CAACC,IAAI,CAAEJ,aAAa,CAA4CK,MAAM,CAAC,CAAC,CAAC;IAClG;IAEA,IAAMC,cAAc,GAAG,IAAIL,wCAAmB,CAC1C,IAAI,CAACxB,UAAU,EACfuB,aAAa,EACbA,aAAa,CAACO,MAClB,CAAC;IACD,IAAI,CAACpB,OAAO,GAAGmB,cAAc;EACjC;;EAEA;AACJ;AACA;AACA,KAHI;EAAAT,MAAA,CAIMW,iBAAiB,GAAvB,eAAAA,kBAAA,EAAyE;IACrE,IAAI,CAAC5B,sBAAsB,GAAG,IAAI,CAACA,sBAAsB,GAAG,CAAC;IAC7D,IAAI,CAACS,cAAc,GAAG,IAAAP,UAAG,EAAC,CAAC;IAG3B,IAAI,IAAI,CAACP,EAAE
,KAAK,OAAO,EAAE;MACrB,IAAMkC,aAAa,GAAG,IAAI,CAACC,gBAAgB,CAAC,CAAC;MAC7C,IAAMC,MAAM,GAAG,MAAM,IAAI,CAAClC,UAAU,CAACmC,eAAe,CAACC,KAAK,CAACJ,aAAa,CAAC;MACzE,IAAIE,MAAM,CAACG,IAAI,KAAK,MAAM,IAAI,CAAC,IAAI,CAACrC,UAAU,CAACsC,QAAQ,CAACC,cAAc,EAAE;QACpE,MAAM,IAAAC,mBAAU,EAAC,MAAM,EAAE;UACrBxC,UAAU,EAAE,IAAI,CAACA,UAAU;UAC3ByC,QAAQ,EAAE,IAAI,CAAC1C;QACnB,CAAC,CAAC;MACN,CAAC,MAAM;QACH,OAAOmC,MAAM,CAACE,KAAK;MACvB;IACJ;IAEA,IAAI,IAAI,CAACtC,EAAE,KAAK,WAAW,EAAE;MACzB,IAAM4C,GAAa,GAAG,IAAAC,qBAAc,EAAC,IAAI,CAAC5C,UAAU,CAAC6C,QAAe,CAAC,CAAC,IAAI,CAAC5C,UAAU,CAACkB,MAAM,CAACC,WAAW,CAAC,CAAC0B,GAAG;MAC7G,IAAMC,GAAG,GAAG,IAAIrB,GAAG,CAAgC,CAAC;MACpD,IAAMsB,aAAuB,GAAG,EAAE;MAClC;MACAL,GAAG,CAACM,OAAO,CAAC9C,EAAE,IAAI;QACd,IAAM+C,OAAO,GAAG,IAAI,CAACjD,UAAU,CAACkD,SAAS,CAACC,6BAA6B,CAACjD,EAAE,CAAC;QAC3E,IAAI+C,OAAO,EAAE;UACT,IAAI,CAACA,OAAO,CAACG,QAAQ,EAAE;YACnB,IAAMC,GAAG,GAAG,IAAI,CAACrD,UAAU,CAACkD,SAAS,CAACI,mBAAmB,CAACL,OAAO,CAAC;YAClEH,GAAG,CAACS,GAAG,CAACrD,EAAE,EAAEmD,GAAG,CAAC;UACpB;QACJ,CAAC,MAAM;UACHN,aAAa,CAACS,IAAI,CAACtD,EAAE,CAAC;QAC1B;MACJ,CAAC,CAAC;MACF;MACA,IAAI6C,aAAa,CAACjB,MAAM,GAAG,CAAC,EAAE;QAC1B,IAAM2B,IAAI,GAAG,MAAM,IAAI,CAACzD,UAAU,CAACmC,eAAe,CAACuB,iBAAiB,CAACX,aAAa,EAAE,KAAK,CAAC;QAC1FU,IAAI,CAACT,OAAO,CAACC,OAAO,IAAI;UACpB,IAAMI,GAAG,GAAG,IAAI,CAACrD,UAAU,CAACkD,SAAS,CAACI,mBAAmB,CAACL,OAAO,CAAC;UAClEH,GAAG,CAACS,GAAG,CAACF,GAAG,CAACM,OAAO,EAAEN,GAAG,CAAC;QAC7B,CAAC,CAAC;MACN;MACA,OAAOP,GAAG;IACd;IAGA,IAAMc,WAAW,GAAGC,eAAe,CAAY,IAAW,CAAC;IAC3D,OAAOD,WAAW,CAACE,IAAI,CAACL,IAAI,IAAI;MAC5B,IAAI,CAAC5C,YAAY,GAAG,IAAAR,UAAG,EAAC,CAAC;MACzB,OAAOoD,IAAI;IACf,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA;AACA,KAJI;EAAArC,MAAA,CAOO2C,IAAI,GAAX,SAAAA,KAAYC,cAAwB,EAAgB;IAChD,IAAIA,cAAc,IAAI,IAAI,CAAClE,EAAE,KAAK,SAAS,EAAE;MACzC,MAAM,IAAA0C,mBAAU,EAAC,KAAK,EAAE;QACpBxC,UAAU,EAAE,IAAI,CAACA,UAAU,CAACiE,IAAI;QAChCC,KAAK,EAAE,IAAI,CAACnE,UAAU;QACtBD,EAAE,EAAE,IAAI,CAACA;MACb,CAAC,CAAC;IACN;;IAGA;AACR;AACA;AACA;AACA;IACQ,OAAOqE,YAAY,CAAC,IAAW,CAAC,CAC3BL,IAAI,CAAC,MAAM,IAAAM,oB
AAc,EAAC,IAAI,CAACC,CAAC,CAAC,CAAC,CAClCP,IAAI,CAAC5B,MAAM,IAAI;MACZ,IAAI,CAACA,MAAM,IAAI8B,cAAc,EAAE;QAC3B,MAAM,IAAAxB,mBAAU,EAAC,MAAM,EAAE;UACrBxC,UAAU,EAAE,IAAI,CAACA,UAAU,CAACiE,IAAI;UAChCC,KAAK,EAAE,IAAI,CAACnE,UAAU;UACtBD,EAAE,EAAE,IAAI,CAACA;QACb,CAAC,CAAC;MACN,CAAC,MAAM;QACH,OAAOoC,MAAM;MACjB;IACJ,CAAC,CAAC;EACV;;EAIA;AACJ;AACA;AACA,KAHI;EAoBA;AACJ;AACA;AACA;EAHId,MAAA,CAIAkD,QAAQ,GAAR,SAAAA,SAAA,EAAmB;IACf,IAAMC,SAAS,GAAG,IAAAC,iBAAU,EAAC;MACzB1E,EAAE,EAAE,IAAI,CAACA,EAAE;MACXoE,KAAK,EAAE,IAAI,CAACnE,UAAU;MACtBE,KAAK,EAAE,IAAI,CAACA;IAChB,CAAC,EAAE,IAAI,CAAC;IACR,IAAMwE,KAAK,GAAGC,IAAI,CAACC,SAAS,CAACJ,SAAS,CAAC;IACvC,IAAI,CAACD,QAAQ,GAAG,MAAMG,KAAK;IAC3B,OAAOA,KAAK;EAChB;;EAEA;AACJ;AACA;AACA;AACA,KAJI;EAAArD,MAAA,CAKAa,gBAAgB,GAAhB,SAAAA,iBAAA,EAA6C;IACzC,IAAM2C,SAAS,GAAG;MACdC,OAAO,EAAE,IAAI;MACb;MACA9E,UAAU,EAAE,IAAA+E,kCAAmB,EAC3B,IAAI,CAAC9E,UAAU,CAACkB,MAAM,CAAC6D,UAAU,EACjC,IAAI,CAAChF,UACT;IACJ,CAAC;IACA6E,SAAS,CAAC7E,UAAU,CAAC6C,QAAQ,CAASQ,QAAQ,GAAG;MAAE4B,GAAG,EAAE;IAAM,CAAC;IAChE,IAAIJ,SAAS,CAAC7E,UAAU,CAACkF,KAAK,EAAE;MAC5BL,SAAS,CAAC7E,UAAU,CAACkF,KAAK,CAACC,OAAO,CAAC,UAAU,CAAC;IAClD;IACA,IAAAC,qBAAc,EAAC,iBAAiB,EAAEP,SAAS,CAAC;IAE5C,IAAMH,KAAK,GAAGW,YAAY,CACtB,IAAI,CAACpF,UAAU,CAACkB,MAAM,CAAC6D,UAAU,EACjCH,SAAS,CAAC7E,UACd,CAAC;IAED,IAAI,CAACkC,gBAAgB,GAAG,MAAMwC,KAAK;IACnC,OAAOA,KAAK;EAChB;;EAEA;AACJ;AACA;AACA,KAHI;EAAArD,MAAA,CAIAiE,qBAAqB,GAArB,SAAAA,sBAAsBpC,OAAwB,EAAW;IACrD;IACA,IAAIA,OAAO,CAACG,QAAQ,EAAE;MAClB,OAAO,KAAK;IAChB;IAEA,OAAO,IAAI,CAACkC,YAAY,CAACrC,OAAO,CAAC;EACrC;;EAEA;AACJ;AACA;AACA,KAHI;EAAA7B,MAAA,CAIAmE,MAAM,GAAN,SAAAA,OAAA,EAAiC;IAC7B,OAAO,IAAI,CACNxB,IAAI,CAAC,CAAC,CACND,IAAI,CAACL,IAAI,IAAI;MACV,IAAI/B,KAAK,CAAC8D,OAAO,CAAC/B,IAAI,CAAC,EAAE;QACrB;QACA,OAAOgC,OAAO,CAACC,GAAG,CAACjC,IAAI,CAACkC,GAAG,CAACtC,GAAG,IAAIA,GAAG,CAACkC,MAAM,CAAC,CAAC,CAAC,CAAC;MACrD,CAAC,MAAM;QACH,OAAQ9B,IAAI,CAAS8B,MAAM,CAAC,CAAC;MACjC;IACJ,CAAC,CAAC;EACV,CAAC;EAAAnE,MAAA,CACDwE,iBAAiB,GAAjB,SAAAA,kBAAA,EAA4C;IACxC,OAAO,IAAAC,qCAAsB,EACzB,IAA
I,CAACC,SAAS,EACbzC,GAAG,IAAKA,GAAG,CAACuC,iBAAiB,CAAC,CACnC,CAAC;EACL;;EAGA;AACJ;AACA,KAFI;EAOA;AACJ;AACA;AACA;EAHIxE,MAAA,CAIA2E,MAAM,GAAN,SAAAA,OAAOC,UAAe,EAA0B;IAC5C,MAAM,IAAAC,oBAAa,EAAC,QAAQ,CAAC;EACjC,CAAC;EAAA7E,MAAA,CAED8E,KAAK,GAAL,SAAAA,MAAMA,MAAyB,EAA0B;IACrD,OAAO,IAAAL,qCAAsB,EACzB,IAAI,CAACC,SAAS,EACbzC,GAAG,IAAKA,GAAG,CAAC6C,KAAK,CAACA,MAAK,CAC5B,CAAC;EACL,CAAC;EAAA9E,MAAA,CACD+E,gBAAgB,GAAhB,SAAAA,iBAAiBD,KAAyB,EAA0B;IAChE,OAAO,IAAAL,qCAAsB,EACzB,IAAI,CAACC,SAAS,EACbzC,GAAG,IAAKA,GAAG,CAAC8C,gBAAgB,CAACD,KAAK,CACvC,CAAC;EACL,CAAC;EAAA9E,MAAA,CACDgF,MAAM,GAAN,SAAAA,OAAOC,gBAA2C,EAA0B;IACxE,OAAO,IAAAR,qCAAsB,EACzB,IAAI,CAACC,SAAS,EACbzC,GAAG,IAAKA,GAAG,CAAC+C,MAAM,CAACC,gBAAgB,CACxC,CAAC;EACL,CAAC;EAAAjF,MAAA,CACDkF,iBAAiB,GAAjB,SAAAA,kBAAkBD,gBAA2C,EAA0B;IACnF,OAAO,IAAAR,qCAAsB,EACzB,IAAI,CAACC,SAAS,EACbzC,GAAG,IAAKA,GAAG,CAACiD,iBAAiB,CAACD,gBAAgB,CACnD,CAAC;EACL;;EAGA;EACA;EAAA;EAAAjF,MAAA,CACAmF,KAAK,GAAL,SAAAA,MAAMC,SAAmE,EAAqC;IAC1G,MAAM,IAAAP,oBAAa,EAAC,eAAe,CAAC;EACxC,CAAC;EAAA7E,MAAA,CACDqF,IAAI,GAAJ,SAAAA,KAAKC,OAA+C,EAAqC;IACrF,MAAM,IAAAT,oBAAa,EAAC,eAAe,CAAC;EACxC,CAAC;EAAA7E,MAAA,CACDuF,IAAI,GAAJ,SAAAA,KAAKC,OAAsB,EAAqC;IAC5D,MAAM,IAAAX,oBAAa,EAAC,eAAe,CAAC;EACxC,CAAC;EAAA7E,MAAA,CACDyF,KAAK,GAAL,SAAAA,MAAMD,OAAsB,EAAqC;IAC7D,MAAM,IAAAX,oBAAa,EAAC,eAAe,CAAC;EACxC,CAAC;EAAA,WAAAa,aAAA,CAAAC,OAAA,EAAAnH,WAAA;IAAAoH,GAAA;IAAAC,GAAA,EAnXD,SAAAA,CAAA,EAAwC;MACpC,IAAI,CAAC,IAAI,CAACC,EAAE,EAAE;QAEV,IAAMC,QAAQ,GAAG,IAAI,CAACnH,UAAU,CAACqE,CAAC,CAAC+C,IAAI;QACnC;AAChB;AACA;AACA;QACgB,IAAAC,iBAAM,EAACC,WAAW,IAAI,CAACA,WAAW,CAACC,OAAO,CAAC;QAC3C;AAChB;AACA;AACA;QACgB,IAAAC,oBAAS,EAAC,IAAI,CAAC;QACf;QACA,IAAAC,mBAAQ,EAAC,MAAMtD,YAAY,CAAC,IAAW,CAAC,CAAC;QACzC;QACA,IAAAwB,cAAG,EAAC,MAAM,IAAI,CAACjF,OAAO,CAAC;QACvB;QACA,IAAAgH,sBAAW,EAACC,iCAA0B,CAAC;QACvC;QACA,IAAAC,+BAAoB,EAAC,CAACC,IAAI,EAAEC,IAAI,KAAK;UACjC,IAAID,IAAI,IAAIA,IAAI,CAACE,IAAI,KAAK,IAAApF,qBAAc,EAACmF,IAAI,CAAC,CAACC,IAAI,EAAE;YACjD,OAAO,IAAI;UACf,CAAC,MAAM;YACH,OAAO,KAAK;UAChB;QACJ,CAA
C,CAAC,EACF,IAAAV,iBAAM,EAACnF,MAAM,IAAI,CAAC,CAACA,MAAM,CAAC;QAC1B;AAChB;AACA;AACA;QACgB,IAAAyD,cAAG,EAAEzD,MAAM,IAAK;UACZ,IAAM8F,SAAS,GAAG,IAAArF,qBAAc,EAACT,MAAM,CAAC;UACxC,IAAI,IAAI,CAACpC,EAAE,KAAK,OAAO,EAAE;YACrB,OAAOkI,SAAS,CAAC5F,KAAK;UAC1B,CAAC,MAAM,IAAI,IAAI,CAACtC,EAAE,KAAK,SAAS,EAAE;YAC9B;YACA,OAAOkI,SAAS,CAACC,SAAS,CAACnG,MAAM,KAAK,CAAC,GAAG,IAAI,GAAGkG,SAAS,CAACC,SAAS,CAAC,CAAC,CAAC;UAC3E,CAAC,MAAM,IAAI,IAAI,CAACnI,EAAE,KAAK,WAAW,EAAE;YAChC,OAAOkI,SAAS,CAACE,OAAO;UAC5B,CAAC,MAAM;YACH;YACA;YACA,OAAOF,SAAS,CAACC,SAAS,CAACE,KAAK,CAAC,CAAC,CAAC;UACvC;QACJ,CAAC,CACL,CAAC;QAED,IAAI,CAACjB,EAAE,GAAG,IAAAkB,WAAK,EACXjB,QAAQ;QACR;AAChB;AACA;AACA;QACgB,IAAI,CAAC3G,SAAS,CAAC4G,IAAI,CACf,IAAAC,iBAAM,EAAC,MAAM,KAAK,CACtB,CACJ,CAAC;MACL;MACA,OAAO,IAAI,CAACH,EAAE;IAClB;EAAC;IAAAF,GAAA;IAAAC,GAAA,EAED,SAAAA,CAAA,EAAqB;MACjB,IAAMoB,UAAU,GAAG,IAAI,CAACrI,UAAU,CAACsC,QAAQ,CAACgG,oBAAoB,CAAC,CAAC;MAClE,OAAOD,UAAU,CAACE,cAAc,CAC5B,IAAI,CAAClE,CAAC,EACNmE,SAAS,EACT,IAAI,CAACxI,UAAU,CAACsC,QACpB,CAAC;IACL;;IAEA;;IAGA;IACA;;IAIA;AACJ;AACA;AACA;EAHI;IAAA0E,GAAA;IAAAC,GAAA,EA2IA,SAAAA,CAAA,EAAiE;MAC7D,IAAM/F,MAAM,GAAG,IAAI,CAAClB,UAAU,CAACkB,MAAM,CAAC6D,UAAU;MAChD,IAAM0D,eAAe,GAAG,IAAA3D,kCAAmB,EACvC,IAAI,CAAC9E,UAAU,CAACkB,MAAM,CAAC6D,UAAU,EACjC,IAAI,CAAChF,UACT,CAAC;MACD,OAAO,IAAA2I,gCAAyB,EAC5B,IAAI,EACJ,cAAc,EACd,IAAAC,8BAAe,EACXzH,MAAM,EACNuH,eACJ,CACJ,CAAC;IACL;EAAC;IAAAzB,GAAA;IAAAC,GAAA,EAsFD,SAAAA,CAAA,EAAmD;MAC/C,OAAO,IAAI;IACf;EAAC;AAAA;AAoDE,SAASjG,gBAAgBA,CAAA,EAAqC;EACjE,OAAO;IACH4B,QAAQ,EAAE,CAAC;EACf,CAAC;AACL;;AAEA;AACA;AACA;AACO,SAASgG,gBAAgBA,CAC5B/D,OAAmD,EACb;EACtC,OAAOA,OAAO,CAAC7E,UAAU,CAACV,WAAW,CAACuJ,UAAU,CAAChE,OAAc,CAAC;AACpE;AAEO,SAASiE,aAAaA,CACzBhJ,EAAa,EACb2C,QAA+B,EAC/BzC,UAAmC,EACnCC,KAAW,EACb;EACE,IAAAkF,qBAAc,EAAC,kBAAkB,EAAE;IAC/BrF,EAAE;IACF2C,QAAQ;IACRzC,UAAU;IACVC;EACJ,CAAC,CAAC;EAEF,IAAI6C,GAAG,GAAG,IAAIlD,WAAW,CAAiBE,EAAE,EAAE2C,QAAQ,EAAEzC,UAAU,EAAEC,KAAK,CAAC;;EAE1E;EACA6C,GAAG,GAAG8F,gBAAgB,CAAC9F,GAAG,CAAC;EAC3B,IAAAiG,mCAAuB,EAAC/I
,UAAU,CAAC;EAEnC,OAAO8C,GAAG;AACd;;AAEA;AACA;AACA;AACA;AACA;AACA,SAASkG,gBAAgBA,CAACnE,OAA8B,EAAW;EAC/D,IAAMoE,wBAAwB,GAAGpE,OAAO,CAACiB,SAAS,CAAC9F,UAAU,CAACkJ,kBAAkB,CAACC,OAAO;EACxF,IAAItE,OAAO,CAAClE,kBAAkB,IAAIsI,wBAAwB,EAAE;IACxD,OAAO,IAAI;EACf,CAAC,MAAM;IACH,OAAO,KAAK;EAChB;AACJ;;AAGA;AACA;AACA;AACA;AACA;AACA,SAAS9E,YAAYA,CAACU,OAA8B,EAAoB;EACpE;EACA,IACIA,OAAO,CAAC7E,UAAU,CAACsC,QAAQ,CAAC8G,SAAS,IACrCJ,gBAAgB,CAACnE,OAAO,CAAC,EAC3B;IACE,OAAO9D,4BAAqB;EAChC;EAEA8D,OAAO,CAAC/D,iBAAiB,GAAG+D,OAAO,CAAC/D,iBAAiB,CAChDgD,IAAI,CAAC,MAAMuF,aAAa,CAACxE,OAAO,CAAC,CAAC;EACvC,OAAOA,OAAO,CAAC/D,iBAAiB;AACpC;;AAEA;AACA;AACA;AACA;AACA,SAASuI,aAAaA,CAAYxE,OAAoC,EAAoB;EACtFA,OAAO,CAACvE,gBAAgB,GAAG,IAAAD,UAAG,EAAC,CAAC;;EAEhC;AACJ;AACA;EACI;EACI;EACAwE,OAAO,CAAC7E,UAAU,CAACsC,QAAQ,CAAC8G,SAAS;EACrC;EACAJ,gBAAgB,CAACnE,OAAO,CAAC,EAC3B;IACE,OAAO9D,4BAAqB;EAChC;EAEA,IAAI+B,GAAG,GAAG,KAAK;EACf,IAAIwG,UAAU,GAAG,KAAK,CAAC,CAAC;EACxB,IAAIzE,OAAO,CAAClE,kBAAkB,KAAK,CAAC,CAAC,EAAE;IACnC;IACA2I,UAAU,GAAG,IAAI;EACrB;;EAEA;AACJ;AACA;EACI,IAAI,CAACA,UAAU,EAAE;IACb,IAAMC,kBAAkB,GAAG1E,OAAO,CAACiB,SAAS,CAAC9F,UAAU,CAACkJ,kBAAkB,CAACM,OAAO,CAAC3E,OAAO,CAAClE,kBAAkB,GAAG,CAAC,CAAC;IAClH,IAAI4I,kBAAkB,KAAK,IAAI,EAAE;MAC7B;MACAD,UAAU,GAAG,IAAI;IACrB,CAAC,MAAM;MACHzE,OAAO,CAAClE,kBAAkB,GAAGkE,OAAO,CAACiB,SAAS,CAAC9F,UAAU,CAACkJ,kBAAkB,CAACC,OAAO;MAEpF,IAAMM,eAA2C,GAAG5E,OAAO,CAACiB,SAAS,CAAC9F,UAAU,CAC3EkJ,kBAAkB,CAClBQ,iBAAiB,CAACH,kBAAkB,CAAC;MAE1C,IAAI1E,OAAO,CAAC/E,EAAE,KAAK,OAAO,EAAE;QACxB;QACA,IAAM6J,aAAa,GAAG,IAAAhH,qBAAc,EAACkC,OAAO,CAACnE,OAAO,CAAC,CAAC0B,KAAK;QAC3D,IAAIwH,QAAQ,GAAGD,aAAa;QAC5BF,eAAe,CAACzG,OAAO,CAAC6G,EAAE,IAAI;UAC1B,IAAMC,cAAc,GAAGD,EAAE,CAACE,oBAAoB,IAAIlF,OAAO,CAACQ,qBAAqB,CAACwE,EAAE,CAACE,oBAAoB,CAAC;UACxG,IAAMC,YAAY,GAAGnF,OAAO,CAACQ,qBAAqB,CAACwE,EAAE,CAACI,YAAY,CAAC;UAEnE,IAAI,CAACH,cAAc,IAAIE,YAAY,EAAE;YACjCJ,QAAQ,EAAE;UACd;UACA,IAAIE,cAAc,IAAI,CAACE,YAAY,EAAE;YACjCJ,QAAQ,EAAE;UACd;QACJ,CAAC,CAAC;QACF,IAAIA,QAAQ,KAAKD,aAAa,EAAE;UAC5B7G,GAAG,GAAG,IAAI,CAAC,CAA
C;UACZ+B,OAAO,CAACvD,cAAc,CAACsI,QAAe,CAAC;QAC3C;MACJ,CAAC,MAAM;QACH;QACA,IAAMM,iBAAiB,GAAG,IAAAC,gCAAmB,EACzCtF,OAAO,EACP4E,eACJ,CAAC;QACD,IAAIS,iBAAiB,CAACE,iBAAiB,EAAE;UACrC;UACAd,UAAU,GAAG,IAAI;QACrB,CAAC,MAAM,IAAIY,iBAAiB,CAACG,OAAO,EAAE;UAClC;UACAvH,GAAG,GAAG,IAAI,CAAC,CAAC;UACZ+B,OAAO,CAACvD,cAAc,CAAC4I,iBAAiB,CAACI,UAAiB,CAAC;QAC/D;MACJ;IACJ;EACJ;;EAEA;EACA,IAAIhB,UAAU,EAAE;IACZ,OAAOzE,OAAO,CAAC9C,iBAAiB,CAAC,CAAC,CAC7B+B,IAAI,CAACvC,aAAa,IAAI;MAEnB;AAChB;AACA;AACA;AACA;MACgBsD,OAAO,CAAClE,kBAAkB,GAAGkE,OAAO,CAAC7E,UAAU,CAACkJ,kBAAkB,CAACC,OAAO;;MAE1E;MACA,IAAI,OAAO5H,aAAa,KAAK,QAAQ,EAAE;QACnC,IACI,CAACsD,OAAO,CAACnE,OAAO,IAChBa,aAAa,KAAKsD,OAAO,CAACnE,OAAO,CAAC0B,KAAK,EACzC;UACEU,GAAG,GAAG,IAAI;UACV+B,OAAO,CAACvD,cAAc,CAACC,aAAoB,CAAC;QAChD;QACA,OAAOuB,GAAG;MACd;MACA,IACI,CAAC+B,OAAO,CAACnE,OAAO,IAChB,CAAC,IAAA6J,+BAAwB,EACrB1F,OAAO,CAAC7E,UAAU,CAACkB,MAAM,CAACC,WAAW,EACrCI,aAAa,EACbsD,OAAO,CAACnE,OAAO,CAAC8J,QACpB,CAAC,EACH;QACE1H,GAAG,GAAG,IAAI,CAAC,CAAC;QACZ+B,OAAO,CAACvD,cAAc,CAACC,aAAoB,CAAC;MAChD;MACA,OAAOuB,GAAG;IACd,CAAC,CAAC;EACV;EACA,OAAO2C,OAAO,CAACgF,OAAO,CAAC3H,GAAG,CAAC,CAAC,CAAC;AACjC;;AAEA;AACA;AACA;AACA;AACO,SAASsC,YAAYA,CACxBlE,MAA+C,EAC/CwJ,eAA4C,EACpB;EACxB,IAAI,CAACA,eAAe,CAACjE,IAAI,EAAE;IACvB,MAAM,IAAAjE,mBAAU,EAAC,KAAK,EAAE;MACpB0B,KAAK,EAAEwG;IACX,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA;EACI,IAAMC,SAAS,GAAG,IAAAC,0BAAY,EAC1B1J,MAAM,EACNwJ,eACJ,CAAC;EAED,OAAO;IACHxG,KAAK,EAAEwG,eAAe;IACtBC;EACJ,CAAC;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACO,eAAe9G,eAAeA,CACjCgB,OAAyD,EACrB;EACpC,IAAIpB,IAAiC,GAAG,EAAE;EAC1C,IAAMzD,UAAU,GAAG6E,OAAO,CAAC7E,UAAU;;EAErC;AACJ;AACA;AACA;AACA;AACA;EACI,IAAI6E,OAAO,CAAC5D,kBAAkB,EAAE;IAC5B,IAAIS,KAAK,CAAC8D,OAAO,CAACX,OAAO,CAAC5D,kBAAkB,CAAC,EAAE;MAC3C,IAAI4J,MAAM,GAAGhG,OAAO,CAAC5D,kBAAkB;MACvC4J,MAAM,GAAGA,MAAM,CAACxD,MAAM,CAACyD,KAAK,IAAI;QAC5B;QACA,IAAM7H,OAAO,GAAG4B,OAAO,CAAC7E,UAAU,CAACkD,SAAS,CAACC,6BAA6B,CAAC2H,KAAK,CAAC;QACjF,IAAI7H,OAAO,EAAE;UACT,IAAI,CAACA,OAAO,CAACG,QAAQ,EAAE;YACnBK,IAAI,CAACD,IAAI,CA
ACP,OAAO,CAAC;UACtB;UACA,OAAO,KAAK;QAChB,CAAC,MAAM;UACH,OAAO,IAAI;QACf;MACJ,CAAC,CAAC;MACF;MACA,IAAI4H,MAAM,CAAC/I,MAAM,GAAG,CAAC,EAAE;QACnB,IAAMiJ,eAAe,GAAG,MAAM/K,UAAU,CAACmC,eAAe,CAACuB,iBAAiB,CAACmH,MAAM,EAAE,KAAK,CAAC;QACzF,IAAAG,oBAAa,EAACvH,IAAI,EAAEsH,eAAe,CAAC;MACxC;IACJ,CAAC,MAAM;MACH,IAAMD,KAAK,GAAGjG,OAAO,CAAC5D,kBAAkB;;MAExC;MACA,IAAIgC,OAAO,GAAG4B,OAAO,CAAC7E,UAAU,CAACkD,SAAS,CAACC,6BAA6B,CAAC2H,KAAK,CAAC;MAC/E,IAAI,CAAC7H,OAAO,EAAE;QACV;QACA,IAAMgI,eAAe,GAAG,MAAMjL,UAAU,CAACmC,eAAe,CAACuB,iBAAiB,CAAC,CAACoH,KAAK,CAAC,EAAE,KAAK,CAAC;QAC1F,IAAIG,eAAe,CAAC,CAAC,CAAC,EAAE;UACpBhI,OAAO,GAAGgI,eAAe,CAAC,CAAC,CAAC;QAChC;MACJ;MACA,IAAIhI,OAAO,IAAI,CAACA,OAAO,CAACG,QAAQ,EAAE;QAC9BK,IAAI,CAACD,IAAI,CAACP,OAAO,CAAC;MACtB;IACJ;EACJ,CAAC,MAAM;IACH,IAAMjB,aAAa,GAAG6C,OAAO,CAAC5C,gBAAgB,CAAC,CAAC;IAChD,IAAMiJ,WAAW,GAAG,MAAMlL,UAAU,CAACmC,eAAe,CAAC+B,KAAK,CAAClC,aAAa,CAAC;IACzEyB,IAAI,GAAGyH,WAAW,CAACjD,SAAS;EAChC;EACA,OAAOxE,IAAI;AAEf;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACO,SAASxC,kBAAkBA,CAC9BE,WAAmB,EACnB+C,KAAsB,EACG;EACzB;EACA,IACI,CAACA,KAAK,CAACyC,IAAI,IACXzC,KAAK,CAACtB,QAAQ,IACduI,MAAM,CAACC,IAAI,CAAClH,KAAK,CAACtB,QAAQ,CAAC,CAACd,MAAM,KAAK,CAAC,IACxCoC,KAAK,CAACtB,QAAQ,CAACzB,WAAW,CAAC,EAC7B;IACE,IAAMsD,KAAU,GAAGP,KAAK,CAACtB,QAAQ,CAACzB,WAAW,CAAC;IAC9C,IAAI,OAAOsD,KAAK,KAAK,QAAQ,EAAE;MAC3B,OAAOA,KAAK;IAChB,CAAC,MAAM,IACH0G,MAAM,CAACC,IAAI,CAAC3G,KAAK,CAAC,CAAC3C,MAAM,KAAK,CAAC,IAC/B,OAAO2C,KAAK,CAACO,GAAG,KAAK,QAAQ,EAC/B;MACE,OAAOP,KAAK,CAACO,GAAG;IACpB;;IAEA;IACA,IACImG,MAAM,CAACC,IAAI,CAAC3G,KAAK,CAAC,CAAC3C,MAAM,KAAK,CAAC,IAC/BJ,KAAK,CAAC8D,OAAO,CAACf,KAAK,CAACO,GAAG,CAAC;IACxB;IACA,CAAEP,KAAK,CAACO,GAAG,CAAWqG,IAAI,CAACC,CAAC,IAAI,OAAOA,CAAC,KAAK,QAAQ,CAAC,EACxD;MACE,OAAO7G,KAAK,CAACO,GAAG;IACpB;EACJ;EACA,OAAO,KAAK;AAChB;AAIO,SAASuG,SAASA,CAACC,GAAQ,EAAW;EACzC,OAAOA,GAAG,YAAY5L,WAAW;AACrC","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/rx-schema-helper.js b/dist/cjs/rx-schema-helper.js deleted file mode 100644 index 
90a0fb35acd..00000000000 --- a/dist/cjs/rx-schema-helper.js +++ /dev/null @@ -1,309 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RX_META_SCHEMA = exports.DEFAULT_CHECKPOINT_SCHEMA = void 0; -exports.fillObjectWithDefaults = fillObjectWithDefaults; -exports.fillPrimaryKey = fillPrimaryKey; -exports.fillWithDefaultSettings = fillWithDefaultSettings; -exports.getComposedPrimaryKeyOfDocumentData = getComposedPrimaryKeyOfDocumentData; -exports.getDefaultIndex = getDefaultIndex; -exports.getFinalFields = getFinalFields; -exports.getLengthOfPrimaryKey = getLengthOfPrimaryKey; -exports.getPrimaryFieldOfPrimaryKey = getPrimaryFieldOfPrimaryKey; -exports.getPseudoSchemaForVersion = getPseudoSchemaForVersion; -exports.getSchemaByObjectPath = getSchemaByObjectPath; -exports.normalizeRxJsonSchema = normalizeRxJsonSchema; -var _rxError = require("./rx-error.js"); -var _index = require("./plugins/utils/index.js"); -/** - * Helper function to create a valid RxJsonSchema - * with a given version. - */ -function getPseudoSchemaForVersion(version, primaryKey) { - var pseudoSchema = fillWithDefaultSettings({ - version, - type: 'object', - primaryKey: primaryKey, - properties: { - [primaryKey]: { - type: 'string', - maxLength: 100 - } - }, - indexes: [[primaryKey]], - required: [primaryKey] - }); - return pseudoSchema; -} - -/** - * Returns the sub-schema for a given path - */ -function getSchemaByObjectPath(rxJsonSchema, path) { - var usePath = path; - usePath = usePath.replace(_index.REGEX_ALL_DOTS, '.properties.'); - usePath = 'properties.' + usePath; - usePath = (0, _index.trimDots)(usePath); - var ret = (0, _index.getProperty)(rxJsonSchema, usePath); - return ret; -} -function fillPrimaryKey(primaryPath, jsonSchema, documentData) { - // optimization shortcut. 
- if (typeof jsonSchema.primaryKey === 'string') { - return documentData; - } - var newPrimary = getComposedPrimaryKeyOfDocumentData(jsonSchema, documentData); - var existingPrimary = documentData[primaryPath]; - if (existingPrimary && existingPrimary !== newPrimary) { - throw (0, _rxError.newRxError)('DOC19', { - args: { - documentData, - existingPrimary, - newPrimary - }, - schema: jsonSchema - }); - } - documentData[primaryPath] = newPrimary; - return documentData; -} -function getPrimaryFieldOfPrimaryKey(primaryKey) { - if (typeof primaryKey === 'string') { - return primaryKey; - } else { - return primaryKey.key; - } -} -function getLengthOfPrimaryKey(schema) { - var primaryPath = getPrimaryFieldOfPrimaryKey(schema.primaryKey); - var schemaPart = getSchemaByObjectPath(schema, primaryPath); - return (0, _index.ensureNotFalsy)(schemaPart.maxLength); -} - -/** - * Returns the composed primaryKey of a document by its data. - */ -function getComposedPrimaryKeyOfDocumentData(jsonSchema, documentData) { - if (typeof jsonSchema.primaryKey === 'string') { - return documentData[jsonSchema.primaryKey]; - } - var compositePrimary = jsonSchema.primaryKey; - return compositePrimary.fields.map(field => { - var value = (0, _index.getProperty)(documentData, field); - if (typeof value === 'undefined') { - throw (0, _rxError.newRxError)('DOC18', { - args: { - field, - documentData - } - }); - } - return value; - }).join(compositePrimary.separator); -} - -/** - * Normalize the RxJsonSchema. - * We need this to ensure everything is set up properly - * and we have the same hash on schemas that represent the same value but - * have different json. - * - * - Orders the schemas attributes by alphabetical order - * - Adds the primaryKey to all indexes that do not contain the primaryKey - * - We need this for deterministic sort order on all queries, which is required for event-reduce to work. 
- * - * @return RxJsonSchema - ordered and filled - */ -function normalizeRxJsonSchema(jsonSchema) { - var normalizedSchema = (0, _index.sortObject)(jsonSchema, true); - return normalizedSchema; -} - -/** - * If the schema does not specify any index, - * we add this index so we at least can run RxQuery() - * and only select non-deleted fields. - */ -function getDefaultIndex(primaryPath) { - return ['_deleted', primaryPath]; -} - -/** - * fills the schema-json with default-settings - * @return cloned schemaObj - */ -function fillWithDefaultSettings(schemaObj) { - schemaObj = (0, _index.flatClone)(schemaObj); - var primaryPath = getPrimaryFieldOfPrimaryKey(schemaObj.primaryKey); - schemaObj.properties = (0, _index.flatClone)(schemaObj.properties); - - // additionalProperties is always false - schemaObj.additionalProperties = false; - - // fill with key-compression-state () - if (!Object.prototype.hasOwnProperty.call(schemaObj, 'keyCompression')) { - schemaObj.keyCompression = false; - } - - // indexes must be array - schemaObj.indexes = schemaObj.indexes ? schemaObj.indexes.slice(0) : []; - - // required must be array - schemaObj.required = schemaObj.required ? schemaObj.required.slice(0) : []; - - // encrypted must be array - schemaObj.encrypted = schemaObj.encrypted ? schemaObj.encrypted.slice(0) : []; - - // add _rev - schemaObj.properties._rev = { - type: 'string', - minLength: 1 - }; - - // add attachments - schemaObj.properties._attachments = { - type: 'object' - }; - - // add deleted flag - schemaObj.properties._deleted = { - type: 'boolean' - }; - - // add meta property - schemaObj.properties._meta = RX_META_SCHEMA; - - /** - * meta fields are all required - */ - schemaObj.required = schemaObj.required ? 
schemaObj.required.slice(0) : []; - schemaObj.required.push('_deleted'); - schemaObj.required.push('_rev'); - schemaObj.required.push('_meta'); - schemaObj.required.push('_attachments'); - - // final fields are always required - var finalFields = getFinalFields(schemaObj); - (0, _index.appendToArray)(schemaObj.required, finalFields); - schemaObj.required = schemaObj.required.filter(field => !field.includes('.')).filter((elem, pos, arr) => arr.indexOf(elem) === pos); // unique; - - // version is 0 by default - schemaObj.version = schemaObj.version || 0; - var useIndexes = schemaObj.indexes.map(index => { - var arIndex = (0, _index.isMaybeReadonlyArray)(index) ? index.slice(0) : [index]; - /** - * Append primary key to indexes that do not contain the primaryKey. - * All indexes must have the primaryKey to ensure a deterministic sort order. - */ - if (!arIndex.includes(primaryPath)) { - arIndex.push(primaryPath); - } - - // add _deleted flag to all indexes so we can query only non-deleted fields - // in RxDB itself - if (arIndex[0] !== '_deleted') { - arIndex.unshift('_deleted'); - } - return arIndex; - }); - if (useIndexes.length === 0) { - useIndexes.push(getDefaultIndex(primaryPath)); - } - - // we need this index for the getChangedDocumentsSince() method - useIndexes.push(['_meta.lwt', primaryPath]); - - // also add the internalIndexes - if (schemaObj.internalIndexes) { - schemaObj.internalIndexes.map(idx => { - useIndexes.push(idx); - }); - } - - // make indexes unique - var hasIndex = new Set(); - useIndexes.filter(index => { - var indexStr = index.join(','); - if (hasIndex.has(indexStr)) { - return false; - } else { - hasIndex.add(indexStr); - return true; - } - }); - schemaObj.indexes = useIndexes; - return schemaObj; -} -var RX_META_SCHEMA = exports.RX_META_SCHEMA = { - type: 'object', - properties: { - /** - * The last-write time. - * Unix time in milliseconds. - */ - lwt: { - type: 'number', - /** - * We use 1 as minimum so that the value is never falsy. 
- */ - minimum: _index.RX_META_LWT_MINIMUM, - maximum: 1000000000000000, - multipleOf: 0.01 - } - }, - /** - * Additional properties are allowed - * and can be used by plugins to set various flags. - */ - additionalProperties: true, - required: ['lwt'] -}; - -/** - * returns the final-fields of the schema - * @return field-names of the final-fields - */ -function getFinalFields(jsonSchema) { - var ret = Object.keys(jsonSchema.properties).filter(key => jsonSchema.properties[key].final); - - // primary is also final - var primaryPath = getPrimaryFieldOfPrimaryKey(jsonSchema.primaryKey); - ret.push(primaryPath); - - // fields of composite primary are final - if (typeof jsonSchema.primaryKey !== 'string') { - jsonSchema.primaryKey.fields.forEach(field => ret.push(field)); - } - return ret; -} - -/** - * fills all unset fields with default-values if set - * @hotPath - */ -function fillObjectWithDefaults(rxSchema, obj) { - var defaultKeys = Object.keys(rxSchema.defaultValues); - for (var i = 0; i < defaultKeys.length; ++i) { - var key = defaultKeys[i]; - if (!Object.prototype.hasOwnProperty.call(obj, key) || typeof obj[key] === 'undefined') { - obj[key] = rxSchema.defaultValues[key]; - } - } - return obj; -} -var DEFAULT_CHECKPOINT_SCHEMA = exports.DEFAULT_CHECKPOINT_SCHEMA = { - type: 'object', - properties: { - id: { - type: 'string' - }, - lwt: { - type: 'number' - } - }, - required: ['id', 'lwt'], - additionalProperties: false -}; -//# sourceMappingURL=rx-schema-helper.js.map \ No newline at end of file diff --git a/dist/cjs/rx-schema-helper.js.map b/dist/cjs/rx-schema-helper.js.map deleted file mode 100644 index a28e4c38479..00000000000 --- a/dist/cjs/rx-schema-helper.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"rx-schema-helper.js","names":["_rxError","require","_index","getPseudoSchemaForVersion","version","primaryKey","pseudoSchema","fillWithDefaultSettings","type","properties","maxLength","indexes","required","getSchemaByObjectPath","rxJsonSchema","path","usePath","replace","REGEX_ALL_DOTS","trimDots","ret","getProperty","fillPrimaryKey","primaryPath","jsonSchema","documentData","newPrimary","getComposedPrimaryKeyOfDocumentData","existingPrimary","newRxError","args","schema","getPrimaryFieldOfPrimaryKey","key","getLengthOfPrimaryKey","schemaPart","ensureNotFalsy","compositePrimary","fields","map","field","value","join","separator","normalizeRxJsonSchema","normalizedSchema","sortObject","getDefaultIndex","schemaObj","flatClone","additionalProperties","Object","prototype","hasOwnProperty","call","keyCompression","slice","encrypted","_rev","minLength","_attachments","_deleted","_meta","RX_META_SCHEMA","push","finalFields","getFinalFields","appendToArray","filter","includes","elem","pos","arr","indexOf","useIndexes","index","arIndex","isMaybeReadonlyArray","unshift","length","internalIndexes","idx","hasIndex","Set","indexStr","has","add","exports","lwt","minimum","RX_META_LWT_MINIMUM","maximum","multipleOf","keys","final","forEach","fillObjectWithDefaults","rxSchema","obj","defaultKeys","defaultValues","i","DEFAULT_CHECKPOINT_SCHEMA","id"],"sources":["../../src/rx-schema-helper.ts"],"sourcesContent":["import { newRxError } from './rx-error.ts';\nimport type {\n CompositePrimaryKey,\n DeepReadonly,\n JsonSchema,\n PrimaryKey,\n RxDocumentData,\n RxJsonSchema,\n RxStorageDefaultCheckpoint,\n StringKeys\n} from './types/index.d.ts';\nimport {\n appendToArray,\n ensureNotFalsy,\n flatClone,\n getProperty,\n isMaybeReadonlyArray,\n REGEX_ALL_DOTS,\n RX_META_LWT_MINIMUM,\n sortObject,\n trimDots\n} from './plugins/utils/index.ts';\nimport type { RxSchema } from './rx-schema.ts';\n\n/**\n * Helper function to create a valid RxJsonSchema\n * with a given 
version.\n */\nexport function getPseudoSchemaForVersion(\n version: number,\n primaryKey: StringKeys\n): RxJsonSchema> {\n const pseudoSchema: RxJsonSchema> = fillWithDefaultSettings({\n version,\n type: 'object',\n primaryKey: primaryKey as any,\n properties: {\n [primaryKey]: {\n type: 'string',\n maxLength: 100\n }\n } as any,\n indexes: [\n [primaryKey]\n ],\n required: [primaryKey]\n });\n return pseudoSchema;\n}\n\n/**\n * Returns the sub-schema for a given path\n */\nexport function getSchemaByObjectPath(\n rxJsonSchema: RxJsonSchema,\n path: keyof T | string\n): JsonSchema {\n let usePath: string = path as string;\n usePath = usePath.replace(REGEX_ALL_DOTS, '.properties.');\n usePath = 'properties.' + usePath;\n usePath = trimDots(usePath);\n\n const ret = getProperty(rxJsonSchema, usePath);\n return ret;\n}\n\nexport function fillPrimaryKey(\n primaryPath: keyof T,\n jsonSchema: RxJsonSchema,\n documentData: RxDocumentData\n): RxDocumentData {\n // optimization shortcut.\n if (typeof jsonSchema.primaryKey === 'string') {\n return documentData;\n }\n\n const newPrimary = getComposedPrimaryKeyOfDocumentData(\n jsonSchema,\n documentData\n );\n const existingPrimary: string | undefined = documentData[primaryPath] as any;\n if (\n existingPrimary &&\n existingPrimary !== newPrimary\n ) {\n throw newRxError(\n 'DOC19',\n {\n args: {\n documentData,\n existingPrimary,\n newPrimary,\n },\n schema: jsonSchema\n });\n }\n\n (documentData as any)[primaryPath] = newPrimary;\n return documentData;\n}\n\nexport function getPrimaryFieldOfPrimaryKey(\n primaryKey: PrimaryKey\n): StringKeys {\n if (typeof primaryKey === 'string') {\n return primaryKey as any;\n } else {\n return (primaryKey as CompositePrimaryKey).key;\n }\n}\n\nexport function getLengthOfPrimaryKey(\n schema: RxJsonSchema>\n): number {\n const primaryPath = getPrimaryFieldOfPrimaryKey(schema.primaryKey);\n const schemaPart = getSchemaByObjectPath(schema, primaryPath);\n return 
ensureNotFalsy(schemaPart.maxLength);\n}\n\n/**\n * Returns the composed primaryKey of a document by its data.\n */\nexport function getComposedPrimaryKeyOfDocumentData(\n jsonSchema: RxJsonSchema | RxJsonSchema>,\n documentData: Partial\n): string {\n if (typeof jsonSchema.primaryKey === 'string') {\n return (documentData as any)[jsonSchema.primaryKey];\n }\n\n const compositePrimary: CompositePrimaryKey = jsonSchema.primaryKey as any;\n return compositePrimary.fields.map(field => {\n const value = getProperty(documentData as any, field as string);\n if (typeof value === 'undefined') {\n throw newRxError('DOC18', { args: { field, documentData } });\n }\n return value;\n }).join(compositePrimary.separator);\n}\n\n\n/**\n * Normalize the RxJsonSchema.\n * We need this to ensure everything is set up properly\n * and we have the same hash on schemas that represent the same value but\n * have different json.\n *\n * - Orders the schemas attributes by alphabetical order\n * - Adds the primaryKey to all indexes that do not contain the primaryKey\n * - We need this for deterministic sort order on all queries, which is required for event-reduce to work.\n *\n * @return RxJsonSchema - ordered and filled\n */\nexport function normalizeRxJsonSchema(jsonSchema: RxJsonSchema): RxJsonSchema {\n const normalizedSchema: RxJsonSchema = sortObject(jsonSchema, true);\n return normalizedSchema;\n}\n\n/**\n * If the schema does not specify any index,\n * we add this index so we at least can run RxQuery()\n * and only select non-deleted fields.\n */\nexport function getDefaultIndex(primaryPath: string) {\n return ['_deleted', primaryPath];\n}\n\n/**\n * fills the schema-json with default-settings\n * @return cloned schemaObj\n */\nexport function fillWithDefaultSettings(\n schemaObj: RxJsonSchema\n): RxJsonSchema> {\n schemaObj = flatClone(schemaObj);\n const primaryPath: string = getPrimaryFieldOfPrimaryKey(schemaObj.primaryKey);\n schemaObj.properties = 
flatClone(schemaObj.properties);\n\n // additionalProperties is always false\n schemaObj.additionalProperties = false;\n\n // fill with key-compression-state ()\n if (!Object.prototype.hasOwnProperty.call(schemaObj, 'keyCompression')) {\n schemaObj.keyCompression = false;\n }\n\n // indexes must be array\n schemaObj.indexes = schemaObj.indexes ? schemaObj.indexes.slice(0) : [];\n\n // required must be array\n schemaObj.required = schemaObj.required ? schemaObj.required.slice(0) : [];\n\n // encrypted must be array\n schemaObj.encrypted = schemaObj.encrypted ? schemaObj.encrypted.slice(0) : [];\n\n // add _rev\n (schemaObj.properties as any)._rev = {\n type: 'string',\n minLength: 1\n };\n\n // add attachments\n (schemaObj.properties as any)._attachments = {\n type: 'object'\n };\n\n // add deleted flag\n (schemaObj.properties as any)._deleted = {\n type: 'boolean'\n };\n\n // add meta property\n (schemaObj.properties as any)._meta = RX_META_SCHEMA;\n\n /**\n * meta fields are all required\n */\n schemaObj.required = schemaObj.required ? schemaObj.required.slice(0) : [];\n (schemaObj.required as string[]).push('_deleted');\n (schemaObj.required as string[]).push('_rev');\n (schemaObj.required as string[]).push('_meta');\n (schemaObj.required as string[]).push('_attachments');\n\n // final fields are always required\n const finalFields = getFinalFields(schemaObj);\n appendToArray(schemaObj.required as any, finalFields);\n schemaObj.required = schemaObj.required\n .filter((field: string) => !field.includes('.'))\n .filter((elem: any, pos: any, arr: any) => arr.indexOf(elem) === pos); // unique;\n\n // version is 0 by default\n schemaObj.version = schemaObj.version || 0;\n\n const useIndexes: string[][] = schemaObj.indexes.map(index => {\n const arIndex = isMaybeReadonlyArray(index) ? 
index.slice(0) : [index];\n /**\n * Append primary key to indexes that do not contain the primaryKey.\n * All indexes must have the primaryKey to ensure a deterministic sort order.\n */\n if (!arIndex.includes(primaryPath)) {\n arIndex.push(primaryPath);\n }\n\n // add _deleted flag to all indexes so we can query only non-deleted fields\n // in RxDB itself\n if (arIndex[0] !== '_deleted') {\n arIndex.unshift('_deleted');\n }\n\n return arIndex;\n });\n\n if (useIndexes.length === 0) {\n useIndexes.push(getDefaultIndex(primaryPath));\n }\n\n // we need this index for the getChangedDocumentsSince() method\n useIndexes.push(['_meta.lwt', primaryPath]);\n\n // also add the internalIndexes\n if (schemaObj.internalIndexes) {\n schemaObj.internalIndexes.map(idx => {\n useIndexes.push(idx);\n });\n }\n\n // make indexes unique\n const hasIndex = new Set();\n useIndexes.filter(index => {\n const indexStr = index.join(',');\n if (hasIndex.has(indexStr)) {\n return false;\n } else {\n hasIndex.add(indexStr);\n return true;\n }\n });\n\n schemaObj.indexes = useIndexes;\n\n return schemaObj as any;\n}\n\n\nexport const RX_META_SCHEMA: JsonSchema = {\n type: 'object',\n properties: {\n /**\n * The last-write time.\n * Unix time in milliseconds.\n */\n lwt: {\n type: 'number',\n /**\n * We use 1 as minimum so that the value is never falsy.\n */\n minimum: RX_META_LWT_MINIMUM,\n maximum: 1000000000000000,\n multipleOf: 0.01\n }\n },\n /**\n * Additional properties are allowed\n * and can be used by plugins to set various flags.\n */\n additionalProperties: true as any,\n required: [\n 'lwt'\n ]\n};\n\n\n/**\n * returns the final-fields of the schema\n * @return field-names of the final-fields\n */\nexport function getFinalFields(\n jsonSchema: RxJsonSchema\n): string[] {\n const ret = Object.keys(jsonSchema.properties)\n .filter(key => (jsonSchema as any).properties[key].final);\n\n // primary is also final\n const primaryPath = 
getPrimaryFieldOfPrimaryKey(jsonSchema.primaryKey);\n ret.push(primaryPath);\n\n // fields of composite primary are final\n if (typeof jsonSchema.primaryKey !== 'string') {\n (jsonSchema.primaryKey as CompositePrimaryKey).fields\n .forEach(field => ret.push(field as string));\n }\n\n return ret;\n}\n\n/**\n * fills all unset fields with default-values if set\n * @hotPath\n */\nexport function fillObjectWithDefaults(rxSchema: RxSchema, obj: any): any {\n const defaultKeys = Object.keys(rxSchema.defaultValues);\n for (let i = 0; i < defaultKeys.length; ++i) {\n const key = defaultKeys[i];\n if (!Object.prototype.hasOwnProperty.call(obj, key) || typeof obj[key] === 'undefined') {\n obj[key] = rxSchema.defaultValues[key];\n }\n }\n return obj;\n}\n\nexport const DEFAULT_CHECKPOINT_SCHEMA: DeepReadonly> = {\n type: 'object',\n properties: {\n id: {\n type: 'string'\n },\n lwt: {\n type: 'number'\n }\n },\n required: [\n 'id',\n 'lwt'\n ],\n additionalProperties: false\n} as const;\n"],"mappings":";;;;;;;;;;;;;;;;;AAAA,IAAAA,QAAA,GAAAC,OAAA;AAWA,IAAAC,MAAA,GAAAD,OAAA;AAaA;AACA;AACA;AACA;AACO,SAASE,yBAAyBA,CACrCC,OAAe,EACfC,UAAyB,EACM;EAC/B,IAAMC,YAA6C,GAAGC,uBAAuB,CAAC;IAC1EH,OAAO;IACPI,IAAI,EAAE,QAAQ;IACdH,UAAU,EAAEA,UAAiB;IAC7BI,UAAU,EAAE;MACR,CAACJ,UAAU,GAAG;QACVG,IAAI,EAAE,QAAQ;QACdE,SAAS,EAAE;MACf;IACJ,CAAQ;IACRC,OAAO,EAAE,CACL,CAACN,UAAU,CAAC,CACf;IACDO,QAAQ,EAAE,CAACP,UAAU;EACzB,CAAC,CAAC;EACF,OAAOC,YAAY;AACvB;;AAEA;AACA;AACA;AACO,SAASO,qBAAqBA,CACjCC,YAA6B,EAC7BC,IAAsB,EACZ;EACV,IAAIC,OAAe,GAAGD,IAAc;EACpCC,OAAO,GAAGA,OAAO,CAACC,OAAO,CAACC,qBAAc,EAAE,cAAc,CAAC;EACzDF,OAAO,GAAG,aAAa,GAAGA,OAAO;EACjCA,OAAO,GAAG,IAAAG,eAAQ,EAACH,OAAO,CAAC;EAE3B,IAAMI,GAAG,GAAG,IAAAC,kBAAW,EAACP,YAAY,EAAEE,OAAO,CAAC;EAC9C,OAAOI,GAAG;AACd;AAEO,SAASE,cAAcA,CAC1BC,WAAoB,EACpBC,UAA2B,EAC3BC,YAA+B,EACd;EACjB;EACA,IAAI,OAAOD,UAAU,CAACnB,UAAU,KAAK,QAAQ,EAAE;IAC3C,OAAOoB,YAAY;EACvB;EAEA,IAAMC,UAAU,GAAGC,mCAAmC,CAClDH,UAAU,EACVC,YACJ,CAAC;EACD,IAAMG,eAAmC,GAAGH,YAAY,CAACF,WAAW,CAAQ;EAC5E,IACIK
,eAAe,IACfA,eAAe,KAAKF,UAAU,EAChC;IACE,MAAM,IAAAG,mBAAU,EACZ,OAAO,EACP;MACIC,IAAI,EAAE;QACFL,YAAY;QACZG,eAAe;QACfF;MACJ,CAAC;MACDK,MAAM,EAAEP;IACZ,CAAC,CAAC;EACV;EAECC,YAAY,CAASF,WAAW,CAAC,GAAGG,UAAU;EAC/C,OAAOD,YAAY;AACvB;AAEO,SAASO,2BAA2BA,CACvC3B,UAAiC,EACZ;EACrB,IAAI,OAAOA,UAAU,KAAK,QAAQ,EAAE;IAChC,OAAOA,UAAU;EACrB,CAAC,MAAM;IACH,OAAQA,UAAU,CAAoC4B,GAAG;EAC7D;AACJ;AAEO,SAASC,qBAAqBA,CACjCH,MAA+C,EACzC;EACN,IAAMR,WAAW,GAAGS,2BAA2B,CAACD,MAAM,CAAC1B,UAAU,CAAC;EAClE,IAAM8B,UAAU,GAAGtB,qBAAqB,CAACkB,MAAM,EAAER,WAAW,CAAC;EAC7D,OAAO,IAAAa,qBAAc,EAACD,UAAU,CAACzB,SAAS,CAAC;AAC/C;;AAEA;AACA;AACA;AACO,SAASiB,mCAAmCA,CAC/CH,UAA6E,EAC7EC,YAAgC,EAC1B;EACN,IAAI,OAAOD,UAAU,CAACnB,UAAU,KAAK,QAAQ,EAAE;IAC3C,OAAQoB,YAAY,CAASD,UAAU,CAACnB,UAAU,CAAC;EACvD;EAEA,IAAMgC,gBAAgD,GAAGb,UAAU,CAACnB,UAAiB;EACrF,OAAOgC,gBAAgB,CAACC,MAAM,CAACC,GAAG,CAACC,KAAK,IAAI;IACxC,IAAMC,KAAK,GAAG,IAAApB,kBAAW,EAACI,YAAY,EAASe,KAAe,CAAC;IAC/D,IAAI,OAAOC,KAAK,KAAK,WAAW,EAAE;MAC9B,MAAM,IAAAZ,mBAAU,EAAC,OAAO,EAAE;QAAEC,IAAI,EAAE;UAAEU,KAAK;UAAEf;QAAa;MAAE,CAAC,CAAC;IAChE;IACA,OAAOgB,KAAK;EAChB,CAAC,CAAC,CAACC,IAAI,CAACL,gBAAgB,CAACM,SAAS,CAAC;AACvC;;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACO,SAASC,qBAAqBA,CAAIpB,UAA2B,EAAmB;EACnF,IAAMqB,gBAAiC,GAAG,IAAAC,iBAAU,EAACtB,UAAU,EAAE,IAAI,CAAC;EACtE,OAAOqB,gBAAgB;AAC3B;;AAEA;AACA;AACA;AACA;AACA;AACO,SAASE,eAAeA,CAACxB,WAAmB,EAAE;EACjD,OAAO,CAAC,UAAU,EAAEA,WAAW,CAAC;AACpC;;AAEA;AACA;AACA;AACA;AACO,SAAShB,uBAAuBA,CACnCyC,SAA0B,EACK;EAC/BA,SAAS,GAAG,IAAAC,gBAAS,EAACD,SAAS,CAAC;EAChC,IAAMzB,WAAmB,GAAGS,2BAA2B,CAACgB,SAAS,CAAC3C,UAAU,CAAC;EAC7E2C,SAAS,CAACvC,UAAU,GAAG,IAAAwC,gBAAS,EAACD,SAAS,CAACvC,UAAU,CAAC;;EAEtD;EACAuC,SAAS,CAACE,oBAAoB,GAAG,KAAK;;EAEtC;EACA,IAAI,CAACC,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACN,SAAS,EAAE,gBAAgB,CAAC,EAAE;IACpEA,SAAS,CAACO,cAAc,GAAG,KAAK;EACpC;;EAEA;EACAP,SAAS,CAACrC,OAAO,GAAGqC,SAAS,CAACrC,OAAO,GAAGqC,SAAS,CAACrC,OAAO,CAAC6C,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE;;EAEvE;EACAR,SAAS,CAACpC,QAAQ,GAAGoC,SAAS,CAACpC,QAAQ,
GAAGoC,SAAS,CAACpC,QAAQ,CAAC4C,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE;;EAE1E;EACAR,SAAS,CAACS,SAAS,GAAGT,SAAS,CAACS,SAAS,GAAGT,SAAS,CAACS,SAAS,CAACD,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE;;EAE7E;EACCR,SAAS,CAACvC,UAAU,CAASiD,IAAI,GAAG;IACjClD,IAAI,EAAE,QAAQ;IACdmD,SAAS,EAAE;EACf,CAAC;;EAED;EACCX,SAAS,CAACvC,UAAU,CAASmD,YAAY,GAAG;IACzCpD,IAAI,EAAE;EACV,CAAC;;EAED;EACCwC,SAAS,CAACvC,UAAU,CAASoD,QAAQ,GAAG;IACrCrD,IAAI,EAAE;EACV,CAAC;;EAED;EACCwC,SAAS,CAACvC,UAAU,CAASqD,KAAK,GAAGC,cAAc;;EAEpD;AACJ;AACA;EACIf,SAAS,CAACpC,QAAQ,GAAGoC,SAAS,CAACpC,QAAQ,GAAGoC,SAAS,CAACpC,QAAQ,CAAC4C,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE;EACzER,SAAS,CAACpC,QAAQ,CAAcoD,IAAI,CAAC,UAAU,CAAC;EAChDhB,SAAS,CAACpC,QAAQ,CAAcoD,IAAI,CAAC,MAAM,CAAC;EAC5ChB,SAAS,CAACpC,QAAQ,CAAcoD,IAAI,CAAC,OAAO,CAAC;EAC7ChB,SAAS,CAACpC,QAAQ,CAAcoD,IAAI,CAAC,cAAc,CAAC;;EAErD;EACA,IAAMC,WAAW,GAAGC,cAAc,CAAClB,SAAS,CAAC;EAC7C,IAAAmB,oBAAa,EAACnB,SAAS,CAACpC,QAAQ,EAASqD,WAAW,CAAC;EACrDjB,SAAS,CAACpC,QAAQ,GAAGoC,SAAS,CAACpC,QAAQ,CAClCwD,MAAM,CAAE5B,KAAa,IAAK,CAACA,KAAK,CAAC6B,QAAQ,CAAC,GAAG,CAAC,CAAC,CAC/CD,MAAM,CAAC,CAACE,IAAS,EAAEC,GAAQ,EAAEC,GAAQ,KAAKA,GAAG,CAACC,OAAO,CAACH,IAAI,CAAC,KAAKC,GAAG,CAAC,CAAC,CAAC;;EAE3E;EACAvB,SAAS,CAAC5C,OAAO,GAAG4C,SAAS,CAAC5C,OAAO,IAAI,CAAC;EAE1C,IAAMsE,UAAsB,GAAG1B,SAAS,CAACrC,OAAO,CAAC4B,GAAG,CAACoC,KAAK,IAAI;IAC1D,IAAMC,OAAO,GAAG,IAAAC,2BAAoB,EAACF,KAAK,CAAC,GAAGA,KAAK,CAACnB,KAAK,CAAC,CAAC,CAAC,GAAG,CAACmB,KAAK,CAAC;IACtE;AACR;AACA;AACA;IACQ,IAAI,CAACC,OAAO,CAACP,QAAQ,CAAC9C,WAAW,CAAC,EAAE;MAChCqD,OAAO,CAACZ,IAAI,CAACzC,WAAW,CAAC;IAC7B;;IAEA;IACA;IACA,IAAIqD,OAAO,CAAC,CAAC,CAAC,KAAK,UAAU,EAAE;MAC3BA,OAAO,CAACE,OAAO,CAAC,UAAU,CAAC;IAC/B;IAEA,OAAOF,OAAO;EAClB,CAAC,CAAC;EAEF,IAAIF,UAAU,CAACK,MAAM,KAAK,CAAC,EAAE;IACzBL,UAAU,CAACV,IAAI,CAACjB,eAAe,CAACxB,WAAW,CAAC,CAAC;EACjD;;EAEA;EACAmD,UAAU,CAACV,IAAI,CAAC,CAAC,WAAW,EAAEzC,WAAW,CAAC,CAAC;;EAE3C;EACA,IAAIyB,SAAS,CAACgC,eAAe,EAAE;IAC3BhC,SAAS,CAACgC,eAAe,CAACzC,GAAG,CAAC0C,GAAG,IAAI;MACjCP,UAAU,CAACV,IAAI,CAACiB,GAAG,CAAC;IACxB,CAAC,CAAC;EACN;;EAEA;EACA,IAAMC,QAAQ
,GAAG,IAAIC,GAAG,CAAS,CAAC;EAClCT,UAAU,CAACN,MAAM,CAACO,KAAK,IAAI;IACvB,IAAMS,QAAQ,GAAGT,KAAK,CAACjC,IAAI,CAAC,GAAG,CAAC;IAChC,IAAIwC,QAAQ,CAACG,GAAG,CAACD,QAAQ,CAAC,EAAE;MACxB,OAAO,KAAK;IAChB,CAAC,MAAM;MACHF,QAAQ,CAACI,GAAG,CAACF,QAAQ,CAAC;MACtB,OAAO,IAAI;IACf;EACJ,CAAC,CAAC;EAEFpC,SAAS,CAACrC,OAAO,GAAG+D,UAAU;EAE9B,OAAO1B,SAAS;AACpB;AAGO,IAAMe,cAA0B,GAAAwB,OAAA,CAAAxB,cAAA,GAAG;EACtCvD,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACR;AACR;AACA;AACA;IACQ+E,GAAG,EAAE;MACDhF,IAAI,EAAE,QAAQ;MACd;AACZ;AACA;MACYiF,OAAO,EAAEC,0BAAmB;MAC5BC,OAAO,EAAE,gBAAgB;MACzBC,UAAU,EAAE;IAChB;EACJ,CAAC;EACD;AACJ;AACA;AACA;EACI1C,oBAAoB,EAAE,IAAW;EACjCtC,QAAQ,EAAE,CACN,KAAK;AAEb,CAAC;;AAGD;AACA;AACA;AACA;AACO,SAASsD,cAAcA,CAC1B1C,UAA2B,EACnB;EACR,IAAMJ,GAAG,GAAG+B,MAAM,CAAC0C,IAAI,CAACrE,UAAU,CAACf,UAAU,CAAC,CACzC2D,MAAM,CAACnC,GAAG,IAAKT,UAAU,CAASf,UAAU,CAACwB,GAAG,CAAC,CAAC6D,KAAK,CAAC;;EAE7D;EACA,IAAMvE,WAAW,GAAGS,2BAA2B,CAACR,UAAU,CAACnB,UAAU,CAAC;EACtEe,GAAG,CAAC4C,IAAI,CAACzC,WAAW,CAAC;;EAErB;EACA,IAAI,OAAOC,UAAU,CAACnB,UAAU,KAAK,QAAQ,EAAE;IAC1CmB,UAAU,CAACnB,UAAU,CAA4BiC,MAAM,CACnDyD,OAAO,CAACvD,KAAK,IAAIpB,GAAG,CAAC4C,IAAI,CAACxB,KAAe,CAAC,CAAC;EACpD;EAEA,OAAOpB,GAAG;AACd;;AAEA;AACA;AACA;AACA;AACO,SAAS4E,sBAAsBA,CAACC,QAAuB,EAAEC,GAAQ,EAAO;EAC3E,IAAMC,WAAW,GAAGhD,MAAM,CAAC0C,IAAI,CAACI,QAAQ,CAACG,aAAa,CAAC;EACvD,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGF,WAAW,CAACpB,MAAM,EAAE,EAAEsB,CAAC,EAAE;IACzC,IAAMpE,GAAG,GAAGkE,WAAW,CAACE,CAAC,CAAC;IAC1B,IAAI,CAAClD,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAAC4C,GAAG,EAAEjE,GAAG,CAAC,IAAI,OAAOiE,GAAG,CAACjE,GAAG,CAAC,KAAK,WAAW,EAAE;MACpFiE,GAAG,CAACjE,GAAG,CAAC,GAAGgE,QAAQ,CAACG,aAAa,CAACnE,GAAG,CAAC;IAC1C;EACJ;EACA,OAAOiE,GAAG;AACd;AAEO,IAAMI,yBAA+E,GAAAf,OAAA,CAAAe,yBAAA,GAAG;EAC3F9F,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACR8F,EAAE,EAAE;MACA/F,IAAI,EAAE;IACV,CAAC;IACDgF,GAAG,EAAE;MACDhF,IAAI,EAAE;IACV;EACJ,CAAC;EACDI,QAAQ,EAAE,CACN,IAAI,EACJ,KAAK,CACR;EACDsC,oBAAoB,EAAE;AAC1B,CAAU","ignoreList":[]} \ No newline at end of file diff --git 
a/dist/cjs/rx-schema.js b/dist/cjs/rx-schema.js deleted file mode 100644 index a7705873a6e..00000000000 --- a/dist/cjs/rx-schema.js +++ /dev/null @@ -1,169 +0,0 @@ -"use strict"; - -var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RxSchema = void 0; -exports.createRxSchema = createRxSchema; -exports.getIndexes = getIndexes; -exports.getPreviousVersions = getPreviousVersions; -exports.isRxSchema = isRxSchema; -exports.toTypedRxJsonSchema = toTypedRxJsonSchema; -var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass")); -var _index = require("./plugins/utils/index.js"); -var _rxError = require("./rx-error.js"); -var _hooks = require("./hooks.js"); -var _rxSchemaHelper = require("./rx-schema-helper.js"); -var _overwritable = require("./overwritable.js"); -var RxSchema = exports.RxSchema = /*#__PURE__*/function () { - function RxSchema(jsonSchema, hashFunction) { - this.jsonSchema = jsonSchema; - this.hashFunction = hashFunction; - this.indexes = getIndexes(this.jsonSchema); - - // primary is always required - this.primaryPath = (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(this.jsonSchema.primaryKey); - this.finalFields = (0, _rxSchemaHelper.getFinalFields)(this.jsonSchema); - } - var _proto = RxSchema.prototype; - /** - * checks if a given change on a document is allowed - * Ensures that: - * - final fields are not modified - * @throws {Error} if not valid - */ - _proto.validateChange = function validateChange(dataBefore, dataAfter) { - this.finalFields.forEach(fieldName => { - if (!(0, _index.deepEqual)(dataBefore[fieldName], dataAfter[fieldName])) { - throw (0, _rxError.newRxError)('DOC9', { - dataBefore, - dataAfter, - fieldName, - schema: this.jsonSchema - }); - } - }); - } - - /** - * creates the schema-based document-prototype, - * see RxCollection.getDocumentPrototype() - */; - _proto.getDocumentPrototype = 
function getDocumentPrototype() { - var proto = {}; - - /** - * On the top level, we know all keys - * and therefore do not have to create a new Proxy object - * for each document. Instead we define the getter in the prototype once. - */ - var pathProperties = (0, _rxSchemaHelper.getSchemaByObjectPath)(this.jsonSchema, ''); - Object.keys(pathProperties).forEach(key => { - var fullPath = key; - - // getter - value - proto.__defineGetter__(key, function () { - if (!this.get || typeof this.get !== 'function') { - /** - * When an object gets added to the state of a vuejs-component, - * it happens that this getter is called with another scope. - * To prevent errors, we have to return undefined in this case - */ - return undefined; - } - var ret = this.get(fullPath); - return ret; - }); - // getter - observable$ - Object.defineProperty(proto, key + '$', { - get: function () { - return this.get$(fullPath); - }, - enumerable: false, - configurable: false - }); - // getter - reactivity$$ - Object.defineProperty(proto, key + '$$', { - get: function () { - return this.get$$(fullPath); - }, - enumerable: false, - configurable: false - }); - // getter - populate_ - Object.defineProperty(proto, key + '_', { - get: function () { - return this.populate(fullPath); - }, - enumerable: false, - configurable: false - }); - }); - (0, _index.overwriteGetterForCaching)(this, 'getDocumentPrototype', () => proto); - return proto; - }; - _proto.getPrimaryOfDocumentData = function getPrimaryOfDocumentData(documentData) { - return (0, _rxSchemaHelper.getComposedPrimaryKeyOfDocumentData)(this.jsonSchema, documentData); - }; - return (0, _createClass2.default)(RxSchema, [{ - key: "version", - get: function () { - return this.jsonSchema.version; - } - }, { - key: "defaultValues", - get: function () { - var values = {}; - Object.entries(this.jsonSchema.properties).filter(([, v]) => Object.prototype.hasOwnProperty.call(v, 'default')).forEach(([k, v]) => values[k] = v.default); - return (0, 
_index.overwriteGetterForCaching)(this, 'defaultValues', values); - } - - /** - * @overrides itself on the first call - * - * TODO this should be a pure function that - * caches the hash in a WeakMap. - */ - }, { - key: "hash", - get: function () { - return (0, _index.overwriteGetterForCaching)(this, 'hash', this.hashFunction(JSON.stringify(this.jsonSchema))); - } - }]); -}(); -function getIndexes(jsonSchema) { - return (jsonSchema.indexes || []).map(index => (0, _index.isMaybeReadonlyArray)(index) ? index : [index]); -} - -/** - * array with previous version-numbers - */ -function getPreviousVersions(schema) { - var version = schema.version ? schema.version : 0; - var c = 0; - return new Array(version).fill(0).map(() => c++); -} -function createRxSchema(jsonSchema, hashFunction, runPreCreateHooks = true) { - if (runPreCreateHooks) { - (0, _hooks.runPluginHooks)('preCreateRxSchema', jsonSchema); - } - var useJsonSchema = (0, _rxSchemaHelper.fillWithDefaultSettings)(jsonSchema); - useJsonSchema = (0, _rxSchemaHelper.normalizeRxJsonSchema)(useJsonSchema); - _overwritable.overwritable.deepFreezeWhenDevMode(useJsonSchema); - var schema = new RxSchema(useJsonSchema, hashFunction); - (0, _hooks.runPluginHooks)('createRxSchema', schema); - return schema; -} -function isRxSchema(obj) { - return obj instanceof RxSchema; -} - -/** - * Used as helper function the generate the document type out of the schema via typescript. 
- * @link https://github.com/pubkey/rxdb/discussions/3467 - */ -function toTypedRxJsonSchema(schema) { - return schema; -} -//# sourceMappingURL=rx-schema.js.map \ No newline at end of file diff --git a/dist/cjs/rx-schema.js.map b/dist/cjs/rx-schema.js.map deleted file mode 100644 index b2b5611c95e..00000000000 --- a/dist/cjs/rx-schema.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-schema.js","names":["_index","require","_rxError","_hooks","_rxSchemaHelper","_overwritable","RxSchema","exports","jsonSchema","hashFunction","indexes","getIndexes","primaryPath","getPrimaryFieldOfPrimaryKey","primaryKey","finalFields","getFinalFields","_proto","prototype","validateChange","dataBefore","dataAfter","forEach","fieldName","deepEqual","newRxError","schema","getDocumentPrototype","proto","pathProperties","getSchemaByObjectPath","Object","keys","key","fullPath","__defineGetter__","get","undefined","ret","defineProperty","get$","enumerable","configurable","get$$","populate","overwriteGetterForCaching","getPrimaryOfDocumentData","documentData","getComposedPrimaryKeyOfDocumentData","_createClass2","default","version","values","entries","properties","filter","v","hasOwnProperty","call","k","JSON","stringify","map","index","isMaybeReadonlyArray","getPreviousVersions","c","Array","fill","createRxSchema","runPreCreateHooks","runPluginHooks","useJsonSchema","fillWithDefaultSettings","normalizeRxJsonSchema","overwritable","deepFreezeWhenDevMode","isRxSchema","obj","toTypedRxJsonSchema"],"sources":["../../src/rx-schema.ts"],"sourcesContent":["import {\n overwriteGetterForCaching,\n isMaybeReadonlyArray,\n deepEqual\n} from './plugins/utils/index.ts';\nimport {\n newRxError,\n} from './rx-error.ts';\nimport {\n runPluginHooks\n} from './hooks.ts';\n\nimport type {\n DeepMutable,\n DeepReadonly,\n HashFunction,\n MaybeReadonly,\n RxDocument,\n RxDocumentData,\n RxJsonSchema,\n StringKeys\n} from './types/index.d.ts';\nimport {\n fillWithDefaultSettings,\n 
getComposedPrimaryKeyOfDocumentData,\n getFinalFields,\n getPrimaryFieldOfPrimaryKey,\n getSchemaByObjectPath,\n normalizeRxJsonSchema\n} from './rx-schema-helper.ts';\nimport { overwritable } from './overwritable.ts';\n\nexport class RxSchema {\n public indexes: MaybeReadonly[];\n public readonly primaryPath: StringKeys>;\n public finalFields: string[];\n\n constructor(\n public readonly jsonSchema: RxJsonSchema>,\n public readonly hashFunction: HashFunction\n ) {\n this.indexes = getIndexes(this.jsonSchema);\n\n // primary is always required\n this.primaryPath = getPrimaryFieldOfPrimaryKey(this.jsonSchema.primaryKey);\n\n this.finalFields = getFinalFields(this.jsonSchema);\n }\n\n public get version(): number {\n return this.jsonSchema.version;\n }\n\n public get defaultValues(): { [P in keyof RxDocType]: RxDocType[P] } {\n const values = {} as { [P in keyof RxDocType]: RxDocType[P] };\n Object\n .entries(this.jsonSchema.properties)\n .filter(([, v]) => Object.prototype.hasOwnProperty.call(v, 'default'))\n .forEach(([k, v]) => (values as any)[k] = (v as any).default);\n return overwriteGetterForCaching(\n this,\n 'defaultValues',\n values\n );\n }\n\n /**\n * @overrides itself on the first call\n *\n * TODO this should be a pure function that\n * caches the hash in a WeakMap.\n */\n public get hash(): Promise {\n return overwriteGetterForCaching(\n this,\n 'hash',\n this.hashFunction(JSON.stringify(this.jsonSchema))\n );\n }\n\n /**\n * checks if a given change on a document is allowed\n * Ensures that:\n * - final fields are not modified\n * @throws {Error} if not valid\n */\n validateChange(dataBefore: any, dataAfter: any): void {\n this.finalFields.forEach(fieldName => {\n if (!deepEqual(dataBefore[fieldName], dataAfter[fieldName])) {\n throw newRxError('DOC9', {\n dataBefore,\n dataAfter,\n fieldName,\n schema: this.jsonSchema\n });\n }\n });\n }\n\n /**\n * creates the schema-based document-prototype,\n * see RxCollection.getDocumentPrototype()\n */\n public 
getDocumentPrototype(): any {\n const proto: any = {};\n\n /**\n * On the top level, we know all keys\n * and therefore do not have to create a new Proxy object\n * for each document. Instead we define the getter in the prototype once.\n */\n const pathProperties = getSchemaByObjectPath(\n this.jsonSchema,\n ''\n );\n Object.keys(pathProperties)\n .forEach(key => {\n const fullPath = key;\n\n // getter - value\n proto.__defineGetter__(\n key,\n function (this: RxDocument) {\n if (!this.get || typeof this.get !== 'function') {\n /**\n * When an object gets added to the state of a vuejs-component,\n * it happens that this getter is called with another scope.\n * To prevent errors, we have to return undefined in this case\n */\n return undefined;\n }\n const ret = this.get(fullPath);\n return ret;\n }\n );\n // getter - observable$\n Object.defineProperty(proto, key + '$', {\n get: function () {\n return this.get$(fullPath);\n },\n enumerable: false,\n configurable: false\n });\n // getter - reactivity$$\n Object.defineProperty(proto, key + '$$', {\n get: function () {\n return this.get$$(fullPath);\n },\n enumerable: false,\n configurable: false\n });\n // getter - populate_\n Object.defineProperty(proto, key + '_', {\n get: function () {\n return this.populate(fullPath);\n },\n enumerable: false,\n configurable: false\n });\n });\n\n overwriteGetterForCaching(\n this,\n 'getDocumentPrototype',\n () => proto\n );\n return proto;\n }\n\n\n getPrimaryOfDocumentData(\n documentData: Partial\n ): string {\n return getComposedPrimaryKeyOfDocumentData(\n this.jsonSchema,\n documentData\n );\n }\n}\n\nexport function getIndexes(\n jsonSchema: RxJsonSchema\n): MaybeReadonly[] {\n return (jsonSchema.indexes || []).map(index => isMaybeReadonlyArray(index) ? index : [index]);\n}\n\n/**\n * array with previous version-numbers\n */\nexport function getPreviousVersions(schema: RxJsonSchema): number[] {\n const version = schema.version ? 
schema.version : 0;\n let c = 0;\n return new Array(version)\n .fill(0)\n .map(() => c++);\n}\n\nexport function createRxSchema(\n jsonSchema: RxJsonSchema,\n hashFunction: HashFunction,\n runPreCreateHooks = true\n): RxSchema {\n if (runPreCreateHooks) {\n runPluginHooks('preCreateRxSchema', jsonSchema);\n }\n\n let useJsonSchema = fillWithDefaultSettings(jsonSchema);\n useJsonSchema = normalizeRxJsonSchema(useJsonSchema);\n overwritable.deepFreezeWhenDevMode(useJsonSchema);\n\n const schema = new RxSchema(useJsonSchema, hashFunction);\n runPluginHooks('createRxSchema', schema);\n return schema;\n}\n\nexport function isRxSchema(obj: any): boolean {\n return obj instanceof RxSchema;\n}\n\n/**\n * Used as helper function the generate the document type out of the schema via typescript.\n * @link https://github.com/pubkey/rxdb/discussions/3467\n */\nexport function toTypedRxJsonSchema>>(schema: T): DeepMutable {\n return schema as any;\n}\n"],"mappings":";;;;;;;;;;;;;AAAA,IAAAA,MAAA,GAAAC,OAAA;AAKA,IAAAC,QAAA,GAAAD,OAAA;AAGA,IAAAE,MAAA,GAAAF,OAAA;AAcA,IAAAG,eAAA,GAAAH,OAAA;AAQA,IAAAI,aAAA,GAAAJ,OAAA;AAAiD,IAEpCK,QAAQ,GAAAC,OAAA,CAAAD,QAAA;EAKjB,SAAAA,SACoBE,UAAmD,EACnDC,YAA0B,EAC5C;IAAA,KAFkBD,UAAmD,GAAnDA,UAAmD;IAAA,KACnDC,YAA0B,GAA1BA,YAA0B;IAE1C,IAAI,CAACC,OAAO,GAAGC,UAAU,CAAC,IAAI,CAACH,UAAU,CAAC;;IAE1C;IACA,IAAI,CAACI,WAAW,GAAG,IAAAC,2CAA2B,EAAC,IAAI,CAACL,UAAU,CAACM,UAAU,CAAC;IAE1E,IAAI,CAACC,WAAW,GAAG,IAAAC,8BAAc,EAAC,IAAI,CAACR,UAAU,CAAC;EACtD;EAAC,IAAAS,MAAA,GAAAX,QAAA,CAAAY,SAAA;EAiCD;AACJ;AACA;AACA;AACA;AACA;EALID,MAAA,CAMAE,cAAc,GAAd,SAAAA,eAAeC,UAAe,EAAEC,SAAc,EAAQ;IAClD,IAAI,CAACN,WAAW,CAACO,OAAO,CAACC,SAAS,IAAI;MAClC,IAAI,CAAC,IAAAC,gBAAS,EAACJ,UAAU,CAACG,SAAS,CAAC,EAAEF,SAAS,CAACE,SAAS,CAAC,CAAC,EAAE;QACzD,MAAM,IAAAE,mBAAU,EAAC,MAAM,EAAE;UACrBL,UAAU;UACVC,SAAS;UACTE,SAAS;UACTG,MAAM,EAAE,IAAI,CAAClB;QACjB,CAAC,CAAC;MACN;IACJ,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA,KAHI;EAAAS,MAAA,CAIOU,oBAAoB,GAA3B,SAAAA,qBAAA,EAAmC;IAC/B,IAAMC,KAAU,GAAG,CAAC,CAAC;;IAErB;AAC
R;AACA;AACA;AACA;IACQ,IAAMC,cAAc,GAAG,IAAAC,qCAAqB,EACxC,IAAI,CAACtB,UAAU,EACf,EACJ,CAAC;IACDuB,MAAM,CAACC,IAAI,CAACH,cAAc,CAAC,CACtBP,OAAO,CAACW,GAAG,IAAI;MACZ,IAAMC,QAAQ,GAAGD,GAAG;;MAEpB;MACAL,KAAK,CAACO,gBAAgB,CAClBF,GAAG,EACH,YAA4B;QACxB,IAAI,CAAC,IAAI,CAACG,GAAG,IAAI,OAAO,IAAI,CAACA,GAAG,KAAK,UAAU,EAAE;UAC7C;AAC5B;AACA;AACA;AACA;UAC4B,OAAOC,SAAS;QACpB;QACA,IAAMC,GAAG,GAAG,IAAI,CAACF,GAAG,CAACF,QAAQ,CAAC;QAC9B,OAAOI,GAAG;MACd,CACJ,CAAC;MACD;MACAP,MAAM,CAACQ,cAAc,CAACX,KAAK,EAAEK,GAAG,GAAG,GAAG,EAAE;QACpCG,GAAG,EAAE,SAAAA,CAAA,EAAY;UACb,OAAO,IAAI,CAACI,IAAI,CAACN,QAAQ,CAAC;QAC9B,CAAC;QACDO,UAAU,EAAE,KAAK;QACjBC,YAAY,EAAE;MAClB,CAAC,CAAC;MACF;MACAX,MAAM,CAACQ,cAAc,CAACX,KAAK,EAAEK,GAAG,GAAG,IAAI,EAAE;QACrCG,GAAG,EAAE,SAAAA,CAAA,EAAY;UACb,OAAO,IAAI,CAACO,KAAK,CAACT,QAAQ,CAAC;QAC/B,CAAC;QACDO,UAAU,EAAE,KAAK;QACjBC,YAAY,EAAE;MAClB,CAAC,CAAC;MACF;MACAX,MAAM,CAACQ,cAAc,CAACX,KAAK,EAAEK,GAAG,GAAG,GAAG,EAAE;QACpCG,GAAG,EAAE,SAAAA,CAAA,EAAY;UACb,OAAO,IAAI,CAACQ,QAAQ,CAACV,QAAQ,CAAC;QAClC,CAAC;QACDO,UAAU,EAAE,KAAK;QACjBC,YAAY,EAAE;MAClB,CAAC,CAAC;IACN,CAAC,CAAC;IAEN,IAAAG,gCAAyB,EACrB,IAAI,EACJ,sBAAsB,EACtB,MAAMjB,KACV,CAAC;IACD,OAAOA,KAAK;EAChB,CAAC;EAAAX,MAAA,CAGD6B,wBAAwB,GAAxB,SAAAA,yBACIC,YAAgC,EAC1B;IACN,OAAO,IAAAC,mDAAmC,EACtC,IAAI,CAACxC,UAAU,EACfuC,YACJ,CAAC;EACL,CAAC;EAAA,WAAAE,aAAA,CAAAC,OAAA,EAAA5C,QAAA;IAAA2B,GAAA;IAAAG,GAAA,EAhID,SAAAA,CAAA,EAA6B;MACzB,OAAO,IAAI,CAAC5B,UAAU,CAAC2C,OAAO;IAClC;EAAC;IAAAlB,GAAA;IAAAG,GAAA,EAED,SAAAA,CAAA,EAAqE;MACjE,IAAMgB,MAAM,GAAG,CAAC,CAA6C;MAC7DrB,MAAM,CACDsB,OAAO,CAAC,IAAI,CAAC7C,UAAU,CAAC8C,UAAU,CAAC,CACnCC,MAAM,CAAC,CAAC,GAAGC,CAAC,CAAC,KAAKzB,MAAM,CAACb,SAAS,CAACuC,cAAc,CAACC,IAAI,CAACF,CAAC,EAAE,SAAS,CAAC,CAAC,CACrElC,OAAO,CAAC,CAAC,CAACqC,CAAC,EAAEH,CAAC,CAAC,KAAMJ,MAAM,CAASO,CAAC,CAAC,GAAIH,CAAC,CAASN,OAAO,CAAC;MACjE,OAAO,IAAAL,gCAAyB,EAC5B,IAAI,EACJ,eAAe,EACfO,MACJ,CAAC;IACL;;IAEA;AACJ;AACA;AACA;AACA;AACA;EALI;IAAAnB,GAAA;IAAAG,GAAA,EAMA,SAAAA,CAAA,EAAmC;MAC/B,OAAO,IAAAS,gCAAyB,EAC5B,IAAI,EACJ,MAAM,EACN,IAAI,CAACpC,
YAAY,CAACmD,IAAI,CAACC,SAAS,CAAC,IAAI,CAACrD,UAAU,CAAC,CACrD,CAAC;IACL;EAAC;AAAA;AAsGE,SAASG,UAAUA,CACtBH,UAAmC,EACV;EACzB,OAAO,CAACA,UAAU,CAACE,OAAO,IAAI,EAAE,EAAEoD,GAAG,CAACC,KAAK,IAAI,IAAAC,2BAAoB,EAACD,KAAK,CAAC,GAAGA,KAAK,GAAG,CAACA,KAAK,CAAC,CAAC;AACjG;;AAEA;AACA;AACA;AACO,SAASE,mBAAmBA,CAACvC,MAAyB,EAAY;EACrE,IAAMyB,OAAO,GAAGzB,MAAM,CAACyB,OAAO,GAAGzB,MAAM,CAACyB,OAAO,GAAG,CAAC;EACnD,IAAIe,CAAC,GAAG,CAAC;EACT,OAAO,IAAIC,KAAK,CAAChB,OAAO,CAAC,CACpBiB,IAAI,CAAC,CAAC,CAAC,CACPN,GAAG,CAAC,MAAMI,CAAC,EAAE,CAAC;AACvB;AAEO,SAASG,cAAcA,CAC1B7D,UAA2B,EAC3BC,YAA0B,EAC1B6D,iBAAiB,GAAG,IAAI,EACb;EACX,IAAIA,iBAAiB,EAAE;IACnB,IAAAC,qBAAc,EAAC,mBAAmB,EAAE/D,UAAU,CAAC;EACnD;EAEA,IAAIgE,aAAa,GAAG,IAAAC,uCAAuB,EAACjE,UAAU,CAAC;EACvDgE,aAAa,GAAG,IAAAE,qCAAqB,EAACF,aAAa,CAAC;EACpDG,0BAAY,CAACC,qBAAqB,CAACJ,aAAa,CAAC;EAEjD,IAAM9C,MAAM,GAAG,IAAIpB,QAAQ,CAACkE,aAAa,EAAE/D,YAAY,CAAC;EACxD,IAAA8D,qBAAc,EAAC,gBAAgB,EAAE7C,MAAM,CAAC;EACxC,OAAOA,MAAM;AACjB;AAEO,SAASmD,UAAUA,CAACC,GAAQ,EAAW;EAC1C,OAAOA,GAAG,YAAYxE,QAAQ;AAClC;;AAEA;AACA;AACA;AACA;AACO,SAASyE,mBAAmBA,CAA4CrD,MAAS,EAAkB;EACtG,OAAOA,MAAM;AACjB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/rx-storage-helper.js b/dist/cjs/rx-storage-helper.js deleted file mode 100644 index e8c03af293e..00000000000 --- a/dist/cjs/rx-storage-helper.js +++ /dev/null @@ -1,745 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.RX_DATABASE_LOCAL_DOCS_STORAGE_NAME = exports.INTERNAL_STORAGE_NAME = void 0; -exports.attachmentWriteDataToNormalData = attachmentWriteDataToNormalData; -exports.categorizeBulkWriteRows = categorizeBulkWriteRows; -exports.ensureRxStorageInstanceParamsAreCorrect = ensureRxStorageInstanceParamsAreCorrect; -exports.flatCloneDocWithMeta = flatCloneDocWithMeta; -exports.getAttachmentSize = getAttachmentSize; -exports.getChangedDocumentsSince = getChangedDocumentsSince; -exports.getChangedDocumentsSinceQuery = getChangedDocumentsSinceQuery; 
-exports.getSingleDocument = getSingleDocument; -exports.getWrappedStorageInstance = getWrappedStorageInstance; -exports.hasEncryption = hasEncryption; -exports.observeSingle = observeSingle; -exports.randomDelayStorage = randomDelayStorage; -exports.stackCheckpoints = stackCheckpoints; -exports.stripAttachmentsDataFromDocument = stripAttachmentsDataFromDocument; -exports.stripAttachmentsDataFromRow = stripAttachmentsDataFromRow; -exports.throwIfIsStorageWriteError = throwIfIsStorageWriteError; -exports.writeSingle = writeSingle; -var _overwritable = require("./overwritable.js"); -var _rxError = require("./rx-error.js"); -var _rxSchemaHelper = require("./rx-schema-helper.js"); -var _index = require("./plugins/utils/index.js"); -var _rxjs = require("rxjs"); -var _rxQuery = require("./rx-query.js"); -var _rxQueryHelper = require("./rx-query-helper.js"); -var _hooks = require("./hooks.js"); -/** - * Helper functions for accessing the RxStorage instances. - */ - -var INTERNAL_STORAGE_NAME = exports.INTERNAL_STORAGE_NAME = '_rxdb_internal'; -var RX_DATABASE_LOCAL_DOCS_STORAGE_NAME = exports.RX_DATABASE_LOCAL_DOCS_STORAGE_NAME = 'rxdatabase_storage_local'; -async function getSingleDocument(storageInstance, documentId) { - var results = await storageInstance.findDocumentsById([documentId], false); - var doc = results[0]; - if (doc) { - return doc; - } else { - return undefined; - } -} - -/** - * Writes a single document, - * throws RxStorageBulkWriteError on failure - */ -async function writeSingle(instance, writeRow, context) { - var writeResult = await instance.bulkWrite([writeRow], context); - if (writeResult.error.length > 0) { - var error = writeResult.error[0]; - throw error; - } else { - var ret = writeResult.success[0]; - return ret; - } -} - -/** - * Observe the plain document data of a single document. - * Do not forget to unsubscribe. 
- */ -function observeSingle(storageInstance, documentId) { - var firstFindPromise = getSingleDocument(storageInstance, documentId); - var ret = storageInstance.changeStream().pipe((0, _rxjs.map)(evBulk => evBulk.events.find(ev => ev.documentId === documentId)), (0, _rxjs.filter)(ev => !!ev), (0, _rxjs.map)(ev => Promise.resolve((0, _index.ensureNotFalsy)(ev).documentData)), (0, _rxjs.startWith)(firstFindPromise), (0, _rxjs.switchMap)(v => v), (0, _rxjs.filter)(v => !!v)); - return ret; -} - -/** - * Checkpoints must be stackable over another. - * This is required form some RxStorage implementations - * like the sharding plugin, where a checkpoint only represents - * the document state from some, but not all shards. - */ -function stackCheckpoints(checkpoints) { - return Object.assign({}, ...checkpoints); -} -function throwIfIsStorageWriteError(collection, documentId, writeData, error) { - if (error) { - if (error.status === 409) { - throw (0, _rxError.newRxError)('CONFLICT', { - collection: collection.name, - id: documentId, - writeError: error, - data: writeData - }); - } else if (error.status === 422) { - throw (0, _rxError.newRxError)('VD2', { - collection: collection.name, - id: documentId, - writeError: error, - data: writeData - }); - } else { - throw error; - } - } -} - -/** - * Analyzes a list of BulkWriteRows and determines - * which documents must be inserted, updated or deleted - * and which events must be emitted and which documents cause a conflict - * and must not be written. - * Used as helper inside of some RxStorage implementations. - * @hotPath The performance of this function is critical - */ -function categorizeBulkWriteRows(storageInstance, primaryPath, -/** - * Current state of the documents - * inside of the storage. Used to determine - * which writes cause conflicts. - * This must be a Map for better performance. - */ -docsInDb, -/** - * The write rows that are passed to - * RxStorageInstance().bulkWrite(). 
- */ -bulkWriteRows, context, -/** - * Used by some storages for better performance. - * For example when get-by-id and insert/update can run in parallel. - */ -onInsert, onUpdate) { - var hasAttachments = !!storageInstance.schema.attachments; - var bulkInsertDocs = []; - var bulkUpdateDocs = []; - var errors = []; - var eventBulkId = (0, _index.randomCouchString)(10); - var eventBulk = { - id: eventBulkId, - events: [], - checkpoint: null, - context, - startTime: (0, _index.now)(), - endTime: 0 - }; - var eventBulkEvents = eventBulk.events; - var attachmentsAdd = []; - var attachmentsRemove = []; - var attachmentsUpdate = []; - var hasDocsInDb = docsInDb.size > 0; - var newestRow; - - /** - * @performance is really important in this loop! - */ - var rowAmount = bulkWriteRows.length; - var _loop = function () { - var writeRow = bulkWriteRows[rowId]; - - // use these variables to have less property accesses - var document = writeRow.document; - var previous = writeRow.previous; - var docId = document[primaryPath]; - var documentDeleted = document._deleted; - var previousDeleted = previous && previous._deleted; - var documentInDb = undefined; - if (hasDocsInDb) { - documentInDb = docsInDb.get(docId); - } - var attachmentError; - if (!documentInDb) { - /** - * It is possible to insert already deleted documents, - * this can happen on replication. - */ - var insertedIsDeleted = documentDeleted ? 
true : false; - if (hasAttachments) { - Object.entries(document._attachments).forEach(([attachmentId, attachmentData]) => { - if (!attachmentData.data) { - attachmentError = { - documentId: docId, - isError: true, - status: 510, - writeRow, - attachmentId - }; - errors.push(attachmentError); - } else { - attachmentsAdd.push({ - documentId: docId, - attachmentId, - attachmentData: attachmentData, - digest: attachmentData.digest - }); - } - }); - } - if (!attachmentError) { - if (hasAttachments) { - bulkInsertDocs.push(stripAttachmentsDataFromRow(writeRow)); - if (onInsert) { - onInsert(document); - } - } else { - bulkInsertDocs.push(writeRow); - if (onInsert) { - onInsert(document); - } - } - newestRow = writeRow; - } - if (!insertedIsDeleted) { - var event = { - documentId: docId, - operation: 'INSERT', - documentData: hasAttachments ? stripAttachmentsDataFromDocument(document) : document, - previousDocumentData: hasAttachments && previous ? stripAttachmentsDataFromDocument(previous) : previous - }; - eventBulkEvents.push(event); - } - } else { - // update existing document - var revInDb = documentInDb._rev; - - /** - * Check for conflict - */ - if (!previous || !!previous && revInDb !== previous._rev) { - // is conflict error - var err = { - isError: true, - status: 409, - documentId: docId, - writeRow: writeRow, - documentInDb - }; - errors.push(err); - return 1; // continue - } - - // handle attachments data - - var updatedRow = hasAttachments ? stripAttachmentsDataFromRow(writeRow) : writeRow; - if (hasAttachments) { - if (documentDeleted) { - /** - * Deleted documents must have cleared all their attachments. 
- */ - if (previous) { - Object.keys(previous._attachments).forEach(attachmentId => { - attachmentsRemove.push({ - documentId: docId, - attachmentId, - digest: (0, _index.ensureNotFalsy)(previous)._attachments[attachmentId].digest - }); - }); - } - } else { - // first check for errors - Object.entries(document._attachments).find(([attachmentId, attachmentData]) => { - var previousAttachmentData = previous ? previous._attachments[attachmentId] : undefined; - if (!previousAttachmentData && !attachmentData.data) { - attachmentError = { - documentId: docId, - documentInDb: documentInDb, - isError: true, - status: 510, - writeRow, - attachmentId - }; - } - return true; - }); - if (!attachmentError) { - Object.entries(document._attachments).forEach(([attachmentId, attachmentData]) => { - var previousAttachmentData = previous ? previous._attachments[attachmentId] : undefined; - if (!previousAttachmentData) { - attachmentsAdd.push({ - documentId: docId, - attachmentId, - attachmentData: attachmentData, - digest: attachmentData.digest - }); - } else { - var newDigest = updatedRow.document._attachments[attachmentId].digest; - if (attachmentData.data && - /** - * Performance shortcut, - * do not update the attachment data if it did not change. - */ - previousAttachmentData.digest !== newDigest) { - attachmentsUpdate.push({ - documentId: docId, - attachmentId, - attachmentData: attachmentData, - digest: attachmentData.digest - }); - } - } - }); - } - } - } - if (attachmentError) { - errors.push(attachmentError); - } else { - if (hasAttachments) { - bulkUpdateDocs.push(stripAttachmentsDataFromRow(updatedRow)); - if (onUpdate) { - onUpdate(document); - } - } else { - bulkUpdateDocs.push(updatedRow); - if (onUpdate) { - onUpdate(document); - } - } - newestRow = updatedRow; - } - var eventDocumentData = null; - var previousEventDocumentData = null; - var operation = null; - if (previousDeleted && !documentDeleted) { - operation = 'INSERT'; - eventDocumentData = hasAttachments ? 
stripAttachmentsDataFromDocument(document) : document; - } else if (previous && !previousDeleted && !documentDeleted) { - operation = 'UPDATE'; - eventDocumentData = hasAttachments ? stripAttachmentsDataFromDocument(document) : document; - previousEventDocumentData = previous; - } else if (documentDeleted) { - operation = 'DELETE'; - eventDocumentData = (0, _index.ensureNotFalsy)(document); - previousEventDocumentData = previous; - } else { - throw (0, _rxError.newRxError)('SNH', { - args: { - writeRow - } - }); - } - var _event = { - documentId: docId, - documentData: eventDocumentData, - previousDocumentData: previousEventDocumentData, - operation: operation - }; - eventBulkEvents.push(_event); - } - }; - for (var rowId = 0; rowId < rowAmount; rowId++) { - if (_loop()) continue; - } - return { - bulkInsertDocs, - bulkUpdateDocs, - newestRow, - errors, - eventBulk, - attachmentsAdd, - attachmentsRemove, - attachmentsUpdate - }; -} -function stripAttachmentsDataFromRow(writeRow) { - return { - previous: writeRow.previous, - document: stripAttachmentsDataFromDocument(writeRow.document) - }; -} -function getAttachmentSize(attachmentBase64String) { - return atob(attachmentBase64String).length; -} - -/** - * Used in custom RxStorage implementations. - */ -function attachmentWriteDataToNormalData(writeData) { - var data = writeData.data; - if (!data) { - return writeData; - } - var ret = { - length: getAttachmentSize(data), - digest: writeData.digest, - type: writeData.type - }; - return ret; -} -function stripAttachmentsDataFromDocument(doc) { - if (!doc._attachments || Object.keys(doc._attachments).length === 0) { - return doc; - } - var useDoc = (0, _index.flatClone)(doc); - useDoc._attachments = {}; - Object.entries(doc._attachments).forEach(([attachmentId, attachmentData]) => { - useDoc._attachments[attachmentId] = attachmentWriteDataToNormalData(attachmentData); - }); - return useDoc; -} - -/** - * Flat clone the document data - * and also the _meta field. 
- * Used many times when we want to change the meta - * during replication etc. - */ -function flatCloneDocWithMeta(doc) { - return Object.assign({}, doc, { - _meta: (0, _index.flatClone)(doc._meta) - }); -} -/** - * Wraps the normal storageInstance of a RxCollection - * to ensure that all access is properly using the hooks - * and other data transformations and also ensure that database.lockedRun() - * is used properly. - */ -function getWrappedStorageInstance(database, storageInstance, -/** - * The original RxJsonSchema - * before it was mutated by hooks. - */ -rxJsonSchema) { - _overwritable.overwritable.deepFreezeWhenDevMode(rxJsonSchema); - var ret = { - originalStorageInstance: storageInstance, - schema: storageInstance.schema, - internals: storageInstance.internals, - collectionName: storageInstance.collectionName, - databaseName: storageInstance.databaseName, - options: storageInstance.options, - bulkWrite(rows, context) { - var databaseToken = database.token; - var toStorageWriteRows = new Array(rows.length); - /** - * Use the same timestamp for all docs of this rows-set. - * This improves performance because calling Date.now() inside of the now() function - * is too costly. - */ - var time = (0, _index.now)(); - for (var index = 0; index < rows.length; index++) { - var writeRow = rows[index]; - var document = flatCloneDocWithMeta(writeRow.document); - document._meta.lwt = time; - - /** - * Yes we really want to set the revision here. - * If you make a plugin that relies on having its own revision - * stored into the storage, use this.originalStorageInstance.bulkWrite() instead. 
- */ - var previous = writeRow.previous; - document._rev = (0, _index.createRevision)(databaseToken, previous); - toStorageWriteRows[index] = { - document, - previous - }; - } - (0, _hooks.runPluginHooks)('preStorageWrite', { - storageInstance: this.originalStorageInstance, - rows: toStorageWriteRows - }); - return database.lockedRun(() => storageInstance.bulkWrite(toStorageWriteRows, context)) - /** - * The RxStorageInstance MUST NOT allow to insert already _deleted documents, - * without sending the previous document version. - * But for better developer experience, RxDB does allow to re-insert deleted documents. - * We do this by automatically fixing the conflict errors for that case - * by running another bulkWrite() and merging the results. - * @link https://github.com/pubkey/rxdb/pull/3839 - */.then(writeResult => { - var useWriteResult = { - error: [], - success: writeResult.success.slice(0) - }; - var reInsertErrors = writeResult.error.length === 0 ? [] : writeResult.error.filter(error => { - if (error.status === 409 && !error.writeRow.previous && !error.writeRow.document._deleted && (0, _index.ensureNotFalsy)(error.documentInDb)._deleted) { - return true; - } - useWriteResult.error.push(error); - return false; - }); - if (reInsertErrors.length > 0) { - var reInserts = reInsertErrors.map(error => { - return { - previous: error.documentInDb, - document: Object.assign({}, error.writeRow.document, { - _rev: (0, _index.createRevision)(database.token, error.documentInDb) - }) - }; - }); - return database.lockedRun(() => storageInstance.bulkWrite(reInserts, context)).then(subResult => { - (0, _index.appendToArray)(useWriteResult.error, subResult.error); - (0, _index.appendToArray)(useWriteResult.success, subResult.success); - return useWriteResult; - }); - } - return writeResult; - }); - }, - query(preparedQuery) { - return database.lockedRun(() => storageInstance.query(preparedQuery)); - }, - count(preparedQuery) { - return database.lockedRun(() => 
storageInstance.count(preparedQuery)); - }, - findDocumentsById(ids, deleted) { - return database.lockedRun(() => storageInstance.findDocumentsById(ids, deleted)); - }, - getAttachmentData(documentId, attachmentId, digest) { - return database.lockedRun(() => storageInstance.getAttachmentData(documentId, attachmentId, digest)); - }, - getChangedDocumentsSince: !storageInstance.getChangedDocumentsSince ? undefined : (limit, checkpoint) => { - return database.lockedRun(() => storageInstance.getChangedDocumentsSince((0, _index.ensureNotFalsy)(limit), checkpoint)); - }, - cleanup(minDeletedTime) { - return database.lockedRun(() => storageInstance.cleanup(minDeletedTime)); - }, - remove() { - database.storageInstances.delete(ret); - return database.lockedRun(() => storageInstance.remove()); - }, - close() { - database.storageInstances.delete(ret); - return database.lockedRun(() => storageInstance.close()); - }, - changeStream() { - return storageInstance.changeStream(); - }, - conflictResultionTasks() { - return storageInstance.conflictResultionTasks(); - }, - resolveConflictResultionTask(taskSolution) { - if (taskSolution.output.isEqual) { - return storageInstance.resolveConflictResultionTask(taskSolution); - } - var doc = Object.assign({}, taskSolution.output.documentData, { - _meta: (0, _index.getDefaultRxDocumentMeta)(), - _rev: (0, _index.getDefaultRevision)(), - _attachments: {} - }); - var documentData = (0, _index.flatClone)(doc); - delete documentData._meta; - delete documentData._rev; - delete documentData._attachments; - return storageInstance.resolveConflictResultionTask({ - id: taskSolution.id, - output: { - isEqual: false, - documentData - } - }); - } - }; - database.storageInstances.add(ret); - return ret; -} - -/** - * Each RxStorage implementation should - * run this method at the first step of createStorageInstance() - * to ensure that the configuration is correct. 
- */ -function ensureRxStorageInstanceParamsAreCorrect(params) { - if (params.schema.keyCompression) { - throw (0, _rxError.newRxError)('UT5', { - args: { - params - } - }); - } - if (hasEncryption(params.schema)) { - throw (0, _rxError.newRxError)('UT6', { - args: { - params - } - }); - } - if (params.schema.attachments && params.schema.attachments.compression) { - throw (0, _rxError.newRxError)('UT7', { - args: { - params - } - }); - } -} -function hasEncryption(jsonSchema) { - if (!!jsonSchema.encrypted && jsonSchema.encrypted.length > 0 || jsonSchema.attachments && jsonSchema.attachments.encrypted) { - return true; - } else { - return false; - } -} -function getChangedDocumentsSinceQuery(storageInstance, limit, checkpoint) { - var primaryPath = (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(storageInstance.schema.primaryKey); - var sinceLwt = checkpoint ? checkpoint.lwt : _index.RX_META_LWT_MINIMUM; - var sinceId = checkpoint ? checkpoint.id : ''; - return (0, _rxQueryHelper.normalizeMangoQuery)(storageInstance.schema, { - selector: { - $or: [{ - '_meta.lwt': { - $gt: sinceLwt - } - }, { - '_meta.lwt': { - $eq: sinceLwt - }, - [primaryPath]: { - $gt: checkpoint ? sinceId : '' - } - }], - // add this hint for better index usage - '_meta.lwt': { - $gte: sinceLwt - } - }, - sort: [{ - '_meta.lwt': 'asc' - }, { - [primaryPath]: 'asc' - }], - skip: 0, - limit - /** - * DO NOT SET A SPECIFIC INDEX HERE! - * The query might be modified by some plugin - * before sending it to the storage. - * We can be sure that in the end the query planner - * will find the best index. 
- */ - // index: ['_meta.lwt', primaryPath] - }); -} -async function getChangedDocumentsSince(storageInstance, limit, checkpoint) { - if (storageInstance.getChangedDocumentsSince) { - return storageInstance.getChangedDocumentsSince(limit, checkpoint); - } - var primaryPath = (0, _rxSchemaHelper.getPrimaryFieldOfPrimaryKey)(storageInstance.schema.primaryKey); - var query = (0, _rxQuery.prepareQuery)(storageInstance.schema, getChangedDocumentsSinceQuery(storageInstance, limit, checkpoint)); - var result = await storageInstance.query(query); - var documents = result.documents; - var lastDoc = (0, _index.lastOfArray)(documents); - return { - documents: documents, - checkpoint: lastDoc ? { - id: lastDoc[primaryPath], - lwt: lastDoc._meta.lwt - } : checkpoint ? checkpoint : { - id: '', - lwt: 0 - } - }; -} - -/** - * Wraps the storage and simluates - * delays. Mostly used in tests. - */ -function randomDelayStorage(input) { - /** - * Ensure writes to a delay storage - * are still correctly run in order. 
- */ - var randomDelayStorageWriteQueue = _index.PROMISE_RESOLVE_TRUE; - var retStorage = { - name: 'random-delay-' + input.storage.name, - rxdbVersion: _index.RXDB_VERSION, - async createStorageInstance(params) { - await (0, _index.promiseWait)(input.delayTimeBefore()); - var storageInstance = await input.storage.createStorageInstance(params); - await (0, _index.promiseWait)(input.delayTimeAfter()); - return { - databaseName: storageInstance.databaseName, - internals: storageInstance.internals, - options: storageInstance.options, - schema: storageInstance.schema, - collectionName: storageInstance.collectionName, - bulkWrite(a, b) { - randomDelayStorageWriteQueue = randomDelayStorageWriteQueue.then(async () => { - await (0, _index.promiseWait)(input.delayTimeBefore()); - var response = await storageInstance.bulkWrite(a, b); - await (0, _index.promiseWait)(input.delayTimeAfter()); - return response; - }); - var ret = randomDelayStorageWriteQueue; - return ret; - }, - async findDocumentsById(a, b) { - await (0, _index.promiseWait)(input.delayTimeBefore()); - var ret = await storageInstance.findDocumentsById(a, b); - await (0, _index.promiseWait)(input.delayTimeAfter()); - return ret; - }, - async query(a) { - await (0, _index.promiseWait)(input.delayTimeBefore()); - var ret = await storageInstance.query(a); - return ret; - }, - async count(a) { - await (0, _index.promiseWait)(input.delayTimeBefore()); - var ret = await storageInstance.count(a); - await (0, _index.promiseWait)(input.delayTimeAfter()); - return ret; - }, - async getAttachmentData(a, b, c) { - await (0, _index.promiseWait)(input.delayTimeBefore()); - var ret = await storageInstance.getAttachmentData(a, b, c); - await (0, _index.promiseWait)(input.delayTimeAfter()); - return ret; - }, - getChangedDocumentsSince: !storageInstance.getChangedDocumentsSince ? 
undefined : async (a, b) => { - await (0, _index.promiseWait)(input.delayTimeBefore()); - var ret = await (0, _index.ensureNotFalsy)(storageInstance.getChangedDocumentsSince)(a, b); - await (0, _index.promiseWait)(input.delayTimeAfter()); - return ret; - }, - changeStream() { - return storageInstance.changeStream(); - }, - conflictResultionTasks() { - return storageInstance.conflictResultionTasks(); - }, - resolveConflictResultionTask(a) { - return storageInstance.resolveConflictResultionTask(a); - }, - async cleanup(a) { - await (0, _index.promiseWait)(input.delayTimeBefore()); - var ret = await storageInstance.cleanup(a); - await (0, _index.promiseWait)(input.delayTimeAfter()); - return ret; - }, - async close() { - await (0, _index.promiseWait)(input.delayTimeBefore()); - var ret = await storageInstance.close(); - await (0, _index.promiseWait)(input.delayTimeAfter()); - return ret; - }, - async remove() { - await (0, _index.promiseWait)(input.delayTimeBefore()); - var ret = await storageInstance.remove(); - await (0, _index.promiseWait)(input.delayTimeAfter()); - return ret; - } - }; - } - }; - return retStorage; -} -//# sourceMappingURL=rx-storage-helper.js.map \ No newline at end of file diff --git a/dist/cjs/rx-storage-helper.js.map b/dist/cjs/rx-storage-helper.js.map deleted file mode 100644 index 42e93d9c627..00000000000 --- a/dist/cjs/rx-storage-helper.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"rx-storage-helper.js","names":["_overwritable","require","_rxError","_rxSchemaHelper","_index","_rxjs","_rxQuery","_rxQueryHelper","_hooks","INTERNAL_STORAGE_NAME","exports","RX_DATABASE_LOCAL_DOCS_STORAGE_NAME","getSingleDocument","storageInstance","documentId","results","findDocumentsById","doc","undefined","writeSingle","instance","writeRow","context","writeResult","bulkWrite","error","length","ret","success","observeSingle","firstFindPromise","changeStream","pipe","map","evBulk","events","find","ev","filter","Promise","resolve","ensureNotFalsy","documentData","startWith","switchMap","v","stackCheckpoints","checkpoints","Object","assign","throwIfIsStorageWriteError","collection","writeData","status","newRxError","name","id","writeError","data","categorizeBulkWriteRows","primaryPath","docsInDb","bulkWriteRows","onInsert","onUpdate","hasAttachments","schema","attachments","bulkInsertDocs","bulkUpdateDocs","errors","eventBulkId","randomCouchString","eventBulk","checkpoint","startTime","now","endTime","eventBulkEvents","attachmentsAdd","attachmentsRemove","attachmentsUpdate","hasDocsInDb","size","newestRow","rowAmount","_loop","rowId","document","previous","docId","documentDeleted","_deleted","previousDeleted","documentInDb","get","attachmentError","insertedIsDeleted","entries","_attachments","forEach","attachmentId","attachmentData","isError","push","digest","stripAttachmentsDataFromRow","event","operation","stripAttachmentsDataFromDocument","previousDocumentData","revInDb","_rev","err","updatedRow","keys","previousAttachmentData","newDigest","eventDocumentData","previousEventDocumentData","args","getAttachmentSize","attachmentBase64String","atob","attachmentWriteDataToNormalData","type","useDoc","flatClone","flatCloneDocWithMeta","_meta","getWrappedStorageInstance","database","rxJsonSchema","overwritable","deepFreezeWhenDevMode","originalStorageInstance","internals","collectionName","databaseName","options","rows","databaseToken","token","toSt
orageWriteRows","Array","time","index","lwt","createRevision","runPluginHooks","lockedRun","then","useWriteResult","slice","reInsertErrors","reInserts","subResult","appendToArray","query","preparedQuery","count","ids","deleted","getAttachmentData","getChangedDocumentsSince","limit","cleanup","minDeletedTime","remove","storageInstances","delete","close","conflictResultionTasks","resolveConflictResultionTask","taskSolution","output","isEqual","getDefaultRxDocumentMeta","getDefaultRevision","add","ensureRxStorageInstanceParamsAreCorrect","params","keyCompression","hasEncryption","compression","jsonSchema","encrypted","getChangedDocumentsSinceQuery","getPrimaryFieldOfPrimaryKey","primaryKey","sinceLwt","RX_META_LWT_MINIMUM","sinceId","normalizeMangoQuery","selector","$or","$gt","$eq","$gte","sort","skip","prepareQuery","result","documents","lastDoc","lastOfArray","randomDelayStorage","input","randomDelayStorageWriteQueue","PROMISE_RESOLVE_TRUE","retStorage","storage","rxdbVersion","RXDB_VERSION","createStorageInstance","promiseWait","delayTimeBefore","delayTimeAfter","a","b","response","c"],"sources":["../../src/rx-storage-helper.ts"],"sourcesContent":["/**\n * Helper functions for accessing the RxStorage instances.\n */\n\nimport { overwritable } from './overwritable.ts';\nimport { newRxError } from './rx-error.ts';\nimport {\n getPrimaryFieldOfPrimaryKey\n} from './rx-schema-helper.ts';\nimport type {\n BulkWriteRow,\n BulkWriteRowProcessed,\n CategorizeBulkWriteRowsOutput,\n EventBulk,\n RxAttachmentData,\n RxAttachmentWriteData,\n RxCollection,\n RxDatabase,\n RxDocumentData,\n RxDocumentWriteData,\n RxJsonSchema,\n RxStorageWriteError,\n RxStorageChangeEvent,\n RxStorageInstance,\n RxStorageInstanceCreationParams,\n StringKeys,\n RxStorageWriteErrorConflict,\n RxStorageWriteErrorAttachment,\n RxStorage,\n RxStorageDefaultCheckpoint,\n FilledMangoQuery\n} from './types/index.d.ts';\nimport {\n PROMISE_RESOLVE_TRUE,\n RXDB_VERSION,\n RX_META_LWT_MINIMUM,\n 
appendToArray,\n createRevision,\n ensureNotFalsy,\n flatClone,\n getDefaultRevision,\n getDefaultRxDocumentMeta,\n lastOfArray,\n now,\n promiseWait,\n randomCouchString\n} from './plugins/utils/index.ts';\nimport { Observable, filter, map, startWith, switchMap } from 'rxjs';\nimport { prepareQuery } from './rx-query.ts';\nimport { normalizeMangoQuery } from './rx-query-helper.ts';\nimport { runPluginHooks } from './hooks.ts';\n\nexport const INTERNAL_STORAGE_NAME = '_rxdb_internal';\nexport const RX_DATABASE_LOCAL_DOCS_STORAGE_NAME = 'rxdatabase_storage_local';\n\nexport async function getSingleDocument(\n storageInstance: RxStorageInstance,\n documentId: string\n): Promise | undefined> {\n const results = await storageInstance.findDocumentsById([documentId], false);\n const doc = results[0];\n if (doc) {\n return doc;\n } else {\n return undefined;\n }\n}\n\n/**\n * Writes a single document,\n * throws RxStorageBulkWriteError on failure\n */\nexport async function writeSingle(\n instance: RxStorageInstance,\n writeRow: BulkWriteRow,\n context: string\n): Promise> {\n const writeResult = await instance.bulkWrite(\n [writeRow],\n context\n );\n if (writeResult.error.length > 0) {\n const error = writeResult.error[0];\n throw error;\n } else {\n const ret = writeResult.success[0];\n return ret;\n }\n}\n\n/**\n * Observe the plain document data of a single document.\n * Do not forget to unsubscribe.\n */\nexport function observeSingle(\n storageInstance: RxStorageInstance,\n documentId: string\n): Observable> {\n const firstFindPromise = getSingleDocument(storageInstance, documentId);\n const ret = storageInstance\n .changeStream()\n .pipe(\n map(evBulk => evBulk.events.find(ev => ev.documentId === documentId)),\n filter(ev => !!ev),\n map(ev => Promise.resolve(ensureNotFalsy(ev).documentData)),\n startWith(firstFindPromise),\n switchMap(v => v),\n filter(v => !!v)\n ) as any;\n return ret;\n}\n\n/**\n * Checkpoints must be stackable over another.\n * This is 
required form some RxStorage implementations\n * like the sharding plugin, where a checkpoint only represents\n * the document state from some, but not all shards.\n */\nexport function stackCheckpoints(\n checkpoints: CheckpointType[]\n): CheckpointType {\n return Object.assign(\n {},\n ...checkpoints\n );\n}\n\nexport function throwIfIsStorageWriteError(\n collection: RxCollection,\n documentId: string,\n writeData: RxDocumentWriteData | RxDocType,\n error: RxStorageWriteError | undefined\n) {\n if (error) {\n if (error.status === 409) {\n throw newRxError('CONFLICT', {\n collection: collection.name,\n id: documentId,\n writeError: error,\n data: writeData\n });\n } else if (error.status === 422) {\n throw newRxError('VD2', {\n collection: collection.name,\n id: documentId,\n writeError: error,\n data: writeData\n });\n } else {\n throw error;\n }\n }\n}\n\n\n/**\n * Analyzes a list of BulkWriteRows and determines\n * which documents must be inserted, updated or deleted\n * and which events must be emitted and which documents cause a conflict\n * and must not be written.\n * Used as helper inside of some RxStorage implementations.\n * @hotPath The performance of this function is critical\n */\nexport function categorizeBulkWriteRows(\n storageInstance: RxStorageInstance,\n primaryPath: StringKeys,\n /**\n * Current state of the documents\n * inside of the storage. 
Used to determine\n * which writes cause conflicts.\n * This must be a Map for better performance.\n */\n docsInDb: Map[StringKeys] | string, RxDocumentData>,\n /**\n * The write rows that are passed to\n * RxStorageInstance().bulkWrite().\n */\n bulkWriteRows: BulkWriteRow[],\n context: string,\n /**\n * Used by some storages for better performance.\n * For example when get-by-id and insert/update can run in parallel.\n */\n onInsert?: (docData: RxDocumentData) => void,\n onUpdate?: (docData: RxDocumentData) => void\n): CategorizeBulkWriteRowsOutput {\n const hasAttachments = !!storageInstance.schema.attachments;\n const bulkInsertDocs: BulkWriteRowProcessed[] = [];\n const bulkUpdateDocs: BulkWriteRowProcessed[] = [];\n const errors: RxStorageWriteError[] = [];\n const eventBulkId = randomCouchString(10);\n const eventBulk: EventBulk>, any> = {\n id: eventBulkId,\n events: [],\n checkpoint: null,\n context,\n startTime: now(),\n endTime: 0\n };\n const eventBulkEvents = eventBulk.events;\n\n const attachmentsAdd: {\n documentId: string;\n attachmentId: string;\n attachmentData: RxAttachmentWriteData;\n digest: string;\n }[] = [];\n const attachmentsRemove: {\n documentId: string;\n attachmentId: string;\n digest: string;\n }[] = [];\n const attachmentsUpdate: {\n documentId: string;\n attachmentId: string;\n attachmentData: RxAttachmentWriteData;\n digest: string;\n }[] = [];\n\n const hasDocsInDb = docsInDb.size > 0;\n let newestRow: BulkWriteRowProcessed | undefined;\n\n /**\n * @performance is really important in this loop!\n */\n const rowAmount = bulkWriteRows.length;\n for (let rowId = 0; rowId < rowAmount; rowId++) {\n const writeRow = bulkWriteRows[rowId];\n\n // use these variables to have less property accesses\n const document = writeRow.document;\n const previous = writeRow.previous;\n const docId = document[primaryPath] as string;\n const documentDeleted = document._deleted;\n const previousDeleted = previous && previous._deleted;\n\n let 
documentInDb: RxDocumentData | undefined = undefined as any;\n if (hasDocsInDb) {\n documentInDb = docsInDb.get(docId);\n }\n let attachmentError: RxStorageWriteErrorAttachment | undefined;\n\n if (!documentInDb) {\n /**\n * It is possible to insert already deleted documents,\n * this can happen on replication.\n */\n const insertedIsDeleted = documentDeleted ? true : false;\n if (hasAttachments) {\n Object\n .entries(document._attachments)\n .forEach(([attachmentId, attachmentData]) => {\n if (\n !(attachmentData as RxAttachmentWriteData).data\n ) {\n attachmentError = {\n documentId: docId,\n isError: true,\n status: 510,\n writeRow,\n attachmentId\n };\n errors.push(attachmentError);\n } else {\n attachmentsAdd.push({\n documentId: docId,\n attachmentId,\n attachmentData: attachmentData as any,\n digest: attachmentData.digest\n });\n }\n });\n }\n if (!attachmentError) {\n if (hasAttachments) {\n bulkInsertDocs.push(stripAttachmentsDataFromRow(writeRow));\n if (onInsert) {\n onInsert(document);\n }\n } else {\n bulkInsertDocs.push(writeRow as any);\n if (onInsert) {\n onInsert(document);\n }\n }\n\n newestRow = writeRow as any;\n }\n\n if (!insertedIsDeleted) {\n const event = {\n documentId: docId,\n operation: 'INSERT' as const,\n documentData: hasAttachments ? stripAttachmentsDataFromDocument(document) : document as any,\n previousDocumentData: hasAttachments && previous ? stripAttachmentsDataFromDocument(previous) : previous as any\n };\n eventBulkEvents.push(event);\n }\n } else {\n // update existing document\n const revInDb: string = documentInDb._rev;\n\n /**\n * Check for conflict\n */\n if (\n (\n !previous\n ) ||\n (\n !!previous &&\n revInDb !== previous._rev\n )\n ) {\n // is conflict error\n const err: RxStorageWriteError = {\n isError: true,\n status: 409,\n documentId: docId,\n writeRow: writeRow,\n documentInDb\n };\n errors.push(err);\n continue;\n }\n\n // handle attachments data\n\n const updatedRow: BulkWriteRowProcessed = hasAttachments ? 
stripAttachmentsDataFromRow(writeRow) : writeRow as any;\n if (hasAttachments) {\n if (documentDeleted) {\n /**\n * Deleted documents must have cleared all their attachments.\n */\n if (previous) {\n Object\n .keys(previous._attachments)\n .forEach(attachmentId => {\n attachmentsRemove.push({\n documentId: docId,\n attachmentId,\n digest: ensureNotFalsy(previous)._attachments[attachmentId].digest\n });\n });\n }\n } else {\n // first check for errors\n Object\n .entries(document._attachments)\n .find(([attachmentId, attachmentData]) => {\n const previousAttachmentData = previous ? previous._attachments[attachmentId] : undefined;\n if (\n !previousAttachmentData &&\n !(attachmentData as RxAttachmentWriteData).data\n ) {\n attachmentError = {\n documentId: docId,\n documentInDb: documentInDb as any,\n isError: true,\n status: 510,\n writeRow,\n attachmentId\n };\n }\n return true;\n });\n if (!attachmentError) {\n Object\n .entries(document._attachments)\n .forEach(([attachmentId, attachmentData]) => {\n const previousAttachmentData = previous ? 
previous._attachments[attachmentId] : undefined;\n if (!previousAttachmentData) {\n attachmentsAdd.push({\n documentId: docId,\n attachmentId,\n attachmentData: attachmentData as any,\n digest: attachmentData.digest\n });\n } else {\n const newDigest = updatedRow.document._attachments[attachmentId].digest;\n if (\n (attachmentData as RxAttachmentWriteData).data &&\n /**\n * Performance shortcut,\n * do not update the attachment data if it did not change.\n */\n previousAttachmentData.digest !== newDigest\n ) {\n attachmentsUpdate.push({\n documentId: docId,\n attachmentId,\n attachmentData: attachmentData as RxAttachmentWriteData,\n digest: attachmentData.digest\n });\n }\n }\n });\n }\n }\n }\n\n if (attachmentError) {\n errors.push(attachmentError);\n } else {\n if (hasAttachments) {\n bulkUpdateDocs.push(stripAttachmentsDataFromRow(updatedRow));\n if (onUpdate) {\n onUpdate(document);\n }\n } else {\n bulkUpdateDocs.push(updatedRow);\n if (onUpdate) {\n onUpdate(document);\n }\n }\n newestRow = updatedRow as any;\n }\n\n let eventDocumentData: RxDocumentData | undefined = null as any;\n let previousEventDocumentData: RxDocumentData | undefined = null as any;\n let operation: 'INSERT' | 'UPDATE' | 'DELETE' = null as any;\n\n if (previousDeleted && !documentDeleted) {\n operation = 'INSERT';\n eventDocumentData = hasAttachments ? stripAttachmentsDataFromDocument(document) : document as any;\n } else if (previous && !previousDeleted && !documentDeleted) {\n operation = 'UPDATE';\n eventDocumentData = hasAttachments ? 
stripAttachmentsDataFromDocument(document) : document as any;\n previousEventDocumentData = previous;\n } else if (documentDeleted) {\n operation = 'DELETE';\n eventDocumentData = ensureNotFalsy(document) as any;\n previousEventDocumentData = previous;\n } else {\n throw newRxError('SNH', { args: { writeRow } });\n }\n\n const event = {\n documentId: docId,\n documentData: eventDocumentData as RxDocumentData,\n previousDocumentData: previousEventDocumentData,\n operation: operation\n };\n eventBulkEvents.push(event);\n }\n }\n\n return {\n bulkInsertDocs,\n bulkUpdateDocs,\n newestRow,\n errors,\n eventBulk,\n attachmentsAdd,\n attachmentsRemove,\n attachmentsUpdate\n };\n}\n\nexport function stripAttachmentsDataFromRow(writeRow: BulkWriteRow): BulkWriteRowProcessed {\n return {\n previous: writeRow.previous,\n document: stripAttachmentsDataFromDocument(writeRow.document)\n };\n}\n\nexport function getAttachmentSize(\n attachmentBase64String: string\n): number {\n return atob(attachmentBase64String).length;\n}\n\n/**\n * Used in custom RxStorage implementations.\n */\nexport function attachmentWriteDataToNormalData(writeData: RxAttachmentData | RxAttachmentWriteData): RxAttachmentData {\n const data = (writeData as RxAttachmentWriteData).data;\n if (!data) {\n return writeData as any;\n }\n const ret: RxAttachmentData = {\n length: getAttachmentSize(data),\n digest: writeData.digest,\n type: writeData.type\n };\n return ret;\n}\n\nexport function stripAttachmentsDataFromDocument(doc: RxDocumentWriteData): RxDocumentData {\n if (!doc._attachments || Object.keys(doc._attachments).length === 0) {\n return doc;\n }\n\n const useDoc: RxDocumentData = flatClone(doc) as any;\n useDoc._attachments = {};\n Object\n .entries(doc._attachments)\n .forEach(([attachmentId, attachmentData]) => {\n useDoc._attachments[attachmentId] = attachmentWriteDataToNormalData(attachmentData);\n });\n return useDoc;\n}\n\n/**\n * Flat clone the document data\n * and also the _meta field.\n * 
Used many times when we want to change the meta\n * during replication etc.\n */\nexport function flatCloneDocWithMeta(\n doc: RxDocumentData\n): RxDocumentData {\n return Object.assign(\n {},\n doc,\n {\n _meta: flatClone(doc._meta)\n }\n );\n}\n\nexport type WrappedRxStorageInstance = RxStorageInstance & {\n originalStorageInstance: RxStorageInstance;\n};\n\n/**\n * Wraps the normal storageInstance of a RxCollection\n * to ensure that all access is properly using the hooks\n * and other data transformations and also ensure that database.lockedRun()\n * is used properly.\n */\nexport function getWrappedStorageInstance<\n RxDocType,\n Internals,\n InstanceCreationOptions,\n CheckpointType\n>(\n database: RxDatabase<{}, Internals, InstanceCreationOptions, any>,\n storageInstance: RxStorageInstance,\n /**\n * The original RxJsonSchema\n * before it was mutated by hooks.\n */\n rxJsonSchema: RxJsonSchema>\n): WrappedRxStorageInstance {\n overwritable.deepFreezeWhenDevMode(rxJsonSchema);\n\n const ret: WrappedRxStorageInstance = {\n originalStorageInstance: storageInstance,\n schema: storageInstance.schema,\n internals: storageInstance.internals,\n collectionName: storageInstance.collectionName,\n databaseName: storageInstance.databaseName,\n options: storageInstance.options,\n bulkWrite(\n rows: BulkWriteRow[],\n context: string\n ) {\n const databaseToken = database.token;\n const toStorageWriteRows: BulkWriteRow[] = new Array(rows.length);\n /**\n * Use the same timestamp for all docs of this rows-set.\n * This improves performance because calling Date.now() inside of the now() function\n * is too costly.\n */\n const time = now();\n for (let index = 0; index < rows.length; index++) {\n const writeRow = rows[index];\n const document = flatCloneDocWithMeta(writeRow.document);\n document._meta.lwt = time;\n\n /**\n * Yes we really want to set the revision here.\n * If you make a plugin that relies on having its own revision\n * stored into the storage, use 
this.originalStorageInstance.bulkWrite() instead.\n */\n const previous = writeRow.previous;\n document._rev = createRevision(\n databaseToken,\n previous\n );\n toStorageWriteRows[index] = {\n document,\n previous\n };\n }\n\n runPluginHooks('preStorageWrite', {\n storageInstance: this.originalStorageInstance,\n rows: toStorageWriteRows\n });\n\n return database.lockedRun(\n () => storageInstance.bulkWrite(\n toStorageWriteRows,\n context\n )\n )\n /**\n * The RxStorageInstance MUST NOT allow to insert already _deleted documents,\n * without sending the previous document version.\n * But for better developer experience, RxDB does allow to re-insert deleted documents.\n * We do this by automatically fixing the conflict errors for that case\n * by running another bulkWrite() and merging the results.\n * @link https://github.com/pubkey/rxdb/pull/3839\n */\n .then(writeResult => {\n const useWriteResult: typeof writeResult = {\n error: [],\n success: writeResult.success.slice(0)\n };\n const reInsertErrors: RxStorageWriteErrorConflict[] = writeResult.error.length === 0\n ? 
[]\n : writeResult.error\n .filter((error) => {\n if (\n error.status === 409 &&\n !error.writeRow.previous &&\n !error.writeRow.document._deleted &&\n ensureNotFalsy(error.documentInDb)._deleted\n ) {\n return true;\n }\n useWriteResult.error.push(error);\n return false;\n }) as any;\n if (reInsertErrors.length > 0) {\n const reInserts: BulkWriteRow[] = reInsertErrors\n .map((error) => {\n return {\n previous: error.documentInDb,\n document: Object.assign(\n {},\n error.writeRow.document,\n {\n _rev: createRevision(\n database.token,\n error.documentInDb\n )\n }\n )\n };\n });\n\n return database.lockedRun(\n () => storageInstance.bulkWrite(\n reInserts,\n context\n )\n ).then(subResult => {\n appendToArray(useWriteResult.error, subResult.error);\n appendToArray(useWriteResult.success, subResult.success);\n return useWriteResult;\n });\n }\n return writeResult;\n });\n },\n query(preparedQuery) {\n return database.lockedRun(\n () => storageInstance.query(preparedQuery)\n );\n },\n count(preparedQuery) {\n return database.lockedRun(\n () => storageInstance.count(preparedQuery)\n );\n },\n findDocumentsById(ids, deleted) {\n return database.lockedRun(\n () => storageInstance.findDocumentsById(ids, deleted)\n );\n },\n getAttachmentData(\n documentId: string,\n attachmentId: string,\n digest: string\n ) {\n return database.lockedRun(\n () => storageInstance.getAttachmentData(documentId, attachmentId, digest)\n );\n },\n getChangedDocumentsSince: !storageInstance.getChangedDocumentsSince ? 
undefined : (limit: number, checkpoint?: any) => {\n return database.lockedRun(\n () => ((storageInstance as any).getChangedDocumentsSince)(ensureNotFalsy(limit), checkpoint)\n );\n },\n cleanup(minDeletedTime: number) {\n return database.lockedRun(\n () => storageInstance.cleanup(minDeletedTime)\n );\n },\n remove() {\n database.storageInstances.delete(ret);\n return database.lockedRun(\n () => storageInstance.remove()\n );\n },\n close() {\n database.storageInstances.delete(ret);\n return database.lockedRun(\n () => storageInstance.close()\n );\n },\n changeStream() {\n return storageInstance.changeStream();\n },\n conflictResultionTasks() {\n return storageInstance.conflictResultionTasks();\n },\n resolveConflictResultionTask(taskSolution) {\n if (taskSolution.output.isEqual) {\n return storageInstance.resolveConflictResultionTask(taskSolution);\n }\n\n const doc = Object.assign(\n {},\n taskSolution.output.documentData,\n {\n _meta: getDefaultRxDocumentMeta(),\n _rev: getDefaultRevision(),\n _attachments: {}\n }\n );\n\n const documentData = flatClone(doc);\n delete (documentData as any)._meta;\n delete (documentData as any)._rev;\n delete (documentData as any)._attachments;\n\n return storageInstance.resolveConflictResultionTask({\n id: taskSolution.id,\n output: {\n isEqual: false,\n documentData\n }\n });\n }\n };\n\n database.storageInstances.add(ret);\n return ret;\n}\n\n/**\n * Each RxStorage implementation should\n * run this method at the first step of createStorageInstance()\n * to ensure that the configuration is correct.\n */\nexport function ensureRxStorageInstanceParamsAreCorrect(\n params: RxStorageInstanceCreationParams\n) {\n if (params.schema.keyCompression) {\n throw newRxError('UT5', { args: { params } });\n }\n if (hasEncryption(params.schema)) {\n throw newRxError('UT6', { args: { params } });\n }\n if (\n params.schema.attachments &&\n params.schema.attachments.compression\n ) {\n throw newRxError('UT7', { args: { params } });\n 
}\n}\n\nexport function hasEncryption(jsonSchema: RxJsonSchema): boolean {\n if (\n (!!jsonSchema.encrypted && jsonSchema.encrypted.length > 0) ||\n (jsonSchema.attachments && jsonSchema.attachments.encrypted)\n ) {\n return true;\n } else {\n return false;\n }\n}\n\nexport function getChangedDocumentsSinceQuery(\n storageInstance: RxStorageInstance,\n limit: number,\n checkpoint?: CheckpointType\n): FilledMangoQuery {\n const primaryPath = getPrimaryFieldOfPrimaryKey(storageInstance.schema.primaryKey);\n const sinceLwt = checkpoint ? (checkpoint as unknown as RxStorageDefaultCheckpoint).lwt : RX_META_LWT_MINIMUM;\n const sinceId = checkpoint ? (checkpoint as unknown as RxStorageDefaultCheckpoint).id : '';\n return normalizeMangoQuery(storageInstance.schema, {\n selector: {\n $or: [\n {\n '_meta.lwt': {\n $gt: sinceLwt\n }\n },\n {\n '_meta.lwt': {\n $eq: sinceLwt\n },\n [primaryPath]: {\n $gt: checkpoint ? sinceId : ''\n }\n }\n ],\n // add this hint for better index usage\n '_meta.lwt': {\n $gte: sinceLwt\n }\n } as any,\n sort: [\n { '_meta.lwt': 'asc' },\n { [primaryPath]: 'asc' }\n ] as any,\n skip: 0,\n limit,\n /**\n * DO NOT SET A SPECIFIC INDEX HERE!\n * The query might be modified by some plugin\n * before sending it to the storage.\n * We can be sure that in the end the query planner\n * will find the best index.\n */\n // index: ['_meta.lwt', primaryPath]\n });\n}\n\nexport async function getChangedDocumentsSince(\n storageInstance: RxStorageInstance,\n limit: number,\n checkpoint?: CheckpointType\n): Promise<{\n documents: RxDocumentData[];\n /**\n * The checkpoint contains data so that another\n * call to getChangedDocumentsSince() will continue\n * from exactly the last document that was returned before.\n */\n checkpoint: CheckpointType;\n}> {\n if (storageInstance.getChangedDocumentsSince) {\n return storageInstance.getChangedDocumentsSince(limit, checkpoint);\n }\n\n const primaryPath = 
getPrimaryFieldOfPrimaryKey(storageInstance.schema.primaryKey);\n const query = prepareQuery>(\n storageInstance.schema,\n getChangedDocumentsSinceQuery(\n storageInstance,\n limit,\n checkpoint\n )\n );\n\n const result = await storageInstance.query(query);\n const documents = result.documents;\n const lastDoc = lastOfArray(documents);\n\n return {\n documents: documents,\n checkpoint: lastDoc ? {\n id: (lastDoc as any)[primaryPath],\n lwt: lastDoc._meta.lwt\n } as any : checkpoint ? checkpoint : {\n id: '',\n lwt: 0\n }\n };\n}\n\n\n/**\n * Wraps the storage and simluates\n * delays. Mostly used in tests.\n */\nexport function randomDelayStorage(\n input: {\n storage: RxStorage;\n delayTimeBefore: () => number;\n delayTimeAfter: () => number;\n }\n): RxStorage {\n /**\n * Ensure writes to a delay storage\n * are still correctly run in order.\n */\n let randomDelayStorageWriteQueue: Promise = PROMISE_RESOLVE_TRUE;\n\n const retStorage: RxStorage = {\n name: 'random-delay-' + input.storage.name,\n rxdbVersion: RXDB_VERSION,\n async createStorageInstance(params) {\n await promiseWait(input.delayTimeBefore());\n const storageInstance = await input.storage.createStorageInstance(params);\n await promiseWait(input.delayTimeAfter());\n\n return {\n databaseName: storageInstance.databaseName,\n internals: storageInstance.internals,\n options: storageInstance.options,\n schema: storageInstance.schema,\n collectionName: storageInstance.collectionName,\n bulkWrite(a, b) {\n randomDelayStorageWriteQueue = randomDelayStorageWriteQueue.then(async () => {\n await promiseWait(input.delayTimeBefore());\n const response = await storageInstance.bulkWrite(a, b);\n await promiseWait(input.delayTimeAfter());\n return response;\n });\n const ret = randomDelayStorageWriteQueue;\n return ret;\n },\n async findDocumentsById(a, b) {\n await promiseWait(input.delayTimeBefore());\n const ret = await storageInstance.findDocumentsById(a, b);\n await promiseWait(input.delayTimeAfter());\n return 
ret;\n },\n async query(a) {\n await promiseWait(input.delayTimeBefore());\n const ret = await storageInstance.query(a);\n return ret;\n },\n async count(a) {\n await promiseWait(input.delayTimeBefore());\n const ret = await storageInstance.count(a);\n await promiseWait(input.delayTimeAfter());\n return ret;\n\n },\n async getAttachmentData(a, b, c) {\n await promiseWait(input.delayTimeBefore());\n const ret = await storageInstance.getAttachmentData(a, b, c);\n await promiseWait(input.delayTimeAfter());\n return ret;\n\n },\n getChangedDocumentsSince: !storageInstance.getChangedDocumentsSince ? undefined : async (a, b) => {\n await promiseWait(input.delayTimeBefore());\n const ret = await ensureNotFalsy(storageInstance.getChangedDocumentsSince)(a, b);\n await promiseWait(input.delayTimeAfter());\n return ret;\n\n },\n changeStream() {\n return storageInstance.changeStream();\n },\n conflictResultionTasks() {\n return storageInstance.conflictResultionTasks();\n },\n resolveConflictResultionTask(a) {\n return storageInstance.resolveConflictResultionTask(a);\n },\n async cleanup(a) {\n await promiseWait(input.delayTimeBefore());\n const ret = await storageInstance.cleanup(a);\n await promiseWait(input.delayTimeAfter());\n return ret;\n\n },\n async close() {\n await promiseWait(input.delayTimeBefore());\n const ret = await storageInstance.close();\n await promiseWait(input.delayTimeAfter());\n return ret;\n\n },\n async remove() {\n await promiseWait(input.delayTimeBefore());\n const ret = await storageInstance.remove();\n await promiseWait(input.delayTimeAfter());\n return ret;\n },\n };\n\n\n }\n };\n return 
retStorage;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;AAIA,IAAAA,aAAA,GAAAC,OAAA;AACA,IAAAC,QAAA,GAAAD,OAAA;AACA,IAAAE,eAAA,GAAAF,OAAA;AA0BA,IAAAG,MAAA,GAAAH,OAAA;AAeA,IAAAI,KAAA,GAAAJ,OAAA;AACA,IAAAK,QAAA,GAAAL,OAAA;AACA,IAAAM,cAAA,GAAAN,OAAA;AACA,IAAAO,MAAA,GAAAP,OAAA;AAlDA;AACA;AACA;;AAkDO,IAAMQ,qBAAqB,GAAAC,OAAA,CAAAD,qBAAA,GAAG,gBAAgB;AAC9C,IAAME,mCAAmC,GAAAD,OAAA,CAAAC,mCAAA,GAAG,0BAA0B;AAEtE,eAAeC,iBAAiBA,CACnCC,eAAuD,EACvDC,UAAkB,EAC4B;EAC9C,IAAMC,OAAO,GAAG,MAAMF,eAAe,CAACG,iBAAiB,CAAC,CAACF,UAAU,CAAC,EAAE,KAAK,CAAC;EAC5E,IAAMG,GAAG,GAAGF,OAAO,CAAC,CAAC,CAAC;EACtB,IAAIE,GAAG,EAAE;IACL,OAAOA,GAAG;EACd,CAAC,MAAM;IACH,OAAOC,SAAS;EACpB;AACJ;;AAEA;AACA;AACA;AACA;AACO,eAAeC,WAAWA,CAC7BC,QAAgD,EAChDC,QAAiC,EACjCC,OAAe,EACmB;EAClC,IAAMC,WAAW,GAAG,MAAMH,QAAQ,CAACI,SAAS,CACxC,CAACH,QAAQ,CAAC,EACVC,OACJ,CAAC;EACD,IAAIC,WAAW,CAACE,KAAK,CAACC,MAAM,GAAG,CAAC,EAAE;IAC9B,IAAMD,KAAK,GAAGF,WAAW,CAACE,KAAK,CAAC,CAAC,CAAC;IAClC,MAAMA,KAAK;EACf,CAAC,MAAM;IACH,IAAME,GAAG,GAAGJ,WAAW,CAACK,OAAO,CAAC,CAAC,CAAC;IAClC,OAAOD,GAAG;EACd;AACJ;;AAEA;AACA;AACA;AACA;AACO,SAASE,aAAaA,CACzBhB,eAAuD,EACvDC,UAAkB,EACmB;EACrC,IAAMgB,gBAAgB,GAAGlB,iBAAiB,CAACC,eAAe,EAAEC,UAAU,CAAC;EACvE,IAAMa,GAAG,GAAGd,eAAe,CACtBkB,YAAY,CAAC,CAAC,CACdC,IAAI,CACD,IAAAC,SAAG,EAACC,MAAM,IAAIA,MAAM,CAACC,MAAM,CAACC,IAAI,CAACC,EAAE,IAAIA,EAAE,CAACvB,UAAU,KAAKA,UAAU,CAAC,CAAC,EACrE,IAAAwB,YAAM,EAACD,EAAE,IAAI,CAAC,CAACA,EAAE,CAAC,EAClB,IAAAJ,SAAG,EAACI,EAAE,IAAIE,OAAO,CAACC,OAAO,CAAC,IAAAC,qBAAc,EAACJ,EAAE,CAAC,CAACK,YAAY,CAAC,CAAC,EAC3D,IAAAC,eAAS,EAACb,gBAAgB,CAAC,EAC3B,IAAAc,eAAS,EAACC,CAAC,IAAIA,CAAC,CAAC,EACjB,IAAAP,YAAM,EAACO,CAAC,IAAI,CAAC,CAACA,CAAC,CACnB,CAAQ;EACZ,OAAOlB,GAAG;AACd;;AAEA;AACA;AACA;AACA;AACA;AACA;AACO,SAASmB,gBAAgBA,CAC5BC,WAA6B,EACf;EACd,OAAOC,MAAM,CAACC,MAAM,CAChB,CAAC,CAAC,EACF,GAAGF,WACP,CAAC;AACL;AAEO,SAASG,0BAA0BA,CACtCC,UAA6C,EAC7CrC,UAAkB,EAClBsC,SAAqD,EACrD3B,KAAiD,EACnD;EACE,IAAIA,KAAK,EAAE;IACP,IAAIA,KAAK,CAAC4B,MAAM,KAAK,GAAG,EAAE;MACtB,MAAM,IAAAC,mBAAU,EAAC,UAAU,EAAE;QACzBH,UAAU,EAA
EA,UAAU,CAACI,IAAI;QAC3BC,EAAE,EAAE1C,UAAU;QACd2C,UAAU,EAAEhC,KAAK;QACjBiC,IAAI,EAAEN;MACV,CAAC,CAAC;IACN,CAAC,MAAM,IAAI3B,KAAK,CAAC4B,MAAM,KAAK,GAAG,EAAE;MAC7B,MAAM,IAAAC,mBAAU,EAAC,KAAK,EAAE;QACpBH,UAAU,EAAEA,UAAU,CAACI,IAAI;QAC3BC,EAAE,EAAE1C,UAAU;QACd2C,UAAU,EAAEhC,KAAK;QACjBiC,IAAI,EAAEN;MACV,CAAC,CAAC;IACN,CAAC,MAAM;MACH,MAAM3B,KAAK;IACf;EACJ;AACJ;;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACO,SAASkC,uBAAuBA,CACnC9C,eAAiD,EACjD+C,WAAkC;AAClC;AACJ;AACA;AACA;AACA;AACA;AACIC,QAAmG;AACnG;AACJ;AACA;AACA;AACIC,aAAwC,EACxCxC,OAAe;AACf;AACJ;AACA;AACA;AACIyC,QAAuD,EACvDC,QAAuD,EACf;EACxC,IAAMC,cAAc,GAAG,CAAC,CAACpD,eAAe,CAACqD,MAAM,CAACC,WAAW;EAC3D,IAAMC,cAAkD,GAAG,EAAE;EAC7D,IAAMC,cAAkD,GAAG,EAAE;EAC7D,IAAMC,MAAwC,GAAG,EAAE;EACnD,IAAMC,WAAW,GAAG,IAAAC,wBAAiB,EAAC,EAAE,CAAC;EACzC,IAAMC,SAA0E,GAAG;IAC/EjB,EAAE,EAAEe,WAAW;IACfpC,MAAM,EAAE,EAAE;IACVuC,UAAU,EAAE,IAAI;IAChBpD,OAAO;IACPqD,SAAS,EAAE,IAAAC,UAAG,EAAC,CAAC;IAChBC,OAAO,EAAE;EACb,CAAC;EACD,IAAMC,eAAe,GAAGL,SAAS,CAACtC,MAAM;EAExC,IAAM4C,cAKH,GAAG,EAAE;EACR,IAAMC,iBAIH,GAAG,EAAE;EACR,IAAMC,iBAKH,GAAG,EAAE;EAER,IAAMC,WAAW,GAAGrB,QAAQ,CAACsB,IAAI,GAAG,CAAC;EACrC,IAAIC,SAAuD;;EAE3D;AACJ;AACA;EACI,IAAMC,SAAS,GAAGvB,aAAa,CAACpC,MAAM;EAAC,IAAA4D,KAAA,YAAAA,CAAA,EACS;IAC5C,IAAMjE,QAAQ,GAAGyC,aAAa,CAACyB,KAAK,CAAC;;IAErC;IACA,IAAMC,QAAQ,GAAGnE,QAAQ,CAACmE,QAAQ;IAClC,IAAMC,QAAQ,GAAGpE,QAAQ,CAACoE,QAAQ;IAClC,IAAMC,KAAK,GAAGF,QAAQ,CAAC5B,WAAW,CAAW;IAC7C,IAAM+B,eAAe,GAAGH,QAAQ,CAACI,QAAQ;IACzC,IAAMC,eAAe,GAAGJ,QAAQ,IAAIA,QAAQ,CAACG,QAAQ;IAErD,IAAIE,YAAmD,GAAG5E,SAAgB;IAC1E,IAAIgE,WAAW,EAAE;MACbY,YAAY,GAAGjC,QAAQ,CAACkC,GAAG,CAACL,KAAK,CAAC;IACtC;IACA,IAAIM,eAAqE;IAEzE,IAAI,CAACF,YAAY,EAAE;MACf;AACZ;AACA;AACA;MACY,IAAMG,iBAAiB,GAAGN,eAAe,GAAG,IAAI,GAAG,KAAK;MACxD,IAAI1B,cAAc,EAAE;QAChBjB,MAAM,CACDkD,OAAO,CAACV,QAAQ,CAACW,YAAY,CAAC,CAC9BC,OAAO,CAAC,CAAC,CAACC,YAAY,EAAEC,cAAc,CAAC,KAAK;UACzC,IACI,CAAEA,cAAc,CAA2B5C,IAAI,EACjD;YACEsC,eAAe,GAAG;cACdlF,UAAU,EAAE4E,KAAK;cACjBa,OAAO,EAAE,IAAI;cACblD,MAAM,EAAE,GAAG;cACXhC,QAAQ;cACRgF;YAC
J,CAAC;YACD/B,MAAM,CAACkC,IAAI,CAACR,eAAe,CAAC;UAChC,CAAC,MAAM;YACHjB,cAAc,CAACyB,IAAI,CAAC;cAChB1F,UAAU,EAAE4E,KAAK;cACjBW,YAAY;cACZC,cAAc,EAAEA,cAAqB;cACrCG,MAAM,EAAEH,cAAc,CAACG;YAC3B,CAAC,CAAC;UACN;QACJ,CAAC,CAAC;MACV;MACA,IAAI,CAACT,eAAe,EAAE;QAClB,IAAI/B,cAAc,EAAE;UAChBG,cAAc,CAACoC,IAAI,CAACE,2BAA2B,CAACrF,QAAQ,CAAC,CAAC;UAC1D,IAAI0C,QAAQ,EAAE;YACVA,QAAQ,CAACyB,QAAQ,CAAC;UACtB;QACJ,CAAC,MAAM;UACHpB,cAAc,CAACoC,IAAI,CAACnF,QAAe,CAAC;UACpC,IAAI0C,QAAQ,EAAE;YACVA,QAAQ,CAACyB,QAAQ,CAAC;UACtB;QACJ;QAEAJ,SAAS,GAAG/D,QAAe;MAC/B;MAEA,IAAI,CAAC4E,iBAAiB,EAAE;QACpB,IAAMU,KAAK,GAAG;UACV7F,UAAU,EAAE4E,KAAK;UACjBkB,SAAS,EAAE,QAAiB;UAC5BlE,YAAY,EAAEuB,cAAc,GAAG4C,gCAAgC,CAACrB,QAAQ,CAAC,GAAGA,QAAe;UAC3FsB,oBAAoB,EAAE7C,cAAc,IAAIwB,QAAQ,GAAGoB,gCAAgC,CAACpB,QAAQ,CAAC,GAAGA;QACpG,CAAC;QACDX,eAAe,CAAC0B,IAAI,CAACG,KAAK,CAAC;MAC/B;IACJ,CAAC,MAAM;MACH;MACA,IAAMI,OAAe,GAAGjB,YAAY,CAACkB,IAAI;;MAEzC;AACZ;AACA;MACY,IAEQ,CAACvB,QAAQ,IAGT,CAAC,CAACA,QAAQ,IACVsB,OAAO,KAAKtB,QAAQ,CAACuB,IACxB,EACH;QACE;QACA,IAAMC,GAAmC,GAAG;UACxCV,OAAO,EAAE,IAAI;UACblD,MAAM,EAAE,GAAG;UACXvC,UAAU,EAAE4E,KAAK;UACjBrE,QAAQ,EAAEA,QAAQ;UAClByE;QACJ,CAAC;QACDxB,MAAM,CAACkC,IAAI,CAACS,GAAG,CAAC;QAAC;MAErB;;MAEA;;MAEA,IAAMC,UAA4C,GAAGjD,cAAc,GAAGyC,2BAA2B,CAACrF,QAAQ,CAAC,GAAGA,QAAe;MAC7H,IAAI4C,cAAc,EAAE;QAChB,IAAI0B,eAAe,EAAE;UACjB;AACpB;AACA;UACoB,IAAIF,QAAQ,EAAE;YACVzC,MAAM,CACDmE,IAAI,CAAC1B,QAAQ,CAACU,YAAY,CAAC,CAC3BC,OAAO,CAACC,YAAY,IAAI;cACrBrB,iBAAiB,CAACwB,IAAI,CAAC;gBACnB1F,UAAU,EAAE4E,KAAK;gBACjBW,YAAY;gBACZI,MAAM,EAAE,IAAAhE,qBAAc,EAACgD,QAAQ,CAAC,CAACU,YAAY,CAACE,YAAY,CAAC,CAACI;cAChE,CAAC,CAAC;YACN,CAAC,CAAC;UACV;QACJ,CAAC,MAAM;UACH;UACAzD,MAAM,CACDkD,OAAO,CAACV,QAAQ,CAACW,YAAY,CAAC,CAC9B/D,IAAI,CAAC,CAAC,CAACiE,YAAY,EAAEC,cAAc,CAAC,KAAK;YACtC,IAAMc,sBAAsB,GAAG3B,QAAQ,GAAGA,QAAQ,CAACU,YAAY,CAACE,YAAY,CAAC,GAAGnF,SAAS;YACzF,IACI,CAACkG,sBAAsB,IACvB,CAAEd,cAAc,CAA2B5C,IAAI,EACjD;cACEsC,eAAe,GAAG;gBACdlF,UAAU,EAAE4E,KAAK;gBACjBI,YAAY,EAAEA,YAAmB;gBACjCS,OAAO,EAAE,IAAI;gBACblD,MAAM,EAAE,GAAG;gBACXh
C,QAAQ;gBACRgF;cACJ,CAAC;YACL;YACA,OAAO,IAAI;UACf,CAAC,CAAC;UACN,IAAI,CAACL,eAAe,EAAE;YAClBhD,MAAM,CACDkD,OAAO,CAACV,QAAQ,CAACW,YAAY,CAAC,CAC9BC,OAAO,CAAC,CAAC,CAACC,YAAY,EAAEC,cAAc,CAAC,KAAK;cACzC,IAAMc,sBAAsB,GAAG3B,QAAQ,GAAGA,QAAQ,CAACU,YAAY,CAACE,YAAY,CAAC,GAAGnF,SAAS;cACzF,IAAI,CAACkG,sBAAsB,EAAE;gBACzBrC,cAAc,CAACyB,IAAI,CAAC;kBAChB1F,UAAU,EAAE4E,KAAK;kBACjBW,YAAY;kBACZC,cAAc,EAAEA,cAAqB;kBACrCG,MAAM,EAAEH,cAAc,CAACG;gBAC3B,CAAC,CAAC;cACN,CAAC,MAAM;gBACH,IAAMY,SAAS,GAAGH,UAAU,CAAC1B,QAAQ,CAACW,YAAY,CAACE,YAAY,CAAC,CAACI,MAAM;gBACvE,IACKH,cAAc,CAA2B5C,IAAI;gBAC9C;AACxC;AACA;AACA;gBACwC0D,sBAAsB,CAACX,MAAM,KAAKY,SAAS,EAC7C;kBACEpC,iBAAiB,CAACuB,IAAI,CAAC;oBACnB1F,UAAU,EAAE4E,KAAK;oBACjBW,YAAY;oBACZC,cAAc,EAAEA,cAAuC;oBACvDG,MAAM,EAAEH,cAAc,CAACG;kBAC3B,CAAC,CAAC;gBACN;cACJ;YACJ,CAAC,CAAC;UACV;QACJ;MACJ;MAEA,IAAIT,eAAe,EAAE;QACjB1B,MAAM,CAACkC,IAAI,CAACR,eAAe,CAAC;MAChC,CAAC,MAAM;QACH,IAAI/B,cAAc,EAAE;UAChBI,cAAc,CAACmC,IAAI,CAACE,2BAA2B,CAACQ,UAAU,CAAC,CAAC;UAC5D,IAAIlD,QAAQ,EAAE;YACVA,QAAQ,CAACwB,QAAQ,CAAC;UACtB;QACJ,CAAC,MAAM;UACHnB,cAAc,CAACmC,IAAI,CAACU,UAAU,CAAC;UAC/B,IAAIlD,QAAQ,EAAE;YACVA,QAAQ,CAACwB,QAAQ,CAAC;UACtB;QACJ;QACAJ,SAAS,GAAG8B,UAAiB;MACjC;MAEA,IAAII,iBAAwD,GAAG,IAAW;MAC1E,IAAIC,yBAAgE,GAAG,IAAW;MAClF,IAAIX,SAAyC,GAAG,IAAW;MAE3D,IAAIf,eAAe,IAAI,CAACF,eAAe,EAAE;QACrCiB,SAAS,GAAG,QAAQ;QACpBU,iBAAiB,GAAGrD,cAAc,GAAG4C,gCAAgC,CAACrB,QAAQ,CAAC,GAAGA,QAAe;MACrG,CAAC,MAAM,IAAIC,QAAQ,IAAI,CAACI,eAAe,IAAI,CAACF,eAAe,EAAE;QACzDiB,SAAS,GAAG,QAAQ;QACpBU,iBAAiB,GAAGrD,cAAc,GAAG4C,gCAAgC,CAACrB,QAAQ,CAAC,GAAGA,QAAe;QACjG+B,yBAAyB,GAAG9B,QAAQ;MACxC,CAAC,MAAM,IAAIE,eAAe,EAAE;QACxBiB,SAAS,GAAG,QAAQ;QACpBU,iBAAiB,GAAG,IAAA7E,qBAAc,EAAC+C,QAAQ,CAAQ;QACnD+B,yBAAyB,GAAG9B,QAAQ;MACxC,CAAC,MAAM;QACH,MAAM,IAAAnC,mBAAU,EAAC,KAAK,EAAE;UAAEkE,IAAI,EAAE;YAAEnG;UAAS;QAAE,CAAC,CAAC;MACnD;MAEA,IAAMsF,MAAK,GAAG;QACV7F,UAAU,EAAE4E,KAAK;QACjBhD,YAAY,EAAE4E,iBAA8C;QAC5DR,oBAAoB,EAAES,yBAAyB;QAC/CX,SAAS,EAAEA;MACf,CAAC;MACD9B,eAAe,CAAC0B,IAAI,CAACG,MAAK,CAAC;IAC/B;EACJ,CAAC;EA3ND
,KAAK,IAAIpB,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGF,SAAS,EAAEE,KAAK,EAAE;IAAA,IAAAD,KAAA,IAiGlC;EAAS;EA4HrB,OAAO;IACHlB,cAAc;IACdC,cAAc;IACde,SAAS;IACTd,MAAM;IACNG,SAAS;IACTM,cAAc;IACdC,iBAAiB;IACjBC;EACJ,CAAC;AACL;AAEO,SAASyB,2BAA2BA,CAAYrF,QAAiC,EAAoC;EACxH,OAAO;IACHoE,QAAQ,EAAEpE,QAAQ,CAACoE,QAAQ;IAC3BD,QAAQ,EAAEqB,gCAAgC,CAACxF,QAAQ,CAACmE,QAAQ;EAChE,CAAC;AACL;AAEO,SAASiC,iBAAiBA,CAC7BC,sBAA8B,EACxB;EACN,OAAOC,IAAI,CAACD,sBAAsB,CAAC,CAAChG,MAAM;AAC9C;;AAEA;AACA;AACA;AACO,SAASkG,+BAA+BA,CAACxE,SAAmD,EAAoB;EACnH,IAAMM,IAAI,GAAIN,SAAS,CAA2BM,IAAI;EACtD,IAAI,CAACA,IAAI,EAAE;IACP,OAAON,SAAS;EACpB;EACA,IAAMzB,GAAqB,GAAG;IAC1BD,MAAM,EAAE+F,iBAAiB,CAAC/D,IAAI,CAAC;IAC/B+C,MAAM,EAAErD,SAAS,CAACqD,MAAM;IACxBoB,IAAI,EAAEzE,SAAS,CAACyE;EACpB,CAAC;EACD,OAAOlG,GAAG;AACd;AAEO,SAASkF,gCAAgCA,CAAY5F,GAAmC,EAA6B;EACxH,IAAI,CAACA,GAAG,CAACkF,YAAY,IAAInD,MAAM,CAACmE,IAAI,CAAClG,GAAG,CAACkF,YAAY,CAAC,CAACzE,MAAM,KAAK,CAAC,EAAE;IACjE,OAAOT,GAAG;EACd;EAEA,IAAM6G,MAAiC,GAAG,IAAAC,gBAAS,EAAC9G,GAAG,CAAQ;EAC/D6G,MAAM,CAAC3B,YAAY,GAAG,CAAC,CAAC;EACxBnD,MAAM,CACDkD,OAAO,CAACjF,GAAG,CAACkF,YAAY,CAAC,CACzBC,OAAO,CAAC,CAAC,CAACC,YAAY,EAAEC,cAAc,CAAC,KAAK;IACzCwB,MAAM,CAAC3B,YAAY,CAACE,YAAY,CAAC,GAAGuB,+BAA+B,CAACtB,cAAc,CAAC;EACvF,CAAC,CAAC;EACN,OAAOwB,MAAM;AACjB;;AAEA;AACA;AACA;AACA;AACA;AACA;AACO,SAASE,oBAAoBA,CAChC/G,GAA8B,EACL;EACzB,OAAO+B,MAAM,CAACC,MAAM,CAChB,CAAC,CAAC,EACFhC,GAAG,EACH;IACIgH,KAAK,EAAE,IAAAF,gBAAS,EAAC9G,GAAG,CAACgH,KAAK;EAC9B,CACJ,CAAC;AACL;AAMA;AACA;AACA;AACA;AACA;AACA;AACO,SAASC,yBAAyBA,CAMrCC,QAAiE,EACjEtH,eAAiG;AACjG;AACJ;AACA;AACA;AACIuH,YAAqD,EACkB;EACvEC,0BAAY,CAACC,qBAAqB,CAACF,YAAY,CAAC;EAEhD,IAAMzG,GAA4E,GAAG;IACjF4G,uBAAuB,EAAE1H,eAAe;IACxCqD,MAAM,EAAErD,eAAe,CAACqD,MAAM;IAC9BsE,SAAS,EAAE3H,eAAe,CAAC2H,SAAS;IACpCC,cAAc,EAAE5H,eAAe,CAAC4H,cAAc;IAC9CC,YAAY,EAAE7H,eAAe,CAAC6H,YAAY;IAC1CC,OAAO,EAAE9H,eAAe,CAAC8H,OAAO;IAChCnH,SAASA,CACLoH,IAA+B,EAC/BtH,OAAe,EACjB;MACE,IAAMuH,aAAa,GAAGV,QAAQ,CAACW,KAAK;MACpC,IAAMC,kBAA6C,GAAG,IAAIC,KAAK,CAACJ,IAAI,CAAClH,MAAM,CAAC;MAC5E;
AACZ;AACA;AACA;AACA;MACY,IAAMuH,IAAI,GAAG,IAAArE,UAAG,EAAC,CAAC;MAClB,KAAK,IAAIsE,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGN,IAAI,CAAClH,MAAM,EAAEwH,KAAK,EAAE,EAAE;QAC9C,IAAM7H,QAAQ,GAAGuH,IAAI,CAACM,KAAK,CAAC;QAC5B,IAAM1D,QAAQ,GAAGwC,oBAAoB,CAAC3G,QAAQ,CAACmE,QAAQ,CAAC;QACxDA,QAAQ,CAACyC,KAAK,CAACkB,GAAG,GAAGF,IAAI;;QAEzB;AAChB;AACA;AACA;AACA;QACgB,IAAMxD,QAAQ,GAAGpE,QAAQ,CAACoE,QAAQ;QAClCD,QAAQ,CAACwB,IAAI,GAAG,IAAAoC,qBAAc,EAC1BP,aAAa,EACbpD,QACJ,CAAC;QACDsD,kBAAkB,CAACG,KAAK,CAAC,GAAG;UACxB1D,QAAQ;UACRC;QACJ,CAAC;MACL;MAEA,IAAA4D,qBAAc,EAAC,iBAAiB,EAAE;QAC9BxI,eAAe,EAAE,IAAI,CAAC0H,uBAAuB;QAC7CK,IAAI,EAAEG;MACV,CAAC,CAAC;MAEF,OAAOZ,QAAQ,CAACmB,SAAS,CACrB,MAAMzI,eAAe,CAACW,SAAS,CAC3BuH,kBAAkB,EAClBzH,OACJ,CACJ;MACI;AAChB;AACA;AACA;AACA;AACA;AACA;AACA,SAPgB,CAQCiI,IAAI,CAAChI,WAAW,IAAI;QACjB,IAAMiI,cAAkC,GAAG;UACvC/H,KAAK,EAAE,EAAE;UACTG,OAAO,EAAEL,WAAW,CAACK,OAAO,CAAC6H,KAAK,CAAC,CAAC;QACxC,CAAC;QACD,IAAMC,cAAwD,GAAGnI,WAAW,CAACE,KAAK,CAACC,MAAM,KAAK,CAAC,GACzF,EAAE,GACFH,WAAW,CAACE,KAAK,CACda,MAAM,CAAEb,KAAK,IAAK;UACf,IACIA,KAAK,CAAC4B,MAAM,KAAK,GAAG,IACpB,CAAC5B,KAAK,CAACJ,QAAQ,CAACoE,QAAQ,IACxB,CAAChE,KAAK,CAACJ,QAAQ,CAACmE,QAAQ,CAACI,QAAQ,IACjC,IAAAnD,qBAAc,EAAChB,KAAK,CAACqE,YAAY,CAAC,CAACF,QAAQ,EAC7C;YACE,OAAO,IAAI;UACf;UACA4D,cAAc,CAAC/H,KAAK,CAAC+E,IAAI,CAAC/E,KAAK,CAAC;UAChC,OAAO,KAAK;QAChB,CAAC,CAAQ;QACjB,IAAIiI,cAAc,CAAChI,MAAM,GAAG,CAAC,EAAE;UAC3B,IAAMiI,SAAoC,GAAGD,cAAc,CACtDzH,GAAG,CAAER,KAAK,IAAK;YACZ,OAAO;cACHgE,QAAQ,EAAEhE,KAAK,CAACqE,YAAY;cAC5BN,QAAQ,EAAExC,MAAM,CAACC,MAAM,CACnB,CAAC,CAAC,EACFxB,KAAK,CAACJ,QAAQ,CAACmE,QAAQ,EACvB;gBACIwB,IAAI,EAAE,IAAAoC,qBAAc,EAChBjB,QAAQ,CAACW,KAAK,EACdrH,KAAK,CAACqE,YACV;cACJ,CACJ;YACJ,CAAC;UACL,CAAC,CAAC;UAEN,OAAOqC,QAAQ,CAACmB,SAAS,CACrB,MAAMzI,eAAe,CAACW,SAAS,CAC3BmI,SAAS,EACTrI,OACJ,CACJ,CAAC,CAACiI,IAAI,CAACK,SAAS,IAAI;YAChB,IAAAC,oBAAa,EAACL,cAAc,CAAC/H,KAAK,EAAEmI,SAAS,CAACnI,KAAK,CAAC;YACpD,IAAAoI,oBAAa,EAACL,cAAc,CAAC5H,OAAO,EAAEgI,SAAS,CAAChI,OAAO,CAAC;YACxD,OAAO4H,cAAc;UACzB,CAAC,CAAC;QACN;QACA,OAAOjI,WAAW;MAC
tB,CAAC,CAAC;IACV,CAAC;IACDuI,KAAKA,CAACC,aAAa,EAAE;MACjB,OAAO5B,QAAQ,CAACmB,SAAS,CACrB,MAAMzI,eAAe,CAACiJ,KAAK,CAACC,aAAa,CAC7C,CAAC;IACL,CAAC;IACDC,KAAKA,CAACD,aAAa,EAAE;MACjB,OAAO5B,QAAQ,CAACmB,SAAS,CACrB,MAAMzI,eAAe,CAACmJ,KAAK,CAACD,aAAa,CAC7C,CAAC;IACL,CAAC;IACD/I,iBAAiBA,CAACiJ,GAAG,EAAEC,OAAO,EAAE;MAC5B,OAAO/B,QAAQ,CAACmB,SAAS,CACrB,MAAMzI,eAAe,CAACG,iBAAiB,CAACiJ,GAAG,EAAEC,OAAO,CACxD,CAAC;IACL,CAAC;IACDC,iBAAiBA,CACbrJ,UAAkB,EAClBuF,YAAoB,EACpBI,MAAc,EAChB;MACE,OAAO0B,QAAQ,CAACmB,SAAS,CACrB,MAAMzI,eAAe,CAACsJ,iBAAiB,CAACrJ,UAAU,EAAEuF,YAAY,EAAEI,MAAM,CAC5E,CAAC;IACL,CAAC;IACD2D,wBAAwB,EAAE,CAACvJ,eAAe,CAACuJ,wBAAwB,GAAGlJ,SAAS,GAAG,CAACmJ,KAAa,EAAE3F,UAAgB,KAAK;MACnH,OAAOyD,QAAQ,CAACmB,SAAS,CACrB,MAAQzI,eAAe,CAASuJ,wBAAwB,CAAE,IAAA3H,qBAAc,EAAC4H,KAAK,CAAC,EAAE3F,UAAU,CAC/F,CAAC;IACL,CAAC;IACD4F,OAAOA,CAACC,cAAsB,EAAE;MAC5B,OAAOpC,QAAQ,CAACmB,SAAS,CACrB,MAAMzI,eAAe,CAACyJ,OAAO,CAACC,cAAc,CAChD,CAAC;IACL,CAAC;IACDC,MAAMA,CAAA,EAAG;MACLrC,QAAQ,CAACsC,gBAAgB,CAACC,MAAM,CAAC/I,GAAG,CAAC;MACrC,OAAOwG,QAAQ,CAACmB,SAAS,CACrB,MAAMzI,eAAe,CAAC2J,MAAM,CAAC,CACjC,CAAC;IACL,CAAC;IACDG,KAAKA,CAAA,EAAG;MACJxC,QAAQ,CAACsC,gBAAgB,CAACC,MAAM,CAAC/I,GAAG,CAAC;MACrC,OAAOwG,QAAQ,CAACmB,SAAS,CACrB,MAAMzI,eAAe,CAAC8J,KAAK,CAAC,CAChC,CAAC;IACL,CAAC;IACD5I,YAAYA,CAAA,EAAG;MACX,OAAOlB,eAAe,CAACkB,YAAY,CAAC,CAAC;IACzC,CAAC;IACD6I,sBAAsBA,CAAA,EAAG;MACrB,OAAO/J,eAAe,CAAC+J,sBAAsB,CAAC,CAAC;IACnD,CAAC;IACDC,4BAA4BA,CAACC,YAAY,EAAE;MACvC,IAAIA,YAAY,CAACC,MAAM,CAACC,OAAO,EAAE;QAC7B,OAAOnK,eAAe,CAACgK,4BAA4B,CAACC,YAAY,CAAC;MACrE;MAEA,IAAM7J,GAAG,GAAG+B,MAAM,CAACC,MAAM,CACrB,CAAC,CAAC,EACF6H,YAAY,CAACC,MAAM,CAACrI,YAAY,EAChC;QACIuF,KAAK,EAAE,IAAAgD,+BAAwB,EAAC,CAAC;QACjCjE,IAAI,EAAE,IAAAkE,yBAAkB,EAAC,CAAC;QAC1B/E,YAAY,EAAE,CAAC;MACnB,CACJ,CAAC;MAED,IAAMzD,YAAY,GAAG,IAAAqF,gBAAS,EAAC9G,GAAG,CAAC;MACnC,OAAQyB,YAAY,CAASuF,KAAK;MAClC,OAAQvF,YAAY,CAASsE,IAAI;MACjC,OAAQtE,YAAY,CAASyD,YAAY;MAEzC,OAAOtF,eAAe,CAACgK,4BAA4B,CAAC;QAChDrH,EAAE,EAAEsH,YAAY,CAACtH,EAAE;QACnBuH,MAAM,EAAE;UACJC,OAAO,E
AAE,KAAK;UACdtI;QACJ;MACJ,CAAC,CAAC;IACN;EACJ,CAAC;EAEDyF,QAAQ,CAACsC,gBAAgB,CAACU,GAAG,CAACxJ,GAAG,CAAC;EAClC,OAAOA,GAAG;AACd;;AAEA;AACA;AACA;AACA;AACA;AACO,SAASyJ,uCAAuCA,CACnDC,MAAiD,EACnD;EACE,IAAIA,MAAM,CAACnH,MAAM,CAACoH,cAAc,EAAE;IAC9B,MAAM,IAAAhI,mBAAU,EAAC,KAAK,EAAE;MAAEkE,IAAI,EAAE;QAAE6D;MAAO;IAAE,CAAC,CAAC;EACjD;EACA,IAAIE,aAAa,CAACF,MAAM,CAACnH,MAAM,CAAC,EAAE;IAC9B,MAAM,IAAAZ,mBAAU,EAAC,KAAK,EAAE;MAAEkE,IAAI,EAAE;QAAE6D;MAAO;IAAE,CAAC,CAAC;EACjD;EACA,IACIA,MAAM,CAACnH,MAAM,CAACC,WAAW,IACzBkH,MAAM,CAACnH,MAAM,CAACC,WAAW,CAACqH,WAAW,EACvC;IACE,MAAM,IAAAlI,mBAAU,EAAC,KAAK,EAAE;MAAEkE,IAAI,EAAE;QAAE6D;MAAO;IAAE,CAAC,CAAC;EACjD;AACJ;AAEO,SAASE,aAAaA,CAACE,UAA6B,EAAW;EAClE,IACK,CAAC,CAACA,UAAU,CAACC,SAAS,IAAID,UAAU,CAACC,SAAS,CAAChK,MAAM,GAAG,CAAC,IACzD+J,UAAU,CAACtH,WAAW,IAAIsH,UAAU,CAACtH,WAAW,CAACuH,SAAU,EAC9D;IACE,OAAO,IAAI;EACf,CAAC,MAAM;IACH,OAAO,KAAK;EAChB;AACJ;AAEO,SAASC,6BAA6BA,CACzC9K,eAAuE,EACvEwJ,KAAa,EACb3F,UAA2B,EACA;EAC3B,IAAMd,WAAW,GAAG,IAAAgI,2CAA2B,EAAC/K,eAAe,CAACqD,MAAM,CAAC2H,UAAU,CAAC;EAClF,IAAMC,QAAQ,GAAGpH,UAAU,GAAIA,UAAU,CAA2CyE,GAAG,GAAG4C,0BAAmB;EAC7G,IAAMC,OAAO,GAAGtH,UAAU,GAAIA,UAAU,CAA2ClB,EAAE,GAAG,EAAE;EAC1F,OAAO,IAAAyI,kCAAmB,EAACpL,eAAe,CAACqD,MAAM,EAAE;IAC/CgI,QAAQ,EAAE;MACNC,GAAG,EAAE,CACD;QACI,WAAW,EAAE;UACTC,GAAG,EAAEN;QACT;MACJ,CAAC,EACD;QACI,WAAW,EAAE;UACTO,GAAG,EAAEP;QACT,CAAC;QACD,CAAClI,WAAW,GAAG;UACXwI,GAAG,EAAE1H,UAAU,GAAGsH,OAAO,GAAG;QAChC;MACJ,CAAC,CACJ;MACD;MACA,WAAW,EAAE;QACTM,IAAI,EAAER;MACV;IACJ,CAAQ;IACRS,IAAI,EAAE,CACF;MAAE,WAAW,EAAE;IAAM,CAAC,EACtB;MAAE,CAAC3I,WAAW,GAAG;IAAM,CAAC,CACpB;IACR4I,IAAI,EAAE,CAAC;IACPnC;IACA;AACR;AACA;AACA;AACA;AACA;AACA;IACQ;EACJ,CAAC,CAAC;AACN;AAEO,eAAeD,wBAAwBA,CAC1CvJ,eAAuE,EACvEwJ,KAAa,EACb3F,UAA2B,EAS5B;EACC,IAAI7D,eAAe,CAACuJ,wBAAwB,EAAE;IAC1C,OAAOvJ,eAAe,CAACuJ,wBAAwB,CAACC,KAAK,EAAE3F,UAAU,CAAC;EACtE;EAEA,IAAMd,WAAW,GAAG,IAAAgI,2CAA2B,EAAC/K,eAAe,CAACqD,MAAM,CAAC2H,UAAU,CAAC;EAClF,IAAM/B,KAAK,GAAG,IAAA2C,qBAAY,EACtB5L,eAAe,CAACqD,MAAM,EACtByH,6BAA6B,CACzB9K,eAAe,EACfwJ,
KAAK,EACL3F,UACJ,CACJ,CAAC;EAED,IAAMgI,MAAM,GAAG,MAAM7L,eAAe,CAACiJ,KAAK,CAACA,KAAK,CAAC;EACjD,IAAM6C,SAAS,GAAGD,MAAM,CAACC,SAAS;EAClC,IAAMC,OAAO,GAAG,IAAAC,kBAAW,EAACF,SAAS,CAAC;EAEtC,OAAO;IACHA,SAAS,EAAEA,SAAS;IACpBjI,UAAU,EAAEkI,OAAO,GAAG;MAClBpJ,EAAE,EAAGoJ,OAAO,CAAShJ,WAAW,CAAC;MACjCuF,GAAG,EAAEyD,OAAO,CAAC3E,KAAK,CAACkB;IACvB,CAAC,GAAUzE,UAAU,GAAGA,UAAU,GAAG;MACjClB,EAAE,EAAE,EAAE;MACN2F,GAAG,EAAE;IACT;EACJ,CAAC;AACL;;AAGA;AACA;AACA;AACA;AACO,SAAS2D,kBAAkBA,CAC9BC,KAIC,EAC4C;EAC7C;AACJ;AACA;AACA;EACI,IAAIC,4BAA0C,GAAGC,2BAAoB;EAErE,IAAMC,UAAyD,GAAG;IAC9D3J,IAAI,EAAE,eAAe,GAAGwJ,KAAK,CAACI,OAAO,CAAC5J,IAAI;IAC1C6J,WAAW,EAAEC,mBAAY;IACzB,MAAMC,qBAAqBA,CAACjC,MAAM,EAAE;MAChC,MAAM,IAAAkC,kBAAW,EAACR,KAAK,CAACS,eAAe,CAAC,CAAC,CAAC;MAC1C,IAAM3M,eAAe,GAAG,MAAMkM,KAAK,CAACI,OAAO,CAACG,qBAAqB,CAACjC,MAAM,CAAC;MACzE,MAAM,IAAAkC,kBAAW,EAACR,KAAK,CAACU,cAAc,CAAC,CAAC,CAAC;MAEzC,OAAO;QACH/E,YAAY,EAAE7H,eAAe,CAAC6H,YAAY;QAC1CF,SAAS,EAAE3H,eAAe,CAAC2H,SAAS;QACpCG,OAAO,EAAE9H,eAAe,CAAC8H,OAAO;QAChCzE,MAAM,EAAErD,eAAe,CAACqD,MAAM;QAC9BuE,cAAc,EAAE5H,eAAe,CAAC4H,cAAc;QAC9CjH,SAASA,CAACkM,CAAC,EAAEC,CAAC,EAAE;UACZX,4BAA4B,GAAGA,4BAA4B,CAACzD,IAAI,CAAC,YAAY;YACzE,MAAM,IAAAgE,kBAAW,EAACR,KAAK,CAACS,eAAe,CAAC,CAAC,CAAC;YAC1C,IAAMI,QAAQ,GAAG,MAAM/M,eAAe,CAACW,SAAS,CAACkM,CAAC,EAAEC,CAAC,CAAC;YACtD,MAAM,IAAAJ,kBAAW,EAACR,KAAK,CAACU,cAAc,CAAC,CAAC,CAAC;YACzC,OAAOG,QAAQ;UACnB,CAAC,CAAC;UACF,IAAMjM,GAAG,GAAGqL,4BAA4B;UACxC,OAAOrL,GAAG;QACd,CAAC;QACD,MAAMX,iBAAiBA,CAAC0M,CAAC,EAAEC,CAAC,EAAE;UAC1B,MAAM,IAAAJ,kBAAW,EAACR,KAAK,CAACS,eAAe,CAAC,CAAC,CAAC;UAC1C,IAAM7L,GAAG,GAAG,MAAMd,eAAe,CAACG,iBAAiB,CAAC0M,CAAC,EAAEC,CAAC,CAAC;UACzD,MAAM,IAAAJ,kBAAW,EAACR,KAAK,CAACU,cAAc,CAAC,CAAC,CAAC;UACzC,OAAO9L,GAAG;QACd,CAAC;QACD,MAAMmI,KAAKA,CAAC4D,CAAC,EAAE;UACX,MAAM,IAAAH,kBAAW,EAACR,KAAK,CAACS,eAAe,CAAC,CAAC,CAAC;UAC1C,IAAM7L,GAAG,GAAG,MAAMd,eAAe,CAACiJ,KAAK,CAAC4D,CAAC,CAAC;UAC1C,OAAO/L,GAAG;QACd,CAAC;QACD,MAAMqI,KAAKA,CAAC0D,CAAC,EAAE;UACX,MAAM,IAAAH,kBAAW,EAACR,KAAK,CAACS,eAAe,CAAC,CAAC,CAAC;UAC
1C,IAAM7L,GAAG,GAAG,MAAMd,eAAe,CAACmJ,KAAK,CAAC0D,CAAC,CAAC;UAC1C,MAAM,IAAAH,kBAAW,EAACR,KAAK,CAACU,cAAc,CAAC,CAAC,CAAC;UACzC,OAAO9L,GAAG;QAEd,CAAC;QACD,MAAMwI,iBAAiBA,CAACuD,CAAC,EAAEC,CAAC,EAAEE,CAAC,EAAE;UAC7B,MAAM,IAAAN,kBAAW,EAACR,KAAK,CAACS,eAAe,CAAC,CAAC,CAAC;UAC1C,IAAM7L,GAAG,GAAG,MAAMd,eAAe,CAACsJ,iBAAiB,CAACuD,CAAC,EAAEC,CAAC,EAAEE,CAAC,CAAC;UAC5D,MAAM,IAAAN,kBAAW,EAACR,KAAK,CAACU,cAAc,CAAC,CAAC,CAAC;UACzC,OAAO9L,GAAG;QAEd,CAAC;QACDyI,wBAAwB,EAAE,CAACvJ,eAAe,CAACuJ,wBAAwB,GAAGlJ,SAAS,GAAG,OAAOwM,CAAC,EAAEC,CAAC,KAAK;UAC9F,MAAM,IAAAJ,kBAAW,EAACR,KAAK,CAACS,eAAe,CAAC,CAAC,CAAC;UAC1C,IAAM7L,GAAG,GAAG,MAAM,IAAAc,qBAAc,EAAC5B,eAAe,CAACuJ,wBAAwB,CAAC,CAACsD,CAAC,EAAEC,CAAC,CAAC;UAChF,MAAM,IAAAJ,kBAAW,EAACR,KAAK,CAACU,cAAc,CAAC,CAAC,CAAC;UACzC,OAAO9L,GAAG;QAEd,CAAC;QACDI,YAAYA,CAAA,EAAG;UACX,OAAOlB,eAAe,CAACkB,YAAY,CAAC,CAAC;QACzC,CAAC;QACD6I,sBAAsBA,CAAA,EAAG;UACrB,OAAO/J,eAAe,CAAC+J,sBAAsB,CAAC,CAAC;QACnD,CAAC;QACDC,4BAA4BA,CAAC6C,CAAC,EAAE;UAC5B,OAAO7M,eAAe,CAACgK,4BAA4B,CAAC6C,CAAC,CAAC;QAC1D,CAAC;QACD,MAAMpD,OAAOA,CAACoD,CAAC,EAAE;UACb,MAAM,IAAAH,kBAAW,EAACR,KAAK,CAACS,eAAe,CAAC,CAAC,CAAC;UAC1C,IAAM7L,GAAG,GAAG,MAAMd,eAAe,CAACyJ,OAAO,CAACoD,CAAC,CAAC;UAC5C,MAAM,IAAAH,kBAAW,EAACR,KAAK,CAACU,cAAc,CAAC,CAAC,CAAC;UACzC,OAAO9L,GAAG;QAEd,CAAC;QACD,MAAMgJ,KAAKA,CAAA,EAAG;UACV,MAAM,IAAA4C,kBAAW,EAACR,KAAK,CAACS,eAAe,CAAC,CAAC,CAAC;UAC1C,IAAM7L,GAAG,GAAG,MAAMd,eAAe,CAAC8J,KAAK,CAAC,CAAC;UACzC,MAAM,IAAA4C,kBAAW,EAACR,KAAK,CAACU,cAAc,CAAC,CAAC,CAAC;UACzC,OAAO9L,GAAG;QAEd,CAAC;QACD,MAAM6I,MAAMA,CAAA,EAAG;UACX,MAAM,IAAA+C,kBAAW,EAACR,KAAK,CAACS,eAAe,CAAC,CAAC,CAAC;UAC1C,IAAM7L,GAAG,GAAG,MAAMd,eAAe,CAAC2J,MAAM,CAAC,CAAC;UAC1C,MAAM,IAAA+C,kBAAW,EAACR,KAAK,CAACU,cAAc,CAAC,CAAC,CAAC;UACzC,OAAO9L,GAAG;QACd;MACJ,CAAC;IAGL;EACJ,CAAC;EACD,OAAOuL,UAAU;AACrB","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/rx-storage-multiinstance.js b/dist/cjs/rx-storage-multiinstance.js deleted file mode 100644 index 39909888526..00000000000 --- 
a/dist/cjs/rx-storage-multiinstance.js +++ /dev/null @@ -1,125 +0,0 @@ -"use strict"; - -Object.defineProperty(exports, "__esModule", { - value: true -}); -exports.BROADCAST_CHANNEL_BY_TOKEN = void 0; -exports.addRxStorageMultiInstanceSupport = addRxStorageMultiInstanceSupport; -exports.getBroadcastChannelReference = getBroadcastChannelReference; -exports.removeBroadcastChannelReference = removeBroadcastChannelReference; -var _rxjs = require("rxjs"); -var _operators = require("rxjs/operators"); -var _broadcastChannel = require("broadcast-channel"); -/** - * When a persistent RxStorage is used in more the one JavaScript process, - * the even stream of the changestream() function must be broadcasted to the other - * RxStorageInstances of the same databaseName+collectionName. - * - * In the past this was done by RxDB but it makes more sense to do this - * at the RxStorage level so that the broadcasting etc can all happen inside of a WebWorker - * and not on the main thread. - * Also it makes it less complex to stack up different RxStorages onto each other - * like what we do with the in-memory plugin. - * - * This is intended to be used inside of createStorageInstance() of a storage. - * Do not use this if the storage anyway broadcasts the events like when using MongoDB - * or in the future W3C might introduce a way to listen to IndexedDB changes. - */ - -/** - * The broadcast-channel is reused by the databaseInstanceToken. - * This is required so that it is easy to simulate multi-tab usage - * in the test where different instances of the same RxDatabase must - * have different broadcast channels. - * But also it ensures that for each RxDatabase we only create a single - * broadcast channel that can even be reused in the leader election plugin. - * - * TODO at the end of the unit tests, - * we should ensure that all channels are closed and cleaned up. - * Otherwise we have forgot something. 
- */ -var BROADCAST_CHANNEL_BY_TOKEN = exports.BROADCAST_CHANNEL_BY_TOKEN = new Map(); -function getBroadcastChannelReference(storageName, databaseInstanceToken, databaseName, refObject) { - var state = BROADCAST_CHANNEL_BY_TOKEN.get(databaseInstanceToken); - if (!state) { - state = { - /** - * We have to use the databaseName instead of the databaseInstanceToken - * in the BroadcastChannel name because different instances must end with the same - * channel name to be able to broadcast messages between each other. - */ - bc: new _broadcastChannel.BroadcastChannel(['RxDB:', storageName, databaseName].join('|')), - refs: new Set() - }; - BROADCAST_CHANNEL_BY_TOKEN.set(databaseInstanceToken, state); - } - state.refs.add(refObject); - return state.bc; -} -function removeBroadcastChannelReference(databaseInstanceToken, refObject) { - var state = BROADCAST_CHANNEL_BY_TOKEN.get(databaseInstanceToken); - if (!state) { - return; - } - state.refs.delete(refObject); - if (state.refs.size === 0) { - BROADCAST_CHANNEL_BY_TOKEN.delete(databaseInstanceToken); - return state.bc.close(); - } -} -function addRxStorageMultiInstanceSupport(storageName, instanceCreationParams, instance, -/** - * If provided, that channel will be used - * instead of an own one. - */ -providedBroadcastChannel) { - if (!instanceCreationParams.multiInstance) { - return; - } - var broadcastChannel = providedBroadcastChannel ? 
providedBroadcastChannel : getBroadcastChannelReference(storageName, instanceCreationParams.databaseInstanceToken, instance.databaseName, instance); - var changesFromOtherInstances$ = new _rxjs.Subject(); - var eventListener = msg => { - if (msg.storageName === storageName && msg.databaseName === instanceCreationParams.databaseName && msg.collectionName === instanceCreationParams.collectionName && msg.version === instanceCreationParams.schema.version) { - changesFromOtherInstances$.next(msg.eventBulk); - } - }; - broadcastChannel.addEventListener('message', eventListener); - var oldChangestream$ = instance.changeStream(); - var closed = false; - var sub = oldChangestream$.subscribe(eventBulk => { - if (closed) { - return; - } - broadcastChannel.postMessage({ - storageName: storageName, - databaseName: instanceCreationParams.databaseName, - collectionName: instanceCreationParams.collectionName, - version: instanceCreationParams.schema.version, - eventBulk - }); - }); - instance.changeStream = function () { - return changesFromOtherInstances$.asObservable().pipe((0, _operators.mergeWith)(oldChangestream$)); - }; - var oldClose = instance.close.bind(instance); - instance.close = async function () { - closed = true; - sub.unsubscribe(); - broadcastChannel.removeEventListener('message', eventListener); - if (!providedBroadcastChannel) { - await removeBroadcastChannelReference(instanceCreationParams.databaseInstanceToken, instance); - } - return oldClose(); - }; - var oldRemove = instance.remove.bind(instance); - instance.remove = async function () { - closed = true; - sub.unsubscribe(); - broadcastChannel.removeEventListener('message', eventListener); - if (!providedBroadcastChannel) { - await removeBroadcastChannelReference(instanceCreationParams.databaseInstanceToken, instance); - } - return oldRemove(); - }; -} -//# sourceMappingURL=rx-storage-multiinstance.js.map \ No newline at end of file diff --git a/dist/cjs/rx-storage-multiinstance.js.map 
b/dist/cjs/rx-storage-multiinstance.js.map deleted file mode 100644 index 29edf6ac65e..00000000000 --- a/dist/cjs/rx-storage-multiinstance.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-storage-multiinstance.js","names":["_rxjs","require","_operators","_broadcastChannel","BROADCAST_CHANNEL_BY_TOKEN","exports","Map","getBroadcastChannelReference","storageName","databaseInstanceToken","databaseName","refObject","state","get","bc","BroadcastChannel","join","refs","Set","set","add","removeBroadcastChannelReference","delete","size","close","addRxStorageMultiInstanceSupport","instanceCreationParams","instance","providedBroadcastChannel","multiInstance","broadcastChannel","changesFromOtherInstances$","Subject","eventListener","msg","collectionName","version","schema","next","eventBulk","addEventListener","oldChangestream$","changeStream","closed","sub","subscribe","postMessage","asObservable","pipe","mergeWith","oldClose","bind","unsubscribe","removeEventListener","oldRemove","remove"],"sources":["../../src/rx-storage-multiinstance.ts"],"sourcesContent":["/**\n * When a persistent RxStorage is used in more the one JavaScript process,\n * the even stream of the changestream() function must be broadcasted to the other\n * RxStorageInstances of the same databaseName+collectionName.\n *\n * In the past this was done by RxDB but it makes more sense to do this\n * at the RxStorage level so that the broadcasting etc can all happen inside of a WebWorker\n * and not on the main thread.\n * Also it makes it less complex to stack up different RxStorages onto each other\n * like what we do with the in-memory plugin.\n *\n * This is intended to be used inside of createStorageInstance() of a storage.\n * Do not use this if the storage anyway broadcasts the events like when using MongoDB\n * or in the future W3C might introduce a way to listen to IndexedDB changes.\n */\n\nimport { Observable, Subject } from 'rxjs';\nimport { mergeWith } from 'rxjs/operators';\nimport type 
{\n EventBulk,\n RxStorageChangeEvent,\n RxStorageInstance,\n RxStorageInstanceCreationParams\n} from './types/index.d.ts';\n\nimport {\n BroadcastChannel\n} from 'broadcast-channel';\n\n/**\n * The broadcast-channel is reused by the databaseInstanceToken.\n * This is required so that it is easy to simulate multi-tab usage\n * in the test where different instances of the same RxDatabase must\n * have different broadcast channels.\n * But also it ensures that for each RxDatabase we only create a single\n * broadcast channel that can even be reused in the leader election plugin.\n *\n * TODO at the end of the unit tests,\n * we should ensure that all channels are closed and cleaned up.\n * Otherwise we have forgot something.\n */\nexport const BROADCAST_CHANNEL_BY_TOKEN: Map;\n /**\n * Contains all context objects that currently use the channel.\n * If this becomes empty, we can close the channel\n */\n refs: Set;\n}> = new Map();\n\n\nexport type RxStorageMultiInstanceBroadcastType = {\n storageName: string;\n collectionName: string;\n /**\n * collection.schema.version\n */\n version: number;\n databaseName: string;\n eventBulk: EventBulk;\n};\n\nexport function getBroadcastChannelReference(\n storageName: string,\n databaseInstanceToken: string,\n databaseName: string,\n refObject: any\n): BroadcastChannel {\n let state = BROADCAST_CHANNEL_BY_TOKEN.get(databaseInstanceToken);\n if (!state) {\n state = {\n /**\n * We have to use the databaseName instead of the databaseInstanceToken\n * in the BroadcastChannel name because different instances must end with the same\n * channel name to be able to broadcast messages between each other.\n */\n bc: new BroadcastChannel(['RxDB:', storageName, databaseName].join('|')),\n refs: new Set()\n };\n BROADCAST_CHANNEL_BY_TOKEN.set(databaseInstanceToken, state);\n }\n state.refs.add(refObject);\n return state.bc;\n}\n\nexport function removeBroadcastChannelReference(\n databaseInstanceToken: string,\n refObject: any\n) {\n const 
state = BROADCAST_CHANNEL_BY_TOKEN.get(databaseInstanceToken);\n if (!state) {\n return;\n }\n state.refs.delete(refObject);\n if (state.refs.size === 0) {\n BROADCAST_CHANNEL_BY_TOKEN.delete(databaseInstanceToken);\n return state.bc.close();\n }\n}\n\n\nexport function addRxStorageMultiInstanceSupport(\n storageName: string,\n instanceCreationParams: RxStorageInstanceCreationParams,\n instance: RxStorageInstance,\n /**\n * If provided, that channel will be used\n * instead of an own one.\n */\n providedBroadcastChannel?: BroadcastChannel\n) {\n if (!instanceCreationParams.multiInstance) {\n return;\n }\n\n type Emit = EventBulk, any>;\n\n const broadcastChannel = providedBroadcastChannel ?\n providedBroadcastChannel :\n getBroadcastChannelReference(\n storageName,\n instanceCreationParams.databaseInstanceToken,\n instance.databaseName,\n instance\n );\n\n const changesFromOtherInstances$: Subject = new Subject();\n\n\n const eventListener = (msg: RxStorageMultiInstanceBroadcastType) => {\n if (\n msg.storageName === storageName &&\n msg.databaseName === instanceCreationParams.databaseName &&\n msg.collectionName === instanceCreationParams.collectionName &&\n msg.version === instanceCreationParams.schema.version\n ) {\n changesFromOtherInstances$.next(msg.eventBulk);\n }\n };\n\n broadcastChannel.addEventListener('message', eventListener);\n\n const oldChangestream$ = instance.changeStream();\n\n let closed = false;\n const sub = oldChangestream$.subscribe(eventBulk => {\n if (closed) {\n return;\n }\n broadcastChannel.postMessage({\n storageName: storageName,\n databaseName: instanceCreationParams.databaseName,\n collectionName: instanceCreationParams.collectionName,\n version: instanceCreationParams.schema.version,\n eventBulk\n });\n });\n\n instance.changeStream = function (): Observable {\n return changesFromOtherInstances$.asObservable().pipe(\n mergeWith(oldChangestream$)\n );\n };\n\n const oldClose = instance.close.bind(instance);\n instance.close = async 
function () {\n closed = true;\n sub.unsubscribe();\n broadcastChannel.removeEventListener('message', eventListener);\n if (!providedBroadcastChannel) {\n await removeBroadcastChannelReference(\n instanceCreationParams.databaseInstanceToken,\n instance\n );\n }\n return oldClose();\n };\n\n const oldRemove = instance.remove.bind(instance);\n instance.remove = async function () {\n closed = true;\n sub.unsubscribe();\n broadcastChannel.removeEventListener('message', eventListener);\n if (!providedBroadcastChannel) {\n await removeBroadcastChannelReference(\n instanceCreationParams.databaseInstanceToken,\n instance\n );\n }\n return oldRemove();\n };\n}\n"],"mappings":";;;;;;;;;AAgBA,IAAAA,KAAA,GAAAC,OAAA;AACA,IAAAC,UAAA,GAAAD,OAAA;AAQA,IAAAE,iBAAA,GAAAF,OAAA;AAzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAeA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACO,IAAMG,0BAOX,GAAAC,OAAA,CAAAD,0BAAA,GAAG,IAAIE,GAAG,CAAC,CAAC;AAcP,SAASC,4BAA4BA,CACxCC,WAAmB,EACnBC,qBAA6B,EAC7BC,YAAoB,EACpBC,SAAc,EACuC;EACrD,IAAIC,KAAK,GAAGR,0BAA0B,CAACS,GAAG,CAACJ,qBAAqB,CAAC;EACjE,IAAI,CAACG,KAAK,EAAE;IACRA,KAAK,GAAG;MACJ;AACZ;AACA;AACA;AACA;MACYE,EAAE,EAAE,IAAIC,kCAAgB,CAAC,CAAC,OAAO,EAAEP,WAAW,EAAEE,YAAY,CAAC,CAACM,IAAI,CAAC,GAAG,CAAC,CAAC;MACxEC,IAAI,EAAE,IAAIC,GAAG,CAAM;IACvB,CAAC;IACDd,0BAA0B,CAACe,GAAG,CAACV,qBAAqB,EAAEG,KAAK,CAAC;EAChE;EACAA,KAAK,CAACK,IAAI,CAACG,GAAG,CAACT,SAAS,CAAC;EACzB,OAAOC,KAAK,CAACE,EAAE;AACnB;AAEO,SAASO,+BAA+BA,CAC3CZ,qBAA6B,EAC7BE,SAAc,EAChB;EACE,IAAMC,KAAK,GAAGR,0BAA0B,CAACS,GAAG,CAACJ,qBAAqB,CAAC;EACnE,IAAI,CAACG,KAAK,EAAE;IACR;EACJ;EACAA,KAAK,CAACK,IAAI,CAACK,MAAM,CAACX,SAAS,CAAC;EAC5B,IAAIC,KAAK,CAACK,IAAI,CAACM,IAAI,KAAK,CAAC,EAAE;IACvBnB,0BAA0B,CAACkB,MAAM,CAACb,qBAAqB,CAAC;IACxD,OAAOG,KAAK,CAACE,EAAE,CAACU,KAAK,CAAC,CAAC;EAC3B;AACJ;AAGO,SAASC,gCAAgCA,CAC5CjB,WAAmB,EACnBkB,sBAAuE,EACvEC,QAAgD;AAChD;AACJ;AACA;AACA;AACIC,wBAAgD,EAClD;EACE,IAAI,CAACF,sBAAsB,CAACG,aAAa,EAAE;IACvC;EACJ;EAIA,IAAMC,gBAAgB,GAAGF,wBAAwB,G
AC7CA,wBAAwB,GACxBrB,4BAA4B,CACxBC,WAAW,EACXkB,sBAAsB,CAACjB,qBAAqB,EAC5CkB,QAAQ,CAACjB,YAAY,EACrBiB,QACJ,CAAC;EAEL,IAAMI,0BAAyC,GAAG,IAAIC,aAAO,CAAC,CAAC;EAG/D,IAAMC,aAAa,GAAIC,GAAwC,IAAK;IAChE,IACIA,GAAG,CAAC1B,WAAW,KAAKA,WAAW,IAC/B0B,GAAG,CAACxB,YAAY,KAAKgB,sBAAsB,CAAChB,YAAY,IACxDwB,GAAG,CAACC,cAAc,KAAKT,sBAAsB,CAACS,cAAc,IAC5DD,GAAG,CAACE,OAAO,KAAKV,sBAAsB,CAACW,MAAM,CAACD,OAAO,EACvD;MACEL,0BAA0B,CAACO,IAAI,CAACJ,GAAG,CAACK,SAAS,CAAC;IAClD;EACJ,CAAC;EAEDT,gBAAgB,CAACU,gBAAgB,CAAC,SAAS,EAAEP,aAAa,CAAC;EAE3D,IAAMQ,gBAAgB,GAAGd,QAAQ,CAACe,YAAY,CAAC,CAAC;EAEhD,IAAIC,MAAM,GAAG,KAAK;EAClB,IAAMC,GAAG,GAAGH,gBAAgB,CAACI,SAAS,CAACN,SAAS,IAAI;IAChD,IAAII,MAAM,EAAE;MACR;IACJ;IACAb,gBAAgB,CAACgB,WAAW,CAAC;MACzBtC,WAAW,EAAEA,WAAW;MACxBE,YAAY,EAAEgB,sBAAsB,CAAChB,YAAY;MACjDyB,cAAc,EAAET,sBAAsB,CAACS,cAAc;MACrDC,OAAO,EAAEV,sBAAsB,CAACW,MAAM,CAACD,OAAO;MAC9CG;IACJ,CAAC,CAAC;EACN,CAAC,CAAC;EAEFZ,QAAQ,CAACe,YAAY,GAAG,YAA8B;IAClD,OAAOX,0BAA0B,CAACgB,YAAY,CAAC,CAAC,CAACC,IAAI,CACjD,IAAAC,oBAAS,EAACR,gBAAgB,CAC9B,CAAC;EACL,CAAC;EAED,IAAMS,QAAQ,GAAGvB,QAAQ,CAACH,KAAK,CAAC2B,IAAI,CAACxB,QAAQ,CAAC;EAC9CA,QAAQ,CAACH,KAAK,GAAG,kBAAkB;IAC/BmB,MAAM,GAAG,IAAI;IACbC,GAAG,CAACQ,WAAW,CAAC,CAAC;IACjBtB,gBAAgB,CAACuB,mBAAmB,CAAC,SAAS,EAAEpB,aAAa,CAAC;IAC9D,IAAI,CAACL,wBAAwB,EAAE;MAC3B,MAAMP,+BAA+B,CACjCK,sBAAsB,CAACjB,qBAAqB,EAC5CkB,QACJ,CAAC;IACL;IACA,OAAOuB,QAAQ,CAAC,CAAC;EACrB,CAAC;EAED,IAAMI,SAAS,GAAG3B,QAAQ,CAAC4B,MAAM,CAACJ,IAAI,CAACxB,QAAQ,CAAC;EAChDA,QAAQ,CAAC4B,MAAM,GAAG,kBAAkB;IAChCZ,MAAM,GAAG,IAAI;IACbC,GAAG,CAACQ,WAAW,CAAC,CAAC;IACjBtB,gBAAgB,CAACuB,mBAAmB,CAAC,SAAS,EAAEpB,aAAa,CAAC;IAC9D,IAAI,CAACL,wBAAwB,EAAE;MAC3B,MAAMP,+BAA+B,CACjCK,sBAAsB,CAACjB,qBAAqB,EAC5CkB,QACJ,CAAC;IACL;IACA,OAAO2B,SAAS,CAAC,CAAC;EACtB,CAAC;AACL","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/conflict-handling.d.js b/dist/cjs/types/conflict-handling.d.js deleted file mode 100644 index d58bd8c00df..00000000000 --- a/dist/cjs/types/conflict-handling.d.js +++ /dev/null @@ -1,2 +0,0 
@@ -"use strict"; -//# sourceMappingURL=conflict-handling.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/conflict-handling.d.js.map b/dist/cjs/types/conflict-handling.d.js.map deleted file mode 100644 index e9584db0a01..00000000000 --- a/dist/cjs/types/conflict-handling.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"conflict-handling.d.js","names":[],"sources":["../../../src/types/conflict-handling.d.ts"],"sourcesContent":["import type { WithDeleted } from './rx-storage.d.ts';\n\n/**\n * Notice that the conflict handler input/output\n * does not work on RxDocumentData, but only on WithDeleted.\n * This is because the _meta attributes are meant for the local storing of document data, they not replicated\n * and therefore cannot be used to resolve conflicts.\n */\nexport type RxConflictHandlerInput = {\n assumedMasterState?: WithDeleted;\n realMasterState: WithDeleted;\n newDocumentState: WithDeleted;\n};\n\n/**\n * The conflict handler either returns:\n * - The resolved new document state\n * - A flag to identify the given 'realMasterState' and 'newDocumentState'\n * as being exactly equal, so no conflict has to be resolved.\n */\nexport type RxConflictHandlerOutput = {\n isEqual: false;\n documentData: WithDeleted;\n} | {\n isEqual: true;\n};\n\nexport type RxConflictHandler = (\n i: RxConflictHandlerInput,\n context: string\n) => Promise>;\n\nexport type RxConflictResultionTask = {\n /**\n * Unique id for that single task.\n */\n id: string;\n /**\n * Tasks must have a context\n * which makes it easy to filter/identify them again\n * with plugins or other hacky stuff.\n */\n context: string;\n input: RxConflictHandlerInput;\n};\n\n\nexport type RxConflictResultionTaskSolution = {\n /**\n * Id of the RxConflictResultionTask\n */\n id: string;\n output: RxConflictHandlerOutput;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/couchdb.d.js b/dist/cjs/types/couchdb.d.js deleted file mode 
100644 index 4fa12083f2a..00000000000 --- a/dist/cjs/types/couchdb.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=couchdb.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/couchdb.d.js.map b/dist/cjs/types/couchdb.d.js.map deleted file mode 100644 index 6f8ed63d830..00000000000 --- a/dist/cjs/types/couchdb.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"couchdb.d.js","names":[],"sources":["../../../src/types/couchdb.d.ts"],"sourcesContent":["import type {\n MangoQuery,\n MangoQuerySelector,\n MangoQuerySortPart\n} from './rx-query.d.ts';\nimport type { BulkWriteRow } from './rx-storage.d.ts';\n\n/**\n * This file contains types that are CouchDB specific\n */\n\nexport interface CouchReplicationOptions {\n live?: boolean;\n retry?: boolean;\n filter?: Function;\n doc_ids?: string[];\n query_params?: any;\n view?: any;\n since?: number | 'now';\n heartbeat?: number;\n timeout?: number;\n batch_size?: number;\n batches_limit?: number;\n back_off_function?: Function;\n checkpoint?: false | 'source' | 'target';\n include_docs?: boolean;\n limit?: number;\n}\n\nexport interface CouchChangesOptionsBase {\n include_docs?: boolean;\n conflicts?: boolean;\n attachments?: boolean;\n binary?: boolean;\n descending?: boolean;\n since?: any;\n limit?: number;\n timeout?: any;\n heartbeat?: number | boolean;\n filter?: any;\n doc_ids?: string | string[];\n query_param?: any;\n view?: any;\n return_docs?: boolean;\n batch_size?: number;\n style?: string;\n}\n\nexport interface CouchChangesOptionsLive extends CouchChangesOptionsBase {\n live: true;\n}\n\nexport interface CouchChangesOptionsNonLive extends CouchChangesOptionsBase {\n live: false;\n}\ninterface CouchChangesOnChangeEvent {\n on: (eventName: string, handler: Function) => void;\n off: (eventName: string, handler: Function) => void;\n cancel(): void;\n}\n\nexport type CouchWriteError = {\n /**\n * status code from couchdb\n * 409 for 'conflict'\n */\n status: number;\n 
error: true;\n /**\n * primary key value of the errored document\n */\n id: string;\n};\n\n/**\n * possible couch-settings\n * @link https://couchdb.com/api.html#create_database\n */\nexport interface CouchSettings {\n auto_compaction?: boolean;\n revs_limit?: number;\n ajax?: any;\n fetch?: any;\n auth?: any;\n skip_setup?: boolean;\n storage?: any;\n size?: number;\n location?: string;\n iosDatabaseLocation?: string;\n}\n\n/**\n * options for couch.allDocs()\n * @link https://couchdb.com/api.html#batch_fetch\n */\nexport type CouchAllDocsOptions = {\n include_docs?: boolean;\n conflicts?: boolean;\n attachments?: boolean;\n binary?: boolean;\n startkey?: string;\n endkey?: string;\n inclusive_end?: boolean;\n limit?: number;\n skip?: number;\n descending?: boolean;\n key?: string;\n keys?: string[];\n update_seq?: string;\n\n // undocument but needed\n revs?: boolean;\n deleted?: 'ok';\n};\n\nexport type CouchSyncHandlerEvents = 'change' | 'paused' | 'active' | 'error' | 'complete';\nexport type CouchSyncHandler = {\n on(ev: CouchSyncHandlerEvents, fn: (el: any) => void): void;\n off(ev: CouchSyncHandlerEvents, fn: any): void;\n cancel(): void;\n};\n\nexport type CouchChangeRow = {\n id: string;\n seq: number;\n deleted?: true;\n changes: {\n rev: 'string';\n }[];\n /**\n * only if include_docs === true\n */\n doc?: CouchChangeDoc;\n};\n\nexport type CouchAttachmentMeta = {\n digest: string;\n content_type: string;\n length: number;\n stub: boolean;\n\n /**\n * 'revpos indicates the generation number (numeric prefix in the revID) at which the attachment was last altered'\n * @link https://github.com/couchbase/couchbase-lite-ios/issues/1200#issuecomment-206444554\n */\n revpos: number;\n};\n\nexport type CouchAttachmentWithData = CouchAttachmentMeta & {\n /**\n * Base64 string with the data\n * or directly a buffer\n */\n data: Blob;\n type: string;\n /**\n * If set, must be false\n * because we have the full data and not only a stub.\n */\n stub?: 
false;\n};\n\nexport type CouchChangeDoc = {\n _id: string;\n _rev: string;\n /**\n * True if the document is deleted.\n */\n _deleted?: boolean;\n _attachments: {\n [attachmentId: string]: CouchAttachmentMeta;\n };\n};\n\nexport type WithAttachments = Data & {\n /**\n * Intentional optional,\n * if the document has no attachments,\n * we do NOT have an empty object.\n */\n _attachments?: {\n [attachmentId: string]: CouchAttachmentMeta;\n };\n};\nexport type WithAttachmentsData = Data & {\n /**\n * Intentional optional,\n * if the document has no attachments,\n * we do NOT have an empty object.\n */\n _attachments?: {\n [attachmentId: string]: CouchAttachmentWithData;\n };\n};\n\n\nexport type WithCouchMeta = Data & {\n _rev: string;\n _attachments?: {\n [attachmentId: string]: CouchAttachmentMeta;\n };\n _deleted?: boolean;\n};\n\nexport type CouchdbChangesResult = {\n results: CouchChangeRow[];\n last_seq: number;\n};\n\ndeclare type Debug = {\n enable(what: string): void;\n disable(): void;\n};\n\nexport type CouchDbSorting = (string | string[] | { [k: string]: 'asc' | 'desc' | 1 | -1; })[];\n\n// this is not equal to the standard MangoQuery\n// because of different sorting\nexport type CouchdbQuery = MangoQuery & {\n sort?: CouchDbSorting;\n};\n\nexport type CouchBulkDocResultRow = {\n ok: boolean;\n id: string;\n rev: string;\n\n error?: 'conflict';\n reason?: string;\n};\n\nexport type CouchCheckpoint = {\n sequence: number;\n};\n\nexport type CouchBulkDocOptions = {\n new_edits?: boolean;\n\n // custom options for RxDB\n isDeeper?: boolean;\n custom?: {\n primaryPath: string;\n writeRowById: Map>;\n insertDocsById: Map;\n previousDocsInDb: Map;\n context: string;\n };\n};\n\nexport type CouchMangoQuery = MangoQuery & {\n index: undefined;\n use_index?: string;\n};\n\nexport type ExplainedCouchQuery = {\n dbname: string;\n index: {\n ddoc: string | null;\n name: string; // 'idx-rxdb-index-age,_id'\n type: 'json';\n def: {\n fields: MangoQuerySortPart[];\n 
};\n };\n selector: MangoQuerySelector;\n range: {\n start_key: any[];\n end_key: any[];\n };\n opts: {\n use_index: string[];\n bookmark: string;\n sort: MangoQuerySortPart[];\n conflicts: boolean;\n r: any[];\n };\n skip: number;\n};\n\nexport type CouchAllDocsResponse = {\n offset: number;\n rows: {\n id: string;\n doc: any;\n key: string;\n value: {\n rev: string;\n deleted?: boolean;\n };\n error?: 'not_found' | string;\n }[];\n total_rows: number;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/index.d.js b/dist/cjs/types/index.d.js deleted file mode 100644 index b9338621f65..00000000000 --- a/dist/cjs/types/index.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=index.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/index.d.js.map b/dist/cjs/types/index.d.js.map deleted file mode 100644 index ac00bcdc177..00000000000 --- a/dist/cjs/types/index.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.d.js","names":[],"sources":["../../../src/types/index.d.ts"],"sourcesContent":["export type * from './couchdb.d.ts';\nexport type * from './rx-attachment.d.ts';\nexport type * from './rx-collection.d.ts';\nexport type * from './rx-database.d.ts';\nexport type * from './rx-database-internal-store.d.ts';\nexport type * from './rx-document.d.ts';\nexport type * from './rx-error.d.ts';\nexport type * from './rx-plugin.d.ts';\nexport type * from './rx-query.d.ts';\nexport type * from './rx-schema.d.ts';\nexport type * from './rx-storage.d.ts';\nexport type * from './rx-storage.interface.d.ts';\nexport type * from './replication-protocol.d.ts';\nexport type * from './conflict-handling.d.ts';\nexport type * from './rx-change-event.d.ts';\nexport type * from './query-planner.d.ts';\nexport type * from './util.d.ts';\n\n// plugins\nexport type * from './plugins/replication.d.ts';\nexport type * from './plugins/replication-graphql.d.ts';\nexport type * from 
'./plugins/replication.d.ts';\nexport type * from './plugins/local-documents.d.ts';\nexport type * from './plugins/migration.d.ts';\nexport type * from './plugins/backup.d.ts';\nexport type * from './plugins/cleanup.d.ts';\nexport type * from './plugins/lokijs.d.ts';\nexport type * from './plugins/dexie.d.ts';\nexport type * from './plugins/reactivity.d.ts';\nexport type * from './plugins/update.d.ts';\nexport type * from './plugins/crdt.d.ts';\nexport type * from './plugins/state.d.ts';\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/modules/index.d.js b/dist/cjs/types/modules/index.d.js deleted file mode 100644 index b9338621f65..00000000000 --- a/dist/cjs/types/modules/index.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=index.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/modules/index.d.js.map b/dist/cjs/types/modules/index.d.js.map deleted file mode 100644 index 320b12e6029..00000000000 --- a/dist/cjs/types/modules/index.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.d.js","names":[],"sources":["../../../../src/types/modules/index.d.ts"],"sourcesContent":[""],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/modules/lokijs.d.js b/dist/cjs/types/modules/lokijs.d.js deleted file mode 100644 index 89274c6e6ca..00000000000 --- a/dist/cjs/types/modules/lokijs.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=lokijs.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/modules/lokijs.d.js.map b/dist/cjs/types/modules/lokijs.d.js.map deleted file mode 100644 index e598510d6fd..00000000000 --- a/dist/cjs/types/modules/lokijs.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"lokijs.d.js","names":[],"sources":["../../../../src/types/modules/lokijs.d.ts"],"sourcesContent":["declare module 'lokijs';\ndeclare module 'lokijs/src/loki-fs-structured-adapter.js';\ndeclare module 
'lokijs/src/incremental-indexeddb-adapter.js';\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/modules/mocha.parallel.d.js b/dist/cjs/types/modules/mocha.parallel.d.js deleted file mode 100644 index cf25a3022a6..00000000000 --- a/dist/cjs/types/modules/mocha.parallel.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=mocha.parallel.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/modules/mocha.parallel.d.js.map b/dist/cjs/types/modules/mocha.parallel.d.js.map deleted file mode 100644 index ea5b59352bf..00000000000 --- a/dist/cjs/types/modules/mocha.parallel.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"mocha.parallel.d.js","names":[],"sources":["../../../../src/types/modules/mocha.parallel.d.ts"],"sourcesContent":["declare module 'mocha.parallel';\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/plugins/backup.d.js b/dist/cjs/types/plugins/backup.d.js deleted file mode 100644 index e53f8a0dd19..00000000000 --- a/dist/cjs/types/plugins/backup.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=backup.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/plugins/backup.d.js.map b/dist/cjs/types/plugins/backup.d.js.map deleted file mode 100644 index 5e851955db4..00000000000 --- a/dist/cjs/types/plugins/backup.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"backup.d.js","names":[],"sources":["../../../../src/types/plugins/backup.d.ts"],"sourcesContent":["export type BackupOptions = {\n live: boolean;\n directory: string;\n /**\n * If true,\n * attachments will also be saved\n */\n attachments?: boolean;\n /**\n * How many documents can be processed in one batch\n * [default=10]\n */\n batchSize?: number;\n /**\n * If not set, all collections will be backed up.\n */\n collections?: string[];\n};\n\nexport type BackupMetaFileContent = {\n createdAt: number;\n updatedAt: number;\n collectionStates: 
{\n [collectionName: string]: {\n checkpoint?: any;\n };\n };\n};\n\nexport type RxBackupWriteEvent = {\n collectionName: string;\n documentId: string;\n files: string[];\n deleted: boolean;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/plugins/cleanup.d.js b/dist/cjs/types/plugins/cleanup.d.js deleted file mode 100644 index e7d1cb2aaf3..00000000000 --- a/dist/cjs/types/plugins/cleanup.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=cleanup.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/plugins/cleanup.d.js.map b/dist/cjs/types/plugins/cleanup.d.js.map deleted file mode 100644 index 075c7b55ff3..00000000000 --- a/dist/cjs/types/plugins/cleanup.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"cleanup.d.js","names":[],"sources":["../../../../src/types/plugins/cleanup.d.ts"],"sourcesContent":["export type RxCleanupPolicy = {\n /**\n * The minimum time in milliseconds\n * of how long a document must have been deleted\n * until it is purged by the cleanup.\n * This should be higher then the time you expect\n * your user to be offline for.\n * If this is too low, deleted documents might not\n * replicate their deletion state.\n */\n minimumDeletedTime: number;\n /**\n * The minimum amount of that that the RxCollection must have existed.\n * This ensures that at the initial page load, more important\n * tasks are not slowed down because a cleanup process is running.\n */\n minimumCollectionAge: number;\n /**\n * After the initial cleanup is done,\n * a new cleanup is started after [runEach] milliseconds\n */\n runEach: number;\n /**\n * If set to true,\n * RxDB will await all running replications\n * to not have a replication cycle running.\n * This ensures we do not remove deleted documents\n * when they might not have already been replicated.\n */\n awaitReplicationsInSync: boolean;\n /**\n * If true, it will only start the cleanup\n * when the current instance is also 
the leader.\n * This ensures that when RxDB is used in multiInstance mode,\n * only one instance will start the cleanup.\n */\n waitForLeadership: boolean;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/plugins/crdt.d.js b/dist/cjs/types/plugins/crdt.d.js deleted file mode 100644 index 791d9d2b73a..00000000000 --- a/dist/cjs/types/plugins/crdt.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=crdt.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/plugins/crdt.d.js.map b/dist/cjs/types/plugins/crdt.d.js.map deleted file mode 100644 index 4a539147445..00000000000 --- a/dist/cjs/types/plugins/crdt.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"crdt.d.js","names":[],"sources":["../../../../src/types/plugins/crdt.d.ts"],"sourcesContent":["import type { MangoQuerySelector } from '../rx-query.d.ts';\nimport type { StringKeys } from '../util.d.ts';\nimport type { UpdateQuery } from './update.d.ts';\n\n\nexport type CRDTEntry = {\n selector?: MangoQuerySelector;\n ifMatch?: UpdateQuery;\n ifNotMatch?: UpdateQuery;\n};\n\n/**\n * Options for the crdt plugin.\n * We set these in the schema because changing them\n * is not possible on the fly because it would\n * destroy the document state in an unpredictable way.\n */\nexport type CRDTSchemaOptions = {\n /**\n * Determines which field of the document must be used\n * to store the crdt operations.\n * The given field must exist with the content of \"CRDT_FIELD_SCHEMA\" in the\n * properties part of your schema.\n */\n field: StringKeys | string;\n\n /**\n * After BOTH of the limits\n * maxOperations/maxTTL is reached,\n * the document will clean up the stored operations\n * and merged them together to ensure\n * that not too many operations are stored which could slow down the\n * database operations.\n */\n // TODO not implemented yet, make a pull request if you need that.\n // maxOperations: number;\n // maxTTL: 
number;\n};\n\n\nexport type CRDTOperation = {\n body: CRDTEntry[];\n /**\n * A string to uniquely represent the creator\n * of this operation.\n * Mostly you would use the RxDatabase().storageToken().\n */\n creator: string;\n\n /**\n * Unix time in milliseconds\n * that determines when the operation was created.\n * Used to properly clean up old operations.\n */\n time: number;\n};\n\n\nexport type CRDTDocumentField = {\n /**\n * An array with arrays of CRDT operations.\n * The index of the top level array is equal\n * to the revision height where the operations\n * belong to.\n * Sorted by revision height ascending.\n * If we have a conflict and we need a rebuild,\n * the operations will be run in the revision height\n * sort order to make everything deterministic.\n */\n operations: CRDTOperation[][];\n\n /**\n * A hash to uniquely define the whole operations state.\n */\n hash: string;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/plugins/dexie.d.js b/dist/cjs/types/plugins/dexie.d.js deleted file mode 100644 index 038e7388c2e..00000000000 --- a/dist/cjs/types/plugins/dexie.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=dexie.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/plugins/dexie.d.js.map b/dist/cjs/types/plugins/dexie.d.js.map deleted file mode 100644 index 14bfc0ee657..00000000000 --- a/dist/cjs/types/plugins/dexie.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"dexie.d.js","names":[],"sources":["../../../../src/types/plugins/dexie.d.ts"],"sourcesContent":["import type {\n Dexie,\n DexieOptions,\n Table as DexieTable\n} from 'dexie';\n\nexport type DexieSettings = DexieOptions;\n\n/**\n * The internals is a Promise that resolves\n * when the database has fully opened\n * and Dexie.on.ready was called\n * @link https://dexie.org/docs/Dexie/Dexie.on.ready\n *\n */\nexport type DexieStorageInternals = Promise<{\n dexieDb: Dexie;\n /**\n * Contains all 
normal documents. Deleted ones and non-deleted ones.\n */\n dexieTable: DexieTable;\n // contains the attachments data\n dexieAttachmentsTable: DexieTable;\n\n // these must be transformed because indexeddb does not allow boolean indexing\n booleanIndexes: string[];\n}>;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/plugins/local-documents.d.js b/dist/cjs/types/plugins/local-documents.d.js deleted file mode 100644 index caa00a0ad6f..00000000000 --- a/dist/cjs/types/plugins/local-documents.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=local-documents.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/plugins/local-documents.d.js.map b/dist/cjs/types/plugins/local-documents.d.js.map deleted file mode 100644 index 1dafe553ff7..00000000000 --- a/dist/cjs/types/plugins/local-documents.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"local-documents.d.js","names":[],"sources":["../../../../src/types/plugins/local-documents.d.ts"],"sourcesContent":["import type { Observable } from 'rxjs';\nimport type { DocumentCache } from '../../doc-cache.d.ts';\nimport type { IncrementalWriteQueue } from '../../incremental-write.d.ts';\nimport type { RxCollection } from '../rx-collection.d.ts';\nimport type { RxDatabase } from '../rx-database.d.ts';\nimport type { RxDocumentBase } from '../rx-document.d.ts';\nimport type { RxStorageInstance } from '../rx-storage.interface.d.ts';\nimport type { Override } from '../util.d.ts';\n\nexport type LocalDocumentParent = RxDatabase | RxCollection;\nexport type LocalDocumentState = {\n database: RxDatabase;\n parent: LocalDocumentParent;\n storageInstance: RxStorageInstance;\n docCache: DocumentCache;\n incrementalWriteQueue: IncrementalWriteQueue;\n};\nexport type RxLocalDocumentData<\n Data = {\n // local documents are schemaless and contain any data\n [key: string]: any;\n }\n> = {\n id: string;\n data: Data;\n};\n\ndeclare type 
LocalDocumentModifyFunction = (\n doc: Data,\n rxLocalDocument: RxLocalDocument\n) => Data | Promise;\n\n\nexport declare type RxLocalDocument = Override<\n RxDocumentBase, {}, Reactivity>,\n {\n readonly parent: Parent;\n isLocal(): true;\n\n /**\n * Because local documents store their relevant data inside of the 'data' property,\n * the incremental mutation methods are changed a bit to only allow to change parts of the data property.\n */\n incrementalModify(mutationFunction: LocalDocumentModifyFunction): Promise>;\n incrementalPatch(patch: Partial): Promise>;\n\n $: Observable>;\n }\n>;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/plugins/lokijs.d.js b/dist/cjs/types/plugins/lokijs.d.js deleted file mode 100644 index 89274c6e6ca..00000000000 --- a/dist/cjs/types/plugins/lokijs.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=lokijs.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/plugins/lokijs.d.js.map b/dist/cjs/types/plugins/lokijs.d.js.map deleted file mode 100644 index 0279bb5299d..00000000000 --- a/dist/cjs/types/plugins/lokijs.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"lokijs.d.js","names":[],"sources":["../../../../src/types/plugins/lokijs.d.ts"],"sourcesContent":["import type { LeaderElector } from 'broadcast-channel';\nimport type { AddReturn } from 'unload';\nimport type { LokiSaveQueue } from '../../plugins/storage-lokijs/loki-save-queue.ts';\n\nexport type LokiDatabaseSettings = any;\n\nexport type LokiCollectionSettings = Partial;\n\nexport type LokiSettings = {\n database?: LokiDatabaseSettings;\n collection?: LokiCollectionSettings;\n};\n\nexport type LokiStorageInternals = {\n leaderElector?: LeaderElector;\n localState?: Promise;\n};\n\nexport type LokiRemoteRequestBroadcastMessage = {\n response: false;\n type: string;\n databaseName: string;\n collectionName: string;\n operation: string;\n params: any[];\n requestId: string;\n};\n\nexport type 
LokiRemoteResponseBroadcastMessage = {\n response: true;\n type: string;\n databaseName: string;\n collectionName: string;\n requestId: string;\n result: any | any[];\n // if true, the result property will contain an error state\n isError: boolean;\n};\n\nexport type LokiDatabaseState = {\n database: any;\n databaseSettings: LokiDatabaseSettings;\n saveQueue: LokiSaveQueue;\n\n // all known collections of the database\n collections: {\n [collectionName: string]: any;\n };\n\n /**\n * Registered unload handlers\n * so we can remove them on close.\n */\n unloads: AddReturn[];\n};\n\nexport type LokiLocalDatabaseState = {\n databaseState: LokiDatabaseState;\n collection: any;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/plugins/migration.d.js b/dist/cjs/types/plugins/migration.d.js deleted file mode 100644 index 99c30134081..00000000000 --- a/dist/cjs/types/plugins/migration.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=migration.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/plugins/migration.d.js.map b/dist/cjs/types/plugins/migration.d.js.map deleted file mode 100644 index b7e6d0d567d..00000000000 --- a/dist/cjs/types/plugins/migration.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"migration.d.js","names":[],"sources":["../../../../src/types/plugins/migration.d.ts"],"sourcesContent":["import type {\n WithAttachments\n} from '../couchdb.d.ts';\nimport type { RxCollection } from '../rx-collection.d.ts';\nimport type { MaybePromise } from '../util.d.ts';\n\nexport type MigrationStrategy = (\n oldDocumentData: WithAttachments,\n collection: RxCollection\n) => MaybePromise | null>;\n\nexport type MigrationStrategies = {\n [toVersion: number]: MigrationStrategy;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/plugins/reactivity.d.js b/dist/cjs/types/plugins/reactivity.d.js deleted file mode 100644 index 
1c4cda18e7f..00000000000 --- a/dist/cjs/types/plugins/reactivity.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=reactivity.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/plugins/reactivity.d.js.map b/dist/cjs/types/plugins/reactivity.d.js.map deleted file mode 100644 index 59ade115252..00000000000 --- a/dist/cjs/types/plugins/reactivity.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"reactivity.d.js","names":[],"sources":["../../../../src/types/plugins/reactivity.d.ts"],"sourcesContent":["import type { Observable } from 'rxjs';\nimport type { RxDatabase } from '../rx-database';\n\nexport interface RxReactivityFactory {\n fromObservable(\n obs: Observable,\n initialValue: InitData,\n rxDatabase: RxDatabase\n ): Reactivity; // TODO must use generic data like Reactivity\n}\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/plugins/replication-graphql.d.js b/dist/cjs/types/plugins/replication-graphql.d.js deleted file mode 100644 index 855459a6ceb..00000000000 --- a/dist/cjs/types/plugins/replication-graphql.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=replication-graphql.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/plugins/replication-graphql.d.js.map b/dist/cjs/types/plugins/replication-graphql.d.js.map deleted file mode 100644 index e4cdcd0c707..00000000000 --- a/dist/cjs/types/plugins/replication-graphql.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"replication-graphql.d.js","names":[],"sources":["../../../../src/types/plugins/replication-graphql.d.ts"],"sourcesContent":["import { RxReplicationWriteToMasterRow } from '../replication-protocol.ts';\nimport { ById, MaybePromise } from '../util.ts';\nimport {\n ReplicationOptions,\n ReplicationPullHandlerResult,\n ReplicationPullOptions,\n ReplicationPushHandlerResult,\n ReplicationPushOptions\n} from './replication.ts';\n\nexport interface 
RxGraphQLReplicationQueryBuilderResponseObject {\n query: string;\n operationName?: string;\n variables: any;\n}\n\nexport type RxGraphQLReplicationClientState = {\n headers: ById;\n credentials: RequestCredentials | undefined;\n};\n\nexport type RxGraphQLReplicationQueryBuilderResponse =\n RxGraphQLReplicationQueryBuilderResponseObject |\n Promise;\nexport type RxGraphQLReplicationPushQueryBuilder = (\n // typed 'any' because the data might be modified by the push.modifier.\n rows: RxReplicationWriteToMasterRow[]\n) => RxGraphQLReplicationQueryBuilderResponse;\n\n\nexport type RxGraphQLReplicationPullQueryBuilder = (\n latestPulledCheckpoint: CheckpointType | undefined,\n limit: number\n) => RxGraphQLReplicationQueryBuilderResponse;\nexport type GraphQLSyncPullOptions = Omit<\n ReplicationPullOptions,\n 'handler' | 'stream$'\n> & {\n queryBuilder: RxGraphQLReplicationPullQueryBuilder;\n streamQueryBuilder?: RxGraphQLReplicationPullStreamQueryBuilder;\n dataPath?: string;\n responseModifier?: RxGraphQLPullResponseModifier;\n includeWsHeaders?: boolean;\n};\n\nexport type RxGraphQLPullResponseModifier = (\n // the exact response that was returned from the server\n plainResponse: ReplicationPullHandlerResult | any,\n // either 'handler' if it came from the pull.handler, or 'stream' if it came from the pull.stream\n origin: 'handler' | 'stream',\n requestCheckpoint?: CheckpointType\n) => MaybePromise>;\n\nexport type RxGraphQLPushResponseModifier = (\n // the exact response that was returned from the server\n plainResponse: ReplicationPushHandlerResult | any,\n) => MaybePromise>;\n\nexport type RxGraphQLReplicationPullStreamQueryBuilder = (headers: { [k: string]: string; }) => RxGraphQLReplicationQueryBuilderResponse;\n\nexport type GraphQLSyncPushOptions = Omit<\n ReplicationPushOptions,\n 'handler'\n> & {\n queryBuilder: RxGraphQLReplicationPushQueryBuilder;\n dataPath?: string;\n responseModifier?: RxGraphQLPushResponseModifier;\n};\n\nexport type GraphQLServerUrl 
= {\n http?: string;\n ws?: string;\n};\n\nexport type SyncOptionsGraphQL = Omit<\n ReplicationOptions,\n 'pull' | 'push'\n> & {\n url: GraphQLServerUrl;\n fetch?: WindowOrWorkerGlobalScope['fetch'];\n headers?: { [k: string]: string; }; // send with all requests to the endpoint\n credentials?: RequestCredentials;\n pull?: GraphQLSyncPullOptions;\n push?: GraphQLSyncPushOptions;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/plugins/replication.d.js b/dist/cjs/types/plugins/replication.d.js deleted file mode 100644 index d84a68aee14..00000000000 --- a/dist/cjs/types/plugins/replication.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=replication.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/plugins/replication.d.js.map b/dist/cjs/types/plugins/replication.d.js.map deleted file mode 100644 index ab7122cbc8f..00000000000 --- a/dist/cjs/types/plugins/replication.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"replication.d.js","names":[],"sources":["../../../../src/types/plugins/replication.d.ts"],"sourcesContent":["import { Observable } from 'rxjs';\nimport type {\n InternalStoreDocType,\n MaybePromise,\n RxCollection,\n RxDocumentData,\n RxReplicationPullStreamItem,\n RxReplicationWriteToMasterRow,\n WithDeleted\n} from '../../types/index.d.ts';\n\n\nexport type InternalStoreReplicationPushDocType = InternalStoreDocType<{\n checkpoint: any;\n}>;\nexport type InternalStoreReplicationPullDocType = InternalStoreDocType<{\n lastPulledDoc: RxDocumentData;\n}>;\n\nexport type ReplicationPullHandlerResult = {\n checkpoint: CheckpointType | null;\n documents: WithDeleted[];\n};\n\nexport type ReplicationPushHandlerResult = RxDocType[];\n\nexport type ReplicationPullHandler = (\n lastPulledCheckpoint: CheckpointType | undefined,\n batchSize: number\n) => Promise>;\nexport type ReplicationPullOptions = {\n /**\n * A handler that pulls the new remote changes\n * from the remote 
actor.\n */\n handler: ReplicationPullHandler;\n\n\n /**\n * An observable that streams all document changes\n * that are happening on the backend.\n * Emits an document bulk together with the latest checkpoint of these documents.\n * Also can emit a 'RESYNC' event when the client was offline and is online again.\n *\n * Not required for non-live replication.\n */\n stream$?: Observable>;\n\n /**\n * Amount of documents that the remote will send in one request.\n * If the response contains less then [batchSize] documents,\n * RxDB will assume there are no more changes on the backend\n * that are not replicated.\n * [default=100]\n */\n batchSize?: number;\n\n /**\n * A modifier that runs on all documents that are pulled,\n * before they are used by RxDB.\n * - the ones from the pull handler\n * - the ones from the pull stream\n */\n modifier?: (docData: any) => MaybePromise>;\n\n /**\n * If set, the push replication\n * will start from the given checkpoint.\n */\n initialCheckpoint?: any;\n};\n\n/**\n * Gets the new write rows.\n * Returns the current master state of all conflicting writes,\n * so that they can be resolved on the client.\n */\nexport type ReplicationPushHandler = (\n docs: RxReplicationWriteToMasterRow[]\n) => Promise[]>;\nexport type ReplicationPushOptions = {\n /**\n * A handler that sends the new local changes\n * to the remote actor.\n * On error, all documents are send again at later time.\n */\n handler: ReplicationPushHandler;\n\n\n /**\n * A modifier that runs on all pushed documents before\n * they are send into the push handler.\n */\n modifier?: (docData: WithDeleted) => MaybePromise;\n\n /**\n * How many local changes to process at once.\n */\n batchSize?: number;\n\n /**\n * If set, the push replication\n * will start from the given checkpoint.\n */\n initialCheckpoint?: any;\n};\n\n\nexport type ReplicationOptions = {\n /**\n * An id for the replication to identify it\n * and so that RxDB is able to resume the replication on app 
reload.\n * If you replicate with a remote server, it is recommended to put the\n * server url into the replicationIdentifier.\n * Like 'my-rest-replication-to-https://example.com/api/sync'\n */\n replicationIdentifier: string;\n collection: RxCollection;\n /**\n * Define a custom property that is used\n * to flag a document as being deleted.\n * @default '_deleted'\n */\n deletedField?: '_deleted' | string;\n pull?: ReplicationPullOptions;\n push?: ReplicationPushOptions;\n /**\n * By default it will do an ongoing realtime replication.\n * By settings live: false the replication will run once until the local state\n * is in sync with the remote state, then it will cancel itself.\n * @default true\n */\n live?: boolean;\n /**\n * Time in milliseconds after when a failed backend request\n * has to be retried.\n * This time will be skipped if a offline->online switch is detected\n * via `navigator.onLine`\n * @default 5000\n */\n retryTime?: number;\n /**\n * When multiInstance is `true`, like when you use RxDB in multiple browser tabs,\n * the replication should always run in only one of the open browser tabs.\n * If waitForLeadership is `true`, it will wait until the current instance is leader.\n * If waitForLeadership is `false`, it will start replicating, even if it is not leader.\n * @default true\n */\n waitForLeadership?: boolean;\n /**\n * If this is set to `false`,\n * the replication will not start automatically\n * but will wait for `replicationState.start()` being called.\n * @default true\n */\n autoStart?: boolean;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/plugins/state.d.js b/dist/cjs/types/plugins/state.d.js deleted file mode 100644 index 11735c0e0fb..00000000000 --- a/dist/cjs/types/plugins/state.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=state.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/plugins/state.d.js.map b/dist/cjs/types/plugins/state.d.js.map 
deleted file mode 100644 index 52fb331215b..00000000000 --- a/dist/cjs/types/plugins/state.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"state.d.js","names":[],"sources":["../../../../src/types/plugins/state.d.ts"],"sourcesContent":["import type { RxStateBase } from '../../plugins/state/rx-state';\nimport type { ExtendObservables, ExtendReactivity } from '../rx-document';\n\nexport type RxState = RxStateBase & T & ExtendObservables> & ExtendReactivity, Reactivity>;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/plugins/update.d.js b/dist/cjs/types/plugins/update.d.js deleted file mode 100644 index 3619fd94266..00000000000 --- a/dist/cjs/types/plugins/update.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=update.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/plugins/update.d.js.map b/dist/cjs/types/plugins/update.d.js.map deleted file mode 100644 index 3551135b2cc..00000000000 --- a/dist/cjs/types/plugins/update.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"update.d.js","names":[],"sources":["../../../../src/types/plugins/update.d.ts"],"sourcesContent":["import type { AnyKeys, AnyObject } from '../util.d.ts';\n\n// import type {\n// UpdateExpression\n// } from 'mingo/updater';\n\n/**\n * We use an own type here, copied from mongoose\n * @link https://github.com/Automattic/mongoose/blob/eb292d2c4cc98ee315f118d6199a83938f06d901/types/index.d.ts#L466\n * TODO when mingo implements a schema-based type for UpdateExpression, we can use that one.\n */\nexport type UpdateQuery = {\n $min?: AnyKeys & AnyObject;\n $max?: AnyKeys & AnyObject;\n $inc?: AnyKeys & AnyObject;\n $set?: AnyKeys & AnyObject;\n $unset?: AnyKeys & AnyObject;\n $push?: AnyKeys & AnyObject;\n $addToSet?: AnyKeys & AnyObject;\n $pop?: AnyKeys & AnyObject;\n $pullAll?: AnyKeys & AnyObject;\n $rename?: Record;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git 
a/dist/cjs/types/query-planner.d.js b/dist/cjs/types/query-planner.d.js deleted file mode 100644 index a50ab278541..00000000000 --- a/dist/cjs/types/query-planner.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=query-planner.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/query-planner.d.js.map b/dist/cjs/types/query-planner.d.js.map deleted file mode 100644 index 7dd18f0ecfe..00000000000 --- a/dist/cjs/types/query-planner.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"query-planner.d.js","names":[],"sources":["../../../src/types/query-planner.d.ts"],"sourcesContent":["export type RxQueryPlanKey = string | number | undefined;\n\nexport type RxQueryPlanerOpts = {\n startKey: RxQueryPlanKey;\n endKey: RxQueryPlanKey;\n /**\n * True if the first matching document\n * must also be included into the result set.\n */\n inclusiveStart: boolean;\n /**\n * True if the last matching document\n * must also be included into the result set.\n */\n inclusiveEnd: boolean;\n};\n\nexport type RxQueryPlan = {\n index: string[];\n /**\n * If the index does not match the sort params,\n * we have to resort the query results manually\n * after fetching them from the index.\n */\n sortSatisfiedByIndex: boolean;\n\n /**\n * If the whole selector matching is satisfied\n * by the index, we do not have to run a does-document-data-match-query\n * stuff.\n */\n selectorSatisfiedByIndex: boolean;\n\n /**\n * TODO add a flag that determines\n * if we have to run the selector matching on all results\n * or if the used index anyway matches ALL operators.\n */\n\n startKeys: RxQueryPlanKey[];\n endKeys: RxQueryPlanKey[];\n /**\n * True if the first matching document\n * must also be included into the result set.\n */\n inclusiveStart: boolean;\n /**\n * True if the last matching document\n * must also be included into the result set.\n */\n inclusiveEnd: boolean;\n\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git 
a/dist/cjs/types/replication-protocol.d.js b/dist/cjs/types/replication-protocol.d.js deleted file mode 100644 index 4e0f2ca614d..00000000000 --- a/dist/cjs/types/replication-protocol.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=replication-protocol.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/replication-protocol.d.js.map b/dist/cjs/types/replication-protocol.d.js.map deleted file mode 100644 index 431820ecb37..00000000000 --- a/dist/cjs/types/replication-protocol.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"replication-protocol.d.js","names":[],"sources":["../../../src/types/replication-protocol.d.ts"],"sourcesContent":["import { BehaviorSubject, Observable, Subject } from 'rxjs';\nimport type {\n RxConflictHandler,\n RxConflictHandlerInput,\n RxConflictHandlerOutput\n} from './conflict-handling.d.ts';\nimport type { RxError, RxTypeError } from './rx-error.d.ts';\nimport type {\n BulkWriteRow,\n RxDocumentData,\n WithDeleted,\n WithDeletedAndAttachments\n} from './rx-storage.d.ts';\nimport type {\n RxStorageInstance\n} from './rx-storage.interface.d.ts';\nimport type { HashFunction } from './util.d.ts';\n\nexport type RxStorageReplicationMeta = {\n\n /**\n * Combined primary key consisting\n * of: [replicationId, itemId, isCheckpoint]\n * so that the same RxStorageInstance\n * can be used for multiple replication states.\n */\n id: string;\n\n /**\n * Either the document primaryKey\n * or the id of the replication checkpoint.\n */\n itemId: string;\n\n /**\n * True if the doc data is about a checkpoint,\n * False if it is about a document state from the master.\n * Stored as a string so it can be used\n * in the combined primary key 'id'\n */\n isCheckpoint: '0' | '1';\n checkpointData?: CheckpointType;\n\n /**\n * the document state of the master\n * only set if not checkpoint.\n */\n docData?: RxDocType | RxDocumentData | any;\n /**\n * If the current assumed master was written while\n * resolving a 
conflict, this field contains\n * the revision of the conflict-solution that\n * is stored in the forkInstance.\n */\n isResolvedConflict?: string;\n};\n\nexport type RxReplicationWriteToMasterRow = {\n assumedMasterState?: WithDeletedAndAttachments;\n newDocumentState: WithDeletedAndAttachments;\n};\n\n\nexport type DocumentsWithCheckpoint = {\n documents: WithDeletedAndAttachments[];\n checkpoint: CheckpointType;\n};\n\n\nexport type RxReplicationPullStreamItem = DocumentsWithCheckpoint |\n /**\n * Emit this when the masterChangeStream$ might have missed out\n * some events because the fork lost the connection to the master.\n * Like when the user went offline and reconnects.\n */\n 'RESYNC';\n\n/**\n * The replication handler contains all logic\n * that is required by the replication protocol\n * to interact with the master instance.\n * This is an abstraction so that we can use different\n * handlers for GraphQL, REST or any other transportation layer.\n * Even a RxStorageInstance can be wrapped in a way to represent a replication handler.\n *\n * The RxStorage instance of the master branch that is\n * replicated with the fork branch.\n * The replication algorithm is made to make\n * as less writes on the master as possible.\n * The master instance is always 'the truth' which\n * does never contain conflicting document states.\n * All conflicts are handled on the fork branch\n * before being replicated to the master.\n */\nexport type RxReplicationHandler = {\n masterChangeStream$: Observable>;\n masterChangesSince(\n checkpoint: MasterCheckpointType,\n batchSize: number\n ): Promise>;\n /**\n * Writes the fork changes to the master.\n * Only returns the conflicts if there are any.\n * (otherwise returns an empty array.)\n */\n masterWrite(\n rows: RxReplicationWriteToMasterRow[]\n ): Promise[]>;\n};\n\nexport type RxStorageInstanceReplicationInput = {\n /**\n * A string that uniquely identifies\n * the replication.\n * Ensures that checkpoint are not\n * mixed 
with other replications.\n */\n identifier: string;\n pullBatchSize: number;\n pushBatchSize: number;\n replicationHandler: RxReplicationHandler;\n conflictHandler: RxConflictHandler;\n\n // can be set to also replicate the _meta field of the document.\n keepMeta?: boolean;\n\n /**\n * The fork is the one that contains the forked chain of document writes.\n * All conflicts are solved on the fork and only resolved correct document data\n * is written back to the parent.\n */\n forkInstance: RxStorageInstance;\n\n /**\n * The replication needs to store some meta data\n * for documents to know which state is at the master\n * and how/if it diverges from the fork.\n * In the past this was stored in the _meta field of\n * the forkInstance documents but that was not a good design decision\n * because it required additional writes on the forkInstance\n * to know which documents have been upstream replicated\n * to not cause conflicts.\n * Using the metaInstance instead leads to better overall performance\n * because RxDB will not re-emit query results or document state\n * when replication meta data is written.\n *\n * In addition to per-document meta data,\n * the replication checkpoints are also stored in this instance.\n *\n */\n metaInstance: RxStorageInstance, any, any>;\n\n /**\n * When a write happens to the fork,\n * normally the replication will directly try to persist.\n *\n * For many use cases, it is better to await the next event loop tick\n * or to wait until the RxDatabase is idle or requestIdleCallback() calls\n * to ensure the CPU is idle.\n * This can improve performance because the persistence will not affect UI\n * renders.\n *\n * But: The longer you wait here, the higher is the risk of losing fork\n * writes when the replication is destroyed unexpected.\n */\n waitBeforePersist?: () => Promise;\n\n hashFunction: HashFunction;\n\n initialCheckpoint?: {\n upstream?: any;\n downstream?: any;\n };\n};\n\nexport type RxStorageInstanceReplicationState = 
{\n // store the primaryPath here for better reuse and performance.\n primaryPath: string;\n hasAttachments: boolean;\n input: RxStorageInstanceReplicationInput;\n\n events: {\n /**\n * Streams all document writes that have successfully\n * been written in one direction.\n */\n processed: {\n up: Subject>;\n down: Subject>;\n };\n resolvedConflicts: Subject<{\n input: RxConflictHandlerInput;\n output: RxConflictHandlerOutput;\n }>;\n /**\n * Contains the cancel state.\n * Emit true here to cancel the replication.\n */\n canceled: BehaviorSubject;\n /**\n * Contains true if the replication is doing something\n * at this point in time.\n * If this is false, it means that the replication\n * is idle AND in sync.\n */\n active: {\n [direction in RxStorageReplicationDirection]: BehaviorSubject;\n };\n /**\n * All errors that would otherwise be unhandled,\n * get emitted here.\n */\n error: Subject;\n };\n\n\n /**\n * Contains counters that can be used in tests\n * or to debug problems.\n */\n stats: {\n down: {\n addNewTask: number;\n downstreamResyncOnce: number;\n downstreamProcessChanges: number;\n masterChangeStreamEmit: number;\n persistFromMaster: number;\n };\n up: {\n upstreamInitialSync: number;\n forkChangeStreamEmit: number;\n processTasks: number;\n persistToMaster: number;\n persistToMasterHadConflicts: number;\n persistToMasterConflictWrites: number;\n };\n };\n\n /**\n * Used in checkpoints and ._meta fields\n * to ensure we do not mix up meta data of\n * different replications.\n * We have to use the promise because the key is hashed which runs async.\n */\n checkpointKey: Promise;\n\n /**\n * Storage.bulkWrites() that are initialized from the\n * downstream, get this flag as context-param\n * so that the emitted event bulk can be identified\n * to be sourced from the downstream and it will not try\n * to upstream these documents again.\n */\n downstreamBulkWriteFlag: Promise;\n\n /**\n * Tracks if the streams have been in sync\n * for at least one 
time.\n */\n firstSyncDone: {\n [direction in RxStorageReplicationDirection]: BehaviorSubject;\n };\n\n /**\n * Can be used to detect if the replication is doing something\n * or if it is in an idle state.\n */\n streamQueue: {\n [direction in RxStorageReplicationDirection]: Promise;\n };\n\n checkpointQueue: Promise;\n\n /**\n * For better performance we store the last known checkpoint\n * document so that we can likely do checkpoint storing without\n * conflicts.\n */\n lastCheckpointDoc: {\n [direction in RxStorageReplicationDirection]?: RxDocumentData>;\n };\n};\n\nexport type RxStorageReplicationDirection = 'up' | 'down';\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/rx-attachment.d.js b/dist/cjs/types/rx-attachment.d.js deleted file mode 100644 index dd40d1e4e47..00000000000 --- a/dist/cjs/types/rx-attachment.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=rx-attachment.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/rx-attachment.d.js.map b/dist/cjs/types/rx-attachment.d.js.map deleted file mode 100644 index a1e235e40c0..00000000000 --- a/dist/cjs/types/rx-attachment.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-attachment.d.js","names":[],"sources":["../../../src/types/rx-attachment.d.ts"],"sourcesContent":["import type {\n RxDocument\n} from './rx-document.d.ts';\n\ndeclare type Buffer = any;\n\nexport type RxAttachmentCreator = {\n id: string;\n /**\n * Content type like 'plain/text'\n */\n type: string;\n /**\n * The data of the attachment.\n */\n data: Blob;\n};\n\nexport declare class RxAttachment {\n readonly doc: RxDocument;\n readonly id: string;\n readonly type: string;\n readonly length: number;\n readonly digest: string;\n readonly rev: string;\n\n remove(): Promise;\n getData(): Promise;\n getStringData(): Promise;\n}\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/rx-change-event.d.js 
b/dist/cjs/types/rx-change-event.d.js deleted file mode 100644 index 16f602e378a..00000000000 --- a/dist/cjs/types/rx-change-event.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=rx-change-event.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/rx-change-event.d.js.map b/dist/cjs/types/rx-change-event.d.js.map deleted file mode 100644 index 43dfd1a2e63..00000000000 --- a/dist/cjs/types/rx-change-event.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-change-event.d.js","names":[],"sources":["../../../src/types/rx-change-event.d.ts"],"sourcesContent":["import type {\n EventBulk,\n RxDocumentData\n} from './rx-storage.d.ts';\n\n\nexport type RxChangeEventBase = {\n operation: 'INSERT' | 'UPDATE' | 'DELETE';\n\n readonly documentId: string;\n\n // optional, does not exist on changes to localdocs of the database\n readonly collectionName?: string;\n\n // true if the event is about a local document, false if not.\n readonly isLocal: boolean;\n\n documentData: RxDocumentData;\n};\n\nexport type RxChangeEventInsert = RxChangeEventBase & {\n operation: 'INSERT';\n previousDocumentData: undefined;\n};\n\nexport type RxChangeEventUpdate = RxChangeEventBase & {\n operation: 'UPDATE';\n previousDocumentData: RxDocumentData;\n};\n\nexport type RxChangeEventDelete = RxChangeEventBase & {\n operation: 'DELETE';\n previousDocumentData: RxDocumentData;\n};\n\nexport type RxChangeEvent = RxChangeEventInsert | RxChangeEventUpdate | RxChangeEventDelete;\n\n/**\n * Internally, all events are processed via bulks\n * to save performance when sending them over a transport layer\n * or de-duplicating them.\n */\nexport type RxChangeEventBulk = EventBulk, any> & {\n // optional, not given for changes to local documents of a RxDatabase.\n collectionName?: string;\n /**\n * Token of the database instance that created the events.\n * Used to determine if the events came from another instance over the BroadcastChannel.\n */\n databaseToken: 
string;\n /**\n * The storageToken of the RxDatabase that created the events.\n * Used to ensure we do not process events of other RxDatabases.\n */\n storageToken: string;\n /**\n * If true, the events belong to some internal stuff like from plugins.\n * Internal events are not emitted to the outside over the .$ Observables.\n */\n internal: boolean;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/rx-collection.d.js b/dist/cjs/types/rx-collection.d.js deleted file mode 100644 index 6d562b8bcf9..00000000000 --- a/dist/cjs/types/rx-collection.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=rx-collection.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/rx-collection.d.js.map b/dist/cjs/types/rx-collection.d.js.map deleted file mode 100644 index 87c790d0de9..00000000000 --- a/dist/cjs/types/rx-collection.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-collection.d.js","names":[],"sources":["../../../src/types/rx-collection.d.ts"],"sourcesContent":["import type {\n RxJsonSchema,\n RxDocument,\n MigrationStrategies,\n RxConflictHandler\n} from './index.d.ts';\nimport type {\n RxCollectionBase\n} from '../rx-collection.d.ts';\nimport type { QueryCache } from '../query-cache.d.ts';\nimport type { RxLocalDocumentMutation } from './rx-database.d.ts';\n\nexport interface KeyFunctionMap {\n [key: string]: Function;\n}\nexport interface NumberFunctionMap {\n [key: number]: Function;\n}\n\n\n/**\n * Params to create a new collection.\n * Notice the name of the collection is set one level higher\n * when calling addCollections()\n */\nexport type RxCollectionCreator = {\n schema: RxJsonSchema;\n instanceCreationOptions?: any;\n migrationStrategies?: MigrationStrategies;\n autoMigrate?: boolean;\n statics?: KeyFunctionMap;\n methods?: KeyFunctionMap;\n attachments?: KeyFunctionMap;\n options?: any;\n /**\n * Set this to true if you want to store local documents\n * in the 
RxCollection instance.\n */\n localDocuments?: boolean;\n cacheReplacementPolicy?: RxCacheReplacementPolicy;\n\n /**\n * Depending on which plugins or storage is used,\n * the RxCollection might need a way to resolve conflicts\n * which is done by this conflict handler.\n * If no conflict handler is provided, a master-always-wins handler\n * will be used as default\n */\n conflictHandler?: RxConflictHandler;\n};\n\nexport type RxCacheReplacementPolicy = (collection: RxCollection, queryCache: QueryCache) => void;\n\nexport type RxCollectionHookCallback<\n RxDocumentType,\n OrmMethods,\n Reactivity\n> = (\n data: RxDocumentType,\n instance: RxDocument\n) => void | Promise | any;\nexport type RxCollectionHookNoInstance = (data: RxDocumentType) => void | Promise | any;\nexport type RxCollectionHookCallbackNonAsync = (\n data: RxDocumentType,\n instance: RxDocument\n) => void | any;\nexport type RxCollectionHookNoInstanceCallback<\n RxDocumentType,\n OrmMethods,\n Reactivity\n> = (\n data: RxDocumentType,\n instance: RxCollection\n) => Promise | void | any;\n\nexport type RxCollection<\n RxDocumentType = any,\n OrmMethods = {},\n StaticMethods = {},\n InstanceCreationOptions = {},\n Reactivity = unknown\n> = StaticMethods &\n RxCollectionBase &\n RxCollectionGenerated;\n\nexport interface RxCollectionGenerated extends RxLocalDocumentMutation> {\n\n // HOOKS\n preInsert(fun: RxCollectionHookNoInstanceCallback, parallel: boolean): void;\n preSave(fun: RxCollectionHookCallback, parallel: boolean): void;\n preRemove(fun: RxCollectionHookCallback, parallel: boolean): void;\n postInsert(fun: RxCollectionHookCallback, parallel: boolean): void;\n postSave(fun: RxCollectionHookCallback, parallel: boolean): void;\n postRemove(fun: RxCollectionHookCallback, parallel: boolean): void;\n postCreate(fun: RxCollectionHookCallbackNonAsync): void;\n\n // only inMemory-collections\n awaitPersistence(): Promise;\n}\n\n/**\n * Properties are possibly encrypted so type them as any. 
TODO this is no longer needed.\n */\nexport type RxDumpCollectionAsAny = { [P in keyof T]: any };\n\ninterface RxDumpCollectionBase {\n name: string;\n passwordHash?: string;\n schemaHash: string;\n}\nexport interface RxDumpCollection extends RxDumpCollectionBase {\n docs: RxDocumentType[];\n}\n/**\n * All base properties are typed as any because they can be encrypted.\n */\nexport interface RxDumpCollectionAny extends RxDumpCollectionBase {\n docs: RxDumpCollectionAsAny[];\n}\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/rx-database-internal-store.d.js b/dist/cjs/types/rx-database-internal-store.d.js deleted file mode 100644 index 3fe4304e1c9..00000000000 --- a/dist/cjs/types/rx-database-internal-store.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=rx-database-internal-store.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/rx-database-internal-store.d.js.map b/dist/cjs/types/rx-database-internal-store.d.js.map deleted file mode 100644 index e78441ef795..00000000000 --- a/dist/cjs/types/rx-database-internal-store.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-database-internal-store.d.js","names":[],"sources":["../../../src/types/rx-database-internal-store.d.ts"],"sourcesContent":["import type {\n RxMigrationStatus\n} from '../plugins/migration-schema/index.ts';\nimport type { RxJsonSchema } from './rx-schema.d.ts';\n\nexport type InternalStoreDocType = {\n id: string;\n key: string;\n context: string;\n data: Data;\n};\n\n/**\n * Stores information about the collections.\n * The collection.name is the 'key' value.\n */\nexport type InternalStoreStorageTokenDocType = InternalStoreDocType<{\n rxdbVersion: string;\n token: string;\n instanceToken: string;\n passwordHash?: string;\n}>;\n\n/**\n * Stores information about the collections.\n * The collection.name is the 'key' value.\n */\nexport type InternalStoreCollectionDocType = InternalStoreDocType<{\n /**\n * Plain 
name of the collection\n */\n name: string;\n schema: RxJsonSchema;\n schemaHash: string;\n version: number;\n\n /**\n * Storages that are connected to this collection\n * so that when the collection is removed,\n * these storages must also be removed.\n * For example the replication meta storage\n * must be reset when the collection is removed.\n */\n connectedStorages: {\n collectionName: string;\n schema: RxJsonSchema;\n }[];\n\n /**\n * Contains the migration status\n * only if a migration has been started.\n */\n migrationStatus?: RxMigrationStatus;\n}>;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/rx-database.d.js b/dist/cjs/types/rx-database.d.js deleted file mode 100644 index 0ff6f82c68c..00000000000 --- a/dist/cjs/types/rx-database.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=rx-database.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/rx-database.d.js.map b/dist/cjs/types/rx-database.d.js.map deleted file mode 100644 index 75c0ffa8179..00000000000 --- a/dist/cjs/types/rx-database.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-database.d.js","names":[],"sources":["../../../src/types/rx-database.d.ts"],"sourcesContent":["import type {\n RxCollection,\n RxDumpCollection,\n RxDumpCollectionAsAny\n} from './rx-collection.d.ts';\nimport type {\n RxDatabaseBase\n} from '../rx-database.d.ts';\nimport { Observable } from 'rxjs';\nimport type { RxStorage } from './rx-storage.interface.d.ts';\nimport type { RxLocalDocument } from './plugins/local-documents.d.ts';\nimport type { RxCleanupPolicy } from './plugins/cleanup.d.ts';\nimport type { ById, HashFunction } from './util.d.ts';\nimport type { RxReactivityFactory } from './plugins/reactivity.d.ts';\n\nexport interface RxDatabaseCreator {\n storage: RxStorage;\n instanceCreationOptions?: InstanceCreationOptions;\n name: string;\n password?: string | any;\n multiInstance?: boolean;\n eventReduce?: boolean;\n 
ignoreDuplicate?: boolean;\n options?: any;\n cleanupPolicy?: Partial;\n /**\n * Set this to true if you want to store local documents\n * in the RxDatabase instance.\n */\n localDocuments?: boolean;\n\n /**\n * Hash method used to hash strings and json-stringified objects.\n * This hash does not have to be cryptographically secure,\n * but it is very important that is does have not create\n * collisions.\n * Default is the sha256 from the ohash library\n * @link https://www.npmjs.com/package/ohash\n */\n hashFunction?: HashFunction;\n\n /**\n * By default, count() queries in 'slow' mode are not allowed.\n */\n allowSlowCount?: boolean;\n\n /**\n * Can be used to add a custom reactivity Factory\n * that is used on all getters and values that end with the double $$.\n * For example you can use the signals api of your framework and vuejs ref()\n */\n reactivity?: RxReactivityFactory;\n}\n\nexport type CollectionsOfDatabase = ById;\nexport type RxDatabase<\n Collections = CollectionsOfDatabase,\n Internals = any,\n InstanceCreationOptions = any,\n Reactivity = any\n> = RxDatabaseBase<\n Internals,\n InstanceCreationOptions,\n Collections,\n Reactivity\n> & Collections & RxDatabaseGenerated;\n\n\nexport interface RxLocalDocumentMutation {\n insertLocal(id: string, data: LocalDocType): Promise<\n RxLocalDocument\n >;\n upsertLocal(id: string, data: LocalDocType): Promise<\n RxLocalDocument\n >;\n getLocal(id: string): Promise<\n RxLocalDocument | null\n >;\n getLocal$(id: string): Observable<\n RxLocalDocument | null\n >;\n}\n\nexport interface RxDatabaseGenerated extends RxLocalDocumentMutation> { }\n\n/**\n * Extract the **DocumentType** of a collection.\n */\ntype ExtractDTcol

= P extends RxCollection ? T : { [prop: string]: any; };\n\ninterface RxDumpDatabaseBase {\n instanceToken: string;\n name: string;\n passwordHash: string | null;\n}\nexport interface RxDumpDatabase extends RxDumpDatabaseBase {\n collections: RxDumpCollection>[];\n}\n/**\n * All base properties are typed as any because they can be encrypted.\n */\nexport interface RxDumpDatabaseAny extends RxDumpDatabaseBase {\n collections: RxDumpCollection>>[];\n}\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/rx-document.d.js b/dist/cjs/types/rx-document.d.js deleted file mode 100644 index a84f7978cea..00000000000 --- a/dist/cjs/types/rx-document.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=rx-document.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/rx-document.d.js.map b/dist/cjs/types/rx-document.d.js.map deleted file mode 100644 index eb6baf2addd..00000000000 --- a/dist/cjs/types/rx-document.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-document.d.js","names":[],"sources":["../../../src/types/rx-document.d.ts"],"sourcesContent":["import {\n Observable\n} from 'rxjs';\n\nimport type {\n RxCollection,\n} from './rx-collection.d.ts';\nimport type {\n RxAttachment,\n RxAttachmentCreator\n} from './rx-attachment.d.ts';\nimport type { RxDocumentData, WithDeleted } from './rx-storage.d.ts';\nimport type { RxChangeEvent } from './rx-change-event.d.ts';\nimport type { DeepReadonly, MaybePromise, PlainJsonValue } from './util.d.ts';\nimport type { UpdateQuery } from './plugins/update.d.ts';\nimport type { CRDTEntry } from './plugins/crdt.d.ts';\n\n\n\nexport type RxDocument = RxDocumentBase<\n RxDocumentType,\n OrmMethods,\n Reactivity\n> & RxDocumentType & OrmMethods & ExtendObservables & ExtendReactivity;\n\n\n/**\n * Extend the base properties by the property$ fields\n * so it knows that RxDocument.age also has RxDocument.age$ which is\n * an observable.\n * TODO how to do this 
for the nested fields?\n */\ntype ExtendObservables = {\n [P in keyof RxDocumentType as `${string & P}$`]: Observable;\n};\n\ntype ExtendReactivity = {\n [P in keyof RxDocumentType as `${string & P}$$`]: Reactivity;\n};\n\n/**\n * The public facing modify update function.\n * It only gets the document parts as input, that\n * are mutateable by the user.\n */\nexport type ModifyFunction = (\n doc: WithDeleted\n) => MaybePromise> | MaybePromise;\n\n/**\n * Meta data that is attached to each document by RxDB.\n */\nexport type RxDocumentMeta = {\n /**\n * Last write time.\n * Unix epoch in milliseconds.\n */\n lwt: number;\n\n /**\n * Any other value can be attached to the _meta data.\n * Mostly done by plugins to mark documents.\n */\n [k: string]: PlainJsonValue;\n};\n\nexport declare interface RxDocumentBase {\n isInstanceOfRxDocument: true;\n collection: RxCollection;\n readonly deleted: boolean;\n\n readonly $: Observable>;\n readonly $$: Reactivity;\n readonly deleted$: Observable;\n readonly deleted$$: Reactivity;\n\n readonly primary: string;\n readonly allAttachments$: Observable[]>;\n\n // internal things\n _data: RxDocumentData;\n primaryPath: string;\n revision: string;\n /**\n * Used to de-duplicate the enriched property objects\n * of the document.\n */\n _propertyCache: Map;\n $emit(cE: RxChangeEvent): void;\n _saveData(newData: any, oldData: any): Promise>;\n // /internal things\n\n // Returns the latest state of the document\n getLatest(): RxDocument;\n\n\n get$(path: string): Observable;\n get$$(path: string): Reactivity;\n get(objPath: string): DeepReadonly;\n populate(objPath: string): Promise | any | null>;\n\n /**\n * mutate the document with a function\n */\n modify(mutationFunction: ModifyFunction, context?: string): Promise>;\n incrementalModify(mutationFunction: ModifyFunction, context?: string): Promise>;\n\n /**\n * patches the given properties\n */\n patch(patch: Partial): Promise>;\n incrementalPatch(patch: Partial): Promise>;\n\n 
update(updateObj: UpdateQuery): Promise>;\n incrementalUpdate(updateObj: UpdateQuery): Promise>;\n\n updateCRDT(updateObj: CRDTEntry | CRDTEntry[]): Promise>;\n\n remove(): Promise>;\n incrementalRemove(): Promise>;\n\n // only for temporary documents\n set(objPath: string, value: any): RxDocument;\n save(): Promise;\n\n // attachments\n putAttachment(\n creator: RxAttachmentCreator\n ): Promise>;\n getAttachment(id: string): RxAttachment | null;\n allAttachments(): RxAttachment[];\n\n toJSON(withRevAndAttachments: true): DeepReadonly>;\n toJSON(withRevAndAttachments?: false): DeepReadonly;\n\n toMutableJSON(withRevAndAttachments: true): RxDocumentData;\n toMutableJSON(withRevAndAttachments?: false): RxDocType;\n\n destroy(): void;\n}\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/rx-error.d.js b/dist/cjs/types/rx-error.d.js deleted file mode 100644 index a24c8d86115..00000000000 --- a/dist/cjs/types/rx-error.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=rx-error.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/rx-error.d.js.map b/dist/cjs/types/rx-error.d.js.map deleted file mode 100644 index e3af896cace..00000000000 --- a/dist/cjs/types/rx-error.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-error.d.js","names":[],"sources":["../../../src/types/rx-error.d.ts"],"sourcesContent":["import type { RxJsonSchema } from './rx-schema.d.ts';\nimport {\n RxSchema\n} from '../rx-schema.ts';\nimport type { RxPlugin } from './rx-plugin.d.ts';\nimport { ERROR_MESSAGES } from '../plugins/dev-mode/error-messages.ts';\nimport type { RxReplicationWriteToMasterRow } from './replication-protocol.d.ts';\nimport type { BulkWriteRow, RxDocumentData } from './rx-storage.d.ts';\n\ntype KeyOf = Extract;\nexport type RxErrorKey = KeyOf;\n\nexport type {\n RxError,\n RxTypeError\n} from '../rx-error.ts';\n\n/**\n * this lists all possible parameters\n */\nexport interface RxErrorParameters 
{\n readonly error?: PlainJsonError;\n readonly errors?: PlainJsonError[];\n readonly writeError?: RxStorageWriteError;\n readonly schemaPath?: string;\n readonly objPath?: string;\n readonly rootPath?: string;\n readonly childpath?: string;\n readonly obj?: any;\n readonly document?: any;\n readonly schema?: Readonly | RxSchema>;\n readonly schemaObj?: any;\n readonly pluginKey?: string;\n readonly originalDoc?: Readonly;\n readonly finalDoc?: Readonly;\n readonly regex?: string;\n readonly fieldName?: string;\n readonly id?: string;\n readonly data?: any;\n readonly missingCollections?: string[];\n readonly primaryPath?: string;\n readonly primary?: string;\n readonly primaryKey?: string;\n readonly have?: any;\n readonly should?: any;\n readonly name?: string;\n readonly adapter?: any;\n readonly link?: string;\n readonly path?: string;\n readonly value?: any;\n readonly givenName?: string;\n readonly fromVersion?: number;\n readonly toVersion?: number;\n readonly version?: number;\n readonly args?: any;\n readonly opts?: any;\n readonly dataBefore?: any;\n readonly dataAfter?: any;\n readonly pull?: boolean;\n readonly push?: boolean;\n readonly url?: string;\n readonly key?: string;\n readonly queryObj?: any;\n readonly query?: any;\n readonly op?: string;\n readonly skip?: any;\n readonly limit?: any;\n readonly passwordHash?: string;\n readonly existingPasswordHash?: string;\n readonly password?: string | any;\n readonly minPassLength?: number;\n readonly own?: any;\n readonly source?: any;\n readonly method?: any;\n readonly field?: string;\n readonly ref?: string;\n readonly funName?: string;\n readonly functionName?: string;\n readonly schemaHash?: string;\n readonly previousSchema?: Readonly>;\n readonly previousSchemaHash?: string;\n readonly type?: string;\n readonly when?: string;\n readonly parallel?: boolean;\n readonly collection?: any;\n readonly database?: any;\n readonly storage?: string;\n readonly indexes?: Array | Readonly>;\n readonly 
index?: string | string[] | readonly string[];\n readonly plugin?: RxPlugin | any;\n readonly plugins?: Set;\n\n // used in the replication plugin\n\n /**\n * The checkpoint of the response from the last successful\n * pull by the client.\n * Null if there was no pull operation before\n * so that there is no last pulled checkpoint.\n */\n readonly checkpoint?: any;\n /**\n * The documents that failed to be pushed.\n * Typed as 'any' because they might be modified by the push modifier.\n */\n readonly pushRows?: RxReplicationWriteToMasterRow[];\n readonly direction?: 'pull' | 'push';\n\n}\n\n/**\n * Error-Items which are created by the jsonschema-validator\n */\nexport type RxValidationError = {\n readonly field: string;\n readonly message: string;\n};\n\n/**\n * Use to have a transferable error object\n * in plain json instead of a JavaScript Error instance.\n */\nexport type PlainJsonError = {\n name: string;\n message: string;\n rxdb?: true;\n code?: RxErrorKey;\n url?: string;\n extensions?: Record;\n parameters?: RxErrorParameters;\n stack?: string;\n};\n\n\n\n\n\n/**\n * Error that can happen per document when\n * RxStorage.bulkWrite() is called\n */\nexport type RxStorageWriteErrorBase = {\n\n status: number\n | 409 // conflict\n | 422 // schema validation error\n | 510 // attachment data missing\n ;\n\n /**\n * set this property to make it easy\n * to detect if the object is a RxStorageBulkWriteError\n */\n isError: true;\n\n // primary key of the document\n documentId: string;\n\n // the original document data that should have been written.\n writeRow: BulkWriteRow;\n};\n\nexport type RxStorageWriteErrorConflict = RxStorageWriteErrorBase & {\n status: 409;\n /**\n * A conflict error state must contain the\n * document state in the database.\n * This ensures that we can continue resolving a conflict\n * without having to pull the document out of the db first.\n * Is not set if the error happens on an insert.\n */\n documentInDb: RxDocumentData;\n};\n\nexport 
type RxStorageWriteErrorValidation = RxStorageWriteErrorBase & {\n status: 422;\n /**\n * Other properties that give\n * information about the error,\n * for example a schema validation error\n * might contain the exact error from the validator here.\n * Must be plain JSON!\n */\n validationErrors: RxValidationError[];\n};\n\nexport type RxStorageWriteErrorAttachment = RxStorageWriteErrorBase & {\n status: 510;\n attachmentId: string;\n documentInDb?: RxDocumentData;\n};\n\n\nexport type RxStorageWriteError =\n RxStorageWriteErrorConflict |\n RxStorageWriteErrorValidation |\n RxStorageWriteErrorAttachment;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/rx-plugin.d.js b/dist/cjs/types/rx-plugin.d.js deleted file mode 100644 index 2ffb9eafe5b..00000000000 --- a/dist/cjs/types/rx-plugin.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=rx-plugin.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/rx-plugin.d.js.map b/dist/cjs/types/rx-plugin.d.js.map deleted file mode 100644 index 1625b7a7238..00000000000 --- a/dist/cjs/types/rx-plugin.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-plugin.d.js","names":[],"sources":["../../../src/types/rx-plugin.d.ts"],"sourcesContent":["import type {\n RxQuery,\n RxQueryOP,\n MangoQuery\n} from './rx-query.d.ts';\nimport type {\n RxCollection,\n RxCollectionCreator\n} from './rx-collection.d.ts';\nimport type {\n RxStorageInstanceCreationParams\n} from './rx-storage.d.ts';\nimport type {\n DeepReadonly,\n FilledMangoQuery,\n RxDatabase,\n RxDatabaseCreator,\n RxDocument,\n RxStorage,\n RxReplicationWriteToMasterRow,\n WithDeleted,\n RxState,\n BulkWriteRow,\n RxStorageInstance\n} from './index.d.ts';\nimport type { RxSchema } from '../rx-schema.d.ts';\n\nexport type RxPluginPreCreateRxQueryArgs = {\n op: RxQueryOP;\n queryObj: MangoQuery;\n collection: RxCollection;\n};\n\nexport type RxPluginPreAddRxPluginArgs = {\n // the plugin that is 
getting added\n plugin: RxPlugin | any;\n // previous added plugins\n plugins: Set;\n};\n\nexport type RxPluginPrePrepareQueryArgs = {\n rxQuery: RxQuery;\n mangoQuery: FilledMangoQuery;\n};\n\n\n/**\n * Depending on which plugins are used together,\n * it is important that the plugin is able to define if\n * the hooks must be added as first or as last array item.\n * For example the encryption plugin must run encryption\n * before the key-compression changes the fieldnames.\n */\nexport type RxPluginHooks = {\n /**\n * Hook function that is added as first.\n */\n before?: (i: Input) => void;\n /**\n * Hook function that is added as last.\n */\n after?: (i: Input) => void;\n};\n\nexport interface RxPlugin {\n /**\n * A string to uniquely identifies the plugin.\n * Can be used to throw when different versions of the same plugin are used.\n * And also other checks.\n * Use kebab-case.\n */\n readonly name: string;\n\n /**\n * set this to true so RxDB\n * knows that this object in a rxdb plugin\n */\n readonly rxdb: true;\n\n /**\n * Init function where dependent plugins could be added.\n * (optional)\n */\n init?(): any;\n\n prototypes?: {\n RxSchema?: (proto: RxSchema) => void;\n RxDocument?: (proto: RxDocument) => void;\n RxQuery?: (proto: RxQuery) => void;\n RxCollection?: (proto: RxCollection) => void;\n RxDatabase?: (proto: RxDatabase) => void;\n };\n overwritable?: {\n isDevMode?: () => boolean;\n deepFreezeWhenDevMode?: (obj: T) => DeepReadonly;\n validatePassword?: Function;\n checkAdapter?: Function;\n tunnelErrorMessage?: Function;\n };\n hooks?: {\n preAddRxPlugin?: RxPluginHooks;\n preCreateRxDatabase?: RxPluginHooks;\n createRxDatabase?: RxPluginHooks<{\n database: RxDatabase;\n creator: RxDatabaseCreator;\n }>;\n preDestroyRxDatabase?: RxPluginHooks;\n postRemoveRxDatabase?: RxPluginHooks<{\n databaseName: string;\n storage: RxStorage;\n }>;\n createRxCollection?: RxPluginHooks<{\n collection: RxCollection;\n creator: RxCollectionCreator;\n }>;\n 
createRxState?: RxPluginHooks<{\n collection: RxCollection;\n state: RxState;\n }>;\n preCreateRxCollection?: RxPluginHooks & {\n name: string;\n database: RxDatabase;\n }>;\n postDestroyRxCollection?: RxPluginHooks;\n postRemoveRxCollection?: RxPluginHooks<{\n storage: RxStorage;\n databaseName: string;\n collectionName: string;\n }>;\n preCreateRxSchema?: RxPluginHooks;\n createRxSchema?: RxPluginHooks;\n preCreateRxQuery?: RxPluginHooks;\n prePrepareQuery?: RxPluginHooks;\n createRxQuery?: RxPluginHooks;\n createRxDocument?: RxPluginHooks;\n postCreateRxDocument?: RxPluginHooks;\n preCreateRxStorageInstance?: RxPluginHooks>;\n /**\n * Runs before a write to the storage instance of a RxCollection or RxDatabase.\n */\n preStorageWrite?: RxPluginHooks<{\n storageInstance: RxStorageInstance;\n rows: BulkWriteRow[];\n }>;\n preMigrateDocument?: RxPluginHooks;\n postMigrateDocument?: RxPluginHooks;\n preReplicationMasterWrite?: RxPluginHooks<{\n rows: RxReplicationWriteToMasterRow[];\n collection: RxCollection;\n }>;\n preReplicationMasterWriteDocumentsHandle?: RxPluginHooks<{\n result: WithDeleted[];\n collection: RxCollection;\n }>;\n };\n}\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/rx-query.d.js b/dist/cjs/types/rx-query.d.js deleted file mode 100644 index 9dc69e82ed3..00000000000 --- a/dist/cjs/types/rx-query.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=rx-query.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/rx-query.d.js.map b/dist/cjs/types/rx-query.d.js.map deleted file mode 100644 index 39567183259..00000000000 --- a/dist/cjs/types/rx-query.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-query.d.js","names":[],"sources":["../../../src/types/rx-query.d.ts"],"sourcesContent":["import type {\n RxQueryBase\n} from '../rx-query.d.ts';\nimport type { Paths, StringKeys } from './util.d.ts';\n\n/**\n * Typed Mango Query Selector\n * @link 
https://github.com/mongodb/node-mongodb-native/blob/26bce4a8debb65df5a42dc8599e886c9c83de10d/src/mongo_types.ts\n * @link https://stackoverflow.com/a/58436959/3443137\n */\n\n\nexport type PropertyType = string extends Property\n ? unknown\n : Property extends keyof Type\n ? Type[Property]\n : Property extends `${number}`\n ? Type extends ReadonlyArray\n ? ArrayType\n : unknown\n : Property extends `${infer Key}.${infer Rest}`\n ? Key extends `${number}`\n ? Type extends ReadonlyArray\n ? PropertyType\n : unknown\n : Key extends keyof Type\n ? Type[Key] extends Map\n ? MapType\n : PropertyType\n : unknown\n : unknown;\n\n\nexport type MangoQueryRegexOptions = 'i' | 'g' | 'm' | 'gi' | 'ig' | 'igm' | string;\n\n/*\n * The MongoDB query library is huge and we do not need all the operators.\n * If you add an operator here, make sure that you properly add a test in\n * the file /test/unit/rx-storage-query-correctness.test.ts\n *\n * @link https://github.com/kofrasa/mingo#es6\n */\nexport interface MangoQueryOperators {\n $eq?: PathValueType;\n $gt?: PathValueType;\n $gte?: PathValueType;\n $lt?: PathValueType;\n $lte?: PathValueType;\n $ne?: PathValueType;\n $in?: PathValueType[];\n $nin?: PathValueType[];\n $regex?: string;\n $options?: MangoQueryRegexOptions;\n $exists?: boolean;\n $type?: 'null' | 'boolean' | 'number' | 'string' | 'array' | 'object';\n $mod?: number;\n $not?: PathValueType;\n $size?: number;\n $elemMatch?: MangoQuerySelector;\n}\n\nexport type MangoQuerySelector = Partial<{\n [Property in Paths]: MangoQueryOperators | PropertyType;\n}> & {\n $and?: MangoQuerySelector[];\n $or?: MangoQuerySelector[];\n $nor?: MangoQuerySelector[];\n};\n\n/**\n * Discussion was at:\n * @link https://github.com/pubkey/rxdb/issues/1972\n */\nexport type MangoQuerySortDirection = 'asc' | 'desc';\nexport type MangoQuerySortPart = {\n [k in StringKeys | string]: MangoQuerySortDirection;\n};\n\nexport type MangoQuerySelectorAndIndex = {\n /**\n * Selector is optional,\n * if 
not given, the query matches all documents\n * that are not _deleted=true.\n */\n selector?: MangoQuerySelector;\n /**\n * By default, the RxStorage implementation\n * decides which index to use when running the query.\n *\n * For better performance, a different index might be defined\n * by setting it in the query.\n * How this improves performance and if the defined index is used,\n * depends on the RxStorage implementation.\n */\n index?: string | string[];\n};\n\nexport type MangoQueryNoLimit = MangoQuerySelectorAndIndex & {\n /**\n * Sorting of the results.\n * If no sort is set, RxDB will sort by the primary key.\n * Also if sort is set, RxDB will add primaryKey sorting\n * if the primaryKey was not in the sort parameters before.\n * This ensures that there is a deterministic sorting of the\n * results, not mather at which order the documents have been\n * inserted into the storage.\n */\n sort?: MangoQuerySortPart[];\n};\n\nexport type MangoQuery = MangoQueryNoLimit & {\n skip?: number;\n limit?: number;\n};\n\nexport type RxQueryOP = 'find' | 'findOne' | 'count' | 'findByIds';\n\nexport declare class RxQuery<\n RxDocumentType = any,\n RxQueryResult = any,\n OrmMethods = {},\n Reactivity = unknown\n> extends RxQueryBase {\n equals(queryObj: any): RxQuery;\n eq(queryObj: any): RxQuery;\n or(queryObj: keyof RxDocumentType | string | any[]): RxQuery;\n nor(queryObj: keyof RxDocumentType | string | any[]): RxQuery;\n and(queryObj: keyof RxDocumentType | string | any[]): RxQuery;\n gt(queryObj: any): RxQuery;\n gte(queryObj: any): RxQuery;\n lt(queryObj: any): RxQuery;\n lte(queryObj: any): RxQuery;\n ne(queryObj: any): RxQuery;\n in(queryObj: any[]): RxQuery;\n nin(queryObj: any[]): RxQuery;\n all(queryObj: any): RxQuery;\n regex(queryObj: string | {\n $regex: string;\n $options: MangoQueryRegexOptions;\n }): RxQuery;\n exists(queryObj: any): RxQuery;\n elemMatch(queryObj: any): RxQuery;\n mod(p1: any, p2: any, p3: any): 
RxQuery;\n}\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/rx-schema.d.js b/dist/cjs/types/rx-schema.d.js deleted file mode 100644 index 01f5bf4ce00..00000000000 --- a/dist/cjs/types/rx-schema.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=rx-schema.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/rx-schema.d.js.map b/dist/cjs/types/rx-schema.d.js.map deleted file mode 100644 index 6012722753e..00000000000 --- a/dist/cjs/types/rx-schema.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-schema.d.js","names":[],"sources":["../../../src/types/rx-schema.d.ts"],"sourcesContent":["import { AsTyped } from 'as-typed';\nimport type { CRDTSchemaOptions } from './plugins/crdt.d.ts';\nimport type { StringKeys } from './util.d.ts';\n\n/**\n * @link https://github.com/types/lib-json-schema/blob/master/v4/index.d.ts\n */\nexport type JsonSchemaTypes = 'array' | 'boolean' | 'integer' | 'number' | 'null' | 'object' | 'string' | (string & {});\n\nexport type CompositePrimaryKey = {\n /**\n * The top level field of the document that will be used\n * to store the composite key as string.\n */\n key: StringKeys;\n\n /**\n * The fields of the composite key,\n * the fields must be required and final\n * and have the type number, int, or string.\n */\n fields: (StringKeys | string)[] | readonly (StringKeys | string)[];\n /**\n * The separator which is used to concat the\n * primary fields values.\n * Choose a character as separator that is known\n * to never appear inside of the primary fields values.\n * I recommend to use the pipe char '|'.\n */\n separator: string;\n};\n\nexport type PrimaryKey = StringKeys | CompositePrimaryKey;\n\nexport type JsonSchema = {\n allOf?: JsonSchema[] | readonly JsonSchema[];\n anyOf?: JsonSchema[] | readonly JsonSchema[];\n oneOf?: JsonSchema[] | readonly JsonSchema[];\n additionalItems?: boolean | JsonSchema;\n additionalProperties?: boolean | JsonSchema;\n 
type?: JsonSchemaTypes | JsonSchemaTypes[] | readonly JsonSchemaTypes[];\n description?: string;\n dependencies?: {\n [key: string]: JsonSchema | string[] | readonly string[];\n };\n exclusiveMinimum?: boolean;\n exclusiveMaximum?: boolean;\n items?: JsonSchema | JsonSchema[] | readonly JsonSchema[];\n multipleOf?: number;\n maxProperties?: number;\n maximum?: number;\n minimum?: number;\n maxLength?: number;\n minLength?: number;\n maxItems?: number;\n minItems?: number;\n minProperties?: number;\n pattern?: string;\n patternProperties?: {\n [key: string]: JsonSchema;\n };\n properties?: {\n [key in StringKeys]: JsonSchema;\n };\n required?: string[] | readonly string[];\n uniqueItems?: boolean;\n enum?: any[] | readonly any[];\n not?: JsonSchema;\n definitions?: {\n [key: string]: JsonSchema;\n };\n format?: 'date-time' | 'email' | 'hostname' | 'ipv4' | 'ipv6' | 'uri' | string;\n example?: any;\n\n // RxDB-specific\n ref?: string;\n final?: boolean;\n};\n\nexport interface TopLevelProperty extends JsonSchema {\n default?: any;\n}\n\n/**\n * @link https://developer.mozilla.org/en-US/docs/Web/API/Compression_Streams_API\n */\nexport type CompressionMode = 'deflate' | 'gzip';\n\nexport type RxJsonSchema<\n /**\n * The doctype must be given, and '=any' cannot be used,\n * otherwise the keyof of primaryKey\n * would be optional when the type of the document is not known.\n */\n RxDocType\n> = {\n title?: string;\n description?: string;\n version: number;\n\n /**\n * The primary key of the documents.\n * Must be in the top level of the properties of the schema\n * and that property must have the type 'string'\n */\n primaryKey: PrimaryKey;\n\n /**\n * TODO this looks like a typescript-bug\n * we have to allows all string because the 'object'-literal is not recognized\n * retry this in later typescript-versions\n */\n type: 'object' | string;\n properties: { [key in StringKeys]: TopLevelProperty };\n\n /**\n * On the top level the required-array must be set\n * because 
we always have to set the primary key to required.\n *\n * TODO required should be made non-optional on the top level\n */\n required?: StringKeys[] | readonly StringKeys[];\n\n\n /**\n * Indexes that will be used for the queries.\n * RxDB will internally prepend the _deleted field to the index\n * because queries do NOT return documents with _deleted=true.\n */\n indexes?: (string | string[])[] | (string | readonly string[])[] | readonly (string | string[])[] | readonly (string | readonly string[])[];\n\n /**\n * Internally used indexes that do not get _deleted prepended\n * by RxDB. Use these to speed up queries that are run manually on the storage\n * or to speed up requests when you use the RxDB server.\n * These could also be utilised when you build a plugin that\n * has to query documents without respecting the _deleted value.\n */\n internalIndexes?: string[][] | readonly string[][];\n\n\n encrypted?: string[] | readonly string[];\n keyCompression?: boolean;\n /**\n * if not set, rxdb will set 'false' as default\n * Having additionalProperties: true is not allowed on the root level to ensure\n * that property names do not clash with properties of the RxDocument class\n * or ORM methods.\n */\n additionalProperties?: false;\n attachments?: {\n encrypted?: boolean;\n /**\n * @link https://developer.mozilla.org/en-US/docs/Web/API/Compression_Streams_API\n */\n compression?: CompressionMode;\n };\n /**\n * Options for the sharding plugin of rxdb-premium.\n * We set these on the schema because changing the shard amount or mode\n * will require a migration.\n * @link https://rxdb.info/rx-storage-sharding.html\n */\n sharding?: {\n /**\n * Amount of shards.\n * This value cannot be changed after you have stored data,\n * if you change it anyway, you will loose the existing data.\n */\n shards: number;\n /**\n * Either shard by collection or by database.\n * For most use cases (IndexedDB based storages), sharding by collection is the way to go\n * because it has a 
faster initial load time.\n */\n mode: 'database' | 'collection';\n };\n crdt?: CRDTSchemaOptions;\n};\n\n/**\n * Used to aggregate the document type from the schema.\n * @link https://github.com/pubkey/rxdb/discussions/3467\n */\nexport type ExtractDocumentTypeFromTypedRxJsonSchema = AsTyped;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/rx-storage.d.js b/dist/cjs/types/rx-storage.d.js deleted file mode 100644 index eac5d3fc8f5..00000000000 --- a/dist/cjs/types/rx-storage.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=rx-storage.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/rx-storage.d.js.map b/dist/cjs/types/rx-storage.d.js.map deleted file mode 100644 index 713c1434c2d..00000000000 --- a/dist/cjs/types/rx-storage.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-storage.d.js","names":[],"sources":["../../../src/types/rx-storage.d.ts"],"sourcesContent":["import type { ChangeEvent } from 'event-reduce-js';\nimport type { RxChangeEvent } from './rx-change-event.d.ts';\nimport type { RxDocumentMeta } from './rx-document.d.ts';\nimport type { RxStorageWriteError } from './rx-error.d.ts';\nimport type { RxJsonSchema } from './rx-schema.d.ts';\nimport type { Override } from './util.d.ts';\n\n/**\n * The document data how it comes out of the storage instance.\n * Contains all meta data like revision, attachments and deleted-flag.\n */\nexport type RxDocumentData = T & {\n\n /**\n * As other NoSQL databases,\n * RxDB also assumes that no data is finally deleted.\n * Instead the documents are stored with _deleted: true\n * which means they will not be returned at queries.\n */\n _deleted: boolean;\n\n /**\n * The attachments meta data is stored besides to document.\n */\n _attachments: {\n [attachmentId: string]: RxAttachmentData;\n };\n\n /**\n * Contains a revision which is concatenated with a [height: number]-[identifier: string]\n * like: '1-3hl4kj3l4kgj34g34glk'.\n * 
The revision is used to detect write conflicts and have a document history.\n * Revisions behave similar to couchdb revisions:\n * @link https://docs.couchdb.org/en/stable/replication/conflicts.html#revision-tree\n\n * When writing a document, you must send the correct revision in the previous-field\n * to make sure that you do not cause a write conflict.\n * The revision of the 'new' document-field must be created, for example via util.createRevision().\n * Any revision that matches the [height]-[hash] format can be used.\n */\n _rev: string;\n _meta: RxDocumentMeta;\n};\n\nexport type RxDocumentDataById = {\n [documentId: string]: RxDocumentData;\n};\n\n/**\n * The document data how it is send to the\n * storage instance to save it.\n */\n// We & T here instead of in RxDocumentData to preserver indexability by keyof T which the Override breaks\nexport type RxDocumentWriteData = T & Override, {\n _attachments: {\n /**\n * To create a new attachment, set the write data\n * To delete an attachment, leave it out on the _attachments property.\n * To change an attachment, set the new write data.\n * To not touch an attachment, just send the stub again\n * which came out of the storage instance.\n */\n [attachmentId: string]: RxAttachmentData | RxAttachmentWriteData;\n };\n}>;\n\nexport type WithDeleted = DocType & {\n _deleted: boolean;\n};\nexport type WithDeletedAndAttachments = DocType & {\n _deleted: boolean;\n\n /**\n * Here the _attachments might exist\n * or might not, depending one the use case.\n */\n _attachments?: {\n [attachmentId: string]: RxAttachmentData | RxAttachmentWriteData;\n };\n};\n\n/**\n * Send to the bulkWrite() method of a storage instance.\n */\nexport type BulkWriteRow = {\n /**\n * The current document state in the storage engine,\n * assumed by the application.\n * Undefined if the document is a new insert.\n * Notice that we send the full document data as 'previous', not just the revision.\n * The reason is that to get the previous 
revision you anyway have to get the full\n * previous document and so it is easier to just send it all to the storage instance.\n * This will later allow us to use something different then the _rev key for conflict detection\n * when we implement other storage instances.\n */\n previous?: RxDocumentData;\n /**\n * The new document data to be stored in the storage instance.\n */\n document: RxDocumentWriteData;\n};\nexport type BulkWriteRowById = {\n [documentId: string]: BulkWriteRow;\n};\n\n/**\n * After the RxStorage has processed all rows,\n * we have this to work with afterwards.\n */\nexport type BulkWriteRowProcessed = BulkWriteRow & {\n document: RxDocumentData;\n};\n\n\nexport type RxAttachmentData = {\n /**\n * Size of the attachments data\n */\n length: number;\n /**\n * Content type like 'plain/text'\n */\n type: string;\n /**\n * The hash of the attachments content.\n * It is calculated by RxDB, and send to the storage.\n * The only guarantee is that the digest will change when the attachments data changes.\n * @link https://github.com/pouchdb/pouchdb/issues/3156#issuecomment-66831010\n * @link https://github.com/pubkey/rxdb/pull/4107\n */\n digest: string;\n};\n\n/**\n * Data which is needed for new attachments\n * that are send from RxDB to the RxStorage implementation.\n */\nexport type RxAttachmentWriteData = RxAttachmentData & {\n /**\n * The data of the attachment. As string in base64 format.\n * In the past we used Blob internally but it created many\n * problems because of then we need the full data (for encryption/compression)\n * so we anyway have to get the string value out of the Blob.\n *\n * Also using Blob has no performance benefit because in some RxStorage implementations,\n * it just keeps the transaction open for longer because the Blob\n * has be be read.\n */\n data: string;\n};\n\n\n/**\n * The returned data from RxStorageInstance.bulkWrite()\n * For better performance, we do NOT use an indexed object,\n * but only plain arrays. 
Because most of the time\n * RxDB anyway only need the array data and we can save performance\n * by not indexing the results.\n */\nexport type RxStorageBulkWriteResponse = {\n /**\n * contains all succeeded writes.\n */\n success: RxDocumentData[];\n /**\n * contains all errored writes.\n */\n error: RxStorageWriteError[];\n};\n\n/**\n * We return a complex object instead of a single array\n * so we are able to add additional fields in the future.\n */\nexport type RxStorageQueryResult = {\n // the found documents, sort order is important.\n documents: RxDocumentData[];\n};\n\nexport type RxStorageCountResult = {\n count: number;\n /**\n * Returns the mode which was used by the storage\n * to count the documents.\n * If this returns 'slow', RxDB will throw by default\n * if 'allowSlowCount' is not set.\n */\n mode: 'fast' | 'slow';\n};\n\nexport type RxStorageInstanceCreationParams = {\n\n /**\n * A string to uniquely identify the instance of the JavaScript object\n * of the RxDatabase where this RxStorageInstance belongs to.\n * In most cases you would use RxDatabase.token here.\n *\n * This is used so that we can add caching or reuse stuff that belongs to the same RxDatabase.\n * For example the BroadcastChannel that is used for event propagation between multiple browser tabs\n * is cached by this token.\n *\n * In theory we could just use the databaseName for that. But to make it easier in unit tests\n * to simulate cross-tab usage, we cannot assume that the databaseName is unique in a single\n * JavaScript process. 
Therefore we use the instance token instead.\n */\n databaseInstanceToken: string;\n\n\n databaseName: string;\n collectionName: string;\n schema: RxJsonSchema>;\n options: InstanceCreationOptions;\n /**\n * If multiInstance is true, there can be more\n * then one instance of the database, for example\n * when multiple browser tabs exist or more then one Node.js\n * process relies on the same storage.\n */\n multiInstance: boolean;\n password?: string | any;\n\n /**\n * Some storages can do additional checks\n * that are performance expensive\n * and should only be done in dev-mode.\n */\n devMode: boolean;\n};\n\nexport type ChangeStreamOptions = {\n\n /**\n * Sequence number of the first event to start with.\n * If you want to get all ongoing events,\n * first get the latest sequence number and input it here.\n *\n * Optional on changeStream,\n * will start from the newest sequence.\n */\n startSequence?: number;\n /**\n * limits the amount of results\n */\n limit?: number;\n};\n\n/**\n * In the past we handles each RxChangeEvent by its own.\n * But it has been shown that this take way more performance then needed,\n * especially when the events get transferred over a data layer\n * like with WebWorkers or the BroadcastChannel.\n * So we now process events as bulks internally.\n */\nexport type EventBulk = {\n /**\n * Unique id of the bulk,\n * used to detect duplicate bulks\n * that have already been processed.\n */\n id: string;\n events: EventType[];\n\n /**\n * Required for replication.\n * Passing this checkpoint into getChangedDocumentsSince()\n * must return all items that have been modified AFTER this write event.\n */\n checkpoint: CheckpointType;\n\n /**\n * The context that was given at the call to bulkWrite()\n * that caused this EventBulk.\n */\n context: string;\n\n /**\n * Unix timestamp in milliseconds of when the operation was triggered\n * and when it was finished.\n * This is optional because we do not have this time\n * for events that come 
from the internal storage instance changestream.\n * TODO do we even need this values?\n */\n startTime: number;\n endTime: number;\n};\n\nexport type ChangeStreamEvent = ChangeEvent> & {\n /**\n * An integer that is increasing\n * and unique per event.\n * Can be used to sort events or get information\n * about how many events there are.\n */\n sequence: number;\n /**\n * The value of the primary key\n * of the changed document\n */\n id: string;\n};\n\nexport type RxStorageChangeEvent = Omit, 'isLocal' | 'collectionName'>;\n\n/**\n * An example for how a RxStorage checkpoint can look like.\n * NOTICE: Not all implementations use this type.\n */\nexport type RxStorageDefaultCheckpoint = {\n id: string;\n lwt: number;\n};\n\n\n\n\nexport type CategorizeBulkWriteRowsOutput = {\n\n // TODO only needs the document, not the row.\n bulkInsertDocs: BulkWriteRowProcessed[];\n bulkUpdateDocs: BulkWriteRowProcessed[];\n\n errors: RxStorageWriteError[];\n eventBulk: EventBulk>, any>;\n attachmentsAdd: {\n documentId: string;\n attachmentId: string;\n attachmentData: RxAttachmentWriteData;\n digest: string;\n }[];\n attachmentsRemove: {\n documentId: string;\n attachmentId: string;\n digest: string;\n }[];\n attachmentsUpdate: {\n documentId: string;\n attachmentId: string;\n attachmentData: RxAttachmentWriteData;\n digest: string;\n }[];\n /**\n * Contains the non-error document row that\n * has the newest _meta.lwt time.\n * Empty if no successful write exists.\n */\n newestRow?: BulkWriteRowProcessed;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/rx-storage.interface.d.js b/dist/cjs/types/rx-storage.interface.d.js deleted file mode 100644 index 17d5d3fe724..00000000000 --- a/dist/cjs/types/rx-storage.interface.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=rx-storage.interface.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/rx-storage.interface.d.js.map 
b/dist/cjs/types/rx-storage.interface.d.js.map deleted file mode 100644 index 47e6cd1d634..00000000000 --- a/dist/cjs/types/rx-storage.interface.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-storage.interface.d.js","names":[],"sources":["../../../src/types/rx-storage.interface.d.ts"],"sourcesContent":["import type {\n BulkWriteRow,\n EventBulk,\n RxDocumentData,\n RxStorageBulkWriteResponse,\n RxStorageChangeEvent,\n RxStorageCountResult,\n RxStorageInstanceCreationParams,\n RxStorageQueryResult\n} from './rx-storage.ts';\nimport type {\n MangoQuerySelector,\n MangoQuerySortPart,\n RxConflictResultionTask,\n RxConflictResultionTaskSolution,\n RxJsonSchema,\n RxQueryPlan\n} from './index.d.ts';\nimport type {\n Observable\n} from 'rxjs';\n\n/**\n * RxStorage\n * This is an interface that abstracts the storage engine.\n * This allows us to use RxDB with different storage engines.\n *\n * @link https://rxdb.info/rx-storage.html\n * @link https://github.com/pubkey/rxdb/issues/1636\n */\n\n\n/**\n * A RxStorage is a module that acts\n * as a factory that can create multiple RxStorageInstance\n * objects.\n *\n * All data inputs and outputs of a StorageInstance must be plain json objects.\n * Do not use Map, Set or anything else that cannot be JSON.stringify-ed.\n * This will ensure that the storage can exchange data\n * when it is a WebWorker or a WASM process or data is send via BroadcastChannel.\n */\nexport interface RxStorage {\n /**\n * name of the storage engine\n * used to detect if plugins do not work so we can throw proper errors.\n */\n readonly name: string;\n\n /**\n * RxDB version is part of the storage\n * so we can have fallbacks and stuff when\n * multiple storages with different version are in use\n * like in the storage migration plugin.\n */\n readonly rxdbVersion: string;\n\n /**\n * Creates a storage instance\n * that can contain the NoSQL documents of a collection.\n */\n createStorageInstance(\n params: 
RxStorageInstanceCreationParams\n ): Promise>;\n}\n\n\n/**\n * User provided mango queries will be filled up by RxDB via normalizeMangoQuery()\n * so we do not have to do many if-field-exist tests in the internals.\n */\nexport type FilledMangoQuery = {\n /**\n * The selector is required here.\n */\n selector: MangoQuerySelector>;\n\n /**\n * In contrast to the user-provided MangoQuery,\n * the sorting is required here because\n * RxDB has to ensure that the primary key is always\n * part of the sort params.\n */\n sort: MangoQuerySortPart>[];\n\n /**\n * In the normalized mango query,\n * the index must always be a string[],\n * never just a string.\n * This makes it easier to use the query because\n * we do not have to do an array check.\n */\n index?: string[];\n\n /**\n * Skip must be set which defaults to 0\n */\n skip: number;\n\n limit?: number;\n};\n\n\n/**\n * Before sending a query to the storageInstance.query()\n * we run it through the query planner and do some normalization\n * stuff. Notice that the queryPlan is a hint for the storage and\n * it is not required to use it when running queries. 
Some storages\n * might use their own query planning instead.\n */\nexport type PreparedQuery = {\n // original query from the input\n query: FilledMangoQuery;\n queryPlan: RxQueryPlan;\n};\n\nexport interface RxStorageInstance<\n /**\n * The type of the documents that can be stored in this instance.\n * All documents in an instance must comply to the same schema.\n * Also all documents are RxDocumentData with the meta properties like\n * _deleted or _rev etc.\n */\n RxDocType,\n Internals,\n InstanceCreationOptions,\n CheckpointType = any\n> {\n readonly databaseName: string;\n /**\n * Returns the internal data that is used by the storage engine.\n */\n readonly internals: Readonly;\n readonly options: Readonly;\n /**\n * The schema that defines the documents that are stored in this instance.\n * Notice that the schema must be enhanced with the meta properties like\n * _meta, _rev and _deleted etc. which are added by fillWithDefaultSettings()\n */\n readonly schema: Readonly>>;\n readonly collectionName: string;\n\n /**\n * (Optional) reference to the underlying persistent storage instance.\n * If set, things like replication will run on that storageInstance instead of the parent.\n * This is mostly used in things like the memory-synced storage where we want to\n * run replications and migrations on the persistent storage instead of the in-memory storage.\n *\n * Having this is the least hacky option. The only other option would be to toggle all calls to the\n * storageInstance by checking the givent context-string. 
But this would make it impossible\n * to run a replication on the parentStorage itself.\n */\n readonly underlyingPersistentStorage?: RxStorageInstance;\n\n /**\n * Writes multiple documents to the storage instance.\n * The write for each single document is atomic, there\n * is no transaction around all documents.\n * The written documents must be the newest revision of that documents data.\n * If the previous document is not the current newest revision, a conflict error\n * must be returned.\n * It must be possible that some document writes succeed\n * and others error. We need this to have a similar behavior as most NoSQL databases.\n */\n bulkWrite(\n documentWrites: BulkWriteRow[],\n /**\n * Context will be used in all\n * changeStream()-events that are emitted as a result\n * of that bulkWrite() operation.\n * Used in plugins so that we can detect that event X\n * comes from operation Y.\n */\n context: string\n ): Promise>;\n\n /**\n * Get Multiple documents by their primary value.\n * This must also return deleted documents.\n */\n findDocumentsById(\n /**\n * List of primary values\n * of the documents to find.\n */\n ids: string[],\n /**\n * If set to true, deleted documents will also be returned.\n */\n withDeleted: boolean\n\n ): Promise<\n /**\n * For better performance, we return an array\n * instead of an indexed object because most consumers\n * of this anyway have to fill a Map() instance or\n * even do only need the list at all.\n */\n RxDocumentData[]\n >;\n\n /**\n * Runs a NoSQL 'mango' query over the storage\n * and returns the found documents data.\n * Having all storage instances behave similar\n * is likely the most difficult thing when creating a new\n * rx-storage implementation.\n */\n query(\n preparedQuery: PreparedQuery\n ): Promise>;\n\n /**\n * Returns the amount of non-deleted documents\n * that match the given query.\n * Sort, skip and limit of the query must be ignored!\n */\n count(\n preparedQuery: PreparedQuery\n ): 
Promise;\n\n /**\n * Returns the plain data of a single attachment.\n */\n getAttachmentData(\n documentId: string,\n attachmentId: string,\n digest: string\n ): Promise;\n\n /**\n * Returns the current (not the old!) data of all documents that have been changed AFTER the given checkpoint.\n * If the returned array does not reach the limit, it can be assumed that the \"end\" is reached, when paginating over the changes.\n * Also returns a new checkpoint for each document which can be used to continue with the pagination from that change on.\n * Must never return the same document multiple times in the same call operation.\n * This is used by RxDB to known what has changed since X so these docs can be handled by the backup or the replication\n * plugin.\n *\n * Important: This method is optional. If not defined,\n * RxDB will manually run a query and use the last returned document\n * for checkpointing. In the future we might even remove this method completely\n * and let RxDB do the work instead of the RxStorage.\n */\n getChangedDocumentsSince?(\n limit: number,\n /**\n * The checkpoint from with to start\n * when the events are sorted in time.\n * If we want to start from the beginning,\n * undefined is used as a checkpoint.\n */\n checkpoint?: CheckpointType\n ): Promise<{\n documents: RxDocumentData[];\n /**\n * The checkpoint contains data so that another\n * call to getChangedDocumentsSince() will continue\n * from exactly the last document that was returned before.\n */\n checkpoint: CheckpointType;\n }>;\n\n /**\n * Returns an ongoing stream\n * of all changes that happen to the\n * storage instance.\n * Do not forget to unsubscribe.\n *\n * If the RxStorage support multi-instance,\n * and the storage is persistent,\n * then the emitted changes of one RxStorageInstance\n * must be also emitted to other instances with the same databaseName+collectionName.\n * See ./rx-storage-multiinstance.ts\n */\n changeStream(): Observable, CheckpointType>>;\n\n /**\n * 
Runs a cleanup that removes all tompstones\n * of documents that have _deleted set to true\n * to free up disc space.\n *\n * Returns true if all cleanable documents have been removed.\n * Returns false if there are more documents to be cleaned up,\n * but not all have been purged because that would block the storage for too long.\n */\n cleanup(\n /**\n * The minimum time in milliseconds\n * of how long a document must have been deleted\n * until it is purged by the cleanup.\n */\n minimumDeletedTime: number\n ): Promise<\n /**\n * True if all docs cleaned up,\n * false if there are more docs to clean up\n */\n boolean\n >;\n\n /**\n * Closes the storage instance so it cannot be used\n * anymore and should clear all memory.\n * The returned promise must resolve when everything is cleaned up.\n */\n close(): Promise;\n\n /**\n * Remove the database and\n * deletes all of its data.\n */\n remove(): Promise;\n\n /**\n * Instead of passing the conflict-resolver function\n * into the storage, we have to work with an observable that emits tasks\n * and a resolver that takes resolved tasks.\n * This is needed because the RxStorageInstance might run inside of a Worker\n * other JavaScript process, so we cannot pass plain code.\n */\n conflictResultionTasks(): Observable>;\n resolveConflictResultionTask(taskSolution: RxConflictResultionTaskSolution): Promise;\n}\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/cjs/types/util.d.js b/dist/cjs/types/util.d.js deleted file mode 100644 index 0ed965d6839..00000000000 --- a/dist/cjs/types/util.d.js +++ /dev/null @@ -1,2 +0,0 @@ -"use strict"; -//# sourceMappingURL=util.d.js.map \ No newline at end of file diff --git a/dist/cjs/types/util.d.js.map b/dist/cjs/types/util.d.js.map deleted file mode 100644 index 9d1b26e0974..00000000000 --- a/dist/cjs/types/util.d.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"util.d.js","names":[],"sources":["../../../src/types/util.d.ts"],"sourcesContent":["import type { RxStorage } from './rx-storage.interface';\n\nexport type MaybePromise = Promise | T;\n\n\nexport type PlainJsonValue = string | number | boolean | PlainSimpleJsonObject | PlainSimpleJsonObject[] | PlainJsonValue[];\nexport type PlainSimpleJsonObject = {\n [k: string]: PlainJsonValue | PlainJsonValue[];\n};\n\n/**\n * @link https://stackoverflow.com/a/49670389/3443137\n */\ntype DeepReadonly =\n T extends (infer R)[] ? DeepReadonlyArray :\n T extends Function ? T :\n T extends object ? DeepReadonlyObject :\n T;\n\ninterface DeepReadonlyArray extends ReadonlyArray> { }\n\ntype DeepReadonlyObject = {\n readonly [P in keyof T]: DeepReadonly;\n};\n\nexport type MaybeReadonly = T | Readonly;\n\n\n/**\n * Opposite of DeepReadonly,\n * makes everything mutable again.\n */\ntype DeepMutable = (\n T extends object\n ? {\n -readonly [K in keyof T]: (\n T[K] extends object\n ? DeepMutable\n : T[K]\n )\n }\n : never\n);\n\n/**\n * Can be used like 'keyof'\n * but only represents the string keys, not the Symbols or numbers.\n * @link https://stackoverflow.com/a/51808262/3443137\n */\nexport type StringKeys = Extract;\n\nexport type AnyKeys = { [P in keyof T]?: T[P] | any };\nexport interface AnyObject {\n [k: string]: any;\n}\n\n/**\n * @link https://dev.to/vborodulin/ts-how-to-override-properties-with-type-intersection-554l\n */\nexport type Override = Omit & T2;\n\n\n\nexport type ById = {\n [id: string]: T;\n};\n\n/**\n * Must be async to support async hashing like from the WebCrypto API.\n */\nexport type HashFunction = (input: string) => Promise;\n\nexport declare type QueryMatcher = (doc: DocType | DeepReadonly) => boolean;\n\n/**\n * To have a deterministic sorting, we cannot return 0,\n * we only return 1 or -1.\n * This ensures that we always end with the same output array, no mather of the\n * pre-sorting of the input array.\n */\nexport declare type 
DeterministicSortComparator = (a: DocType, b: DocType) => 1 | -1;\n\n/**\n * To test a storage, we need these\n * configuration values.\n */\nexport type RxTestStorage = {\n // can be used to setup async stuff\n readonly init?: () => any;\n // TODO remove name here, it can be read out already via getStorage().name\n readonly name: string;\n readonly getStorage: () => RxStorage;\n /**\n * Returns a storage that is used in performance tests.\n * For example in a browser it should return the storage with an IndexedDB based adapter,\n * while in node.js it must use the filesystem.\n */\n readonly getPerformanceStorage: () => {\n storage: RxStorage;\n /**\n * A description that describes the storage and setting.\n * For example 'dexie-native'.\n */\n description: string;\n };\n /**\n * True if the storage is able to\n * keep data after an instance is closed and opened again.\n */\n readonly hasPersistence: boolean;\n readonly hasMultiInstance: boolean;\n readonly hasAttachments: boolean;\n\n /**\n * Some storages likes the memory-synced storage,\n * are not able to provide a replication while guaranteeing\n * data integrity.\n */\n readonly hasReplication: boolean;\n\n /**\n * To make it possible to test alternative encryption plugins,\n * you can specify hasEncryption to signal\n * the test runner that the given storage already contains an\n * encryption plugin that should be used to test encryption tests.\n * Otherwise the encryption-crypto-js plugin will be tested.\n *\n * hasEncryption must contain a function that is able\n * to create a new password.\n */\n readonly hasEncryption?: () => Promise;\n};\n\n\n/**\n * The paths as strings-type of nested object\n * @link https://stackoverflow.com/a/58436959/3443137\n */\ntype Join = K extends string | number ?\n P extends string | number ?\n `${K}${'' extends P ? '' : '.'}${P}`\n : never : never;\n\nexport type Paths = [D] extends [never] ? 
never : T extends object ?\n { [K in keyof T]-?: K extends string | number ?\n `${K}` | (Paths extends infer R ? Join : never)\n : never\n }[keyof T] : '';\n\nexport type Leaves = [D] extends [never] ? never : T extends object ?\n { [K in keyof T]-?: Join> }[keyof T] : '';\ntype Prev = [never, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10,\n 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, ...0[]];\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/change-event-buffer.js b/dist/esm/change-event-buffer.js deleted file mode 100644 index 09e8b58f24f..00000000000 --- a/dist/esm/change-event-buffer.js +++ /dev/null @@ -1,97 +0,0 @@ -/** - * a buffer-cache which holds the last X changeEvents of the collection - */ - -import { filter } from 'rxjs/operators'; -export var ChangeEventBuffer = /*#__PURE__*/function () { - /** - * array with changeEvents - * starts with oldest known event, ends with newest - */ - - function ChangeEventBuffer(collection) { - this.subs = []; - this.limit = 100; - this.counter = 0; - this.eventCounterMap = new WeakMap(); - this.buffer = []; - this.collection = collection; - this.subs.push(this.collection.$.pipe(filter(cE => !cE.isLocal)).subscribe(cE => this._handleChangeEvent(cE))); - } - var _proto = ChangeEventBuffer.prototype; - _proto._handleChangeEvent = function _handleChangeEvent(changeEvent) { - this.counter++; - this.buffer.push(changeEvent); - this.eventCounterMap.set(changeEvent, this.counter); - while (this.buffer.length > this.limit) { - this.buffer.shift(); - } - } - - /** - * gets the array-index for the given pointer - * @return arrayIndex which can be used to iterate from there. 
If null, pointer is out of lower bound - */; - _proto.getArrayIndexByPointer = function getArrayIndexByPointer(pointer) { - var oldestEvent = this.buffer[0]; - var oldestCounter = this.eventCounterMap.get(oldestEvent); - if (pointer < oldestCounter) return null; // out of bounds - - var rest = pointer - oldestCounter; - return rest; - } - - /** - * get all changeEvents which came in later than the pointer-event - * @return array with change-events. If null, pointer out of bounds - */; - _proto.getFrom = function getFrom(pointer) { - var ret = []; - var currentIndex = this.getArrayIndexByPointer(pointer); - if (currentIndex === null) - // out of bounds - return null; - while (true) { - var nextEvent = this.buffer[currentIndex]; - currentIndex++; - if (!nextEvent) { - return ret; - } else { - ret.push(nextEvent); - } - } - }; - _proto.runFrom = function runFrom(pointer, fn) { - var ret = this.getFrom(pointer); - if (ret === null) { - throw new Error('out of bounds'); - } else { - ret.forEach(cE => fn(cE)); - } - } - - /** - * no matter how many operations are done on one document, - * only the last operation has to be checked to calculate the new state - * this function reduces the events to the last ChangeEvent of each doc - */; - _proto.reduceByLastOfDoc = function reduceByLastOfDoc(changeEvents) { - return changeEvents.slice(0); - // TODO the old implementation was wrong - // because it did not correctly reassigned the previousData of the changeevents - // this should be added to the event-reduce library and not be done in RxDB - var docEventMap = {}; - changeEvents.forEach(changeEvent => { - docEventMap[changeEvent.documentId] = changeEvent; - }); - return Object.values(docEventMap); - }; - _proto.destroy = function destroy() { - this.subs.forEach(sub => sub.unsubscribe()); - }; - return ChangeEventBuffer; -}(); -export function createChangeEventBuffer(collection) { - return new ChangeEventBuffer(collection); -} -//# sourceMappingURL=change-event-buffer.js.map \ 
No newline at end of file diff --git a/dist/esm/change-event-buffer.js.map b/dist/esm/change-event-buffer.js.map deleted file mode 100644 index 1c84fd7e9c8..00000000000 --- a/dist/esm/change-event-buffer.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"change-event-buffer.js","names":["filter","ChangeEventBuffer","collection","subs","limit","counter","eventCounterMap","WeakMap","buffer","push","$","pipe","cE","isLocal","subscribe","_handleChangeEvent","_proto","prototype","changeEvent","set","length","shift","getArrayIndexByPointer","pointer","oldestEvent","oldestCounter","get","rest","getFrom","ret","currentIndex","nextEvent","runFrom","fn","Error","forEach","reduceByLastOfDoc","changeEvents","slice","docEventMap","documentId","Object","values","destroy","sub","unsubscribe","createChangeEventBuffer"],"sources":["../../src/change-event-buffer.ts"],"sourcesContent":["/**\n * a buffer-cache which holds the last X changeEvents of the collection\n */\nimport {\n Subscription\n} from 'rxjs';\nimport { filter } from 'rxjs/operators';\nimport type {\n RxChangeEvent,\n RxCollection\n} from './types/index.d.ts';\n\nexport class ChangeEventBuffer {\n private subs: Subscription[] = [];\n public limit: number = 100;\n public counter: number = 0;\n private eventCounterMap: WeakMap<\n RxChangeEvent, number\n > = new WeakMap();\n\n /**\n * array with changeEvents\n * starts with oldest known event, ends with newest\n */\n public buffer: RxChangeEvent[] = [];\n\n constructor(\n public collection: RxCollection\n ) {\n this.subs.push(\n this.collection.$.pipe(\n filter(cE => !cE.isLocal)\n ).subscribe((cE: any) => this._handleChangeEvent(cE))\n );\n }\n\n _handleChangeEvent(changeEvent: RxChangeEvent) {\n this.counter++;\n this.buffer.push(changeEvent);\n this.eventCounterMap.set(changeEvent, this.counter);\n while (this.buffer.length > this.limit) {\n this.buffer.shift();\n }\n }\n\n /**\n * gets the array-index for the given pointer\n * @return arrayIndex which can be used 
to iterate from there. If null, pointer is out of lower bound\n */\n getArrayIndexByPointer(pointer: number): number | null {\n const oldestEvent = this.buffer[0];\n const oldestCounter = this.eventCounterMap.get(\n oldestEvent\n ) as number;\n\n if (pointer < oldestCounter)\n return null; // out of bounds\n\n const rest = pointer - oldestCounter;\n return rest;\n }\n\n /**\n * get all changeEvents which came in later than the pointer-event\n * @return array with change-events. If null, pointer out of bounds\n */\n getFrom(pointer: number): RxChangeEvent[] | null {\n const ret = [];\n let currentIndex = this.getArrayIndexByPointer(pointer);\n if (currentIndex === null) // out of bounds\n return null;\n\n while (true) {\n const nextEvent = this.buffer[currentIndex];\n currentIndex++;\n if (!nextEvent) {\n return ret;\n } else {\n ret.push(nextEvent);\n }\n }\n }\n\n runFrom(pointer: number, fn: Function) {\n const ret = this.getFrom(pointer);\n if (ret === null) {\n throw new Error('out of bounds');\n } else {\n ret.forEach(cE => fn(cE));\n }\n }\n\n /**\n * no matter how many operations are done on one document,\n * only the last operation has to be checked to calculate the new state\n * this function reduces the events to the last ChangeEvent of each doc\n */\n reduceByLastOfDoc(changeEvents: RxChangeEvent[]): RxChangeEvent[] {\n return changeEvents.slice(0);\n // TODO the old implementation was wrong\n // because it did not correctly reassigned the previousData of the changeevents\n // this should be added to the event-reduce library and not be done in RxDB\n const docEventMap: any = {};\n changeEvents.forEach(changeEvent => {\n docEventMap[changeEvent.documentId] = changeEvent;\n });\n return Object.values(docEventMap);\n }\n\n destroy() {\n this.subs.forEach(sub => sub.unsubscribe());\n }\n}\n\nexport function createChangeEventBuffer(\n collection: RxCollection\n) {\n return new 
ChangeEventBuffer(collection);\n}\n"],"mappings":"AAAA;AACA;AACA;;AAIA,SAASA,MAAM,QAAQ,gBAAgB;AAMvC,WAAaC,iBAAiB;EAQ1B;AACJ;AACA;AACA;;EAGI,SAAAA,kBACWC,UAAwB,EACjC;IAAA,KAfMC,IAAI,GAAmB,EAAE;IAAA,KAC1BC,KAAK,GAAW,GAAG;IAAA,KACnBC,OAAO,GAAW,CAAC;IAAA,KAClBC,eAAe,GAEnB,IAAIC,OAAO,CAAC,CAAC;IAAA,KAMVC,MAAM,GAA+B,EAAE;IAAA,KAGnCN,UAAwB,GAAxBA,UAAwB;IAE/B,IAAI,CAACC,IAAI,CAACM,IAAI,CACV,IAAI,CAACP,UAAU,CAACQ,CAAC,CAACC,IAAI,CAClBX,MAAM,CAACY,EAAE,IAAI,CAACA,EAAE,CAACC,OAAO,CAC5B,CAAC,CAACC,SAAS,CAAEF,EAAO,IAAK,IAAI,CAACG,kBAAkB,CAACH,EAAE,CAAC,CACxD,CAAC;EACL;EAAC,IAAAI,MAAA,GAAAf,iBAAA,CAAAgB,SAAA;EAAAD,MAAA,CAEDD,kBAAkB,GAAlB,SAAAA,mBAAmBG,WAAqC,EAAE;IACtD,IAAI,CAACb,OAAO,EAAE;IACd,IAAI,CAACG,MAAM,CAACC,IAAI,CAACS,WAAW,CAAC;IAC7B,IAAI,CAACZ,eAAe,CAACa,GAAG,CAACD,WAAW,EAAE,IAAI,CAACb,OAAO,CAAC;IACnD,OAAO,IAAI,CAACG,MAAM,CAACY,MAAM,GAAG,IAAI,CAAChB,KAAK,EAAE;MACpC,IAAI,CAACI,MAAM,CAACa,KAAK,CAAC,CAAC;IACvB;EACJ;;EAEA;AACJ;AACA;AACA,KAHI;EAAAL,MAAA,CAIAM,sBAAsB,GAAtB,SAAAA,uBAAuBC,OAAe,EAAiB;IACnD,IAAMC,WAAW,GAAG,IAAI,CAAChB,MAAM,CAAC,CAAC,CAAC;IAClC,IAAMiB,aAAa,GAAG,IAAI,CAACnB,eAAe,CAACoB,GAAG,CAC1CF,WACJ,CAAW;IAEX,IAAID,OAAO,GAAGE,aAAa,EACvB,OAAO,IAAI,CAAC,CAAC;;IAEjB,IAAME,IAAI,GAAGJ,OAAO,GAAGE,aAAa;IACpC,OAAOE,IAAI;EACf;;EAEA;AACJ;AACA;AACA,KAHI;EAAAX,MAAA,CAIAY,OAAO,GAAP,SAAAA,QAAQL,OAAe,EAAqC;IACxD,IAAMM,GAAG,GAAG,EAAE;IACd,IAAIC,YAAY,GAAG,IAAI,CAACR,sBAAsB,CAACC,OAAO,CAAC;IACvD,IAAIO,YAAY,KAAK,IAAI;MAAE;MACvB,OAAO,IAAI;IAEf,OAAO,IAAI,EAAE;MACT,IAAMC,SAAS,GAAG,IAAI,CAACvB,MAAM,CAACsB,YAAY,CAAC;MAC3CA,YAAY,EAAE;MACd,IAAI,CAACC,SAAS,EAAE;QACZ,OAAOF,GAAG;MACd,CAAC,MAAM;QACHA,GAAG,CAACpB,IAAI,CAACsB,SAAS,CAAC;MACvB;IACJ;EACJ,CAAC;EAAAf,MAAA,CAEDgB,OAAO,GAAP,SAAAA,QAAQT,OAAe,EAAEU,EAAY,EAAE;IACnC,IAAMJ,GAAG,GAAG,IAAI,CAACD,OAAO,CAACL,OAAO,CAAC;IACjC,IAAIM,GAAG,KAAK,IAAI,EAAE;MACd,MAAM,IAAIK,KAAK,CAAC,eAAe,CAAC;IACpC,CAAC,MAAM;MACHL,GAAG,CAACM,OAAO,CAACvB,EAAE,IAAIqB,EAAE,CAACrB,EAAE,CAAC,CAAC;IAC7B;EACJ;;EAEA;AACJ;AACA;AACA;AACA,KAJI;EAAAI,MAAA,CAKAoB,iBAAiB,GAAjB,SAAA
A,kBAAkBC,YAAwC,EAA8B;IACpF,OAAOA,YAAY,CAACC,KAAK,CAAC,CAAC,CAAC;IAC5B;IACA;IACA;IACA,IAAMC,WAAgB,GAAG,CAAC,CAAC;IAC3BF,YAAY,CAACF,OAAO,CAACjB,WAAW,IAAI;MAChCqB,WAAW,CAACrB,WAAW,CAACsB,UAAU,CAAC,GAAGtB,WAAW;IACrD,CAAC,CAAC;IACF,OAAOuB,MAAM,CAACC,MAAM,CAACH,WAAW,CAAC;EACrC,CAAC;EAAAvB,MAAA,CAED2B,OAAO,GAAP,SAAAA,QAAA,EAAU;IACN,IAAI,CAACxC,IAAI,CAACgC,OAAO,CAACS,GAAG,IAAIA,GAAG,CAACC,WAAW,CAAC,CAAC,CAAC;EAC/C,CAAC;EAAA,OAAA5C,iBAAA;AAAA;AAGL,OAAO,SAAS6C,uBAAuBA,CACnC5C,UAAwC,EAC1C;EACE,OAAO,IAAID,iBAAiB,CAAYC,UAAU,CAAC;AACvD","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/custom-index.js b/dist/esm/custom-index.js deleted file mode 100644 index 2ef49c3b315..00000000000 --- a/dist/esm/custom-index.js +++ /dev/null @@ -1,271 +0,0 @@ -/** - * For some RxStorage implementations, - * we need to use our custom crafted indexes - * so we can easily iterate over them. And sort plain arrays of document data. - * - * We really often have to craft an index string for a given document. - * Performance of everything in this file is very important - * which is why the code sometimes looks strange. - * Run performance tests before and after you touch anything here! - */ - -import { getSchemaByObjectPath } from "./rx-schema-helper.js"; -import { ensureNotFalsy, objectPathMonad } from "./plugins/utils/index.js"; -import { INDEX_MAX, INDEX_MIN } from "./query-planner.js"; - -/** - * Prepare all relevant information - * outside of the returned function - * from getIndexableStringMonad() - * to save performance when the returned - * function is called many times. 
- */ - -export function getIndexMeta(schema, index) { - var fieldNameProperties = index.map(fieldName => { - var schemaPart = getSchemaByObjectPath(schema, fieldName); - if (!schemaPart) { - throw new Error('not in schema: ' + fieldName); - } - var type = schemaPart.type; - var parsedLengths; - if (type === 'number' || type === 'integer') { - parsedLengths = getStringLengthOfIndexNumber(schemaPart); - } - var getValue = objectPathMonad(fieldName); - var maxLength = schemaPart.maxLength ? schemaPart.maxLength : 0; - var getIndexStringPart; - if (type === 'string') { - getIndexStringPart = docData => { - var fieldValue = getValue(docData); - if (!fieldValue) { - fieldValue = ''; - } - return fieldValue.padEnd(maxLength, ' '); - }; - } else if (type === 'boolean') { - getIndexStringPart = docData => { - var fieldValue = getValue(docData); - return fieldValue ? '1' : '0'; - }; - } else { - // number - getIndexStringPart = docData => { - var fieldValue = getValue(docData); - return getNumberIndexString(parsedLengths, fieldValue); - }; - } - var ret = { - fieldName, - schemaPart, - parsedLengths, - getValue, - getIndexStringPart - }; - return ret; - }); - return fieldNameProperties; -} - -/** - * Crafts an indexable string that can be used - * to check if a document would be sorted below or above - * another documents, dependent on the index values. - * @monad for better performance - * - * IMPORTANT: Performance is really important here - * which is why we code so 'strange'. - * Always run performance tests when you want to - * change something in this method. - */ -export function getIndexableStringMonad(schema, index) { - var fieldNameProperties = getIndexMeta(schema, index); - var fieldNamePropertiesAmount = fieldNameProperties.length; - var indexPartsFunctions = fieldNameProperties.map(r => r.getIndexStringPart); - - /** - * @hotPath Performance of this function is very critical! 
- */ - var ret = function (docData) { - var str = ''; - for (var i = 0; i < fieldNamePropertiesAmount; ++i) { - str += indexPartsFunctions[i](docData); - } - return str; - }; - return ret; -} -export function getStringLengthOfIndexNumber(schemaPart) { - var minimum = Math.floor(schemaPart.minimum); - var maximum = Math.ceil(schemaPart.maximum); - var multipleOf = schemaPart.multipleOf; - var valueSpan = maximum - minimum; - var nonDecimals = valueSpan.toString().length; - var multipleOfParts = multipleOf.toString().split('.'); - var decimals = 0; - if (multipleOfParts.length > 1) { - decimals = multipleOfParts[1].length; - } - return { - minimum, - maximum, - nonDecimals, - decimals, - roundedMinimum: minimum - }; -} -export function getIndexStringLength(schema, index) { - var fieldNameProperties = getIndexMeta(schema, index); - var length = 0; - fieldNameProperties.forEach(props => { - var schemaPart = props.schemaPart; - var type = schemaPart.type; - if (type === 'string') { - length += schemaPart.maxLength; - } else if (type === 'boolean') { - length += 1; - } else { - var parsedLengths = props.parsedLengths; - length = length + parsedLengths.nonDecimals + parsedLengths.decimals; - } - }); - return length; -} -export function getPrimaryKeyFromIndexableString(indexableString, primaryKeyLength) { - var paddedPrimaryKey = indexableString.slice(primaryKeyLength * -1); - // we can safely trim here because the primary key is not allowed to start or end with a space char. - var primaryKey = paddedPrimaryKey.trim(); - return primaryKey; -} -export function getNumberIndexString(parsedLengths, fieldValue) { - /** - * Ensure that the given value is in the boundaries - * of the schema, otherwise it would create a broken index string. 
- * This can happen for example if you have a minimum of 0 - * and run a query like - * selector { - * numField: { $gt: -1000 } - * } - */ - if (typeof fieldValue === 'undefined') { - fieldValue = 0; - } - if (fieldValue < parsedLengths.minimum) { - fieldValue = parsedLengths.minimum; - } - if (fieldValue > parsedLengths.maximum) { - fieldValue = parsedLengths.maximum; - } - var nonDecimalsValueAsString = (Math.floor(fieldValue) - parsedLengths.roundedMinimum).toString(); - var str = nonDecimalsValueAsString.padStart(parsedLengths.nonDecimals, '0'); - if (parsedLengths.decimals > 0) { - var splitByDecimalPoint = fieldValue.toString().split('.'); - var decimalValueAsString = splitByDecimalPoint.length > 1 ? splitByDecimalPoint[1] : '0'; - str += decimalValueAsString.padEnd(parsedLengths.decimals, '0'); - } - return str; -} -export function getStartIndexStringFromLowerBound(schema, index, lowerBound) { - var str = ''; - index.forEach((fieldName, idx) => { - var schemaPart = getSchemaByObjectPath(schema, fieldName); - var bound = lowerBound[idx]; - var type = schemaPart.type; - switch (type) { - case 'string': - var maxLength = ensureNotFalsy(schemaPart.maxLength, 'maxLength not set'); - if (typeof bound === 'string') { - str += bound.padEnd(maxLength, ' '); - } else { - // str += ''.padStart(maxLength, inclusiveStart ? ' ' : INDEX_MAX); - str += ''.padEnd(maxLength, ' '); - } - break; - case 'boolean': - if (bound === null) { - str += '0'; - } else if (bound === INDEX_MIN) { - str += '0'; - } else if (bound === INDEX_MAX) { - str += '1'; - } else { - var boolToStr = bound ? 
'1' : '0'; - str += boolToStr; - } - break; - case 'number': - case 'integer': - var parsedLengths = getStringLengthOfIndexNumber(schemaPart); - if (bound === null || bound === INDEX_MIN) { - var fillChar = '0'; - str += fillChar.repeat(parsedLengths.nonDecimals + parsedLengths.decimals); - } else if (bound === INDEX_MAX) { - str += getNumberIndexString(parsedLengths, parsedLengths.maximum); - } else { - var add = getNumberIndexString(parsedLengths, bound); - str += add; - } - break; - default: - throw new Error('unknown index type ' + type); - } - }); - return str; -} -export function getStartIndexStringFromUpperBound(schema, index, upperBound) { - var str = ''; - index.forEach((fieldName, idx) => { - var schemaPart = getSchemaByObjectPath(schema, fieldName); - var bound = upperBound[idx]; - var type = schemaPart.type; - switch (type) { - case 'string': - var maxLength = ensureNotFalsy(schemaPart.maxLength, 'maxLength not set'); - if (typeof bound === 'string' && bound !== INDEX_MAX) { - str += bound.padEnd(maxLength, ' '); - } else if (bound === INDEX_MIN) { - str += ''.padEnd(maxLength, ' '); - } else { - str += ''.padEnd(maxLength, INDEX_MAX); - } - break; - case 'boolean': - if (bound === null) { - str += '1'; - } else { - var boolToStr = bound ? 
'1' : '0'; - str += boolToStr; - } - break; - case 'number': - case 'integer': - var parsedLengths = getStringLengthOfIndexNumber(schemaPart); - if (bound === null || bound === INDEX_MAX) { - var fillChar = '9'; - str += fillChar.repeat(parsedLengths.nonDecimals + parsedLengths.decimals); - } else if (bound === INDEX_MIN) { - var _fillChar = '0'; - str += _fillChar.repeat(parsedLengths.nonDecimals + parsedLengths.decimals); - } else { - str += getNumberIndexString(parsedLengths, bound); - } - break; - default: - throw new Error('unknown index type ' + type); - } - }); - return str; -} - -/** - * Used in storages where it is not possible - * to define inclusiveEnd/inclusiveStart - */ -export function changeIndexableStringByOneQuantum(str, direction) { - var lastChar = str.slice(-1); - var charCode = lastChar.charCodeAt(0); - charCode = charCode + direction; - var withoutLastChar = str.slice(0, -1); - return withoutLastChar + String.fromCharCode(charCode); -} -//# sourceMappingURL=custom-index.js.map \ No newline at end of file diff --git a/dist/esm/custom-index.js.map b/dist/esm/custom-index.js.map deleted file mode 100644 index 9438e7a77d0..00000000000 --- a/dist/esm/custom-index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"custom-index.js","names":["getSchemaByObjectPath","ensureNotFalsy","objectPathMonad","INDEX_MAX","INDEX_MIN","getIndexMeta","schema","index","fieldNameProperties","map","fieldName","schemaPart","Error","type","parsedLengths","getStringLengthOfIndexNumber","getValue","maxLength","getIndexStringPart","docData","fieldValue","padEnd","getNumberIndexString","ret","getIndexableStringMonad","fieldNamePropertiesAmount","length","indexPartsFunctions","r","str","i","minimum","Math","floor","maximum","ceil","multipleOf","valueSpan","nonDecimals","toString","multipleOfParts","split","decimals","roundedMinimum","getIndexStringLength","forEach","props","getPrimaryKeyFromIndexableString","indexableString","primaryKeyLength","paddedPrimaryKey","slice","primaryKey","trim","nonDecimalsValueAsString","padStart","splitByDecimalPoint","decimalValueAsString","getStartIndexStringFromLowerBound","lowerBound","idx","bound","boolToStr","fillChar","repeat","add","getStartIndexStringFromUpperBound","upperBound","changeIndexableStringByOneQuantum","direction","lastChar","charCode","charCodeAt","withoutLastChar","String","fromCharCode"],"sources":["../../src/custom-index.ts"],"sourcesContent":["/**\n * For some RxStorage implementations,\n * we need to use our custom crafted indexes\n * so we can easily iterate over them. 
And sort plain arrays of document data.\n *\n * We really often have to craft an index string for a given document.\n * Performance of everything in this file is very important\n * which is why the code sometimes looks strange.\n * Run performance tests before and after you touch anything here!\n */\n\nimport {\n getSchemaByObjectPath\n} from './rx-schema-helper.ts';\nimport type {\n JsonSchema,\n RxDocumentData,\n RxJsonSchema\n} from './types/index.ts';\nimport {\n ensureNotFalsy,\n objectPathMonad,\n ObjectPathMonadFunction\n} from './plugins/utils/index.ts';\nimport {\n INDEX_MAX,\n INDEX_MIN\n} from './query-planner.ts';\n\n\n/**\n * Prepare all relevant information\n * outside of the returned function\n * from getIndexableStringMonad()\n * to save performance when the returned\n * function is called many times.\n */\ntype IndexMetaField = {\n fieldName: string;\n schemaPart: JsonSchema;\n /*\n * Only in number fields.\n */\n parsedLengths?: ParsedLengths;\n getValue: ObjectPathMonadFunction;\n getIndexStringPart: (docData: RxDocumentData) => string;\n};\n\nexport function getIndexMeta(\n schema: RxJsonSchema>,\n index: string[]\n): IndexMetaField[] {\n const fieldNameProperties: IndexMetaField[] = index.map(fieldName => {\n const schemaPart = getSchemaByObjectPath(\n schema,\n fieldName\n );\n if (!schemaPart) {\n throw new Error('not in schema: ' + fieldName);\n }\n const type = schemaPart.type;\n let parsedLengths: ParsedLengths | undefined;\n if (type === 'number' || type === 'integer') {\n parsedLengths = getStringLengthOfIndexNumber(\n schemaPart\n );\n }\n\n const getValue = objectPathMonad(fieldName);\n const maxLength = schemaPart.maxLength ? 
schemaPart.maxLength : 0;\n\n let getIndexStringPart: (docData: RxDocumentData) => string;\n if (type === 'string') {\n getIndexStringPart = docData => {\n let fieldValue = getValue(docData);\n if (!fieldValue) {\n fieldValue = '';\n }\n return fieldValue.padEnd(maxLength, ' ');\n };\n } else if (type === 'boolean') {\n getIndexStringPart = docData => {\n const fieldValue = getValue(docData);\n return fieldValue ? '1' : '0';\n };\n } else { // number\n getIndexStringPart = docData => {\n const fieldValue = getValue(docData);\n return getNumberIndexString(\n parsedLengths as any,\n fieldValue\n );\n };\n }\n\n const ret: IndexMetaField = {\n fieldName,\n schemaPart,\n parsedLengths,\n getValue,\n getIndexStringPart\n };\n return ret;\n });\n return fieldNameProperties;\n}\n\n\n/**\n * Crafts an indexable string that can be used\n * to check if a document would be sorted below or above\n * another documents, dependent on the index values.\n * @monad for better performance\n *\n * IMPORTANT: Performance is really important here\n * which is why we code so 'strange'.\n * Always run performance tests when you want to\n * change something in this method.\n */\nexport function getIndexableStringMonad(\n schema: RxJsonSchema>,\n index: string[]\n): (docData: RxDocumentData) => string {\n const fieldNameProperties = getIndexMeta(schema, index);\n const fieldNamePropertiesAmount = fieldNameProperties.length;\n const indexPartsFunctions = fieldNameProperties.map(r => r.getIndexStringPart);\n\n\n /**\n * @hotPath Performance of this function is very critical!\n */\n const ret = function (docData: RxDocumentData): string {\n let str = '';\n for (let i = 0; i < fieldNamePropertiesAmount; ++i) {\n str += indexPartsFunctions[i](docData);\n }\n return str;\n };\n return ret;\n}\n\n\ndeclare type ParsedLengths = {\n minimum: number;\n maximum: number;\n nonDecimals: number;\n decimals: number;\n roundedMinimum: number;\n};\nexport function getStringLengthOfIndexNumber(\n schemaPart: 
JsonSchema\n): ParsedLengths {\n const minimum = Math.floor(schemaPart.minimum as number);\n const maximum = Math.ceil(schemaPart.maximum as number);\n const multipleOf: number = schemaPart.multipleOf as number;\n\n const valueSpan = maximum - minimum;\n const nonDecimals = valueSpan.toString().length;\n\n const multipleOfParts = multipleOf.toString().split('.');\n let decimals = 0;\n if (multipleOfParts.length > 1) {\n decimals = multipleOfParts[1].length;\n }\n return {\n minimum,\n maximum,\n nonDecimals,\n decimals,\n roundedMinimum: minimum\n };\n}\n\nexport function getIndexStringLength(\n schema: RxJsonSchema>,\n index: string[]\n): number {\n const fieldNameProperties = getIndexMeta(schema, index);\n let length = 0;\n fieldNameProperties.forEach(props => {\n const schemaPart = props.schemaPart;\n const type = schemaPart.type;\n\n if (type === 'string') {\n length += schemaPart.maxLength as number;\n } else if (type === 'boolean') {\n length += 1;\n } else {\n const parsedLengths = props.parsedLengths as ParsedLengths;\n length = length + parsedLengths.nonDecimals + parsedLengths.decimals;\n }\n\n });\n return length;\n}\n\n\nexport function getPrimaryKeyFromIndexableString(\n indexableString: string,\n primaryKeyLength: number\n): string {\n const paddedPrimaryKey = indexableString.slice(primaryKeyLength * -1);\n // we can safely trim here because the primary key is not allowed to start or end with a space char.\n const primaryKey = paddedPrimaryKey.trim();\n return primaryKey;\n}\n\n\nexport function getNumberIndexString(\n parsedLengths: ParsedLengths,\n fieldValue: number\n): string {\n /**\n * Ensure that the given value is in the boundaries\n * of the schema, otherwise it would create a broken index string.\n * This can happen for example if you have a minimum of 0\n * and run a query like\n * selector {\n * numField: { $gt: -1000 }\n * }\n */\n if (typeof fieldValue === 'undefined') {\n fieldValue = 0;\n }\n if (fieldValue < parsedLengths.minimum) {\n 
fieldValue = parsedLengths.minimum;\n }\n if (fieldValue > parsedLengths.maximum) {\n fieldValue = parsedLengths.maximum;\n }\n\n const nonDecimalsValueAsString = (Math.floor(fieldValue) - parsedLengths.roundedMinimum).toString();\n let str = nonDecimalsValueAsString.padStart(parsedLengths.nonDecimals, '0');\n\n if (parsedLengths.decimals > 0) {\n const splitByDecimalPoint = fieldValue.toString().split('.');\n const decimalValueAsString = splitByDecimalPoint.length > 1 ? splitByDecimalPoint[1] : '0';\n str += decimalValueAsString.padEnd(parsedLengths.decimals, '0');\n }\n return str;\n}\n\nexport function getStartIndexStringFromLowerBound(\n schema: RxJsonSchema,\n index: string[],\n lowerBound: (string | boolean | number | null | undefined)[]\n): string {\n let str = '';\n index.forEach((fieldName, idx) => {\n const schemaPart = getSchemaByObjectPath(\n schema,\n fieldName\n );\n const bound = lowerBound[idx];\n const type = schemaPart.type;\n\n switch (type) {\n case 'string':\n const maxLength = ensureNotFalsy(schemaPart.maxLength, 'maxLength not set');\n if (typeof bound === 'string') {\n str += (bound as string).padEnd(maxLength, ' ');\n } else {\n // str += ''.padStart(maxLength, inclusiveStart ? ' ' : INDEX_MAX);\n str += ''.padEnd(maxLength, ' ');\n }\n break;\n case 'boolean':\n if (bound === null) {\n str += '0';\n } else if (bound === INDEX_MIN) {\n str += '0';\n } else if (bound === INDEX_MAX) {\n str += '1';\n } else {\n const boolToStr = bound ? 
'1' : '0';\n str += boolToStr;\n }\n break;\n case 'number':\n case 'integer':\n const parsedLengths = getStringLengthOfIndexNumber(\n schemaPart\n );\n if (bound === null || bound === INDEX_MIN) {\n const fillChar = '0';\n str += fillChar.repeat(parsedLengths.nonDecimals + parsedLengths.decimals);\n } else if (bound === INDEX_MAX) {\n str += getNumberIndexString(\n parsedLengths,\n parsedLengths.maximum\n );\n } else {\n const add = getNumberIndexString(\n parsedLengths,\n bound as number\n );\n str += add;\n }\n break;\n default:\n throw new Error('unknown index type ' + type);\n }\n });\n return str;\n}\n\n\nexport function getStartIndexStringFromUpperBound(\n schema: RxJsonSchema,\n index: string[],\n upperBound: (string | boolean | number | null | undefined)[]\n): string {\n let str = '';\n index.forEach((fieldName, idx) => {\n const schemaPart = getSchemaByObjectPath(\n schema,\n fieldName\n );\n const bound = upperBound[idx];\n const type = schemaPart.type;\n\n switch (type) {\n case 'string':\n const maxLength = ensureNotFalsy(schemaPart.maxLength, 'maxLength not set');\n if (typeof bound === 'string' && bound !== INDEX_MAX) {\n str += (bound as string).padEnd(maxLength, ' ');\n } else if (bound === INDEX_MIN) {\n str += ''.padEnd(maxLength, ' ');\n } else {\n str += ''.padEnd(maxLength, INDEX_MAX);\n }\n break;\n case 'boolean':\n if (bound === null) {\n str += '1';\n } else {\n const boolToStr = bound ? 
'1' : '0';\n str += boolToStr;\n }\n break;\n case 'number':\n case 'integer':\n const parsedLengths = getStringLengthOfIndexNumber(\n schemaPart\n );\n if (bound === null || bound === INDEX_MAX) {\n const fillChar = '9';\n str += fillChar.repeat(parsedLengths.nonDecimals + parsedLengths.decimals);\n } else if (bound === INDEX_MIN) {\n const fillChar = '0';\n str += fillChar.repeat(parsedLengths.nonDecimals + parsedLengths.decimals);\n } else {\n str += getNumberIndexString(\n parsedLengths,\n bound as number\n );\n }\n break;\n default:\n throw new Error('unknown index type ' + type);\n }\n });\n return str;\n}\n\n/**\n * Used in storages where it is not possible\n * to define inclusiveEnd/inclusiveStart\n */\nexport function changeIndexableStringByOneQuantum(str: string, direction: 1 | -1): string {\n const lastChar = str.slice(-1);\n let charCode = lastChar.charCodeAt(0);\n charCode = charCode + direction;\n const withoutLastChar = str.slice(0, -1);\n return withoutLastChar + String.fromCharCode(charCode);\n}\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,SACIA,qBAAqB,QAClB,uBAAuB;AAM9B,SACIC,cAAc,EACdC,eAAe,QAEZ,0BAA0B;AACjC,SACIC,SAAS,EACTC,SAAS,QACN,oBAAoB;;AAG3B;AACA;AACA;AACA;AACA;AACA;AACA;;AAYA,OAAO,SAASC,YAAYA,CACxBC,MAA+C,EAC/CC,KAAe,EACY;EAC3B,IAAMC,mBAAgD,GAAGD,KAAK,CAACE,GAAG,CAACC,SAAS,IAAI;IAC5E,IAAMC,UAAU,GAAGX,qBAAqB,CACpCM,MAAM,EACNI,SACJ,CAAC;IACD,IAAI,CAACC,UAAU,EAAE;MACb,MAAM,IAAIC,KAAK,CAAC,iBAAiB,GAAGF,SAAS,CAAC;IAClD;IACA,IAAMG,IAAI,GAAGF,UAAU,CAACE,IAAI;IAC5B,IAAIC,aAAwC;IAC5C,IAAID,IAAI,KAAK,QAAQ,IAAIA,IAAI,KAAK,SAAS,EAAE;MACzCC,aAAa,GAAGC,4BAA4B,CACxCJ,UACJ,CAAC;IACL;IAEA,IAAMK,QAAQ,GAAGd,eAAe,CAACQ,SAAS,CAAC;IAC3C,IAAMO,SAAS,GAAGN,UAAU,CAACM,SAAS,GAAGN,UAAU,CAACM,SAAS,GAAG,CAAC;IAEjE,IAAIC,kBAAkE;IACtE,IAAIL,IAAI,KAAK,QAAQ,EAAE;MACnBK,kBAAkB,GAAGC,OAAO,IAAI;QAC5B,IAAIC,UAAU,GAAGJ,QAAQ,CAACG,OAAO,CAAC;QAClC,IAAI,CAACC,UAAU,EAAE;UACbA,UAAU,GAAG,EAAE;QACnB;QACA,OAAOA,UAAU,CAACC,MAAM,CAACJ,SAAS,EAAE,GAAG,CAAC;MAC5C,
CAAC;IACL,CAAC,MAAM,IAAIJ,IAAI,KAAK,SAAS,EAAE;MAC3BK,kBAAkB,GAAGC,OAAO,IAAI;QAC5B,IAAMC,UAAU,GAAGJ,QAAQ,CAACG,OAAO,CAAC;QACpC,OAAOC,UAAU,GAAG,GAAG,GAAG,GAAG;MACjC,CAAC;IACL,CAAC,MAAM;MAAE;MACLF,kBAAkB,GAAGC,OAAO,IAAI;QAC5B,IAAMC,UAAU,GAAGJ,QAAQ,CAACG,OAAO,CAAC;QACpC,OAAOG,oBAAoB,CACvBR,aAAa,EACbM,UACJ,CAAC;MACL,CAAC;IACL;IAEA,IAAMG,GAA8B,GAAG;MACnCb,SAAS;MACTC,UAAU;MACVG,aAAa;MACbE,QAAQ;MACRE;IACJ,CAAC;IACD,OAAOK,GAAG;EACd,CAAC,CAAC;EACF,OAAOf,mBAAmB;AAC9B;;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASgB,uBAAuBA,CACnClB,MAA+C,EAC/CC,KAAe,EAC+B;EAC9C,IAAMC,mBAAmB,GAAGH,YAAY,CAACC,MAAM,EAAEC,KAAK,CAAC;EACvD,IAAMkB,yBAAyB,GAAGjB,mBAAmB,CAACkB,MAAM;EAC5D,IAAMC,mBAAmB,GAAGnB,mBAAmB,CAACC,GAAG,CAACmB,CAAC,IAAIA,CAAC,CAACV,kBAAkB,CAAC;;EAG9E;AACJ;AACA;EACI,IAAMK,GAAG,GAAG,SAAAA,CAAUJ,OAAkC,EAAU;IAC9D,IAAIU,GAAG,GAAG,EAAE;IACZ,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGL,yBAAyB,EAAE,EAAEK,CAAC,EAAE;MAChDD,GAAG,IAAIF,mBAAmB,CAACG,CAAC,CAAC,CAACX,OAAO,CAAC;IAC1C;IACA,OAAOU,GAAG;EACd,CAAC;EACD,OAAON,GAAG;AACd;AAUA,OAAO,SAASR,4BAA4BA,CACxCJ,UAAsB,EACT;EACb,IAAMoB,OAAO,GAAGC,IAAI,CAACC,KAAK,CAACtB,UAAU,CAACoB,OAAiB,CAAC;EACxD,IAAMG,OAAO,GAAGF,IAAI,CAACG,IAAI,CAACxB,UAAU,CAACuB,OAAiB,CAAC;EACvD,IAAME,UAAkB,GAAGzB,UAAU,CAACyB,UAAoB;EAE1D,IAAMC,SAAS,GAAGH,OAAO,GAAGH,OAAO;EACnC,IAAMO,WAAW,GAAGD,SAAS,CAACE,QAAQ,CAAC,CAAC,CAACb,MAAM;EAE/C,IAAMc,eAAe,GAAGJ,UAAU,CAACG,QAAQ,CAAC,CAAC,CAACE,KAAK,CAAC,GAAG,CAAC;EACxD,IAAIC,QAAQ,GAAG,CAAC;EAChB,IAAIF,eAAe,CAACd,MAAM,GAAG,CAAC,EAAE;IAC5BgB,QAAQ,GAAGF,eAAe,CAAC,CAAC,CAAC,CAACd,MAAM;EACxC;EACA,OAAO;IACHK,OAAO;IACPG,OAAO;IACPI,WAAW;IACXI,QAAQ;IACRC,cAAc,EAAEZ;EACpB,CAAC;AACL;AAEA,OAAO,SAASa,oBAAoBA,CAChCtC,MAA+C,EAC/CC,KAAe,EACT;EACN,IAAMC,mBAAmB,GAAGH,YAAY,CAACC,MAAM,EAAEC,KAAK,CAAC;EACvD,IAAImB,MAAM,GAAG,CAAC;EACdlB,mBAAmB,CAACqC,OAAO,CAACC,KAAK,IAAI;IACjC,IAAMnC,UAAU,GAAGmC,KAAK,CAACnC,UAAU;IACnC,IAAME,IAAI,GAAGF,UAAU,CAACE,IAAI;IAE5B,IAAIA,IAAI,KAAK,QAAQ,EAAE;MACnBa,MAAM,IAAIf,UAAU,CAACM,SAAmB;IAC5C,CAAC,MAAM,IAAIJ,IAA
I,KAAK,SAAS,EAAE;MAC3Ba,MAAM,IAAI,CAAC;IACf,CAAC,MAAM;MACH,IAAMZ,aAAa,GAAGgC,KAAK,CAAChC,aAA8B;MAC1DY,MAAM,GAAGA,MAAM,GAAGZ,aAAa,CAACwB,WAAW,GAAGxB,aAAa,CAAC4B,QAAQ;IACxE;EAEJ,CAAC,CAAC;EACF,OAAOhB,MAAM;AACjB;AAGA,OAAO,SAASqB,gCAAgCA,CAC5CC,eAAuB,EACvBC,gBAAwB,EAClB;EACN,IAAMC,gBAAgB,GAAGF,eAAe,CAACG,KAAK,CAACF,gBAAgB,GAAG,CAAC,CAAC,CAAC;EACrE;EACA,IAAMG,UAAU,GAAGF,gBAAgB,CAACG,IAAI,CAAC,CAAC;EAC1C,OAAOD,UAAU;AACrB;AAGA,OAAO,SAAS9B,oBAAoBA,CAChCR,aAA4B,EAC5BM,UAAkB,EACZ;EACN;AACJ;AACA;AACA;AACA;AACA;AACA;AACA;AACA;EACI,IAAI,OAAOA,UAAU,KAAK,WAAW,EAAE;IACnCA,UAAU,GAAG,CAAC;EAClB;EACA,IAAIA,UAAU,GAAGN,aAAa,CAACiB,OAAO,EAAE;IACpCX,UAAU,GAAGN,aAAa,CAACiB,OAAO;EACtC;EACA,IAAIX,UAAU,GAAGN,aAAa,CAACoB,OAAO,EAAE;IACpCd,UAAU,GAAGN,aAAa,CAACoB,OAAO;EACtC;EAEA,IAAMoB,wBAAwB,GAAG,CAACtB,IAAI,CAACC,KAAK,CAACb,UAAU,CAAC,GAAGN,aAAa,CAAC6B,cAAc,EAAEJ,QAAQ,CAAC,CAAC;EACnG,IAAIV,GAAG,GAAGyB,wBAAwB,CAACC,QAAQ,CAACzC,aAAa,CAACwB,WAAW,EAAE,GAAG,CAAC;EAE3E,IAAIxB,aAAa,CAAC4B,QAAQ,GAAG,CAAC,EAAE;IAC5B,IAAMc,mBAAmB,GAAGpC,UAAU,CAACmB,QAAQ,CAAC,CAAC,CAACE,KAAK,CAAC,GAAG,CAAC;IAC5D,IAAMgB,oBAAoB,GAAGD,mBAAmB,CAAC9B,MAAM,GAAG,CAAC,GAAG8B,mBAAmB,CAAC,CAAC,CAAC,GAAG,GAAG;IAC1F3B,GAAG,IAAI4B,oBAAoB,CAACpC,MAAM,CAACP,aAAa,CAAC4B,QAAQ,EAAE,GAAG,CAAC;EACnE;EACA,OAAOb,GAAG;AACd;AAEA,OAAO,SAAS6B,iCAAiCA,CAC7CpD,MAAyB,EACzBC,KAAe,EACfoD,UAA4D,EACtD;EACN,IAAI9B,GAAG,GAAG,EAAE;EACZtB,KAAK,CAACsC,OAAO,CAAC,CAACnC,SAAS,EAAEkD,GAAG,KAAK;IAC9B,IAAMjD,UAAU,GAAGX,qBAAqB,CACpCM,MAAM,EACNI,SACJ,CAAC;IACD,IAAMmD,KAAK,GAAGF,UAAU,CAACC,GAAG,CAAC;IAC7B,IAAM/C,IAAI,GAAGF,UAAU,CAACE,IAAI;IAE5B,QAAQA,IAAI;MACR,KAAK,QAAQ;QACT,IAAMI,SAAS,GAAGhB,cAAc,CAACU,UAAU,CAACM,SAAS,EAAE,mBAAmB,CAAC;QAC3E,IAAI,OAAO4C,KAAK,KAAK,QAAQ,EAAE;UAC3BhC,GAAG,IAAKgC,KAAK,CAAYxC,MAAM,CAACJ,SAAS,EAAE,GAAG,CAAC;QACnD,CAAC,MAAM;UACH;UACAY,GAAG,IAAI,EAAE,CAACR,MAAM,CAACJ,SAAS,EAAE,GAAG,CAAC;QACpC;QACA;MACJ,KAAK,SAAS;QACV,IAAI4C,KAAK,KAAK,IAAI,EAAE;UAChBhC,GAAG,IAAI,GAAG;QACd,CAAC,MAAM,IAAIgC,KAAK,KAAKzD,SAAS,EAAE;UAC5ByB,GAAG,IAAI,GAAG;QACd,CAAC,M
AAM,IAAIgC,KAAK,KAAK1D,SAAS,EAAE;UAC5B0B,GAAG,IAAI,GAAG;QACd,CAAC,MAAM;UACH,IAAMiC,SAAS,GAAGD,KAAK,GAAG,GAAG,GAAG,GAAG;UACnChC,GAAG,IAAIiC,SAAS;QACpB;QACA;MACJ,KAAK,QAAQ;MACb,KAAK,SAAS;QACV,IAAMhD,aAAa,GAAGC,4BAA4B,CAC9CJ,UACJ,CAAC;QACD,IAAIkD,KAAK,KAAK,IAAI,IAAIA,KAAK,KAAKzD,SAAS,EAAE;UACvC,IAAM2D,QAAQ,GAAG,GAAG;UACpBlC,GAAG,IAAIkC,QAAQ,CAACC,MAAM,CAAClD,aAAa,CAACwB,WAAW,GAAGxB,aAAa,CAAC4B,QAAQ,CAAC;QAC9E,CAAC,MAAM,IAAImB,KAAK,KAAK1D,SAAS,EAAE;UAC5B0B,GAAG,IAAIP,oBAAoB,CACvBR,aAAa,EACbA,aAAa,CAACoB,OAClB,CAAC;QACL,CAAC,MAAM;UACH,IAAM+B,GAAG,GAAG3C,oBAAoB,CAC5BR,aAAa,EACb+C,KACJ,CAAC;UACDhC,GAAG,IAAIoC,GAAG;QACd;QACA;MACJ;QACI,MAAM,IAAIrD,KAAK,CAAC,qBAAqB,GAAGC,IAAI,CAAC;IACrD;EACJ,CAAC,CAAC;EACF,OAAOgB,GAAG;AACd;AAGA,OAAO,SAASqC,iCAAiCA,CAC7C5D,MAAyB,EACzBC,KAAe,EACf4D,UAA4D,EACtD;EACN,IAAItC,GAAG,GAAG,EAAE;EACZtB,KAAK,CAACsC,OAAO,CAAC,CAACnC,SAAS,EAAEkD,GAAG,KAAK;IAC9B,IAAMjD,UAAU,GAAGX,qBAAqB,CACpCM,MAAM,EACNI,SACJ,CAAC;IACD,IAAMmD,KAAK,GAAGM,UAAU,CAACP,GAAG,CAAC;IAC7B,IAAM/C,IAAI,GAAGF,UAAU,CAACE,IAAI;IAE5B,QAAQA,IAAI;MACR,KAAK,QAAQ;QACT,IAAMI,SAAS,GAAGhB,cAAc,CAACU,UAAU,CAACM,SAAS,EAAE,mBAAmB,CAAC;QAC3E,IAAI,OAAO4C,KAAK,KAAK,QAAQ,IAAIA,KAAK,KAAK1D,SAAS,EAAE;UAClD0B,GAAG,IAAKgC,KAAK,CAAYxC,MAAM,CAACJ,SAAS,EAAE,GAAG,CAAC;QACnD,CAAC,MAAM,IAAI4C,KAAK,KAAKzD,SAAS,EAAE;UAC5ByB,GAAG,IAAI,EAAE,CAACR,MAAM,CAACJ,SAAS,EAAE,GAAG,CAAC;QACpC,CAAC,MAAM;UACHY,GAAG,IAAI,EAAE,CAACR,MAAM,CAACJ,SAAS,EAAEd,SAAS,CAAC;QAC1C;QACA;MACJ,KAAK,SAAS;QACV,IAAI0D,KAAK,KAAK,IAAI,EAAE;UAChBhC,GAAG,IAAI,GAAG;QACd,CAAC,MAAM;UACH,IAAMiC,SAAS,GAAGD,KAAK,GAAG,GAAG,GAAG,GAAG;UACnChC,GAAG,IAAIiC,SAAS;QACpB;QACA;MACJ,KAAK,QAAQ;MACb,KAAK,SAAS;QACV,IAAMhD,aAAa,GAAGC,4BAA4B,CAC9CJ,UACJ,CAAC;QACD,IAAIkD,KAAK,KAAK,IAAI,IAAIA,KAAK,KAAK1D,SAAS,EAAE;UACvC,IAAM4D,QAAQ,GAAG,GAAG;UACpBlC,GAAG,IAAIkC,QAAQ,CAACC,MAAM,CAAClD,aAAa,CAACwB,WAAW,GAAGxB,aAAa,CAAC4B,QAAQ,CAAC;QAC9E,CAAC,MAAM,IAAImB,KAAK,KAAKzD,SAAS,EAAE;UAC5B,IAAM2D,SAAQ,GAAG,GAAG;UACpBlC,GAAG,IAAIkC,SAAQ,CAACC,MAAM,CAAClD,aAAa,CAACwB,WAAW,GAAGxB,aAAa,CA
AC4B,QAAQ,CAAC;QAC9E,CAAC,MAAM;UACHb,GAAG,IAAIP,oBAAoB,CACvBR,aAAa,EACb+C,KACJ,CAAC;QACL;QACA;MACJ;QACI,MAAM,IAAIjD,KAAK,CAAC,qBAAqB,GAAGC,IAAI,CAAC;IACrD;EACJ,CAAC,CAAC;EACF,OAAOgB,GAAG;AACd;;AAEA;AACA;AACA;AACA;AACA,OAAO,SAASuC,iCAAiCA,CAACvC,GAAW,EAAEwC,SAAiB,EAAU;EACtF,IAAMC,QAAQ,GAAGzC,GAAG,CAACsB,KAAK,CAAC,CAAC,CAAC,CAAC;EAC9B,IAAIoB,QAAQ,GAAGD,QAAQ,CAACE,UAAU,CAAC,CAAC,CAAC;EACrCD,QAAQ,GAAGA,QAAQ,GAAGF,SAAS;EAC/B,IAAMI,eAAe,GAAG5C,GAAG,CAACsB,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;EACxC,OAAOsB,eAAe,GAAGC,MAAM,CAACC,YAAY,CAACJ,QAAQ,CAAC;AAC1D","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/doc-cache.js b/dist/esm/doc-cache.js deleted file mode 100644 index 476af58ecbb..00000000000 --- a/dist/esm/doc-cache.js +++ /dev/null @@ -1,187 +0,0 @@ -import _createClass from "@babel/runtime/helpers/createClass"; -import { getFromMapOrThrow, getHeightOfRevision, overwriteGetterForCaching, requestIdlePromiseNoQueue } from "./plugins/utils/index.js"; -import { overwritable } from "./overwritable.js"; -import { getDocumentDataOfRxChangeEvent } from "./rx-change-event.js"; - -/** - * Because we have to create many cache items, - * we use an array instead of an object with properties - * for better performance and less memory usage. - * @link https://stackoverflow.com/questions/17295056/array-vs-object-efficiency-in-javascript - */ - -/** - * @link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/FinalizationRegistry - */ - -/** - * The DocumentCache stores RxDocument objects - * by their primary key and revision. - * This is useful on client side applications where - * it is not known how much memory can be used, so - * we de-duplicate RxDocument states to save memory. - * To not fill up the memory with old document states, the DocumentCache - * only contains weak references to the RxDocuments themself. 
- * @link https://caniuse.com/?search=weakref - */ -export var DocumentCache = /*#__PURE__*/function () { - /** - * Some JavaScript runtimes like QuickJS, - * so not have a FinalizationRegistry or WeakRef. - * Therefore we need a workaround which might waste a lot of memory, - * but at least works. - */ - - function DocumentCache(primaryPath, changes$, - /** - * A method that can create a RxDocument by the given document data. - */ - documentCreator) { - this.cacheItemByDocId = new Map(); - this.registry = typeof FinalizationRegistry === 'function' ? new FinalizationRegistry(docMeta => { - var docId = docMeta.docId; - var cacheItem = this.cacheItemByDocId.get(docId); - if (cacheItem) { - cacheItem[0].delete(docMeta.revisionHeight); - if (cacheItem[0].size === 0) { - /** - * No state of the document is cached anymore, - * so we can clean up. - */ - this.cacheItemByDocId.delete(docId); - } - } - }) : undefined; - this.primaryPath = primaryPath; - this.changes$ = changes$; - this.documentCreator = documentCreator; - changes$.subscribe(changeEvent => { - var docId = changeEvent.documentId; - var cacheItem = this.cacheItemByDocId.get(docId); - if (cacheItem) { - var documentData = getDocumentDataOfRxChangeEvent(changeEvent); - cacheItem[1] = documentData; - } - }); - } - - /** - * Get the RxDocument from the cache - * and create a new one if not exits before. - * @overwrites itself with the actual function - * because this is @performance relevant. - * It is called on each document row for each write and read. 
- */ - var _proto = DocumentCache.prototype; - /** - * Throws if not exists - */ - _proto.getLatestDocumentData = function getLatestDocumentData(docId) { - var cacheItem = getFromMapOrThrow(this.cacheItemByDocId, docId); - return cacheItem[1]; - }; - _proto.getLatestDocumentDataIfExists = function getLatestDocumentDataIfExists(docId) { - var cacheItem = this.cacheItemByDocId.get(docId); - if (cacheItem) { - return cacheItem[1]; - } - }; - return _createClass(DocumentCache, [{ - key: "getCachedRxDocuments", - get: function () { - var fn = getCachedRxDocumentMonad(this); - return overwriteGetterForCaching(this, 'getCachedRxDocuments', fn); - } - }, { - key: "getCachedRxDocument", - get: function () { - var fn = getCachedRxDocumentMonad(this); - return overwriteGetterForCaching(this, 'getCachedRxDocument', doc => fn([doc])[0]); - } - }]); -}(); - -/** - * This function is called very very often. - * This is likely the most important function for RxDB overall performance - * @hotPath This is one of the most important methods for performance. - * It is used in many places to transform the raw document data into RxDocuments. 
- */ -function getCachedRxDocumentMonad(docCache) { - var primaryPath = docCache.primaryPath; - var cacheItemByDocId = docCache.cacheItemByDocId; - var registry = docCache.registry; - var deepFreezeWhenDevMode = overwritable.deepFreezeWhenDevMode; - var documentCreator = docCache.documentCreator; - var fn = docsData => { - var ret = new Array(docsData.length); - var registryTasks = []; - for (var index = 0; index < docsData.length; index++) { - var docData = docsData[index]; - var docId = docData[primaryPath]; - var revisionHeight = getHeightOfRevision(docData._rev); - var byRev = void 0; - var cachedRxDocumentWeakRef = void 0; - var cacheItem = cacheItemByDocId.get(docId); - if (!cacheItem) { - byRev = new Map(); - cacheItem = [byRev, docData]; - cacheItemByDocId.set(docId, cacheItem); - } else { - byRev = cacheItem[0]; - cachedRxDocumentWeakRef = byRev.get(revisionHeight); - } - var cachedRxDocument = cachedRxDocumentWeakRef ? cachedRxDocumentWeakRef.deref() : undefined; - if (!cachedRxDocument) { - docData = deepFreezeWhenDevMode(docData); - cachedRxDocument = documentCreator(docData); - byRev.set(revisionHeight, createWeakRefWithFallback(cachedRxDocument)); - if (registry) { - registryTasks.push(cachedRxDocument); - } - } - ret[index] = cachedRxDocument; - } - if (registryTasks.length > 0 && registry) { - /** - * Calling registry.register() has shown to have - * really bad performance. So we add the cached documents - * lazily. 
- */ - requestIdlePromiseNoQueue().then(() => { - for (var _index = 0; _index < registryTasks.length; _index++) { - var doc = registryTasks[_index]; - registry.register(doc, { - docId: doc.primary, - revisionHeight: getHeightOfRevision(doc.revision) - }); - } - }); - } - return ret; - }; - return fn; -} -export function mapDocumentsDataToCacheDocs(docCache, docsData) { - var getCachedRxDocuments = docCache.getCachedRxDocuments; - return getCachedRxDocuments(docsData); -} - -/** - * Fallback for JavaScript runtimes that do not support WeakRef. - * The fallback will keep the items in cache forever, - * but at least works. - */ -var HAS_WEAK_REF = typeof WeakRef === 'function'; -var createWeakRefWithFallback = HAS_WEAK_REF ? createWeakRef : createWeakRefFallback; -function createWeakRef(obj) { - return new WeakRef(obj); -} -function createWeakRefFallback(obj) { - return { - deref() { - return obj; - } - }; -} -//# sourceMappingURL=doc-cache.js.map \ No newline at end of file diff --git a/dist/esm/doc-cache.js.map b/dist/esm/doc-cache.js.map deleted file mode 100644 index 5a78bd1d02f..00000000000 --- a/dist/esm/doc-cache.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"doc-cache.js","names":["getFromMapOrThrow","getHeightOfRevision","overwriteGetterForCaching","requestIdlePromiseNoQueue","overwritable","getDocumentDataOfRxChangeEvent","DocumentCache","primaryPath","changes$","documentCreator","cacheItemByDocId","Map","registry","FinalizationRegistry","docMeta","docId","cacheItem","get","delete","revisionHeight","size","undefined","subscribe","changeEvent","documentId","documentData","_proto","prototype","getLatestDocumentData","getLatestDocumentDataIfExists","_createClass","key","fn","getCachedRxDocumentMonad","doc","docCache","deepFreezeWhenDevMode","docsData","ret","Array","length","registryTasks","index","docData","_rev","byRev","cachedRxDocumentWeakRef","set","cachedRxDocument","deref","createWeakRefWithFallback","push","then","register","primary","revision","mapDocumentsDataToCacheDocs","getCachedRxDocuments","HAS_WEAK_REF","WeakRef","createWeakRef","createWeakRefFallback","obj"],"sources":["../../src/doc-cache.ts"],"sourcesContent":["import type {\n RxChangeEvent,\n RxDocument,\n RxDocumentData\n} from './types/index.d.ts';\nimport {\n getFromMapOrThrow,\n getHeightOfRevision,\n overwriteGetterForCaching,\n requestIdlePromiseNoQueue\n} from './plugins/utils/index.ts';\nimport {\n overwritable\n} from './overwritable.ts';\nimport { getDocumentDataOfRxChangeEvent } from './rx-change-event.ts';\nimport { Observable } from 'rxjs';\n\n/**\n * Because we have to create many cache items,\n * we use an array instead of an object with properties\n * for better performance and less memory usage.\n * @link https://stackoverflow.com/questions/17295056/array-vs-object-efficiency-in-javascript\n */\ndeclare type CacheItem = [\n /**\n * Store the different document states of time\n * based on their revision height.\n * We store WeakRefs so that we can later clean up\n * document states that are no longer needed.\n */\n Map>>,\n\n /**\n * Store the latest known document state.\n * As long as any state of the document 
is in the cache,\n * we observe the changestream and update the latestDoc accordingly.\n * This makes it easier to optimize performance on other parts\n * because for each known document we can always get the current state\n * in the storage.\n * Also it makes it possible to call RxDocument.latest() in a non-async way\n * to retrieve the latest document state or to observe$ some property.\n *\n * To not prevent the whole cacheItem from being garbage collected,\n * we store only the document data here, but not the RxDocument.\n */\n RxDocumentData\n];\n\n\n/**\n * @link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/FinalizationRegistry\n */\ndeclare type FinalizationRegistryValue = {\n docId: string;\n revisionHeight: number;\n};\n\n/**\n * The DocumentCache stores RxDocument objects\n * by their primary key and revision.\n * This is useful on client side applications where\n * it is not known how much memory can be used, so\n * we de-duplicate RxDocument states to save memory.\n * To not fill up the memory with old document states, the DocumentCache\n * only contains weak references to the RxDocuments themself.\n * @link https://caniuse.com/?search=weakref\n */\nexport class DocumentCache {\n public cacheItemByDocId = new Map>();\n\n /**\n * Some JavaScript runtimes like QuickJS,\n * so not have a FinalizationRegistry or WeakRef.\n * Therefore we need a workaround which might waste a lot of memory,\n * but at least works.\n */\n public readonly registry?: FinalizationRegistry = typeof FinalizationRegistry === 'function' ?\n new FinalizationRegistry(docMeta => {\n const docId = docMeta.docId;\n const cacheItem = this.cacheItemByDocId.get(docId);\n if (cacheItem) {\n cacheItem[0].delete(docMeta.revisionHeight);\n if (cacheItem[0].size === 0) {\n /**\n * No state of the document is cached anymore,\n * so we can clean up.\n */\n this.cacheItemByDocId.delete(docId);\n }\n }\n }) :\n undefined;\n\n constructor(\n public readonly 
primaryPath: string,\n public readonly changes$: Observable>,\n /**\n * A method that can create a RxDocument by the given document data.\n */\n public documentCreator: (docData: RxDocumentData) => RxDocument\n ) {\n changes$.subscribe(changeEvent => {\n const docId = changeEvent.documentId;\n const cacheItem = this.cacheItemByDocId.get(docId);\n if (cacheItem) {\n const documentData = getDocumentDataOfRxChangeEvent(changeEvent);\n cacheItem[1] = documentData;\n }\n });\n }\n\n /**\n * Get the RxDocument from the cache\n * and create a new one if not exits before.\n * @overwrites itself with the actual function\n * because this is @performance relevant.\n * It is called on each document row for each write and read.\n */\n get getCachedRxDocuments(): (docsData: RxDocumentData[]) => RxDocument[] {\n const fn = getCachedRxDocumentMonad(this);\n return overwriteGetterForCaching(\n this,\n 'getCachedRxDocuments',\n fn\n );\n }\n\n get getCachedRxDocument(): (docData: RxDocumentData) => RxDocument {\n const fn = getCachedRxDocumentMonad(this);\n return overwriteGetterForCaching(\n this,\n 'getCachedRxDocument',\n doc => fn([doc])[0]\n );\n }\n\n /**\n * Throws if not exists\n */\n public getLatestDocumentData(docId: string): RxDocumentData {\n const cacheItem = getFromMapOrThrow(this.cacheItemByDocId, docId);\n return cacheItem[1];\n }\n\n public getLatestDocumentDataIfExists(docId: string): RxDocumentData | undefined {\n const cacheItem = this.cacheItemByDocId.get(docId);\n if (cacheItem) {\n return cacheItem[1];\n }\n }\n}\n\n/**\n * This function is called very very often.\n * This is likely the most important function for RxDB overall performance\n * @hotPath This is one of the most important methods for performance.\n * It is used in many places to transform the raw document data into RxDocuments.\n */\nfunction getCachedRxDocumentMonad(\n docCache: DocumentCache\n): (docsData: RxDocumentData[]) => RxDocument[] {\n const primaryPath = docCache.primaryPath;\n const 
cacheItemByDocId = docCache.cacheItemByDocId;\n const registry = docCache.registry;\n const deepFreezeWhenDevMode = overwritable.deepFreezeWhenDevMode;\n const documentCreator = docCache.documentCreator;\n const fn: (docsData: RxDocumentData[]) => RxDocument[] = (docsData: RxDocumentData[]) => {\n const ret: RxDocument[] = new Array(docsData.length);\n const registryTasks: RxDocument[] = [];\n for (let index = 0; index < docsData.length; index++) {\n let docData = docsData[index];\n const docId: string = (docData as any)[primaryPath];\n const revisionHeight = getHeightOfRevision(docData._rev);\n\n let byRev: Map>>;\n let cachedRxDocumentWeakRef: WeakRef> | undefined;\n let cacheItem = cacheItemByDocId.get(docId);\n if (!cacheItem) {\n byRev = new Map();\n cacheItem = [\n byRev,\n docData\n ];\n cacheItemByDocId.set(docId, cacheItem);\n } else {\n byRev = cacheItem[0];\n cachedRxDocumentWeakRef = byRev.get(revisionHeight);\n }\n let cachedRxDocument = cachedRxDocumentWeakRef ? cachedRxDocumentWeakRef.deref() : undefined;\n if (!cachedRxDocument) {\n docData = deepFreezeWhenDevMode(docData) as any;\n cachedRxDocument = documentCreator(docData) as RxDocument;\n byRev.set(revisionHeight, createWeakRefWithFallback(cachedRxDocument));\n if (registry) {\n registryTasks.push(cachedRxDocument);\n }\n }\n ret[index] = cachedRxDocument;\n }\n if (registryTasks.length > 0 && registry) {\n /**\n * Calling registry.register() has shown to have\n * really bad performance. 
So we add the cached documents\n * lazily.\n */\n requestIdlePromiseNoQueue().then(() => {\n for (let index = 0; index < registryTasks.length; index++) {\n const doc = registryTasks[index];\n registry.register(doc, {\n docId: doc.primary,\n revisionHeight: getHeightOfRevision(doc.revision)\n });\n }\n });\n }\n return ret;\n };\n return fn;\n}\n\nexport function mapDocumentsDataToCacheDocs(\n docCache: DocumentCache,\n docsData: RxDocumentData[]\n) {\n const getCachedRxDocuments = docCache.getCachedRxDocuments;\n return getCachedRxDocuments(docsData);\n}\n\n/**\n * Fallback for JavaScript runtimes that do not support WeakRef.\n * The fallback will keep the items in cache forever,\n * but at least works.\n */\nconst HAS_WEAK_REF = typeof WeakRef === 'function';\nconst createWeakRefWithFallback = HAS_WEAK_REF ? createWeakRef : createWeakRefFallback;\nfunction createWeakRef(obj: T): WeakRef {\n return new WeakRef(obj) as any;\n}\nfunction createWeakRefFallback(obj: T): WeakRef {\n return {\n deref() {\n return obj;\n }\n } as 
any;\n}\n"],"mappings":";AAKA,SACIA,iBAAiB,EACjBC,mBAAmB,EACnBC,yBAAyB,EACzBC,yBAAyB,QACtB,0BAA0B;AACjC,SACIC,YAAY,QACT,mBAAmB;AAC1B,SAASC,8BAA8B,QAAQ,sBAAsB;;AAGrE;AACA;AACA;AACA;AACA;AACA;;AA2BA;AACA;AACA;;AAMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,WAAaC,aAAa;EAGtB;AACJ;AACA;AACA;AACA;AACA;;EAkBI,SAAAA,cACoBC,WAAmB,EACnBC,QAA8C;EAC9D;AACR;AACA;EACeC,eAA0F,EACnG;IAAA,KAhCKC,gBAAgB,GAAG,IAAIC,GAAG,CAA2C,CAAC;IAAA,KAQ7DC,QAAQ,GAAqD,OAAOC,oBAAoB,KAAK,UAAU,GACnH,IAAIA,oBAAoB,CAA4BC,OAAO,IAAI;MAC3D,IAAMC,KAAK,GAAGD,OAAO,CAACC,KAAK;MAC3B,IAAMC,SAAS,GAAG,IAAI,CAACN,gBAAgB,CAACO,GAAG,CAACF,KAAK,CAAC;MAClD,IAAIC,SAAS,EAAE;QACXA,SAAS,CAAC,CAAC,CAAC,CAACE,MAAM,CAACJ,OAAO,CAACK,cAAc,CAAC;QAC3C,IAAIH,SAAS,CAAC,CAAC,CAAC,CAACI,IAAI,KAAK,CAAC,EAAE;UACzB;AACpB;AACA;AACA;UACoB,IAAI,CAACV,gBAAgB,CAACQ,MAAM,CAACH,KAAK,CAAC;QACvC;MACJ;IACJ,CAAC,CAAC,GACFM,SAAS;IAAA,KAGOd,WAAmB,GAAnBA,WAAmB;IAAA,KACnBC,QAA8C,GAA9CA,QAA8C;IAAA,KAIvDC,eAA0F,GAA1FA,eAA0F;IAEjGD,QAAQ,CAACc,SAAS,CAACC,WAAW,IAAI;MAC9B,IAAMR,KAAK,GAAGQ,WAAW,CAACC,UAAU;MACpC,IAAMR,SAAS,GAAG,IAAI,CAACN,gBAAgB,CAACO,GAAG,CAACF,KAAK,CAAC;MAClD,IAAIC,SAAS,EAAE;QACX,IAAMS,YAAY,GAAGpB,8BAA8B,CAACkB,WAAW,CAAC;QAChEP,SAAS,CAAC,CAAC,CAAC,GAAGS,YAAY;MAC/B;IACJ,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA;AACA;AACA;AACA;EANI,IAAAC,MAAA,GAAApB,aAAA,CAAAqB,SAAA;EAyBA;AACJ;AACA;EAFID,MAAA,CAGOE,qBAAqB,GAA5B,SAAAA,sBAA6Bb,KAAa,EAA6B;IACnE,IAAMC,SAAS,GAAGhB,iBAAiB,CAAC,IAAI,CAACU,gBAAgB,EAAEK,KAAK,CAAC;IACjE,OAAOC,SAAS,CAAC,CAAC,CAAC;EACvB,CAAC;EAAAU,MAAA,CAEMG,6BAA6B,GAApC,SAAAA,8BAAqCd,KAAa,EAAyC;IACvF,IAAMC,SAAS,GAAG,IAAI,CAACN,gBAAgB,CAACO,GAAG,CAACF,KAAK,CAAC;IAClD,IAAIC,SAAS,EAAE;MACX,OAAOA,SAAS,CAAC,CAAC,CAAC;IACvB;EACJ,CAAC;EAAA,OAAAc,YAAA,CAAAxB,aAAA;IAAAyB,GAAA;IAAAd,GAAA,EA/BD,SAAAA,CAAA,EAA2G;MACvG,IAAMe,EAAE,GAAGC,wBAAwB,CAAC,IAAI,CAAC;MACzC,OAAO/B,yBAAyB,CAC5B,IAAI,EACJ,sBAAsB,EACtB8B,EACJ,CAAC;IACL;EAAC;IAAAD,GAAA;IAAAd,GAAA,EAED,SAAAA,CAAA,EAAqG;MACjG,IAAMe,EAAE,GAAGC,wBAAwB,CAAC,IAAI,CAAC;MACzC,OAAO/B,yBAAyB,CAC5B,IAAI,EAC
J,qBAAqB,EACrBgC,GAAG,IAAIF,EAAE,CAAC,CAACE,GAAG,CAAC,CAAC,CAAC,CAAC,CACtB,CAAC;IACL;EAAC;AAAA;;AAkBL;AACA;AACA;AACA;AACA;AACA;AACA,SAASD,wBAAwBA,CAC7BE,QAA8C,EACgC;EAC9E,IAAM5B,WAAW,GAAG4B,QAAQ,CAAC5B,WAAW;EACxC,IAAMG,gBAAgB,GAAGyB,QAAQ,CAACzB,gBAAgB;EAClD,IAAME,QAAQ,GAAGuB,QAAQ,CAACvB,QAAQ;EAClC,IAAMwB,qBAAqB,GAAGhC,YAAY,CAACgC,qBAAqB;EAChE,IAAM3B,eAAe,GAAG0B,QAAQ,CAAC1B,eAAe;EAChD,IAAMuB,EAAkF,GAAIK,QAAqC,IAAK;IAClI,IAAMC,GAAwC,GAAG,IAAIC,KAAK,CAACF,QAAQ,CAACG,MAAM,CAAC;IAC3E,IAAMC,aAAkD,GAAG,EAAE;IAC7D,KAAK,IAAIC,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGL,QAAQ,CAACG,MAAM,EAAEE,KAAK,EAAE,EAAE;MAClD,IAAIC,OAAO,GAAGN,QAAQ,CAACK,KAAK,CAAC;MAC7B,IAAM3B,KAAa,GAAI4B,OAAO,CAASpC,WAAW,CAAC;MACnD,IAAMY,cAAc,GAAGlB,mBAAmB,CAAC0C,OAAO,CAACC,IAAI,CAAC;MAExD,IAAIC,KAA8D;MAClE,IAAIC,uBAA+E;MACnF,IAAI9B,SAAS,GAAGN,gBAAgB,CAACO,GAAG,CAACF,KAAK,CAAC;MAC3C,IAAI,CAACC,SAAS,EAAE;QACZ6B,KAAK,GAAG,IAAIlC,GAAG,CAAC,CAAC;QACjBK,SAAS,GAAG,CACR6B,KAAK,EACLF,OAAO,CACV;QACDjC,gBAAgB,CAACqC,GAAG,CAAChC,KAAK,EAAEC,SAAS,CAAC;MAC1C,CAAC,MAAM;QACH6B,KAAK,GAAG7B,SAAS,CAAC,CAAC,CAAC;QACpB8B,uBAAuB,GAAGD,KAAK,CAAC5B,GAAG,CAACE,cAAc,CAAC;MACvD;MACA,IAAI6B,gBAAgB,GAAGF,uBAAuB,GAAGA,uBAAuB,CAACG,KAAK,CAAC,CAAC,GAAG5B,SAAS;MAC5F,IAAI,CAAC2B,gBAAgB,EAAE;QACnBL,OAAO,GAAGP,qBAAqB,CAACO,OAAO,CAAQ;QAC/CK,gBAAgB,GAAGvC,eAAe,CAACkC,OAAO,CAAsC;QAChFE,KAAK,CAACE,GAAG,CAAC5B,cAAc,EAAE+B,yBAAyB,CAACF,gBAAgB,CAAC,CAAC;QACtE,IAAIpC,QAAQ,EAAE;UACV6B,aAAa,CAACU,IAAI,CAACH,gBAAgB,CAAC;QACxC;MACJ;MACAV,GAAG,CAACI,KAAK,CAAC,GAAGM,gBAAgB;IACjC;IACA,IAAIP,aAAa,CAACD,MAAM,GAAG,CAAC,IAAI5B,QAAQ,EAAE;MACtC;AACZ;AACA;AACA;AACA;MACYT,yBAAyB,CAAC,CAAC,CAACiD,IAAI,CAAC,MAAM;QACnC,KAAK,IAAIV,MAAK,GAAG,CAAC,EAAEA,MAAK,GAAGD,aAAa,CAACD,MAAM,EAAEE,MAAK,EAAE,EAAE;UACvD,IAAMR,GAAG,GAAGO,aAAa,CAACC,MAAK,CAAC;UAChC9B,QAAQ,CAACyC,QAAQ,CAACnB,GAAG,EAAE;YACnBnB,KAAK,EAAEmB,GAAG,CAACoB,OAAO;YAClBnC,cAAc,EAAElB,mBAAmB,CAACiC,GAAG,CAACqB,QAAQ;UACpD,CAAC,CAAC;QACN;MACJ,CAAC,CAAC;IACN;IACA,OAAOjB,GAAG;EACd,CAAC;EACD,OAAON,EAAE;AACb;AAEA,OAAO,SAASwB,2BAA2B
A,CACvCrB,QAA8C,EAC9CE,QAAqC,EACvC;EACE,IAAMoB,oBAAoB,GAAGtB,QAAQ,CAACsB,oBAAoB;EAC1D,OAAOA,oBAAoB,CAACpB,QAAQ,CAAC;AACzC;;AAEA;AACA;AACA;AACA;AACA;AACA,IAAMqB,YAAY,GAAG,OAAOC,OAAO,KAAK,UAAU;AAClD,IAAMT,yBAAyB,GAAGQ,YAAY,GAAGE,aAAa,GAAGC,qBAAqB;AACtF,SAASD,aAAaA,CAAmBE,GAAM,EAAc;EACzD,OAAO,IAAIH,OAAO,CAACG,GAAG,CAAC;AAC3B;AACA,SAASD,qBAAqBA,CAAmBC,GAAM,EAAc;EACjE,OAAO;IACHb,KAAKA,CAAA,EAAG;MACJ,OAAOa,GAAG;IACd;EACJ,CAAC;AACL","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/event-reduce.js b/dist/esm/event-reduce.js deleted file mode 100644 index a518564a22b..00000000000 --- a/dist/esm/event-reduce.js +++ /dev/null @@ -1,97 +0,0 @@ -import { calculateActionName, runAction } from 'event-reduce-js'; -import { rxChangeEventToEventReduceChangeEvent } from "./rx-change-event.js"; -import { arrayFilterNotEmpty, clone, ensureNotFalsy, getFromMapOrCreate } from "./plugins/utils/index.js"; -import { getQueryMatcher, getSortComparator, normalizeMangoQuery } from "./rx-query-helper.js"; -export function getSortFieldsOfQuery(primaryKey, query) { - if (!query.sort || query.sort.length === 0) { - return [primaryKey]; - } else { - return query.sort.map(part => Object.keys(part)[0]); - } -} -export var RXQUERY_QUERY_PARAMS_CACHE = new WeakMap(); -export function getQueryParams(rxQuery) { - return getFromMapOrCreate(RXQUERY_QUERY_PARAMS_CACHE, rxQuery, () => { - var collection = rxQuery.collection; - var normalizedMangoQuery = normalizeMangoQuery(collection.storageInstance.schema, clone(rxQuery.mangoQuery)); - var primaryKey = collection.schema.primaryPath; - - /** - * Create a custom sort comparator - * that uses the hooks to ensure - * we send for example compressed documents to be sorted by compressed queries. 
- */ - var sortComparator = getSortComparator(collection.schema.jsonSchema, normalizedMangoQuery); - var useSortComparator = (docA, docB) => { - var sortComparatorData = { - docA, - docB, - rxQuery - }; - return sortComparator(sortComparatorData.docA, sortComparatorData.docB); - }; - - /** - * Create a custom query matcher - * that uses the hooks to ensure - * we send for example compressed documents to match compressed queries. - */ - var queryMatcher = getQueryMatcher(collection.schema.jsonSchema, normalizedMangoQuery); - var useQueryMatcher = doc => { - var queryMatcherData = { - doc, - rxQuery - }; - return queryMatcher(queryMatcherData.doc); - }; - var ret = { - primaryKey: rxQuery.collection.schema.primaryPath, - skip: normalizedMangoQuery.skip, - limit: normalizedMangoQuery.limit, - sortFields: getSortFieldsOfQuery(primaryKey, normalizedMangoQuery), - sortComparator: useSortComparator, - queryMatcher: useQueryMatcher - }; - return ret; - }); -} -export function calculateNewResults(rxQuery, rxChangeEvents) { - if (!rxQuery.collection.database.eventReduce) { - return { - runFullQueryAgain: true - }; - } - var queryParams = getQueryParams(rxQuery); - var previousResults = ensureNotFalsy(rxQuery._result).docsData.slice(0); - var previousResultsMap = ensureNotFalsy(rxQuery._result).docsDataMap; - var changed = false; - var eventReduceEvents = rxChangeEvents.map(cE => rxChangeEventToEventReduceChangeEvent(cE)).filter(arrayFilterNotEmpty); - var foundNonOptimizeable = eventReduceEvents.find(eventReduceEvent => { - var stateResolveFunctionInput = { - queryParams, - changeEvent: eventReduceEvent, - previousResults, - keyDocumentMap: previousResultsMap - }; - var actionName = calculateActionName(stateResolveFunctionInput); - if (actionName === 'runFullQueryAgain') { - return true; - } else if (actionName !== 'doNothing') { - changed = true; - runAction(actionName, queryParams, eventReduceEvent, previousResults, previousResultsMap); - return false; - } - }); - if 
(foundNonOptimizeable) { - return { - runFullQueryAgain: true - }; - } else { - return { - runFullQueryAgain: false, - changed, - newResults: previousResults - }; - } -} -//# sourceMappingURL=event-reduce.js.map \ No newline at end of file diff --git a/dist/esm/event-reduce.js.map b/dist/esm/event-reduce.js.map deleted file mode 100644 index 16e5a2edfcb..00000000000 --- a/dist/esm/event-reduce.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"event-reduce.js","names":["calculateActionName","runAction","rxChangeEventToEventReduceChangeEvent","arrayFilterNotEmpty","clone","ensureNotFalsy","getFromMapOrCreate","getQueryMatcher","getSortComparator","normalizeMangoQuery","getSortFieldsOfQuery","primaryKey","query","sort","length","map","part","Object","keys","RXQUERY_QUERY_PARAMS_CACHE","WeakMap","getQueryParams","rxQuery","collection","normalizedMangoQuery","storageInstance","schema","mangoQuery","primaryPath","sortComparator","jsonSchema","useSortComparator","docA","docB","sortComparatorData","queryMatcher","useQueryMatcher","doc","queryMatcherData","ret","skip","limit","sortFields","calculateNewResults","rxChangeEvents","database","eventReduce","runFullQueryAgain","queryParams","previousResults","_result","docsData","slice","previousResultsMap","docsDataMap","changed","eventReduceEvents","cE","filter","foundNonOptimizeable","find","eventReduceEvent","stateResolveFunctionInput","changeEvent","keyDocumentMap","actionName","newResults"],"sources":["../../src/event-reduce.ts"],"sourcesContent":["import {\n ActionName,\n calculateActionName,\n runAction,\n QueryParams,\n QueryMatcher,\n DeterministicSortComparator,\n StateResolveFunctionInput,\n ChangeEvent\n} from 'event-reduce-js';\nimport type {\n RxQuery,\n MangoQuery,\n RxChangeEvent,\n StringKeys,\n RxDocumentData\n} from './types/index.d.ts';\nimport { rxChangeEventToEventReduceChangeEvent } from './rx-change-event.ts';\nimport {\n arrayFilterNotEmpty,\n clone,\n ensureNotFalsy,\n getFromMapOrCreate\n} from 
'./plugins/utils/index.ts';\nimport { getQueryMatcher, getSortComparator, normalizeMangoQuery } from './rx-query-helper.ts';\n\nexport type EventReduceResultNeg = {\n runFullQueryAgain: true;\n};\nexport type EventReduceResultPos = {\n runFullQueryAgain: false;\n changed: boolean;\n newResults: RxDocumentType[];\n};\nexport type EventReduceResult = EventReduceResultNeg | EventReduceResultPos;\n\n\nexport function getSortFieldsOfQuery(\n primaryKey: StringKeys>,\n query: MangoQuery\n): (string | StringKeys)[] {\n if (!query.sort || query.sort.length === 0) {\n return [primaryKey];\n } else {\n return query.sort.map(part => Object.keys(part)[0]);\n }\n}\n\n\n\nexport const RXQUERY_QUERY_PARAMS_CACHE: WeakMap> = new WeakMap();\nexport function getQueryParams(\n rxQuery: RxQuery\n): QueryParams {\n return getFromMapOrCreate(\n RXQUERY_QUERY_PARAMS_CACHE,\n rxQuery,\n () => {\n const collection = rxQuery.collection;\n const normalizedMangoQuery = normalizeMangoQuery(\n collection.storageInstance.schema,\n clone(rxQuery.mangoQuery)\n );\n const primaryKey = collection.schema.primaryPath;\n\n /**\n * Create a custom sort comparator\n * that uses the hooks to ensure\n * we send for example compressed documents to be sorted by compressed queries.\n */\n const sortComparator = getSortComparator(\n collection.schema.jsonSchema,\n normalizedMangoQuery\n );\n\n const useSortComparator: DeterministicSortComparator = (docA: RxDocType, docB: RxDocType) => {\n const sortComparatorData = {\n docA,\n docB,\n rxQuery\n };\n return sortComparator(sortComparatorData.docA, sortComparatorData.docB);\n };\n\n /**\n * Create a custom query matcher\n * that uses the hooks to ensure\n * we send for example compressed documents to match compressed queries.\n */\n const queryMatcher = getQueryMatcher(\n collection.schema.jsonSchema,\n normalizedMangoQuery\n );\n const useQueryMatcher: QueryMatcher> = (doc: RxDocumentData) => {\n const queryMatcherData = {\n doc,\n rxQuery\n };\n return 
queryMatcher(queryMatcherData.doc);\n };\n\n const ret: QueryParams = {\n primaryKey: rxQuery.collection.schema.primaryPath as any,\n skip: normalizedMangoQuery.skip,\n limit: normalizedMangoQuery.limit,\n sortFields: getSortFieldsOfQuery(primaryKey, normalizedMangoQuery) as string[],\n sortComparator: useSortComparator,\n queryMatcher: useQueryMatcher\n };\n return ret;\n }\n );\n}\n\n\nexport function calculateNewResults(\n rxQuery: RxQuery,\n rxChangeEvents: RxChangeEvent[]\n): EventReduceResult {\n if (!rxQuery.collection.database.eventReduce) {\n return {\n runFullQueryAgain: true\n };\n }\n const queryParams = getQueryParams(rxQuery);\n const previousResults: RxDocumentType[] = ensureNotFalsy(rxQuery._result).docsData.slice(0);\n const previousResultsMap: Map = ensureNotFalsy(rxQuery._result).docsDataMap;\n let changed: boolean = false;\n\n const eventReduceEvents: ChangeEvent[] = rxChangeEvents\n .map(cE => rxChangeEventToEventReduceChangeEvent(cE))\n .filter(arrayFilterNotEmpty);\n\n const foundNonOptimizeable = eventReduceEvents.find(eventReduceEvent => {\n const stateResolveFunctionInput: StateResolveFunctionInput = {\n queryParams,\n changeEvent: eventReduceEvent,\n previousResults,\n keyDocumentMap: previousResultsMap\n };\n\n const actionName: ActionName = calculateActionName(stateResolveFunctionInput);\n if (actionName === 'runFullQueryAgain') {\n return true;\n } else if (actionName !== 'doNothing') {\n changed = true;\n runAction(\n actionName,\n queryParams,\n eventReduceEvent,\n previousResults,\n previousResultsMap\n );\n return false;\n }\n });\n if (foundNonOptimizeable) {\n return {\n runFullQueryAgain: true,\n };\n } else {\n return {\n runFullQueryAgain: false,\n changed,\n newResults: previousResults\n };\n 
}\n}\n"],"mappings":"AAAA,SAEIA,mBAAmB,EACnBC,SAAS,QAMN,iBAAiB;AAQxB,SAASC,qCAAqC,QAAQ,sBAAsB;AAC5E,SACIC,mBAAmB,EACnBC,KAAK,EACLC,cAAc,EACdC,kBAAkB,QACf,0BAA0B;AACjC,SAASC,eAAe,EAAEC,iBAAiB,EAAEC,mBAAmB,QAAQ,sBAAsB;AAa9F,OAAO,SAASC,oBAAoBA,CAChCC,UAAiD,EACjDC,KAA4B,EACM;EAClC,IAAI,CAACA,KAAK,CAACC,IAAI,IAAID,KAAK,CAACC,IAAI,CAACC,MAAM,KAAK,CAAC,EAAE;IACxC,OAAO,CAACH,UAAU,CAAC;EACvB,CAAC,MAAM;IACH,OAAOC,KAAK,CAACC,IAAI,CAACE,GAAG,CAACC,IAAI,IAAIC,MAAM,CAACC,IAAI,CAACF,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;EACvD;AACJ;AAIA,OAAO,IAAMG,0BAA8D,GAAG,IAAIC,OAAO,CAAC,CAAC;AAC3F,OAAO,SAASC,cAAcA,CAC1BC,OAA2B,EACL;EACtB,OAAOhB,kBAAkB,CACrBa,0BAA0B,EAC1BG,OAAO,EACP,MAAM;IACF,IAAMC,UAAU,GAAGD,OAAO,CAACC,UAAU;IACrC,IAAMC,oBAAoB,GAAGf,mBAAmB,CAC5Cc,UAAU,CAACE,eAAe,CAACC,MAAM,EACjCtB,KAAK,CAACkB,OAAO,CAACK,UAAU,CAC5B,CAAC;IACD,IAAMhB,UAAU,GAAGY,UAAU,CAACG,MAAM,CAACE,WAAW;;IAEhD;AACZ;AACA;AACA;AACA;IACY,IAAMC,cAAc,GAAGrB,iBAAiB,CACpCe,UAAU,CAACG,MAAM,CAACI,UAAU,EAC5BN,oBACJ,CAAC;IAED,IAAMO,iBAAyD,GAAGA,CAACC,IAAe,EAAEC,IAAe,KAAK;MACpG,IAAMC,kBAAkB,GAAG;QACvBF,IAAI;QACJC,IAAI;QACJX;MACJ,CAAC;MACD,OAAOO,cAAc,CAACK,kBAAkB,CAACF,IAAI,EAAEE,kBAAkB,CAACD,IAAI,CAAC;IAC3E,CAAC;;IAED;AACZ;AACA;AACA;AACA;IACY,IAAME,YAAY,GAAG5B,eAAe,CAChCgB,UAAU,CAACG,MAAM,CAACI,UAAU,EAC5BN,oBACJ,CAAC;IACD,IAAMY,eAAwD,GAAIC,GAA8B,IAAK;MACjG,IAAMC,gBAAgB,GAAG;QACrBD,GAAG;QACHf;MACJ,CAAC;MACD,OAAOa,YAAY,CAACG,gBAAgB,CAACD,GAAG,CAAC;IAC7C,CAAC;IAED,IAAME,GAAqB,GAAG;MAC1B5B,UAAU,EAAEW,OAAO,CAACC,UAAU,CAACG,MAAM,CAACE,WAAkB;MACxDY,IAAI,EAAEhB,oBAAoB,CAACgB,IAAI;MAC/BC,KAAK,EAAEjB,oBAAoB,CAACiB,KAAK;MACjCC,UAAU,EAAEhC,oBAAoB,CAACC,UAAU,EAAEa,oBAAoB,CAAa;MAC9EK,cAAc,EAAEE,iBAAiB;MACjCI,YAAY,EAAEC;IAClB,CAAC;IACD,OAAOG,GAAG;EACd,CACJ,CAAC;AACL;AAGA,OAAO,SAASI,mBAAmBA,CAC/BrB,OAAgC,EAChCsB,cAA+C,EACd;EACjC,IAAI,CAACtB,OAAO,CAACC,UAAU,CAACsB,QAAQ,CAACC,WAAW,EAAE;IAC1C,OAAO;MACHC,iBAAiB,EAAE;IACvB,CAAC;EACL;EACA,IAAMC,WAAW,GAAG3B,cAAc,CAACC,OAAO,CAAC;EAC3C,IAAM2B,eAAiC,GAAG5C,cAAc,CAACiB,OAAO,CAAC4B,OAAO,CAAC,CAACC,QAAQ,CAACC,K
AAK,CAAC,CAAC,CAAC;EAC3F,IAAMC,kBAA+C,GAAGhD,cAAc,CAACiB,OAAO,CAAC4B,OAAO,CAAC,CAACI,WAAW;EACnG,IAAIC,OAAgB,GAAG,KAAK;EAE5B,IAAMC,iBAAgD,GAAGZ,cAAc,CAClE7B,GAAG,CAAC0C,EAAE,IAAIvD,qCAAqC,CAACuD,EAAE,CAAC,CAAC,CACpDC,MAAM,CAACvD,mBAAmB,CAAC;EAEhC,IAAMwD,oBAAoB,GAAGH,iBAAiB,CAACI,IAAI,CAACC,gBAAgB,IAAI;IACpE,IAAMC,yBAAoE,GAAG;MACzEd,WAAW;MACXe,WAAW,EAAEF,gBAAgB;MAC7BZ,eAAe;MACfe,cAAc,EAAEX;IACpB,CAAC;IAED,IAAMY,UAAsB,GAAGjE,mBAAmB,CAAC8D,yBAAyB,CAAC;IAC7E,IAAIG,UAAU,KAAK,mBAAmB,EAAE;MACpC,OAAO,IAAI;IACf,CAAC,MAAM,IAAIA,UAAU,KAAK,WAAW,EAAE;MACnCV,OAAO,GAAG,IAAI;MACdtD,SAAS,CACLgE,UAAU,EACVjB,WAAW,EACXa,gBAAgB,EAChBZ,eAAe,EACfI,kBACJ,CAAC;MACD,OAAO,KAAK;IAChB;EACJ,CAAC,CAAC;EACF,IAAIM,oBAAoB,EAAE;IACtB,OAAO;MACHZ,iBAAiB,EAAE;IACvB,CAAC;EACL,CAAC,MAAM;IACH,OAAO;MACHA,iBAAiB,EAAE,KAAK;MACxBQ,OAAO;MACPW,UAAU,EAAEjB;IAChB,CAAC;EACL;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/hooks.js b/dist/esm/hooks.js deleted file mode 100644 index 6e7b96fd059..00000000000 --- a/dist/esm/hooks.js +++ /dev/null @@ -1,118 +0,0 @@ -/** - * hook-functions that can be extended by the plugin - */ -export var HOOKS = { - /** - * Runs before a plugin is added. - * Use this to block the usage of non-compatible plugins. - */ - preAddRxPlugin: [], - /** - * functions that run before the database is created - */ - preCreateRxDatabase: [], - /** - * runs after the database is created and prepared - * but before the instance is returned to the user - * @async - */ - createRxDatabase: [], - preCreateRxCollection: [], - createRxCollection: [], - createRxState: [], - /** - * runs at the end of the destroy-process of a collection - * @async - */ - postDestroyRxCollection: [], - /** - * Runs after a collection is removed. 
- * @async - */ - postRemoveRxCollection: [], - /** - * functions that get the json-schema as input - * to do additionally checks/manipulation - */ - preCreateRxSchema: [], - /** - * functions that run after the RxSchema is created - * gets RxSchema as attribute - */ - createRxSchema: [], - preCreateRxQuery: [], - /** - * Runs before a query is send to the - * prepareQuery function of the storage engine. - */ - prePrepareQuery: [], - createRxDocument: [], - /** - * runs after a RxDocument is created, - * cannot be async - */ - postCreateRxDocument: [], - /** - * Runs before a RxStorageInstance is created - * gets the params of createStorageInstance() - * as attribute so you can manipulate them. - * Notice that you have to clone stuff before mutating the inputs. - */ - preCreateRxStorageInstance: [], - preStorageWrite: [], - /** - * runs on the document-data before the document is migrated - * { - * doc: Object, // original doc-data - * migrated: // migrated doc-data after run through migration-strategies - * } - */ - preMigrateDocument: [], - /** - * runs after the migration of a document has been done - */ - postMigrateDocument: [], - /** - * runs at the beginning of the destroy-process of a database - */ - preDestroyRxDatabase: [], - /** - * runs after a database has been removed - * @async - */ - postRemoveRxDatabase: [], - /** - * runs before the replication writes the rows to master - * but before the rows have been modified - * @async - */ - preReplicationMasterWrite: [], - /** - * runs after the replication has been sent to the server - * but before the new documents have been handled - * @async - */ - preReplicationMasterWriteDocumentsHandle: [] -}; -export function runPluginHooks(hookKey, obj) { - if (HOOKS[hookKey].length > 0) { - HOOKS[hookKey].forEach(fun => fun(obj)); - } -} - -/** - * TODO - * we should not run the hooks in parallel - * this makes stuff unpredictable. 
- */ -export function runAsyncPluginHooks(hookKey, obj) { - return Promise.all(HOOKS[hookKey].map(fun => fun(obj))); -} - -/** - * used in tests to remove hooks - */ -export function _clearHook(type, fun) { - HOOKS[type] = HOOKS[type].filter(h => h !== fun); -} -//# sourceMappingURL=hooks.js.map \ No newline at end of file diff --git a/dist/esm/hooks.js.map b/dist/esm/hooks.js.map deleted file mode 100644 index 20cd2e1ed07..00000000000 --- a/dist/esm/hooks.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"hooks.js","names":["HOOKS","preAddRxPlugin","preCreateRxDatabase","createRxDatabase","preCreateRxCollection","createRxCollection","createRxState","postDestroyRxCollection","postRemoveRxCollection","preCreateRxSchema","createRxSchema","preCreateRxQuery","prePrepareQuery","createRxDocument","postCreateRxDocument","preCreateRxStorageInstance","preStorageWrite","preMigrateDocument","postMigrateDocument","preDestroyRxDatabase","postRemoveRxDatabase","preReplicationMasterWrite","preReplicationMasterWriteDocumentsHandle","runPluginHooks","hookKey","obj","length","forEach","fun","runAsyncPluginHooks","Promise","all","map","_clearHook","type","filter","h"],"sources":["../../src/hooks.ts"],"sourcesContent":["\n/**\n * hook-functions that can be extended by the plugin\n */\nexport const HOOKS: { [k: string]: any[]; } = {\n /**\n * Runs before a plugin is added.\n * Use this to block the usage of non-compatible plugins.\n */\n preAddRxPlugin: [],\n /**\n * functions that run before the database is created\n */\n preCreateRxDatabase: [],\n /**\n * runs after the database is created and prepared\n * but before the instance is returned to the user\n * @async\n */\n createRxDatabase: [],\n preCreateRxCollection: [],\n createRxCollection: [],\n createRxState: [],\n /**\n * runs at the end of the destroy-process of a collection\n * @async\n */\n postDestroyRxCollection: [],\n /**\n * Runs after a collection is removed.\n * @async\n */\n postRemoveRxCollection: [],\n /**\n * 
functions that get the json-schema as input\n * to do additionally checks/manipulation\n */\n preCreateRxSchema: [],\n /**\n * functions that run after the RxSchema is created\n * gets RxSchema as attribute\n */\n createRxSchema: [],\n preCreateRxQuery: [],\n /**\n * Runs before a query is send to the\n * prepareQuery function of the storage engine.\n */\n prePrepareQuery: [],\n createRxDocument: [],\n /**\n * runs after a RxDocument is created,\n * cannot be async\n */\n postCreateRxDocument: [],\n /**\n * Runs before a RxStorageInstance is created\n * gets the params of createStorageInstance()\n * as attribute so you can manipulate them.\n * Notice that you have to clone stuff before mutating the inputs.\n */\n preCreateRxStorageInstance: [],\n preStorageWrite: [],\n /**\n * runs on the document-data before the document is migrated\n * {\n * doc: Object, // original doc-data\n * migrated: // migrated doc-data after run through migration-strategies\n * }\n */\n preMigrateDocument: [],\n /**\n * runs after the migration of a document has been done\n */\n postMigrateDocument: [],\n /**\n * runs at the beginning of the destroy-process of a database\n */\n preDestroyRxDatabase: [],\n /**\n * runs after a database has been removed\n * @async\n */\n postRemoveRxDatabase: [],\n\n /**\n * runs before the replication writes the rows to master\n * but before the rows have been modified\n * @async\n */\n preReplicationMasterWrite: [],\n\n /**\n * runs after the replication has been sent to the server\n * but before the new documents have been handled\n * @async\n */\n preReplicationMasterWriteDocumentsHandle: [],\n};\n\nexport function runPluginHooks(hookKey: string, obj: any) {\n if (HOOKS[hookKey].length > 0) {\n HOOKS[hookKey].forEach(fun => fun(obj));\n }\n}\n\n\n/**\n * TODO\n * we should not run the hooks in parallel\n * this makes stuff unpredictable.\n */\nexport function runAsyncPluginHooks(hookKey: string, obj: any): Promise {\n return Promise.all(\n 
HOOKS[hookKey].map(fun => fun(obj))\n );\n}\n\n/**\n * used in tests to remove hooks\n */\nexport function _clearHook(type: string, fun: Function) {\n HOOKS[type] = HOOKS[type].filter(h => h !== fun);\n}\n"],"mappings":"AACA;AACA;AACA;AACA,OAAO,IAAMA,KAA8B,GAAG;EAC1C;AACJ;AACA;AACA;EACIC,cAAc,EAAE,EAAE;EAClB;AACJ;AACA;EACIC,mBAAmB,EAAE,EAAE;EACvB;AACJ;AACA;AACA;AACA;EACIC,gBAAgB,EAAE,EAAE;EACpBC,qBAAqB,EAAE,EAAE;EACzBC,kBAAkB,EAAE,EAAE;EACtBC,aAAa,EAAE,EAAE;EACjB;AACJ;AACA;AACA;EACIC,uBAAuB,EAAE,EAAE;EAC3B;AACJ;AACA;AACA;EACIC,sBAAsB,EAAE,EAAE;EAC1B;AACJ;AACA;AACA;EACIC,iBAAiB,EAAE,EAAE;EACrB;AACJ;AACA;AACA;EACIC,cAAc,EAAE,EAAE;EAClBC,gBAAgB,EAAE,EAAE;EACpB;AACJ;AACA;AACA;EACIC,eAAe,EAAE,EAAE;EACnBC,gBAAgB,EAAE,EAAE;EACpB;AACJ;AACA;AACA;EACIC,oBAAoB,EAAE,EAAE;EACxB;AACJ;AACA;AACA;AACA;AACA;EACIC,0BAA0B,EAAE,EAAE;EAC9BC,eAAe,EAAE,EAAE;EACnB;AACJ;AACA;AACA;AACA;AACA;AACA;EACIC,kBAAkB,EAAE,EAAE;EACtB;AACJ;AACA;EACIC,mBAAmB,EAAE,EAAE;EACvB;AACJ;AACA;EACIC,oBAAoB,EAAE,EAAE;EACxB;AACJ;AACA;AACA;EACIC,oBAAoB,EAAE,EAAE;EAExB;AACJ;AACA;AACA;AACA;EACIC,yBAAyB,EAAE,EAAE;EAE7B;AACJ;AACA;AACA;AACA;EACIC,wCAAwC,EAAE;AAC9C,CAAC;AAED,OAAO,SAASC,cAAcA,CAACC,OAAe,EAAEC,GAAQ,EAAE;EACtD,IAAIzB,KAAK,CAACwB,OAAO,CAAC,CAACE,MAAM,GAAG,CAAC,EAAE;IAC3B1B,KAAK,CAACwB,OAAO,CAAC,CAACG,OAAO,CAACC,GAAG,IAAIA,GAAG,CAACH,GAAG,CAAC,CAAC;EAC3C;AACJ;;AAGA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASI,mBAAmBA,CAACL,OAAe,EAAEC,GAAQ,EAAgB;EACzE,OAAOK,OAAO,CAACC,GAAG,CACd/B,KAAK,CAACwB,OAAO,CAAC,CAACQ,GAAG,CAACJ,GAAG,IAAIA,GAAG,CAACH,GAAG,CAAC,CACtC,CAAC;AACL;;AAEA;AACA;AACA;AACA,OAAO,SAASQ,UAAUA,CAACC,IAAY,EAAEN,GAAa,EAAE;EACpD5B,KAAK,CAACkC,IAAI,CAAC,GAAGlC,KAAK,CAACkC,IAAI,CAAC,CAACC,MAAM,CAACC,CAAC,IAAIA,CAAC,KAAKR,GAAG,CAAC;AACpD","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/incremental-write.js b/dist/esm/incremental-write.js deleted file mode 100644 index a1056341286..00000000000 --- a/dist/esm/incremental-write.js +++ /dev/null @@ -1,161 +0,0 @@ -import { isBulkWriteConflictError, 
rxStorageWriteErrorToRxError } from "./rx-error.js"; -import { clone, ensureNotFalsy, getFromMapOrCreate, getFromMapOrThrow, getHeightOfRevision, stripMetaDataFromDocument } from "./plugins/utils/index.js"; -/** - * The incremental write queue - * batches up all incremental writes to a collection - * so that performance can be improved by: - * - Running only one write even when there are multiple modifications to the same document. - * - Run all writes ins a single bulkWrite() call even when there are writes to many documents. - */ -export var IncrementalWriteQueue = /*#__PURE__*/function () { - function IncrementalWriteQueue(storageInstance, primaryPath, - // can be used to run hooks etc. - preWrite, postWrite) { - this.queueByDocId = new Map(); - this.isRunning = false; - this.storageInstance = storageInstance; - this.primaryPath = primaryPath; - this.preWrite = preWrite; - this.postWrite = postWrite; - } - var _proto = IncrementalWriteQueue.prototype; - _proto.addWrite = function addWrite(lastKnownDocumentState, modifier) { - var docId = lastKnownDocumentState[this.primaryPath]; - var ar = getFromMapOrCreate(this.queueByDocId, docId, () => []); - var ret = new Promise((resolve, reject) => { - var item = { - lastKnownDocumentState, - modifier, - resolve, - reject - }; - ensureNotFalsy(ar).push(item); - this.triggerRun(); - }); - return ret; - }; - _proto.triggerRun = async function triggerRun() { - if (this.isRunning === true || this.queueByDocId.size === 0) { - // already running - return; - } - this.isRunning = true; - var writeRows = []; - - /** - * 'take over' so that while the async functions runs, - * new incremental updates could be added from the outside. 
- */ - var itemsById = this.queueByDocId; - this.queueByDocId = new Map(); - await Promise.all(Array.from(itemsById.entries()).map(async ([_docId, items]) => { - var oldData = findNewestOfDocumentStates(items.map(i => i.lastKnownDocumentState)); - var newData = oldData; - for (var item of items) { - try { - newData = await item.modifier( - /** - * We have to clone() each time because the modifier - * might throw while it already changed some properties - * of the document. - */ - clone(newData)); - } catch (err) { - item.reject(err); - item.reject = () => {}; - item.resolve = () => {}; - } - } - try { - await this.preWrite(newData, oldData); - } catch (err) { - /** - * If the before-hooks fail, - * we reject all of the writes because it is - * not possible to determine which one is to blame. - */ - items.forEach(item => item.reject(err)); - return; - } - writeRows.push({ - previous: oldData, - document: newData - }); - })); - var writeResult = writeRows.length > 0 ? await this.storageInstance.bulkWrite(writeRows, 'incremental-write') : { - error: [], - success: [] - }; - - // process success - await Promise.all(writeResult.success.map(result => { - var docId = result[this.primaryPath]; - this.postWrite(result); - var items = getFromMapOrThrow(itemsById, docId); - items.forEach(item => item.resolve(result)); - })); - - // process errors - writeResult.error.forEach(error => { - var docId = error.documentId; - var items = getFromMapOrThrow(itemsById, docId); - var isConflict = isBulkWriteConflictError(error); - if (isConflict) { - // had conflict -> retry afterwards - var ar = getFromMapOrCreate(this.queueByDocId, docId, () => []); - /** - * Add the items back to this.queueByDocId - * by maintaining the original order. 
- */ - items.reverse().forEach(item => { - item.lastKnownDocumentState = ensureNotFalsy(isConflict.documentInDb); - ensureNotFalsy(ar).unshift(item); - }); - } else { - // other error -> must be thrown - var rxError = rxStorageWriteErrorToRxError(error); - items.forEach(item => item.reject(rxError)); - } - }); - this.isRunning = false; - - /** - * Always trigger another run - * because in between there might be new items - * been added to the queue. - */ - return this.triggerRun(); - }; - return IncrementalWriteQueue; -}(); -export function modifierFromPublicToInternal(publicModifier) { - var ret = async docData => { - var withoutMeta = stripMetaDataFromDocument(docData); - withoutMeta._deleted = docData._deleted; - var modified = await publicModifier(withoutMeta); - var reattachedMeta = Object.assign({}, modified, { - _meta: docData._meta, - _attachments: docData._attachments, - _rev: docData._rev, - _deleted: typeof modified._deleted !== 'undefined' ? modified._deleted : docData._deleted - }); - if (typeof reattachedMeta._deleted === 'undefined') { - reattachedMeta._deleted = false; - } - return reattachedMeta; - }; - return ret; -} -export function findNewestOfDocumentStates(docs) { - var newest = docs[0]; - var newestRevisionHeight = getHeightOfRevision(newest._rev); - docs.forEach(doc => { - var height = getHeightOfRevision(doc._rev); - if (height > newestRevisionHeight) { - newest = doc; - newestRevisionHeight = height; - } - }); - return newest; -} -//# sourceMappingURL=incremental-write.js.map \ No newline at end of file diff --git a/dist/esm/incremental-write.js.map b/dist/esm/incremental-write.js.map deleted file mode 100644 index 8130123368f..00000000000 --- a/dist/esm/incremental-write.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"incremental-write.js","names":["isBulkWriteConflictError","rxStorageWriteErrorToRxError","clone","ensureNotFalsy","getFromMapOrCreate","getFromMapOrThrow","getHeightOfRevision","stripMetaDataFromDocument","IncrementalWriteQueue","storageInstance","primaryPath","preWrite","postWrite","queueByDocId","Map","isRunning","_proto","prototype","addWrite","lastKnownDocumentState","modifier","docId","ar","ret","Promise","resolve","reject","item","push","triggerRun","size","writeRows","itemsById","all","Array","from","entries","map","_docId","items","oldData","findNewestOfDocumentStates","i","newData","err","forEach","previous","document","writeResult","length","bulkWrite","error","success","result","documentId","isConflict","reverse","documentInDb","unshift","rxError","modifierFromPublicToInternal","publicModifier","docData","withoutMeta","_deleted","modified","reattachedMeta","Object","assign","_meta","_attachments","_rev","docs","newest","newestRevisionHeight","doc","height"],"sources":["../../src/incremental-write.ts"],"sourcesContent":["import {\n isBulkWriteConflictError,\n rxStorageWriteErrorToRxError\n} from './rx-error.ts';\nimport type {\n ModifyFunction,\n BulkWriteRow,\n MaybePromise,\n RxDocumentData,\n RxDocumentWriteData,\n RxError,\n RxStorageBulkWriteResponse,\n RxStorageInstance,\n StringKeys,\n WithDeleted\n} from './types/index.d.ts';\nimport {\n clone,\n ensureNotFalsy,\n getFromMapOrCreate,\n getFromMapOrThrow,\n getHeightOfRevision,\n stripMetaDataFromDocument\n} from './plugins/utils/index.ts';\n\n\n\nexport type IncrementalWriteModifier = (\n doc: RxDocumentData\n) => MaybePromise> | MaybePromise>;\n\n\ntype IncrementalWriteQueueItem = {\n lastKnownDocumentState: RxDocumentData;\n modifier: IncrementalWriteModifier;\n resolve: (d: RxDocumentData) => void;\n reject: (error: RxError) => void;\n};\n\n/**\n * The incremental write queue\n * batches up all incremental writes to a collection\n * so that performance can be improved 
by:\n * - Running only one write even when there are multiple modifications to the same document.\n * - Run all writes ins a single bulkWrite() call even when there are writes to many documents.\n */\nexport class IncrementalWriteQueue {\n public queueByDocId = new Map[]>();\n public isRunning: boolean = false;\n\n constructor(\n public readonly storageInstance: RxStorageInstance,\n public readonly primaryPath: StringKeys>,\n // can be used to run hooks etc.\n public readonly preWrite: (newData: RxDocumentData, oldData: RxDocumentData) => MaybePromise,\n public readonly postWrite: (docData: RxDocumentData) => void\n\n ) { }\n\n addWrite(\n lastKnownDocumentState: RxDocumentData,\n modifier: IncrementalWriteModifier\n ): Promise> {\n const docId: string = lastKnownDocumentState[this.primaryPath] as any;\n const ar = getFromMapOrCreate(this.queueByDocId, docId, () => []);\n const ret = new Promise>((resolve, reject) => {\n const item: IncrementalWriteQueueItem = {\n lastKnownDocumentState,\n modifier,\n resolve,\n reject\n };\n ensureNotFalsy(ar).push(item);\n this.triggerRun();\n });\n return ret;\n }\n\n async triggerRun(): Promise {\n if (\n this.isRunning === true ||\n this.queueByDocId.size === 0\n ) {\n // already running\n return;\n }\n this.isRunning = true;\n const writeRows: BulkWriteRow[] = [];\n\n /**\n * 'take over' so that while the async functions runs,\n * new incremental updates could be added from the outside.\n */\n const itemsById = this.queueByDocId;\n this.queueByDocId = new Map();\n await Promise.all(\n Array.from(itemsById.entries())\n .map(async ([_docId, items]) => {\n const oldData = findNewestOfDocumentStates(\n items.map(i => i.lastKnownDocumentState)\n );\n let newData = oldData;\n for (const item of items) {\n try {\n newData = await item.modifier(\n /**\n * We have to clone() each time because the modifier\n * might throw while it already changed some properties\n * of the document.\n */\n clone(newData)\n ) as any;\n } catch (err: 
any) {\n item.reject(err);\n item.reject = () => { };\n item.resolve = () => { };\n }\n }\n\n try {\n await this.preWrite(newData, oldData);\n } catch (err: any) {\n /**\n * If the before-hooks fail,\n * we reject all of the writes because it is\n * not possible to determine which one is to blame.\n */\n items.forEach(item => item.reject(err));\n return;\n }\n writeRows.push({\n previous: oldData,\n document: newData\n });\n })\n );\n const writeResult: RxStorageBulkWriteResponse = writeRows.length > 0 ?\n await this.storageInstance.bulkWrite(writeRows, 'incremental-write') :\n { error: [], success: [] };\n\n // process success\n await Promise.all(\n writeResult.success.map(result => {\n const docId = result[this.primaryPath] as string;\n this.postWrite(result);\n const items = getFromMapOrThrow(itemsById, docId);\n items.forEach(item => item.resolve(result));\n })\n );\n\n // process errors\n writeResult.error\n .forEach(error => {\n const docId = error.documentId;\n const items = getFromMapOrThrow(itemsById, docId);\n const isConflict = isBulkWriteConflictError(error);\n if (isConflict) {\n // had conflict -> retry afterwards\n const ar = getFromMapOrCreate(this.queueByDocId, docId, () => []);\n /**\n * Add the items back to this.queueByDocId\n * by maintaining the original order.\n */\n items\n .reverse()\n .forEach(item => {\n item.lastKnownDocumentState = ensureNotFalsy(isConflict.documentInDb);\n ensureNotFalsy(ar).unshift(item);\n });\n } else {\n // other error -> must be thrown\n const rxError = rxStorageWriteErrorToRxError(error);\n items.forEach(item => item.reject(rxError));\n }\n });\n this.isRunning = false;\n\n /**\n * Always trigger another run\n * because in between there might be new items\n * been added to the queue.\n */\n return this.triggerRun();\n }\n}\n\n\nexport function modifierFromPublicToInternal(\n publicModifier: ModifyFunction\n): IncrementalWriteModifier {\n const ret = async (docData: RxDocumentData) => {\n const withoutMeta: 
WithDeleted = stripMetaDataFromDocument(docData) as any;\n withoutMeta._deleted = docData._deleted;\n const modified = await publicModifier(withoutMeta);\n const reattachedMeta: RxDocumentData = Object.assign({}, modified, {\n _meta: docData._meta,\n _attachments: docData._attachments,\n _rev: docData._rev,\n _deleted: typeof (modified as WithDeleted)._deleted !== 'undefined' ?\n (modified as WithDeleted)._deleted :\n docData._deleted\n });\n if (typeof reattachedMeta._deleted === 'undefined') {\n reattachedMeta._deleted = false;\n }\n return reattachedMeta;\n };\n return ret;\n}\n\n\nexport function findNewestOfDocumentStates(\n docs: RxDocumentData[]\n): RxDocumentData {\n\n let newest = docs[0];\n let newestRevisionHeight = getHeightOfRevision(newest._rev);\n docs.forEach(doc => {\n const height = getHeightOfRevision(doc._rev);\n if (height > newestRevisionHeight) {\n newest = doc;\n newestRevisionHeight = height;\n }\n });\n return newest;\n}\n"],"mappings":"AAAA,SACIA,wBAAwB,EACxBC,4BAA4B,QACzB,eAAe;AAatB,SACIC,KAAK,EACLC,cAAc,EACdC,kBAAkB,EAClBC,iBAAiB,EACjBC,mBAAmB,EACnBC,yBAAyB,QACtB,0BAA0B;AAgBjC;AACA;AACA;AACA;AACA;AACA;AACA;AACA,WAAaC,qBAAqB;EAI9B,SAAAA,sBACoBC,eAAuD,EACvDC,WAAkD;EAClE;EACgBC,QAAwG,EACxGC,SAAuD,EAEzE;IAAA,KAVKC,YAAY,GAAG,IAAIC,GAAG,CAAiD,CAAC;IAAA,KACxEC,SAAS,GAAY,KAAK;IAAA,KAGbN,eAAuD,GAAvDA,eAAuD;IAAA,KACvDC,WAAkD,GAAlDA,WAAkD;IAAA,KAElDC,QAAwG,GAAxGA,QAAwG;IAAA,KACxGC,SAAuD,GAAvDA,SAAuD;EAEvE;EAAC,IAAAI,MAAA,GAAAR,qBAAA,CAAAS,SAAA;EAAAD,MAAA,CAELE,QAAQ,GAAR,SAAAA,SACIC,sBAAiD,EACjDC,QAA6C,EACX;IAClC,IAAMC,KAAa,GAAGF,sBAAsB,CAAC,IAAI,CAACT,WAAW,CAAQ;IACrE,IAAMY,EAAE,GAAGlB,kBAAkB,CAAC,IAAI,CAACS,YAAY,EAAEQ,KAAK,EAAE,MAAM,EAAE,CAAC;IACjE,IAAME,GAAG,GAAG,IAAIC,OAAO,CAA4B,CAACC,OAAO,EAAEC,MAAM,KAAK;MACpE,IAAMC,IAA0C,GAAG;QAC/CR,sBAAsB;QACtBC,QAAQ;QACRK,OAAO;QACPC;MACJ,CAAC;MACDvB,cAAc,CAACmB,EAAE,CAAC,CAACM,IAAI,CAACD,IAAI,CAAC;MAC7B,IAAI,CAACE,UAAU,CAAC,CAAC;IACrB,CAAC,CAAC;IACF,OAAON,GAAG;EACd,CAAC;EAAAP,MAAA,CAEKa,UAAU,GAAhB,eAAAA,WAAA,
EAAkC;IAC9B,IACI,IAAI,CAACd,SAAS,KAAK,IAAI,IACvB,IAAI,CAACF,YAAY,CAACiB,IAAI,KAAK,CAAC,EAC9B;MACE;MACA;IACJ;IACA,IAAI,CAACf,SAAS,GAAG,IAAI;IACrB,IAAMgB,SAAoC,GAAG,EAAE;;IAE/C;AACR;AACA;AACA;IACQ,IAAMC,SAAS,GAAG,IAAI,CAACnB,YAAY;IACnC,IAAI,CAACA,YAAY,GAAG,IAAIC,GAAG,CAAC,CAAC;IAC7B,MAAMU,OAAO,CAACS,GAAG,CACbC,KAAK,CAACC,IAAI,CAACH,SAAS,CAACI,OAAO,CAAC,CAAC,CAAC,CAC1BC,GAAG,CAAC,OAAO,CAACC,MAAM,EAAEC,KAAK,CAAC,KAAK;MAC5B,IAAMC,OAAO,GAAGC,0BAA0B,CACtCF,KAAK,CAACF,GAAG,CAACK,CAAC,IAAIA,CAAC,CAACvB,sBAAsB,CAC3C,CAAC;MACD,IAAIwB,OAAO,GAAGH,OAAO;MACrB,KAAK,IAAMb,IAAI,IAAIY,KAAK,EAAE;QACtB,IAAI;UACAI,OAAO,GAAG,MAAMhB,IAAI,CAACP,QAAQ;UACzB;AAChC;AACA;AACA;AACA;UACgClB,KAAK,CAACyC,OAAO,CACjB,CAAQ;QACZ,CAAC,CAAC,OAAOC,GAAQ,EAAE;UACfjB,IAAI,CAACD,MAAM,CAACkB,GAAG,CAAC;UAChBjB,IAAI,CAACD,MAAM,GAAG,MAAM,CAAE,CAAC;UACvBC,IAAI,CAACF,OAAO,GAAG,MAAM,CAAE,CAAC;QAC5B;MACJ;MAEA,IAAI;QACA,MAAM,IAAI,CAACd,QAAQ,CAACgC,OAAO,EAAEH,OAAO,CAAC;MACzC,CAAC,CAAC,OAAOI,GAAQ,EAAE;QACf;AACxB;AACA;AACA;AACA;QACwBL,KAAK,CAACM,OAAO,CAAClB,IAAI,IAAIA,IAAI,CAACD,MAAM,CAACkB,GAAG,CAAC,CAAC;QACvC;MACJ;MACAb,SAAS,CAACH,IAAI,CAAC;QACXkB,QAAQ,EAAEN,OAAO;QACjBO,QAAQ,EAAEJ;MACd,CAAC,CAAC;IACN,CAAC,CACT,CAAC;IACD,IAAMK,WAAkD,GAAGjB,SAAS,CAACkB,MAAM,GAAG,CAAC,GAC3E,MAAM,IAAI,CAACxC,eAAe,CAACyC,SAAS,CAACnB,SAAS,EAAE,mBAAmB,CAAC,GACpE;MAAEoB,KAAK,EAAE,EAAE;MAAEC,OAAO,EAAE;IAAG,CAAC;;IAE9B;IACA,MAAM5B,OAAO,CAACS,GAAG,CACbe,WAAW,CAACI,OAAO,CAACf,GAAG,CAACgB,MAAM,IAAI;MAC9B,IAAMhC,KAAK,GAAGgC,MAAM,CAAC,IAAI,CAAC3C,WAAW,CAAW;MAChD,IAAI,CAACE,SAAS,CAACyC,MAAM,CAAC;MACtB,IAAMd,KAAK,GAAGlC,iBAAiB,CAAC2B,SAAS,EAAEX,KAAK,CAAC;MACjDkB,KAAK,CAACM,OAAO,CAAClB,IAAI,IAAIA,IAAI,CAACF,OAAO,CAAC4B,MAAM,CAAC,CAAC;IAC/C,CAAC,CACL,CAAC;;IAED;IACAL,WAAW,CAACG,KAAK,CACZN,OAAO,CAACM,KAAK,IAAI;MACd,IAAM9B,KAAK,GAAG8B,KAAK,CAACG,UAAU;MAC9B,IAAMf,KAAK,GAAGlC,iBAAiB,CAAC2B,SAAS,EAAEX,KAAK,CAAC;MACjD,IAAMkC,UAAU,GAAGvD,wBAAwB,CAAYmD,KAAK,CAAC;MAC7D,IAAII,UAAU,EAAE;QACZ;QACA,IAAMjC,EAAE,GAAGlB,kBAAkB,CAAC,IAAI,CAACS,YAAY,EAAEQ,KAAK,EAAE,MAAM,EA
AE,CAAC;QACjE;AACpB;AACA;AACA;QACoBkB,KAAK,CACAiB,OAAO,CAAC,CAAC,CACTX,OAAO,CAAClB,IAAI,IAAI;UACbA,IAAI,CAACR,sBAAsB,GAAGhB,cAAc,CAACoD,UAAU,CAACE,YAAY,CAAC;UACrEtD,cAAc,CAACmB,EAAE,CAAC,CAACoC,OAAO,CAAC/B,IAAI,CAAC;QACpC,CAAC,CAAC;MACV,CAAC,MAAM;QACH;QACA,IAAMgC,OAAO,GAAG1D,4BAA4B,CAACkD,KAAK,CAAC;QACnDZ,KAAK,CAACM,OAAO,CAAClB,IAAI,IAAIA,IAAI,CAACD,MAAM,CAACiC,OAAO,CAAC,CAAC;MAC/C;IACJ,CAAC,CAAC;IACN,IAAI,CAAC5C,SAAS,GAAG,KAAK;;IAEtB;AACR;AACA;AACA;AACA;IACQ,OAAO,IAAI,CAACc,UAAU,CAAC,CAAC;EAC5B,CAAC;EAAA,OAAArB,qBAAA;AAAA;AAIL,OAAO,SAASoD,4BAA4BA,CACxCC,cAAyC,EACN;EACnC,IAAMtC,GAAG,GAAG,MAAOuC,OAAkC,IAAK;IACtD,IAAMC,WAAmC,GAAGxD,yBAAyB,CAACuD,OAAO,CAAQ;IACrFC,WAAW,CAACC,QAAQ,GAAGF,OAAO,CAACE,QAAQ;IACvC,IAAMC,QAAQ,GAAG,MAAMJ,cAAc,CAACE,WAAW,CAAC;IAClD,IAAMG,cAAyC,GAAGC,MAAM,CAACC,MAAM,CAAC,CAAC,CAAC,EAAEH,QAAQ,EAAE;MAC1EI,KAAK,EAAEP,OAAO,CAACO,KAAK;MACpBC,YAAY,EAAER,OAAO,CAACQ,YAAY;MAClCC,IAAI,EAAET,OAAO,CAACS,IAAI;MAClBP,QAAQ,EAAE,OAAQC,QAAQ,CAA4BD,QAAQ,KAAK,WAAW,GACzEC,QAAQ,CAA4BD,QAAQ,GAC7CF,OAAO,CAACE;IAChB,CAAC,CAAC;IACF,IAAI,OAAOE,cAAc,CAACF,QAAQ,KAAK,WAAW,EAAE;MAChDE,cAAc,CAACF,QAAQ,GAAG,KAAK;IACnC;IACA,OAAOE,cAAc;EACzB,CAAC;EACD,OAAO3C,GAAG;AACd;AAGA,OAAO,SAASkB,0BAA0BA,CACtC+B,IAAiC,EACR;EAEzB,IAAIC,MAAM,GAAGD,IAAI,CAAC,CAAC,CAAC;EACpB,IAAIE,oBAAoB,GAAGpE,mBAAmB,CAACmE,MAAM,CAACF,IAAI,CAAC;EAC3DC,IAAI,CAAC3B,OAAO,CAAC8B,GAAG,IAAI;IAChB,IAAMC,MAAM,GAAGtE,mBAAmB,CAACqE,GAAG,CAACJ,IAAI,CAAC;IAC5C,IAAIK,MAAM,GAAGF,oBAAoB,EAAE;MAC/BD,MAAM,GAAGE,GAAG;MACZD,oBAAoB,GAAGE,MAAM;IACjC;EACJ,CAAC,CAAC;EACF,OAAOH,MAAM;AACjB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/index.js b/dist/esm/index.js deleted file mode 100644 index 9be32098583..00000000000 --- a/dist/esm/index.js +++ /dev/null @@ -1,30 +0,0 @@ -/** - * this is the main entry-point - * for when the you call "import from 'rxdb'". 
- */ - -export * from "./plugin.js"; -export * from "./rx-database.js"; -export * from "./rx-error.js"; -export * from "./rx-database-internal-store.js"; -export * from "./overwritable.js"; -export * from "./rx-collection.js"; -export * from "./rx-collection-helper.js"; -export * from "./rx-document.js"; -export * from "./rx-change-event.js"; -export * from "./rx-document-prototype-merge.js"; -export * from "./rx-query.js"; -export * from "./rx-query-single-result.js"; -export * from "./rx-query-helper.js"; -export * from "./rx-schema.js"; -export * from "./rx-schema-helper.js"; -export * from "./rx-storage-helper.js"; -export * from "./replication-protocol/index.js"; -export * from "./rx-storage-multiinstance.js"; -export * from "./custom-index.js"; -export * from "./query-planner.js"; -export * from "./plugin-helpers.js"; -export * from "./plugins/utils/index.js"; -export * from "./hooks.js"; -export * from "./query-cache.js"; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/index.js.map b/dist/esm/index.js.map deleted file mode 100644 index ac3193510c7..00000000000 --- a/dist/esm/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":[],"sources":["../../src/index.ts"],"sourcesContent":["/**\n * this is the main entry-point\n * for when the you call \"import from 'rxdb'\".\n */\n\nexport * from './plugin.ts';\nexport * from './rx-database.ts';\nexport * from './rx-error.ts';\nexport * from './rx-database-internal-store.ts';\nexport * from './overwritable.ts';\nexport * from './rx-collection.ts';\nexport * from './rx-collection-helper.ts';\nexport * from './rx-document.ts';\nexport * from './rx-change-event.ts';\nexport * from './rx-document-prototype-merge.ts';\nexport * from './rx-query.ts';\nexport * from './rx-query-single-result.ts';\nexport * from './rx-query-helper.ts';\nexport * from './rx-schema.ts';\nexport * from './rx-schema-helper.ts';\nexport * from './rx-storage-helper.ts';\nexport * 
from './replication-protocol/index.ts';\nexport * from './rx-storage-multiinstance.ts';\nexport * from './custom-index.ts';\nexport * from './query-planner.ts';\nexport * from './plugin-helpers.ts';\nexport * from './plugins/utils/index.ts';\nexport * from './hooks.ts';\nexport * from './query-cache.ts';\nexport type * from './types/index.ts';\n"],"mappings":"AAAA;AACA;AACA;AACA;;AAEA,cAAc,aAAa;AAC3B,cAAc,kBAAkB;AAChC,cAAc,eAAe;AAC7B,cAAc,iCAAiC;AAC/C,cAAc,mBAAmB;AACjC,cAAc,oBAAoB;AAClC,cAAc,2BAA2B;AACzC,cAAc,kBAAkB;AAChC,cAAc,sBAAsB;AACpC,cAAc,kCAAkC;AAChD,cAAc,eAAe;AAC7B,cAAc,6BAA6B;AAC3C,cAAc,sBAAsB;AACpC,cAAc,gBAAgB;AAC9B,cAAc,uBAAuB;AACrC,cAAc,wBAAwB;AACtC,cAAc,iCAAiC;AAC/C,cAAc,+BAA+B;AAC7C,cAAc,mBAAmB;AACjC,cAAc,oBAAoB;AAClC,cAAc,qBAAqB;AACnC,cAAc,0BAA0B;AACxC,cAAc,YAAY;AAC1B,cAAc,kBAAkB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/overwritable.js b/dist/esm/overwritable.js deleted file mode 100644 index d31dadeaef6..00000000000 --- a/dist/esm/overwritable.js +++ /dev/null @@ -1,33 +0,0 @@ -/** - * functions that can or should be overwritten by plugins - * IMPORTANT: Do not import any big stuff from RxDB here! - * An 'overwritable' can be used inside WebWorkers for RxStorage only, - * and we do not want to have the full RxDB lib bundled in them. - */ - -export var overwritable = { - /** - * if this method is overwritten with one - * that returns true, we do additional checks - * which help the developer but have bad performance - */ - isDevMode() { - return false; - }, - /** - * Deep freezes and object when in dev-mode. - * Deep-Freezing has the same performance as deep-cloning, so we only do that in dev-mode. 
- * Also, we can ensure the readonly state via typescript - * @link https://developer.mozilla.org/de/docs/Web/JavaScript/Reference/Global_Objects/Object/freeze - */ - deepFreezeWhenDevMode(obj) { - return obj; - }, - /** - * overwritten to map error-codes to text-messages - */ - tunnelErrorMessage(message) { - return "RxDB Error-Code " + message + ".\n Error messages are not included in RxDB core to reduce build size.\n "; - } -}; -//# sourceMappingURL=overwritable.js.map \ No newline at end of file diff --git a/dist/esm/overwritable.js.map b/dist/esm/overwritable.js.map deleted file mode 100644 index 45233761201..00000000000 --- a/dist/esm/overwritable.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"overwritable.js","names":["overwritable","isDevMode","deepFreezeWhenDevMode","obj","tunnelErrorMessage","message"],"sources":["../../src/overwritable.ts"],"sourcesContent":["/**\n * functions that can or should be overwritten by plugins\n * IMPORTANT: Do not import any big stuff from RxDB here!\n * An 'overwritable' can be used inside WebWorkers for RxStorage only,\n * and we do not want to have the full RxDB lib bundled in them.\n */\n\nimport type { DeepReadonly } from './types/util.d.ts';\n\nexport const overwritable = {\n /**\n * if this method is overwritten with one\n * that returns true, we do additional checks\n * which help the developer but have bad performance\n */\n isDevMode(): boolean {\n return false;\n },\n\n /**\n * Deep freezes and object when in dev-mode.\n * Deep-Freezing has the same performance as deep-cloning, so we only do that in dev-mode.\n * Also, we can ensure the readonly state via typescript\n * @link https://developer.mozilla.org/de/docs/Web/JavaScript/Reference/Global_Objects/Object/freeze\n */\n deepFreezeWhenDevMode(obj: T): DeepReadonly {\n return obj as any;\n },\n\n /**\n * overwritten to map error-codes to text-messages\n */\n tunnelErrorMessage(message: string): string {\n return `RxDB Error-Code ${message}.\n Error 
messages are not included in RxDB core to reduce build size.\n `;\n }\n};\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;;AAIA,OAAO,IAAMA,YAAY,GAAG;EACxB;AACJ;AACA;AACA;AACA;EACIC,SAASA,CAAA,EAAY;IACjB,OAAO,KAAK;EAChB,CAAC;EAED;AACJ;AACA;AACA;AACA;AACA;EACIC,qBAAqBA,CAAIC,GAAM,EAAmB;IAC9C,OAAOA,GAAG;EACd,CAAC;EAED;AACJ;AACA;EACIC,kBAAkBA,CAACC,OAAe,EAAU;IACxC,4BAA0BA,OAAO;EAGrC;AACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/package.json b/dist/esm/package.json deleted file mode 100644 index 876964185fe..00000000000 --- a/dist/esm/package.json +++ /dev/null @@ -1 +0,0 @@ -{ "type": "module", "sideEffects": false } diff --git a/dist/esm/plugin-helpers.js b/dist/esm/plugin-helpers.js deleted file mode 100644 index 9368cfe4aa1..00000000000 --- a/dist/esm/plugin-helpers.js +++ /dev/null @@ -1,249 +0,0 @@ -import { filter, mergeMap, tap } from 'rxjs/operators'; -import { getPrimaryFieldOfPrimaryKey } from "./rx-schema-helper.js"; -import { flatClone, getFromMapOrCreate, requestIdleCallbackIfAvailable } from "./plugins/utils/index.js"; -import { BehaviorSubject, firstValueFrom } from 'rxjs'; - -/** - * Returns the validation errors. - * If document is fully valid, returns an empty array. - */ - -/** - * cache the validators by the schema string - * so we can reuse them when multiple collections have the same schema - * - * Notice: to make it easier and not dependent on a hash function, - * we use the plain json string. - */ -var VALIDATOR_CACHE_BY_VALIDATOR_KEY = new Map(); - -/** - * This factory is used in the validation plugins - * so that we can reuse the basic storage wrapping code. - */ -export function wrappedValidateStorageFactory( -/** - * Returns a method that can be used to validate - * documents and throws when the document is not valid. - */ -getValidator, -/** - * A string to identify the validation library. 
- */ -validatorKey) { - var VALIDATOR_CACHE = getFromMapOrCreate(VALIDATOR_CACHE_BY_VALIDATOR_KEY, validatorKey, () => new Map()); - function initValidator(schema) { - return getFromMapOrCreate(VALIDATOR_CACHE, JSON.stringify(schema), () => getValidator(schema)); - } - return args => { - return Object.assign({}, args.storage, { - async createStorageInstance(params) { - var instance = await args.storage.createStorageInstance(params); - var primaryPath = getPrimaryFieldOfPrimaryKey(params.schema.primaryKey); - - /** - * Lazy initialize the validator - * to save initial page load performance. - * Some libraries take really long to initialize the validator - * from the schema. - */ - var validatorCached; - requestIdleCallbackIfAvailable(() => validatorCached = initValidator(params.schema)); - var oldBulkWrite = instance.bulkWrite.bind(instance); - instance.bulkWrite = (documentWrites, context) => { - if (!validatorCached) { - validatorCached = initValidator(params.schema); - } - var errors = []; - var continueWrites = []; - documentWrites.forEach(row => { - var documentId = row.document[primaryPath]; - var validationErrors = validatorCached(row.document); - if (validationErrors.length > 0) { - errors.push({ - status: 422, - isError: true, - documentId, - writeRow: row, - validationErrors - }); - } else { - continueWrites.push(row); - } - }); - var writePromise = continueWrites.length > 0 ? oldBulkWrite(continueWrites, context) : Promise.resolve({ - error: [], - success: [] - }); - return writePromise.then(writeResult => { - errors.forEach(validationError => { - writeResult.error.push(validationError); - }); - return writeResult; - }); - }; - return instance; - } - }); - }; -} - -/** - * Used in plugins to easily modify all in- and outgoing - * data of that storage instance. 
- */ -export function wrapRxStorageInstance(originalSchema, instance, modifyToStorage, modifyFromStorage, modifyAttachmentFromStorage = v => v) { - async function toStorage(docData) { - if (!docData) { - return docData; - } - return await modifyToStorage(docData); - } - async function fromStorage(docData) { - if (!docData) { - return docData; - } - return await modifyFromStorage(docData); - } - async function errorFromStorage(error) { - var ret = flatClone(error); - ret.writeRow = flatClone(ret.writeRow); - if (ret.documentInDb) { - ret.documentInDb = await fromStorage(ret.documentInDb); - } - if (ret.writeRow.previous) { - ret.writeRow.previous = await fromStorage(ret.writeRow.previous); - } - ret.writeRow.document = await fromStorage(ret.writeRow.document); - return ret; - } - var processingChangesCount$ = new BehaviorSubject(0); - var wrappedInstance = { - databaseName: instance.databaseName, - internals: instance.internals, - cleanup: instance.cleanup.bind(instance), - options: instance.options, - close: instance.close.bind(instance), - schema: originalSchema, - collectionName: instance.collectionName, - count: instance.count.bind(instance), - remove: instance.remove.bind(instance), - originalStorageInstance: instance, - bulkWrite: async (documentWrites, context) => { - var useRows = []; - await Promise.all(documentWrites.map(async row => { - var [previous, document] = await Promise.all([row.previous ? 
toStorage(row.previous) : undefined, toStorage(row.document)]); - useRows.push({ - previous, - document - }); - })); - var writeResult = await instance.bulkWrite(useRows, context); - var ret = { - success: [], - error: [] - }; - var promises = []; - writeResult.success.forEach(v => { - promises.push(fromStorage(v).then(v2 => ret.success.push(v2))); - }); - writeResult.error.forEach(error => { - promises.push(errorFromStorage(error).then(err => ret.error.push(err))); - }); - await Promise.all(promises); - - /** - * By definition, all change events must be emitted - * BEFORE the write call resolves. - * To ensure that even when the modifiers are async, - * we wait here until the processing queue is empty. - */ - await firstValueFrom(processingChangesCount$.pipe(filter(v => v === 0))); - return ret; - }, - query: preparedQuery => { - return instance.query(preparedQuery).then(queryResult => { - return Promise.all(queryResult.documents.map(doc => fromStorage(doc))); - }).then(documents => ({ - documents: documents - })); - }, - getAttachmentData: async (documentId, attachmentId, digest) => { - var data = await instance.getAttachmentData(documentId, attachmentId, digest); - data = await modifyAttachmentFromStorage(data); - return data; - }, - findDocumentsById: (ids, deleted) => { - return instance.findDocumentsById(ids, deleted).then(async findResult => { - var ret = []; - await Promise.all(findResult.map(async doc => { - ret.push(await fromStorage(doc)); - })); - return ret; - }); - }, - getChangedDocumentsSince: !instance.getChangedDocumentsSince ? 
undefined : (limit, checkpoint) => { - return instance.getChangedDocumentsSince(limit, checkpoint).then(async result => { - return { - checkpoint: result.checkpoint, - documents: await Promise.all(result.documents.map(d => fromStorage(d))) - }; - }); - }, - changeStream: () => { - return instance.changeStream().pipe(tap(() => processingChangesCount$.next(processingChangesCount$.getValue() + 1)), mergeMap(async eventBulk => { - var useEvents = await Promise.all(eventBulk.events.map(async event => { - var [documentData, previousDocumentData] = await Promise.all([fromStorage(event.documentData), fromStorage(event.previousDocumentData)]); - var ev = { - operation: event.operation, - documentId: event.documentId, - documentData: documentData, - previousDocumentData: previousDocumentData, - isLocal: false - }; - return ev; - })); - var ret = { - id: eventBulk.id, - events: useEvents, - checkpoint: eventBulk.checkpoint, - context: eventBulk.context, - startTime: eventBulk.startTime, - endTime: eventBulk.endTime - }; - return ret; - }), tap(() => processingChangesCount$.next(processingChangesCount$.getValue() - 1))); - }, - conflictResultionTasks: () => { - return instance.conflictResultionTasks().pipe(mergeMap(async task => { - var assumedMasterState = await fromStorage(task.input.assumedMasterState); - var newDocumentState = await fromStorage(task.input.newDocumentState); - var realMasterState = await fromStorage(task.input.realMasterState); - return { - id: task.id, - context: task.context, - input: { - assumedMasterState, - realMasterState, - newDocumentState - } - }; - })); - }, - resolveConflictResultionTask: taskSolution => { - if (taskSolution.output.isEqual) { - return instance.resolveConflictResultionTask(taskSolution); - } - var useSolution = { - id: taskSolution.id, - output: { - isEqual: false, - documentData: taskSolution.output.documentData - } - }; - return instance.resolveConflictResultionTask(useSolution); - } - }; - return wrappedInstance; -} -//# 
sourceMappingURL=plugin-helpers.js.map \ No newline at end of file diff --git a/dist/esm/plugin-helpers.js.map b/dist/esm/plugin-helpers.js.map deleted file mode 100644 index de2de14b0f0..00000000000 --- a/dist/esm/plugin-helpers.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"plugin-helpers.js","names":["filter","mergeMap","tap","getPrimaryFieldOfPrimaryKey","flatClone","getFromMapOrCreate","requestIdleCallbackIfAvailable","BehaviorSubject","firstValueFrom","VALIDATOR_CACHE_BY_VALIDATOR_KEY","Map","wrappedValidateStorageFactory","getValidator","validatorKey","VALIDATOR_CACHE","initValidator","schema","JSON","stringify","args","Object","assign","storage","createStorageInstance","params","instance","primaryPath","primaryKey","validatorCached","oldBulkWrite","bulkWrite","bind","documentWrites","context","errors","continueWrites","forEach","row","documentId","document","validationErrors","length","push","status","isError","writeRow","writePromise","Promise","resolve","error","success","then","writeResult","validationError","wrapRxStorageInstance","originalSchema","modifyToStorage","modifyFromStorage","modifyAttachmentFromStorage","v","toStorage","docData","fromStorage","errorFromStorage","ret","documentInDb","previous","processingChangesCount$","wrappedInstance","databaseName","internals","cleanup","options","close","collectionName","count","remove","originalStorageInstance","useRows","all","map","undefined","promises","v2","err","pipe","query","preparedQuery","queryResult","documents","doc","getAttachmentData","attachmentId","digest","data","findDocumentsById","ids","deleted","findResult","getChangedDocumentsSince","limit","checkpoint","result","d","changeStream","next","getValue","eventBulk","useEvents","events","event","documentData","previousDocumentData","ev","operation","isLocal","id","startTime","endTime","conflictResultionTasks","task","assumedMasterState","input","newDocumentState","realMasterState","resolveConflictResultionTask","taskSolution","outpu
t","isEqual","useSolution"],"sources":["../../src/plugin-helpers.ts"],"sourcesContent":["import { filter, mergeMap, tap } from 'rxjs/operators';\nimport { getPrimaryFieldOfPrimaryKey } from './rx-schema-helper.ts';\nimport { WrappedRxStorageInstance } from './rx-storage-helper.ts';\nimport type {\n BulkWriteRow,\n EventBulk,\n RxChangeEvent,\n RxDocumentData,\n RxDocumentWriteData,\n RxJsonSchema,\n RxStorage,\n RxStorageWriteError,\n RxStorageBulkWriteResponse,\n RxStorageChangeEvent,\n RxStorageInstance,\n RxStorageInstanceCreationParams,\n RxValidationError,\n RxStorageWriteErrorConflict,\n MaybePromise\n} from './types/index.d.ts';\nimport {\n flatClone,\n getFromMapOrCreate,\n requestIdleCallbackIfAvailable\n} from './plugins/utils/index.ts';\nimport { BehaviorSubject, firstValueFrom } from 'rxjs';\n\n\ntype WrappedStorageFunction = (\n args: {\n storage: RxStorage;\n }\n) => RxStorage;\n\n/**\n * Returns the validation errors.\n * If document is fully valid, returns an empty array.\n */\ntype ValidatorFunction = (docData: RxDocumentData) => RxValidationError[];\n\n/**\n * cache the validators by the schema string\n * so we can reuse them when multiple collections have the same schema\n *\n * Notice: to make it easier and not dependent on a hash function,\n * we use the plain json string.\n */\nconst VALIDATOR_CACHE_BY_VALIDATOR_KEY: Map> = new Map();\n\n/**\n * This factory is used in the validation plugins\n * so that we can reuse the basic storage wrapping code.\n */\nexport function wrappedValidateStorageFactory(\n /**\n * Returns a method that can be used to validate\n * documents and throws when the document is not valid.\n */\n getValidator: (schema: RxJsonSchema) => ValidatorFunction,\n /**\n * A string to identify the validation library.\n */\n validatorKey: string\n): WrappedStorageFunction {\n const VALIDATOR_CACHE = getFromMapOrCreate(\n VALIDATOR_CACHE_BY_VALIDATOR_KEY,\n validatorKey,\n () => new Map()\n );\n\n function initValidator(\n schema: 
RxJsonSchema\n ): ValidatorFunction {\n return getFromMapOrCreate(\n VALIDATOR_CACHE,\n JSON.stringify(schema),\n () => getValidator(schema)\n );\n }\n\n return (args) => {\n return Object.assign(\n {},\n args.storage,\n {\n async createStorageInstance(\n params: RxStorageInstanceCreationParams\n ) {\n const instance = await args.storage.createStorageInstance(params);\n const primaryPath = getPrimaryFieldOfPrimaryKey(params.schema.primaryKey);\n\n /**\n * Lazy initialize the validator\n * to save initial page load performance.\n * Some libraries take really long to initialize the validator\n * from the schema.\n */\n let validatorCached: ValidatorFunction;\n requestIdleCallbackIfAvailable(() => validatorCached = initValidator(params.schema));\n\n const oldBulkWrite = instance.bulkWrite.bind(instance);\n instance.bulkWrite = (\n documentWrites: BulkWriteRow[],\n context: string\n ) => {\n if (!validatorCached) {\n validatorCached = initValidator(params.schema);\n }\n const errors: RxStorageWriteError[] = [];\n const continueWrites: typeof documentWrites = [];\n documentWrites.forEach(row => {\n const documentId: string = row.document[primaryPath] as any;\n const validationErrors = validatorCached(row.document);\n if (validationErrors.length > 0) {\n errors.push({\n status: 422,\n isError: true,\n documentId,\n writeRow: row,\n validationErrors\n });\n } else {\n continueWrites.push(row);\n }\n });\n const writePromise: Promise> = continueWrites.length > 0 ?\n oldBulkWrite(continueWrites, context) :\n Promise.resolve({ error: [], success: [] });\n return writePromise.then(writeResult => {\n errors.forEach(validationError => {\n writeResult.error.push(validationError);\n });\n return writeResult;\n });\n };\n\n return instance;\n }\n }\n );\n };\n\n}\n\n\n\n/**\n * Used in plugins to easily modify all in- and outgoing\n * data of that storage instance.\n */\nexport function wrapRxStorageInstance(\n originalSchema: RxJsonSchema>,\n instance: RxStorageInstance,\n 
modifyToStorage: (docData: RxDocumentWriteData) => MaybePromise>,\n modifyFromStorage: (docData: RxDocumentData) => MaybePromise>,\n modifyAttachmentFromStorage: (attachmentData: string) => MaybePromise = (v) => v\n): WrappedRxStorageInstance {\n async function toStorage(docData: RxDocumentWriteData): Promise> {\n if (!docData) {\n return docData;\n }\n return await modifyToStorage(docData);\n }\n async function fromStorage(docData: RxDocumentData | null): Promise> {\n if (!docData) {\n return docData;\n }\n return await modifyFromStorage(docData);\n }\n async function errorFromStorage(\n error: RxStorageWriteError\n ): Promise> {\n const ret = flatClone(error);\n ret.writeRow = flatClone(ret.writeRow);\n if ((ret as RxStorageWriteErrorConflict).documentInDb) {\n (ret as RxStorageWriteErrorConflict).documentInDb = await fromStorage((ret as RxStorageWriteErrorConflict).documentInDb);\n }\n if (ret.writeRow.previous) {\n ret.writeRow.previous = await fromStorage(ret.writeRow.previous);\n }\n ret.writeRow.document = await fromStorage(ret.writeRow.document);\n return ret;\n }\n\n\n const processingChangesCount$ = new BehaviorSubject(0);\n\n const wrappedInstance: WrappedRxStorageInstance = {\n databaseName: instance.databaseName,\n internals: instance.internals,\n cleanup: instance.cleanup.bind(instance),\n options: instance.options,\n close: instance.close.bind(instance),\n schema: originalSchema,\n collectionName: instance.collectionName,\n count: instance.count.bind(instance),\n remove: instance.remove.bind(instance),\n originalStorageInstance: instance,\n bulkWrite: async (\n documentWrites: BulkWriteRow[],\n context: string\n ) => {\n const useRows: BulkWriteRow[] = [];\n await Promise.all(\n documentWrites.map(async (row) => {\n const [previous, document] = await Promise.all([\n row.previous ? 
toStorage(row.previous) : undefined,\n toStorage(row.document)\n ]);\n useRows.push({ previous, document });\n })\n );\n\n const writeResult = await instance.bulkWrite(useRows, context);\n const ret: RxStorageBulkWriteResponse = {\n success: [],\n error: []\n };\n const promises: Promise[] = [];\n writeResult.success.forEach(v => {\n promises.push(\n fromStorage(v).then(v2 => ret.success.push(v2))\n );\n });\n writeResult.error.forEach(error => {\n promises.push(\n errorFromStorage(error).then(err => ret.error.push(err))\n );\n });\n await Promise.all(promises);\n\n /**\n * By definition, all change events must be emitted\n * BEFORE the write call resolves.\n * To ensure that even when the modifiers are async,\n * we wait here until the processing queue is empty.\n */\n await firstValueFrom(\n processingChangesCount$.pipe(\n filter(v => v === 0)\n )\n );\n return ret;\n },\n query: (preparedQuery) => {\n return instance.query(preparedQuery)\n .then(queryResult => {\n return Promise.all(queryResult.documents.map(doc => fromStorage(doc)));\n })\n .then(documents => ({ documents: documents as any }));\n },\n getAttachmentData: async (\n documentId: string,\n attachmentId: string,\n digest: string\n ) => {\n let data = await instance.getAttachmentData(documentId, attachmentId, digest);\n data = await modifyAttachmentFromStorage(data);\n return data;\n },\n findDocumentsById: (ids, deleted) => {\n return instance.findDocumentsById(ids, deleted)\n .then(async (findResult) => {\n const ret: RxDocumentData[] = [];\n await Promise.all(\n findResult\n .map(async (doc) => {\n ret.push(await fromStorage(doc));\n })\n );\n return ret;\n });\n },\n getChangedDocumentsSince: !instance.getChangedDocumentsSince ? 
undefined : (limit, checkpoint) => {\n return ((instance as any).getChangedDocumentsSince)(limit, checkpoint)\n .then(async (result: any) => {\n return {\n checkpoint: result.checkpoint,\n documents: await Promise.all(\n result.documents.map((d: any) => fromStorage(d))\n )\n };\n });\n },\n changeStream: () => {\n return instance.changeStream().pipe(\n tap(() => processingChangesCount$.next(processingChangesCount$.getValue() + 1)),\n mergeMap(async (eventBulk) => {\n const useEvents = await Promise.all(\n eventBulk.events.map(async (event) => {\n const [\n documentData,\n previousDocumentData\n ] = await Promise.all([\n fromStorage(event.documentData),\n fromStorage(event.previousDocumentData)\n ]);\n const ev: RxChangeEvent = {\n operation: event.operation,\n documentId: event.documentId,\n documentData: documentData as any,\n previousDocumentData: previousDocumentData as any,\n isLocal: false\n };\n return ev;\n })\n );\n const ret: EventBulk>, any> = {\n id: eventBulk.id,\n events: useEvents,\n checkpoint: eventBulk.checkpoint,\n context: eventBulk.context,\n startTime: eventBulk.startTime,\n endTime: eventBulk.endTime\n };\n return ret;\n }),\n tap(() => processingChangesCount$.next(processingChangesCount$.getValue() - 1))\n );\n },\n conflictResultionTasks: () => {\n return instance.conflictResultionTasks().pipe(\n mergeMap(async (task) => {\n const assumedMasterState = await fromStorage(task.input.assumedMasterState);\n const newDocumentState = await fromStorage(task.input.newDocumentState);\n const realMasterState = await fromStorage(task.input.realMasterState);\n return {\n id: task.id,\n context: task.context,\n input: {\n assumedMasterState,\n realMasterState,\n newDocumentState\n }\n };\n })\n );\n },\n resolveConflictResultionTask: (taskSolution) => {\n if (taskSolution.output.isEqual) {\n return instance.resolveConflictResultionTask(taskSolution);\n }\n const useSolution = {\n id: taskSolution.id,\n output: {\n isEqual: false,\n documentData: 
taskSolution.output.documentData\n }\n };\n return instance.resolveConflictResultionTask(useSolution);\n }\n };\n\n return wrappedInstance;\n}\n"],"mappings":"AAAA,SAASA,MAAM,EAAEC,QAAQ,EAAEC,GAAG,QAAQ,gBAAgB;AACtD,SAASC,2BAA2B,QAAQ,uBAAuB;AAmBnE,SACIC,SAAS,EACTC,kBAAkB,EAClBC,8BAA8B,QAC3B,0BAA0B;AACjC,SAASC,eAAe,EAAEC,cAAc,QAAQ,MAAM;;AAStD;AACA;AACA;AACA;;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAMC,gCAA6E,GAAG,IAAIC,GAAG,CAAC,CAAC;;AAE/F;AACA;AACA;AACA;AACA,OAAO,SAASC,6BAA6BA;AACzC;AACJ;AACA;AACA;AACIC,YAA8D;AAC9D;AACJ;AACA;AACIC,YAAoB,EACE;EACtB,IAAMC,eAAe,GAAGT,kBAAkB,CACtCI,gCAAgC,EAChCI,YAAY,EACZ,MAAM,IAAIH,GAAG,CAAC,CAClB,CAAC;EAED,SAASK,aAAaA,CAClBC,MAAyB,EACR;IACjB,OAAOX,kBAAkB,CACrBS,eAAe,EACfG,IAAI,CAACC,SAAS,CAACF,MAAM,CAAC,EACtB,MAAMJ,YAAY,CAACI,MAAM,CAC7B,CAAC;EACL;EAEA,OAAQG,IAAI,IAAK;IACb,OAAOC,MAAM,CAACC,MAAM,CAChB,CAAC,CAAC,EACFF,IAAI,CAACG,OAAO,EACZ;MACI,MAAMC,qBAAqBA,CACvBC,MAAuD,EACzD;QACE,IAAMC,QAAQ,GAAG,MAAMN,IAAI,CAACG,OAAO,CAACC,qBAAqB,CAACC,MAAM,CAAC;QACjE,IAAME,WAAW,GAAGvB,2BAA2B,CAACqB,MAAM,CAACR,MAAM,CAACW,UAAU,CAAC;;QAEzE;AACpB;AACA;AACA;AACA;AACA;QACoB,IAAIC,eAAkC;QACtCtB,8BAA8B,CAAC,MAAMsB,eAAe,GAAGb,aAAa,CAACS,MAAM,CAACR,MAAM,CAAC,CAAC;QAEpF,IAAMa,YAAY,GAAGJ,QAAQ,CAACK,SAAS,CAACC,IAAI,CAACN,QAAQ,CAAC;QACtDA,QAAQ,CAACK,SAAS,GAAG,CACjBE,cAAyC,EACzCC,OAAe,KACd;UACD,IAAI,CAACL,eAAe,EAAE;YAClBA,eAAe,GAAGb,aAAa,CAACS,MAAM,CAACR,MAAM,CAAC;UAClD;UACA,IAAMkB,MAAwC,GAAG,EAAE;UACnD,IAAMC,cAAqC,GAAG,EAAE;UAChDH,cAAc,CAACI,OAAO,CAACC,GAAG,IAAI;YAC1B,IAAMC,UAAkB,GAAGD,GAAG,CAACE,QAAQ,CAACb,WAAW,CAAQ;YAC3D,IAAMc,gBAAgB,GAAGZ,eAAe,CAACS,GAAG,CAACE,QAAQ,CAAC;YACtD,IAAIC,gBAAgB,CAACC,MAAM,GAAG,CAAC,EAAE;cAC7BP,MAAM,CAACQ,IAAI,CAAC;gBACRC,MAAM,EAAE,GAAG;gBACXC,OAAO,EAAE,IAAI;gBACbN,UAAU;gBACVO,QAAQ,EAAER,GAAG;gBACbG;cACJ,CAAC,CAAC;YACN,CAAC,MAAM;cACHL,cAAc,CAACO,IAAI,CAACL,GAAG,CAAC;YAC5B;UACJ,CAAC,CAAC;UACF,IAAMS,YAA4D,GAAGX,cAAc,CAACM,MAAM,GAAG,CAAC,GAC1FZ,YAAY,CAACM,cAAc,EAAEF,OAAO,CAAC,GACrCc,OAAO,CAACC,OAAO,CAAC;YAAEC,KAAK,EAAE,EAAE;YAAEC,OAAO,EA
AE;UAAG,CAAC,CAAC;UAC/C,OAAOJ,YAAY,CAACK,IAAI,CAACC,WAAW,IAAI;YACpClB,MAAM,CAACE,OAAO,CAACiB,eAAe,IAAI;cAC9BD,WAAW,CAACH,KAAK,CAACP,IAAI,CAACW,eAAe,CAAC;YAC3C,CAAC,CAAC;YACF,OAAOD,WAAW;UACtB,CAAC,CAAC;QACN,CAAC;QAED,OAAO3B,QAAQ;MACnB;IACJ,CACJ,CAAC;EACL,CAAC;AAEL;;AAIA;AACA;AACA;AACA;AACA,OAAO,SAAS6B,qBAAqBA,CACjCC,cAAuD,EACvD9B,QAAgD,EAChD+B,eAA+F,EAC/FC,iBAA4F,EAC5FC,2BAA6E,GAAIC,CAAC,IAAKA,CAAC,EAC3C;EAC7C,eAAeC,SAASA,CAACC,OAAuC,EAAgC;IAC5F,IAAI,CAACA,OAAO,EAAE;MACV,OAAOA,OAAO;IAClB;IACA,OAAO,MAAML,eAAe,CAACK,OAAO,CAAC;EACzC;EACA,eAAeC,WAAWA,CAACD,OAAmC,EAAsC;IAChG,IAAI,CAACA,OAAO,EAAE;MACV,OAAOA,OAAO;IAClB;IACA,OAAO,MAAMJ,iBAAiB,CAACI,OAAO,CAAC;EAC3C;EACA,eAAeE,gBAAgBA,CAC3Bd,KAA+B,EACQ;IACvC,IAAMe,GAAG,GAAG5D,SAAS,CAAC6C,KAAK,CAAC;IAC5Be,GAAG,CAACnB,QAAQ,GAAGzC,SAAS,CAAC4D,GAAG,CAACnB,QAAQ,CAAC;IACtC,IAAKmB,GAAG,CAAsCC,YAAY,EAAE;MACvDD,GAAG,CAAsCC,YAAY,GAAG,MAAMH,WAAW,CAAEE,GAAG,CAAsCC,YAAY,CAAC;IACtI;IACA,IAAID,GAAG,CAACnB,QAAQ,CAACqB,QAAQ,EAAE;MACvBF,GAAG,CAACnB,QAAQ,CAACqB,QAAQ,GAAG,MAAMJ,WAAW,CAACE,GAAG,CAACnB,QAAQ,CAACqB,QAAQ,CAAC;IACpE;IACAF,GAAG,CAACnB,QAAQ,CAACN,QAAQ,GAAG,MAAMuB,WAAW,CAACE,GAAG,CAACnB,QAAQ,CAACN,QAAQ,CAAC;IAChE,OAAOyB,GAAG;EACd;EAGA,IAAMG,uBAAuB,GAAG,IAAI5D,eAAe,CAAC,CAAC,CAAC;EAEtD,IAAM6D,eAA8D,GAAG;IACnEC,YAAY,EAAE5C,QAAQ,CAAC4C,YAAY;IACnCC,SAAS,EAAE7C,QAAQ,CAAC6C,SAAS;IAC7BC,OAAO,EAAE9C,QAAQ,CAAC8C,OAAO,CAACxC,IAAI,CAACN,QAAQ,CAAC;IACxC+C,OAAO,EAAE/C,QAAQ,CAAC+C,OAAO;IACzBC,KAAK,EAAEhD,QAAQ,CAACgD,KAAK,CAAC1C,IAAI,CAACN,QAAQ,CAAC;IACpCT,MAAM,EAAEuC,cAAc;IACtBmB,cAAc,EAAEjD,QAAQ,CAACiD,cAAc;IACvCC,KAAK,EAAElD,QAAQ,CAACkD,KAAK,CAAC5C,IAAI,CAACN,QAAQ,CAAC;IACpCmD,MAAM,EAAEnD,QAAQ,CAACmD,MAAM,CAAC7C,IAAI,CAACN,QAAQ,CAAC;IACtCoD,uBAAuB,EAAEpD,QAAQ;IACjCK,SAAS,EAAE,MAAAA,CACPE,cAAyC,EACzCC,OAAe,KACd;MACD,IAAM6C,OAA4B,GAAG,EAAE;MACvC,MAAM/B,OAAO,CAACgC,GAAG,CACb/C,cAAc,CAACgD,GAAG,CAAC,MAAO3C,GAAG,IAAK;QAC9B,IAAM,CAAC6B,QAAQ,EAAE3B,QAAQ,CAAC,GAAG,MAAMQ,OAAO,CAACgC,GAAG,CAAC,CAC3C1C,GAAG,CAAC6B,QAAQ,GAAGN,SAAS,CAACvB,GAAG,CAAC6B,QAAQ,CAAC,G
AAGe,SAAS,EAClDrB,SAAS,CAACvB,GAAG,CAACE,QAAQ,CAAC,CAC1B,CAAC;QACFuC,OAAO,CAACpC,IAAI,CAAC;UAAEwB,QAAQ;UAAE3B;QAAS,CAAC,CAAC;MACxC,CAAC,CACL,CAAC;MAED,IAAMa,WAAW,GAAG,MAAM3B,QAAQ,CAACK,SAAS,CAACgD,OAAO,EAAE7C,OAAO,CAAC;MAC9D,IAAM+B,GAA0C,GAAG;QAC/Cd,OAAO,EAAE,EAAE;QACXD,KAAK,EAAE;MACX,CAAC;MACD,IAAMiC,QAAwB,GAAG,EAAE;MACnC9B,WAAW,CAACF,OAAO,CAACd,OAAO,CAACuB,CAAC,IAAI;QAC7BuB,QAAQ,CAACxC,IAAI,CACToB,WAAW,CAACH,CAAC,CAAC,CAACR,IAAI,CAACgC,EAAE,IAAInB,GAAG,CAACd,OAAO,CAACR,IAAI,CAACyC,EAAE,CAAC,CAClD,CAAC;MACL,CAAC,CAAC;MACF/B,WAAW,CAACH,KAAK,CAACb,OAAO,CAACa,KAAK,IAAI;QAC/BiC,QAAQ,CAACxC,IAAI,CACTqB,gBAAgB,CAACd,KAAK,CAAC,CAACE,IAAI,CAACiC,GAAG,IAAIpB,GAAG,CAACf,KAAK,CAACP,IAAI,CAAC0C,GAAG,CAAC,CAC3D,CAAC;MACL,CAAC,CAAC;MACF,MAAMrC,OAAO,CAACgC,GAAG,CAACG,QAAQ,CAAC;;MAE3B;AACZ;AACA;AACA;AACA;AACA;MACY,MAAM1E,cAAc,CAChB2D,uBAAuB,CAACkB,IAAI,CACxBrF,MAAM,CAAC2D,CAAC,IAAIA,CAAC,KAAK,CAAC,CACvB,CACJ,CAAC;MACD,OAAOK,GAAG;IACd,CAAC;IACDsB,KAAK,EAAGC,aAAa,IAAK;MACtB,OAAO9D,QAAQ,CAAC6D,KAAK,CAACC,aAAa,CAAC,CAC/BpC,IAAI,CAACqC,WAAW,IAAI;QACjB,OAAOzC,OAAO,CAACgC,GAAG,CAACS,WAAW,CAACC,SAAS,CAACT,GAAG,CAACU,GAAG,IAAI5B,WAAW,CAAC4B,GAAG,CAAC,CAAC,CAAC;MAC1E,CAAC,CAAC,CACDvC,IAAI,CAACsC,SAAS,KAAK;QAAEA,SAAS,EAAEA;MAAiB,CAAC,CAAC,CAAC;IAC7D,CAAC;IACDE,iBAAiB,EAAE,MAAAA,CACfrD,UAAkB,EAClBsD,YAAoB,EACpBC,MAAc,KACb;MACD,IAAIC,IAAI,GAAG,MAAMrE,QAAQ,CAACkE,iBAAiB,CAACrD,UAAU,EAAEsD,YAAY,EAAEC,MAAM,CAAC;MAC7EC,IAAI,GAAG,MAAMpC,2BAA2B,CAACoC,IAAI,CAAC;MAC9C,OAAOA,IAAI;IACf,CAAC;IACDC,iBAAiB,EAAEA,CAACC,GAAG,EAAEC,OAAO,KAAK;MACjC,OAAOxE,QAAQ,CAACsE,iBAAiB,CAACC,GAAG,EAAEC,OAAO,CAAC,CAC1C9C,IAAI,CAAC,MAAO+C,UAAU,IAAK;QACxB,IAAMlC,GAAgC,GAAG,EAAE;QAC3C,MAAMjB,OAAO,CAACgC,GAAG,CACbmB,UAAU,CACLlB,GAAG,CAAC,MAAOU,GAAG,IAAK;UAChB1B,GAAG,CAACtB,IAAI,CAAC,MAAMoB,WAAW,CAAC4B,GAAG,CAAC,CAAC;QACpC,CAAC,CACT,CAAC;QACD,OAAO1B,GAAG;MACd,CAAC,CAAC;IACV,CAAC;IACDmC,wBAAwB,EAAE,CAAC1E,QAAQ,CAAC0E,wBAAwB,GAAGlB,SAAS,GAAG,CAACmB,KAAK,EAAEC,UAAU,KAAK;MAC9F,OAAS5E,QAAQ,CAAS0E,wBAAwB,CAAEC,KAAK,EAAEC,UAAU,CAAC,C
ACjElD,IAAI,CAAC,MAAOmD,MAAW,IAAK;QACzB,OAAO;UACHD,UAAU,EAAEC,MAAM,CAACD,UAAU;UAC7BZ,SAAS,EAAE,MAAM1C,OAAO,CAACgC,GAAG,CACxBuB,MAAM,CAACb,SAAS,CAACT,GAAG,CAAEuB,CAAM,IAAKzC,WAAW,CAACyC,CAAC,CAAC,CACnD;QACJ,CAAC;MACL,CAAC,CAAC;IACV,CAAC;IACDC,YAAY,EAAEA,CAAA,KAAM;MAChB,OAAO/E,QAAQ,CAAC+E,YAAY,CAAC,CAAC,CAACnB,IAAI,CAC/BnF,GAAG,CAAC,MAAMiE,uBAAuB,CAACsC,IAAI,CAACtC,uBAAuB,CAACuC,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAC/EzG,QAAQ,CAAC,MAAO0G,SAAS,IAAK;QAC1B,IAAMC,SAAS,GAAG,MAAM7D,OAAO,CAACgC,GAAG,CAC/B4B,SAAS,CAACE,MAAM,CAAC7B,GAAG,CAAC,MAAO8B,KAAK,IAAK;UAClC,IAAM,CACFC,YAAY,EACZC,oBAAoB,CACvB,GAAG,MAAMjE,OAAO,CAACgC,GAAG,CAAC,CAClBjB,WAAW,CAACgD,KAAK,CAACC,YAAY,CAAC,EAC/BjD,WAAW,CAACgD,KAAK,CAACE,oBAAoB,CAAC,CAC1C,CAAC;UACF,IAAMC,EAA4B,GAAG;YACjCC,SAAS,EAAEJ,KAAK,CAACI,SAAS;YAC1B5E,UAAU,EAAEwE,KAAK,CAACxE,UAAU;YAC5ByE,YAAY,EAAEA,YAAmB;YACjCC,oBAAoB,EAAEA,oBAA2B;YACjDG,OAAO,EAAE;UACb,CAAC;UACD,OAAOF,EAAE;QACb,CAAC,CACL,CAAC;QACD,IAAMjD,GAAoE,GAAG;UACzEoD,EAAE,EAAET,SAAS,CAACS,EAAE;UAChBP,MAAM,EAAED,SAAS;UACjBP,UAAU,EAAEM,SAAS,CAACN,UAAU;UAChCpE,OAAO,EAAE0E,SAAS,CAAC1E,OAAO;UAC1BoF,SAAS,EAAEV,SAAS,CAACU,SAAS;UAC9BC,OAAO,EAAEX,SAAS,CAACW;QACvB,CAAC;QACD,OAAOtD,GAAG;MACd,CAAC,CAAC,EACF9D,GAAG,CAAC,MAAMiE,uBAAuB,CAACsC,IAAI,CAACtC,uBAAuB,CAACuC,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC,CAClF,CAAC;IACL,CAAC;IACDa,sBAAsB,EAAEA,CAAA,KAAM;MAC1B,OAAO9F,QAAQ,CAAC8F,sBAAsB,CAAC,CAAC,CAAClC,IAAI,CACzCpF,QAAQ,CAAC,MAAOuH,IAAI,IAAK;QACrB,IAAMC,kBAAkB,GAAG,MAAM3D,WAAW,CAAC0D,IAAI,CAACE,KAAK,CAACD,kBAAkB,CAAC;QAC3E,IAAME,gBAAgB,GAAG,MAAM7D,WAAW,CAAC0D,IAAI,CAACE,KAAK,CAACC,gBAAgB,CAAC;QACvE,IAAMC,eAAe,GAAG,MAAM9D,WAAW,CAAC0D,IAAI,CAACE,KAAK,CAACE,eAAe,CAAC;QACrE,OAAO;UACHR,EAAE,EAAEI,IAAI,CAACJ,EAAE;UACXnF,OAAO,EAAEuF,IAAI,CAACvF,OAAO;UACrByF,KAAK,EAAE;YACHD,kBAAkB;YAClBG,eAAe;YACfD;UACJ;QACJ,CAAC;MACL,CAAC,CACL,CAAC;IACL,CAAC;IACDE,4BAA4B,EAAGC,YAAY,IAAK;MAC5C,IAAIA,YAAY,CAACC,MAAM,CAACC,OAAO,EAAE;QAC7B,OAAOvG,QAAQ,CAACoG,4BAA4B,CAACC,YAAY,CAAC;MAC9D;MACA,IAAMG,WAAW,GAAG;QAChBb,EAAE,EAAEU,YAAY,CAAC
V,EAAE;QACnBW,MAAM,EAAE;UACJC,OAAO,EAAE,KAAK;UACdjB,YAAY,EAAEe,YAAY,CAACC,MAAM,CAAChB;QACtC;MACJ,CAAC;MACD,OAAOtF,QAAQ,CAACoG,4BAA4B,CAACI,WAAW,CAAC;IAC7D;EACJ,CAAC;EAED,OAAO7D,eAAe;AAC1B","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugin.js b/dist/esm/plugin.js deleted file mode 100644 index 3ef9992c31c..00000000000 --- a/dist/esm/plugin.js +++ /dev/null @@ -1,88 +0,0 @@ -/** - * this handles how plugins are added to rxdb - * basically it changes the internal prototypes - * by passing them to the plugins-functions - */ -import { RxSchema } from "./rx-schema.js"; -import { basePrototype as RxDocumentPrototype } from "./rx-document.js"; -import { RxQueryBase } from "./rx-query.js"; -import { RxCollectionBase } from "./rx-collection.js"; -import { RxDatabaseBase } from "./rx-database.js"; -import { overwritable } from "./overwritable.js"; -import { HOOKS, runPluginHooks } from "./hooks.js"; -import { newRxError, newRxTypeError } from "./rx-error.js"; - -/** - * prototypes that can be manipulated with a plugin - */ -var PROTOTYPES = { - RxSchema: RxSchema.prototype, - RxDocument: RxDocumentPrototype, - RxQuery: RxQueryBase.prototype, - RxCollection: RxCollectionBase.prototype, - RxDatabase: RxDatabaseBase.prototype -}; -var ADDED_PLUGINS = new Set(); -var ADDED_PLUGIN_NAMES = new Set(); - -/** - * Add a plugin to the RxDB library. - * Plugins are added globally and cannot be removed. 
- */ -export function addRxPlugin(plugin) { - runPluginHooks('preAddRxPlugin', { - plugin, - plugins: ADDED_PLUGINS - }); - - // do nothing if added before - if (ADDED_PLUGINS.has(plugin)) { - return; - } else { - // ensure no other plugin with the same name was already added - if (ADDED_PLUGIN_NAMES.has(plugin.name)) { - throw newRxError('PL3', { - name: plugin.name, - plugin - }); - } - ADDED_PLUGINS.add(plugin); - ADDED_PLUGIN_NAMES.add(plugin.name); - } - - /** - * To identify broken configurations, - * we only allow RxDB plugins to be passed into addRxPlugin(). - */ - if (!plugin.rxdb) { - throw newRxTypeError('PL1', { - plugin - }); - } - if (plugin.init) { - plugin.init(); - } - - // prototype-overwrites - if (plugin.prototypes) { - Object.entries(plugin.prototypes).forEach(([name, fun]) => { - return fun(PROTOTYPES[name]); - }); - } - // overwritable-overwrites - if (plugin.overwritable) { - Object.assign(overwritable, plugin.overwritable); - } - // extend-hooks - if (plugin.hooks) { - Object.entries(plugin.hooks).forEach(([name, hooksObj]) => { - if (hooksObj.after) { - HOOKS[name].push(hooksObj.after); - } - if (hooksObj.before) { - HOOKS[name].unshift(hooksObj.before); - } - }); - } -} -//# sourceMappingURL=plugin.js.map \ No newline at end of file diff --git a/dist/esm/plugin.js.map b/dist/esm/plugin.js.map deleted file mode 100644 index 2936cc859f0..00000000000 --- a/dist/esm/plugin.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"plugin.js","names":["RxSchema","basePrototype","RxDocumentPrototype","RxQueryBase","RxCollectionBase","RxDatabaseBase","overwritable","HOOKS","runPluginHooks","newRxError","newRxTypeError","PROTOTYPES","prototype","RxDocument","RxQuery","RxCollection","RxDatabase","ADDED_PLUGINS","Set","ADDED_PLUGIN_NAMES","addRxPlugin","plugin","plugins","has","name","add","rxdb","init","prototypes","Object","entries","forEach","fun","assign","hooks","hooksObj","after","push","before","unshift"],"sources":["../../src/plugin.ts"],"sourcesContent":["/**\n * this handles how plugins are added to rxdb\n * basically it changes the internal prototypes\n * by passing them to the plugins-functions\n */\nimport {\n RxSchema\n} from './rx-schema.ts';\nimport {\n basePrototype as RxDocumentPrototype\n} from './rx-document.ts';\nimport {\n RxQueryBase\n} from './rx-query.ts';\nimport {\n RxCollectionBase\n} from './rx-collection.ts';\nimport {\n RxDatabaseBase\n} from './rx-database.ts';\nimport type {\n RxPlugin\n} from './types/index.d.ts';\n\nimport { overwritable } from './overwritable.ts';\nimport {\n HOOKS,\n runPluginHooks\n} from './hooks.ts';\nimport { newRxError, newRxTypeError } from './rx-error.ts';\n\n/**\n * prototypes that can be manipulated with a plugin\n */\nconst PROTOTYPES: { [k: string]: any; } = {\n RxSchema: RxSchema.prototype,\n RxDocument: RxDocumentPrototype,\n RxQuery: RxQueryBase.prototype,\n RxCollection: RxCollectionBase.prototype,\n RxDatabase: RxDatabaseBase.prototype\n};\n\nconst ADDED_PLUGINS: Set = new Set();\nconst ADDED_PLUGIN_NAMES: Set = new Set();\n\n/**\n * Add a plugin to the RxDB library.\n * Plugins are added globally and cannot be removed.\n */\nexport function addRxPlugin(plugin: RxPlugin) {\n runPluginHooks('preAddRxPlugin', { plugin, plugins: ADDED_PLUGINS });\n\n // do nothing if added before\n if (ADDED_PLUGINS.has(plugin)) {\n return;\n } else {\n\n // ensure no other plugin with the same name was already added\n if 
(ADDED_PLUGIN_NAMES.has(plugin.name)) {\n throw newRxError('PL3', {\n name: plugin.name,\n plugin,\n });\n }\n\n ADDED_PLUGINS.add(plugin);\n ADDED_PLUGIN_NAMES.add(plugin.name);\n }\n\n /**\n * To identify broken configurations,\n * we only allow RxDB plugins to be passed into addRxPlugin().\n */\n if (!plugin.rxdb) {\n throw newRxTypeError('PL1', {\n plugin\n });\n }\n\n if (plugin.init) {\n plugin.init();\n }\n\n // prototype-overwrites\n if (plugin.prototypes) {\n Object\n .entries(plugin.prototypes)\n .forEach(([name, fun]) => {\n return (fun as any)(PROTOTYPES[name]);\n });\n }\n // overwritable-overwrites\n if (plugin.overwritable) {\n Object.assign(\n overwritable,\n plugin.overwritable\n );\n }\n // extend-hooks\n if (plugin.hooks) {\n Object\n .entries(plugin.hooks)\n .forEach(([name, hooksObj]) => {\n if (hooksObj.after) {\n HOOKS[name].push(hooksObj.after);\n }\n if (hooksObj.before) {\n HOOKS[name].unshift(hooksObj.before);\n }\n });\n }\n}\n\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA,SACIA,QAAQ,QACL,gBAAgB;AACvB,SACIC,aAAa,IAAIC,mBAAmB,QACjC,kBAAkB;AACzB,SACIC,WAAW,QACR,eAAe;AACtB,SACIC,gBAAgB,QACb,oBAAoB;AAC3B,SACIC,cAAc,QACX,kBAAkB;AAKzB,SAASC,YAAY,QAAQ,mBAAmB;AAChD,SACIC,KAAK,EACLC,cAAc,QACX,YAAY;AACnB,SAASC,UAAU,EAAEC,cAAc,QAAQ,eAAe;;AAE1D;AACA;AACA;AACA,IAAMC,UAAiC,GAAG;EACtCX,QAAQ,EAAEA,QAAQ,CAACY,SAAS;EAC5BC,UAAU,EAAEX,mBAAmB;EAC/BY,OAAO,EAAEX,WAAW,CAACS,SAAS;EAC9BG,YAAY,EAAEX,gBAAgB,CAACQ,SAAS;EACxCI,UAAU,EAAEX,cAAc,CAACO;AAC/B,CAAC;AAED,IAAMK,aAAkC,GAAG,IAAIC,GAAG,CAAC,CAAC;AACpD,IAAMC,kBAA+B,GAAG,IAAID,GAAG,CAAC,CAAC;;AAEjD;AACA;AACA;AACA;AACA,OAAO,SAASE,WAAWA,CAACC,MAAgB,EAAE;EAC1Cb,cAAc,CAAC,gBAAgB,EAAE;IAAEa,MAAM;IAAEC,OAAO,EAAEL;EAAc,CAAC,CAAC;;EAEpE;EACA,IAAIA,aAAa,CAACM,GAAG,CAACF,MAAM,CAAC,EAAE;IAC3B;EACJ,CAAC,MAAM;IAEH;IACA,IAAIF,kBAAkB,CAACI,GAAG,CAACF,MAAM,CAACG,IAAI,CAAC,EAAE;MACrC,MAAMf,UAAU,CAAC,KAAK,EAAE;QACpBe,IAAI,EAAEH,MAAM,CAACG,IAAI;QACjBH;MACJ,CAAC,CAAC;IACN;IAEAJ,aAAa,CAACQ,GAAG,CAACJ,MAAM,CAAC;IACzBF,kBAAkB,CAACM,GAAG
,CAACJ,MAAM,CAACG,IAAI,CAAC;EACvC;;EAEA;AACJ;AACA;AACA;EACI,IAAI,CAACH,MAAM,CAACK,IAAI,EAAE;IACd,MAAMhB,cAAc,CAAC,KAAK,EAAE;MACxBW;IACJ,CAAC,CAAC;EACN;EAEA,IAAIA,MAAM,CAACM,IAAI,EAAE;IACbN,MAAM,CAACM,IAAI,CAAC,CAAC;EACjB;;EAEA;EACA,IAAIN,MAAM,CAACO,UAAU,EAAE;IACnBC,MAAM,CACDC,OAAO,CAACT,MAAM,CAACO,UAAU,CAAC,CAC1BG,OAAO,CAAC,CAAC,CAACP,IAAI,EAAEQ,GAAG,CAAC,KAAK;MACtB,OAAQA,GAAG,CAASrB,UAAU,CAACa,IAAI,CAAC,CAAC;IACzC,CAAC,CAAC;EACV;EACA;EACA,IAAIH,MAAM,CAACf,YAAY,EAAE;IACrBuB,MAAM,CAACI,MAAM,CACT3B,YAAY,EACZe,MAAM,CAACf,YACX,CAAC;EACL;EACA;EACA,IAAIe,MAAM,CAACa,KAAK,EAAE;IACdL,MAAM,CACDC,OAAO,CAACT,MAAM,CAACa,KAAK,CAAC,CACrBH,OAAO,CAAC,CAAC,CAACP,IAAI,EAAEW,QAAQ,CAAC,KAAK;MAC3B,IAAIA,QAAQ,CAACC,KAAK,EAAE;QAChB7B,KAAK,CAACiB,IAAI,CAAC,CAACa,IAAI,CAACF,QAAQ,CAACC,KAAK,CAAC;MACpC;MACA,IAAID,QAAQ,CAACG,MAAM,EAAE;QACjB/B,KAAK,CAACiB,IAAI,CAAC,CAACe,OAAO,CAACJ,QAAQ,CAACG,MAAM,CAAC;MACxC;IACJ,CAAC,CAAC;EACV;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/attachments-compression/index.js b/dist/esm/plugins/attachments-compression/index.js deleted file mode 100644 index 34fa11756b6..00000000000 --- a/dist/esm/plugins/attachments-compression/index.js +++ /dev/null @@ -1,64 +0,0 @@ -import { wrapRxStorageInstance } from "../../plugin-helpers.js"; -import { arrayBufferToBase64, base64ToArrayBuffer, ensureNotFalsy, flatClone } from "../utils/index.js"; - -/** - * @link https://github.com/WICG/compression/blob/main/explainer.md - */ -export async function compressBase64(mode, base64String) { - var arrayBuffer = base64ToArrayBuffer(base64String); - var stream = ensureNotFalsy(new Response(arrayBuffer).body).pipeThrough(new CompressionStream(mode)); - var result = await new Response(stream).arrayBuffer(); - return arrayBufferToBase64(result); -} -export async function decompressBase64(mode, base64String) { - var arrayBuffer = base64ToArrayBuffer(base64String); - var stream = ensureNotFalsy(new Response(arrayBuffer).body).pipeThrough(new 
DecompressionStream(mode)); - var result = await new Response(stream).arrayBuffer(); - return arrayBufferToBase64(result); -} - -/** - * A RxStorage wrapper that compresses attachment data on writes - * and decompresses the data on reads. - * - * This is using the CompressionStream API, - * @link https://caniuse.com/?search=compressionstream - */ -export function wrappedAttachmentsCompressionStorage(args) { - return Object.assign({}, args.storage, { - async createStorageInstance(params) { - if (!params.schema.attachments || !params.schema.attachments.compression) { - return args.storage.createStorageInstance(params); - } - var mode = params.schema.attachments.compression; - async function modifyToStorage(docData) { - await Promise.all(Object.values(docData._attachments).map(async attachment => { - if (!attachment.data) { - return; - } - var attachmentWriteData = attachment; - attachmentWriteData.data = await compressBase64(mode, attachmentWriteData.data); - })); - return docData; - } - function modifyAttachmentFromStorage(attachmentData) { - return decompressBase64(mode, attachmentData); - } - - /** - * Because this wrapper resolves the attachments.compression, - * we have to remove it before sending it to the underlying RxStorage. - * which allows underlying storages to detect wrong configurations - * like when compression is set to false but no attachment-compression module is used. 
- */ - var childSchema = flatClone(params.schema); - childSchema.attachments = flatClone(childSchema.attachments); - delete ensureNotFalsy(childSchema.attachments).compression; - var instance = await args.storage.createStorageInstance(Object.assign({}, params, { - schema: childSchema - })); - return wrapRxStorageInstance(params.schema, instance, modifyToStorage, d => d, modifyAttachmentFromStorage); - } - }); -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/attachments-compression/index.js.map b/dist/esm/plugins/attachments-compression/index.js.map deleted file mode 100644 index 8debace7d1f..00000000000 --- a/dist/esm/plugins/attachments-compression/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["wrapRxStorageInstance","arrayBufferToBase64","base64ToArrayBuffer","ensureNotFalsy","flatClone","compressBase64","mode","base64String","arrayBuffer","stream","Response","body","pipeThrough","CompressionStream","result","decompressBase64","DecompressionStream","wrappedAttachmentsCompressionStorage","args","Object","assign","storage","createStorageInstance","params","schema","attachments","compression","modifyToStorage","docData","Promise","all","values","_attachments","map","attachment","data","attachmentWriteData","modifyAttachmentFromStorage","attachmentData","childSchema","instance","d"],"sources":["../../../../src/plugins/attachments-compression/index.ts"],"sourcesContent":["import { wrapRxStorageInstance } from '../../plugin-helpers.ts';\nimport type {\n RxStorage,\n RxStorageInstanceCreationParams,\n RxDocumentWriteData,\n CompressionMode,\n RxAttachmentWriteData\n} from '../../types/index.d.ts';\n\nimport {\n arrayBufferToBase64,\n base64ToArrayBuffer,\n ensureNotFalsy,\n flatClone\n} from '../utils/index.ts';\n\n\n/**\n * @link https://github.com/WICG/compression/blob/main/explainer.md\n */\nexport async function compressBase64(\n mode: CompressionMode,\n base64String: string\n): 
Promise {\n const arrayBuffer = base64ToArrayBuffer(base64String);\n const stream = ensureNotFalsy(new Response(arrayBuffer).body)\n .pipeThrough(new CompressionStream(mode));\n const result = await new Response(stream).arrayBuffer();\n return arrayBufferToBase64(result);\n}\nexport async function decompressBase64(\n mode: CompressionMode,\n base64String: string\n): Promise {\n const arrayBuffer = base64ToArrayBuffer(base64String);\n const stream = ensureNotFalsy(new Response(arrayBuffer).body)\n .pipeThrough(new DecompressionStream(mode));\n const result = await new Response(stream).arrayBuffer();\n return arrayBufferToBase64(result);\n}\n\n\n/**\n * A RxStorage wrapper that compresses attachment data on writes\n * and decompresses the data on reads.\n *\n * This is using the CompressionStream API,\n * @link https://caniuse.com/?search=compressionstream\n */\nexport function wrappedAttachmentsCompressionStorage(\n args: {\n storage: RxStorage;\n }\n): RxStorage {\n return Object.assign(\n {},\n args.storage,\n {\n async createStorageInstance(\n params: RxStorageInstanceCreationParams\n ) {\n if (\n !params.schema.attachments ||\n !params.schema.attachments.compression\n ) {\n return args.storage.createStorageInstance(params);\n }\n\n const mode = params.schema.attachments.compression;\n\n async function modifyToStorage(docData: RxDocumentWriteData) {\n await Promise.all(\n Object.values(docData._attachments).map(async (attachment) => {\n if (!(attachment as RxAttachmentWriteData).data) {\n return;\n }\n const attachmentWriteData = attachment as RxAttachmentWriteData;\n attachmentWriteData.data = await compressBase64(mode, attachmentWriteData.data);\n })\n );\n return docData;\n }\n function modifyAttachmentFromStorage(attachmentData: string): Promise {\n return decompressBase64(mode, attachmentData);\n }\n\n /**\n * Because this wrapper resolves the attachments.compression,\n * we have to remove it before sending it to the underlying RxStorage.\n * which allows 
underlying storages to detect wrong configurations\n * like when compression is set to false but no attachment-compression module is used.\n */\n const childSchema = flatClone(params.schema);\n childSchema.attachments = flatClone(childSchema.attachments);\n delete ensureNotFalsy(childSchema.attachments).compression;\n\n const instance = await args.storage.createStorageInstance(\n Object.assign(\n {},\n params,\n {\n schema: childSchema\n }\n )\n );\n\n return wrapRxStorageInstance(\n params.schema,\n instance,\n modifyToStorage,\n d => d,\n modifyAttachmentFromStorage\n );\n }\n }\n );\n}\n"],"mappings":"AAAA,SAASA,qBAAqB,QAAQ,yBAAyB;AAS/D,SACIC,mBAAmB,EACnBC,mBAAmB,EACnBC,cAAc,EACdC,SAAS,QACN,mBAAmB;;AAG1B;AACA;AACA;AACA,OAAO,eAAeC,cAAcA,CAChCC,IAAqB,EACrBC,YAAoB,EACL;EACf,IAAMC,WAAW,GAAGN,mBAAmB,CAACK,YAAY,CAAC;EACrD,IAAME,MAAM,GAAGN,cAAc,CAAC,IAAIO,QAAQ,CAACF,WAAW,CAAC,CAACG,IAAI,CAAC,CACxDC,WAAW,CAAC,IAAIC,iBAAiB,CAACP,IAAI,CAAC,CAAC;EAC7C,IAAMQ,MAAM,GAAG,MAAM,IAAIJ,QAAQ,CAACD,MAAM,CAAC,CAACD,WAAW,CAAC,CAAC;EACvD,OAAOP,mBAAmB,CAACa,MAAM,CAAC;AACtC;AACA,OAAO,eAAeC,gBAAgBA,CAClCT,IAAqB,EACrBC,YAAoB,EACL;EACf,IAAMC,WAAW,GAAGN,mBAAmB,CAACK,YAAY,CAAC;EACrD,IAAME,MAAM,GAAGN,cAAc,CAAC,IAAIO,QAAQ,CAACF,WAAW,CAAC,CAACG,IAAI,CAAC,CACxDC,WAAW,CAAC,IAAII,mBAAmB,CAACV,IAAI,CAAC,CAAC;EAC/C,IAAMQ,MAAM,GAAG,MAAM,IAAIJ,QAAQ,CAACD,MAAM,CAAC,CAACD,WAAW,CAAC,CAAC;EACvD,OAAOP,mBAAmB,CAACa,MAAM,CAAC;AACtC;;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASG,oCAAoCA,CAChDC,IAEC,EAC4C;EAC7C,OAAOC,MAAM,CAACC,MAAM,CAChB,CAAC,CAAC,EACFF,IAAI,CAACG,OAAO,EACZ;IACI,MAAMC,qBAAqBA,CACvBC,MAAuD,EACzD;MACE,IACI,CAACA,MAAM,CAACC,MAAM,CAACC,WAAW,IAC1B,CAACF,MAAM,CAACC,MAAM,CAACC,WAAW,CAACC,WAAW,EACxC;QACE,OAAOR,IAAI,CAACG,OAAO,CAACC,qBAAqB,CAACC,MAAM,CAAC;MACrD;MAEA,IAAMjB,IAAI,GAAGiB,MAAM,CAACC,MAAM,CAACC,WAAW,CAACC,WAAW;MAElD,eAAeC,eAAeA,CAACC,OAAuC,EAAE;QACpE,MAAMC,OAAO,CAACC,GAAG,CACbX,MAAM,CAACY,MAAM,CAACH,OAAO,CAACI,YAAY,CAAC,CAACC,GAAG,CAAC,MAAOC,UAAU,IAAK;UAC1D,IAAI,CAAEA,UAAU,CAA2BC,IAAI,E
AAE;YAC7C;UACJ;UACA,IAAMC,mBAAmB,GAAGF,UAAmC;UAC/DE,mBAAmB,CAACD,IAAI,GAAG,MAAM9B,cAAc,CAACC,IAAI,EAAE8B,mBAAmB,CAACD,IAAI,CAAC;QACnF,CAAC,CACL,CAAC;QACD,OAAOP,OAAO;MAClB;MACA,SAASS,2BAA2BA,CAACC,cAAsB,EAAmB;QAC1E,OAAOvB,gBAAgB,CAACT,IAAI,EAAEgC,cAAc,CAAC;MACjD;;MAEA;AAChB;AACA;AACA;AACA;AACA;MACgB,IAAMC,WAAW,GAAGnC,SAAS,CAACmB,MAAM,CAACC,MAAM,CAAC;MAC5Ce,WAAW,CAACd,WAAW,GAAGrB,SAAS,CAACmC,WAAW,CAACd,WAAW,CAAC;MAC5D,OAAOtB,cAAc,CAACoC,WAAW,CAACd,WAAW,CAAC,CAACC,WAAW;MAE1D,IAAMc,QAAQ,GAAG,MAAMtB,IAAI,CAACG,OAAO,CAACC,qBAAqB,CACrDH,MAAM,CAACC,MAAM,CACT,CAAC,CAAC,EACFG,MAAM,EACN;QACIC,MAAM,EAAEe;MACZ,CACJ,CACJ,CAAC;MAED,OAAOvC,qBAAqB,CACxBuB,MAAM,CAACC,MAAM,EACbgB,QAAQ,EACRb,eAAe,EACfc,CAAC,IAAIA,CAAC,EACNJ,2BACJ,CAAC;IACL;EACJ,CACJ,CAAC;AACL","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/attachments/attachments-utils.js b/dist/esm/plugins/attachments/attachments-utils.js deleted file mode 100644 index 9c06a6c10cc..00000000000 --- a/dist/esm/plugins/attachments/attachments-utils.js +++ /dev/null @@ -1,39 +0,0 @@ -import { newRxError } from "../../rx-error.js"; -import { ensureNotFalsy } from "../utils/index.js"; -export function ensureSchemaSupportsAttachments(doc) { - var schemaJson = doc.collection.schema.jsonSchema; - if (!schemaJson.attachments) { - throw newRxError('AT1', { - link: 'https://pubkey.github.io/rxdb/rx-attachment.html' - }); - } -} -export function assignMethodsToAttachment(attachment) { - Object.entries(attachment.doc.collection.attachments).forEach(([funName, fun]) => { - Object.defineProperty(attachment, funName, { - get: () => fun.bind(attachment) - }); - }); -} - -/** - * Fill up the missing attachment.data of the newDocument - * so that the new document can be send to somewhere else - * which could then receive all required attachments data - * that it did not have before. 
- */ -export async function fillWriteDataForAttachmentsChange(primaryPath, storageInstance, newDocument, originalDocument) { - if (!newDocument._attachments || originalDocument && !originalDocument._attachments) { - throw new Error('_attachments missing'); - } - var docId = newDocument[primaryPath]; - var originalAttachmentsIds = new Set(originalDocument && originalDocument._attachments ? Object.keys(originalDocument._attachments) : []); - await Promise.all(Object.entries(newDocument._attachments).map(async ([key, value]) => { - if ((!originalAttachmentsIds.has(key) || originalDocument && ensureNotFalsy(originalDocument._attachments)[key].digest !== value.digest) && !value.data) { - var attachmentDataString = await storageInstance.getAttachmentData(docId, key, value.digest); - value.data = attachmentDataString; - } - })); - return newDocument; -} -//# sourceMappingURL=attachments-utils.js.map \ No newline at end of file diff --git a/dist/esm/plugins/attachments/attachments-utils.js.map b/dist/esm/plugins/attachments/attachments-utils.js.map deleted file mode 100644 index 0149074d4ae..00000000000 --- a/dist/esm/plugins/attachments/attachments-utils.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"attachments-utils.js","names":["newRxError","ensureNotFalsy","ensureSchemaSupportsAttachments","doc","schemaJson","collection","schema","jsonSchema","attachments","link","assignMethodsToAttachment","attachment","Object","entries","forEach","funName","fun","defineProperty","get","bind","fillWriteDataForAttachmentsChange","primaryPath","storageInstance","newDocument","originalDocument","_attachments","Error","docId","originalAttachmentsIds","Set","keys","Promise","all","map","key","value","has","digest","data","attachmentDataString","getAttachmentData"],"sources":["../../../../src/plugins/attachments/attachments-utils.ts"],"sourcesContent":["import { newRxError } from '../../rx-error.ts';\nimport type {\n RxAttachmentWriteData,\n RxStorageInstance,\n 
WithDeletedAndAttachments\n} from '../../types/index.d.ts';\nimport { ensureNotFalsy } from '../utils/index.ts';\n\nexport function ensureSchemaSupportsAttachments(doc: any) {\n const schemaJson = doc.collection.schema.jsonSchema;\n if (!schemaJson.attachments) {\n throw newRxError('AT1', {\n link: 'https://pubkey.github.io/rxdb/rx-attachment.html'\n });\n }\n}\n\nexport function assignMethodsToAttachment(attachment: any) {\n Object\n .entries(attachment.doc.collection.attachments)\n .forEach(([funName, fun]) => {\n Object.defineProperty(attachment, funName, {\n get: () => (fun as any).bind(attachment)\n });\n });\n}\n\n/**\n * Fill up the missing attachment.data of the newDocument\n * so that the new document can be send to somewhere else\n * which could then receive all required attachments data\n * that it did not have before.\n */\nexport async function fillWriteDataForAttachmentsChange(\n primaryPath: string,\n storageInstance: RxStorageInstance,\n newDocument: WithDeletedAndAttachments,\n originalDocument?: WithDeletedAndAttachments\n): Promise> {\n\n if (\n !newDocument._attachments ||\n (\n originalDocument &&\n !originalDocument._attachments\n )\n ) {\n throw new Error('_attachments missing');\n }\n\n const docId: string = (newDocument as any)[primaryPath];\n const originalAttachmentsIds = new Set(\n originalDocument && originalDocument._attachments\n ? 
Object.keys(originalDocument._attachments)\n : []\n );\n await Promise.all(\n Object\n .entries(newDocument._attachments)\n .map(async ([key, value]) => {\n if (\n (\n !originalAttachmentsIds.has(key) ||\n (\n originalDocument &&\n ensureNotFalsy(originalDocument._attachments)[key].digest !== value.digest\n )\n ) &&\n !(value as RxAttachmentWriteData).data\n ) {\n const attachmentDataString = await storageInstance.getAttachmentData(\n docId,\n key,\n value.digest\n );\n (value as RxAttachmentWriteData).data = attachmentDataString;\n }\n })\n );\n\n return newDocument;\n}\n"],"mappings":"AAAA,SAASA,UAAU,QAAQ,mBAAmB;AAM9C,SAASC,cAAc,QAAQ,mBAAmB;AAElD,OAAO,SAASC,+BAA+BA,CAACC,GAAQ,EAAE;EACtD,IAAMC,UAAU,GAAGD,GAAG,CAACE,UAAU,CAACC,MAAM,CAACC,UAAU;EACnD,IAAI,CAACH,UAAU,CAACI,WAAW,EAAE;IACzB,MAAMR,UAAU,CAAC,KAAK,EAAE;MACpBS,IAAI,EAAE;IACV,CAAC,CAAC;EACN;AACJ;AAEA,OAAO,SAASC,yBAAyBA,CAACC,UAAe,EAAE;EACvDC,MAAM,CACDC,OAAO,CAACF,UAAU,CAACR,GAAG,CAACE,UAAU,CAACG,WAAW,CAAC,CAC9CM,OAAO,CAAC,CAAC,CAACC,OAAO,EAAEC,GAAG,CAAC,KAAK;IACzBJ,MAAM,CAACK,cAAc,CAACN,UAAU,EAAEI,OAAO,EAAE;MACvCG,GAAG,EAAEA,CAAA,KAAOF,GAAG,CAASG,IAAI,CAACR,UAAU;IAC3C,CAAC,CAAC;EACN,CAAC,CAAC;AACV;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,eAAeS,iCAAiCA,CACnDC,WAAmB,EACnBC,eAA4D,EAC5DC,WAAiD,EACjDC,gBAAuD,EACV;EAE7C,IACI,CAACD,WAAW,CAACE,YAAY,IAErBD,gBAAgB,IAChB,CAACA,gBAAgB,CAACC,YACrB,EACH;IACE,MAAM,IAAIC,KAAK,CAAC,sBAAsB,CAAC;EAC3C;EAEA,IAAMC,KAAa,GAAIJ,WAAW,CAASF,WAAW,CAAC;EACvD,IAAMO,sBAAsB,GAAG,IAAIC,GAAG,CAClCL,gBAAgB,IAAIA,gBAAgB,CAACC,YAAY,GAC3Cb,MAAM,CAACkB,IAAI,CAACN,gBAAgB,CAACC,YAAY,CAAC,GAC1C,EACV,CAAC;EACD,MAAMM,OAAO,CAACC,GAAG,CACbpB,MAAM,CACDC,OAAO,CAACU,WAAW,CAACE,YAAY,CAAC,CACjCQ,GAAG,CAAC,OAAO,CAACC,GAAG,EAAEC,KAAK,CAAC,KAAK;IACzB,IACI,CACI,CAACP,sBAAsB,CAACQ,GAAG,CAACF,GAAG,CAAC,IAE5BV,gBAAgB,IAChBvB,cAAc,CAACuB,gBAAgB,CAACC,YAAY,CAAC,CAACS,GAAG,CAAC,CAACG,MAAM,KAAKF,KAAK,CAACE,MACvE,KAEL,CAAEF,KAAK,CAA2BG,IAAI,EACxC;MACE,IAAMC,oBAAoB,GAAG,MAAMjB,eAAe,CAACkB,iBAAiB,CAChEb,KAAK,EACLO,GAAG,EA
CHC,KAAK,CAACE,MACV,CAAC;MACAF,KAAK,CAA2BG,IAAI,GAAGC,oBAAoB;IAChE;EACJ,CAAC,CACT,CAAC;EAED,OAAOhB,WAAW;AACtB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/attachments/index.js b/dist/esm/plugins/attachments/index.js deleted file mode 100644 index c10f94897cf..00000000000 --- a/dist/esm/plugins/attachments/index.js +++ /dev/null @@ -1,170 +0,0 @@ -import { map } from 'rxjs'; -import { blobToBase64String, blobToString, createBlobFromBase64, flatClone, getBlobSize, PROMISE_RESOLVE_VOID } from "../../plugins/utils/index.js"; -import { assignMethodsToAttachment, ensureSchemaSupportsAttachments } from "./attachments-utils.js"; - -/** - * an RxAttachment is basically just the attachment-stub - * wrapped so that you can access the attachment-data - */ -export var RxAttachment = /*#__PURE__*/function () { - function RxAttachment({ - doc, - id, - type, - length, - digest - }) { - this.doc = doc; - this.id = id; - this.type = type; - this.length = length; - this.digest = digest; - assignMethodsToAttachment(this); - } - var _proto = RxAttachment.prototype; - _proto.remove = function remove() { - return this.doc.collection.incrementalWriteQueue.addWrite(this.doc._data, docWriteData => { - delete docWriteData._attachments[this.id]; - return docWriteData; - }).then(() => {}); - } - - /** - * returns the data for the attachment - */; - _proto.getData = async function getData() { - var plainDataBase64 = await this.doc.collection.storageInstance.getAttachmentData(this.doc.primary, this.id, this.digest); - var ret = await createBlobFromBase64(plainDataBase64, this.type); - return ret; - }; - _proto.getStringData = async function getStringData() { - var data = await this.getData(); - var asString = await blobToString(data); - return asString; - }; - return RxAttachment; -}(); -export function fromStorageInstanceResult(id, attachmentData, rxDocument) { - return new RxAttachment({ - doc: rxDocument, - id, - type: attachmentData.type, - length: 
attachmentData.length, - digest: attachmentData.digest - }); -} -export async function putAttachment(attachmentData) { - ensureSchemaSupportsAttachments(this); - var dataSize = getBlobSize(attachmentData.data); - var dataString = await blobToBase64String(attachmentData.data); - var digest = await this.collection.database.hashFunction(dataString); - var id = attachmentData.id; - var type = attachmentData.type; - var data = dataString; - return this.collection.incrementalWriteQueue.addWrite(this._data, docWriteData => { - docWriteData = flatClone(docWriteData); - docWriteData._attachments = flatClone(docWriteData._attachments); - docWriteData._attachments[id] = { - length: dataSize, - type, - data, - digest - }; - return docWriteData; - }).then(writeResult => { - var newDocument = this.collection._docCache.getCachedRxDocument(writeResult); - var attachmentDataOfId = writeResult._attachments[id]; - var attachment = fromStorageInstanceResult(id, attachmentDataOfId, newDocument); - return attachment; - }); -} - -/** - * get an attachment of the document by its id - */ -export function getAttachment(id) { - ensureSchemaSupportsAttachments(this); - var docData = this._data; - if (!docData._attachments || !docData._attachments[id]) return null; - var attachmentData = docData._attachments[id]; - var attachment = fromStorageInstanceResult(id, attachmentData, this); - return attachment; -} - -/** - * returns all attachments of the document - */ -export function allAttachments() { - ensureSchemaSupportsAttachments(this); - var docData = this._data; - - // if there are no attachments, the field is missing - if (!docData._attachments) { - return []; - } - return Object.keys(docData._attachments).map(id => { - return fromStorageInstanceResult(id, docData._attachments[id], this); - }); -} -export async function preMigrateDocument(data) { - var attachments = data.docData._attachments; - if (attachments) { - var newAttachments = {}; - await 
Promise.all(Object.keys(attachments).map(async attachmentId => { - var attachment = attachments[attachmentId]; - var docPrimary = data.docData[data.oldCollection.schema.primaryPath]; - var rawAttachmentData = await data.oldCollection.storageInstance.getAttachmentData(docPrimary, attachmentId, attachment.digest); - var digest = await data.oldCollection.database.hashFunction(rawAttachmentData); - newAttachments[attachmentId] = { - length: attachment.length, - type: attachment.type, - data: rawAttachmentData, - digest - }; - })); - - /** - * Hooks mutate the input - * instead of returning stuff - */ - data.docData._attachments = newAttachments; - } -} -export function postMigrateDocument(_action) { - /** - * No longer needed because - * we store the attachments data buffers directly in the document. - */ - return PROMISE_RESOLVE_VOID; -} -export var RxDBAttachmentsPlugin = { - name: 'attachments', - rxdb: true, - prototypes: { - RxDocument: proto => { - proto.putAttachment = putAttachment; - proto.getAttachment = getAttachment; - proto.allAttachments = allAttachments; - Object.defineProperty(proto, 'allAttachments$', { - get: function allAttachments$() { - return this.$.pipe(map(rxDocument => Object.entries(rxDocument.toJSON(true)._attachments)), map(entries => { - return entries.map(([id, attachmentData]) => { - return fromStorageInstanceResult(id, attachmentData, this); - }); - })); - } - }); - } - }, - overwritable: {}, - hooks: { - preMigrateDocument: { - after: preMigrateDocument - }, - postMigrateDocument: { - after: postMigrateDocument - } - } -}; -export * from "./attachments-utils.js"; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/attachments/index.js.map b/dist/esm/plugins/attachments/index.js.map deleted file mode 100644 index 4c04a74fae1..00000000000 --- a/dist/esm/plugins/attachments/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","names":["map","blobToBase64String","blobToString","createBlobFromBase64","flatClone","getBlobSize","PROMISE_RESOLVE_VOID","assignMethodsToAttachment","ensureSchemaSupportsAttachments","RxAttachment","doc","id","type","length","digest","_proto","prototype","remove","collection","incrementalWriteQueue","addWrite","_data","docWriteData","_attachments","then","getData","plainDataBase64","storageInstance","getAttachmentData","primary","ret","getStringData","data","asString","fromStorageInstanceResult","attachmentData","rxDocument","putAttachment","dataSize","dataString","database","hashFunction","writeResult","newDocument","_docCache","getCachedRxDocument","attachmentDataOfId","attachment","getAttachment","docData","allAttachments","Object","keys","preMigrateDocument","attachments","newAttachments","Promise","all","attachmentId","docPrimary","oldCollection","schema","primaryPath","rawAttachmentData","postMigrateDocument","_action","RxDBAttachmentsPlugin","name","rxdb","prototypes","RxDocument","proto","defineProperty","get","allAttachments$","$","pipe","entries","toJSON","overwritable","hooks","after"],"sources":["../../../../src/plugins/attachments/index.ts"],"sourcesContent":["import {\n map\n} from 'rxjs';\n\nimport {\n blobToBase64String,\n blobToString,\n createBlobFromBase64,\n flatClone,\n getBlobSize,\n PROMISE_RESOLVE_VOID\n} from '../../plugins/utils/index.ts';\nimport type {\n RxDocument,\n RxPlugin,\n RxDocumentWriteData,\n RxAttachmentData,\n RxDocumentData,\n RxAttachmentCreator,\n RxAttachmentWriteData\n} from '../../types/index.ts';\nimport { assignMethodsToAttachment, ensureSchemaSupportsAttachments } from './attachments-utils.ts';\n\n\n\n/**\n * an RxAttachment is basically just the attachment-stub\n * wrapped so that you can access the attachment-data\n */\nexport class RxAttachment {\n public doc: RxDocument;\n public id: string;\n public type: string;\n public length: number;\n public digest: string;\n 
constructor({\n doc,\n id,\n type,\n length,\n digest\n }: any) {\n this.doc = doc;\n this.id = id;\n this.type = type;\n this.length = length;\n this.digest = digest;\n\n assignMethodsToAttachment(this);\n }\n\n remove(): Promise {\n return this.doc.collection.incrementalWriteQueue.addWrite(\n this.doc._data,\n docWriteData => {\n delete docWriteData._attachments[this.id];\n return docWriteData;\n }\n ).then(() => { });\n }\n\n /**\n * returns the data for the attachment\n */\n async getData(): Promise {\n const plainDataBase64 = await this.doc.collection.storageInstance.getAttachmentData(\n this.doc.primary,\n this.id,\n this.digest\n );\n const ret = await createBlobFromBase64(\n plainDataBase64,\n this.type as any\n );\n return ret;\n }\n\n async getStringData(): Promise {\n const data = await this.getData();\n const asString = await blobToString(data);\n return asString;\n }\n}\n\nexport function fromStorageInstanceResult(\n id: string,\n attachmentData: RxAttachmentData,\n rxDocument: RxDocument\n) {\n return new RxAttachment({\n doc: rxDocument,\n id,\n type: attachmentData.type,\n length: attachmentData.length,\n digest: attachmentData.digest\n });\n}\n\n\n\nexport async function putAttachment(\n this: RxDocument,\n attachmentData: RxAttachmentCreator\n): Promise {\n ensureSchemaSupportsAttachments(this);\n\n const dataSize = getBlobSize(attachmentData.data);\n const dataString = await blobToBase64String(attachmentData.data);\n const digest = await this.collection.database.hashFunction(dataString);\n\n const id = attachmentData.id;\n const type = attachmentData.type;\n const data = dataString;\n\n return this.collection.incrementalWriteQueue.addWrite(\n this._data,\n (docWriteData: RxDocumentWriteData) => {\n docWriteData = flatClone(docWriteData);\n docWriteData._attachments = flatClone(docWriteData._attachments);\n docWriteData._attachments[id] = {\n length: dataSize,\n type,\n data,\n digest\n };\n return docWriteData;\n }).then(writeResult => {\n const 
newDocument = this.collection._docCache.getCachedRxDocument(writeResult);\n const attachmentDataOfId = writeResult._attachments[id];\n const attachment = fromStorageInstanceResult(\n id,\n attachmentDataOfId,\n newDocument\n );\n return attachment;\n });\n}\n\n/**\n * get an attachment of the document by its id\n */\nexport function getAttachment(\n this: RxDocument,\n id: string\n): RxAttachment | null {\n ensureSchemaSupportsAttachments(this);\n const docData: any = this._data;\n if (!docData._attachments || !docData._attachments[id])\n return null;\n\n const attachmentData = docData._attachments[id];\n const attachment = fromStorageInstanceResult(\n id,\n attachmentData,\n this\n );\n return attachment;\n}\n\n/**\n * returns all attachments of the document\n */\nexport function allAttachments(\n this: RxDocument\n): RxAttachment[] {\n ensureSchemaSupportsAttachments(this);\n const docData: any = this._data;\n\n // if there are no attachments, the field is missing\n if (!docData._attachments) {\n return [];\n }\n return Object.keys(docData._attachments)\n .map(id => {\n return fromStorageInstanceResult(\n id,\n docData._attachments[id],\n this\n );\n });\n}\n\nexport async function preMigrateDocument(\n data: {\n docData: RxDocumentData;\n oldCollection: any; // TODO\n }\n): Promise {\n const attachments = data.docData._attachments;\n if (attachments) {\n const newAttachments: { [attachmentId: string]: RxAttachmentWriteData; } = {};\n await Promise.all(\n Object.keys(attachments).map(async (attachmentId) => {\n const attachment: RxAttachmentData = attachments[attachmentId];\n const docPrimary: string = (data.docData as any)[data.oldCollection.schema.primaryPath];\n const rawAttachmentData = await data.oldCollection.storageInstance.getAttachmentData(\n docPrimary,\n attachmentId,\n attachment.digest\n );\n const digest = await data.oldCollection.database.hashFunction(rawAttachmentData);\n newAttachments[attachmentId] = {\n length: attachment.length,\n type: 
attachment.type,\n data: rawAttachmentData,\n digest\n };\n })\n );\n\n /**\n * Hooks mutate the input\n * instead of returning stuff\n */\n (data.docData as RxDocumentWriteData)._attachments = newAttachments;\n }\n}\n\nexport function postMigrateDocument(_action: any): Promise {\n /**\n * No longer needed because\n * we store the attachments data buffers directly in the document.\n */\n return PROMISE_RESOLVE_VOID;\n}\n\nexport const RxDBAttachmentsPlugin: RxPlugin = {\n name: 'attachments',\n rxdb: true,\n prototypes: {\n RxDocument: (proto: any) => {\n proto.putAttachment = putAttachment;\n proto.getAttachment = getAttachment;\n proto.allAttachments = allAttachments;\n Object.defineProperty(proto, 'allAttachments$', {\n get: function allAttachments$(this: RxDocument) {\n return this.$\n .pipe(\n map(rxDocument => Object.entries(\n rxDocument.toJSON(true)._attachments\n )),\n map(entries => {\n return (entries as any)\n .map(([id, attachmentData]: any) => {\n return fromStorageInstanceResult(\n id,\n attachmentData,\n this\n );\n });\n })\n );\n }\n });\n }\n },\n overwritable: {},\n hooks: {\n preMigrateDocument: {\n after: preMigrateDocument\n },\n postMigrateDocument: {\n after: postMigrateDocument\n }\n }\n};\n\n\nexport * from 
'./attachments-utils.ts';\n"],"mappings":"AAAA,SACIA,GAAG,QACA,MAAM;AAEb,SACIC,kBAAkB,EAClBC,YAAY,EACZC,oBAAoB,EACpBC,SAAS,EACTC,WAAW,EACXC,oBAAoB,QACjB,8BAA8B;AAUrC,SAASC,yBAAyB,EAAEC,+BAA+B,QAAQ,wBAAwB;;AAInG;AACA;AACA;AACA;AACA,WAAaC,YAAY;EAMrB,SAAAA,aAAY;IACRC,GAAG;IACHC,EAAE;IACFC,IAAI;IACJC,MAAM;IACNC;EACC,CAAC,EAAE;IACJ,IAAI,CAACJ,GAAG,GAAGA,GAAG;IACd,IAAI,CAACC,EAAE,GAAGA,EAAE;IACZ,IAAI,CAACC,IAAI,GAAGA,IAAI;IAChB,IAAI,CAACC,MAAM,GAAGA,MAAM;IACpB,IAAI,CAACC,MAAM,GAAGA,MAAM;IAEpBP,yBAAyB,CAAC,IAAI,CAAC;EACnC;EAAC,IAAAQ,MAAA,GAAAN,YAAA,CAAAO,SAAA;EAAAD,MAAA,CAEDE,MAAM,GAAN,SAAAA,OAAA,EAAwB;IACpB,OAAO,IAAI,CAACP,GAAG,CAACQ,UAAU,CAACC,qBAAqB,CAACC,QAAQ,CACrD,IAAI,CAACV,GAAG,CAACW,KAAK,EACdC,YAAY,IAAI;MACZ,OAAOA,YAAY,CAACC,YAAY,CAAC,IAAI,CAACZ,EAAE,CAAC;MACzC,OAAOW,YAAY;IACvB,CACJ,CAAC,CAACE,IAAI,CAAC,MAAM,CAAE,CAAC,CAAC;EACrB;;EAEA;AACJ;AACA,KAFI;EAAAT,MAAA,CAGMU,OAAO,GAAb,eAAAA,QAAA,EAA+B;IAC3B,IAAMC,eAAe,GAAG,MAAM,IAAI,CAAChB,GAAG,CAACQ,UAAU,CAACS,eAAe,CAACC,iBAAiB,CAC/E,IAAI,CAAClB,GAAG,CAACmB,OAAO,EAChB,IAAI,CAAClB,EAAE,EACP,IAAI,CAACG,MACT,CAAC;IACD,IAAMgB,GAAG,GAAG,MAAM3B,oBAAoB,CAClCuB,eAAe,EACf,IAAI,CAACd,IACT,CAAC;IACD,OAAOkB,GAAG;EACd,CAAC;EAAAf,MAAA,CAEKgB,aAAa,GAAnB,eAAAA,cAAA,EAAuC;IACnC,IAAMC,IAAI,GAAG,MAAM,IAAI,CAACP,OAAO,CAAC,CAAC;IACjC,IAAMQ,QAAQ,GAAG,MAAM/B,YAAY,CAAC8B,IAAI,CAAC;IACzC,OAAOC,QAAQ;EACnB,CAAC;EAAA,OAAAxB,YAAA;AAAA;AAGL,OAAO,SAASyB,yBAAyBA,CACrCvB,EAAU,EACVwB,cAAgC,EAChCC,UAAiC,EACnC;EACE,OAAO,IAAI3B,YAAY,CAAC;IACpBC,GAAG,EAAE0B,UAAU;IACfzB,EAAE;IACFC,IAAI,EAAEuB,cAAc,CAACvB,IAAI;IACzBC,MAAM,EAAEsB,cAAc,CAACtB,MAAM;IAC7BC,MAAM,EAAEqB,cAAc,CAACrB;EAC3B,CAAC,CAAC;AACN;AAIA,OAAO,eAAeuB,aAAaA,CAE/BF,cAAmC,EACd;EACrB3B,+BAA+B,CAAC,IAAI,CAAC;EAErC,IAAM8B,QAAQ,GAAGjC,WAAW,CAAC8B,cAAc,CAACH,IAAI,CAAC;EACjD,IAAMO,UAAU,GAAG,MAAMtC,kBAAkB,CAACkC,cAAc,CAACH,IAAI,CAAC;EAChE,IAAMlB,MAAM,GAAG,MAAM,IAAI,CAACI,UAAU,CAACsB,QAAQ,CAACC,YAAY,CAACF,UAAU,CAAC;EAEtE,IAAM5B,EAAE,GAAGwB,cAAc,CAACxB,EAAE;EAC5B,IAAMC,IAAI,GAAGuB,cAAc,CAACvB,IAAI;EAChC,IAAMoB,
IAAI,GAAGO,UAAU;EAEvB,OAAO,IAAI,CAACrB,UAAU,CAACC,qBAAqB,CAACC,QAAQ,CACjD,IAAI,CAACC,KAAK,EACTC,YAA4C,IAAK;IAC9CA,YAAY,GAAGlB,SAAS,CAACkB,YAAY,CAAC;IACtCA,YAAY,CAACC,YAAY,GAAGnB,SAAS,CAACkB,YAAY,CAACC,YAAY,CAAC;IAChED,YAAY,CAACC,YAAY,CAACZ,EAAE,CAAC,GAAG;MAC5BE,MAAM,EAAEyB,QAAQ;MAChB1B,IAAI;MACJoB,IAAI;MACJlB;IACJ,CAAC;IACD,OAAOQ,YAAY;EACvB,CAAC,CAAC,CAACE,IAAI,CAACkB,WAAW,IAAI;IACnB,IAAMC,WAAW,GAAG,IAAI,CAACzB,UAAU,CAAC0B,SAAS,CAACC,mBAAmB,CAACH,WAAW,CAAC;IAC9E,IAAMI,kBAAkB,GAAGJ,WAAW,CAACnB,YAAY,CAACZ,EAAE,CAAC;IACvD,IAAMoC,UAAU,GAAGb,yBAAyB,CACxCvB,EAAE,EACFmC,kBAAkB,EAClBH,WACJ,CAAC;IACD,OAAOI,UAAU;EACrB,CAAC,CAAC;AACV;;AAEA;AACA;AACA;AACA,OAAO,SAASC,aAAaA,CAEzBrC,EAAU,EACS;EACnBH,+BAA+B,CAAC,IAAI,CAAC;EACrC,IAAMyC,OAAY,GAAG,IAAI,CAAC5B,KAAK;EAC/B,IAAI,CAAC4B,OAAO,CAAC1B,YAAY,IAAI,CAAC0B,OAAO,CAAC1B,YAAY,CAACZ,EAAE,CAAC,EAClD,OAAO,IAAI;EAEf,IAAMwB,cAAc,GAAGc,OAAO,CAAC1B,YAAY,CAACZ,EAAE,CAAC;EAC/C,IAAMoC,UAAU,GAAGb,yBAAyB,CACxCvB,EAAE,EACFwB,cAAc,EACd,IACJ,CAAC;EACD,OAAOY,UAAU;AACrB;;AAEA;AACA;AACA;AACA,OAAO,SAASG,cAAcA,CAAA,EAEZ;EACd1C,+BAA+B,CAAC,IAAI,CAAC;EACrC,IAAMyC,OAAY,GAAG,IAAI,CAAC5B,KAAK;;EAE/B;EACA,IAAI,CAAC4B,OAAO,CAAC1B,YAAY,EAAE;IACvB,OAAO,EAAE;EACb;EACA,OAAO4B,MAAM,CAACC,IAAI,CAACH,OAAO,CAAC1B,YAAY,CAAC,CACnCvB,GAAG,CAACW,EAAE,IAAI;IACP,OAAOuB,yBAAyB,CAC5BvB,EAAE,EACFsC,OAAO,CAAC1B,YAAY,CAACZ,EAAE,CAAC,EACxB,IACJ,CAAC;EACL,CAAC,CAAC;AACV;AAEA,OAAO,eAAe0C,kBAAkBA,CACpCrB,IAGC,EACY;EACb,IAAMsB,WAAW,GAAGtB,IAAI,CAACiB,OAAO,CAAC1B,YAAY;EAC7C,IAAI+B,WAAW,EAAE;IACb,IAAMC,cAAkE,GAAG,CAAC,CAAC;IAC7E,MAAMC,OAAO,CAACC,GAAG,CACbN,MAAM,CAACC,IAAI,CAACE,WAAW,CAAC,CAACtD,GAAG,CAAC,MAAO0D,YAAY,IAAK;MACjD,IAAMX,UAA4B,GAAGO,WAAW,CAACI,YAAY,CAAC;MAC9D,IAAMC,UAAkB,GAAI3B,IAAI,CAACiB,OAAO,CAASjB,IAAI,CAAC4B,aAAa,CAACC,MAAM,CAACC,WAAW,CAAC;MACvF,IAAMC,iBAAiB,GAAG,MAAM/B,IAAI,CAAC4B,aAAa,CAACjC,eAAe,CAACC,iBAAiB,CAChF+B,UAAU,EACVD,YAAY,EACZX,UAAU,CAACjC,MACf,CAAC;MACD,IAAMA,MAAM,GAAG,MAAMkB,IAAI,CAAC4B,aAAa,CAACpB,QAAQ,CAACC,YAAY,CAACsB,iBAAiB,CAAC;MAChFR,cAAc,CAACG,YAA
Y,CAAC,GAAG;QAC3B7C,MAAM,EAAEkC,UAAU,CAAClC,MAAM;QACzBD,IAAI,EAAEmC,UAAU,CAACnC,IAAI;QACrBoB,IAAI,EAAE+B,iBAAiB;QACvBjD;MACJ,CAAC;IACL,CAAC,CACL,CAAC;;IAED;AACR;AACA;AACA;IACSkB,IAAI,CAACiB,OAAO,CAAoC1B,YAAY,GAAGgC,cAAc;EAClF;AACJ;AAEA,OAAO,SAASS,mBAAmBA,CAACC,OAAY,EAAiB;EAC7D;AACJ;AACA;AACA;EACI,OAAO3D,oBAAoB;AAC/B;AAEA,OAAO,IAAM4D,qBAA+B,GAAG;EAC3CC,IAAI,EAAE,aAAa;EACnBC,IAAI,EAAE,IAAI;EACVC,UAAU,EAAE;IACRC,UAAU,EAAGC,KAAU,IAAK;MACxBA,KAAK,CAAClC,aAAa,GAAGA,aAAa;MACnCkC,KAAK,CAACvB,aAAa,GAAGA,aAAa;MACnCuB,KAAK,CAACrB,cAAc,GAAGA,cAAc;MACrCC,MAAM,CAACqB,cAAc,CAACD,KAAK,EAAE,iBAAiB,EAAE;QAC5CE,GAAG,EAAE,SAASC,eAAeA,CAAA,EAAmB;UAC5C,OAAO,IAAI,CAACC,CAAC,CACRC,IAAI,CACD5E,GAAG,CAACoC,UAAU,IAAIe,MAAM,CAAC0B,OAAO,CAC5BzC,UAAU,CAAC0C,MAAM,CAAC,IAAI,CAAC,CAACvD,YAC5B,CAAC,CAAC,EACFvB,GAAG,CAAC6E,OAAO,IAAI;YACX,OAAQA,OAAO,CACV7E,GAAG,CAAC,CAAC,CAACW,EAAE,EAAEwB,cAAc,CAAM,KAAK;cAChC,OAAOD,yBAAyB,CAC5BvB,EAAE,EACFwB,cAAc,EACd,IACJ,CAAC;YACL,CAAC,CAAC;UACV,CAAC,CACL,CAAC;QACT;MACJ,CAAC,CAAC;IACN;EACJ,CAAC;EACD4C,YAAY,EAAE,CAAC,CAAC;EAChBC,KAAK,EAAE;IACH3B,kBAAkB,EAAE;MAChB4B,KAAK,EAAE5B;IACX,CAAC;IACDW,mBAAmB,EAAE;MACjBiB,KAAK,EAAEjB;IACX;EACJ;AACJ,CAAC;AAGD,cAAc,wBAAwB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/backup/file-util.js b/dist/esm/plugins/backup/file-util.js deleted file mode 100644 index cfb345e5f82..00000000000 --- a/dist/esm/plugins/backup/file-util.js +++ /dev/null @@ -1,87 +0,0 @@ -import * as fs from 'node:fs'; -import * as path from 'node:path'; -import { blobToString, now } from "../../plugins/utils/index.js"; - -/** - * ensure that the given folder exists - */ -export function ensureFolderExists(folderPath) { - if (!fs.existsSync(folderPath)) { - fs.mkdirSync(folderPath, { - recursive: true - }); - } -} - -/** - * deletes and recreates the folder - */ -export function clearFolder(folderPath) { - deleteFolder(folderPath); - ensureFolderExists(folderPath); -} -export function deleteFolder(folderPath) { - // only remove if 
exists to not raise warning - if (fs.existsSync(folderPath)) { - fs.rmdirSync(folderPath, { - recursive: true - }); - } -} -export function prepareFolders(database, options) { - ensureFolderExists(options.directory); - var metaLoc = metaFileLocation(options); - if (!fs.existsSync(metaLoc)) { - var currentTime = now(); - var metaData = { - createdAt: currentTime, - updatedAt: currentTime, - collectionStates: {} - }; - fs.writeFileSync(metaLoc, JSON.stringify(metaData), 'utf-8'); - } - Object.keys(database.collections).forEach(collectionName => { - ensureFolderExists(path.join(options.directory, collectionName)); - }); -} -export async function writeToFile(location, data) { - if (typeof data !== 'string') { - data = await blobToString(data); - } - return new Promise(function (res, rej) { - fs.writeFile(location, data, 'utf-8', err => { - if (err) { - rej(err); - } else { - res(); - } - }); - }); -} -export function writeJsonToFile(location, data) { - return writeToFile(location, JSON.stringify(data)); -} -export function metaFileLocation(options) { - return path.join(options.directory, 'backup_meta.json'); -} -export function getMeta(options) { - var loc = metaFileLocation(options); - return new Promise((res, rej) => { - fs.readFile(loc, 'utf-8', (err, data) => { - if (err) { - rej(err); - } else { - var metaContent = JSON.parse(data); - res(metaContent); - } - }); - }); -} -export function setMeta(options, meta) { - var loc = metaFileLocation(options); - return writeJsonToFile(loc, meta); -} -export function documentFolder(options, docId) { - return path.join(options.directory, docId); -} -//# sourceMappingURL=file-util.js.map \ No newline at end of file diff --git a/dist/esm/plugins/backup/file-util.js.map b/dist/esm/plugins/backup/file-util.js.map deleted file mode 100644 index e885a9059f7..00000000000 --- a/dist/esm/plugins/backup/file-util.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"file-util.js","names":["fs","path","blobToString","now","ensureFolderExists","folderPath","existsSync","mkdirSync","recursive","clearFolder","deleteFolder","rmdirSync","prepareFolders","database","options","directory","metaLoc","metaFileLocation","currentTime","metaData","createdAt","updatedAt","collectionStates","writeFileSync","JSON","stringify","Object","keys","collections","forEach","collectionName","join","writeToFile","location","data","Promise","res","rej","writeFile","err","writeJsonToFile","getMeta","loc","readFile","metaContent","parse","setMeta","meta","documentFolder","docId"],"sources":["../../../../src/plugins/backup/file-util.ts"],"sourcesContent":["import * as fs from 'node:fs';\nimport * as path from 'node:path';\nimport type {\n BackupMetaFileContent,\n BackupOptions,\n RxDatabase\n} from '../../types/index.d.ts';\nimport { blobToString, now } from '../../plugins/utils/index.ts';\n\n/**\n * ensure that the given folder exists\n */\nexport function ensureFolderExists(folderPath: string): void {\n if (!fs.existsSync(folderPath)) {\n fs.mkdirSync(folderPath, { recursive: true });\n }\n}\n\n/**\n * deletes and recreates the folder\n */\nexport function clearFolder(folderPath: string): void {\n deleteFolder(folderPath);\n ensureFolderExists(folderPath);\n}\n\nexport function deleteFolder(folderPath: string): void {\n // only remove if exists to not raise warning\n if (fs.existsSync(folderPath)) {\n fs.rmdirSync(folderPath, { recursive: true });\n }\n}\n\nexport function prepareFolders(\n database: RxDatabase,\n options: BackupOptions\n) {\n ensureFolderExists(options.directory);\n\n const metaLoc = metaFileLocation(options);\n\n if (!fs.existsSync(metaLoc)) {\n const currentTime = now();\n const metaData: BackupMetaFileContent = {\n createdAt: currentTime,\n updatedAt: currentTime,\n collectionStates: {}\n };\n fs.writeFileSync(metaLoc, JSON.stringify(metaData), 'utf-8');\n }\n\n 
Object.keys(database.collections).forEach(collectionName => {\n ensureFolderExists(\n path.join(\n options.directory,\n collectionName\n )\n );\n });\n}\n\nexport async function writeToFile(\n location: string,\n data: string | Blob\n): Promise {\n if (typeof data !== 'string') {\n data = await blobToString(data);\n }\n return new Promise(function (res, rej) {\n fs.writeFile(\n location,\n data as string,\n 'utf-8',\n (err) => {\n if (err) {\n rej(err);\n } else {\n res();\n }\n }\n );\n });\n}\n\nexport function writeJsonToFile(\n location: string,\n data: any\n): Promise {\n return writeToFile(\n location,\n JSON.stringify(data)\n );\n}\n\nexport function metaFileLocation(options: BackupOptions): string {\n return path.join(\n options.directory,\n 'backup_meta.json'\n );\n}\n\nexport function getMeta(options: BackupOptions): Promise {\n const loc = metaFileLocation(options);\n return new Promise((res, rej) => {\n fs.readFile(loc, 'utf-8', (err, data) => {\n if (err) {\n rej(err);\n } else {\n const metaContent = JSON.parse(data);\n res(metaContent);\n }\n });\n });\n}\n\nexport function setMeta(\n options: BackupOptions,\n meta: BackupMetaFileContent\n): Promise {\n const loc = metaFileLocation(options);\n return writeJsonToFile(loc, meta);\n}\n\nexport function documentFolder(\n options: BackupOptions,\n docId: string\n): string {\n return path.join(\n options.directory,\n docId\n 
);\n}\n"],"mappings":"AAAA,OAAO,KAAKA,EAAE,MAAM,SAAS;AAC7B,OAAO,KAAKC,IAAI,MAAM,WAAW;AAMjC,SAASC,YAAY,EAAEC,GAAG,QAAQ,8BAA8B;;AAEhE;AACA;AACA;AACA,OAAO,SAASC,kBAAkBA,CAACC,UAAkB,EAAQ;EACzD,IAAI,CAACL,EAAE,CAACM,UAAU,CAACD,UAAU,CAAC,EAAE;IAC5BL,EAAE,CAACO,SAAS,CAACF,UAAU,EAAE;MAAEG,SAAS,EAAE;IAAK,CAAC,CAAC;EACjD;AACJ;;AAEA;AACA;AACA;AACA,OAAO,SAASC,WAAWA,CAACJ,UAAkB,EAAQ;EAClDK,YAAY,CAACL,UAAU,CAAC;EACxBD,kBAAkB,CAACC,UAAU,CAAC;AAClC;AAEA,OAAO,SAASK,YAAYA,CAACL,UAAkB,EAAQ;EACnD;EACA,IAAIL,EAAE,CAACM,UAAU,CAACD,UAAU,CAAC,EAAE;IAC3BL,EAAE,CAACW,SAAS,CAACN,UAAU,EAAE;MAAEG,SAAS,EAAE;IAAK,CAAC,CAAC;EACjD;AACJ;AAEA,OAAO,SAASI,cAAcA,CAC1BC,QAAoB,EACpBC,OAAsB,EACxB;EACEV,kBAAkB,CAACU,OAAO,CAACC,SAAS,CAAC;EAErC,IAAMC,OAAO,GAAGC,gBAAgB,CAACH,OAAO,CAAC;EAEzC,IAAI,CAACd,EAAE,CAACM,UAAU,CAACU,OAAO,CAAC,EAAE;IACzB,IAAME,WAAW,GAAGf,GAAG,CAAC,CAAC;IACzB,IAAMgB,QAA+B,GAAG;MACpCC,SAAS,EAAEF,WAAW;MACtBG,SAAS,EAAEH,WAAW;MACtBI,gBAAgB,EAAE,CAAC;IACvB,CAAC;IACDtB,EAAE,CAACuB,aAAa,CAACP,OAAO,EAAEQ,IAAI,CAACC,SAAS,CAACN,QAAQ,CAAC,EAAE,OAAO,CAAC;EAChE;EAEAO,MAAM,CAACC,IAAI,CAACd,QAAQ,CAACe,WAAW,CAAC,CAACC,OAAO,CAACC,cAAc,IAAI;IACxD1B,kBAAkB,CACdH,IAAI,CAAC8B,IAAI,CACLjB,OAAO,CAACC,SAAS,EACjBe,cACJ,CACJ,CAAC;EACL,CAAC,CAAC;AACN;AAEA,OAAO,eAAeE,WAAWA,CAC7BC,QAAgB,EAChBC,IAAmB,EACN;EACb,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;IAC1BA,IAAI,GAAG,MAAMhC,YAAY,CAACgC,IAAI,CAAC;EACnC;EACA,OAAO,IAAIC,OAAO,CAAC,UAAUC,GAAG,EAAEC,GAAG,EAAE;IACnCrC,EAAE,CAACsC,SAAS,CACRL,QAAQ,EACRC,IAAI,EACJ,OAAO,EACNK,GAAG,IAAK;MACL,IAAIA,GAAG,EAAE;QACLF,GAAG,CAACE,GAAG,CAAC;MACZ,CAAC,MAAM;QACHH,GAAG,CAAC,CAAC;MACT;IACJ,CACJ,CAAC;EACL,CAAC,CAAC;AACN;AAEA,OAAO,SAASI,eAAeA,CAC3BP,QAAgB,EAChBC,IAAS,EACI;EACb,OAAOF,WAAW,CACdC,QAAQ,EACRT,IAAI,CAACC,SAAS,CAACS,IAAI,CACvB,CAAC;AACL;AAEA,OAAO,SAASjB,gBAAgBA,CAACH,OAAsB,EAAU;EAC7D,OAAOb,IAAI,CAAC8B,IAAI,CACZjB,OAAO,CAACC,SAAS,EACjB,kBACJ,CAAC;AACL;AAEA,OAAO,SAAS0B,OAAOA,CAAC3B,OAAsB,EAAkC;EAC5E,IAAM4B,GAAG,GAAGzB,gBAAgB,CAACH,OAAO,CAAC;EACrC,OAAO,IAAIqB,OAAO,CAAC,CAACC,GAAG,EAAEC,GAAG,KAAK
;IAC7BrC,EAAE,CAAC2C,QAAQ,CAACD,GAAG,EAAE,OAAO,EAAE,CAACH,GAAG,EAAEL,IAAI,KAAK;MACrC,IAAIK,GAAG,EAAE;QACLF,GAAG,CAACE,GAAG,CAAC;MACZ,CAAC,MAAM;QACH,IAAMK,WAAW,GAAGpB,IAAI,CAACqB,KAAK,CAACX,IAAI,CAAC;QACpCE,GAAG,CAACQ,WAAW,CAAC;MACpB;IACJ,CAAC,CAAC;EACN,CAAC,CAAC;AACN;AAEA,OAAO,SAASE,OAAOA,CACnBhC,OAAsB,EACtBiC,IAA2B,EACd;EACb,IAAML,GAAG,GAAGzB,gBAAgB,CAACH,OAAO,CAAC;EACrC,OAAO0B,eAAe,CAACE,GAAG,EAAEK,IAAI,CAAC;AACrC;AAEA,OAAO,SAASC,cAAcA,CAC1BlC,OAAsB,EACtBmC,KAAa,EACP;EACN,OAAOhD,IAAI,CAAC8B,IAAI,CACZjB,OAAO,CAACC,SAAS,EACjBkC,KACJ,CAAC;AACL","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/backup/index.js b/dist/esm/plugins/backup/index.js deleted file mode 100644 index 052c7373d9e..00000000000 --- a/dist/esm/plugins/backup/index.js +++ /dev/null @@ -1,183 +0,0 @@ -import * as path from 'node:path'; -import { BehaviorSubject, firstValueFrom, Subject } from 'rxjs'; -import { filter, map } from 'rxjs'; -import { getFromMapOrCreate, PROMISE_RESOLVE_FALSE, PROMISE_RESOLVE_TRUE, PROMISE_RESOLVE_VOID } from "../../plugins/utils/index.js"; -import { clearFolder, deleteFolder, documentFolder, ensureFolderExists, getMeta, prepareFolders, setMeta, writeJsonToFile, writeToFile } from "./file-util.js"; -import { getChangedDocumentsSince } from "../../rx-storage-helper.js"; - -/** - * Backups a single documents, - * returns the paths to all written files - */ -export async function backupSingleDocument(rxDocument, options) { - var data = rxDocument.toJSON(true); - var writtenFiles = []; - var docFolder = documentFolder(options, rxDocument.primary); - await clearFolder(docFolder); - var fileLocation = path.join(docFolder, 'document.json'); - await writeJsonToFile(fileLocation, data); - writtenFiles.push(fileLocation); - if (options.attachments) { - var attachmentsFolder = path.join(docFolder, 'attachments'); - ensureFolderExists(attachmentsFolder); - var attachments = rxDocument.allAttachments(); - await Promise.all(attachments.map(async attachment => 
{ - var content = await attachment.getData(); - var attachmentFileLocation = path.join(attachmentsFolder, attachment.id); - await writeToFile(attachmentFileLocation, content); - writtenFiles.push(attachmentFileLocation); - })); - } - return writtenFiles; -} -var BACKUP_STATES_BY_DB = new WeakMap(); -function addToBackupStates(db, state) { - var ar = getFromMapOrCreate(BACKUP_STATES_BY_DB, db, () => []); - ar.push(state); -} -export var RxBackupState = /*#__PURE__*/function () { - function RxBackupState(database, options) { - this.isStopped = false; - this.subs = []; - this.persistRunning = PROMISE_RESOLVE_VOID; - this.initialReplicationDone$ = new BehaviorSubject(false); - this.internalWriteEvents$ = new Subject(); - this.writeEvents$ = this.internalWriteEvents$.asObservable(); - this.database = database; - this.options = options; - if (!this.options.batchSize) { - this.options.batchSize = 10; - } - addToBackupStates(database, this); - prepareFolders(database, options); - } - - /** - * Persists all data from all collections, - * beginning from the oldest sequence checkpoint - * to the newest one. - * Do not call this while it is already running. 
- * Returns true if there are more documents to process - */ - var _proto = RxBackupState.prototype; - _proto.persistOnce = function persistOnce() { - return this.persistRunning = this.persistRunning.then(() => this._persistOnce()); - }; - _proto._persistOnce = async function _persistOnce() { - var _this = this; - var meta = await getMeta(this.options); - await Promise.all(Object.entries(this.database.collections).map(async ([collectionName, collection]) => { - var primaryKey = collection.schema.primaryPath; - var processedDocuments = new Set(); - await this.database.requestIdlePromise(); - if (!meta.collectionStates[collectionName]) { - meta.collectionStates[collectionName] = {}; - } - var lastCheckpoint = meta.collectionStates[collectionName].checkpoint; - var hasMore = true; - var _loop = async function () { - await _this.database.requestIdlePromise(); - var changesResult = await getChangedDocumentsSince(collection.storageInstance, _this.options.batchSize ? _this.options.batchSize : 0, lastCheckpoint); - lastCheckpoint = changesResult.documents.length > 0 ? 
changesResult.checkpoint : lastCheckpoint; - meta.collectionStates[collectionName].checkpoint = lastCheckpoint; - var docIds = changesResult.documents.map(doc => doc[primaryKey]).filter(id => { - if (processedDocuments.has(id)) { - return false; - } else { - processedDocuments.add(id); - return true; - } - }).filter((elem, pos, arr) => arr.indexOf(elem) === pos); // unique - await _this.database.requestIdlePromise(); - var docs = await collection.findByIds(docIds).exec(); - if (docs.size === 0) { - hasMore = false; - return 1; // continue - } - await Promise.all(Array.from(docs.values()).map(async doc => { - var writtenFiles = await backupSingleDocument(doc, _this.options); - _this.internalWriteEvents$.next({ - collectionName: collection.name, - documentId: doc.primary, - files: writtenFiles, - deleted: false - }); - })); - // handle deleted documents - await Promise.all(docIds.filter(docId => !docs.has(docId)).map(async docId => { - await deleteFolder(documentFolder(_this.options, docId)); - _this.internalWriteEvents$.next({ - collectionName: collection.name, - documentId: docId, - files: [], - deleted: true - }); - })); - }; - while (hasMore && !this.isStopped) { - if (await _loop()) continue; - } - meta.collectionStates[collectionName].checkpoint = lastCheckpoint; - await setMeta(this.options, meta); - })); - if (!this.initialReplicationDone$.getValue()) { - this.initialReplicationDone$.next(true); - } - }; - _proto.watchForChanges = function watchForChanges() { - var collections = Object.values(this.database.collections); - collections.forEach(collection => { - var changes$ = collection.storageInstance.changeStream(); - var sub = changes$.subscribe(() => { - this.persistOnce(); - }); - this.subs.push(sub); - }); - } - - /** - * Returns a promise that resolves when the initial backup is done - * and the filesystem is in sync with the database state - */; - _proto.awaitInitialBackup = function awaitInitialBackup() { - return 
firstValueFrom(this.initialReplicationDone$.pipe(filter(v => !!v), map(() => true))); - }; - _proto.cancel = function cancel() { - if (this.isStopped) { - return PROMISE_RESOLVE_FALSE; - } - this.isStopped = true; - this.subs.forEach(sub => sub.unsubscribe()); - return PROMISE_RESOLVE_TRUE; - }; - return RxBackupState; -}(); -export function backup(options) { - var backupState = new RxBackupState(this, options); - backupState.persistOnce(); - if (options.live) { - backupState.watchForChanges(); - } - return backupState; -} -export * from "./file-util.js"; -export var RxDBBackupPlugin = { - name: 'backup', - rxdb: true, - prototypes: { - RxDatabase(proto) { - proto.backup = backup; - } - }, - hooks: { - preDestroyRxDatabase: { - after: function preDestroyRxDatabase(db) { - var states = BACKUP_STATES_BY_DB.get(db); - if (states) { - states.forEach(state => state.cancel()); - } - } - } - } -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/backup/index.js.map b/dist/esm/plugins/backup/index.js.map deleted file mode 100644 index 132ec6db74a..00000000000 --- a/dist/esm/plugins/backup/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","names":["path","BehaviorSubject","firstValueFrom","Subject","filter","map","getFromMapOrCreate","PROMISE_RESOLVE_FALSE","PROMISE_RESOLVE_TRUE","PROMISE_RESOLVE_VOID","clearFolder","deleteFolder","documentFolder","ensureFolderExists","getMeta","prepareFolders","setMeta","writeJsonToFile","writeToFile","getChangedDocumentsSince","backupSingleDocument","rxDocument","options","data","toJSON","writtenFiles","docFolder","primary","fileLocation","join","push","attachments","attachmentsFolder","allAttachments","Promise","all","attachment","content","getData","attachmentFileLocation","id","BACKUP_STATES_BY_DB","WeakMap","addToBackupStates","db","state","ar","RxBackupState","database","isStopped","subs","persistRunning","initialReplicationDone$","internalWriteEvents$","writeEvents$","asObservable","batchSize","_proto","prototype","persistOnce","then","_persistOnce","_this","meta","Object","entries","collections","collectionName","collection","primaryKey","schema","primaryPath","processedDocuments","Set","requestIdlePromise","collectionStates","lastCheckpoint","checkpoint","hasMore","_loop","changesResult","storageInstance","documents","length","docIds","doc","has","add","elem","pos","arr","indexOf","docs","findByIds","exec","size","Array","from","values","next","name","documentId","files","deleted","docId","getValue","watchForChanges","forEach","changes$","changeStream","sub","subscribe","awaitInitialBackup","pipe","v","cancel","unsubscribe","backup","backupState","live","RxDBBackupPlugin","rxdb","prototypes","RxDatabase","proto","hooks","preDestroyRxDatabase","after","states","get"],"sources":["../../../../src/plugins/backup/index.ts"],"sourcesContent":["import * as path from 'node:path';\nimport {\n BehaviorSubject,\n firstValueFrom,\n Observable,\n Subject,\n Subscription\n} from 'rxjs';\nimport {\n filter,\n map\n} from 'rxjs';\nimport type {\n BackupOptions,\n RxBackupWriteEvent,\n RxCollection,\n RxDatabase,\n RxDocument,\n RxPlugin\n} 
from '../../types/index.d.ts';\nimport {\n getFromMapOrCreate,\n PROMISE_RESOLVE_FALSE,\n PROMISE_RESOLVE_TRUE,\n PROMISE_RESOLVE_VOID\n} from '../../plugins/utils/index.ts';\nimport {\n clearFolder,\n deleteFolder,\n documentFolder,\n ensureFolderExists,\n getMeta,\n prepareFolders,\n setMeta,\n writeJsonToFile,\n writeToFile\n} from './file-util.ts';\nimport { getChangedDocumentsSince } from '../../rx-storage-helper.ts';\n\n\n/**\n * Backups a single documents,\n * returns the paths to all written files\n */\nexport async function backupSingleDocument(\n rxDocument: RxDocument,\n options: BackupOptions\n): Promise {\n const data = rxDocument.toJSON(true);\n const writtenFiles: string[] = [];\n\n const docFolder = documentFolder(options, rxDocument.primary);\n await clearFolder(docFolder);\n\n const fileLocation = path.join(\n docFolder,\n 'document.json'\n );\n await writeJsonToFile(fileLocation, data);\n writtenFiles.push(fileLocation);\n\n if (options.attachments) {\n const attachmentsFolder = path.join(\n docFolder,\n 'attachments'\n );\n ensureFolderExists(attachmentsFolder);\n const attachments = (rxDocument as RxDocument).allAttachments();\n await Promise.all(\n attachments\n .map(async (attachment) => {\n const content = await attachment.getData();\n const attachmentFileLocation = path.join(\n attachmentsFolder,\n attachment.id\n );\n await writeToFile(attachmentFileLocation, content);\n writtenFiles.push(attachmentFileLocation);\n })\n );\n }\n\n return writtenFiles;\n}\n\nconst BACKUP_STATES_BY_DB: WeakMap = new WeakMap();\nfunction addToBackupStates(db: RxDatabase, state: RxBackupState) {\n const ar = getFromMapOrCreate(\n BACKUP_STATES_BY_DB,\n db,\n () => []\n );\n ar.push(state);\n}\n\nexport class RxBackupState {\n public isStopped: boolean = false;\n private subs: Subscription[] = [];\n private persistRunning: Promise = PROMISE_RESOLVE_VOID;\n private initialReplicationDone$: BehaviorSubject = new BehaviorSubject(false as any);\n\n private readonly 
internalWriteEvents$: Subject = new Subject();\n public readonly writeEvents$: Observable = this.internalWriteEvents$.asObservable();\n\n constructor(\n public readonly database: RxDatabase,\n public readonly options: BackupOptions\n ) {\n if (!this.options.batchSize) {\n this.options.batchSize = 10;\n }\n addToBackupStates(database, this);\n prepareFolders(database, options);\n }\n\n /**\n * Persists all data from all collections,\n * beginning from the oldest sequence checkpoint\n * to the newest one.\n * Do not call this while it is already running.\n * Returns true if there are more documents to process\n */\n public persistOnce() {\n return this.persistRunning = this.persistRunning.then(() => this._persistOnce());\n }\n\n public async _persistOnce() {\n const meta = await getMeta(this.options);\n\n await Promise.all(\n Object\n .entries(this.database.collections)\n .map(async ([collectionName, collection]) => {\n const primaryKey = collection.schema.primaryPath;\n const processedDocuments: Set = new Set();\n\n await this.database.requestIdlePromise();\n\n if (!meta.collectionStates[collectionName]) {\n meta.collectionStates[collectionName] = {};\n }\n let lastCheckpoint = meta.collectionStates[collectionName].checkpoint;\n\n let hasMore = true;\n while (hasMore && !this.isStopped) {\n await this.database.requestIdlePromise();\n const changesResult = await getChangedDocumentsSince(\n collection.storageInstance,\n this.options.batchSize ? this.options.batchSize : 0,\n lastCheckpoint\n );\n lastCheckpoint = changesResult.documents.length > 0 ? 
changesResult.checkpoint : lastCheckpoint;\n meta.collectionStates[collectionName].checkpoint = lastCheckpoint;\n\n const docIds: string[] = changesResult.documents\n .map(doc => doc[primaryKey])\n .filter(id => {\n if (\n processedDocuments.has(id)\n ) {\n return false;\n } else {\n processedDocuments.add(id);\n return true;\n }\n })\n .filter((elem, pos, arr) => arr.indexOf(elem) === pos); // unique\n await this.database.requestIdlePromise();\n\n const docs: Map = await collection.findByIds(docIds).exec();\n if (docs.size === 0) {\n hasMore = false;\n continue;\n }\n await Promise.all(\n Array\n .from(docs.values())\n .map(async (doc) => {\n const writtenFiles = await backupSingleDocument(doc, this.options);\n this.internalWriteEvents$.next({\n collectionName: collection.name,\n documentId: doc.primary,\n files: writtenFiles,\n deleted: false\n });\n })\n );\n // handle deleted documents\n await Promise.all(\n docIds\n .filter(docId => !docs.has(docId))\n .map(async (docId) => {\n await deleteFolder(documentFolder(this.options, docId));\n this.internalWriteEvents$.next({\n collectionName: collection.name,\n documentId: docId,\n files: [],\n deleted: true\n });\n })\n );\n }\n meta.collectionStates[collectionName].checkpoint = lastCheckpoint;\n await setMeta(this.options, meta);\n })\n );\n\n if (!this.initialReplicationDone$.getValue()) {\n this.initialReplicationDone$.next(true);\n }\n }\n\n public watchForChanges() {\n const collections: RxCollection[] = Object.values(this.database.collections);\n collections.forEach(collection => {\n const changes$ = collection.storageInstance.changeStream();\n const sub = changes$.subscribe(() => {\n this.persistOnce();\n });\n this.subs.push(sub);\n });\n }\n\n /**\n * Returns a promise that resolves when the initial backup is done\n * and the filesystem is in sync with the database state\n */\n public awaitInitialBackup(): Promise {\n return firstValueFrom(\n this.initialReplicationDone$.pipe(\n filter(v => !!v),\n map(() 
=> true)\n )\n );\n }\n\n cancel(): Promise {\n if (this.isStopped) {\n return PROMISE_RESOLVE_FALSE;\n }\n this.isStopped = true;\n this.subs.forEach(sub => sub.unsubscribe());\n return PROMISE_RESOLVE_TRUE;\n }\n}\n\n\nexport function backup(\n this: RxDatabase,\n options: BackupOptions\n): RxBackupState {\n const backupState = new RxBackupState(this, options);\n backupState.persistOnce();\n\n if (options.live) {\n backupState.watchForChanges();\n }\n\n return backupState;\n}\n\nexport * from './file-util.ts';\nexport const RxDBBackupPlugin: RxPlugin = {\n name: 'backup',\n rxdb: true,\n prototypes: {\n RxDatabase(proto: any) {\n proto.backup = backup;\n }\n },\n hooks: {\n preDestroyRxDatabase: {\n after: function preDestroyRxDatabase(db: RxDatabase) {\n const states = BACKUP_STATES_BY_DB.get(db);\n if (states) {\n states.forEach(state => state.cancel());\n }\n }\n }\n }\n};\n"],"mappings":"AAAA,OAAO,KAAKA,IAAI,MAAM,WAAW;AACjC,SACIC,eAAe,EACfC,cAAc,EAEdC,OAAO,QAEJ,MAAM;AACb,SACIC,MAAM,EACNC,GAAG,QACA,MAAM;AASb,SACIC,kBAAkB,EAClBC,qBAAqB,EACrBC,oBAAoB,EACpBC,oBAAoB,QACjB,8BAA8B;AACrC,SACIC,WAAW,EACXC,YAAY,EACZC,cAAc,EACdC,kBAAkB,EAClBC,OAAO,EACPC,cAAc,EACdC,OAAO,EACPC,eAAe,EACfC,WAAW,QACR,gBAAgB;AACvB,SAASC,wBAAwB,QAAQ,4BAA4B;;AAGrE;AACA;AACA;AACA;AACA,OAAO,eAAeC,oBAAoBA,CACtCC,UAAgC,EAChCC,OAAsB,EACL;EACjB,IAAMC,IAAI,GAAGF,UAAU,CAACG,MAAM,CAAC,IAAI,CAAC;EACpC,IAAMC,YAAsB,GAAG,EAAE;EAEjC,IAAMC,SAAS,GAAGd,cAAc,CAACU,OAAO,EAAED,UAAU,CAACM,OAAO,CAAC;EAC7D,MAAMjB,WAAW,CAACgB,SAAS,CAAC;EAE5B,IAAME,YAAY,GAAG5B,IAAI,CAAC6B,IAAI,CAC1BH,SAAS,EACT,eACJ,CAAC;EACD,MAAMT,eAAe,CAACW,YAAY,EAAEL,IAAI,CAAC;EACzCE,YAAY,CAACK,IAAI,CAACF,YAAY,CAAC;EAE/B,IAAIN,OAAO,CAACS,WAAW,EAAE;IACrB,IAAMC,iBAAiB,GAAGhC,IAAI,CAAC6B,IAAI,CAC/BH,SAAS,EACT,aACJ,CAAC;IACDb,kBAAkB,CAACmB,iBAAiB,CAAC;IACrC,IAAMD,WAAW,GAAIV,UAAU,CAAgBY,cAAc,CAAC,CAAC;IAC/D,MAAMC,OAAO,CAACC,GAAG,CACbJ,WAAW,CACN1B,GAAG,CAAC,MAAO+B,UAAU,IAAK;MACvB,IAAMC,OAAO,GAAG,MAAMD,UAAU,CAACE,OAAO,CAAC,CAAC;MAC1C,IAAMC,sBAAsB,GAAGvC,IAAI
,CAAC6B,IAAI,CACpCG,iBAAiB,EACjBI,UAAU,CAACI,EACf,CAAC;MACD,MAAMtB,WAAW,CAACqB,sBAAsB,EAAEF,OAAO,CAAC;MAClDZ,YAAY,CAACK,IAAI,CAACS,sBAAsB,CAAC;IAC7C,CAAC,CACT,CAAC;EACL;EAEA,OAAOd,YAAY;AACvB;AAEA,IAAMgB,mBAAyD,GAAG,IAAIC,OAAO,CAAC,CAAC;AAC/E,SAASC,iBAAiBA,CAACC,EAAc,EAAEC,KAAoB,EAAE;EAC7D,IAAMC,EAAE,GAAGxC,kBAAkB,CACzBmC,mBAAmB,EACnBG,EAAE,EACF,MAAM,EACV,CAAC;EACDE,EAAE,CAAChB,IAAI,CAACe,KAAK,CAAC;AAClB;AAEA,WAAaE,aAAa;EAStB,SAAAA,cACoBC,QAAoB,EACpB1B,OAAsB,EACxC;IAAA,KAXK2B,SAAS,GAAY,KAAK;IAAA,KACzBC,IAAI,GAAmB,EAAE;IAAA,KACzBC,cAAc,GAAkB1C,oBAAoB;IAAA,KACpD2C,uBAAuB,GAA6B,IAAInD,eAAe,CAAC,KAAY,CAAC;IAAA,KAE5EoD,oBAAoB,GAAgC,IAAIlD,OAAO,CAAC,CAAC;IAAA,KAClEmD,YAAY,GAAmC,IAAI,CAACD,oBAAoB,CAACE,YAAY,CAAC,CAAC;IAAA,KAGnFP,QAAoB,GAApBA,QAAoB;IAAA,KACpB1B,OAAsB,GAAtBA,OAAsB;IAEtC,IAAI,CAAC,IAAI,CAACA,OAAO,CAACkC,SAAS,EAAE;MACzB,IAAI,CAAClC,OAAO,CAACkC,SAAS,GAAG,EAAE;IAC/B;IACAb,iBAAiB,CAACK,QAAQ,EAAE,IAAI,CAAC;IACjCjC,cAAc,CAACiC,QAAQ,EAAE1B,OAAO,CAAC;EACrC;;EAEA;AACJ;AACA;AACA;AACA;AACA;AACA;EANI,IAAAmC,MAAA,GAAAV,aAAA,CAAAW,SAAA;EAAAD,MAAA,CAOOE,WAAW,GAAlB,SAAAA,YAAA,EAAqB;IACjB,OAAO,IAAI,CAACR,cAAc,GAAG,IAAI,CAACA,cAAc,CAACS,IAAI,CAAC,MAAM,IAAI,CAACC,YAAY,CAAC,CAAC,CAAC;EACpF,CAAC;EAAAJ,MAAA,CAEYI,YAAY,GAAzB,eAAAA,aAAA,EAA4B;IAAA,IAAAC,KAAA;IACxB,IAAMC,IAAI,GAAG,MAAMjD,OAAO,CAAC,IAAI,CAACQ,OAAO,CAAC;IAExC,MAAMY,OAAO,CAACC,GAAG,CACb6B,MAAM,CACDC,OAAO,CAAC,IAAI,CAACjB,QAAQ,CAACkB,WAAW,CAAC,CAClC7D,GAAG,CAAC,OAAO,CAAC8D,cAAc,EAAEC,UAAU,CAAC,KAAK;MACzC,IAAMC,UAAU,GAAGD,UAAU,CAACE,MAAM,CAACC,WAAW;MAChD,IAAMC,kBAA+B,GAAG,IAAIC,GAAG,CAAC,CAAC;MAEjD,MAAM,IAAI,CAACzB,QAAQ,CAAC0B,kBAAkB,CAAC,CAAC;MAExC,IAAI,CAACX,IAAI,CAACY,gBAAgB,CAACR,cAAc,CAAC,EAAE;QACxCJ,IAAI,CAACY,gBAAgB,CAACR,cAAc,CAAC,GAAG,CAAC,CAAC;MAC9C;MACA,IAAIS,cAAc,GAAGb,IAAI,CAACY,gBAAgB,CAACR,cAAc,CAAC,CAACU,UAAU;MAErE,IAAIC,OAAO,GAAG,IAAI;MAAC,IAAAC,KAAA,kBAAAA,CAAA,EACgB;QAC/B,MAAMjB,KAAI,CAACd,QAAQ,CAAC0B,kBAAkB,CAAC,CAAC;QACxC,IAAMM,aAAa,GAAG,MAAM7D,wBAAwB,CAChDiD,UAAU,CAACa,eAAe,EAC1BnB,KAAI,CAACxC,OAAO,CA
ACkC,SAAS,GAAGM,KAAI,CAACxC,OAAO,CAACkC,SAAS,GAAG,CAAC,EACnDoB,cACJ,CAAC;QACDA,cAAc,GAAGI,aAAa,CAACE,SAAS,CAACC,MAAM,GAAG,CAAC,GAAGH,aAAa,CAACH,UAAU,GAAGD,cAAc;QAC/Fb,IAAI,CAACY,gBAAgB,CAACR,cAAc,CAAC,CAACU,UAAU,GAAGD,cAAc;QAEjE,IAAMQ,MAAgB,GAAGJ,aAAa,CAACE,SAAS,CAC3C7E,GAAG,CAACgF,GAAG,IAAIA,GAAG,CAAChB,UAAU,CAAC,CAAC,CAC3BjE,MAAM,CAACoC,EAAE,IAAI;UACV,IACIgC,kBAAkB,CAACc,GAAG,CAAC9C,EAAE,CAAC,EAC5B;YACE,OAAO,KAAK;UAChB,CAAC,MAAM;YACHgC,kBAAkB,CAACe,GAAG,CAAC/C,EAAE,CAAC;YAC1B,OAAO,IAAI;UACf;QACJ,CAAC,CAAC,CACDpC,MAAM,CAAC,CAACoF,IAAI,EAAEC,GAAG,EAAEC,GAAG,KAAKA,GAAG,CAACC,OAAO,CAACH,IAAI,CAAC,KAAKC,GAAG,CAAC,CAAC,CAAC;QAC5D,MAAM3B,KAAI,CAACd,QAAQ,CAAC0B,kBAAkB,CAAC,CAAC;QAExC,IAAMkB,IAA6B,GAAG,MAAMxB,UAAU,CAACyB,SAAS,CAACT,MAAM,CAAC,CAACU,IAAI,CAAC,CAAC;QAC/E,IAAIF,IAAI,CAACG,IAAI,KAAK,CAAC,EAAE;UACjBjB,OAAO,GAAG,KAAK;UAAC;QAEpB;QACA,MAAM5C,OAAO,CAACC,GAAG,CACb6D,KAAK,CACAC,IAAI,CAACL,IAAI,CAACM,MAAM,CAAC,CAAC,CAAC,CACnB7F,GAAG,CAAC,MAAOgF,GAAG,IAAK;UAChB,IAAM5D,YAAY,GAAG,MAAML,oBAAoB,CAACiE,GAAG,EAAEvB,KAAI,CAACxC,OAAO,CAAC;UAClEwC,KAAI,CAACT,oBAAoB,CAAC8C,IAAI,CAAC;YAC3BhC,cAAc,EAAEC,UAAU,CAACgC,IAAI;YAC/BC,UAAU,EAAEhB,GAAG,CAAC1D,OAAO;YACvB2E,KAAK,EAAE7E,YAAY;YACnB8E,OAAO,EAAE;UACb,CAAC,CAAC;QACN,CAAC,CACT,CAAC;QACD;QACA,MAAMrE,OAAO,CAACC,GAAG,CACbiD,MAAM,CACDhF,MAAM,CAACoG,KAAK,IAAI,CAACZ,IAAI,CAACN,GAAG,CAACkB,KAAK,CAAC,CAAC,CACjCnG,GAAG,CAAC,MAAOmG,KAAK,IAAK;UAClB,MAAM7F,YAAY,CAACC,cAAc,CAACkD,KAAI,CAACxC,OAAO,EAAEkF,KAAK,CAAC,CAAC;UACvD1C,KAAI,CAACT,oBAAoB,CAAC8C,IAAI,CAAC;YAC3BhC,cAAc,EAAEC,UAAU,CAACgC,IAAI;YAC/BC,UAAU,EAAEG,KAAK;YACjBF,KAAK,EAAE,EAAE;YACTC,OAAO,EAAE;UACb,CAAC,CAAC;QACN,CAAC,CACT,CAAC;MACL,CAAC;MAzDD,OAAOzB,OAAO,IAAI,CAAC,IAAI,CAAC7B,SAAS;QAAA,UAAA8B,KAAA,IA4BzB;MAAS;MA8BjBhB,IAAI,CAACY,gBAAgB,CAACR,cAAc,CAAC,CAACU,UAAU,GAAGD,cAAc;MACjE,MAAM5D,OAAO,CAAC,IAAI,CAACM,OAAO,EAAEyC,IAAI,CAAC;IACrC,CAAC,CACT,CAAC;IAED,IAAI,CAAC,IAAI,CAACX,uBAAuB,CAACqD,QAAQ,CAAC,CAAC,EAAE;MAC1C,IAAI,CAACrD,uBAAuB,CAAC+C,IAAI,CAAC,IAAI,CAAC;IAC3C;EACJ,CAAC;EAAA1C,
MAAA,CAEMiD,eAAe,GAAtB,SAAAA,gBAAA,EAAyB;IACrB,IAAMxC,WAA2B,GAAGF,MAAM,CAACkC,MAAM,CAAC,IAAI,CAAClD,QAAQ,CAACkB,WAAW,CAAC;IAC5EA,WAAW,CAACyC,OAAO,CAACvC,UAAU,IAAI;MAC9B,IAAMwC,QAAQ,GAAGxC,UAAU,CAACa,eAAe,CAAC4B,YAAY,CAAC,CAAC;MAC1D,IAAMC,GAAG,GAAGF,QAAQ,CAACG,SAAS,CAAC,MAAM;QACjC,IAAI,CAACpD,WAAW,CAAC,CAAC;MACtB,CAAC,CAAC;MACF,IAAI,CAACT,IAAI,CAACpB,IAAI,CAACgF,GAAG,CAAC;IACvB,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA,KAHI;EAAArD,MAAA,CAIOuD,kBAAkB,GAAzB,SAAAA,mBAAA,EAA8C;IAC1C,OAAO9G,cAAc,CACjB,IAAI,CAACkD,uBAAuB,CAAC6D,IAAI,CAC7B7G,MAAM,CAAC8G,CAAC,IAAI,CAAC,CAACA,CAAC,CAAC,EAChB7G,GAAG,CAAC,MAAM,IAAI,CAClB,CACJ,CAAC;EACL,CAAC;EAAAoD,MAAA,CAED0D,MAAM,GAAN,SAAAA,OAAA,EAA2B;IACvB,IAAI,IAAI,CAAClE,SAAS,EAAE;MAChB,OAAO1C,qBAAqB;IAChC;IACA,IAAI,CAAC0C,SAAS,GAAG,IAAI;IACrB,IAAI,CAACC,IAAI,CAACyD,OAAO,CAACG,GAAG,IAAIA,GAAG,CAACM,WAAW,CAAC,CAAC,CAAC;IAC3C,OAAO5G,oBAAoB;EAC/B,CAAC;EAAA,OAAAuC,aAAA;AAAA;AAIL,OAAO,SAASsE,MAAMA,CAElB/F,OAAsB,EACT;EACb,IAAMgG,WAAW,GAAG,IAAIvE,aAAa,CAAC,IAAI,EAAEzB,OAAO,CAAC;EACpDgG,WAAW,CAAC3D,WAAW,CAAC,CAAC;EAEzB,IAAIrC,OAAO,CAACiG,IAAI,EAAE;IACdD,WAAW,CAACZ,eAAe,CAAC,CAAC;EACjC;EAEA,OAAOY,WAAW;AACtB;AAEA,cAAc,gBAAgB;AAC9B,OAAO,IAAME,gBAA0B,GAAG;EACtCpB,IAAI,EAAE,QAAQ;EACdqB,IAAI,EAAE,IAAI;EACVC,UAAU,EAAE;IACRC,UAAUA,CAACC,KAAU,EAAE;MACnBA,KAAK,CAACP,MAAM,GAAGA,MAAM;IACzB;EACJ,CAAC;EACDQ,KAAK,EAAE;IACHC,oBAAoB,EAAE;MAClBC,KAAK,EAAE,SAASD,oBAAoBA,CAAClF,EAAc,EAAE;QACjD,IAAMoF,MAAM,GAAGvF,mBAAmB,CAACwF,GAAG,CAACrF,EAAE,CAAC;QAC1C,IAAIoF,MAAM,EAAE;UACRA,MAAM,CAACrB,OAAO,CAAC9D,KAAK,IAAIA,KAAK,CAACsE,MAAM,CAAC,CAAC,CAAC;QAC3C;MACJ;IACJ;EACJ;AACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/cleanup/cleanup-helper.js b/dist/esm/plugins/cleanup/cleanup-helper.js deleted file mode 100644 index 3cb02c290dc..00000000000 --- a/dist/esm/plugins/cleanup/cleanup-helper.js +++ /dev/null @@ -1,11 +0,0 @@ -export var DEFAULT_CLEANUP_POLICY = { - minimumDeletedTime: 1000 * 60 * 60 * 24 * 31, - // one month - minimumCollectionAge: 
1000 * 60, - // 60 seconds - runEach: 1000 * 60 * 5, - // 5 minutes - awaitReplicationsInSync: true, - waitForLeadership: true -}; -//# sourceMappingURL=cleanup-helper.js.map \ No newline at end of file diff --git a/dist/esm/plugins/cleanup/cleanup-helper.js.map b/dist/esm/plugins/cleanup/cleanup-helper.js.map deleted file mode 100644 index 142d258b26a..00000000000 --- a/dist/esm/plugins/cleanup/cleanup-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"cleanup-helper.js","names":["DEFAULT_CLEANUP_POLICY","minimumDeletedTime","minimumCollectionAge","runEach","awaitReplicationsInSync","waitForLeadership"],"sources":["../../../../src/plugins/cleanup/cleanup-helper.ts"],"sourcesContent":["import type {\n RxCleanupPolicy\n} from '../../types/index.d.ts';\n\nexport const DEFAULT_CLEANUP_POLICY: RxCleanupPolicy = {\n minimumDeletedTime: 1000 * 60 * 60 * 24 * 31, // one month\n minimumCollectionAge: 1000 * 60, // 60 seconds\n runEach: 1000 * 60 * 5, // 5 minutes\n awaitReplicationsInSync: true,\n waitForLeadership: true\n};\n"],"mappings":"AAIA,OAAO,IAAMA,sBAAuC,GAAG;EACnDC,kBAAkB,EAAE,IAAI,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE;EAAE;EAC9CC,oBAAoB,EAAE,IAAI,GAAG,EAAE;EAAE;EACjCC,OAAO,EAAE,IAAI,GAAG,EAAE,GAAG,CAAC;EAAE;EACxBC,uBAAuB,EAAE,IAAI;EAC7BC,iBAAiB,EAAE;AACvB,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/cleanup/cleanup-state.js b/dist/esm/plugins/cleanup/cleanup-state.js deleted file mode 100644 index 6f07c7a13d2..00000000000 --- a/dist/esm/plugins/cleanup/cleanup-state.js +++ /dev/null @@ -1,73 +0,0 @@ -import { PROMISE_RESOLVE_TRUE } from "../../plugins/utils/index.js"; -import { REPLICATION_STATE_BY_COLLECTION } from "../replication/index.js"; -import { DEFAULT_CLEANUP_POLICY } from "./cleanup-helper.js"; -import { initialCleanupWait } from "./cleanup.js"; -var RXSTATE_CLEANUP_QUEUE = PROMISE_RESOLVE_TRUE; -export async function startCleanupForRxState(state) { - var rxCollection = state.collection; - var 
rxDatabase = rxCollection.database; - var cleanupPolicy = Object.assign({}, DEFAULT_CLEANUP_POLICY, rxDatabase.cleanupPolicy ? rxDatabase.cleanupPolicy : {}); - await initialCleanupWait(rxCollection, cleanupPolicy); - if (rxCollection.destroyed) { - return; - } - - // initially cleanup the state - await cleanupRxState(state, cleanupPolicy); - - /** - * Afterwards we listen to writes - * and only re-run the cleanup if there was a write - * to the state. - */ - await runCleanupAfterWrite(state, cleanupPolicy); -} -/** - * Runs the cleanup for a single RxState - */ -export async function cleanupRxState(state, cleanupPolicy) { - var rxCollection = state.collection; - var rxDatabase = rxCollection.database; - - // run cleanup() until it returns true - var isDone = false; - while (!isDone && !rxCollection.destroyed) { - if (cleanupPolicy.awaitReplicationsInSync) { - var replicationStates = REPLICATION_STATE_BY_COLLECTION.get(rxCollection); - if (replicationStates) { - await Promise.all(replicationStates.map(replicationState => { - if (!replicationState.isStopped()) { - return replicationState.awaitInSync(); - } - })); - } - } - if (rxCollection.destroyed) { - return; - } - RXSTATE_CLEANUP_QUEUE = RXSTATE_CLEANUP_QUEUE.then(async () => { - if (rxCollection.destroyed) { - return true; - } - await rxDatabase.requestIdlePromise(); - return state._cleanup(); - }); - isDone = await RXSTATE_CLEANUP_QUEUE; - } -} - -/** - * TODO this is not waiting for writes! - * it just runs on interval. 
- */ -export async function runCleanupAfterWrite(state, cleanupPolicy) { - var rxCollection = state.collection; - while (!rxCollection.destroyed) { - await rxCollection.promiseWait(cleanupPolicy.runEach); - if (rxCollection.destroyed) { - return; - } - await cleanupRxState(state, cleanupPolicy); - } -} -//# sourceMappingURL=cleanup-state.js.map \ No newline at end of file diff --git a/dist/esm/plugins/cleanup/cleanup-state.js.map b/dist/esm/plugins/cleanup/cleanup-state.js.map deleted file mode 100644 index 26fef735553..00000000000 --- a/dist/esm/plugins/cleanup/cleanup-state.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"cleanup-state.js","names":["PROMISE_RESOLVE_TRUE","REPLICATION_STATE_BY_COLLECTION","DEFAULT_CLEANUP_POLICY","initialCleanupWait","RXSTATE_CLEANUP_QUEUE","startCleanupForRxState","state","rxCollection","collection","rxDatabase","database","cleanupPolicy","Object","assign","destroyed","cleanupRxState","runCleanupAfterWrite","isDone","awaitReplicationsInSync","replicationStates","get","Promise","all","map","replicationState","isStopped","awaitInSync","then","requestIdlePromise","_cleanup","promiseWait","runEach"],"sources":["../../../../src/plugins/cleanup/cleanup-state.ts"],"sourcesContent":["import type { RxCleanupPolicy, RxCollection, RxState } from '../../types/index.d.ts';\nimport { PROMISE_RESOLVE_TRUE } from '../../plugins/utils/index.ts';\nimport { REPLICATION_STATE_BY_COLLECTION } from '../replication/index.ts';\nimport { DEFAULT_CLEANUP_POLICY } from './cleanup-helper.ts';\nimport { initialCleanupWait } from './cleanup.ts';\n\nlet RXSTATE_CLEANUP_QUEUE: Promise = PROMISE_RESOLVE_TRUE;\n\nexport async function startCleanupForRxState(state: RxState) {\n const rxCollection = state.collection;\n const rxDatabase = rxCollection.database;\n const cleanupPolicy = Object.assign(\n {},\n DEFAULT_CLEANUP_POLICY,\n rxDatabase.cleanupPolicy ? 
rxDatabase.cleanupPolicy : {}\n );\n\n await initialCleanupWait(rxCollection, cleanupPolicy);\n if (rxCollection.destroyed) {\n return;\n }\n\n // initially cleanup the state\n await cleanupRxState(state, cleanupPolicy);\n\n /**\n * Afterwards we listen to writes\n * and only re-run the cleanup if there was a write\n * to the state.\n */\n await runCleanupAfterWrite(state, cleanupPolicy);\n}\n/**\n * Runs the cleanup for a single RxState\n */\nexport async function cleanupRxState(\n state: RxState,\n cleanupPolicy: RxCleanupPolicy\n) {\n const rxCollection = state.collection;\n const rxDatabase = rxCollection.database;\n\n // run cleanup() until it returns true\n let isDone = false;\n while (!isDone && !rxCollection.destroyed) {\n if (cleanupPolicy.awaitReplicationsInSync) {\n const replicationStates = REPLICATION_STATE_BY_COLLECTION.get(rxCollection);\n if (replicationStates) {\n await Promise.all(\n replicationStates.map(replicationState => {\n if (!replicationState.isStopped()) {\n return replicationState.awaitInSync();\n }\n })\n );\n }\n }\n if (rxCollection.destroyed) {\n return;\n }\n RXSTATE_CLEANUP_QUEUE = RXSTATE_CLEANUP_QUEUE\n .then(async () => {\n if (rxCollection.destroyed) {\n return true;\n }\n await rxDatabase.requestIdlePromise();\n return state._cleanup();\n });\n isDone = await RXSTATE_CLEANUP_QUEUE;\n }\n}\n\n/**\n * TODO this is not waiting for writes!\n * it just runs on interval.\n */\nexport async function runCleanupAfterWrite(\n state: RxState,\n cleanupPolicy: RxCleanupPolicy\n) {\n const rxCollection = state.collection;\n while (!rxCollection.destroyed) {\n await rxCollection.promiseWait(cleanupPolicy.runEach);\n if (rxCollection.destroyed) {\n return;\n }\n await cleanupRxState(state, cleanupPolicy);\n 
}\n}\n"],"mappings":"AACA,SAASA,oBAAoB,QAAQ,8BAA8B;AACnE,SAASC,+BAA+B,QAAQ,yBAAyB;AACzE,SAASC,sBAAsB,QAAQ,qBAAqB;AAC5D,SAASC,kBAAkB,QAAQ,cAAc;AAEjD,IAAIC,qBAAmC,GAAGJ,oBAAoB;AAE9D,OAAO,eAAeK,sBAAsBA,CAACC,KAAgC,EAAE;EAC3E,IAAMC,YAAY,GAAGD,KAAK,CAACE,UAAU;EACrC,IAAMC,UAAU,GAAGF,YAAY,CAACG,QAAQ;EACxC,IAAMC,aAAa,GAAGC,MAAM,CAACC,MAAM,CAC/B,CAAC,CAAC,EACFX,sBAAsB,EACtBO,UAAU,CAACE,aAAa,GAAGF,UAAU,CAACE,aAAa,GAAG,CAAC,CAC3D,CAAC;EAED,MAAMR,kBAAkB,CAACI,YAAY,EAAEI,aAAa,CAAC;EACrD,IAAIJ,YAAY,CAACO,SAAS,EAAE;IACxB;EACJ;;EAEA;EACA,MAAMC,cAAc,CAACT,KAAK,EAAEK,aAAa,CAAC;;EAE1C;AACJ;AACA;AACA;AACA;EACI,MAAMK,oBAAoB,CAACV,KAAK,EAAEK,aAAa,CAAC;AACpD;AACA;AACA;AACA;AACA,OAAO,eAAeI,cAAcA,CAChCT,KAAgC,EAChCK,aAA8B,EAChC;EACE,IAAMJ,YAAY,GAAGD,KAAK,CAACE,UAAU;EACrC,IAAMC,UAAU,GAAGF,YAAY,CAACG,QAAQ;;EAExC;EACA,IAAIO,MAAM,GAAG,KAAK;EAClB,OAAO,CAACA,MAAM,IAAI,CAACV,YAAY,CAACO,SAAS,EAAE;IACvC,IAAIH,aAAa,CAACO,uBAAuB,EAAE;MACvC,IAAMC,iBAAiB,GAAGlB,+BAA+B,CAACmB,GAAG,CAACb,YAAY,CAAC;MAC3E,IAAIY,iBAAiB,EAAE;QACnB,MAAME,OAAO,CAACC,GAAG,CACbH,iBAAiB,CAACI,GAAG,CAACC,gBAAgB,IAAI;UACtC,IAAI,CAACA,gBAAgB,CAACC,SAAS,CAAC,CAAC,EAAE;YAC/B,OAAOD,gBAAgB,CAACE,WAAW,CAAC,CAAC;UACzC;QACJ,CAAC,CACL,CAAC;MACL;IACJ;IACA,IAAInB,YAAY,CAACO,SAAS,EAAE;MACxB;IACJ;IACAV,qBAAqB,GAAGA,qBAAqB,CACxCuB,IAAI,CAAC,YAAY;MACd,IAAIpB,YAAY,CAACO,SAAS,EAAE;QACxB,OAAO,IAAI;MACf;MACA,MAAML,UAAU,CAACmB,kBAAkB,CAAC,CAAC;MACrC,OAAOtB,KAAK,CAACuB,QAAQ,CAAC,CAAC;IAC3B,CAAC,CAAC;IACNZ,MAAM,GAAG,MAAMb,qBAAqB;EACxC;AACJ;;AAEA;AACA;AACA;AACA;AACA,OAAO,eAAeY,oBAAoBA,CACtCV,KAAgC,EAChCK,aAA8B,EAChC;EACE,IAAMJ,YAAY,GAAGD,KAAK,CAACE,UAAU;EACrC,OAAO,CAACD,YAAY,CAACO,SAAS,EAAE;IAC5B,MAAMP,YAAY,CAACuB,WAAW,CAACnB,aAAa,CAACoB,OAAO,CAAC;IACrD,IAAIxB,YAAY,CAACO,SAAS,EAAE;MACxB;IACJ;IACA,MAAMC,cAAc,CAACT,KAAK,EAAEK,aAAa,CAAC;EAC9C;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/cleanup/cleanup.js b/dist/esm/plugins/cleanup/cleanup.js deleted file mode 100644 index 806ccfc38ca..00000000000 --- 
a/dist/esm/plugins/cleanup/cleanup.js +++ /dev/null @@ -1,92 +0,0 @@ -import { PROMISE_RESOLVE_TRUE } from "../../plugins/utils/index.js"; -import { REPLICATION_STATE_BY_COLLECTION } from "../replication/index.js"; -import { DEFAULT_CLEANUP_POLICY } from "./cleanup-helper.js"; - -/** - * Even on multiple databases, - * the calls to RxStorage().cleanup() - * must never run in parallel. - * The cleanup is a background task which should - * not affect the performance of other, more important tasks. - */ -var RXSTORAGE_CLEANUP_QUEUE = PROMISE_RESOLVE_TRUE; -export async function startCleanupForRxCollection(rxCollection) { - var rxDatabase = rxCollection.database; - var cleanupPolicy = Object.assign({}, DEFAULT_CLEANUP_POLICY, rxDatabase.cleanupPolicy ? rxDatabase.cleanupPolicy : {}); - await initialCleanupWait(rxCollection, cleanupPolicy); - if (rxCollection.destroyed) { - return; - } - - // initially cleanup the collection - await cleanupRxCollection(rxCollection, cleanupPolicy); - - /** - * Afterwards we listen to deletes - * and only re-run the cleanup after - * minimumDeletedTime is reached. - */ - await runCleanupAfterDelete(rxCollection, cleanupPolicy); -} -export async function initialCleanupWait(collection, cleanupPolicy) { - /** - * Wait until minimumDatabaseInstanceAge is reached - * or collection is destroyed. 
- */ - await collection.promiseWait(cleanupPolicy.minimumCollectionAge); - if (collection.destroyed) { - return; - } - if (cleanupPolicy.waitForLeadership) { - await collection.database.waitForLeadership(); - } -} - -/** - * Runs the cleanup for a single RxCollection - */ -export async function cleanupRxCollection(rxCollection, cleanupPolicy) { - var rxDatabase = rxCollection.database; - var storageInstance = rxCollection.storageInstance; - - // run cleanup() until it returns true - var isDone = false; - while (!isDone && !rxCollection.destroyed) { - if (cleanupPolicy.awaitReplicationsInSync) { - var replicationStates = REPLICATION_STATE_BY_COLLECTION.get(rxCollection); - if (replicationStates) { - await Promise.all(replicationStates.map(replicationState => { - if (!replicationState.isStopped()) { - return replicationState.awaitInSync(); - } - })); - } - } - if (rxCollection.destroyed) { - return; - } - RXSTORAGE_CLEANUP_QUEUE = RXSTORAGE_CLEANUP_QUEUE.then(async () => { - if (rxCollection.destroyed) { - return true; - } - await rxDatabase.requestIdlePromise(); - return storageInstance.cleanup(cleanupPolicy.minimumDeletedTime); - }); - isDone = await RXSTORAGE_CLEANUP_QUEUE; - } -} - -/** - * TODO this is not waiting for deletes! - * it just runs on interval. 
- */ -export async function runCleanupAfterDelete(rxCollection, cleanupPolicy) { - while (!rxCollection.destroyed) { - await rxCollection.promiseWait(cleanupPolicy.runEach); - if (rxCollection.destroyed) { - return; - } - await cleanupRxCollection(rxCollection, cleanupPolicy); - } -} -//# sourceMappingURL=cleanup.js.map \ No newline at end of file diff --git a/dist/esm/plugins/cleanup/cleanup.js.map b/dist/esm/plugins/cleanup/cleanup.js.map deleted file mode 100644 index 505fb318a77..00000000000 --- a/dist/esm/plugins/cleanup/cleanup.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"cleanup.js","names":["PROMISE_RESOLVE_TRUE","REPLICATION_STATE_BY_COLLECTION","DEFAULT_CLEANUP_POLICY","RXSTORAGE_CLEANUP_QUEUE","startCleanupForRxCollection","rxCollection","rxDatabase","database","cleanupPolicy","Object","assign","initialCleanupWait","destroyed","cleanupRxCollection","runCleanupAfterDelete","collection","promiseWait","minimumCollectionAge","waitForLeadership","storageInstance","isDone","awaitReplicationsInSync","replicationStates","get","Promise","all","map","replicationState","isStopped","awaitInSync","then","requestIdlePromise","cleanup","minimumDeletedTime","runEach"],"sources":["../../../../src/plugins/cleanup/cleanup.ts"],"sourcesContent":["import type { RxCleanupPolicy, RxCollection } from '../../types/index.d.ts';\nimport { PROMISE_RESOLVE_TRUE } from '../../plugins/utils/index.ts';\nimport { REPLICATION_STATE_BY_COLLECTION } from '../replication/index.ts';\nimport { DEFAULT_CLEANUP_POLICY } from './cleanup-helper.ts';\n\n/**\n * Even on multiple databases,\n * the calls to RxStorage().cleanup()\n * must never run in parallel.\n * The cleanup is a background task which should\n * not affect the performance of other, more important tasks.\n */\nlet RXSTORAGE_CLEANUP_QUEUE: Promise = PROMISE_RESOLVE_TRUE;\n\nexport async function startCleanupForRxCollection(\n rxCollection: RxCollection\n) {\n const rxDatabase = rxCollection.database;\n const cleanupPolicy 
= Object.assign(\n {},\n DEFAULT_CLEANUP_POLICY,\n rxDatabase.cleanupPolicy ? rxDatabase.cleanupPolicy : {}\n );\n\n\n await initialCleanupWait(rxCollection, cleanupPolicy);\n if (rxCollection.destroyed) {\n return;\n }\n\n // initially cleanup the collection\n await cleanupRxCollection(rxCollection, cleanupPolicy);\n\n /**\n * Afterwards we listen to deletes\n * and only re-run the cleanup after\n * minimumDeletedTime is reached.\n */\n await runCleanupAfterDelete(rxCollection, cleanupPolicy);\n}\n\n\nexport async function initialCleanupWait(collection: RxCollection, cleanupPolicy: RxCleanupPolicy) {\n /**\n * Wait until minimumDatabaseInstanceAge is reached\n * or collection is destroyed.\n */\n await collection.promiseWait(cleanupPolicy.minimumCollectionAge);\n if (collection.destroyed) {\n return;\n }\n\n if (cleanupPolicy.waitForLeadership) {\n await collection.database.waitForLeadership();\n }\n}\n\n/**\n * Runs the cleanup for a single RxCollection\n */\nexport async function cleanupRxCollection(\n rxCollection: RxCollection,\n cleanupPolicy: RxCleanupPolicy\n) {\n const rxDatabase = rxCollection.database;\n const storageInstance = rxCollection.storageInstance;\n\n // run cleanup() until it returns true\n let isDone = false;\n while (!isDone && !rxCollection.destroyed) {\n if (cleanupPolicy.awaitReplicationsInSync) {\n const replicationStates = REPLICATION_STATE_BY_COLLECTION.get(rxCollection);\n if (replicationStates) {\n await Promise.all(\n replicationStates.map(replicationState => {\n if (!replicationState.isStopped()) {\n return replicationState.awaitInSync();\n }\n })\n );\n }\n }\n if (rxCollection.destroyed) {\n return;\n }\n RXSTORAGE_CLEANUP_QUEUE = RXSTORAGE_CLEANUP_QUEUE\n .then(async () => {\n if (rxCollection.destroyed) {\n return true;\n }\n await rxDatabase.requestIdlePromise();\n return storageInstance.cleanup(cleanupPolicy.minimumDeletedTime);\n });\n isDone = await RXSTORAGE_CLEANUP_QUEUE;\n }\n}\n\n/**\n * TODO this is not waiting for 
deletes!\n * it just runs on interval.\n */\nexport async function runCleanupAfterDelete(\n rxCollection: RxCollection,\n cleanupPolicy: RxCleanupPolicy\n) {\n while (!rxCollection.destroyed) {\n await rxCollection.promiseWait(cleanupPolicy.runEach);\n if (rxCollection.destroyed) {\n return;\n }\n await cleanupRxCollection(rxCollection, cleanupPolicy);\n }\n}\n"],"mappings":"AACA,SAASA,oBAAoB,QAAQ,8BAA8B;AACnE,SAASC,+BAA+B,QAAQ,yBAAyB;AACzE,SAASC,sBAAsB,QAAQ,qBAAqB;;AAE5D;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAIC,uBAAyC,GAAGH,oBAAoB;AAEpE,OAAO,eAAeI,2BAA2BA,CAC7CC,YAA0B,EAC5B;EACE,IAAMC,UAAU,GAAGD,YAAY,CAACE,QAAQ;EACxC,IAAMC,aAAa,GAAGC,MAAM,CAACC,MAAM,CAC/B,CAAC,CAAC,EACFR,sBAAsB,EACtBI,UAAU,CAACE,aAAa,GAAGF,UAAU,CAACE,aAAa,GAAG,CAAC,CAC3D,CAAC;EAGD,MAAMG,kBAAkB,CAACN,YAAY,EAAEG,aAAa,CAAC;EACrD,IAAIH,YAAY,CAACO,SAAS,EAAE;IACxB;EACJ;;EAEA;EACA,MAAMC,mBAAmB,CAACR,YAAY,EAAEG,aAAa,CAAC;;EAEtD;AACJ;AACA;AACA;AACA;EACI,MAAMM,qBAAqB,CAACT,YAAY,EAAEG,aAAa,CAAC;AAC5D;AAGA,OAAO,eAAeG,kBAAkBA,CAACI,UAAwB,EAAEP,aAA8B,EAAE;EAC/F;AACJ;AACA;AACA;EACI,MAAMO,UAAU,CAACC,WAAW,CAACR,aAAa,CAACS,oBAAoB,CAAC;EAChE,IAAIF,UAAU,CAACH,SAAS,EAAE;IACtB;EACJ;EAEA,IAAIJ,aAAa,CAACU,iBAAiB,EAAE;IACjC,MAAMH,UAAU,CAACR,QAAQ,CAACW,iBAAiB,CAAC,CAAC;EACjD;AACJ;;AAEA;AACA;AACA;AACA,OAAO,eAAeL,mBAAmBA,CACrCR,YAA0B,EAC1BG,aAA8B,EAChC;EACE,IAAMF,UAAU,GAAGD,YAAY,CAACE,QAAQ;EACxC,IAAMY,eAAe,GAAGd,YAAY,CAACc,eAAe;;EAEpD;EACA,IAAIC,MAAM,GAAG,KAAK;EAClB,OAAO,CAACA,MAAM,IAAI,CAACf,YAAY,CAACO,SAAS,EAAE;IACvC,IAAIJ,aAAa,CAACa,uBAAuB,EAAE;MACvC,IAAMC,iBAAiB,GAAGrB,+BAA+B,CAACsB,GAAG,CAAClB,YAAY,CAAC;MAC3E,IAAIiB,iBAAiB,EAAE;QACnB,MAAME,OAAO,CAACC,GAAG,CACbH,iBAAiB,CAACI,GAAG,CAACC,gBAAgB,IAAI;UACtC,IAAI,CAACA,gBAAgB,CAACC,SAAS,CAAC,CAAC,EAAE;YAC/B,OAAOD,gBAAgB,CAACE,WAAW,CAAC,CAAC;UACzC;QACJ,CAAC,CACL,CAAC;MACL;IACJ;IACA,IAAIxB,YAAY,CAACO,SAAS,EAAE;MACxB;IACJ;IACAT,uBAAuB,GAAGA,uBAAuB,CAC5C2B,IAAI,CAAC,YAAY;MACd,IAAIzB,YAAY,CAACO,SAAS,EAAE;QACxB,OAAO,IAAI;MACf;MACA,MAAMN,UAAU,CAACyB,kBAAkB,CAAC,CAAC;MACrC,OAAOZ,eAAe,
CAACa,OAAO,CAACxB,aAAa,CAACyB,kBAAkB,CAAC;IACpE,CAAC,CAAC;IACNb,MAAM,GAAG,MAAMjB,uBAAuB;EAC1C;AACJ;;AAEA;AACA;AACA;AACA;AACA,OAAO,eAAeW,qBAAqBA,CACvCT,YAA0B,EAC1BG,aAA8B,EAChC;EACE,OAAO,CAACH,YAAY,CAACO,SAAS,EAAE;IAC5B,MAAMP,YAAY,CAACW,WAAW,CAACR,aAAa,CAAC0B,OAAO,CAAC;IACrD,IAAI7B,YAAY,CAACO,SAAS,EAAE;MACxB;IACJ;IACA,MAAMC,mBAAmB,CAACR,YAAY,EAAEG,aAAa,CAAC;EAC1D;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/cleanup/index.js b/dist/esm/plugins/cleanup/index.js deleted file mode 100644 index 9c47a6f36b0..00000000000 --- a/dist/esm/plugins/cleanup/index.js +++ /dev/null @@ -1,37 +0,0 @@ -import { DEFAULT_CLEANUP_POLICY } from "./cleanup-helper.js"; -import { startCleanupForRxState } from "./cleanup-state.js"; -import { startCleanupForRxCollection } from "./cleanup.js"; -export var RxDBCleanupPlugin = { - name: 'cleanup', - rxdb: true, - prototypes: { - RxCollection: proto => { - proto.cleanup = async function (minimumDeletedTime) { - var cleanupPolicy = Object.assign({}, DEFAULT_CLEANUP_POLICY, this.database.cleanupPolicy ? 
this.database.cleanupPolicy : {}); - if (typeof minimumDeletedTime === 'undefined') { - minimumDeletedTime = cleanupPolicy.minimumDeletedTime; - } - - // run cleanup() until it returns true - var isDone = false; - while (!isDone && !this.destroyed) { - isDone = await this.storageInstance.cleanup(minimumDeletedTime); - } - }; - } - }, - hooks: { - createRxCollection: { - after: i => { - startCleanupForRxCollection(i.collection); - } - }, - createRxState: { - after: i => { - startCleanupForRxState(i.state); - } - } - } -}; -export * from "./cleanup.js"; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/cleanup/index.js.map b/dist/esm/plugins/cleanup/index.js.map deleted file mode 100644 index 28bb8e5dd17..00000000000 --- a/dist/esm/plugins/cleanup/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["DEFAULT_CLEANUP_POLICY","startCleanupForRxState","startCleanupForRxCollection","RxDBCleanupPlugin","name","rxdb","prototypes","RxCollection","proto","cleanup","minimumDeletedTime","cleanupPolicy","Object","assign","database","isDone","destroyed","storageInstance","hooks","createRxCollection","after","i","collection","createRxState","state"],"sources":["../../../../src/plugins/cleanup/index.ts"],"sourcesContent":["import type {\n RxCollection,\n RxPlugin\n} from '../../types/index.d.ts';\nimport { DEFAULT_CLEANUP_POLICY } from './cleanup-helper.ts';\nimport { startCleanupForRxState } from './cleanup-state.ts';\nimport { startCleanupForRxCollection } from './cleanup.ts';\n\nexport const RxDBCleanupPlugin: RxPlugin = {\n name: 'cleanup',\n rxdb: true,\n prototypes: {\n RxCollection: (proto: any) => {\n proto.cleanup = async function (this: RxCollection, minimumDeletedTime?: number): Promise {\n const cleanupPolicy = Object.assign(\n {},\n DEFAULT_CLEANUP_POLICY,\n this.database.cleanupPolicy ? 
this.database.cleanupPolicy : {}\n );\n\n if (typeof minimumDeletedTime === 'undefined') {\n minimumDeletedTime = cleanupPolicy.minimumDeletedTime;\n }\n\n // run cleanup() until it returns true\n let isDone = false;\n while (!isDone && !this.destroyed) {\n isDone = await this.storageInstance.cleanup(minimumDeletedTime);\n }\n };\n }\n },\n hooks: {\n createRxCollection: {\n after: (i) => {\n startCleanupForRxCollection(i.collection);\n }\n },\n createRxState: {\n after: (i) => {\n startCleanupForRxState(i.state);\n }\n }\n }\n};\n\nexport * from './cleanup.ts';\n"],"mappings":"AAIA,SAASA,sBAAsB,QAAQ,qBAAqB;AAC5D,SAASC,sBAAsB,QAAQ,oBAAoB;AAC3D,SAASC,2BAA2B,QAAQ,cAAc;AAE1D,OAAO,IAAMC,iBAA2B,GAAG;EACvCC,IAAI,EAAE,SAAS;EACfC,IAAI,EAAE,IAAI;EACVC,UAAU,EAAE;IACRC,YAAY,EAAGC,KAAU,IAAK;MAC1BA,KAAK,CAACC,OAAO,GAAG,gBAAoCC,kBAA2B,EAAiB;QAC5F,IAAMC,aAAa,GAAGC,MAAM,CAACC,MAAM,CAC/B,CAAC,CAAC,EACFb,sBAAsB,EACtB,IAAI,CAACc,QAAQ,CAACH,aAAa,GAAG,IAAI,CAACG,QAAQ,CAACH,aAAa,GAAG,CAAC,CACjE,CAAC;QAED,IAAI,OAAOD,kBAAkB,KAAK,WAAW,EAAE;UAC3CA,kBAAkB,GAAGC,aAAa,CAACD,kBAAkB;QACzD;;QAEA;QACA,IAAIK,MAAM,GAAG,KAAK;QAClB,OAAO,CAACA,MAAM,IAAI,CAAC,IAAI,CAACC,SAAS,EAAE;UAC/BD,MAAM,GAAG,MAAM,IAAI,CAACE,eAAe,CAACR,OAAO,CAACC,kBAAkB,CAAC;QACnE;MACJ,CAAC;IACL;EACJ,CAAC;EACDQ,KAAK,EAAE;IACHC,kBAAkB,EAAE;MAChBC,KAAK,EAAGC,CAAC,IAAK;QACVnB,2BAA2B,CAACmB,CAAC,CAACC,UAAU,CAAC;MAC7C;IACJ,CAAC;IACDC,aAAa,EAAE;MACXH,KAAK,EAAGC,CAAC,IAAK;QACVpB,sBAAsB,CAACoB,CAAC,CAACG,KAAK,CAAC;MACnC;IACJ;EACJ;AACJ,CAAC;AAED,cAAc,cAAc","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/crdt/index.js b/dist/esm/plugins/crdt/index.js deleted file mode 100644 index 9d49d1669e5..00000000000 --- a/dist/esm/plugins/crdt/index.js +++ /dev/null @@ -1,381 +0,0 @@ -import { newRxError } from "../../rx-error.js"; -import { clone, deepEqual, ensureNotFalsy, getProperty, now, objectPathMonad, setProperty, toArray } from "../../plugins/utils/index.js"; -import { getQueryMatcher, overwritable } from 
"../../index.js"; -import { mingoUpdater } from "../update/mingo-updater.js"; -export async function updateCRDT(entry) { - entry = overwritable.deepFreezeWhenDevMode(entry); - var jsonSchema = this.collection.schema.jsonSchema; - if (!jsonSchema.crdt) { - throw newRxError('CRDT1', { - schema: jsonSchema, - queryObj: entry - }); - } - var crdtOptions = ensureNotFalsy(jsonSchema.crdt); - var storageToken = await this.collection.database.storageToken; - return this.incrementalModify(async docData => { - var crdtDocField = clone(getProperty(docData, crdtOptions.field)); - var operation = { - body: toArray(entry), - creator: storageToken, - time: now() - }; - - /** - * A new write will ALWAYS be an operation in the last - * array which was non existing before. - */ - var lastAr = [operation]; - crdtDocField.operations.push(lastAr); - crdtDocField.hash = await hashCRDTOperations(this.collection.database.hashFunction, crdtDocField); - docData = runOperationOnDocument(this.collection.schema.jsonSchema, docData, operation); - setProperty(docData, crdtOptions.field, crdtDocField); - return docData; - }, RX_CRDT_CONTEXT); -} -export async function insertCRDT(entry) { - entry = overwritable.deepFreezeWhenDevMode(entry); - var jsonSchema = this.schema.jsonSchema; - if (!jsonSchema.crdt) { - throw newRxError('CRDT1', { - schema: jsonSchema, - queryObj: entry - }); - } - var crdtOptions = ensureNotFalsy(jsonSchema.crdt); - var storageToken = await this.database.storageToken; - var operation = { - body: Array.isArray(entry) ? 
entry : [entry], - creator: storageToken, - time: now() - }; - var insertData = {}; - insertData = runOperationOnDocument(this.schema.jsonSchema, insertData, operation); - var crdtDocField = { - operations: [], - hash: '' - }; - setProperty(insertData, crdtOptions.field, crdtDocField); - var lastAr = [operation]; - crdtDocField.operations.push(lastAr); - crdtDocField.hash = await hashCRDTOperations(this.database.hashFunction, crdtDocField); - var result = await this.insert(insertData).catch(async err => { - if (err.code === 'CONFLICT') { - // was a conflict, update document instead of inserting - var doc = await this.findOne(err.parameters.id).exec(true); - return doc.updateCRDT(entry); - } else { - throw err; - } - }); - return result; -} -export function sortOperationComparator(a, b) { - return a.creator > b.creator ? 1 : -1; -} -function runOperationOnDocument(schema, docData, operation) { - var entryParts = operation.body; - entryParts.forEach(entryPart => { - var isMatching; - if (entryPart.selector) { - var query = { - selector: ensureNotFalsy(entryPart.selector), - sort: [], - skip: 0 - }; - var matcher = getQueryMatcher(schema, query); - isMatching = matcher(docData); - } else { - isMatching = true; - } - if (isMatching) { - if (entryPart.ifMatch) { - docData = mingoUpdater(docData, entryPart.ifMatch); - } - } else { - if (entryPart.ifNotMatch) { - docData = mingoUpdater(docData, entryPart.ifNotMatch); - } - } - }); - return docData; -} -export async function hashCRDTOperations(hashFunction, crdts) { - var hashObj = crdts.operations.map(operations => { - return operations.map(op => op.creator); - }); - var hash = await hashFunction(JSON.stringify(hashObj)); - return hash; -} -export function getCRDTSchemaPart() { - var operationSchema = { - type: 'object', - properties: { - body: { - type: 'array', - items: { - type: 'object', - properties: { - selector: { - type: 'object' - }, - ifMatch: { - type: 'object' - }, - ifNotMatch: { - type: 'object' - } - }, - 
additionalProperties: false - }, - minItems: 1 - }, - creator: { - type: 'string' - }, - time: { - type: 'number', - minimum: 1, - maximum: 1000000000000000, - multipleOf: 0.01 - } - }, - additionalProperties: false, - required: ['body', 'creator', 'time'] - }; - return { - type: 'object', - properties: { - operations: { - type: 'array', - items: { - type: 'array', - items: operationSchema - } - }, - hash: { - type: 'string', - // set a minLength to not accidentally store an empty string - minLength: 2 - } - }, - additionalProperties: false, - required: ['operations', 'hash'] - }; -} -export async function mergeCRDTFields(hashFunction, crdtsA, crdtsB) { - // the value with most operations must be A to - // ensure we not miss out rows when iterating over both fields. - if (crdtsA.operations.length < crdtsB.operations.length) { - [crdtsA, crdtsB] = [crdtsB, crdtsA]; - } - var ret = { - operations: [], - hash: '' - }; - crdtsA.operations.forEach((row, index) => { - var mergedOps = []; - var ids = new Set(); // used to deduplicate - - row.forEach(op => { - ids.add(op.creator); - mergedOps.push(op); - }); - if (crdtsB.operations[index]) { - crdtsB.operations[index].forEach(op => { - if (!ids.has(op.creator)) { - mergedOps.push(op); - } - }); - } - mergedOps = mergedOps.sort(sortOperationComparator); - ret.operations[index] = mergedOps; - }); - ret.hash = await hashCRDTOperations(hashFunction, ret); - return ret; -} -export function rebuildFromCRDT(schema, docData, crdts) { - var base = { - _deleted: false - }; - setProperty(base, ensureNotFalsy(schema.crdt).field, crdts); - crdts.operations.forEach(operations => { - operations.forEach(op => { - base = runOperationOnDocument(schema, base, op); - }); - }); - return base; -} -export function getCRDTConflictHandler(hashFunction, schema) { - var crdtOptions = ensureNotFalsy(schema.crdt); - var crdtField = crdtOptions.field; - var getCRDTValue = objectPathMonad(crdtField); - var conflictHandler = async (i, _context) => { - 
var newDocCrdt = getCRDTValue(i.newDocumentState); - var masterDocCrdt = getCRDTValue(i.realMasterState); - if (newDocCrdt.hash === masterDocCrdt.hash) { - return Promise.resolve({ - isEqual: true - }); - } - var mergedCrdt = await mergeCRDTFields(hashFunction, newDocCrdt, masterDocCrdt); - var mergedDoc = rebuildFromCRDT(schema, i.newDocumentState, mergedCrdt); - return Promise.resolve({ - isEqual: false, - documentData: mergedDoc - }); - }; - return conflictHandler; -} -export var RX_CRDT_CONTEXT = 'rx-crdt'; -export var RxDBcrdtPlugin = { - name: 'crdt', - rxdb: true, - prototypes: { - RxDocument: proto => { - proto.updateCRDT = updateCRDT; - var oldRemove = proto.remove; - proto.remove = function () { - if (!this.collection.schema.jsonSchema.crdt) { - return oldRemove.bind(this)(); - } - return this.updateCRDT({ - ifMatch: { - $set: { - _deleted: true - } - } - }); - }; - var oldincrementalPatch = proto.incrementalPatch; - proto.incrementalPatch = function (patch) { - if (!this.collection.schema.jsonSchema.crdt) { - return oldincrementalPatch.bind(this)(patch); - } - return this.updateCRDT({ - ifMatch: { - $set: patch - } - }); - }; - var oldincrementalModify = proto.incrementalModify; - proto.incrementalModify = function (fn, context) { - if (!this.collection.schema.jsonSchema.crdt) { - return oldincrementalModify.bind(this)(fn); - } - if (context === RX_CRDT_CONTEXT) { - return oldincrementalModify.bind(this)(fn); - } else { - throw newRxError('CRDT2', { - id: this.primary, - args: { - context - } - }); - } - }; - }, - RxCollection: proto => { - proto.insertCRDT = insertCRDT; - } - }, - overwritable: {}, - hooks: { - preCreateRxCollection: { - after: data => { - if (!data.schema.crdt) { - return; - } - if (data.conflictHandler) { - throw newRxError('CRDT3', { - collection: data.name, - schema: data.schema - }); - } - data.conflictHandler = getCRDTConflictHandler(data.database.hashFunction, data.schema); - } - }, - createRxCollection: { - after: ({ - 
collection - }) => { - if (!collection.schema.jsonSchema.crdt) { - return; - } - var crdtOptions = ensureNotFalsy(collection.schema.jsonSchema.crdt); - var crdtField = crdtOptions.field; - var getCrdt = objectPathMonad(crdtOptions.field); - - /** - * In dev-mode we have to ensure that all document writes - * have the correct crdt state so that nothing is missed out - * or could accidentally do non-crdt writes to the document. - */ - if (overwritable.isDevMode()) { - var bulkWriteBefore = collection.storageInstance.bulkWrite.bind(collection.storageInstance); - collection.storageInstance.bulkWrite = async function (writes, context) { - await Promise.all(writes.map(async write => { - var newDocState = clone(write.document); - var crdts = getCrdt(newDocState); - var rebuild = rebuildFromCRDT(collection.schema.jsonSchema, newDocState, crdts); - function docWithoutMeta(doc) { - var ret = {}; - Object.entries(doc).forEach(([k, v]) => { - if (!k.startsWith('_') && typeof v !== 'undefined') { - ret[k] = v; - } - }); - return ret; - } - if (!deepEqual(docWithoutMeta(newDocState), docWithoutMeta(rebuild))) { - throw newRxError('SNH', { - document: newDocState - }); - } - var recalculatedHash = await hashCRDTOperations(collection.database.hashFunction, crdts); - if (crdts.hash !== recalculatedHash) { - throw newRxError('SNH', { - document: newDocState, - args: { - hash: crdts.hash, - recalculatedHash - } - }); - } - })); - return bulkWriteBefore(writes, context); - }; - } - var bulkInsertBefore = collection.bulkInsert.bind(collection); - collection.bulkInsert = async function (docsData) { - var storageToken = await collection.database.storageToken; - var useDocsData = await Promise.all(docsData.map(async docData => { - var setMe = {}; - Object.entries(docData).forEach(([key, value]) => { - if (!key.startsWith('_') && key !== crdtField) { - setMe[key] = value; - } - }); - var crdtOperations = { - operations: [[{ - creator: storageToken, - body: [{ - ifMatch: { - $set: setMe - } 
- }], - time: now() - }]], - hash: '' - }; - crdtOperations.hash = await hashCRDTOperations(collection.database.hashFunction, crdtOperations); - setProperty(docData, crdtOptions.field, crdtOperations); - return docData; - })); - return bulkInsertBefore(useDocsData); - }; - } - } - } -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/crdt/index.js.map b/dist/esm/plugins/crdt/index.js.map deleted file mode 100644 index 837e23843b6..00000000000 --- a/dist/esm/plugins/crdt/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["newRxError","clone","deepEqual","ensureNotFalsy","getProperty","now","objectPathMonad","setProperty","toArray","getQueryMatcher","overwritable","mingoUpdater","updateCRDT","entry","deepFreezeWhenDevMode","jsonSchema","collection","schema","crdt","queryObj","crdtOptions","storageToken","database","incrementalModify","docData","crdtDocField","field","operation","body","creator","time","lastAr","operations","push","hash","hashCRDTOperations","hashFunction","runOperationOnDocument","RX_CRDT_CONTEXT","insertCRDT","Array","isArray","insertData","result","insert","catch","err","code","doc","findOne","parameters","id","exec","sortOperationComparator","a","b","entryParts","forEach","entryPart","isMatching","selector","query","sort","skip","matcher","ifMatch","ifNotMatch","crdts","hashObj","map","op","JSON","stringify","getCRDTSchemaPart","operationSchema","type","properties","items","additionalProperties","minItems","minimum","maximum","multipleOf","required","minLength","mergeCRDTFields","crdtsA","crdtsB","length","ret","row","index","mergedOps","ids","Set","add","has","rebuildFromCRDT","base","_deleted","getCRDTConflictHandler","crdtField","getCRDTValue","conflictHandler","i","_context","newDocCrdt","newDocumentState","masterDocCrdt","realMasterState","Promise","resolve","isEqual","mergedCrdt","mergedDoc","documentData","RxDBcrdtPlugin","name","rxdb","prototypes","RxDocument","prot
o","oldRemove","remove","bind","$set","oldincrementalPatch","incrementalPatch","patch","oldincrementalModify","fn","context","primary","args","RxCollection","hooks","preCreateRxCollection","after","data","createRxCollection","getCrdt","isDevMode","bulkWriteBefore","storageInstance","bulkWrite","writes","all","write","newDocState","document","rebuild","docWithoutMeta","Object","entries","k","v","startsWith","recalculatedHash","bulkInsertBefore","bulkInsert","docsData","useDocsData","setMe","key","value","crdtOperations"],"sources":["../../../../src/plugins/crdt/index.ts"],"sourcesContent":["import { newRxError } from '../../rx-error.ts';\nimport type {\n CRDTDocumentField,\n CRDTEntry,\n CRDTOperation,\n FilledMangoQuery,\n HashFunction,\n JsonSchema,\n RxConflictHandler,\n RxConflictHandlerInput,\n RxDocument,\n RxDocumentData,\n RxJsonSchema,\n RxPlugin,\n WithDeleted\n} from '../../types/index.d.ts';\nimport {\n clone,\n deepEqual,\n ensureNotFalsy,\n getProperty,\n now,\n objectPathMonad,\n setProperty,\n toArray\n} from '../../plugins/utils/index.ts';\nimport {\n getQueryMatcher,\n overwritable,\n RxCollection,\n RxDocumentWriteData,\n RxError\n} from '../../index.ts';\nimport { mingoUpdater } from '../update/mingo-updater.ts';\n\n\n\nexport async function updateCRDT(\n this: RxDocument,\n entry: CRDTEntry | CRDTEntry[]\n) {\n entry = overwritable.deepFreezeWhenDevMode(entry) as any;\n\n const jsonSchema = this.collection.schema.jsonSchema;\n if (!jsonSchema.crdt) {\n throw newRxError('CRDT1', {\n schema: jsonSchema,\n queryObj: entry\n });\n }\n const crdtOptions = ensureNotFalsy(jsonSchema.crdt);\n const storageToken = await this.collection.database.storageToken;\n\n return this.incrementalModify(async (docData) => {\n const crdtDocField: CRDTDocumentField = clone(getProperty(docData as any, crdtOptions.field));\n const operation: CRDTOperation = {\n body: toArray(entry),\n creator: storageToken,\n time: now()\n };\n\n /**\n * A new write will ALWAYS be an 
operation in the last\n * array which was non existing before.\n */\n const lastAr: CRDTOperation[] = [operation];\n crdtDocField.operations.push(lastAr);\n crdtDocField.hash = await hashCRDTOperations(this.collection.database.hashFunction, crdtDocField);\n\n docData = runOperationOnDocument(\n this.collection.schema.jsonSchema,\n docData,\n operation\n );\n setProperty(docData, crdtOptions.field, crdtDocField);\n return docData;\n }, RX_CRDT_CONTEXT);\n}\n\n\nexport async function insertCRDT(\n this: RxCollection,\n entry: CRDTEntry | CRDTEntry[]\n) {\n entry = overwritable.deepFreezeWhenDevMode(entry) as any;\n\n const jsonSchema = this.schema.jsonSchema;\n if (!jsonSchema.crdt) {\n throw newRxError('CRDT1', {\n schema: jsonSchema,\n queryObj: entry\n });\n }\n const crdtOptions = ensureNotFalsy(jsonSchema.crdt);\n const storageToken = await this.database.storageToken;\n const operation: CRDTOperation = {\n body: Array.isArray(entry) ? entry : [entry],\n creator: storageToken,\n time: now()\n };\n\n let insertData: RxDocumentWriteData = {} as any;\n insertData = runOperationOnDocument(\n this.schema.jsonSchema,\n insertData as any,\n operation\n ) as any;\n const crdtDocField: CRDTDocumentField = {\n operations: [],\n hash: ''\n };\n setProperty(insertData as any, crdtOptions.field, crdtDocField);\n\n const lastAr: CRDTOperation[] = [operation];\n crdtDocField.operations.push(lastAr);\n crdtDocField.hash = await hashCRDTOperations(this.database.hashFunction, crdtDocField);\n\n const result = await this.insert(insertData).catch(async (err: RxError) => {\n if (err.code === 'CONFLICT') {\n // was a conflict, update document instead of inserting\n const doc = await this.findOne(err.parameters.id).exec(true);\n return doc.updateCRDT(entry);\n } else {\n throw err;\n }\n });\n return result;\n}\n\n\nexport function sortOperationComparator(a: CRDTOperation, b: CRDTOperation) {\n return a.creator > b.creator ? 
1 : -1;\n}\n\n\nfunction runOperationOnDocument(\n schema: RxJsonSchema>,\n docData: WithDeleted,\n operation: CRDTOperation\n): WithDeleted {\n const entryParts = operation.body;\n entryParts.forEach(entryPart => {\n let isMatching: boolean;\n if (entryPart.selector) {\n const query: FilledMangoQuery = {\n selector: ensureNotFalsy(entryPart.selector as any),\n sort: [],\n skip: 0\n };\n const matcher = getQueryMatcher(schema, query);\n isMatching = matcher(docData as any);\n } else {\n isMatching = true;\n }\n if (isMatching) {\n if (entryPart.ifMatch) {\n docData = mingoUpdater>(docData, entryPart.ifMatch);\n }\n } else {\n if (entryPart.ifNotMatch) {\n docData = mingoUpdater>(docData, entryPart.ifNotMatch);\n }\n }\n });\n return docData;\n}\n\nexport async function hashCRDTOperations(\n hashFunction: HashFunction,\n crdts: CRDTDocumentField\n): Promise {\n const hashObj = crdts.operations.map((operations) => {\n return operations.map(op => op.creator);\n });\n const hash = await hashFunction(JSON.stringify(hashObj));\n return hash;\n}\n\nexport function getCRDTSchemaPart(): JsonSchema> {\n const operationSchema: JsonSchema> = {\n type: 'object',\n properties: {\n body: {\n type: 'array',\n items: {\n type: 'object',\n properties: {\n selector: {\n type: 'object'\n },\n ifMatch: {\n type: 'object'\n },\n ifNotMatch: {\n type: 'object'\n }\n },\n additionalProperties: false\n },\n minItems: 1\n },\n creator: {\n type: 'string'\n },\n time: {\n type: 'number',\n minimum: 1,\n maximum: 1000000000000000,\n multipleOf: 0.01\n }\n },\n additionalProperties: false,\n required: [\n 'body',\n 'creator',\n 'time'\n ]\n };\n return {\n type: 'object',\n properties: {\n operations: {\n type: 'array',\n items: {\n type: 'array',\n items: operationSchema\n }\n },\n hash: {\n type: 'string',\n // set a minLength to not accidentally store an empty string\n minLength: 2\n }\n },\n additionalProperties: false,\n required: ['operations', 'hash']\n };\n}\n\n\nexport async function 
mergeCRDTFields(\n hashFunction: HashFunction,\n crdtsA: CRDTDocumentField,\n crdtsB: CRDTDocumentField\n): Promise> {\n\n // the value with most operations must be A to\n // ensure we not miss out rows when iterating over both fields.\n if (crdtsA.operations.length < crdtsB.operations.length) {\n [crdtsA, crdtsB] = [crdtsB, crdtsA];\n }\n\n const ret: CRDTDocumentField = {\n operations: [],\n hash: ''\n };\n crdtsA.operations.forEach((row, index) => {\n let mergedOps: CRDTOperation[] = [];\n const ids = new Set(); // used to deduplicate\n\n row.forEach(op => {\n ids.add(op.creator);\n mergedOps.push(op);\n });\n if (crdtsB.operations[index]) {\n crdtsB.operations[index].forEach(op => {\n if (!ids.has(op.creator)) {\n mergedOps.push(op);\n }\n });\n }\n mergedOps = mergedOps.sort(sortOperationComparator);\n ret.operations[index] = mergedOps;\n });\n\n\n ret.hash = await hashCRDTOperations(hashFunction, ret);\n return ret;\n}\n\nexport function rebuildFromCRDT(\n schema: RxJsonSchema>,\n docData: WithDeleted | RxDocType,\n crdts: CRDTDocumentField\n): WithDeleted {\n let base: WithDeleted = {\n _deleted: false\n } as any;\n setProperty(base, ensureNotFalsy(schema.crdt).field, crdts);\n crdts.operations.forEach(operations => {\n operations.forEach(op => {\n base = runOperationOnDocument(\n schema,\n base,\n op\n );\n });\n });\n return base;\n}\n\n\nexport function getCRDTConflictHandler(\n hashFunction: HashFunction,\n schema: RxJsonSchema>\n): RxConflictHandler {\n const crdtOptions = ensureNotFalsy(schema.crdt);\n const crdtField = crdtOptions.field;\n const getCRDTValue = objectPathMonad | RxDocType, CRDTDocumentField>(crdtField);\n\n const conflictHandler: RxConflictHandler = async (\n i: RxConflictHandlerInput,\n _context: string\n ) => {\n const newDocCrdt = getCRDTValue(i.newDocumentState);\n const masterDocCrdt = getCRDTValue(i.realMasterState);\n\n if (newDocCrdt.hash === masterDocCrdt.hash) {\n return Promise.resolve({\n isEqual: true\n });\n }\n\n const 
mergedCrdt = await mergeCRDTFields(hashFunction, newDocCrdt, masterDocCrdt);\n const mergedDoc = rebuildFromCRDT(\n schema,\n i.newDocumentState,\n mergedCrdt\n );\n return Promise.resolve({\n isEqual: false,\n documentData: mergedDoc\n });\n };\n\n return conflictHandler;\n}\n\n\nexport const RX_CRDT_CONTEXT = 'rx-crdt';\n\nexport const RxDBcrdtPlugin: RxPlugin = {\n name: 'crdt',\n rxdb: true,\n prototypes: {\n RxDocument: (proto: any) => {\n proto.updateCRDT = updateCRDT;\n\n const oldRemove = proto.remove;\n proto.remove = function (this: RxDocument) {\n if (!this.collection.schema.jsonSchema.crdt) {\n return oldRemove.bind(this)();\n }\n return this.updateCRDT({\n ifMatch: {\n $set: {\n _deleted: true\n }\n }\n });\n };\n\n const oldincrementalPatch = proto.incrementalPatch;\n proto.incrementalPatch = function (this: RxDocument, patch: any) {\n if (!this.collection.schema.jsonSchema.crdt) {\n return oldincrementalPatch.bind(this)(patch);\n }\n return this.updateCRDT({\n ifMatch: {\n $set: patch\n }\n });\n };\n const oldincrementalModify = proto.incrementalModify;\n proto.incrementalModify = function (fn: any, context: string) {\n if (!this.collection.schema.jsonSchema.crdt) {\n return oldincrementalModify.bind(this)(fn);\n }\n if (context === RX_CRDT_CONTEXT) {\n return oldincrementalModify.bind(this)(fn);\n } else {\n throw newRxError('CRDT2', {\n id: this.primary,\n args: { context }\n });\n }\n };\n },\n RxCollection: (proto: any) => {\n proto.insertCRDT = insertCRDT;\n }\n },\n overwritable: {},\n hooks: {\n preCreateRxCollection: {\n after: (data) => {\n if (!data.schema.crdt) {\n return;\n }\n if (data.conflictHandler) {\n throw newRxError('CRDT3', {\n collection: data.name,\n schema: data.schema\n });\n }\n data.conflictHandler = getCRDTConflictHandler(\n data.database.hashFunction,\n data.schema\n );\n }\n },\n createRxCollection: {\n after: ({ collection }) => {\n if (!collection.schema.jsonSchema.crdt) {\n return;\n }\n\n const crdtOptions = 
ensureNotFalsy(collection.schema.jsonSchema.crdt);\n const crdtField = crdtOptions.field;\n const getCrdt = objectPathMonad>(crdtOptions.field);\n\n /**\n * In dev-mode we have to ensure that all document writes\n * have the correct crdt state so that nothing is missed out\n * or could accidentally do non-crdt writes to the document.\n */\n if (overwritable.isDevMode()) {\n const bulkWriteBefore = collection.storageInstance.bulkWrite.bind(collection.storageInstance);\n collection.storageInstance.bulkWrite = async function (writes, context) {\n\n await Promise.all(\n writes.map(async (write) => {\n const newDocState: typeof write.document = clone(write.document);\n const crdts = getCrdt(newDocState);\n\n const rebuild = rebuildFromCRDT(\n collection.schema.jsonSchema,\n newDocState,\n crdts\n );\n\n function docWithoutMeta(doc: any) {\n const ret: any = {};\n Object.entries(doc).forEach(([k, v]) => {\n if (\n !k.startsWith('_') &&\n typeof v !== 'undefined'\n ) {\n ret[k] = v;\n }\n });\n return ret;\n }\n if (!deepEqual(docWithoutMeta(newDocState), docWithoutMeta(rebuild))) {\n throw newRxError('SNH', {\n document: newDocState\n });\n }\n const recalculatedHash = await hashCRDTOperations(collection.database.hashFunction, crdts);\n if (crdts.hash !== recalculatedHash) {\n throw newRxError('SNH', {\n document: newDocState,\n args: { hash: crdts.hash, recalculatedHash }\n });\n }\n })\n );\n\n return bulkWriteBefore(writes, context);\n };\n }\n\n\n const bulkInsertBefore = collection.bulkInsert.bind(collection);\n collection.bulkInsert = async function (docsData: any[]) {\n const storageToken = await collection.database.storageToken;\n const useDocsData = await Promise.all(\n docsData.map(async (docData) => {\n const setMe: Partial> = {};\n Object.entries(docData).forEach(([key, value]) => {\n if (\n !key.startsWith('_') &&\n key !== crdtField\n ) {\n setMe[key] = value;\n }\n });\n\n const crdtOperations: CRDTDocumentField = {\n operations: [\n [{\n creator: 
storageToken,\n body: [{\n ifMatch: {\n $set: setMe\n }\n }],\n time: now()\n }]\n ],\n hash: ''\n };\n crdtOperations.hash = await hashCRDTOperations(collection.database.hashFunction, crdtOperations);\n setProperty(docData, crdtOptions.field, crdtOperations);\n return docData;\n })\n );\n return bulkInsertBefore(useDocsData);\n };\n }\n }\n }\n};\n"],"mappings":"AAAA,SAASA,UAAU,QAAQ,mBAAmB;AAgB9C,SACIC,KAAK,EACLC,SAAS,EACTC,cAAc,EACdC,WAAW,EACXC,GAAG,EACHC,eAAe,EACfC,WAAW,EACXC,OAAO,QACJ,8BAA8B;AACrC,SACIC,eAAe,EACfC,YAAY,QAIT,gBAAgB;AACvB,SAASC,YAAY,QAAQ,4BAA4B;AAIzD,OAAO,eAAeC,UAAUA,CAE5BC,KAAoD,EACtD;EACEA,KAAK,GAAGH,YAAY,CAACI,qBAAqB,CAACD,KAAK,CAAQ;EAExD,IAAME,UAAU,GAAG,IAAI,CAACC,UAAU,CAACC,MAAM,CAACF,UAAU;EACpD,IAAI,CAACA,UAAU,CAACG,IAAI,EAAE;IAClB,MAAMlB,UAAU,CAAC,OAAO,EAAE;MACtBiB,MAAM,EAAEF,UAAU;MAClBI,QAAQ,EAAEN;IACd,CAAC,CAAC;EACN;EACA,IAAMO,WAAW,GAAGjB,cAAc,CAACY,UAAU,CAACG,IAAI,CAAC;EACnD,IAAMG,YAAY,GAAG,MAAM,IAAI,CAACL,UAAU,CAACM,QAAQ,CAACD,YAAY;EAEhE,OAAO,IAAI,CAACE,iBAAiB,CAAC,MAAOC,OAAO,IAAK;IAC7C,IAAMC,YAA0C,GAAGxB,KAAK,CAACG,WAAW,CAACoB,OAAO,EAASJ,WAAW,CAACM,KAAK,CAAC,CAAC;IACxG,IAAMC,SAAmC,GAAG;MACxCC,IAAI,EAAEpB,OAAO,CAACK,KAAK,CAAC;MACpBgB,OAAO,EAAER,YAAY;MACrBS,IAAI,EAAEzB,GAAG,CAAC;IACd,CAAC;;IAED;AACR;AACA;AACA;IACQ,IAAM0B,MAAkC,GAAG,CAACJ,SAAS,CAAC;IACtDF,YAAY,CAACO,UAAU,CAACC,IAAI,CAACF,MAAM,CAAC;IACpCN,YAAY,CAACS,IAAI,GAAG,MAAMC,kBAAkB,CAAC,IAAI,CAACnB,UAAU,CAACM,QAAQ,CAACc,YAAY,EAAEX,YAAY,CAAC;IAEjGD,OAAO,GAAGa,sBAAsB,CAC5B,IAAI,CAACrB,UAAU,CAACC,MAAM,CAACF,UAAU,EACjCS,OAAO,EACPG,SACJ,CAAC;IACDpB,WAAW,CAACiB,OAAO,EAAEJ,WAAW,CAACM,KAAK,EAAED,YAAY,CAAC;IACrD,OAAOD,OAAO;EAClB,CAAC,EAAEc,eAAe,CAAC;AACvB;AAGA,OAAO,eAAeC,UAAUA,CAE5B1B,KAAoD,EACtD;EACEA,KAAK,GAAGH,YAAY,CAACI,qBAAqB,CAACD,KAAK,CAAQ;EAExD,IAAME,UAAU,GAAG,IAAI,CAACE,MAAM,CAACF,UAAU;EACzC,IAAI,CAACA,UAAU,CAACG,IAAI,EAAE;IAClB,MAAMlB,UAAU,CAAC,OAAO,EAAE;MACtBiB,MAAM,EAAEF,UAAU;MAClBI,QAAQ,EAAEN;IACd,CAAC,CAAC;EACN;EACA,IAAMO,WAAW,GAAGjB,cAAc,CAACY,UAAU,CAACG,IAAI,CAAC;EACnD,IAAMG,
YAAY,GAAG,MAAM,IAAI,CAACC,QAAQ,CAACD,YAAY;EACrD,IAAMM,SAAmC,GAAG;IACxCC,IAAI,EAAEY,KAAK,CAACC,OAAO,CAAC5B,KAAK,CAAC,GAAGA,KAAK,GAAG,CAACA,KAAK,CAAC;IAC5CgB,OAAO,EAAER,YAAY;IACrBS,IAAI,EAAEzB,GAAG,CAAC;EACd,CAAC;EAED,IAAIqC,UAA0C,GAAG,CAAC,CAAQ;EAC1DA,UAAU,GAAGL,sBAAsB,CAC/B,IAAI,CAACpB,MAAM,CAACF,UAAU,EACtB2B,UAAU,EACVf,SACJ,CAAQ;EACR,IAAMF,YAA0C,GAAG;IAC/CO,UAAU,EAAE,EAAE;IACdE,IAAI,EAAE;EACV,CAAC;EACD3B,WAAW,CAACmC,UAAU,EAAStB,WAAW,CAACM,KAAK,EAAED,YAAY,CAAC;EAE/D,IAAMM,MAAkC,GAAG,CAACJ,SAAS,CAAC;EACtDF,YAAY,CAACO,UAAU,CAACC,IAAI,CAACF,MAAM,CAAC;EACpCN,YAAY,CAACS,IAAI,GAAG,MAAMC,kBAAkB,CAAC,IAAI,CAACb,QAAQ,CAACc,YAAY,EAAEX,YAAY,CAAC;EAEtF,IAAMkB,MAAM,GAAG,MAAM,IAAI,CAACC,MAAM,CAACF,UAAU,CAAC,CAACG,KAAK,CAAC,MAAOC,GAAY,IAAK;IACvE,IAAIA,GAAG,CAACC,IAAI,KAAK,UAAU,EAAE;MACzB;MACA,IAAMC,GAAG,GAAG,MAAM,IAAI,CAACC,OAAO,CAACH,GAAG,CAACI,UAAU,CAACC,EAAE,CAAC,CAACC,IAAI,CAAC,IAAI,CAAC;MAC5D,OAAOJ,GAAG,CAACpC,UAAU,CAACC,KAAK,CAAC;IAChC,CAAC,MAAM;MACH,MAAMiC,GAAG;IACb;EACJ,CAAC,CAAC;EACF,OAAOH,MAAM;AACjB;AAGA,OAAO,SAASU,uBAAuBA,CAAYC,CAA2B,EAAEC,CAA2B,EAAE;EACzG,OAAOD,CAAC,CAACzB,OAAO,GAAG0B,CAAC,CAAC1B,OAAO,GAAG,CAAC,GAAG,CAAC,CAAC;AACzC;AAGA,SAASQ,sBAAsBA,CAC3BpB,MAA+C,EAC/CO,OAA+B,EAC/BG,SAAmC,EACb;EACtB,IAAM6B,UAAU,GAAG7B,SAAS,CAACC,IAAI;EACjC4B,UAAU,CAACC,OAAO,CAACC,SAAS,IAAI;IAC5B,IAAIC,UAAmB;IACvB,IAAID,SAAS,CAACE,QAAQ,EAAE;MACpB,IAAMC,KAAkC,GAAG;QACvCD,QAAQ,EAAEzD,cAAc,CAACuD,SAAS,CAACE,QAAe,CAAC;QACnDE,IAAI,EAAE,EAAE;QACRC,IAAI,EAAE;MACV,CAAC;MACD,IAAMC,OAAO,GAAGvD,eAAe,CAACQ,MAAM,EAAE4C,KAAK,CAAC;MAC9CF,UAAU,GAAGK,OAAO,CAACxC,OAAc,CAAC;IACxC,CAAC,MAAM;MACHmC,UAAU,GAAG,IAAI;IACrB;IACA,IAAIA,UAAU,EAAE;MACZ,IAAID,SAAS,CAACO,OAAO,EAAE;QACnBzC,OAAO,GAAGb,YAAY,CAAyBa,OAAO,EAAEkC,SAAS,CAACO,OAAO,CAAC;MAC9E;IACJ,CAAC,MAAM;MACH,IAAIP,SAAS,CAACQ,UAAU,EAAE;QACtB1C,OAAO,GAAGb,YAAY,CAAyBa,OAAO,EAAEkC,SAAS,CAACQ,UAAU,CAAC;MACjF;IACJ;EACJ,CAAC,CAAC;EACF,OAAO1C,OAAO;AAClB;AAEA,OAAO,eAAeW,kBAAkBA,CACpCC,YAA0B,EAC1B+B,KAA6B,EACd;EACf,IAAMC,OAAO,GAAGD,KAAK,CAACnC,UAAU,CAACqC,GAAG,CAAE
rC,UAAU,IAAK;IACjD,OAAOA,UAAU,CAACqC,GAAG,CAACC,EAAE,IAAIA,EAAE,CAACzC,OAAO,CAAC;EAC3C,CAAC,CAAC;EACF,IAAMK,IAAI,GAAG,MAAME,YAAY,CAACmC,IAAI,CAACC,SAAS,CAACJ,OAAO,CAAC,CAAC;EACxD,OAAOlC,IAAI;AACf;AAEA,OAAO,SAASuC,iBAAiBA,CAAA,EAAwD;EACrF,IAAMC,eAAqD,GAAG;IAC1DC,IAAI,EAAE,QAAQ;IACdC,UAAU,EAAE;MACRhD,IAAI,EAAE;QACF+C,IAAI,EAAE,OAAO;QACbE,KAAK,EAAE;UACHF,IAAI,EAAE,QAAQ;UACdC,UAAU,EAAE;YACRhB,QAAQ,EAAE;cACNe,IAAI,EAAE;YACV,CAAC;YACDV,OAAO,EAAE;cACLU,IAAI,EAAE;YACV,CAAC;YACDT,UAAU,EAAE;cACRS,IAAI,EAAE;YACV;UACJ,CAAC;UACDG,oBAAoB,EAAE;QAC1B,CAAC;QACDC,QAAQ,EAAE;MACd,CAAC;MACDlD,OAAO,EAAE;QACL8C,IAAI,EAAE;MACV,CAAC;MACD7C,IAAI,EAAE;QACF6C,IAAI,EAAE,QAAQ;QACdK,OAAO,EAAE,CAAC;QACVC,OAAO,EAAE,gBAAgB;QACzBC,UAAU,EAAE;MAChB;IACJ,CAAC;IACDJ,oBAAoB,EAAE,KAAK;IAC3BK,QAAQ,EAAE,CACN,MAAM,EACN,SAAS,EACT,MAAM;EAEd,CAAC;EACD,OAAO;IACHR,IAAI,EAAE,QAAQ;IACdC,UAAU,EAAE;MACR5C,UAAU,EAAE;QACR2C,IAAI,EAAE,OAAO;QACbE,KAAK,EAAE;UACHF,IAAI,EAAE,OAAO;UACbE,KAAK,EAAEH;QACX;MACJ,CAAC;MACDxC,IAAI,EAAE;QACFyC,IAAI,EAAE,QAAQ;QACd;QACAS,SAAS,EAAE;MACf;IACJ,CAAC;IACDN,oBAAoB,EAAE,KAAK;IAC3BK,QAAQ,EAAE,CAAC,YAAY,EAAE,MAAM;EACnC,CAAC;AACL;AAGA,OAAO,eAAeE,eAAeA,CACjCjD,YAA0B,EAC1BkD,MAAoC,EACpCC,MAAoC,EACC;EAErC;EACA;EACA,IAAID,MAAM,CAACtD,UAAU,CAACwD,MAAM,GAAGD,MAAM,CAACvD,UAAU,CAACwD,MAAM,EAAE;IACrD,CAACF,MAAM,EAAEC,MAAM,CAAC,GAAG,CAACA,MAAM,EAAED,MAAM,CAAC;EACvC;EAEA,IAAMG,GAAiC,GAAG;IACtCzD,UAAU,EAAE,EAAE;IACdE,IAAI,EAAE;EACV,CAAC;EACDoD,MAAM,CAACtD,UAAU,CAACyB,OAAO,CAAC,CAACiC,GAAG,EAAEC,KAAK,KAAK;IACtC,IAAIC,SAAqC,GAAG,EAAE;IAC9C,IAAMC,GAAG,GAAG,IAAIC,GAAG,CAAS,CAAC,CAAC,CAAC;;IAE/BJ,GAAG,CAACjC,OAAO,CAACa,EAAE,IAAI;MACduB,GAAG,CAACE,GAAG,CAACzB,EAAE,CAACzC,OAAO,CAAC;MACnB+D,SAAS,CAAC3D,IAAI,CAACqC,EAAE,CAAC;IACtB,CAAC,CAAC;IACF,IAAIiB,MAAM,CAACvD,UAAU,CAAC2D,KAAK,CAAC,EAAE;MAC1BJ,MAAM,CAACvD,UAAU,CAAC2D,KAAK,CAAC,CAAClC,OAAO,CAACa,EAAE,IAAI;QACnC,IAAI,CAACuB,GAAG,CAACG,GAAG,CAAC1B,EAAE,CAACzC,OAAO,CAAC,EAAE;UACtB+D,SAAS,CAAC3D,IAAI,CAACqC,EAAE,CAAC;QACtB;MACJ,CAAC,CAAC;IACN;IACAsB,SAAS,GAAGA,SAAS,C
AAC9B,IAAI,CAACT,uBAAuB,CAAC;IACnDoC,GAAG,CAACzD,UAAU,CAAC2D,KAAK,CAAC,GAAGC,SAAS;EACrC,CAAC,CAAC;EAGFH,GAAG,CAACvD,IAAI,GAAG,MAAMC,kBAAkB,CAACC,YAAY,EAAEqD,GAAG,CAAC;EACtD,OAAOA,GAAG;AACd;AAEA,OAAO,SAASQ,eAAeA,CAC3BhF,MAA+C,EAC/CO,OAA2C,EAC3C2C,KAAmC,EACb;EACtB,IAAI+B,IAA4B,GAAG;IAC/BC,QAAQ,EAAE;EACd,CAAQ;EACR5F,WAAW,CAAC2F,IAAI,EAAE/F,cAAc,CAACc,MAAM,CAACC,IAAI,CAAC,CAACQ,KAAK,EAAEyC,KAAK,CAAC;EAC3DA,KAAK,CAACnC,UAAU,CAACyB,OAAO,CAACzB,UAAU,IAAI;IACnCA,UAAU,CAACyB,OAAO,CAACa,EAAE,IAAI;MACrB4B,IAAI,GAAG7D,sBAAsB,CACzBpB,MAAM,EACNiF,IAAI,EACJ5B,EACJ,CAAC;IACL,CAAC,CAAC;EACN,CAAC,CAAC;EACF,OAAO4B,IAAI;AACf;AAGA,OAAO,SAASE,sBAAsBA,CAClChE,YAA0B,EAC1BnB,MAA+C,EACnB;EAC5B,IAAMG,WAAW,GAAGjB,cAAc,CAACc,MAAM,CAACC,IAAI,CAAC;EAC/C,IAAMmF,SAAS,GAAGjF,WAAW,CAACM,KAAK;EACnC,IAAM4E,YAAY,GAAGhG,eAAe,CAAmE+F,SAAS,CAAC;EAEjH,IAAME,eAA6C,GAAG,MAAAA,CAClDC,CAAoC,EACpCC,QAAgB,KACf;IACD,IAAMC,UAAU,GAAGJ,YAAY,CAACE,CAAC,CAACG,gBAAgB,CAAC;IACnD,IAAMC,aAAa,GAAGN,YAAY,CAACE,CAAC,CAACK,eAAe,CAAC;IAErD,IAAIH,UAAU,CAACxE,IAAI,KAAK0E,aAAa,CAAC1E,IAAI,EAAE;MACxC,OAAO4E,OAAO,CAACC,OAAO,CAAC;QACnBC,OAAO,EAAE;MACb,CAAC,CAAC;IACN;IAEA,IAAMC,UAAU,GAAG,MAAM5B,eAAe,CAACjD,YAAY,EAAEsE,UAAU,EAAEE,aAAa,CAAC;IACjF,IAAMM,SAAS,GAAGjB,eAAe,CAC7BhF,MAAM,EACNuF,CAAC,CAACG,gBAAgB,EAClBM,UACJ,CAAC;IACD,OAAOH,OAAO,CAACC,OAAO,CAAC;MACnBC,OAAO,EAAE,KAAK;MACdG,YAAY,EAAED;IAClB,CAAC,CAAC;EACN,CAAC;EAED,OAAOX,eAAe;AAC1B;AAGA,OAAO,IAAMjE,eAAe,GAAG,SAAS;AAExC,OAAO,IAAM8E,cAAwB,GAAG;EACpCC,IAAI,EAAE,MAAM;EACZC,IAAI,EAAE,IAAI;EACVC,UAAU,EAAE;IACRC,UAAU,EAAGC,KAAU,IAAK;MACxBA,KAAK,CAAC7G,UAAU,GAAGA,UAAU;MAE7B,IAAM8G,SAAS,GAAGD,KAAK,CAACE,MAAM;MAC9BF,KAAK,CAACE,MAAM,GAAG,YAA4B;QACvC,IAAI,CAAC,IAAI,CAAC3G,UAAU,CAACC,MAAM,CAACF,UAAU,CAACG,IAAI,EAAE;UACzC,OAAOwG,SAAS,CAACE,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC;QACjC;QACA,OAAO,IAAI,CAAChH,UAAU,CAAC;UACnBqD,OAAO,EAAE;YACL4D,IAAI,EAAE;cACF1B,QAAQ,EAAE;YACd;UACJ;QACJ,CAAC,CAAC;MACN,CAAC;MAED,IAAM2B,mBAAmB,GAAGL,KAAK,CAACM,gBAAgB;MAClDN,KAAK,CAACM,gBAAgB,GAAG,UAA4BC,KAAU,EAAE;QAC7D,IAAI,C
AAC,IAAI,CAAChH,UAAU,CAACC,MAAM,CAACF,UAAU,CAACG,IAAI,EAAE;UACzC,OAAO4G,mBAAmB,CAACF,IAAI,CAAC,IAAI,CAAC,CAACI,KAAK,CAAC;QAChD;QACA,OAAO,IAAI,CAACpH,UAAU,CAAC;UACnBqD,OAAO,EAAE;YACL4D,IAAI,EAAEG;UACV;QACJ,CAAC,CAAC;MACN,CAAC;MACD,IAAMC,oBAAoB,GAAGR,KAAK,CAAClG,iBAAiB;MACpDkG,KAAK,CAAClG,iBAAiB,GAAG,UAAU2G,EAAO,EAAEC,OAAe,EAAE;QAC1D,IAAI,CAAC,IAAI,CAACnH,UAAU,CAACC,MAAM,CAACF,UAAU,CAACG,IAAI,EAAE;UACzC,OAAO+G,oBAAoB,CAACL,IAAI,CAAC,IAAI,CAAC,CAACM,EAAE,CAAC;QAC9C;QACA,IAAIC,OAAO,KAAK7F,eAAe,EAAE;UAC7B,OAAO2F,oBAAoB,CAACL,IAAI,CAAC,IAAI,CAAC,CAACM,EAAE,CAAC;QAC9C,CAAC,MAAM;UACH,MAAMlI,UAAU,CAAC,OAAO,EAAE;YACtBmD,EAAE,EAAE,IAAI,CAACiF,OAAO;YAChBC,IAAI,EAAE;cAAEF;YAAQ;UACpB,CAAC,CAAC;QACN;MACJ,CAAC;IACL,CAAC;IACDG,YAAY,EAAGb,KAAU,IAAK;MAC1BA,KAAK,CAAClF,UAAU,GAAGA,UAAU;IACjC;EACJ,CAAC;EACD7B,YAAY,EAAE,CAAC,CAAC;EAChB6H,KAAK,EAAE;IACHC,qBAAqB,EAAE;MACnBC,KAAK,EAAGC,IAAI,IAAK;QACb,IAAI,CAACA,IAAI,CAACzH,MAAM,CAACC,IAAI,EAAE;UACnB;QACJ;QACA,IAAIwH,IAAI,CAACnC,eAAe,EAAE;UACtB,MAAMvG,UAAU,CAAC,OAAO,EAAE;YACtBgB,UAAU,EAAE0H,IAAI,CAACrB,IAAI;YACrBpG,MAAM,EAAEyH,IAAI,CAACzH;UACjB,CAAC,CAAC;QACN;QACAyH,IAAI,CAACnC,eAAe,GAAGH,sBAAsB,CACzCsC,IAAI,CAACpH,QAAQ,CAACc,YAAY,EAC1BsG,IAAI,CAACzH,MACT,CAAC;MACL;IACJ,CAAC;IACD0H,kBAAkB,EAAE;MAChBF,KAAK,EAAEA,CAAC;QAAEzH;MAAW,CAAC,KAAK;QACvB,IAAI,CAACA,UAAU,CAACC,MAAM,CAACF,UAAU,CAACG,IAAI,EAAE;UACpC;QACJ;QAEA,IAAME,WAAW,GAAGjB,cAAc,CAACa,UAAU,CAACC,MAAM,CAACF,UAAU,CAACG,IAAI,CAAC;QACrE,IAAMmF,SAAS,GAAGjF,WAAW,CAACM,KAAK;QACnC,IAAMkH,OAAO,GAAGtI,eAAe,CAA8Bc,WAAW,CAACM,KAAK,CAAC;;QAE/E;AAChB;AACA;AACA;AACA;QACgB,IAAIhB,YAAY,CAACmI,SAAS,CAAC,CAAC,EAAE;UAC1B,IAAMC,eAAe,GAAG9H,UAAU,CAAC+H,eAAe,CAACC,SAAS,CAACpB,IAAI,CAAC5G,UAAU,CAAC+H,eAAe,CAAC;UAC7F/H,UAAU,CAAC+H,eAAe,CAACC,SAAS,GAAG,gBAAgBC,MAAM,EAAEd,OAAO,EAAE;YAEpE,MAAMrB,OAAO,CAACoC,GAAG,CACbD,MAAM,CAAC5E,GAAG,CAAC,MAAO8E,KAAK,IAAK;cACxB,IAAMC,WAAkC,GAAGnJ,KAAK,CAACkJ,KAAK,CAACE,QAAQ,CAAC;cAChE,IAAMlF,KAAK,GAAGyE,OAAO,CAACQ,WAAW,CAAC;cAElC,IAAME,OAAO,GAAGrD,eAAe,CAC3BjF,UAAU,CAACC,MAAM,CAA
CF,UAAU,EAC5BqI,WAAW,EACXjF,KACJ,CAAC;cAED,SAASoF,cAAcA,CAACvG,GAAQ,EAAE;gBAC9B,IAAMyC,GAAQ,GAAG,CAAC,CAAC;gBACnB+D,MAAM,CAACC,OAAO,CAACzG,GAAG,CAAC,CAACS,OAAO,CAAC,CAAC,CAACiG,CAAC,EAAEC,CAAC,CAAC,KAAK;kBACpC,IACI,CAACD,CAAC,CAACE,UAAU,CAAC,GAAG,CAAC,IAClB,OAAOD,CAAC,KAAK,WAAW,EAC1B;oBACElE,GAAG,CAACiE,CAAC,CAAC,GAAGC,CAAC;kBACd;gBACJ,CAAC,CAAC;gBACF,OAAOlE,GAAG;cACd;cACA,IAAI,CAACvF,SAAS,CAACqJ,cAAc,CAACH,WAAW,CAAC,EAAEG,cAAc,CAACD,OAAO,CAAC,CAAC,EAAE;gBAClE,MAAMtJ,UAAU,CAAC,KAAK,EAAE;kBACpBqJ,QAAQ,EAAED;gBACd,CAAC,CAAC;cACN;cACA,IAAMS,gBAAgB,GAAG,MAAM1H,kBAAkB,CAACnB,UAAU,CAACM,QAAQ,CAACc,YAAY,EAAE+B,KAAK,CAAC;cAC1F,IAAIA,KAAK,CAACjC,IAAI,KAAK2H,gBAAgB,EAAE;gBACjC,MAAM7J,UAAU,CAAC,KAAK,EAAE;kBACpBqJ,QAAQ,EAAED,WAAW;kBACrBf,IAAI,EAAE;oBAAEnG,IAAI,EAAEiC,KAAK,CAACjC,IAAI;oBAAE2H;kBAAiB;gBAC/C,CAAC,CAAC;cACN;YACJ,CAAC,CACL,CAAC;YAED,OAAOf,eAAe,CAACG,MAAM,EAAEd,OAAO,CAAC;UAC3C,CAAC;QACL;QAGA,IAAM2B,gBAAgB,GAAG9I,UAAU,CAAC+I,UAAU,CAACnC,IAAI,CAAC5G,UAAU,CAAC;QAC/DA,UAAU,CAAC+I,UAAU,GAAG,gBAAgBC,QAAe,EAAE;UACrD,IAAM3I,YAAY,GAAG,MAAML,UAAU,CAACM,QAAQ,CAACD,YAAY;UAC3D,IAAM4I,WAAW,GAAG,MAAMnD,OAAO,CAACoC,GAAG,CACjCc,QAAQ,CAAC3F,GAAG,CAAC,MAAO7C,OAAO,IAAK;YAC5B,IAAM0I,KAAmC,GAAG,CAAC,CAAC;YAC9CV,MAAM,CAACC,OAAO,CAACjI,OAAO,CAAC,CAACiC,OAAO,CAAC,CAAC,CAAC0G,GAAG,EAAEC,KAAK,CAAC,KAAK;cAC9C,IACI,CAACD,GAAG,CAACP,UAAU,CAAC,GAAG,CAAC,IACpBO,GAAG,KAAK9D,SAAS,EACnB;gBACE6D,KAAK,CAACC,GAAG,CAAC,GAAGC,KAAK;cACtB;YACJ,CAAC,CAAC;YAEF,IAAMC,cAAsC,GAAG;cAC3CrI,UAAU,EAAE,CACR,CAAC;gBACGH,OAAO,EAAER,YAAY;gBACrBO,IAAI,EAAE,CAAC;kBACHqC,OAAO,EAAE;oBACL4D,IAAI,EAAEqC;kBACV;gBACJ,CAAC,CAAC;gBACFpI,IAAI,EAAEzB,GAAG,CAAC;cACd,CAAC,CAAC,CACL;cACD6B,IAAI,EAAE;YACV,CAAC;YACDmI,cAAc,CAACnI,IAAI,GAAG,MAAMC,kBAAkB,CAACnB,UAAU,CAACM,QAAQ,CAACc,YAAY,EAAEiI,cAAc,CAAC;YAChG9J,WAAW,CAACiB,OAAO,EAAEJ,WAAW,CAACM,KAAK,EAAE2I,cAAc,CAAC;YACvD,OAAO7I,OAAO;UAClB,CAAC,CACL,CAAC;UACD,OAAOsI,gBAAgB,CAACG,WAAW,CAAC;QACxC,CAAC;MACL;IACJ;EACJ;AACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git 
a/dist/esm/plugins/dev-mode/check-document.js b/dist/esm/plugins/dev-mode/check-document.js deleted file mode 100644 index 441b3da62bf..00000000000 --- a/dist/esm/plugins/dev-mode/check-document.js +++ /dev/null @@ -1,120 +0,0 @@ -import { newRxError } from "../../rx-error.js"; -import { fillPrimaryKey, getPrimaryFieldOfPrimaryKey } from "../../rx-schema-helper.js"; -export function ensurePrimaryKeyValid(primaryKey, docData) { - if (!primaryKey) { - throw newRxError('DOC20', { - primaryKey, - document: docData - }); - } - - /** - * This is required so that we can left-pad - * the primaryKey and we are still able to de-left-pad - * it to get again the original key. - */ - if (primaryKey !== primaryKey.trim()) { - throw newRxError('DOC21', { - primaryKey, - document: docData - }); - } - if (primaryKey.includes('\r') || primaryKey.includes('\n')) { - throw newRxError('DOC22', { - primaryKey, - document: docData - }); - } - if (primaryKey.includes('"')) { - throw newRxError('DOC23', { - primaryKey, - document: docData - }); - } -} - -/** - * Deeply checks if the object contains an - * instance of the JavaScript Date class. - * @recursive - */ -export function containsDateInstance(obj) { - if (typeof obj !== 'object' || obj === null) { - return false; - } - for (var key in obj) { - if (obj.hasOwnProperty(key)) { - if (obj[key] instanceof Date) { - return true; - } - if (typeof obj[key] === 'object' && containsDateInstance(obj[key])) { - return true; - } - } - } - return false; -} -export function checkWriteRows(storageInstance, rows) { - var primaryPath = getPrimaryFieldOfPrimaryKey(storageInstance.schema.primaryKey); - var _loop = function (writeRow) { - // ensure that the primary key has not been changed - writeRow.document = fillPrimaryKey(primaryPath, storageInstance.schema, writeRow.document); - - /** - * Ensure that _meta fields have been merged - * and not replaced. 
- * This is important so that when one plugin A - * sets a _meta field and another plugin B does a write - * to the document, it must be ensured that the - * field of plugin A was not removed. - */ - if (writeRow.previous) { - Object.keys(writeRow.previous._meta).forEach(metaFieldName => { - if (!Object.prototype.hasOwnProperty.call(writeRow.document._meta, metaFieldName)) { - throw newRxError('SNH', { - dataBefore: writeRow.previous, - dataAfter: writeRow.document, - args: { - metaFieldName - } - }); - } - }); - } - - /** - * Ensure it can be structured cloned - */ - try { - /** - * Notice that structuredClone() is not available - * in ReactNative, so we test for JSON.stringify() instead - * @link https://github.com/pubkey/rxdb/issues/5046#issuecomment-1827374498 - */ - if (typeof structuredClone === 'function') { - structuredClone(writeRow); - } else { - JSON.parse(JSON.stringify(writeRow)); - } - } catch (err) { - throw newRxError('DOC24', { - collection: storageInstance.collectionName, - document: writeRow.document - }); - } - - /** - * Ensure it does not contain a Date() object - */ - if (containsDateInstance(writeRow.document)) { - throw newRxError('DOC24', { - collection: storageInstance.collectionName, - document: writeRow.document - }); - } - }; - for (var writeRow of rows) { - _loop(writeRow); - } -} -//# sourceMappingURL=check-document.js.map \ No newline at end of file diff --git a/dist/esm/plugins/dev-mode/check-document.js.map b/dist/esm/plugins/dev-mode/check-document.js.map deleted file mode 100644 index ec21f74c57c..00000000000 --- a/dist/esm/plugins/dev-mode/check-document.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"check-document.js","names":["newRxError","fillPrimaryKey","getPrimaryFieldOfPrimaryKey","ensurePrimaryKeyValid","primaryKey","docData","document","trim","includes","containsDateInstance","obj","key","hasOwnProperty","Date","checkWriteRows","storageInstance","rows","primaryPath","schema","_loop","writeRow","previous","Object","keys","_meta","forEach","metaFieldName","prototype","call","dataBefore","dataAfter","args","structuredClone","JSON","parse","stringify","err","collection","collectionName"],"sources":["../../../../src/plugins/dev-mode/check-document.ts"],"sourcesContent":["import { newRxError } from '../../rx-error.ts';\nimport { fillPrimaryKey, getPrimaryFieldOfPrimaryKey } from '../../rx-schema-helper.ts';\nimport type { BulkWriteRow, RxDocumentData, RxStorageInstance } from '../../types/index.d.ts';\n\nexport function ensurePrimaryKeyValid(\n primaryKey: string,\n docData: RxDocumentData\n) {\n if (!primaryKey) {\n throw newRxError('DOC20', {\n primaryKey,\n document: docData\n });\n }\n\n\n /**\n * This is required so that we can left-pad\n * the primaryKey and we are still able to de-left-pad\n * it to get again the original key.\n */\n if (\n primaryKey !== primaryKey.trim()\n ) {\n throw newRxError('DOC21', {\n primaryKey,\n document: docData\n });\n }\n if (\n primaryKey.includes('\\r') ||\n primaryKey.includes('\\n')\n ) {\n throw newRxError('DOC22', {\n primaryKey,\n document: docData\n });\n }\n if (\n primaryKey.includes('\"')\n ) {\n throw newRxError('DOC23', {\n primaryKey,\n document: docData\n });\n }\n}\n\n/**\n * Deeply checks if the object contains an\n * instance of the JavaScript Date class.\n * @recursive\n */\nexport function containsDateInstance(obj: any): boolean {\n if (typeof obj !== 'object' || obj === null) {\n return false;\n }\n for (let key in obj) {\n if (obj.hasOwnProperty(key)) {\n if (obj[key] instanceof Date) {\n return true;\n }\n if (typeof obj[key] === 'object' && containsDateInstance(obj[key])) {\n 
return true;\n }\n }\n }\n return false;\n}\n\n\nexport function checkWriteRows(\n storageInstance: RxStorageInstance,\n rows: BulkWriteRow[]\n) {\n const primaryPath = getPrimaryFieldOfPrimaryKey(storageInstance.schema.primaryKey);\n for (const writeRow of rows) {\n // ensure that the primary key has not been changed\n writeRow.document = fillPrimaryKey(\n primaryPath,\n storageInstance.schema,\n writeRow.document\n );\n\n\n\n /**\n * Ensure that _meta fields have been merged\n * and not replaced.\n * This is important so that when one plugin A\n * sets a _meta field and another plugin B does a write\n * to the document, it must be ensured that the\n * field of plugin A was not removed.\n */\n if (writeRow.previous) {\n Object.keys(writeRow.previous._meta)\n .forEach(metaFieldName => {\n if (!Object.prototype.hasOwnProperty.call(writeRow.document._meta, metaFieldName)) {\n throw newRxError('SNH', {\n dataBefore: writeRow.previous,\n dataAfter: writeRow.document,\n args: {\n metaFieldName\n }\n });\n }\n });\n }\n\n /**\n * Ensure it can be structured cloned\n */\n try {\n /**\n * Notice that structuredClone() is not available\n * in ReactNative, so we test for JSON.stringify() instead\n * @link https://github.com/pubkey/rxdb/issues/5046#issuecomment-1827374498\n */\n if (typeof structuredClone === 'function') {\n structuredClone(writeRow);\n } else {\n JSON.parse(JSON.stringify(writeRow));\n }\n } catch (err) {\n throw newRxError('DOC24', {\n collection: storageInstance.collectionName,\n document: writeRow.document\n });\n }\n\n\n /**\n * Ensure it does not contain a Date() object\n */\n if (containsDateInstance(writeRow.document)) {\n throw newRxError('DOC24', {\n collection: storageInstance.collectionName,\n document: writeRow.document\n });\n }\n 
}\n\n}\n"],"mappings":"AAAA,SAASA,UAAU,QAAQ,mBAAmB;AAC9C,SAASC,cAAc,EAAEC,2BAA2B,QAAQ,2BAA2B;AAGvF,OAAO,SAASC,qBAAqBA,CACjCC,UAAkB,EAClBC,OAA4B,EAC9B;EACE,IAAI,CAACD,UAAU,EAAE;IACb,MAAMJ,UAAU,CAAC,OAAO,EAAE;MACtBI,UAAU;MACVE,QAAQ,EAAED;IACd,CAAC,CAAC;EACN;;EAGA;AACJ;AACA;AACA;AACA;EACI,IACID,UAAU,KAAKA,UAAU,CAACG,IAAI,CAAC,CAAC,EAClC;IACE,MAAMP,UAAU,CAAC,OAAO,EAAE;MACtBI,UAAU;MACVE,QAAQ,EAAED;IACd,CAAC,CAAC;EACN;EACA,IACID,UAAU,CAACI,QAAQ,CAAC,IAAI,CAAC,IACzBJ,UAAU,CAACI,QAAQ,CAAC,IAAI,CAAC,EAC3B;IACE,MAAMR,UAAU,CAAC,OAAO,EAAE;MACtBI,UAAU;MACVE,QAAQ,EAAED;IACd,CAAC,CAAC;EACN;EACA,IACID,UAAU,CAACI,QAAQ,CAAC,GAAG,CAAC,EAC1B;IACE,MAAMR,UAAU,CAAC,OAAO,EAAE;MACtBI,UAAU;MACVE,QAAQ,EAAED;IACd,CAAC,CAAC;EACN;AACJ;;AAEA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASI,oBAAoBA,CAACC,GAAQ,EAAW;EACpD,IAAI,OAAOA,GAAG,KAAK,QAAQ,IAAIA,GAAG,KAAK,IAAI,EAAE;IACzC,OAAO,KAAK;EAChB;EACA,KAAK,IAAIC,GAAG,IAAID,GAAG,EAAE;IACjB,IAAIA,GAAG,CAACE,cAAc,CAACD,GAAG,CAAC,EAAE;MACzB,IAAID,GAAG,CAACC,GAAG,CAAC,YAAYE,IAAI,EAAE;QAC1B,OAAO,IAAI;MACf;MACA,IAAI,OAAOH,GAAG,CAACC,GAAG,CAAC,KAAK,QAAQ,IAAIF,oBAAoB,CAACC,GAAG,CAACC,GAAG,CAAC,CAAC,EAAE;QAChE,OAAO,IAAI;MACf;IACJ;EACJ;EACA,OAAO,KAAK;AAChB;AAGA,OAAO,SAASG,cAAcA,CAC1BC,eAA4D,EAC5DC,IAA+B,EACjC;EACE,IAAMC,WAAW,GAAGf,2BAA2B,CAACa,eAAe,CAACG,MAAM,CAACd,UAAU,CAAC;EAAC,IAAAe,KAAA,YAAAA,CAAAC,QAAA,EACtD;IACzB;IACAA,QAAQ,CAACd,QAAQ,GAAGL,cAAc,CAC9BgB,WAAW,EACXF,eAAe,CAACG,MAAM,EACtBE,QAAQ,CAACd,QACb,CAAC;;IAID;AACR;AACA;AACA;AACA;AACA;AACA;AACA;IACQ,IAAIc,QAAQ,CAACC,QAAQ,EAAE;MACnBC,MAAM,CAACC,IAAI,CAACH,QAAQ,CAACC,QAAQ,CAACG,KAAK,CAAC,CAC/BC,OAAO,CAACC,aAAa,IAAI;QACtB,IAAI,CAACJ,MAAM,CAACK,SAAS,CAACf,cAAc,CAACgB,IAAI,CAACR,QAAQ,CAACd,QAAQ,CAACkB,KAAK,EAAEE,aAAa,CAAC,EAAE;UAC/E,MAAM1B,UAAU,CAAC,KAAK,EAAE;YACpB6B,UAAU,EAAET,QAAQ,CAACC,QAAQ;YAC7BS,SAAS,EAAEV,QAAQ,CAACd,QAAQ;YAC5ByB,IAAI,EAAE;cACFL;YACJ;UACJ,CAAC,CAAC;QACN;MACJ,CAAC,CAAC;IACV;;IAEA;AACR;AACA;IACQ,IAAI;MACA;AACZ;AACA;AACA;AACA;MACY,IAAI,OAAOM,eAAe,KAAK,UAAU,EAAE;QACvCA,eAAe,CAACZ,QAAQ,CAAC;MAC7B,CAAC,MA
AM;QACHa,IAAI,CAACC,KAAK,CAACD,IAAI,CAACE,SAAS,CAACf,QAAQ,CAAC,CAAC;MACxC;IACJ,CAAC,CAAC,OAAOgB,GAAG,EAAE;MACV,MAAMpC,UAAU,CAAC,OAAO,EAAE;QACtBqC,UAAU,EAAEtB,eAAe,CAACuB,cAAc;QAC1ChC,QAAQ,EAAEc,QAAQ,CAACd;MACvB,CAAC,CAAC;IACN;;IAGA;AACR;AACA;IACQ,IAAIG,oBAAoB,CAACW,QAAQ,CAACd,QAAQ,CAAC,EAAE;MACzC,MAAMN,UAAU,CAAC,OAAO,EAAE;QACtBqC,UAAU,EAAEtB,eAAe,CAACuB,cAAc;QAC1ChC,QAAQ,EAAEc,QAAQ,CAACd;MACvB,CAAC,CAAC;IACN;EACJ,CAAC;EAhED,KAAK,IAAMc,QAAQ,IAAIJ,IAAI;IAAAG,KAAA,CAAAC,QAAA;EAAA;AAkE/B","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/dev-mode/check-migration-strategies.js b/dist/esm/plugins/dev-mode/check-migration-strategies.js deleted file mode 100644 index a4c740c33eb..00000000000 --- a/dist/esm/plugins/dev-mode/check-migration-strategies.js +++ /dev/null @@ -1,38 +0,0 @@ -import { newRxTypeError, newRxError } from "../../rx-error.js"; -import { getPreviousVersions } from "../../rx-schema.js"; - -/** - * checks if the migrationStrategies are ok, throws if not - * @throws {Error|TypeError} if not ok - */ -export function checkMigrationStrategies(schema, migrationStrategies) { - // migrationStrategies must be object not array - if (typeof migrationStrategies !== 'object' || Array.isArray(migrationStrategies)) { - throw newRxTypeError('COL11', { - schema - }); - } - var previousVersions = getPreviousVersions(schema); - - // for every previousVersion there must be strategy - if (previousVersions.length !== Object.keys(migrationStrategies).length) { - throw newRxError('COL12', { - have: Object.keys(migrationStrategies), - should: previousVersions - }); - } - - // every strategy must have number as property and be a function - previousVersions.map(vNr => ({ - v: vNr, - s: migrationStrategies[vNr + 1] - })).filter(strategy => typeof strategy.s !== 'function').forEach(strategy => { - throw newRxTypeError('COL13', { - version: strategy.v, - type: typeof strategy, - schema - }); - }); - return true; -} -//# 
sourceMappingURL=check-migration-strategies.js.map \ No newline at end of file diff --git a/dist/esm/plugins/dev-mode/check-migration-strategies.js.map b/dist/esm/plugins/dev-mode/check-migration-strategies.js.map deleted file mode 100644 index f921ec80ed2..00000000000 --- a/dist/esm/plugins/dev-mode/check-migration-strategies.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"check-migration-strategies.js","names":["newRxTypeError","newRxError","getPreviousVersions","checkMigrationStrategies","schema","migrationStrategies","Array","isArray","previousVersions","length","Object","keys","have","should","map","vNr","v","s","filter","strategy","forEach","version","type"],"sources":["../../../../src/plugins/dev-mode/check-migration-strategies.ts"],"sourcesContent":["import type { RxJsonSchema, NumberFunctionMap } from '../../types/index.d.ts';\nimport { newRxTypeError, newRxError } from '../../rx-error.ts';\nimport { getPreviousVersions } from '../../rx-schema.ts';\n\n\n/**\n * checks if the migrationStrategies are ok, throws if not\n * @throws {Error|TypeError} if not ok\n */\nexport function checkMigrationStrategies(\n schema: RxJsonSchema,\n migrationStrategies: NumberFunctionMap\n): boolean {\n // migrationStrategies must be object not array\n if (\n typeof migrationStrategies !== 'object' ||\n Array.isArray(migrationStrategies)\n ) {\n throw newRxTypeError('COL11', {\n schema\n });\n }\n\n const previousVersions = getPreviousVersions(schema);\n\n // for every previousVersion there must be strategy\n if (\n previousVersions.length !== Object\n .keys(migrationStrategies).length\n ) {\n throw newRxError('COL12', {\n have: Object.keys(migrationStrategies),\n should: previousVersions\n });\n }\n\n // every strategy must have number as property and be a function\n previousVersions\n .map(vNr => ({\n v: vNr,\n s: migrationStrategies[(vNr + 1)]\n }))\n .filter(strategy => typeof strategy.s !== 'function')\n .forEach(strategy => {\n throw newRxTypeError('COL13', {\n 
version: strategy.v,\n type: typeof strategy,\n schema\n });\n });\n\n return true;\n}\n"],"mappings":"AACA,SAASA,cAAc,EAAEC,UAAU,QAAQ,mBAAmB;AAC9D,SAASC,mBAAmB,QAAQ,oBAAoB;;AAGxD;AACA;AACA;AACA;AACA,OAAO,SAASC,wBAAwBA,CACpCC,MAAyB,EACzBC,mBAAsC,EAC/B;EACP;EACA,IACI,OAAOA,mBAAmB,KAAK,QAAQ,IACvCC,KAAK,CAACC,OAAO,CAACF,mBAAmB,CAAC,EACpC;IACE,MAAML,cAAc,CAAC,OAAO,EAAE;MAC1BI;IACJ,CAAC,CAAC;EACN;EAEA,IAAMI,gBAAgB,GAAGN,mBAAmB,CAACE,MAAM,CAAC;;EAEpD;EACA,IACII,gBAAgB,CAACC,MAAM,KAAKC,MAAM,CAC7BC,IAAI,CAACN,mBAAmB,CAAC,CAACI,MAAM,EACvC;IACE,MAAMR,UAAU,CAAC,OAAO,EAAE;MACtBW,IAAI,EAAEF,MAAM,CAACC,IAAI,CAACN,mBAAmB,CAAC;MACtCQ,MAAM,EAAEL;IACZ,CAAC,CAAC;EACN;;EAEA;EACAA,gBAAgB,CACXM,GAAG,CAACC,GAAG,KAAK;IACTC,CAAC,EAAED,GAAG;IACNE,CAAC,EAAEZ,mBAAmB,CAAEU,GAAG,GAAG,CAAC;EACnC,CAAC,CAAC,CAAC,CACFG,MAAM,CAACC,QAAQ,IAAI,OAAOA,QAAQ,CAACF,CAAC,KAAK,UAAU,CAAC,CACpDG,OAAO,CAACD,QAAQ,IAAI;IACjB,MAAMnB,cAAc,CAAC,OAAO,EAAE;MAC1BqB,OAAO,EAAEF,QAAQ,CAACH,CAAC;MACnBM,IAAI,EAAE,OAAOH,QAAQ;MACrBf;IACJ,CAAC,CAAC;EACN,CAAC,CAAC;EAEN,OAAO,IAAI;AACf","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/dev-mode/check-orm.js b/dist/esm/plugins/dev-mode/check-orm.js deleted file mode 100644 index 3aa45996ae5..00000000000 --- a/dist/esm/plugins/dev-mode/check-orm.js +++ /dev/null @@ -1,47 +0,0 @@ -import { newRxError, newRxTypeError } from "../../rx-error.js"; -import { rxCollectionProperties, rxDocumentProperties } from "./entity-properties.js"; - -/** - * checks if the given static methods are allowed - * @throws if not allowed - */ -export function checkOrmMethods(statics) { - if (!statics) { - return; - } - Object.entries(statics).forEach(([k, v]) => { - if (typeof k !== 'string') { - throw newRxTypeError('COL14', { - name: k - }); - } - if (k.startsWith('_')) { - throw newRxTypeError('COL15', { - name: k - }); - } - if (typeof v !== 'function') { - throw newRxTypeError('COL16', { - name: k, - type: typeof k - }); - } - if (rxCollectionProperties().includes(k) || 
rxDocumentProperties().includes(k)) { - throw newRxError('COL17', { - name: k - }); - } - }); -} -export function checkOrmDocumentMethods(schema, methods) { - var topLevelFields = Object.keys(schema.properties); - if (!methods) { - return; - } - Object.keys(methods).filter(funName => topLevelFields.includes(funName)).forEach(funName => { - throw newRxError('COL18', { - funName - }); - }); -} -//# sourceMappingURL=check-orm.js.map \ No newline at end of file diff --git a/dist/esm/plugins/dev-mode/check-orm.js.map b/dist/esm/plugins/dev-mode/check-orm.js.map deleted file mode 100644 index 1d62f385fd3..00000000000 --- a/dist/esm/plugins/dev-mode/check-orm.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"check-orm.js","names":["newRxError","newRxTypeError","rxCollectionProperties","rxDocumentProperties","checkOrmMethods","statics","Object","entries","forEach","k","v","name","startsWith","type","includes","checkOrmDocumentMethods","schema","methods","topLevelFields","keys","properties","filter","funName"],"sources":["../../../../src/plugins/dev-mode/check-orm.ts"],"sourcesContent":["import {\n newRxError,\n newRxTypeError\n} from '../../rx-error.ts';\nimport type { KeyFunctionMap, RxJsonSchema } from '../../types/index.d.ts';\nimport { rxCollectionProperties, rxDocumentProperties } from './entity-properties.ts';\n\n/**\n * checks if the given static methods are allowed\n * @throws if not allowed\n */\nexport function checkOrmMethods(statics?: KeyFunctionMap) {\n if (!statics) {\n return;\n }\n Object\n .entries(statics)\n .forEach(([k, v]) => {\n if (typeof k !== 'string') {\n throw newRxTypeError('COL14', {\n name: k\n });\n }\n\n if (k.startsWith('_')) {\n throw newRxTypeError('COL15', {\n name: k\n });\n }\n\n if (typeof v !== 'function') {\n throw newRxTypeError('COL16', {\n name: k,\n type: typeof k\n });\n }\n\n if (\n rxCollectionProperties().includes(k) ||\n rxDocumentProperties().includes(k)\n ) {\n throw newRxError('COL17', {\n name: k\n });\n }\n 
});\n}\n\n\nexport function checkOrmDocumentMethods(\n schema: RxJsonSchema,\n methods?: any,\n) {\n const topLevelFields = Object.keys(schema.properties) as (keyof RxDocType)[];\n if (!methods) {\n return;\n }\n Object.keys(methods)\n .filter(funName => topLevelFields.includes(funName as any))\n .forEach(funName => {\n throw newRxError('COL18', {\n funName\n });\n });\n}\n"],"mappings":"AAAA,SACIA,UAAU,EACVC,cAAc,QACX,mBAAmB;AAE1B,SAASC,sBAAsB,EAAEC,oBAAoB,QAAQ,wBAAwB;;AAErF;AACA;AACA;AACA;AACA,OAAO,SAASC,eAAeA,CAACC,OAAwB,EAAE;EACtD,IAAI,CAACA,OAAO,EAAE;IACV;EACJ;EACAC,MAAM,CACDC,OAAO,CAACF,OAAO,CAAC,CAChBG,OAAO,CAAC,CAAC,CAACC,CAAC,EAAEC,CAAC,CAAC,KAAK;IACjB,IAAI,OAAOD,CAAC,KAAK,QAAQ,EAAE;MACvB,MAAMR,cAAc,CAAC,OAAO,EAAE;QAC1BU,IAAI,EAAEF;MACV,CAAC,CAAC;IACN;IAEA,IAAIA,CAAC,CAACG,UAAU,CAAC,GAAG,CAAC,EAAE;MACnB,MAAMX,cAAc,CAAC,OAAO,EAAE;QAC1BU,IAAI,EAAEF;MACV,CAAC,CAAC;IACN;IAEA,IAAI,OAAOC,CAAC,KAAK,UAAU,EAAE;MACzB,MAAMT,cAAc,CAAC,OAAO,EAAE;QAC1BU,IAAI,EAAEF,CAAC;QACPI,IAAI,EAAE,OAAOJ;MACjB,CAAC,CAAC;IACN;IAEA,IACIP,sBAAsB,CAAC,CAAC,CAACY,QAAQ,CAACL,CAAC,CAAC,IACpCN,oBAAoB,CAAC,CAAC,CAACW,QAAQ,CAACL,CAAC,CAAC,EACpC;MACE,MAAMT,UAAU,CAAC,OAAO,EAAE;QACtBW,IAAI,EAAEF;MACV,CAAC,CAAC;IACN;EACJ,CAAC,CAAC;AACV;AAGA,OAAO,SAASM,uBAAuBA,CACnCC,MAA+B,EAC/BC,OAAa,EACf;EACE,IAAMC,cAAc,GAAGZ,MAAM,CAACa,IAAI,CAACH,MAAM,CAACI,UAAU,CAAwB;EAC5E,IAAI,CAACH,OAAO,EAAE;IACV;EACJ;EACAX,MAAM,CAACa,IAAI,CAACF,OAAO,CAAC,CACfI,MAAM,CAACC,OAAO,IAAIJ,cAAc,CAACJ,QAAQ,CAACQ,OAAc,CAAC,CAAC,CAC1Dd,OAAO,CAACc,OAAO,IAAI;IAChB,MAAMtB,UAAU,CAAC,OAAO,EAAE;MACtBsB;IACJ,CAAC,CAAC;EACN,CAAC,CAAC;AACV","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/dev-mode/check-query.js b/dist/esm/plugins/dev-mode/check-query.js deleted file mode 100644 index 1712c6b019e..00000000000 --- a/dist/esm/plugins/dev-mode/check-query.js +++ /dev/null @@ -1,143 +0,0 @@ -import { newRxError, newRxTypeError } from "../../rx-error.js"; -import { deepEqual } from "../utils/index.js"; -import { 
prepareQuery } from "../../rx-query.js"; - -/** - * accidentally passing a non-valid object into the query params - * is very hard to debug especially when queries are observed - * This is why we do some checks here in dev-mode - */ -export function checkQuery(args) { - var isPlainObject = Object.prototype.toString.call(args.queryObj) === '[object Object]'; - if (!isPlainObject) { - throw newRxTypeError('QU11', { - op: args.op, - collection: args.collection.name, - queryObj: args.queryObj - }); - } - var validKeys = ['selector', 'limit', 'skip', 'sort', 'index']; - Object.keys(args.queryObj).forEach(key => { - if (!validKeys.includes(key)) { - throw newRxTypeError('QU11', { - op: args.op, - collection: args.collection.name, - queryObj: args.queryObj, - key, - args: { - validKeys - } - }); - } - }); - - // do not allow skip or limit for count queries - if (args.op === 'count' && (args.queryObj.limit || args.queryObj.skip)) { - throw newRxError('QU15', { - collection: args.collection.name, - query: args.queryObj - }); - } - ensureObjectDoesNotContainRegExp(args.queryObj); -} -export function checkMangoQuery(args) { - var schema = args.rxQuery.collection.schema.jsonSchema; - - /** - * Ensure that all top level fields are included in the schema. - * TODO this check can be augmented to also check sub-fields. - */ - var massagedSelector = args.mangoQuery.selector; - var schemaTopLevelFields = Object.keys(schema.properties); - Object.keys(massagedSelector) - // do not check operators - .filter(fieldOrOperator => !fieldOrOperator.startsWith('$')) - // skip this check on non-top-level fields - .filter(field => !field.includes('.')).forEach(field => { - if (!schemaTopLevelFields.includes(field)) { - throw newRxError('QU13', { - schema, - field, - query: args.mangoQuery - }); - } - }); - - /** - * ensure if custom index is set, - * it is also defined in the schema - */ - var schemaIndexes = schema.indexes ? 
schema.indexes : []; - var index = args.mangoQuery.index; - if (index) { - var isInSchema = schemaIndexes.find(schemaIndex => deepEqual(schemaIndex, index)); - if (!isInSchema) { - throw newRxError('QU12', { - collection: args.rxQuery.collection.name, - query: args.mangoQuery, - schema - }); - } - } - - /** - * Ensure that a count() query can only be used - * with selectors that are fully satisfied by the used index. - */ - if (args.rxQuery.op === 'count') { - if (!areSelectorsSatisfiedByIndex(args.rxQuery.collection.schema.jsonSchema, args.mangoQuery) && !args.rxQuery.collection.database.allowSlowCount) { - throw newRxError('QU14', { - collection: args.rxQuery.collection, - query: args.mangoQuery - }); - } - } - - /** - * Ensure that sort only runs on known fields - * TODO also check nested fields - */ - if (args.mangoQuery.sort) { - args.mangoQuery.sort.map(sortPart => Object.keys(sortPart)[0]).filter(field => !field.includes('.')).forEach(field => { - if (!schemaTopLevelFields.includes(field)) { - throw newRxError('QU13', { - schema, - field, - query: args.mangoQuery - }); - } - }); - } - - // Do not allow RexExp instances - ensureObjectDoesNotContainRegExp(args.mangoQuery); -} -export function areSelectorsSatisfiedByIndex(schema, query) { - var preparedQuery = prepareQuery(schema, query); - return preparedQuery.queryPlan.selectorSatisfiedByIndex; -} - -/** - * Ensures that the selector does not contain any RegExp instance. 
- * @recursive - */ -export function ensureObjectDoesNotContainRegExp(selector) { - if (typeof selector !== 'object' || selector === null) { - return; - } - var keys = Object.keys(selector); - keys.forEach(key => { - var value = selector[key]; - if (value instanceof RegExp) { - throw newRxError('QU16', { - field: key, - query: selector - }); - } else if (Array.isArray(value)) { - value.forEach(item => ensureObjectDoesNotContainRegExp(item)); - } else { - ensureObjectDoesNotContainRegExp(value); - } - }); -} -//# sourceMappingURL=check-query.js.map \ No newline at end of file diff --git a/dist/esm/plugins/dev-mode/check-query.js.map b/dist/esm/plugins/dev-mode/check-query.js.map deleted file mode 100644 index 2c82aa9acd6..00000000000 --- a/dist/esm/plugins/dev-mode/check-query.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"check-query.js","names":["newRxError","newRxTypeError","deepEqual","prepareQuery","checkQuery","args","isPlainObject","Object","prototype","toString","call","queryObj","op","collection","name","validKeys","keys","forEach","key","includes","limit","skip","query","ensureObjectDoesNotContainRegExp","checkMangoQuery","schema","rxQuery","jsonSchema","massagedSelector","mangoQuery","selector","schemaTopLevelFields","properties","filter","fieldOrOperator","startsWith","field","schemaIndexes","indexes","index","isInSchema","find","schemaIndex","areSelectorsSatisfiedByIndex","database","allowSlowCount","sort","map","sortPart","preparedQuery","queryPlan","selectorSatisfiedByIndex","value","RegExp","Array","isArray","item"],"sources":["../../../../src/plugins/dev-mode/check-query.ts"],"sourcesContent":["import type {\n RxPluginPreCreateRxQueryArgs,\n MangoQuery,\n RxPluginPrePrepareQueryArgs,\n FilledMangoQuery,\n RxJsonSchema,\n RxDocumentData,\n MangoQuerySelector,\n PreparedQuery\n} from '../../types/index.d.ts';\nimport { newRxError, newRxTypeError } from '../../rx-error.ts';\nimport { deepEqual } from '../utils/index.ts';\nimport { 
prepareQuery } from '../../rx-query.ts';\n\n/**\n * accidentally passing a non-valid object into the query params\n * is very hard to debug especially when queries are observed\n * This is why we do some checks here in dev-mode\n */\nexport function checkQuery(args: RxPluginPreCreateRxQueryArgs) {\n const isPlainObject = Object.prototype.toString.call(args.queryObj) === '[object Object]';\n if (!isPlainObject) {\n throw newRxTypeError('QU11', {\n op: args.op,\n collection: args.collection.name,\n queryObj: args.queryObj\n });\n }\n\n const validKeys: (keyof MangoQuery)[] = [\n 'selector',\n 'limit',\n 'skip',\n 'sort',\n 'index'\n ];\n Object.keys(args.queryObj).forEach(key => {\n if (!(validKeys as string[]).includes(key)) {\n throw newRxTypeError('QU11', {\n op: args.op,\n collection: args.collection.name,\n queryObj: args.queryObj,\n key,\n args: {\n validKeys\n }\n });\n }\n });\n\n // do not allow skip or limit for count queries\n if (\n args.op === 'count' &&\n (\n args.queryObj.limit ||\n args.queryObj.skip\n )\n ) {\n throw newRxError(\n 'QU15',\n {\n collection: args.collection.name,\n query: args.queryObj\n }\n );\n }\n\n ensureObjectDoesNotContainRegExp(args.queryObj);\n}\n\n\nexport function checkMangoQuery(args: RxPluginPrePrepareQueryArgs) {\n const schema = args.rxQuery.collection.schema.jsonSchema;\n\n /**\n * Ensure that all top level fields are included in the schema.\n * TODO this check can be augmented to also check sub-fields.\n */\n const massagedSelector: MangoQuerySelector = args.mangoQuery.selector;\n const schemaTopLevelFields = Object.keys(schema.properties);\n Object.keys(massagedSelector)\n // do not check operators\n .filter(fieldOrOperator => !fieldOrOperator.startsWith('$'))\n // skip this check on non-top-level fields\n .filter(field => !field.includes('.'))\n .forEach(field => {\n if (!schemaTopLevelFields.includes(field)) {\n throw newRxError('QU13', {\n schema,\n field,\n query: args.mangoQuery,\n });\n }\n });\n\n /**\n * ensure 
if custom index is set,\n * it is also defined in the schema\n */\n const schemaIndexes = schema.indexes ? schema.indexes : [];\n const index = args.mangoQuery.index;\n if (index) {\n const isInSchema = schemaIndexes.find(schemaIndex => deepEqual(schemaIndex, index));\n if (!isInSchema) {\n throw newRxError(\n 'QU12',\n {\n collection: args.rxQuery.collection.name,\n query: args.mangoQuery,\n schema\n }\n );\n }\n }\n\n\n /**\n * Ensure that a count() query can only be used\n * with selectors that are fully satisfied by the used index.\n */\n if (args.rxQuery.op === 'count') {\n if (\n !areSelectorsSatisfiedByIndex(\n args.rxQuery.collection.schema.jsonSchema,\n args.mangoQuery\n ) &&\n !args.rxQuery.collection.database.allowSlowCount\n ) {\n throw newRxError('QU14', {\n collection: args.rxQuery.collection,\n query: args.mangoQuery\n });\n }\n }\n\n /**\n * Ensure that sort only runs on known fields\n * TODO also check nested fields\n */\n if (args.mangoQuery.sort) {\n args.mangoQuery.sort\n .map(sortPart => Object.keys(sortPart)[0])\n .filter(field => !field.includes('.'))\n .forEach(field => {\n if (!schemaTopLevelFields.includes(field)) {\n throw newRxError('QU13', {\n schema,\n field,\n query: args.mangoQuery,\n });\n }\n });\n }\n\n // Do not allow RexExp instances\n ensureObjectDoesNotContainRegExp(args.mangoQuery);\n}\n\n\nexport function areSelectorsSatisfiedByIndex(\n schema: RxJsonSchema>,\n query: FilledMangoQuery\n): boolean {\n const preparedQuery: PreparedQuery = prepareQuery(\n schema,\n query\n );\n return preparedQuery.queryPlan.selectorSatisfiedByIndex;\n}\n\n/**\n * Ensures that the selector does not contain any RegExp instance.\n * @recursive\n */\nexport function ensureObjectDoesNotContainRegExp(selector: any) {\n if (typeof selector !== 'object' || selector === null) {\n return;\n }\n const keys = Object.keys(selector);\n keys.forEach(key => {\n const value: any = selector[key];\n if (value instanceof RegExp) {\n throw newRxError('QU16', {\n 
field: key,\n query: selector,\n });\n } else if (Array.isArray(value)) {\n value.forEach(item => ensureObjectDoesNotContainRegExp(item));\n } else {\n ensureObjectDoesNotContainRegExp(value);\n }\n });\n}\n"],"mappings":"AAUA,SAASA,UAAU,EAAEC,cAAc,QAAQ,mBAAmB;AAC9D,SAASC,SAAS,QAAQ,mBAAmB;AAC7C,SAASC,YAAY,QAAQ,mBAAmB;;AAEhD;AACA;AACA;AACA;AACA;AACA,OAAO,SAASC,UAAUA,CAACC,IAAkC,EAAE;EAC3D,IAAMC,aAAa,GAAGC,MAAM,CAACC,SAAS,CAACC,QAAQ,CAACC,IAAI,CAACL,IAAI,CAACM,QAAQ,CAAC,KAAK,iBAAiB;EACzF,IAAI,CAACL,aAAa,EAAE;IAChB,MAAML,cAAc,CAAC,MAAM,EAAE;MACzBW,EAAE,EAAEP,IAAI,CAACO,EAAE;MACXC,UAAU,EAAER,IAAI,CAACQ,UAAU,CAACC,IAAI;MAChCH,QAAQ,EAAEN,IAAI,CAACM;IACnB,CAAC,CAAC;EACN;EAEA,IAAMI,SAA+B,GAAG,CACpC,UAAU,EACV,OAAO,EACP,MAAM,EACN,MAAM,EACN,OAAO,CACV;EACDR,MAAM,CAACS,IAAI,CAACX,IAAI,CAACM,QAAQ,CAAC,CAACM,OAAO,CAACC,GAAG,IAAI;IACtC,IAAI,CAAEH,SAAS,CAAcI,QAAQ,CAACD,GAAG,CAAC,EAAE;MACxC,MAAMjB,cAAc,CAAC,MAAM,EAAE;QACzBW,EAAE,EAAEP,IAAI,CAACO,EAAE;QACXC,UAAU,EAAER,IAAI,CAACQ,UAAU,CAACC,IAAI;QAChCH,QAAQ,EAAEN,IAAI,CAACM,QAAQ;QACvBO,GAAG;QACHb,IAAI,EAAE;UACFU;QACJ;MACJ,CAAC,CAAC;IACN;EACJ,CAAC,CAAC;;EAEF;EACA,IACIV,IAAI,CAACO,EAAE,KAAK,OAAO,KAEfP,IAAI,CAACM,QAAQ,CAACS,KAAK,IACnBf,IAAI,CAACM,QAAQ,CAACU,IAAI,CACrB,EACH;IACE,MAAMrB,UAAU,CACZ,MAAM,EACN;MACIa,UAAU,EAAER,IAAI,CAACQ,UAAU,CAACC,IAAI;MAChCQ,KAAK,EAAEjB,IAAI,CAACM;IAChB,CACJ,CAAC;EACL;EAEAY,gCAAgC,CAAClB,IAAI,CAACM,QAAQ,CAAC;AACnD;AAGA,OAAO,SAASa,eAAeA,CAACnB,IAAiC,EAAE;EAC/D,IAAMoB,MAAM,GAAGpB,IAAI,CAACqB,OAAO,CAACb,UAAU,CAACY,MAAM,CAACE,UAAU;;EAExD;AACJ;AACA;AACA;EACI,IAAMC,gBAAyC,GAAGvB,IAAI,CAACwB,UAAU,CAACC,QAAQ;EAC1E,IAAMC,oBAAoB,GAAGxB,MAAM,CAACS,IAAI,CAACS,MAAM,CAACO,UAAU,CAAC;EAC3DzB,MAAM,CAACS,IAAI,CAACY,gBAAgB;EACxB;EAAA,CACCK,MAAM,CAACC,eAAe,IAAI,CAACA,eAAe,CAACC,UAAU,CAAC,GAAG,CAAC;EAC3D;EAAA,CACCF,MAAM,CAACG,KAAK,IAAI,CAACA,KAAK,CAACjB,QAAQ,CAAC,GAAG,CAAC,CAAC,CACrCF,OAAO,CAACmB,KAAK,IAAI;IACd,IAAI,CAACL,oBAAoB,CAACZ,QAAQ,CAACiB,KAAK,CAAC,EAAE;MACvC,MAAMpC,UAAU,CAAC,MAAM,EAAE;QACrByB,MAAM;QACNW,KAAK;QACLd,KAAK,E
AAEjB,IAAI,CAACwB;MAChB,CAAC,CAAC;IACN;EACJ,CAAC,CAAC;;EAEN;AACJ;AACA;AACA;EACI,IAAMQ,aAAa,GAAGZ,MAAM,CAACa,OAAO,GAAGb,MAAM,CAACa,OAAO,GAAG,EAAE;EAC1D,IAAMC,KAAK,GAAGlC,IAAI,CAACwB,UAAU,CAACU,KAAK;EACnC,IAAIA,KAAK,EAAE;IACP,IAAMC,UAAU,GAAGH,aAAa,CAACI,IAAI,CAACC,WAAW,IAAIxC,SAAS,CAACwC,WAAW,EAAEH,KAAK,CAAC,CAAC;IACnF,IAAI,CAACC,UAAU,EAAE;MACb,MAAMxC,UAAU,CACZ,MAAM,EACN;QACIa,UAAU,EAAER,IAAI,CAACqB,OAAO,CAACb,UAAU,CAACC,IAAI;QACxCQ,KAAK,EAAEjB,IAAI,CAACwB,UAAU;QACtBJ;MACJ,CACJ,CAAC;IACL;EACJ;;EAGA;AACJ;AACA;AACA;EACI,IAAIpB,IAAI,CAACqB,OAAO,CAACd,EAAE,KAAK,OAAO,EAAE;IAC7B,IACI,CAAC+B,4BAA4B,CACzBtC,IAAI,CAACqB,OAAO,CAACb,UAAU,CAACY,MAAM,CAACE,UAAU,EACzCtB,IAAI,CAACwB,UACT,CAAC,IACD,CAACxB,IAAI,CAACqB,OAAO,CAACb,UAAU,CAAC+B,QAAQ,CAACC,cAAc,EAClD;MACE,MAAM7C,UAAU,CAAC,MAAM,EAAE;QACrBa,UAAU,EAAER,IAAI,CAACqB,OAAO,CAACb,UAAU;QACnCS,KAAK,EAAEjB,IAAI,CAACwB;MAChB,CAAC,CAAC;IACN;EACJ;;EAEA;AACJ;AACA;AACA;EACI,IAAIxB,IAAI,CAACwB,UAAU,CAACiB,IAAI,EAAE;IACtBzC,IAAI,CAACwB,UAAU,CAACiB,IAAI,CACfC,GAAG,CAACC,QAAQ,IAAIzC,MAAM,CAACS,IAAI,CAACgC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,CACzCf,MAAM,CAACG,KAAK,IAAI,CAACA,KAAK,CAACjB,QAAQ,CAAC,GAAG,CAAC,CAAC,CACrCF,OAAO,CAACmB,KAAK,IAAI;MACd,IAAI,CAACL,oBAAoB,CAACZ,QAAQ,CAACiB,KAAK,CAAC,EAAE;QACvC,MAAMpC,UAAU,CAAC,MAAM,EAAE;UACrByB,MAAM;UACNW,KAAK;UACLd,KAAK,EAAEjB,IAAI,CAACwB;QAChB,CAAC,CAAC;MACN;IACJ,CAAC,CAAC;EACV;;EAEA;EACAN,gCAAgC,CAAClB,IAAI,CAACwB,UAAU,CAAC;AACrD;AAGA,OAAO,SAASc,4BAA4BA,CACxClB,MAA+C,EAC/CH,KAAkC,EAC3B;EACP,IAAM2B,aAAiC,GAAG9C,YAAY,CAClDsB,MAAM,EACNH,KACJ,CAAC;EACD,OAAO2B,aAAa,CAACC,SAAS,CAACC,wBAAwB;AAC3D;;AAEA;AACA;AACA;AACA;AACA,OAAO,SAAS5B,gCAAgCA,CAACO,QAAa,EAAE;EAC5D,IAAI,OAAOA,QAAQ,KAAK,QAAQ,IAAIA,QAAQ,KAAK,IAAI,EAAE;IACnD;EACJ;EACA,IAAMd,IAAI,GAAGT,MAAM,CAACS,IAAI,CAACc,QAAQ,CAAC;EAClCd,IAAI,CAACC,OAAO,CAACC,GAAG,IAAI;IAChB,IAAMkC,KAAU,GAAGtB,QAAQ,CAACZ,GAAG,CAAC;IAChC,IAAIkC,KAAK,YAAYC,MAAM,EAAE;MACzB,MAAMrD,UAAU,CAAC,MAAM,EAAE;QACrBoC,KAAK,EAAElB,GAAG;QACVI,KAAK,EAAEQ;MACX,CAAC,CAAC;IACN,CAAC,MAAM,IAAIwB,KAAK,CAA
CC,OAAO,CAACH,KAAK,CAAC,EAAE;MAC7BA,KAAK,CAACnC,OAAO,CAACuC,IAAI,IAAIjC,gCAAgC,CAACiC,IAAI,CAAC,CAAC;IACjE,CAAC,MAAM;MACHjC,gCAAgC,CAAC6B,KAAK,CAAC;IAC3C;EACJ,CAAC,CAAC;AACN","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/dev-mode/check-schema.js b/dist/esm/plugins/dev-mode/check-schema.js deleted file mode 100644 index bcb1e1a3024..00000000000 --- a/dist/esm/plugins/dev-mode/check-schema.js +++ /dev/null @@ -1,497 +0,0 @@ -/** - * does additional checks over the schema-json - * to ensure nothing is broken or not supported - */ -import { newRxError } from "../../rx-error.js"; -import { getPrimaryFieldOfPrimaryKey, getSchemaByObjectPath } from "../../rx-schema-helper.js"; -import { appendToArray, flattenObject, getProperty, isMaybeReadonlyArray, trimDots } from "../../plugins/utils/index.js"; -import { rxDocumentProperties } from "./entity-properties.js"; - -/** - * checks if the fieldname is allowed - * this makes sure that the fieldnames can be transformed into javascript-vars - * and does not conquer the observe$ and populate_ fields - * @throws {Error} - */ -export function checkFieldNameRegex(fieldName) { - if (fieldName === '_deleted') { - return; - } - if (['properties'].includes(fieldName)) { - throw newRxError('SC23', { - fieldName - }); - } - var regexStr = '^[a-zA-Z](?:[[a-zA-Z0-9_]*]?[a-zA-Z0-9])?$'; - var regex = new RegExp(regexStr); - if ( - /** - * It must be allowed to set _id as primaryKey. 
- * This makes it sometimes easier to work with RxDB+CouchDB - * @link https://github.com/pubkey/rxdb/issues/681 - */ - fieldName !== '_id' && !fieldName.match(regex)) { - throw newRxError('SC1', { - regex: regexStr, - fieldName - }); - } -} - -/** - * validate that all schema-related things are ok - */ -export function validateFieldsDeep(rxJsonSchema) { - var primaryPath = getPrimaryFieldOfPrimaryKey(rxJsonSchema.primaryKey); - function checkField(fieldName, schemaObj, path) { - if (typeof fieldName === 'string' && typeof schemaObj === 'object' && !Array.isArray(schemaObj) && path.split('.').pop() !== 'patternProperties') checkFieldNameRegex(fieldName); - - // 'item' only allowed it type=='array' - if (Object.prototype.hasOwnProperty.call(schemaObj, 'item') && schemaObj.type !== 'array') { - throw newRxError('SC2', { - fieldName - }); - } - - /** - * required fields cannot be set via 'required: true', - * but must be set via required: [] - */ - if (Object.prototype.hasOwnProperty.call(schemaObj, 'required') && typeof schemaObj.required === 'boolean') { - throw newRxError('SC24', { - fieldName - }); - } - - // $ref is not allowed - if (Object.prototype.hasOwnProperty.call(schemaObj, '$ref')) { - throw newRxError('SC40', { - fieldName - }); - } - - // if ref given, must be type=='string', type=='array' with string-items or type==['string','null'] - if (Object.prototype.hasOwnProperty.call(schemaObj, 'ref')) { - if (Array.isArray(schemaObj.type)) { - if (schemaObj.type.length > 2 || !schemaObj.type.includes('string') || !schemaObj.type.includes('null')) { - throw newRxError('SC4', { - fieldName - }); - } - } else { - switch (schemaObj.type) { - case 'string': - break; - case 'array': - if (!schemaObj.items || !schemaObj.items.type || schemaObj.items.type !== 'string') { - throw newRxError('SC3', { - fieldName - }); - } - break; - default: - throw newRxError('SC4', { - fieldName - }); - } - } - } - var isNested = path.split('.').length >= 2; - - // nested only - if 
(isNested) { - if (schemaObj.default) { - throw newRxError('SC7', { - path - }); - } - } - - // first level - if (!isNested) { - // if _id is used, it must be primaryKey - if (fieldName === '_id' && primaryPath !== '_id') { - throw newRxError('COL2', { - fieldName - }); - } - - // check underscore fields - if (fieldName.charAt(0) === '_') { - if ( - // exceptional allow underscore on these fields. - fieldName === '_id' || fieldName === '_deleted') { - return; - } - throw newRxError('SC8', { - fieldName - }); - } - } - } - function traverse(currentObj, currentPath) { - if (!currentObj || typeof currentObj !== 'object') { - return; - } - Object.keys(currentObj).forEach(attributeName => { - var schemaObj = currentObj[attributeName]; - if (!currentObj.properties && schemaObj && typeof schemaObj === 'object' && !Array.isArray(currentObj)) { - checkField(attributeName, schemaObj, currentPath); - } - var nextPath = currentPath; - if (attributeName !== 'properties') nextPath = nextPath + '.' + attributeName; - traverse(schemaObj, nextPath); - }); - } - traverse(rxJsonSchema, ''); - return true; -} -export function checkPrimaryKey(jsonSchema) { - if (!jsonSchema.primaryKey) { - throw newRxError('SC30', { - schema: jsonSchema - }); - } - function validatePrimarySchemaPart(schemaPart) { - if (!schemaPart) { - throw newRxError('SC33', { - schema: jsonSchema - }); - } - var type = schemaPart.type; - if (!type || !['string', 'number', 'integer'].includes(type)) { - throw newRxError('SC32', { - schema: jsonSchema, - args: { - schemaPart - } - }); - } - } - if (typeof jsonSchema.primaryKey === 'string') { - var key = jsonSchema.primaryKey; - var schemaPart = jsonSchema.properties[key]; - validatePrimarySchemaPart(schemaPart); - } else { - var compositePrimaryKey = jsonSchema.primaryKey; - var keySchemaPart = getSchemaByObjectPath(jsonSchema, compositePrimaryKey.key); - validatePrimarySchemaPart(keySchemaPart); - compositePrimaryKey.fields.forEach(field => { - var schemaPart = 
getSchemaByObjectPath(jsonSchema, field); - validatePrimarySchemaPart(schemaPart); - }); - } - - /** - * The primary key must have a maxLength set - * which is required by some RxStorage implementations - * to ensure we can craft custom index strings. - */ - var primaryPath = getPrimaryFieldOfPrimaryKey(jsonSchema.primaryKey); - var primaryPathSchemaPart = jsonSchema.properties[primaryPath]; - if (!primaryPathSchemaPart.maxLength) { - throw newRxError('SC39', { - schema: jsonSchema, - args: { - primaryPathSchemaPart - } - }); - } else if (!isFinite(primaryPathSchemaPart.maxLength)) { - throw newRxError('SC41', { - schema: jsonSchema, - args: { - primaryPathSchemaPart - } - }); - } -} - -/** - * computes real path of the object path in the collection schema - */ -function getSchemaPropertyRealPath(shortPath) { - var pathParts = shortPath.split('.'); - var realPath = ''; - for (var i = 0; i < pathParts.length; i += 1) { - if (pathParts[i] !== '[]') { - realPath = realPath.concat('.properties.'.concat(pathParts[i])); - } else { - realPath = realPath.concat('.items'); - } - } - return trimDots(realPath); -} - -/** - * does the checking - * @throws {Error} if something is not ok - */ -export function checkSchema(jsonSchema) { - if (!jsonSchema.primaryKey) { - throw newRxError('SC30', { - schema: jsonSchema - }); - } - if (!Object.prototype.hasOwnProperty.call(jsonSchema, 'properties')) { - throw newRxError('SC29', { - schema: jsonSchema - }); - } - - // _rev MUST NOT exist, it is added by RxDB - if (jsonSchema.properties._rev) { - throw newRxError('SC10', { - schema: jsonSchema - }); - } - - // check version - if (!Object.prototype.hasOwnProperty.call(jsonSchema, 'version') || typeof jsonSchema.version !== 'number' || jsonSchema.version < 0) { - throw newRxError('SC11', { - version: jsonSchema.version - }); - } - validateFieldsDeep(jsonSchema); - checkPrimaryKey(jsonSchema); - Object.keys(jsonSchema.properties).forEach(key => { - var value = jsonSchema.properties[key]; 
- // check primary - if (key === jsonSchema.primaryKey) { - if (jsonSchema.indexes && jsonSchema.indexes.includes(key)) { - throw newRxError('SC13', { - value, - schema: jsonSchema - }); - } - if (value.unique) { - throw newRxError('SC14', { - value, - schema: jsonSchema - }); - } - if (jsonSchema.encrypted && jsonSchema.encrypted.includes(key)) { - throw newRxError('SC15', { - value, - schema: jsonSchema - }); - } - if (value.type !== 'string') { - throw newRxError('SC16', { - value, - schema: jsonSchema - }); - } - } - - // check if RxDocument-property - if (rxDocumentProperties().includes(key)) { - throw newRxError('SC17', { - key, - schema: jsonSchema - }); - } - }); - - // check format of jsonSchema.indexes - if (jsonSchema.indexes) { - // should be an array - if (!isMaybeReadonlyArray(jsonSchema.indexes)) { - throw newRxError('SC18', { - indexes: jsonSchema.indexes, - schema: jsonSchema - }); - } - jsonSchema.indexes.forEach(index => { - // should contain strings or array of strings - if (!(typeof index === 'string' || Array.isArray(index))) { - throw newRxError('SC19', { - index, - schema: jsonSchema - }); - } - // if is a compound index it must contain strings - if (Array.isArray(index)) { - for (var i = 0; i < index.length; i += 1) { - if (typeof index[i] !== 'string') { - throw newRxError('SC20', { - index, - schema: jsonSchema - }); - } - } - } - - /** - * To be able to craft custom indexable string with compound fields, - * we need to know the maximum fieldlength of the fields values - * when they are transformed to strings. - * Therefore we need to enforce some properties inside of the schema. - */ - var indexAsArray = isMaybeReadonlyArray(index) ? 
index : [index]; - indexAsArray.forEach(fieldName => { - var schemaPart = getSchemaByObjectPath(jsonSchema, fieldName); - var type = schemaPart.type; - switch (type) { - case 'string': - var maxLength = schemaPart.maxLength; - if (!maxLength) { - throw newRxError('SC34', { - index, - field: fieldName, - schema: jsonSchema - }); - } - break; - case 'number': - case 'integer': - var multipleOf = schemaPart.multipleOf; - if (!multipleOf) { - throw newRxError('SC35', { - index, - field: fieldName, - schema: jsonSchema - }); - } - var maximum = schemaPart.maximum; - var minimum = schemaPart.minimum; - if (typeof maximum === 'undefined' || typeof minimum === 'undefined') { - throw newRxError('SC37', { - index, - field: fieldName, - schema: jsonSchema - }); - } - if (!isFinite(maximum) || !isFinite(minimum)) { - throw newRxError('SC41', { - index, - field: fieldName, - schema: jsonSchema - }); - } - break; - case 'boolean': - /** - * If a boolean field is used as an index, - * it must be required. - */ - var parentPath = ''; - var lastPathPart = fieldName; - if (fieldName.includes('.')) { - var partParts = fieldName.split('.'); - lastPathPart = partParts.pop(); - parentPath = partParts.join('.'); - } - var parentSchemaPart = parentPath === '' ? 
jsonSchema : getSchemaByObjectPath(jsonSchema, parentPath); - if (!parentSchemaPart.required || !parentSchemaPart.required.includes(lastPathPart)) { - throw newRxError('SC38', { - index, - field: fieldName, - schema: jsonSchema - }); - } - break; - default: - throw newRxError('SC36', { - fieldName, - type: schemaPart.type, - schema: jsonSchema - }); - } - }); - }); - } - - // remove backward-compatibility for index: true - Object.keys(flattenObject(jsonSchema)).map(key => { - // flattenObject returns only ending paths, we need all paths pointing to an object - var split = key.split('.'); - split.pop(); // all but last - return split.join('.'); - }).filter(key => key !== '').filter((elem, pos, arr) => arr.indexOf(elem) === pos) // unique - .filter(key => { - // check if this path defines an index - var value = getProperty(jsonSchema, key); - return value && !!value.index; - }).forEach(key => { - // replace inner properties - key = key.replace('properties.', ''); // first - key = key.replace(/\.properties\./g, '.'); // middle - throw newRxError('SC26', { - index: trimDots(key), - schema: jsonSchema - }); - }); - - /* check types of the indexes */ - (jsonSchema.indexes || []).reduce((indexPaths, currentIndex) => { - if (isMaybeReadonlyArray(currentIndex)) { - appendToArray(indexPaths, currentIndex); - } else { - indexPaths.push(currentIndex); - } - return indexPaths; - }, []).filter((elem, pos, arr) => arr.indexOf(elem) === pos) // from now on working only with unique indexes - .map(indexPath => { - var realPath = getSchemaPropertyRealPath(indexPath); // real path in the collection schema - var schemaObj = getProperty(jsonSchema, realPath); // get the schema of the indexed property - if (!schemaObj || typeof schemaObj !== 'object') { - throw newRxError('SC21', { - index: indexPath, - schema: jsonSchema - }); - } - return { - indexPath, - schemaObj - }; - }).filter(index => index.schemaObj.type !== 'string' && index.schemaObj.type !== 'integer' && index.schemaObj.type 
!== 'number' && index.schemaObj.type !== 'boolean').forEach(index => { - throw newRxError('SC22', { - key: index.indexPath, - type: index.schemaObj.type, - schema: jsonSchema - }); - }); - - /** - * TODO - * in 9.0.0 we changed the way encrypted fields are defined - * This check ensures people do not oversee the breaking change - * Remove this check in the future - */ - Object.keys(flattenObject(jsonSchema)).map(key => { - // flattenObject returns only ending paths, we need all paths pointing to an object - var split = key.split('.'); - split.pop(); // all but last - return split.join('.'); - }).filter(key => key !== '' && key !== 'attachments').filter((elem, pos, arr) => arr.indexOf(elem) === pos) // unique - .filter(key => { - // check if this path defines an encrypted field - var value = getProperty(jsonSchema, key); - return value && !!value.encrypted; - }).forEach(key => { - // replace inner properties - key = key.replace('properties.', ''); // first - key = key.replace(/\.properties\./g, '.'); // middle - throw newRxError('SC27', { - index: trimDots(key), - schema: jsonSchema - }); - }); - - /* ensure encrypted fields exist in the schema */ - if (jsonSchema.encrypted) { - jsonSchema.encrypted.forEach(propPath => { - // real path in the collection schema - var realPath = getSchemaPropertyRealPath(propPath); - // get the schema of the indexed property - var schemaObj = getProperty(jsonSchema, realPath); - if (!schemaObj || typeof schemaObj !== 'object') { - throw newRxError('SC28', { - field: propPath, - schema: jsonSchema - }); - } - }); - } -} -//# sourceMappingURL=check-schema.js.map \ No newline at end of file diff --git a/dist/esm/plugins/dev-mode/check-schema.js.map b/dist/esm/plugins/dev-mode/check-schema.js.map deleted file mode 100644 index 458a7f089ab..00000000000 --- a/dist/esm/plugins/dev-mode/check-schema.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"check-schema.js","names":["newRxError","getPrimaryFieldOfPrimaryKey","getSchemaByObjectPath","appendToArray","flattenObject","getProperty","isMaybeReadonlyArray","trimDots","rxDocumentProperties","checkFieldNameRegex","fieldName","includes","regexStr","regex","RegExp","match","validateFieldsDeep","rxJsonSchema","primaryPath","primaryKey","checkField","schemaObj","path","Array","isArray","split","pop","Object","prototype","hasOwnProperty","call","type","required","length","items","isNested","default","charAt","traverse","currentObj","currentPath","keys","forEach","attributeName","properties","nextPath","checkPrimaryKey","jsonSchema","schema","validatePrimarySchemaPart","schemaPart","args","key","compositePrimaryKey","keySchemaPart","fields","field","primaryPathSchemaPart","maxLength","isFinite","getSchemaPropertyRealPath","shortPath","pathParts","realPath","i","concat","checkSchema","_rev","version","value","indexes","unique","encrypted","index","indexAsArray","multipleOf","maximum","minimum","parentPath","lastPathPart","partParts","join","parentSchemaPart","map","filter","elem","pos","arr","indexOf","replace","reduce","indexPaths","currentIndex","push","indexPath","propPath"],"sources":["../../../../src/plugins/dev-mode/check-schema.ts"],"sourcesContent":["/**\n * does additional checks over the schema-json\n * to ensure nothing is broken or not supported\n */\nimport {\n newRxError\n} from '../../rx-error.ts';\nimport { getPrimaryFieldOfPrimaryKey, getSchemaByObjectPath } from '../../rx-schema-helper.ts';\nimport type {\n CompositePrimaryKey,\n JsonSchema,\n JsonSchemaTypes,\n RxJsonSchema,\n TopLevelProperty\n} from '../../types/index.d.ts';\nimport {\n appendToArray,\n flattenObject, getProperty, isMaybeReadonlyArray,\n trimDots\n} from '../../plugins/utils/index.ts';\nimport { rxDocumentProperties } from './entity-properties.ts';\n\n/**\n * checks if the fieldname is allowed\n * this makes sure that the fieldnames can be transformed into 
javascript-vars\n * and does not conquer the observe$ and populate_ fields\n * @throws {Error}\n */\nexport function checkFieldNameRegex(fieldName: string) {\n if (fieldName === '_deleted') {\n return;\n }\n\n if (['properties'].includes(fieldName)) {\n throw newRxError('SC23', {\n fieldName\n });\n }\n\n const regexStr = '^[a-zA-Z](?:[[a-zA-Z0-9_]*]?[a-zA-Z0-9])?$';\n const regex = new RegExp(regexStr);\n if (\n /**\n * It must be allowed to set _id as primaryKey.\n * This makes it sometimes easier to work with RxDB+CouchDB\n * @link https://github.com/pubkey/rxdb/issues/681\n */\n fieldName !== '_id' &&\n !fieldName.match(regex)\n ) {\n throw newRxError('SC1', {\n regex: regexStr,\n fieldName\n });\n }\n}\n\n/**\n * validate that all schema-related things are ok\n */\nexport function validateFieldsDeep(rxJsonSchema: RxJsonSchema): true {\n\n const primaryPath = getPrimaryFieldOfPrimaryKey(rxJsonSchema.primaryKey);\n\n function checkField(\n fieldName: string,\n schemaObj: JsonSchema,\n path: string\n ) {\n if (\n typeof fieldName === 'string' &&\n typeof schemaObj === 'object' &&\n !Array.isArray(schemaObj) &&\n path.split('.').pop() !== 'patternProperties'\n ) checkFieldNameRegex(fieldName);\n\n // 'item' only allowed it type=='array'\n if (Object.prototype.hasOwnProperty.call(schemaObj, 'item') && schemaObj.type !== 'array') {\n throw newRxError('SC2', {\n fieldName\n });\n }\n\n /**\n * required fields cannot be set via 'required: true',\n * but must be set via required: []\n */\n if (\n Object.prototype.hasOwnProperty.call(schemaObj, 'required') &&\n typeof schemaObj.required === 'boolean'\n ) {\n throw newRxError('SC24', {\n fieldName\n });\n }\n\n // $ref is not allowed\n if (Object.prototype.hasOwnProperty.call(schemaObj, '$ref')) {\n throw newRxError('SC40', {\n fieldName\n });\n }\n\n\n // if ref given, must be type=='string', type=='array' with string-items or type==['string','null']\n if (Object.prototype.hasOwnProperty.call(schemaObj, 'ref')) {\n if 
(Array.isArray(schemaObj.type)) {\n if (schemaObj.type.length > 2 || !schemaObj.type.includes('string') || !schemaObj.type.includes('null')) {\n throw newRxError('SC4', {\n fieldName\n });\n }\n } else {\n switch (schemaObj.type) {\n case 'string':\n break;\n case 'array':\n if (\n !schemaObj.items ||\n !(schemaObj.items as any).type ||\n (schemaObj.items as any).type !== 'string'\n ) {\n throw newRxError('SC3', {\n fieldName\n });\n }\n break;\n default:\n throw newRxError('SC4', {\n fieldName\n });\n }\n }\n }\n\n const isNested = path.split('.').length >= 2;\n\n // nested only\n if (isNested) {\n if ((schemaObj as any).default) {\n throw newRxError('SC7', {\n path\n });\n }\n }\n\n // first level\n if (!isNested) {\n\n // if _id is used, it must be primaryKey\n if (\n fieldName === '_id' &&\n primaryPath !== '_id'\n ) {\n throw newRxError('COL2', {\n fieldName\n });\n }\n\n // check underscore fields\n if (fieldName.charAt(0) === '_') {\n if (\n // exceptional allow underscore on these fields.\n fieldName === '_id' ||\n fieldName === '_deleted'\n ) {\n return;\n }\n throw newRxError('SC8', {\n fieldName\n });\n }\n }\n }\n\n function traverse(currentObj: any, currentPath: any) {\n if (!currentObj || typeof currentObj !== 'object') {\n return;\n }\n Object.keys(currentObj).forEach(attributeName => {\n const schemaObj = currentObj[attributeName];\n if (\n !currentObj.properties &&\n schemaObj &&\n typeof schemaObj === 'object' &&\n !Array.isArray(currentObj)\n ) {\n checkField(\n attributeName,\n schemaObj,\n currentPath\n );\n }\n let nextPath = currentPath;\n if (attributeName !== 'properties') nextPath = nextPath + '.' 
+ attributeName;\n traverse(schemaObj, nextPath);\n });\n }\n traverse(rxJsonSchema, '');\n return true;\n}\n\nexport function checkPrimaryKey(\n jsonSchema: RxJsonSchema\n) {\n if (!jsonSchema.primaryKey) {\n throw newRxError('SC30', { schema: jsonSchema });\n }\n\n\n\n function validatePrimarySchemaPart(\n schemaPart: JsonSchema | TopLevelProperty\n ) {\n if (!schemaPart) {\n throw newRxError('SC33', { schema: jsonSchema });\n }\n\n const type: string = schemaPart.type as any;\n if (\n !type ||\n !['string', 'number', 'integer'].includes(type)\n ) {\n throw newRxError('SC32', { schema: jsonSchema, args: { schemaPart } });\n }\n }\n\n if (typeof jsonSchema.primaryKey === 'string') {\n const key = jsonSchema.primaryKey;\n const schemaPart = jsonSchema.properties[key];\n validatePrimarySchemaPart(schemaPart);\n } else {\n const compositePrimaryKey: CompositePrimaryKey = jsonSchema.primaryKey as any;\n\n const keySchemaPart = getSchemaByObjectPath(jsonSchema, compositePrimaryKey.key);\n validatePrimarySchemaPart(keySchemaPart);\n\n compositePrimaryKey.fields.forEach(field => {\n const schemaPart = getSchemaByObjectPath(jsonSchema, field);\n validatePrimarySchemaPart(schemaPart);\n });\n }\n\n\n /**\n * The primary key must have a maxLength set\n * which is required by some RxStorage implementations\n * to ensure we can craft custom index strings.\n */\n const primaryPath = getPrimaryFieldOfPrimaryKey(jsonSchema.primaryKey);\n const primaryPathSchemaPart = jsonSchema.properties[primaryPath];\n if (!primaryPathSchemaPart.maxLength) {\n throw newRxError('SC39', { schema: jsonSchema, args: { primaryPathSchemaPart } });\n } else if (!isFinite(primaryPathSchemaPart.maxLength)) {\n throw newRxError('SC41', { schema: jsonSchema, args: { primaryPathSchemaPart } });\n }\n}\n\n/**\n * computes real path of the object path in the collection schema\n */\nfunction getSchemaPropertyRealPath(shortPath: string) {\n const pathParts = shortPath.split('.');\n let realPath = '';\n for 
(let i = 0; i < pathParts.length; i += 1) {\n if (pathParts[i] !== '[]') {\n realPath = realPath.concat('.properties.'.concat(pathParts[i]));\n } else {\n realPath = realPath.concat('.items');\n }\n }\n return trimDots(realPath);\n}\n\n/**\n * does the checking\n * @throws {Error} if something is not ok\n */\nexport function checkSchema(jsonSchema: RxJsonSchema) {\n\n if (!jsonSchema.primaryKey) {\n throw newRxError('SC30', {\n schema: jsonSchema\n });\n }\n\n if (!Object.prototype.hasOwnProperty.call(jsonSchema, 'properties')) {\n throw newRxError('SC29', {\n schema: jsonSchema\n });\n }\n\n // _rev MUST NOT exist, it is added by RxDB\n if (jsonSchema.properties._rev) {\n throw newRxError('SC10', {\n schema: jsonSchema\n });\n }\n\n // check version\n if (!Object.prototype.hasOwnProperty.call(jsonSchema, 'version') ||\n typeof jsonSchema.version !== 'number' ||\n jsonSchema.version < 0\n ) {\n throw newRxError('SC11', {\n version: jsonSchema.version\n });\n }\n\n validateFieldsDeep(jsonSchema);\n checkPrimaryKey(jsonSchema);\n\n Object.keys(jsonSchema.properties).forEach(key => {\n const value: any = jsonSchema.properties[key];\n // check primary\n if (key === jsonSchema.primaryKey) {\n if (jsonSchema.indexes && jsonSchema.indexes.includes(key)) {\n throw newRxError('SC13', {\n value,\n schema: jsonSchema\n });\n }\n if (value.unique) {\n throw newRxError('SC14', {\n value,\n schema: jsonSchema\n });\n }\n if (jsonSchema.encrypted && jsonSchema.encrypted.includes(key)) {\n throw newRxError('SC15', {\n value,\n schema: jsonSchema\n });\n }\n if (value.type !== 'string') {\n throw newRxError('SC16', {\n value,\n schema: jsonSchema\n });\n }\n }\n\n // check if RxDocument-property\n if (rxDocumentProperties().includes(key)) {\n throw newRxError('SC17', {\n key,\n schema: jsonSchema\n });\n }\n });\n\n // check format of jsonSchema.indexes\n if (jsonSchema.indexes) {\n // should be an array\n if (!isMaybeReadonlyArray(jsonSchema.indexes)) {\n throw newRxError('SC18', 
{\n indexes: jsonSchema.indexes,\n schema: jsonSchema\n });\n }\n\n jsonSchema.indexes.forEach(index => {\n // should contain strings or array of strings\n if (!(typeof index === 'string' || Array.isArray(index))) {\n throw newRxError('SC19', { index, schema: jsonSchema });\n }\n // if is a compound index it must contain strings\n if (Array.isArray(index)) {\n for (let i = 0; i < index.length; i += 1) {\n if (typeof index[i] !== 'string') {\n throw newRxError('SC20', { index, schema: jsonSchema });\n }\n }\n }\n\n /**\n * To be able to craft custom indexable string with compound fields,\n * we need to know the maximum fieldlength of the fields values\n * when they are transformed to strings.\n * Therefore we need to enforce some properties inside of the schema.\n */\n const indexAsArray = isMaybeReadonlyArray(index) ? index : [index];\n indexAsArray.forEach(fieldName => {\n const schemaPart = getSchemaByObjectPath(\n jsonSchema,\n fieldName\n );\n\n\n const type: JsonSchemaTypes = schemaPart.type as any;\n switch (type) {\n case 'string':\n const maxLength = schemaPart.maxLength;\n if (!maxLength) {\n throw newRxError('SC34', {\n index,\n field: fieldName,\n schema: jsonSchema\n });\n }\n break;\n case 'number':\n case 'integer':\n const multipleOf = schemaPart.multipleOf;\n if (!multipleOf) {\n throw newRxError('SC35', {\n index,\n field: fieldName,\n schema: jsonSchema\n });\n }\n const maximum = schemaPart.maximum;\n const minimum = schemaPart.minimum;\n if (\n typeof maximum === 'undefined' ||\n typeof minimum === 'undefined'\n ) {\n throw newRxError('SC37', {\n index,\n field: fieldName,\n schema: jsonSchema\n });\n }\n\n if (\n !isFinite(maximum) ||\n !isFinite(minimum)\n ) {\n throw newRxError('SC41', {\n index,\n field: fieldName,\n schema: jsonSchema\n });\n }\n\n break;\n case 'boolean':\n /**\n * If a boolean field is used as an index,\n * it must be required.\n */\n let parentPath = '';\n let lastPathPart = fieldName;\n if (fieldName.includes('.')) {\n 
const partParts = fieldName.split('.');\n lastPathPart = partParts.pop();\n parentPath = partParts.join('.');\n }\n const parentSchemaPart = parentPath === '' ? jsonSchema : getSchemaByObjectPath(\n jsonSchema,\n parentPath\n );\n\n if (\n !parentSchemaPart.required ||\n !parentSchemaPart.required.includes(lastPathPart)\n ) {\n throw newRxError('SC38', {\n index,\n field: fieldName,\n schema: jsonSchema\n });\n }\n break;\n\n default:\n throw newRxError('SC36', {\n fieldName,\n type: schemaPart.type as any,\n schema: jsonSchema,\n });\n }\n });\n\n });\n }\n\n // remove backward-compatibility for index: true\n Object.keys(flattenObject(jsonSchema))\n .map(key => {\n // flattenObject returns only ending paths, we need all paths pointing to an object\n const split = key.split('.');\n split.pop(); // all but last\n return split.join('.');\n })\n .filter(key => key !== '')\n .filter((elem, pos, arr) => arr.indexOf(elem) === pos) // unique\n .filter(key => { // check if this path defines an index\n const value = getProperty(jsonSchema, key);\n return value && !!value.index;\n })\n .forEach(key => { // replace inner properties\n key = key.replace('properties.', ''); // first\n key = key.replace(/\\.properties\\./g, '.'); // middle\n throw newRxError('SC26', {\n index: trimDots(key),\n schema: jsonSchema\n });\n });\n\n /* check types of the indexes */\n (jsonSchema.indexes || [])\n .reduce((indexPaths: string[], currentIndex) => {\n if (isMaybeReadonlyArray(currentIndex)) {\n appendToArray(indexPaths, currentIndex);\n } else {\n indexPaths.push(currentIndex);\n }\n return indexPaths;\n }, [])\n .filter((elem, pos, arr) => arr.indexOf(elem) === pos) // from now on working only with unique indexes\n .map(indexPath => {\n const realPath = getSchemaPropertyRealPath(indexPath); // real path in the collection schema\n const schemaObj = getProperty(jsonSchema, realPath); // get the schema of the indexed property\n if (!schemaObj || typeof schemaObj !== 'object') {\n throw 
newRxError('SC21', {\n index: indexPath,\n schema: jsonSchema\n });\n }\n return { indexPath, schemaObj };\n })\n .filter(index =>\n index.schemaObj.type !== 'string' &&\n index.schemaObj.type !== 'integer' &&\n index.schemaObj.type !== 'number' &&\n index.schemaObj.type !== 'boolean'\n )\n .forEach(index => {\n throw newRxError('SC22', {\n key: index.indexPath,\n type: index.schemaObj.type,\n schema: jsonSchema\n });\n });\n\n\n /**\n * TODO\n * in 9.0.0 we changed the way encrypted fields are defined\n * This check ensures people do not oversee the breaking change\n * Remove this check in the future\n */\n Object.keys(flattenObject(jsonSchema))\n .map(key => {\n // flattenObject returns only ending paths, we need all paths pointing to an object\n const split = key.split('.');\n split.pop(); // all but last\n return split.join('.');\n })\n .filter(key => key !== '' && key !== 'attachments')\n .filter((elem, pos, arr) => arr.indexOf(elem) === pos) // unique\n .filter(key => {\n // check if this path defines an encrypted field\n const value = getProperty(jsonSchema, key);\n return value && !!value.encrypted;\n })\n .forEach(key => { // replace inner properties\n key = key.replace('properties.', ''); // first\n key = key.replace(/\\.properties\\./g, '.'); // middle\n throw newRxError('SC27', {\n index: trimDots(key),\n schema: jsonSchema\n });\n });\n\n /* ensure encrypted fields exist in the schema */\n if (jsonSchema.encrypted) {\n jsonSchema.encrypted\n .forEach(propPath => {\n // real path in the collection schema\n const realPath = getSchemaPropertyRealPath(propPath);\n // get the schema of the indexed property\n const schemaObj = getProperty(jsonSchema, realPath);\n if (!schemaObj || typeof schemaObj !== 'object') {\n throw newRxError('SC28', {\n field: propPath,\n schema: jsonSchema\n });\n }\n });\n 
}\n}\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA,SACIA,UAAU,QACP,mBAAmB;AAC1B,SAASC,2BAA2B,EAAEC,qBAAqB,QAAQ,2BAA2B;AAQ9F,SACIC,aAAa,EACbC,aAAa,EAAEC,WAAW,EAAEC,oBAAoB,EAChDC,QAAQ,QACL,8BAA8B;AACrC,SAASC,oBAAoB,QAAQ,wBAAwB;;AAE7D;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASC,mBAAmBA,CAACC,SAAiB,EAAE;EACnD,IAAIA,SAAS,KAAK,UAAU,EAAE;IAC1B;EACJ;EAEA,IAAI,CAAC,YAAY,CAAC,CAACC,QAAQ,CAACD,SAAS,CAAC,EAAE;IACpC,MAAMV,UAAU,CAAC,MAAM,EAAE;MACrBU;IACJ,CAAC,CAAC;EACN;EAEA,IAAME,QAAQ,GAAG,4CAA4C;EAC7D,IAAMC,KAAK,GAAG,IAAIC,MAAM,CAACF,QAAQ,CAAC;EAClC;EACI;AACR;AACA;AACA;AACA;EACQF,SAAS,KAAK,KAAK,IACnB,CAACA,SAAS,CAACK,KAAK,CAACF,KAAK,CAAC,EACzB;IACE,MAAMb,UAAU,CAAC,KAAK,EAAE;MACpBa,KAAK,EAAED,QAAQ;MACfF;IACJ,CAAC,CAAC;EACN;AACJ;;AAEA;AACA;AACA;AACA,OAAO,SAASM,kBAAkBA,CAACC,YAA+B,EAAQ;EAEtE,IAAMC,WAAW,GAAGjB,2BAA2B,CAACgB,YAAY,CAACE,UAAU,CAAC;EAExE,SAASC,UAAUA,CACfV,SAAiB,EACjBW,SAA0B,EAC1BC,IAAY,EACd;IACE,IACI,OAAOZ,SAAS,KAAK,QAAQ,IAC7B,OAAOW,SAAS,KAAK,QAAQ,IAC7B,CAACE,KAAK,CAACC,OAAO,CAACH,SAAS,CAAC,IACzBC,IAAI,CAACG,KAAK,CAAC,GAAG,CAAC,CAACC,GAAG,CAAC,CAAC,KAAK,mBAAmB,EAC/CjB,mBAAmB,CAACC,SAAS,CAAC;;IAEhC;IACA,IAAIiB,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACT,SAAS,EAAE,MAAM,CAAC,IAAIA,SAAS,CAACU,IAAI,KAAK,OAAO,EAAE;MACvF,MAAM/B,UAAU,CAAC,KAAK,EAAE;QACpBU;MACJ,CAAC,CAAC;IACN;;IAEA;AACR;AACA;AACA;IACQ,IACIiB,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACT,SAAS,EAAE,UAAU,CAAC,IAC3D,OAAOA,SAAS,CAACW,QAAQ,KAAK,SAAS,EACzC;MACE,MAAMhC,UAAU,CAAC,MAAM,EAAE;QACrBU;MACJ,CAAC,CAAC;IACN;;IAEA;IACA,IAAIiB,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACT,SAAS,EAAE,MAAM,CAAC,EAAE;MACzD,MAAMrB,UAAU,CAAC,MAAM,EAAE;QACrBU;MACJ,CAAC,CAAC;IACN;;IAGA;IACA,IAAIiB,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACT,SAAS,EAAE,KAAK,CAAC,EAAE;MACxD,IAAIE,KAAK,CAACC,OAAO,CAACH,SAAS,CAACU,IAAI,CAAC,EAAE;QAC/B,IAAIV,SAAS,CAACU,IAAI,CAACE,MAAM,GAAG,CAAC,IAAI,CAACZ,SAAS,CAACU,IAAI,CAACpB,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAACU,SAAS,CAACU,IAAI,CAACpB,QAAQ,CAAC,MAAM,CAAC,EAAE;UACrG,MAAMX,UAAU,CAAC,KAAK,EAAE;YACpBU;UACJ,CAAC,CAAC;QACN;MACJ,CAAC
,MAAM;QACH,QAAQW,SAAS,CAACU,IAAI;UAClB,KAAK,QAAQ;YACT;UACJ,KAAK,OAAO;YACR,IACI,CAACV,SAAS,CAACa,KAAK,IAChB,CAAEb,SAAS,CAACa,KAAK,CAASH,IAAI,IAC7BV,SAAS,CAACa,KAAK,CAASH,IAAI,KAAK,QAAQ,EAC5C;cACE,MAAM/B,UAAU,CAAC,KAAK,EAAE;gBACpBU;cACJ,CAAC,CAAC;YACN;YACA;UACJ;YACI,MAAMV,UAAU,CAAC,KAAK,EAAE;cACpBU;YACJ,CAAC,CAAC;QACV;MACJ;IACJ;IAEA,IAAMyB,QAAQ,GAAGb,IAAI,CAACG,KAAK,CAAC,GAAG,CAAC,CAACQ,MAAM,IAAI,CAAC;;IAE5C;IACA,IAAIE,QAAQ,EAAE;MACV,IAAKd,SAAS,CAASe,OAAO,EAAE;QAC5B,MAAMpC,UAAU,CAAC,KAAK,EAAE;UACpBsB;QACJ,CAAC,CAAC;MACN;IACJ;;IAEA;IACA,IAAI,CAACa,QAAQ,EAAE;MAEX;MACA,IACIzB,SAAS,KAAK,KAAK,IACnBQ,WAAW,KAAK,KAAK,EACvB;QACE,MAAMlB,UAAU,CAAC,MAAM,EAAE;UACrBU;QACJ,CAAC,CAAC;MACN;;MAEA;MACA,IAAIA,SAAS,CAAC2B,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;QAC7B;QACI;QACA3B,SAAS,KAAK,KAAK,IACnBA,SAAS,KAAK,UAAU,EAC1B;UACE;QACJ;QACA,MAAMV,UAAU,CAAC,KAAK,EAAE;UACpBU;QACJ,CAAC,CAAC;MACN;IACJ;EACJ;EAEA,SAAS4B,QAAQA,CAACC,UAAe,EAAEC,WAAgB,EAAE;IACjD,IAAI,CAACD,UAAU,IAAI,OAAOA,UAAU,KAAK,QAAQ,EAAE;MAC/C;IACJ;IACAZ,MAAM,CAACc,IAAI,CAACF,UAAU,CAAC,CAACG,OAAO,CAACC,aAAa,IAAI;MAC7C,IAAMtB,SAAS,GAAGkB,UAAU,CAACI,aAAa,CAAC;MAC3C,IACI,CAACJ,UAAU,CAACK,UAAU,IACtBvB,SAAS,IACT,OAAOA,SAAS,KAAK,QAAQ,IAC7B,CAACE,KAAK,CAACC,OAAO,CAACe,UAAU,CAAC,EAC5B;QACEnB,UAAU,CACNuB,aAAa,EACbtB,SAAS,EACTmB,WACJ,CAAC;MACL;MACA,IAAIK,QAAQ,GAAGL,WAAW;MAC1B,IAAIG,aAAa,KAAK,YAAY,EAAEE,QAAQ,GAAGA,QAAQ,GAAG,GAAG,GAAGF,aAAa;MAC7EL,QAAQ,CAACjB,SAAS,EAAEwB,QAAQ,CAAC;IACjC,CAAC,CAAC;EACN;EACAP,QAAQ,CAACrB,YAAY,EAAE,EAAE,CAAC;EAC1B,OAAO,IAAI;AACf;AAEA,OAAO,SAAS6B,eAAeA,CAC3BC,UAA6B,EAC/B;EACE,IAAI,CAACA,UAAU,CAAC5B,UAAU,EAAE;IACxB,MAAMnB,UAAU,CAAC,MAAM,EAAE;MAAEgD,MAAM,EAAED;IAAW,CAAC,CAAC;EACpD;EAIA,SAASE,yBAAyBA,CAC9BC,UAAyC,EAC3C;IACE,IAAI,CAACA,UAAU,EAAE;MACb,MAAMlD,UAAU,CAAC,MAAM,EAAE;QAAEgD,MAAM,EAAED;MAAW,CAAC,CAAC;IACpD;IAEA,IAAMhB,IAAY,GAAGmB,UAAU,CAACnB,IAAW;IAC3C,IACI,CAACA,IAAI,IACL,CAAC,CAAC,QAAQ,EAAE,QAAQ,EAAE,SAAS,CAAC,CAACpB,QAAQ,CAACoB,IAAI,CAAC,EACjD;MACE,MAAM/B,UAAU,CAAC,MAAM,EAAE;QAAEgD,MAAM,EAAED,UAAU;QAAEI,IAAI,EA
AE;UAAED;QAAW;MAAE,CAAC,CAAC;IAC1E;EACJ;EAEA,IAAI,OAAOH,UAAU,CAAC5B,UAAU,KAAK,QAAQ,EAAE;IAC3C,IAAMiC,GAAG,GAAGL,UAAU,CAAC5B,UAAU;IACjC,IAAM+B,UAAU,GAAGH,UAAU,CAACH,UAAU,CAACQ,GAAG,CAAC;IAC7CH,yBAAyB,CAACC,UAAU,CAAC;EACzC,CAAC,MAAM;IACH,IAAMG,mBAA6C,GAAGN,UAAU,CAAC5B,UAAiB;IAElF,IAAMmC,aAAa,GAAGpD,qBAAqB,CAAC6C,UAAU,EAAEM,mBAAmB,CAACD,GAAG,CAAC;IAChFH,yBAAyB,CAACK,aAAa,CAAC;IAExCD,mBAAmB,CAACE,MAAM,CAACb,OAAO,CAACc,KAAK,IAAI;MACxC,IAAMN,UAAU,GAAGhD,qBAAqB,CAAC6C,UAAU,EAAES,KAAK,CAAC;MAC3DP,yBAAyB,CAACC,UAAU,CAAC;IACzC,CAAC,CAAC;EACN;;EAGA;AACJ;AACA;AACA;AACA;EACI,IAAMhC,WAAW,GAAGjB,2BAA2B,CAAC8C,UAAU,CAAC5B,UAAU,CAAC;EACtE,IAAMsC,qBAAqB,GAAGV,UAAU,CAACH,UAAU,CAAC1B,WAAW,CAAC;EAChE,IAAI,CAACuC,qBAAqB,CAACC,SAAS,EAAE;IAClC,MAAM1D,UAAU,CAAC,MAAM,EAAE;MAAEgD,MAAM,EAAED,UAAU;MAAEI,IAAI,EAAE;QAAEM;MAAsB;IAAE,CAAC,CAAC;EACrF,CAAC,MAAM,IAAI,CAACE,QAAQ,CAACF,qBAAqB,CAACC,SAAS,CAAC,EAAE;IACnD,MAAM1D,UAAU,CAAC,MAAM,EAAE;MAAEgD,MAAM,EAAED,UAAU;MAAEI,IAAI,EAAE;QAAEM;MAAsB;IAAE,CAAC,CAAC;EACrF;AACJ;;AAEA;AACA;AACA;AACA,SAASG,yBAAyBA,CAACC,SAAiB,EAAE;EAClD,IAAMC,SAAS,GAAGD,SAAS,CAACpC,KAAK,CAAC,GAAG,CAAC;EACtC,IAAIsC,QAAQ,GAAG,EAAE;EACjB,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGF,SAAS,CAAC7B,MAAM,EAAE+B,CAAC,IAAI,CAAC,EAAE;IAC1C,IAAIF,SAAS,CAACE,CAAC,CAAC,KAAK,IAAI,EAAE;MACvBD,QAAQ,GAAGA,QAAQ,CAACE,MAAM,CAAC,cAAc,CAACA,MAAM,CAACH,SAAS,CAACE,CAAC,CAAC,CAAC,CAAC;IACnE,CAAC,MAAM;MACHD,QAAQ,GAAGA,QAAQ,CAACE,MAAM,CAAC,QAAQ,CAAC;IACxC;EACJ;EACA,OAAO1D,QAAQ,CAACwD,QAAQ,CAAC;AAC7B;;AAEA;AACA;AACA;AACA;AACA,OAAO,SAASG,WAAWA,CAACnB,UAA6B,EAAE;EAEvD,IAAI,CAACA,UAAU,CAAC5B,UAAU,EAAE;IACxB,MAAMnB,UAAU,CAAC,MAAM,EAAE;MACrBgD,MAAM,EAAED;IACZ,CAAC,CAAC;EACN;EAEA,IAAI,CAACpB,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACiB,UAAU,EAAE,YAAY,CAAC,EAAE;IACjE,MAAM/C,UAAU,CAAC,MAAM,EAAE;MACrBgD,MAAM,EAAED;IACZ,CAAC,CAAC;EACN;;EAEA;EACA,IAAIA,UAAU,CAACH,UAAU,CAACuB,IAAI,EAAE;IAC5B,MAAMnE,UAAU,CAAC,MAAM,EAAE;MACrBgD,MAAM,EAAED;IACZ,CAAC,CAAC;EACN;;EAEA;EACA,IAAI,CAACpB,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAAC
iB,UAAU,EAAE,SAAS,CAAC,IAC5D,OAAOA,UAAU,CAACqB,OAAO,KAAK,QAAQ,IACtCrB,UAAU,CAACqB,OAAO,GAAG,CAAC,EACxB;IACE,MAAMpE,UAAU,CAAC,MAAM,EAAE;MACrBoE,OAAO,EAAErB,UAAU,CAACqB;IACxB,CAAC,CAAC;EACN;EAEApD,kBAAkB,CAAC+B,UAAU,CAAC;EAC9BD,eAAe,CAACC,UAAU,CAAC;EAE3BpB,MAAM,CAACc,IAAI,CAACM,UAAU,CAACH,UAAU,CAAC,CAACF,OAAO,CAACU,GAAG,IAAI;IAC9C,IAAMiB,KAAU,GAAGtB,UAAU,CAACH,UAAU,CAACQ,GAAG,CAAC;IAC7C;IACA,IAAIA,GAAG,KAAKL,UAAU,CAAC5B,UAAU,EAAE;MAC/B,IAAI4B,UAAU,CAACuB,OAAO,IAAIvB,UAAU,CAACuB,OAAO,CAAC3D,QAAQ,CAACyC,GAAG,CAAC,EAAE;QACxD,MAAMpD,UAAU,CAAC,MAAM,EAAE;UACrBqE,KAAK;UACLrB,MAAM,EAAED;QACZ,CAAC,CAAC;MACN;MACA,IAAIsB,KAAK,CAACE,MAAM,EAAE;QACd,MAAMvE,UAAU,CAAC,MAAM,EAAE;UACrBqE,KAAK;UACLrB,MAAM,EAAED;QACZ,CAAC,CAAC;MACN;MACA,IAAIA,UAAU,CAACyB,SAAS,IAAIzB,UAAU,CAACyB,SAAS,CAAC7D,QAAQ,CAACyC,GAAG,CAAC,EAAE;QAC5D,MAAMpD,UAAU,CAAC,MAAM,EAAE;UACrBqE,KAAK;UACLrB,MAAM,EAAED;QACZ,CAAC,CAAC;MACN;MACA,IAAIsB,KAAK,CAACtC,IAAI,KAAK,QAAQ,EAAE;QACzB,MAAM/B,UAAU,CAAC,MAAM,EAAE;UACrBqE,KAAK;UACLrB,MAAM,EAAED;QACZ,CAAC,CAAC;MACN;IACJ;;IAEA;IACA,IAAIvC,oBAAoB,CAAC,CAAC,CAACG,QAAQ,CAACyC,GAAG,CAAC,EAAE;MACtC,MAAMpD,UAAU,CAAC,MAAM,EAAE;QACrBoD,GAAG;QACHJ,MAAM,EAAED;MACZ,CAAC,CAAC;IACN;EACJ,CAAC,CAAC;;EAEF;EACA,IAAIA,UAAU,CAACuB,OAAO,EAAE;IACpB;IACA,IAAI,CAAChE,oBAAoB,CAACyC,UAAU,CAACuB,OAAO,CAAC,EAAE;MAC3C,MAAMtE,UAAU,CAAC,MAAM,EAAE;QACrBsE,OAAO,EAAEvB,UAAU,CAACuB,OAAO;QAC3BtB,MAAM,EAAED;MACZ,CAAC,CAAC;IACN;IAEAA,UAAU,CAACuB,OAAO,CAAC5B,OAAO,CAAC+B,KAAK,IAAI;MAChC;MACA,IAAI,EAAE,OAAOA,KAAK,KAAK,QAAQ,IAAIlD,KAAK,CAACC,OAAO,CAACiD,KAAK,CAAC,CAAC,EAAE;QACtD,MAAMzE,UAAU,CAAC,MAAM,EAAE;UAAEyE,KAAK;UAAEzB,MAAM,EAAED;QAAW,CAAC,CAAC;MAC3D;MACA;MACA,IAAIxB,KAAK,CAACC,OAAO,CAACiD,KAAK,CAAC,EAAE;QACtB,KAAK,IAAIT,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGS,KAAK,CAACxC,MAAM,EAAE+B,CAAC,IAAI,CAAC,EAAE;UACtC,IAAI,OAAOS,KAAK,CAACT,CAAC,CAAC,KAAK,QAAQ,EAAE;YAC9B,MAAMhE,UAAU,CAAC,MAAM,EAAE;cAAEyE,KAAK;cAAEzB,MAAM,EAAED;YAAW,CAAC,CAAC;UAC3D;QACJ;MACJ;;MAEA;AACZ;AACA;AACA;AACA;AACA;MACY,IAAM2B,YAAY,GAAGpE,oBAAoB,CAACmE,KAAK,CA
AC,GAAGA,KAAK,GAAG,CAACA,KAAK,CAAC;MAClEC,YAAY,CAAChC,OAAO,CAAChC,SAAS,IAAI;QAC9B,IAAMwC,UAAU,GAAGhD,qBAAqB,CACpC6C,UAAU,EACVrC,SACJ,CAAC;QAGD,IAAMqB,IAAqB,GAAGmB,UAAU,CAACnB,IAAW;QACpD,QAAQA,IAAI;UACR,KAAK,QAAQ;YACT,IAAM2B,SAAS,GAAGR,UAAU,CAACQ,SAAS;YACtC,IAAI,CAACA,SAAS,EAAE;cACZ,MAAM1D,UAAU,CAAC,MAAM,EAAE;gBACrByE,KAAK;gBACLjB,KAAK,EAAE9C,SAAS;gBAChBsC,MAAM,EAAED;cACZ,CAAC,CAAC;YACN;YACA;UACJ,KAAK,QAAQ;UACb,KAAK,SAAS;YACV,IAAM4B,UAAU,GAAGzB,UAAU,CAACyB,UAAU;YACxC,IAAI,CAACA,UAAU,EAAE;cACb,MAAM3E,UAAU,CAAC,MAAM,EAAE;gBACrByE,KAAK;gBACLjB,KAAK,EAAE9C,SAAS;gBAChBsC,MAAM,EAAED;cACZ,CAAC,CAAC;YACN;YACA,IAAM6B,OAAO,GAAG1B,UAAU,CAAC0B,OAAO;YAClC,IAAMC,OAAO,GAAG3B,UAAU,CAAC2B,OAAO;YAClC,IACI,OAAOD,OAAO,KAAK,WAAW,IAC9B,OAAOC,OAAO,KAAK,WAAW,EAChC;cACE,MAAM7E,UAAU,CAAC,MAAM,EAAE;gBACrByE,KAAK;gBACLjB,KAAK,EAAE9C,SAAS;gBAChBsC,MAAM,EAAED;cACZ,CAAC,CAAC;YACN;YAEA,IACI,CAACY,QAAQ,CAACiB,OAAO,CAAC,IAClB,CAACjB,QAAQ,CAACkB,OAAO,CAAC,EACpB;cACE,MAAM7E,UAAU,CAAC,MAAM,EAAE;gBACrByE,KAAK;gBACLjB,KAAK,EAAE9C,SAAS;gBAChBsC,MAAM,EAAED;cACZ,CAAC,CAAC;YACN;YAEA;UACJ,KAAK,SAAS;YACV;AACxB;AACA;AACA;YACwB,IAAI+B,UAAU,GAAG,EAAE;YACnB,IAAIC,YAAY,GAAGrE,SAAS;YAC5B,IAAIA,SAAS,CAACC,QAAQ,CAAC,GAAG,CAAC,EAAE;cACzB,IAAMqE,SAAS,GAAGtE,SAAS,CAACe,KAAK,CAAC,GAAG,CAAC;cACtCsD,YAAY,GAAGC,SAAS,CAACtD,GAAG,CAAC,CAAC;cAC9BoD,UAAU,GAAGE,SAAS,CAACC,IAAI,CAAC,GAAG,CAAC;YACpC;YACA,IAAMC,gBAAgB,GAAGJ,UAAU,KAAK,EAAE,GAAG/B,UAAU,GAAG7C,qBAAqB,CAC3E6C,UAAU,EACV+B,UACJ,CAAC;YAED,IACI,CAACI,gBAAgB,CAAClD,QAAQ,IAC1B,CAACkD,gBAAgB,CAAClD,QAAQ,CAACrB,QAAQ,CAACoE,YAAY,CAAC,EACnD;cACE,MAAM/E,UAAU,CAAC,MAAM,EAAE;gBACrByE,KAAK;gBACLjB,KAAK,EAAE9C,SAAS;gBAChBsC,MAAM,EAAED;cACZ,CAAC,CAAC;YACN;YACA;UAEJ;YACI,MAAM/C,UAAU,CAAC,MAAM,EAAE;cACrBU,SAAS;cACTqB,IAAI,EAAEmB,UAAU,CAACnB,IAAW;cAC5BiB,MAAM,EAAED;YACZ,CAAC,CAAC;QACV;MACJ,CAAC,CAAC;IAEN,CAAC,CAAC;EACN;;EAEA;EACApB,MAAM,CAACc,IAAI,CAACrC,aAAa,CAAC2C,UAAU,CAAC,CAAC,CACjCoC,GAAG,CAAC/B,GAAG,IAAI;IACR;IACA,IAAM3B,KAAK,GAAG2B,GAAG,CAAC3B,KAAK,CAAC,GAAG,CAAC;IAC5BA,KAAK,CAACC,G
AAG,CAAC,CAAC,CAAC,CAAC;IACb,OAAOD,KAAK,CAACwD,IAAI,CAAC,GAAG,CAAC;EAC1B,CAAC,CAAC,CACDG,MAAM,CAAChC,GAAG,IAAIA,GAAG,KAAK,EAAE,CAAC,CACzBgC,MAAM,CAAC,CAACC,IAAI,EAAEC,GAAG,EAAEC,GAAG,KAAKA,GAAG,CAACC,OAAO,CAACH,IAAI,CAAC,KAAKC,GAAG,CAAC,CAAC;EAAA,CACtDF,MAAM,CAAChC,GAAG,IAAI;IAAE;IACb,IAAMiB,KAAK,GAAGhE,WAAW,CAAC0C,UAAU,EAAEK,GAAG,CAAC;IAC1C,OAAOiB,KAAK,IAAI,CAAC,CAACA,KAAK,CAACI,KAAK;EACjC,CAAC,CAAC,CACD/B,OAAO,CAACU,GAAG,IAAI;IAAE;IACdA,GAAG,GAAGA,GAAG,CAACqC,OAAO,CAAC,aAAa,EAAE,EAAE,CAAC,CAAC,CAAC;IACtCrC,GAAG,GAAGA,GAAG,CAACqC,OAAO,CAAC,iBAAiB,EAAE,GAAG,CAAC,CAAC,CAAC;IAC3C,MAAMzF,UAAU,CAAC,MAAM,EAAE;MACrByE,KAAK,EAAElE,QAAQ,CAAC6C,GAAG,CAAC;MACpBJ,MAAM,EAAED;IACZ,CAAC,CAAC;EACN,CAAC,CAAC;;EAEN;EACA,CAACA,UAAU,CAACuB,OAAO,IAAI,EAAE,EACpBoB,MAAM,CAAC,CAACC,UAAoB,EAAEC,YAAY,KAAK;IAC5C,IAAItF,oBAAoB,CAACsF,YAAY,CAAC,EAAE;MACpCzF,aAAa,CAACwF,UAAU,EAAEC,YAAY,CAAC;IAC3C,CAAC,MAAM;MACHD,UAAU,CAACE,IAAI,CAACD,YAAY,CAAC;IACjC;IACA,OAAOD,UAAU;EACrB,CAAC,EAAE,EAAE,CAAC,CACLP,MAAM,CAAC,CAACC,IAAI,EAAEC,GAAG,EAAEC,GAAG,KAAKA,GAAG,CAACC,OAAO,CAACH,IAAI,CAAC,KAAKC,GAAG,CAAC,CAAC;EAAA,CACtDH,GAAG,CAACW,SAAS,IAAI;IACd,IAAM/B,QAAQ,GAAGH,yBAAyB,CAACkC,SAAS,CAAC,CAAC,CAAC;IACvD,IAAMzE,SAAS,GAAGhB,WAAW,CAAC0C,UAAU,EAAEgB,QAAQ,CAAC,CAAC,CAAC;IACrD,IAAI,CAAC1C,SAAS,IAAI,OAAOA,SAAS,KAAK,QAAQ,EAAE;MAC7C,MAAMrB,UAAU,CAAC,MAAM,EAAE;QACrByE,KAAK,EAAEqB,SAAS;QAChB9C,MAAM,EAAED;MACZ,CAAC,CAAC;IACN;IACA,OAAO;MAAE+C,SAAS;MAAEzE;IAAU,CAAC;EACnC,CAAC,CAAC,CACD+D,MAAM,CAACX,KAAK,IACTA,KAAK,CAACpD,SAAS,CAACU,IAAI,KAAK,QAAQ,IACjC0C,KAAK,CAACpD,SAAS,CAACU,IAAI,KAAK,SAAS,IAClC0C,KAAK,CAACpD,SAAS,CAACU,IAAI,KAAK,QAAQ,IACjC0C,KAAK,CAACpD,SAAS,CAACU,IAAI,KAAK,SAC7B,CAAC,CACAW,OAAO,CAAC+B,KAAK,IAAI;IACd,MAAMzE,UAAU,CAAC,MAAM,EAAE;MACrBoD,GAAG,EAAEqB,KAAK,CAACqB,SAAS;MACpB/D,IAAI,EAAE0C,KAAK,CAACpD,SAAS,CAACU,IAAI;MAC1BiB,MAAM,EAAED;IACZ,CAAC,CAAC;EACN,CAAC,CAAC;;EAGN;AACJ;AACA;AACA;AACA;AACA;EACIpB,MAAM,CAACc,IAAI,CAACrC,aAAa,CAAC2C,UAAU,CAAC,CAAC,CACjCoC,GAAG,CAAC/B,GAAG,IAAI;IACR;IACA,IAAM3B,KAAK,GAAG2
B,GAAG,CAAC3B,KAAK,CAAC,GAAG,CAAC;IAC5BA,KAAK,CAACC,GAAG,CAAC,CAAC,CAAC,CAAC;IACb,OAAOD,KAAK,CAACwD,IAAI,CAAC,GAAG,CAAC;EAC1B,CAAC,CAAC,CACDG,MAAM,CAAChC,GAAG,IAAIA,GAAG,KAAK,EAAE,IAAIA,GAAG,KAAK,aAAa,CAAC,CAClDgC,MAAM,CAAC,CAACC,IAAI,EAAEC,GAAG,EAAEC,GAAG,KAAKA,GAAG,CAACC,OAAO,CAACH,IAAI,CAAC,KAAKC,GAAG,CAAC,CAAC;EAAA,CACtDF,MAAM,CAAChC,GAAG,IAAI;IACX;IACA,IAAMiB,KAAK,GAAGhE,WAAW,CAAC0C,UAAU,EAAEK,GAAG,CAAC;IAC1C,OAAOiB,KAAK,IAAI,CAAC,CAACA,KAAK,CAACG,SAAS;EACrC,CAAC,CAAC,CACD9B,OAAO,CAACU,GAAG,IAAI;IAAE;IACdA,GAAG,GAAGA,GAAG,CAACqC,OAAO,CAAC,aAAa,EAAE,EAAE,CAAC,CAAC,CAAC;IACtCrC,GAAG,GAAGA,GAAG,CAACqC,OAAO,CAAC,iBAAiB,EAAE,GAAG,CAAC,CAAC,CAAC;IAC3C,MAAMzF,UAAU,CAAC,MAAM,EAAE;MACrByE,KAAK,EAAElE,QAAQ,CAAC6C,GAAG,CAAC;MACpBJ,MAAM,EAAED;IACZ,CAAC,CAAC;EACN,CAAC,CAAC;;EAEN;EACA,IAAIA,UAAU,CAACyB,SAAS,EAAE;IACtBzB,UAAU,CAACyB,SAAS,CACf9B,OAAO,CAACqD,QAAQ,IAAI;MACjB;MACA,IAAMhC,QAAQ,GAAGH,yBAAyB,CAACmC,QAAQ,CAAC;MACpD;MACA,IAAM1E,SAAS,GAAGhB,WAAW,CAAC0C,UAAU,EAAEgB,QAAQ,CAAC;MACnD,IAAI,CAAC1C,SAAS,IAAI,OAAOA,SAAS,KAAK,QAAQ,EAAE;QAC7C,MAAMrB,UAAU,CAAC,MAAM,EAAE;UACrBwD,KAAK,EAAEuC,QAAQ;UACf/C,MAAM,EAAED;QACZ,CAAC,CAAC;MACN;IACJ,CAAC,CAAC;EACV;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/dev-mode/entity-properties.js b/dist/esm/plugins/dev-mode/entity-properties.js deleted file mode 100644 index ab51ba940c8..00000000000 --- a/dist/esm/plugins/dev-mode/entity-properties.js +++ /dev/null @@ -1,49 +0,0 @@ -import { RxCollectionBase } from "../../rx-collection.js"; -import { RxDatabaseBase } from "../../rx-database.js"; -import { createRxDocumentConstructor, basePrototype } from "../../rx-document.js"; - -/** - * returns all possible properties of a RxCollection-instance - */ -var _rxCollectionProperties; -export function rxCollectionProperties() { - if (!_rxCollectionProperties) { - var pseudoInstance = new RxCollectionBase(); - var ownProperties = Object.getOwnPropertyNames(pseudoInstance); - var prototypeProperties = 
Object.getOwnPropertyNames(Object.getPrototypeOf(pseudoInstance)); - _rxCollectionProperties = [...ownProperties, ...prototypeProperties]; - } - return _rxCollectionProperties; -} - -/** - * returns all possible properties of a RxDatabase-instance - */ -var _rxDatabaseProperties; -export function rxDatabaseProperties() { - if (!_rxDatabaseProperties) { - var pseudoInstance = new RxDatabaseBase('pseudoInstance', 'memory'); - var ownProperties = Object.getOwnPropertyNames(pseudoInstance); - var prototypeProperties = Object.getOwnPropertyNames(Object.getPrototypeOf(pseudoInstance)); - _rxDatabaseProperties = [...ownProperties, ...prototypeProperties]; - pseudoInstance.destroy(); - } - return _rxDatabaseProperties; -} - -/** - * returns all possible properties of a RxDocument - */ -var pseudoConstructor = createRxDocumentConstructor(basePrototype); -var pseudoRxDocument = new pseudoConstructor(); -var _rxDocumentProperties; -export function rxDocumentProperties() { - if (!_rxDocumentProperties) { - var reserved = ['deleted', 'synced']; - var ownProperties = Object.getOwnPropertyNames(pseudoRxDocument); - var prototypeProperties = Object.getOwnPropertyNames(basePrototype); - _rxDocumentProperties = [...ownProperties, ...prototypeProperties, ...reserved]; - } - return _rxDocumentProperties; -} -//# sourceMappingURL=entity-properties.js.map \ No newline at end of file diff --git a/dist/esm/plugins/dev-mode/entity-properties.js.map b/dist/esm/plugins/dev-mode/entity-properties.js.map deleted file mode 100644 index a9de55da5ad..00000000000 --- a/dist/esm/plugins/dev-mode/entity-properties.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"entity-properties.js","names":["RxCollectionBase","RxDatabaseBase","createRxDocumentConstructor","basePrototype","_rxCollectionProperties","rxCollectionProperties","pseudoInstance","ownProperties","Object","getOwnPropertyNames","prototypeProperties","getPrototypeOf","_rxDatabaseProperties","rxDatabaseProperties","destroy","pseudoConstructor","pseudoRxDocument","_rxDocumentProperties","rxDocumentProperties","reserved"],"sources":["../../../../src/plugins/dev-mode/entity-properties.ts"],"sourcesContent":["import { RxCollectionBase } from '../../rx-collection.ts';\nimport { RxDatabaseBase } from '../../rx-database.ts';\nimport { createRxDocumentConstructor, basePrototype } from '../../rx-document.ts';\n\n/**\n * returns all possible properties of a RxCollection-instance\n */\nlet _rxCollectionProperties: string[];\nexport function rxCollectionProperties(): string[] {\n if (!_rxCollectionProperties) {\n const pseudoInstance = new (RxCollectionBase as any)();\n const ownProperties = Object.getOwnPropertyNames(pseudoInstance);\n const prototypeProperties = Object.getOwnPropertyNames(\n Object.getPrototypeOf(pseudoInstance)\n );\n _rxCollectionProperties = [...ownProperties, ...prototypeProperties];\n }\n return _rxCollectionProperties;\n}\n\n\n/**\n * returns all possible properties of a RxDatabase-instance\n */\nlet _rxDatabaseProperties: string[];\nexport function rxDatabaseProperties(): string[] {\n if (!_rxDatabaseProperties) {\n const pseudoInstance: RxDatabaseBase = new (RxDatabaseBase as any)(\n 'pseudoInstance',\n 'memory'\n );\n const ownProperties = Object.getOwnPropertyNames(pseudoInstance);\n const prototypeProperties = Object.getOwnPropertyNames(\n Object.getPrototypeOf(pseudoInstance)\n );\n _rxDatabaseProperties = [...ownProperties, ...prototypeProperties];\n pseudoInstance.destroy();\n }\n return _rxDatabaseProperties;\n}\n\n/**\n * returns all possible properties of a RxDocument\n */\nconst pseudoConstructor = 
createRxDocumentConstructor(basePrototype);\nconst pseudoRxDocument = new (pseudoConstructor as any)();\nlet _rxDocumentProperties: string[];\nexport function rxDocumentProperties(): string[] {\n if (!_rxDocumentProperties) {\n const reserved = ['deleted', 'synced'];\n const ownProperties = Object.getOwnPropertyNames(pseudoRxDocument);\n const prototypeProperties = Object.getOwnPropertyNames(basePrototype);\n _rxDocumentProperties = [...ownProperties, ...prototypeProperties, ...reserved];\n }\n return _rxDocumentProperties;\n}\n"],"mappings":"AAAA,SAASA,gBAAgB,QAAQ,wBAAwB;AACzD,SAASC,cAAc,QAAQ,sBAAsB;AACrD,SAASC,2BAA2B,EAAEC,aAAa,QAAQ,sBAAsB;;AAEjF;AACA;AACA;AACA,IAAIC,uBAAiC;AACrC,OAAO,SAASC,sBAAsBA,CAAA,EAAa;EAC/C,IAAI,CAACD,uBAAuB,EAAE;IAC1B,IAAME,cAAc,GAAG,IAAKN,gBAAgB,CAAS,CAAC;IACtD,IAAMO,aAAa,GAAGC,MAAM,CAACC,mBAAmB,CAACH,cAAc,CAAC;IAChE,IAAMI,mBAAmB,GAAGF,MAAM,CAACC,mBAAmB,CAClDD,MAAM,CAACG,cAAc,CAACL,cAAc,CACxC,CAAC;IACDF,uBAAuB,GAAG,CAAC,GAAGG,aAAa,EAAE,GAAGG,mBAAmB,CAAC;EACxE;EACA,OAAON,uBAAuB;AAClC;;AAGA;AACA;AACA;AACA,IAAIQ,qBAA+B;AACnC,OAAO,SAASC,oBAAoBA,CAAA,EAAa;EAC7C,IAAI,CAACD,qBAAqB,EAAE;IACxB,IAAMN,cAAwC,GAAG,IAAKL,cAAc,CAChE,gBAAgB,EAChB,QACJ,CAAC;IACD,IAAMM,aAAa,GAAGC,MAAM,CAACC,mBAAmB,CAACH,cAAc,CAAC;IAChE,IAAMI,mBAAmB,GAAGF,MAAM,CAACC,mBAAmB,CAClDD,MAAM,CAACG,cAAc,CAACL,cAAc,CACxC,CAAC;IACDM,qBAAqB,GAAG,CAAC,GAAGL,aAAa,EAAE,GAAGG,mBAAmB,CAAC;IAClEJ,cAAc,CAACQ,OAAO,CAAC,CAAC;EAC5B;EACA,OAAOF,qBAAqB;AAChC;;AAEA;AACA;AACA;AACA,IAAMG,iBAAiB,GAAGb,2BAA2B,CAACC,aAAa,CAAC;AACpE,IAAMa,gBAAgB,GAAG,IAAKD,iBAAiB,CAAS,CAAC;AACzD,IAAIE,qBAA+B;AACnC,OAAO,SAASC,oBAAoBA,CAAA,EAAa;EAC7C,IAAI,CAACD,qBAAqB,EAAE;IACxB,IAAME,QAAQ,GAAG,CAAC,SAAS,EAAE,QAAQ,CAAC;IACtC,IAAMZ,aAAa,GAAGC,MAAM,CAACC,mBAAmB,CAACO,gBAAgB,CAAC;IAClE,IAAMN,mBAAmB,GAAGF,MAAM,CAACC,mBAAmB,CAACN,aAAa,CAAC;IACrEc,qBAAqB,GAAG,CAAC,GAAGV,aAAa,EAAE,GAAGG,mBAAmB,EAAE,GAAGS,QAAQ,CAAC;EACnF;EACA,OAAOF,qBAAqB;AAChC","ignoreList":[]} \ No newline at end of file diff --git 
a/dist/esm/plugins/dev-mode/error-messages.js b/dist/esm/plugins/dev-mode/error-messages.js deleted file mode 100644 index dd7c34c8005..00000000000 --- a/dist/esm/plugins/dev-mode/error-messages.js +++ /dev/null @@ -1,231 +0,0 @@ -/** - * this plugin adds the error-messages - * without it, only error-codes will be shown - * This is mainly because error-string are hard to compress and we need a smaller build - */ - -export var ERROR_MESSAGES = { - // util.js / config - UT1: 'given name is no string or empty', - UT2: "collection- and database-names must match the regex to be compatible with couchdb databases.\n See https://neighbourhood.ie/blog/2020/10/13/everything-you-need-to-know-about-couchdb-database-names/\n info: if your database-name specifies a folder, the name must contain the slash-char '/' or '\\'", - UT3: 'replication-direction must either be push or pull or both. But not none', - UT4: 'given leveldown is no valid adapter', - UT5: 'keyCompression is set to true in the schema but no key-compression handler is used in the storage', - UT6: 'schema contains encrypted fields but no encryption handler is used in the storage', - UT7: 'attachments.compression is enabled but no attachment-compression plugin is used', - // plugins - PL1: 'Given plugin is not RxDB plugin.', - // removed in 14.0.0 - PouchDB RxStorage was removed - PL2: 'You tried importing a RxDB plugin to pouchdb. 
Use addRxPlugin() instead.', - PL3: 'A plugin with the same name was already added but it was not the exact same JavaScript object', - // pouch-db.js - // removed in 12.0.0 - P1: 'PouchDB.getBatch: limit must be > 2', - P2: 'bulkWrite() cannot be called with an empty array', - // removed in 12.0.0 - P3: 'bulkAddRevisions cannot be called with an empty array', - - // rx-query - QU1: 'RxQuery._execOverDatabase(): op not known', - // removed in 9.0.0 - QU2: 'limit() must get a number', - // removed in 9.0.0 - QU3: 'skip() must get a number', - QU4: 'RxQuery.regex(): You cannot use .regex() on the primary field', - QU5: 'RxQuery.sort(): does not work because key is not defined in the schema', - QU6: 'RxQuery.limit(): cannot be called on .findOne()', - // removed in 12.0.0 (should by ensured by the typings) - QU7: 'query must be an object', - // removed in 12.0.0 (should by ensured by the typings) - QU8: 'query cannot be an array', - QU9: 'throwIfMissing can only be used in findOne queries', - QU10: 'result empty and throwIfMissing: true', - QU11: 'RxQuery: no valid query params given', - QU12: 'Given index is not in schema', - QU13: 'A top level field of the query is not included in the schema', - QU14: 'Running a count() query in slow mode is now allowed. Either run a count() query with a selector that fully matches an index ' + 'or set allowSlowCount=true when calling the createRxDatabase', - QU15: 'For count queries it is not allowed to use skip or limit', - QU16: '$regex queries must be defined by a string, not an RegExp instance. ' + 'This is because RegExp objects cannot be JSON stringified and also they are mutable which would be dangerous', - // mquery.js - MQ1: 'path must be a string or object', - MQ2: 'Invalid argument', - MQ3: 'Invalid sort() argument. Must be a string, object, or array', - MQ4: 'Invalid argument. 
Expected instanceof mquery or plain object', - MQ5: 'method must be used after where() when called with these arguments', - MQ6: 'Can\'t mix sort syntaxes. Use either array or object | .sort([[\'field\', 1], [\'test\', -1]]) | .sort({ field: 1, test: -1 })', - MQ7: 'Invalid sort value', - MQ8: 'Can\'t mix sort syntaxes. Use either array or object', - // rx-database - DB1: 'RxDocument.prepare(): another instance on this adapter has a different password', - DB2: 'RxDatabase.addCollections(): collection-names cannot start with underscore _', - DB3: 'RxDatabase.addCollections(): collection already exists. use myDatabase[collectionName] to get it', - DB4: 'RxDatabase.addCollections(): schema is missing', - DB5: 'RxDatabase.addCollections(): collection-name not allowed', - DB6: 'RxDatabase.addCollections(): another instance created this collection with a different schema. Read this https://rxdb.info/questions-answers.html?console=qa#cant-change-the-schema ', - // removed in 13.0.0 (now part of the encryption plugin) DB7: 'RxDatabase.addCollections(): schema encrypted but no password given', - DB8: 'createRxDatabase(): A RxDatabase with the same name and adapter already exists.\n' + 'Make sure to use this combination only once or set ignoreDuplicate to true if you do this intentional-\n' + 'This often happens in react projects with hot reload that reloads the code without reloading the process.', - // removed in 14.0.0 - PouchDB RxStorage is removed - DB9: 'createRxDatabase(): Adapter not added. Use addPouchPlugin(require(\'pouchdb-adapter-[adaptername]\'));', - // removed in 14.0.0 - PouchDB RxStorage is removed DB10: 'createRxDatabase(): To use leveldown-adapters, you have to add the leveldb-plugin. 
Use addPouchPlugin(require(\'pouchdb-adapter-leveldb\'));', - DB11: 'createRxDatabase(): Invalid db-name, folder-paths must not have an ending slash', - DB12: 'RxDatabase.addCollections(): could not write to internal store', - DB13: 'createRxDatabase(): Invalid db-name or collection name, name contains the dollar sign', - DB14: 'no custom reactivity factory added on database creation', - // rx-collection - COL1: 'RxDocument.insert() You cannot insert an existing document', - COL2: 'RxCollection.insert() fieldName ._id can only be used as primaryKey', - COL3: 'RxCollection.upsert() does not work without primary', - COL4: 'RxCollection.incrementalUpsert() does not work without primary', - COL5: 'RxCollection.find() if you want to search by _id, use .findOne(_id)', - COL6: 'RxCollection.findOne() needs a queryObject or string', - COL7: 'hook must be a function', - COL8: 'hooks-when not known', - COL9: 'RxCollection.addHook() hook-name not known', - COL10: 'RxCollection .postCreate-hooks cannot be async', - COL11: 'migrationStrategies must be an object', - COL12: 'A migrationStrategy is missing or too much', - COL13: 'migrationStrategy must be a function', - COL14: 'given static method-name is not a string', - COL15: 'static method-names cannot start with underscore _', - COL16: 'given static method is not a function', - COL17: 'RxCollection.ORM: statics-name not allowed', - COL18: 'collection-method not allowed because fieldname is in the schema', - // removed in 14.0.0, use CONFLICT instead - COL19: 'Document update conflict. When changing a document you must work on the previous revision', - COL20: 'Storage write error', - COL21: 'The RxCollection is destroyed or removed already, either from this JavaScript realm or from another, like a browser tab', - CONFLICT: 'Document update conflict. 
When changing a document you must work on the previous revision', - // rx-document.js - DOC1: 'RxDocument.get$ cannot get observable of in-array fields because order cannot be guessed', - DOC2: 'cannot observe primary path', - DOC3: 'final fields cannot be observed', - DOC4: 'RxDocument.get$ cannot observe a non-existed field', - DOC5: 'RxDocument.populate() cannot populate a non-existed field', - DOC6: 'RxDocument.populate() cannot populate because path has no ref', - DOC7: 'RxDocument.populate() ref-collection not in database', - DOC8: 'RxDocument.set(): primary-key cannot be modified', - DOC9: 'final fields cannot be modified', - DOC10: 'RxDocument.set(): cannot set childpath when rootPath not selected', - DOC11: 'RxDocument.save(): can\'t save deleted document', - // removed in 10.0.0 DOC12: 'RxDocument.save(): error', - DOC13: 'RxDocument.remove(): Document is already deleted', - DOC14: 'RxDocument.destroy() does not exist', - DOC15: 'query cannot be an array', - DOC16: 'Since version 8.0.0 RxDocument.set() can only be called on temporary RxDocuments', - DOC17: 'Since version 8.0.0 RxDocument.save() can only be called on non-temporary documents', - DOC18: 'Document property for composed primary key is missing', - DOC19: 'Value of primary key(s) cannot be changed', - DOC20: 'PrimaryKey missing', - DOC21: 'PrimaryKey must be equal to PrimaryKey.trim(). It cannot start or end with a whitespace', - DOC22: 'PrimaryKey must not contain a linebreak', - DOC23: 'PrimaryKey must not contain a double-quote ["]', - DOC24: 'Given document data could not be structured cloned. This happens if you pass non-plain-json data into it, like a Date() object or a Function. 
' + 'In vue.js this happens if you use ref() on the document data which transforms it into a Proxy object.', - // data-migrator.js - DM1: 'migrate() Migration has already run', - DM2: 'migration of document failed final document does not match final schema', - DM3: 'migration already running', - DM4: 'Migration errored', - DM5: 'Cannot open database state with newer RxDB version. You have to migrate your database state first. See https://rxdb.info/migration-storage.html?console=storage ', - // plugins/attachments.js - AT1: 'to use attachments, please define this in your schema', - // plugins/encryption-crypto-js.js - EN1: 'password is not valid', - EN2: 'validatePassword: min-length of password not complied', - EN3: 'Schema contains encrypted properties but no password is given', - EN4: 'Password not valid', - // plugins/json-dump.js - JD1: 'You must create the collections before you can import their data', - JD2: 'RxCollection.importJSON(): the imported json relies on a different schema', - JD3: 'RxCollection.importJSON(): json.passwordHash does not match the own', - // plugins/leader-election.js - - // plugins/local-documents.js - LD1: 'RxDocument.allAttachments$ can\'t use attachments on local documents', - LD2: 'RxDocument.get(): objPath must be a string', - LD3: 'RxDocument.get$ cannot get observable of in-array fields because order cannot be guessed', - LD4: 'cannot observe primary path', - LD5: 'RxDocument.set() id cannot be modified', - LD6: 'LocalDocument: Function is not usable on local documents', - LD7: 'Local document already exists', - LD8: 'localDocuments not activated. 
Set localDocuments=true on creation, when you want to store local documents on the RxDatabase or RxCollection.', - // plugins/replication.js - RC1: 'Replication: already added', - RC2: 'replicateCouchDB() query must be from the same RxCollection', - // removed in 14.0.0 - PouchDB RxStorage is removed RC3: 'RxCollection.syncCouchDB() Do not use a collection\'s pouchdb as remote, use the collection instead', - RC4: 'RxCouchDBReplicationState.awaitInitialReplication() cannot await initial replication when live: true', - RC5: 'RxCouchDBReplicationState.awaitInitialReplication() cannot await initial replication if multiInstance because the replication might run on another instance', - RC6: 'syncFirestore() serverTimestampField MUST NOT be part of the collections schema and MUST NOT be nested.', - RC7: 'SimplePeer requires to have process.nextTick() polyfilled, see https://rxdb.info/replication-webrtc.html?console=webrtc ', - RC_PULL: 'RxReplication pull handler threw an error - see .errors for more details', - RC_STREAM: 'RxReplication pull stream$ threw an error - see .errors for more details', - RC_PUSH: 'RxReplication push handler threw an error - see .errors for more details', - RC_PUSH_NO_AR: 'RxReplication push handler did not return an array with the conflicts', - RC_WEBRTC_PEER: 'RxReplication WebRTC Peer has error', - RC_COUCHDB_1: 'replicateCouchDB() url must end with a slash like \'https://example.com/mydatabase/\'', - RC_COUCHDB_2: 'replicateCouchDB() did not get valid result with rows.', - RC_OUTDATED: 'Outdated client, update required. Replication was canceled', - RC_UNAUTHORIZED: 'Unauthorized client, update the replicationState.headers to set correct auth data', - RC_FORBIDDEN: 'Client behaves wrong so the replication was canceled. 
Mostly happens if the client tries to write data that it is not allowed to', - // plugins/dev-mode/check-schema.js - SC1: 'fieldnames do not match the regex', - SC2: 'SchemaCheck: name \'item\' reserved for array-fields', - SC3: 'SchemaCheck: fieldname has a ref-array but items-type is not string', - SC4: 'SchemaCheck: fieldname has a ref but is not type string, [string,null] or array', - SC6: 'SchemaCheck: primary can only be defined at top-level', - SC7: 'SchemaCheck: default-values can only be defined at top-level', - SC8: 'SchemaCheck: first level-fields cannot start with underscore _', - SC10: 'SchemaCheck: schema defines ._rev, this will be done automatically', - SC11: 'SchemaCheck: schema needs a number >=0 as version', - // removed in 10.0.0 - SC12: 'SchemaCheck: primary can only be defined once', - SC13: 'SchemaCheck: primary is always index, do not declare it as index', - SC14: 'SchemaCheck: primary is always unique, do not declare it as index', - SC15: 'SchemaCheck: primary cannot be encrypted', - SC16: 'SchemaCheck: primary must have type: string', - SC17: 'SchemaCheck: top-level fieldname is not allowed', - SC18: 'SchemaCheck: indexes must be an array', - SC19: 'SchemaCheck: indexes must contain strings or arrays of strings', - SC20: 'SchemaCheck: indexes.array must contain strings', - SC21: 'SchemaCheck: given index is not defined in schema', - SC22: 'SchemaCheck: given indexKey is not type:string', - SC23: 'SchemaCheck: fieldname is not allowed', - SC24: 'SchemaCheck: required fields must be set via array. 
See https://spacetelescope.github.io/understanding-json-schema/reference/object.html#required', - SC25: 'SchemaCheck: compoundIndexes needs to be specified in the indexes field', - SC26: 'SchemaCheck: indexes needs to be specified at collection schema level', - SC27: 'SchemaCheck: encrypted fields need to be specified at collection schema level', - SC28: 'SchemaCheck: encrypted fields is not defined in the schema', - SC29: 'SchemaCheck: missing object key \'properties\'', - SC30: 'SchemaCheck: primaryKey is required', - SC32: 'SchemaCheck: primary field must have the type string/number/integer', - SC33: 'SchemaCheck: used primary key is not a property in the schema', - SC34: 'Fields of type string that are used in an index, must have set the maxLength attribute in the schema', - SC35: 'Fields of type number/integer that are used in an index, must have set the multipleOf attribute in the schema', - SC36: 'A field of this type cannot be used as index', - SC37: 'Fields of type number that are used in an index, must have set the minimum and maximum attribute in the schema', - SC38: 'Fields of type boolean that are used in an index, must be required in the schema', - SC39: 'The primary key must have the maxLength attribute set', - SC40: '$ref fields in the schema are not allowed. RxDB cannot resolve related schemas because it would have a negative performance impact.' + 'It would have to run http requests on runtime. $ref fields should be resolved during build time.', - SC41: 'minimum, maximum and maxLength values for indexes must be real numbers, not Infinity or -Infinity', - // plugins/dev-mode - // removed in 13.9.0, use PL3 instead - DEV1: 'dev-mode added multiple times', - - // plugins/validate.js - VD1: 'Sub-schema not found, does the schemaPath exists in your schema?', - VD2: 'object does not match schema', - // plugins/in-memory.js - // removed in 14.0.0 - PouchDB RxStorage is removed IM1: 'InMemory: Memory-Adapter must be added. 
Use addPouchPlugin(require(\'pouchdb-adapter-memory\'));', - // removed in 14.0.0 - PouchDB RxStorage is removed IM2: 'inMemoryCollection.sync(): Do not replicate with the in-memory instance. Replicate with the parent instead', - - // plugins/server.js - S1: 'You cannot create collections after calling RxDatabase.server()', - // plugins/replication-graphql.js - GQL1: 'GraphQL replication: cannot find sub schema by key', - // removed in 13.0.0, use RC_PULL instead - GQL2: 'GraphQL replication: unknown errors occurred in replication pull - see innerErrors for more details', - GQL3: 'GraphQL replication: pull returns more documents then batchSize', - // removed in 13.0.0, use RC_PUSH instead - GQL4: 'GraphQL replication: unknown errors occurred in replication push - see innerErrors for more details', - - // plugins/crdt/ - CRDT1: 'CRDT operations cannot be used because the crdt options are not set in the schema.', - CRDT2: 'RxDocument.incrementalModify() cannot be used when CRDTs are activated.', - CRDT3: 'To use CRDTs you MUST NOT set a conflictHandler because the default CRDT conflict handler must be used', - // plugins/storage-dexie/ - // removed in 15.0.0, added boolean index support to dexie storage - DXE1: 'The dexie.js RxStorage does not support boolean indexes, see https://rxdb.info/rx-storage-dexie.html#boolean-index', - - /** - * Should never be thrown, use this for - * null checks etc. so you do not have to increase the - * build size with error message strings. 
- */ - SNH: 'This should never happen' -}; -//# sourceMappingURL=error-messages.js.map \ No newline at end of file diff --git a/dist/esm/plugins/dev-mode/error-messages.js.map b/dist/esm/plugins/dev-mode/error-messages.js.map deleted file mode 100644 index dd0911b7882..00000000000 --- a/dist/esm/plugins/dev-mode/error-messages.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"error-messages.js","names":["ERROR_MESSAGES","UT1","UT2","UT3","UT4","UT5","UT6","UT7","PL1","PL3","P2","QU1","QU4","QU5","QU6","QU9","QU10","QU11","QU12","QU13","QU14","QU15","QU16","MQ1","MQ2","MQ3","MQ4","MQ5","MQ6","MQ7","MQ8","DB1","DB2","DB3","DB4","DB5","DB6","DB8","DB11","DB12","DB13","DB14","COL1","COL2","COL3","COL4","COL5","COL6","COL7","COL8","COL9","COL10","COL11","COL12","COL13","COL14","COL15","COL16","COL17","COL18","COL20","COL21","CONFLICT","DOC1","DOC2","DOC3","DOC4","DOC5","DOC6","DOC7","DOC8","DOC9","DOC10","DOC11","DOC13","DOC14","DOC15","DOC16","DOC17","DOC18","DOC19","DOC20","DOC21","DOC22","DOC23","DOC24","DM1","DM2","DM3","DM4","DM5","AT1","EN1","EN2","EN3","EN4","JD1","JD2","JD3","LD1","LD2","LD3","LD4","LD5","LD6","LD7","LD8","RC1","RC2","RC4","RC5","RC6","RC7","RC_PULL","RC_STREAM","RC_PUSH","RC_PUSH_NO_AR","RC_WEBRTC_PEER","RC_COUCHDB_1","RC_COUCHDB_2","RC_OUTDATED","RC_UNAUTHORIZED","RC_FORBIDDEN","SC1","SC2","SC3","SC4","SC6","SC7","SC8","SC10","SC11","SC13","SC14","SC15","SC16","SC17","SC18","SC19","SC20","SC21","SC22","SC23","SC24","SC25","SC26","SC27","SC28","SC29","SC30","SC32","SC33","SC34","SC35","SC36","SC37","SC38","SC39","SC40","SC41","VD1","VD2","S1","GQL1","GQL3","CRDT1","CRDT2","CRDT3","SNH"],"sources":["../../../../src/plugins/dev-mode/error-messages.ts"],"sourcesContent":["/**\n * this plugin adds the error-messages\n * without it, only error-codes will be shown\n * This is mainly because error-string are hard to compress and we need a smaller build\n */\n\n\nexport const ERROR_MESSAGES = {\n // util.js / config\n UT1: 'given name is no 
string or empty',\n UT2: `collection- and database-names must match the regex to be compatible with couchdb databases.\n See https://neighbourhood.ie/blog/2020/10/13/everything-you-need-to-know-about-couchdb-database-names/\n info: if your database-name specifies a folder, the name must contain the slash-char '/' or '\\\\'`,\n UT3: 'replication-direction must either be push or pull or both. But not none',\n UT4: 'given leveldown is no valid adapter',\n UT5: 'keyCompression is set to true in the schema but no key-compression handler is used in the storage',\n UT6: 'schema contains encrypted fields but no encryption handler is used in the storage',\n UT7: 'attachments.compression is enabled but no attachment-compression plugin is used',\n\n // plugins\n PL1: 'Given plugin is not RxDB plugin.',\n // removed in 14.0.0 - PouchDB RxStorage was removed - PL2: 'You tried importing a RxDB plugin to pouchdb. Use addRxPlugin() instead.',\n PL3: 'A plugin with the same name was already added but it was not the exact same JavaScript object',\n\n // pouch-db.js\n // removed in 12.0.0 - P1: 'PouchDB.getBatch: limit must be > 2',\n P2: 'bulkWrite() cannot be called with an empty array',\n // removed in 12.0.0 - P3: 'bulkAddRevisions cannot be called with an empty array',\n\n // rx-query\n QU1: 'RxQuery._execOverDatabase(): op not known',\n // removed in 9.0.0 - QU2: 'limit() must get a number',\n // removed in 9.0.0 - QU3: 'skip() must get a number',\n QU4: 'RxQuery.regex(): You cannot use .regex() on the primary field',\n QU5: 'RxQuery.sort(): does not work because key is not defined in the schema',\n QU6: 'RxQuery.limit(): cannot be called on .findOne()',\n // removed in 12.0.0 (should by ensured by the typings) - QU7: 'query must be an object',\n // removed in 12.0.0 (should by ensured by the typings) - QU8: 'query cannot be an array',\n QU9: 'throwIfMissing can only be used in findOne queries',\n QU10: 'result empty and throwIfMissing: true',\n QU11: 'RxQuery: no valid query 
params given',\n QU12: 'Given index is not in schema',\n QU13: 'A top level field of the query is not included in the schema',\n QU14: 'Running a count() query in slow mode is now allowed. Either run a count() query with a selector that fully matches an index ' +\n 'or set allowSlowCount=true when calling the createRxDatabase',\n QU15: 'For count queries it is not allowed to use skip or limit',\n QU16: '$regex queries must be defined by a string, not an RegExp instance. ' +\n 'This is because RegExp objects cannot be JSON stringified and also they are mutable which would be dangerous',\n\n // mquery.js\n MQ1: 'path must be a string or object',\n MQ2: 'Invalid argument',\n MQ3: 'Invalid sort() argument. Must be a string, object, or array',\n MQ4: 'Invalid argument. Expected instanceof mquery or plain object',\n MQ5: 'method must be used after where() when called with these arguments',\n MQ6: 'Can\\'t mix sort syntaxes. Use either array or object | .sort([[\\'field\\', 1], [\\'test\\', -1]]) | .sort({ field: 1, test: -1 })',\n MQ7: 'Invalid sort value',\n MQ8: 'Can\\'t mix sort syntaxes. Use either array or object',\n\n // rx-database\n DB1: 'RxDocument.prepare(): another instance on this adapter has a different password',\n DB2: 'RxDatabase.addCollections(): collection-names cannot start with underscore _',\n DB3: 'RxDatabase.addCollections(): collection already exists. use myDatabase[collectionName] to get it',\n DB4: 'RxDatabase.addCollections(): schema is missing',\n DB5: 'RxDatabase.addCollections(): collection-name not allowed',\n DB6: 'RxDatabase.addCollections(): another instance created this collection with a different schema. 
Read this https://rxdb.info/questions-answers.html?console=qa#cant-change-the-schema ',\n // removed in 13.0.0 (now part of the encryption plugin) DB7: 'RxDatabase.addCollections(): schema encrypted but no password given',\n DB8: 'createRxDatabase(): A RxDatabase with the same name and adapter already exists.\\n' +\n 'Make sure to use this combination only once or set ignoreDuplicate to true if you do this intentional-\\n' +\n 'This often happens in react projects with hot reload that reloads the code without reloading the process.',\n // removed in 14.0.0 - PouchDB RxStorage is removed - DB9: 'createRxDatabase(): Adapter not added. Use addPouchPlugin(require(\\'pouchdb-adapter-[adaptername]\\'));',\n // removed in 14.0.0 - PouchDB RxStorage is removed DB10: 'createRxDatabase(): To use leveldown-adapters, you have to add the leveldb-plugin. Use addPouchPlugin(require(\\'pouchdb-adapter-leveldb\\'));',\n DB11: 'createRxDatabase(): Invalid db-name, folder-paths must not have an ending slash',\n DB12: 'RxDatabase.addCollections(): could not write to internal store',\n DB13: 'createRxDatabase(): Invalid db-name or collection name, name contains the dollar sign',\n DB14: 'no custom reactivity factory added on database creation',\n\n // rx-collection\n COL1: 'RxDocument.insert() You cannot insert an existing document',\n COL2: 'RxCollection.insert() fieldName ._id can only be used as primaryKey',\n COL3: 'RxCollection.upsert() does not work without primary',\n COL4: 'RxCollection.incrementalUpsert() does not work without primary',\n COL5: 'RxCollection.find() if you want to search by _id, use .findOne(_id)',\n COL6: 'RxCollection.findOne() needs a queryObject or string',\n COL7: 'hook must be a function',\n COL8: 'hooks-when not known',\n COL9: 'RxCollection.addHook() hook-name not known',\n COL10: 'RxCollection .postCreate-hooks cannot be async',\n COL11: 'migrationStrategies must be an object',\n COL12: 'A migrationStrategy is missing or too much',\n COL13: 
'migrationStrategy must be a function',\n COL14: 'given static method-name is not a string',\n COL15: 'static method-names cannot start with underscore _',\n COL16: 'given static method is not a function',\n COL17: 'RxCollection.ORM: statics-name not allowed',\n COL18: 'collection-method not allowed because fieldname is in the schema',\n // removed in 14.0.0, use CONFLICT instead - COL19: 'Document update conflict. When changing a document you must work on the previous revision',\n COL20: 'Storage write error',\n COL21: 'The RxCollection is destroyed or removed already, either from this JavaScript realm or from another, like a browser tab',\n CONFLICT: 'Document update conflict. When changing a document you must work on the previous revision',\n\n // rx-document.js\n DOC1: 'RxDocument.get$ cannot get observable of in-array fields because order cannot be guessed',\n DOC2: 'cannot observe primary path',\n DOC3: 'final fields cannot be observed',\n DOC4: 'RxDocument.get$ cannot observe a non-existed field',\n DOC5: 'RxDocument.populate() cannot populate a non-existed field',\n DOC6: 'RxDocument.populate() cannot populate because path has no ref',\n DOC7: 'RxDocument.populate() ref-collection not in database',\n DOC8: 'RxDocument.set(): primary-key cannot be modified',\n DOC9: 'final fields cannot be modified',\n DOC10: 'RxDocument.set(): cannot set childpath when rootPath not selected',\n DOC11: 'RxDocument.save(): can\\'t save deleted document',\n // removed in 10.0.0 DOC12: 'RxDocument.save(): error',\n DOC13: 'RxDocument.remove(): Document is already deleted',\n DOC14: 'RxDocument.destroy() does not exist',\n DOC15: 'query cannot be an array',\n DOC16: 'Since version 8.0.0 RxDocument.set() can only be called on temporary RxDocuments',\n DOC17: 'Since version 8.0.0 RxDocument.save() can only be called on non-temporary documents',\n DOC18: 'Document property for composed primary key is missing',\n DOC19: 'Value of primary key(s) cannot be changed',\n DOC20: 
'PrimaryKey missing',\n DOC21: 'PrimaryKey must be equal to PrimaryKey.trim(). It cannot start or end with a whitespace',\n DOC22: 'PrimaryKey must not contain a linebreak',\n DOC23: 'PrimaryKey must not contain a double-quote [\"]',\n DOC24: 'Given document data could not be structured cloned. This happens if you pass non-plain-json data into it, like a Date() object or a Function. ' +\n 'In vue.js this happens if you use ref() on the document data which transforms it into a Proxy object.',\n\n // data-migrator.js\n DM1: 'migrate() Migration has already run',\n DM2: 'migration of document failed final document does not match final schema',\n DM3: 'migration already running',\n DM4: 'Migration errored',\n DM5: 'Cannot open database state with newer RxDB version. You have to migrate your database state first. See https://rxdb.info/migration-storage.html?console=storage ',\n\n // plugins/attachments.js\n AT1: 'to use attachments, please define this in your schema',\n\n // plugins/encryption-crypto-js.js\n EN1: 'password is not valid',\n EN2: 'validatePassword: min-length of password not complied',\n EN3: 'Schema contains encrypted properties but no password is given',\n EN4: 'Password not valid',\n\n // plugins/json-dump.js\n JD1: 'You must create the collections before you can import their data',\n JD2: 'RxCollection.importJSON(): the imported json relies on a different schema',\n JD3: 'RxCollection.importJSON(): json.passwordHash does not match the own',\n\n // plugins/leader-election.js\n\n // plugins/local-documents.js\n LD1: 'RxDocument.allAttachments$ can\\'t use attachments on local documents',\n LD2: 'RxDocument.get(): objPath must be a string',\n LD3: 'RxDocument.get$ cannot get observable of in-array fields because order cannot be guessed',\n LD4: 'cannot observe primary path',\n LD5: 'RxDocument.set() id cannot be modified',\n LD6: 'LocalDocument: Function is not usable on local documents',\n LD7: 'Local document already exists',\n LD8: 'localDocuments not 
activated. Set localDocuments=true on creation, when you want to store local documents on the RxDatabase or RxCollection.',\n\n // plugins/replication.js\n RC1: 'Replication: already added',\n RC2: 'replicateCouchDB() query must be from the same RxCollection',\n // removed in 14.0.0 - PouchDB RxStorage is removed RC3: 'RxCollection.syncCouchDB() Do not use a collection\\'s pouchdb as remote, use the collection instead',\n RC4: 'RxCouchDBReplicationState.awaitInitialReplication() cannot await initial replication when live: true',\n RC5: 'RxCouchDBReplicationState.awaitInitialReplication() cannot await initial replication if multiInstance because the replication might run on another instance',\n RC6: 'syncFirestore() serverTimestampField MUST NOT be part of the collections schema and MUST NOT be nested.',\n RC7: 'SimplePeer requires to have process.nextTick() polyfilled, see https://rxdb.info/replication-webrtc.html?console=webrtc ',\n RC_PULL: 'RxReplication pull handler threw an error - see .errors for more details',\n RC_STREAM: 'RxReplication pull stream$ threw an error - see .errors for more details',\n RC_PUSH: 'RxReplication push handler threw an error - see .errors for more details',\n RC_PUSH_NO_AR: 'RxReplication push handler did not return an array with the conflicts',\n RC_WEBRTC_PEER: 'RxReplication WebRTC Peer has error',\n RC_COUCHDB_1: 'replicateCouchDB() url must end with a slash like \\'https://example.com/mydatabase/\\'',\n RC_COUCHDB_2: 'replicateCouchDB() did not get valid result with rows.',\n RC_OUTDATED: 'Outdated client, update required. Replication was canceled',\n RC_UNAUTHORIZED: 'Unauthorized client, update the replicationState.headers to set correct auth data',\n RC_FORBIDDEN: 'Client behaves wrong so the replication was canceled. 
Mostly happens if the client tries to write data that it is not allowed to',\n\n // plugins/dev-mode/check-schema.js\n SC1: 'fieldnames do not match the regex',\n SC2: 'SchemaCheck: name \\'item\\' reserved for array-fields',\n SC3: 'SchemaCheck: fieldname has a ref-array but items-type is not string',\n SC4: 'SchemaCheck: fieldname has a ref but is not type string, [string,null] or array',\n SC6: 'SchemaCheck: primary can only be defined at top-level',\n SC7: 'SchemaCheck: default-values can only be defined at top-level',\n SC8: 'SchemaCheck: first level-fields cannot start with underscore _',\n SC10: 'SchemaCheck: schema defines ._rev, this will be done automatically',\n SC11: 'SchemaCheck: schema needs a number >=0 as version',\n // removed in 10.0.0 - SC12: 'SchemaCheck: primary can only be defined once',\n SC13: 'SchemaCheck: primary is always index, do not declare it as index',\n SC14: 'SchemaCheck: primary is always unique, do not declare it as index',\n SC15: 'SchemaCheck: primary cannot be encrypted',\n SC16: 'SchemaCheck: primary must have type: string',\n SC17: 'SchemaCheck: top-level fieldname is not allowed',\n SC18: 'SchemaCheck: indexes must be an array',\n SC19: 'SchemaCheck: indexes must contain strings or arrays of strings',\n SC20: 'SchemaCheck: indexes.array must contain strings',\n SC21: 'SchemaCheck: given index is not defined in schema',\n SC22: 'SchemaCheck: given indexKey is not type:string',\n SC23: 'SchemaCheck: fieldname is not allowed',\n SC24: 'SchemaCheck: required fields must be set via array. 
See https://spacetelescope.github.io/understanding-json-schema/reference/object.html#required',\n SC25: 'SchemaCheck: compoundIndexes needs to be specified in the indexes field',\n SC26: 'SchemaCheck: indexes needs to be specified at collection schema level',\n SC27: 'SchemaCheck: encrypted fields need to be specified at collection schema level',\n SC28: 'SchemaCheck: encrypted fields is not defined in the schema',\n SC29: 'SchemaCheck: missing object key \\'properties\\'',\n SC30: 'SchemaCheck: primaryKey is required',\n SC32: 'SchemaCheck: primary field must have the type string/number/integer',\n SC33: 'SchemaCheck: used primary key is not a property in the schema',\n SC34: 'Fields of type string that are used in an index, must have set the maxLength attribute in the schema',\n SC35: 'Fields of type number/integer that are used in an index, must have set the multipleOf attribute in the schema',\n SC36: 'A field of this type cannot be used as index',\n SC37: 'Fields of type number that are used in an index, must have set the minimum and maximum attribute in the schema',\n SC38: 'Fields of type boolean that are used in an index, must be required in the schema',\n SC39: 'The primary key must have the maxLength attribute set',\n SC40: '$ref fields in the schema are not allowed. RxDB cannot resolve related schemas because it would have a negative performance impact.' +\n 'It would have to run http requests on runtime. $ref fields should be resolved during build time.',\n SC41: 'minimum, maximum and maxLength values for indexes must be real numbers, not Infinity or -Infinity',\n\n // plugins/dev-mode\n // removed in 13.9.0, use PL3 instead - DEV1: 'dev-mode added multiple times',\n\n // plugins/validate.js\n VD1: 'Sub-schema not found, does the schemaPath exists in your schema?',\n VD2: 'object does not match schema',\n\n // plugins/in-memory.js\n // removed in 14.0.0 - PouchDB RxStorage is removed IM1: 'InMemory: Memory-Adapter must be added. 
Use addPouchPlugin(require(\\'pouchdb-adapter-memory\\'));',\n // removed in 14.0.0 - PouchDB RxStorage is removed IM2: 'inMemoryCollection.sync(): Do not replicate with the in-memory instance. Replicate with the parent instead',\n\n // plugins/server.js\n S1: 'You cannot create collections after calling RxDatabase.server()',\n\n // plugins/replication-graphql.js\n GQL1: 'GraphQL replication: cannot find sub schema by key',\n // removed in 13.0.0, use RC_PULL instead - GQL2: 'GraphQL replication: unknown errors occurred in replication pull - see innerErrors for more details',\n GQL3: 'GraphQL replication: pull returns more documents then batchSize',\n // removed in 13.0.0, use RC_PUSH instead - GQL4: 'GraphQL replication: unknown errors occurred in replication push - see innerErrors for more details',\n\n // plugins/crdt/\n CRDT1: 'CRDT operations cannot be used because the crdt options are not set in the schema.',\n CRDT2: 'RxDocument.incrementalModify() cannot be used when CRDTs are activated.',\n CRDT3: 'To use CRDTs you MUST NOT set a conflictHandler because the default CRDT conflict handler must be used',\n\n // plugins/storage-dexie/\n // removed in 15.0.0, added boolean index support to dexie storage - DXE1: 'The dexie.js RxStorage does not support boolean indexes, see https://rxdb.info/rx-storage-dexie.html#boolean-index',\n\n /**\n * Should never be thrown, use this for\n * null checks etc. 
so you do not have to increase the\n * build size with error message strings.\n */\n SNH: 'This should never happen'\n};\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;;AAGA,OAAO,IAAMA,cAAc,GAAG;EAC1B;EACAC,GAAG,EAAE,kCAAkC;EACvCC,GAAG,kTAE8F;EACjGC,GAAG,EAAE,yEAAyE;EAC9EC,GAAG,EAAE,qCAAqC;EAC1CC,GAAG,EAAE,mGAAmG;EACxGC,GAAG,EAAE,mFAAmF;EACxFC,GAAG,EAAE,iFAAiF;EAEtF;EACAC,GAAG,EAAE,kCAAkC;EACvC;EACAC,GAAG,EAAE,+FAA+F;EAEpG;EACA;EACAC,EAAE,EAAE,kDAAkD;EACtD;;EAEA;EACAC,GAAG,EAAE,2CAA2C;EAChD;EACA;EACAC,GAAG,EAAE,+DAA+D;EACpEC,GAAG,EAAE,wEAAwE;EAC7EC,GAAG,EAAE,iDAAiD;EACtD;EACA;EACAC,GAAG,EAAE,oDAAoD;EACzDC,IAAI,EAAE,uCAAuC;EAC7CC,IAAI,EAAE,sCAAsC;EAC5CC,IAAI,EAAE,8BAA8B;EACpCC,IAAI,EAAE,8DAA8D;EACpEC,IAAI,EAAE,8HAA8H,GAChI,8DAA8D;EAClEC,IAAI,EAAE,0DAA0D;EAChEC,IAAI,EAAE,sEAAsE,GACxE,8GAA8G;EAElH;EACAC,GAAG,EAAE,iCAAiC;EACtCC,GAAG,EAAE,kBAAkB;EACvBC,GAAG,EAAE,6DAA6D;EAClEC,GAAG,EAAE,8DAA8D;EACnEC,GAAG,EAAE,oEAAoE;EACzEC,GAAG,EAAE,gIAAgI;EACrIC,GAAG,EAAE,oBAAoB;EACzBC,GAAG,EAAE,sDAAsD;EAE3D;EACAC,GAAG,EAAE,iFAAiF;EACtFC,GAAG,EAAE,8EAA8E;EACnFC,GAAG,EAAE,kGAAkG;EACvGC,GAAG,EAAE,gDAAgD;EACrDC,GAAG,EAAE,0DAA0D;EAC/DC,GAAG,EAAE,sLAAsL;EAC3L;EACAC,GAAG,EAAE,mFAAmF,GACpF,0GAA0G,GAC1G,2GAA2G;EAC/G;EACA;EACAC,IAAI,EAAE,iFAAiF;EACvFC,IAAI,EAAE,gEAAgE;EACtEC,IAAI,EAAE,uFAAuF;EAC7FC,IAAI,EAAE,yDAAyD;EAE/D;EACAC,IAAI,EAAE,4DAA4D;EAClEC,IAAI,EAAE,qEAAqE;EAC3EC,IAAI,EAAE,qDAAqD;EAC3DC,IAAI,EAAE,gEAAgE;EACtEC,IAAI,EAAE,qEAAqE;EAC3EC,IAAI,EAAE,sDAAsD;EAC5DC,IAAI,EAAE,yBAAyB;EAC/BC,IAAI,EAAE,sBAAsB;EAC5BC,IAAI,EAAE,4CAA4C;EAClDC,KAAK,EAAE,gDAAgD;EACvDC,KAAK,EAAE,uCAAuC;EAC9CC,KAAK,EAAE,4CAA4C;EACnDC,KAAK,EAAE,sCAAsC;EAC7CC,KAAK,EAAE,0CAA0C;EACjDC,KAAK,EAAE,oDAAoD;EAC3DC,KAAK,EAAE,uCAAuC;EAC9CC,KAAK,EAAE,4CAA4C;EACnDC,KAAK,EAAE,kEAAkE;EACzE;EACAC,KAAK,EAAE,qBAAqB;EAC5BC,KAAK,EAAE,yHAAyH;EAChIC,QAAQ,EAAE,2FAA2F;EAErG;EACAC,IAAI,EAAE,0FAA0F;EAChGC,IAAI,EAAE,6BAA6B;EACnCC,IAAI,EAAE,iCAAiC;EACvCC,IAAI,EAAE,oDAAoD;EAC1DC,IAAI,EAAE,2DAA2D;EACjEC,IAAI,EAAE,+DAA+D;EACrEC,IAAI,EAAE,sDAAsD;EAC5DC,IAAI,
EAAE,kDAAkD;EACxDC,IAAI,EAAE,iCAAiC;EACvCC,KAAK,EAAE,mEAAmE;EAC1EC,KAAK,EAAE,iDAAiD;EACxD;EACAC,KAAK,EAAE,kDAAkD;EACzDC,KAAK,EAAE,qCAAqC;EAC5CC,KAAK,EAAE,0BAA0B;EACjCC,KAAK,EAAE,kFAAkF;EACzFC,KAAK,EAAE,qFAAqF;EAC5FC,KAAK,EAAE,uDAAuD;EAC9DC,KAAK,EAAE,2CAA2C;EAClDC,KAAK,EAAE,oBAAoB;EAC3BC,KAAK,EAAE,yFAAyF;EAChGC,KAAK,EAAE,yCAAyC;EAChDC,KAAK,EAAE,gDAAgD;EACvDC,KAAK,EAAE,gJAAgJ,GACnJ,uGAAuG;EAE3G;EACAC,GAAG,EAAE,qCAAqC;EAC1CC,GAAG,EAAE,yEAAyE;EAC9EC,GAAG,EAAE,2BAA2B;EAChCC,GAAG,EAAE,mBAAmB;EACxBC,GAAG,EAAE,kKAAkK;EAEvK;EACAC,GAAG,EAAE,uDAAuD;EAE5D;EACAC,GAAG,EAAE,uBAAuB;EAC5BC,GAAG,EAAE,uDAAuD;EAC5DC,GAAG,EAAE,+DAA+D;EACpEC,GAAG,EAAE,oBAAoB;EAEzB;EACAC,GAAG,EAAE,kEAAkE;EACvEC,GAAG,EAAE,2EAA2E;EAChFC,GAAG,EAAE,qEAAqE;EAE1E;;EAEA;EACAC,GAAG,EAAE,sEAAsE;EAC3EC,GAAG,EAAE,4CAA4C;EACjDC,GAAG,EAAE,0FAA0F;EAC/FC,GAAG,EAAE,6BAA6B;EAClCC,GAAG,EAAE,wCAAwC;EAC7CC,GAAG,EAAE,0DAA0D;EAC/DC,GAAG,EAAE,+BAA+B;EACpCC,GAAG,EAAE,8IAA8I;EAEnJ;EACAC,GAAG,EAAE,4BAA4B;EACjCC,GAAG,EAAE,6DAA6D;EAClE;EACAC,GAAG,EAAE,sGAAsG;EAC3GC,GAAG,EAAE,6JAA6J;EAClKC,GAAG,EAAE,yGAAyG;EAC9GC,GAAG,EAAE,0HAA0H;EAC/HC,OAAO,EAAE,0EAA0E;EACnFC,SAAS,EAAE,0EAA0E;EACrFC,OAAO,EAAE,0EAA0E;EACnFC,aAAa,EAAE,uEAAuE;EACtFC,cAAc,EAAE,qCAAqC;EACrDC,YAAY,EAAE,uFAAuF;EACrGC,YAAY,EAAE,wDAAwD;EACtEC,WAAW,EAAE,4DAA4D;EACzEC,eAAe,EAAE,mFAAmF;EACpGC,YAAY,EAAE,kIAAkI;EAEhJ;EACAC,GAAG,EAAE,mCAAmC;EACxCC,GAAG,EAAE,sDAAsD;EAC3DC,GAAG,EAAE,qEAAqE;EAC1EC,GAAG,EAAE,yFAAyF;EAC9FC,GAAG,EAAE,uDAAuD;EAC5DC,GAAG,EAAE,8DAA8D;EACnEC,GAAG,EAAE,gEAAgE;EACrEC,IAAI,EAAE,oEAAoE;EAC1EC,IAAI,EAAE,mDAAmD;EACzD;EACAC,IAAI,EAAE,kEAAkE;EACxEC,IAAI,EAAE,mEAAmE;EACzEC,IAAI,EAAE,0CAA0C;EAChDC,IAAI,EAAE,6CAA6C;EACnDC,IAAI,EAAE,iDAAiD;EACvDC,IAAI,EAAE,uCAAuC;EAC7CC,IAAI,EAAE,gEAAgE;EACtEC,IAAI,EAAE,iDAAiD;EACvDC,IAAI,EAAE,mDAAmD;EACzDC,IAAI,EAAE,gDAAgD;EACtDC,IAAI,EAAE,uCAAuC;EAC7CC,IAAI,EAAE,mJAAmJ;EACzJC,IAAI,EAAE,yEAAyE;EAC/EC,IAAI,EAAE,uEAAuE;EAC7EC,IAAI,EAAE,+EAA+E;EACrFC,IAAI,EAAE,4DAA4D;EAClEC,IAAI,EAAE,gDAAgD;EACtDC,IAAI,EAAE,qCAAqC;EAC3CC,IAAI,EAAE,qEAAqE
;EAC3EC,IAAI,EAAE,+DAA+D;EACrEC,IAAI,EAAE,sGAAsG;EAC5GC,IAAI,EAAE,+GAA+G;EACrHC,IAAI,EAAE,8CAA8C;EACpDC,IAAI,EAAE,gHAAgH;EACtHC,IAAI,EAAE,kFAAkF;EACxFC,IAAI,EAAE,uDAAuD;EAC7DC,IAAI,EAAE,qIAAqI,GACvI,kGAAkG;EACtGC,IAAI,EAAE,mGAAmG;EAEzG;EACA;;EAEA;EACAC,GAAG,EAAE,kEAAkE;EACvEC,GAAG,EAAE,8BAA8B;EAEnC;EACA;EACA;;EAEA;EACAC,EAAE,EAAE,iEAAiE;EAErE;EACAC,IAAI,EAAE,oDAAoD;EAC1D;EACAC,IAAI,EAAE,iEAAiE;EACvE;;EAEA;EACAC,KAAK,EAAE,oFAAoF;EAC3FC,KAAK,EAAE,yEAAyE;EAChFC,KAAK,EAAE,wGAAwG;EAE/G;EACA;;EAEA;AACJ;AACA;AACA;AACA;EACIC,GAAG,EAAE;AACT,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/dev-mode/index.js b/dist/esm/plugins/dev-mode/index.js deleted file mode 100644 index 2fb111708fd..00000000000 --- a/dist/esm/plugins/dev-mode/index.js +++ /dev/null @@ -1,122 +0,0 @@ -import { ERROR_MESSAGES } from "./error-messages.js"; -import { checkSchema } from "./check-schema.js"; -import { checkOrmDocumentMethods, checkOrmMethods } from "./check-orm.js"; -import { checkMigrationStrategies } from "./check-migration-strategies.js"; -import { ensureCollectionNameValid, ensureDatabaseNameIsValid } from "./unallowed-properties.js"; -import { checkMangoQuery, checkQuery } from "./check-query.js"; -import { newRxError } from "../../rx-error.js"; -import { deepFreeze } from "../../plugins/utils/index.js"; -import { checkWriteRows, ensurePrimaryKeyValid } from "./check-document.js"; -export * from "./check-schema.js"; -export * from "./unallowed-properties.js"; -export * from "./check-query.js"; -var showDevModeWarning = true; - -/** - * Suppresses the warning message shown in the console, typically invoked once the developer (hello!) - * has acknowledged it. - */ -export function disableWarnings() { - showDevModeWarning = false; -} - -/** - * Deep freezes and object when in dev-mode. - * Deep-Freezing has the same performance as deep-cloning, so we only do that in dev-mode. 
- * Also we can ensure the readonly state via typescript - * @link https://developer.mozilla.org/de/docs/Web/JavaScript/Reference/Global_Objects/Object/freeze - */ -export function deepFreezeWhenDevMode(obj) { - // direct return if not suitable for deepFreeze() - if (!obj || typeof obj === 'string' || typeof obj === 'number') { - return obj; - } - return deepFreeze(obj); -} -export var DEV_MODE_PLUGIN_NAME = 'dev-mode'; -export var RxDBDevModePlugin = { - name: DEV_MODE_PLUGIN_NAME, - rxdb: true, - init: () => { - if (showDevModeWarning) { - console.warn(['-------------- RxDB dev-mode warning -------------------------------', 'you are seeing this because you use the RxDB dev-mode plugin https://rxdb.info/dev-mode.html?console=dev-mode ', 'This is great in development mode, because it will run many checks to ensure', 'that you use RxDB correct. If you see this in production mode,', 'you did something wrong because the dev-mode plugin will decrease the performance.', '', '🤗 Hint: To get the most out of RxDB, check out the Premium Plugins', 'to get access to faster storages and more professional features: https://rxdb.info/premium?console=dev-mode ', '', 'You can disable this warning by calling disableWarnings() from the dev-mode plugin.', - // '', - // 'Also take part in the RxDB User Survey: https://rxdb.info/survey.html', - '---------------------------------------------------------------------'].join('\n')); - } - }, - overwritable: { - isDevMode() { - return true; - }, - deepFreezeWhenDevMode, - tunnelErrorMessage(code) { - if (!ERROR_MESSAGES[code]) { - console.error('RxDB: Error-Code not known: ' + code); - throw new Error('Error-Code ' + code + ' not known, contact the maintainer'); - } - return ERROR_MESSAGES[code]; - } - }, - hooks: { - preCreateRxSchema: { - after: checkSchema - }, - preCreateRxDatabase: { - after: function (args) { - ensureDatabaseNameIsValid(args); - } - }, - preCreateRxCollection: { - after: function (args) { - 
ensureCollectionNameValid(args); - checkOrmDocumentMethods(args.schema, args.methods); - if (args.name.charAt(0) === '_') { - throw newRxError('DB2', { - name: args.name - }); - } - if (!args.schema) { - throw newRxError('DB4', { - name: args.name, - args - }); - } - } - }, - createRxDocument: { - before: function (doc) { - ensurePrimaryKeyValid(doc.primary, doc.toJSON(true)); - } - }, - preCreateRxQuery: { - after: function (args) { - checkQuery(args); - } - }, - prePrepareQuery: { - after: args => { - checkMangoQuery(args); - } - }, - preStorageWrite: { - before: args => { - checkWriteRows(args.storageInstance, args.rows); - } - }, - createRxCollection: { - after: args => { - // check ORM-methods - checkOrmMethods(args.creator.statics); - checkOrmMethods(args.creator.methods); - checkOrmMethods(args.creator.attachments); - - // check migration strategies - if (args.creator.schema && args.creator.migrationStrategies) { - checkMigrationStrategies(args.creator.schema, args.creator.migrationStrategies); - } - } - } - } -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/dev-mode/index.js.map b/dist/esm/plugins/dev-mode/index.js.map deleted file mode 100644 index 057e08258ea..00000000000 --- a/dist/esm/plugins/dev-mode/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","names":["ERROR_MESSAGES","checkSchema","checkOrmDocumentMethods","checkOrmMethods","checkMigrationStrategies","ensureCollectionNameValid","ensureDatabaseNameIsValid","checkMangoQuery","checkQuery","newRxError","deepFreeze","checkWriteRows","ensurePrimaryKeyValid","showDevModeWarning","disableWarnings","deepFreezeWhenDevMode","obj","DEV_MODE_PLUGIN_NAME","RxDBDevModePlugin","name","rxdb","init","console","warn","join","overwritable","isDevMode","tunnelErrorMessage","code","error","Error","hooks","preCreateRxSchema","after","preCreateRxDatabase","args","preCreateRxCollection","schema","methods","charAt","createRxDocument","before","doc","primary","toJSON","preCreateRxQuery","prePrepareQuery","preStorageWrite","storageInstance","rows","createRxCollection","creator","statics","attachments","migrationStrategies"],"sources":["../../../../src/plugins/dev-mode/index.ts"],"sourcesContent":["import type {\n RxPlugin,\n RxCollectionCreator,\n RxDatabaseCreator,\n RxErrorKey,\n RxDocument\n} from '../../types/index.d.ts';\n\nimport {\n ERROR_MESSAGES\n} from './error-messages.ts';\nimport {\n checkSchema\n} from './check-schema.ts';\nimport {\n checkOrmDocumentMethods,\n checkOrmMethods\n} from './check-orm.ts';\nimport { checkMigrationStrategies } from './check-migration-strategies.ts';\nimport {\n ensureCollectionNameValid,\n ensureDatabaseNameIsValid\n} from './unallowed-properties.ts';\nimport { checkMangoQuery, checkQuery } from './check-query.ts';\nimport { newRxError } from '../../rx-error.ts';\nimport { DeepReadonly } from '../../types/util.ts';\nimport { deepFreeze } from '../../plugins/utils/index.ts';\nimport { checkWriteRows, ensurePrimaryKeyValid } from './check-document.ts';\n\nexport * from './check-schema.ts';\nexport * from './unallowed-properties.ts';\nexport * from './check-query.ts';\n\nlet showDevModeWarning = true;\n\n/**\n * Suppresses the warning message shown in the console, typically invoked once the developer (hello!) 
\n * has acknowledged it.\n */\nexport function disableWarnings() {\n showDevModeWarning = false;\n}\n\n/**\n * Deep freezes and object when in dev-mode.\n * Deep-Freezing has the same performance as deep-cloning, so we only do that in dev-mode.\n * Also we can ensure the readonly state via typescript\n * @link https://developer.mozilla.org/de/docs/Web/JavaScript/Reference/Global_Objects/Object/freeze\n */\nexport function deepFreezeWhenDevMode(obj: T): DeepReadonly {\n // direct return if not suitable for deepFreeze()\n if (\n !obj ||\n typeof obj === 'string' ||\n typeof obj === 'number'\n ) {\n return obj as any;\n }\n\n return deepFreeze(obj) as any;\n}\n\n\nexport const DEV_MODE_PLUGIN_NAME = 'dev-mode';\nexport const RxDBDevModePlugin: RxPlugin = {\n name: DEV_MODE_PLUGIN_NAME,\n rxdb: true,\n init: () => {\n if (showDevModeWarning) {\n console.warn(\n [\n '-------------- RxDB dev-mode warning -------------------------------',\n 'you are seeing this because you use the RxDB dev-mode plugin https://rxdb.info/dev-mode.html?console=dev-mode ',\n 'This is great in development mode, because it will run many checks to ensure',\n 'that you use RxDB correct. 
If you see this in production mode,',\n 'you did something wrong because the dev-mode plugin will decrease the performance.',\n '',\n '🤗 Hint: To get the most out of RxDB, check out the Premium Plugins',\n 'to get access to faster storages and more professional features: https://rxdb.info/premium?console=dev-mode ',\n '',\n 'You can disable this warning by calling disableWarnings() from the dev-mode plugin.',\n // '',\n // 'Also take part in the RxDB User Survey: https://rxdb.info/survey.html',\n '---------------------------------------------------------------------'\n ].join('\\n')\n );\n }\n },\n overwritable: {\n isDevMode() {\n return true;\n },\n deepFreezeWhenDevMode,\n tunnelErrorMessage(code: RxErrorKey) {\n if (!ERROR_MESSAGES[code]) {\n console.error('RxDB: Error-Code not known: ' + code);\n throw new Error('Error-Code ' + code + ' not known, contact the maintainer');\n }\n return ERROR_MESSAGES[code];\n }\n },\n hooks: {\n preCreateRxSchema: {\n after: checkSchema\n },\n preCreateRxDatabase: {\n after: function (args: RxDatabaseCreator) {\n ensureDatabaseNameIsValid(args);\n }\n },\n preCreateRxCollection: {\n after: function (args: RxCollectionCreator & { name: string; }) {\n ensureCollectionNameValid(args);\n checkOrmDocumentMethods(args.schema as any, args.methods);\n if (args.name.charAt(0) === '_') {\n throw newRxError('DB2', {\n name: args.name\n });\n }\n if (!args.schema) {\n throw newRxError('DB4', {\n name: args.name,\n args\n });\n }\n }\n },\n createRxDocument: {\n before: function (doc: RxDocument) {\n ensurePrimaryKeyValid(doc.primary, doc.toJSON(true));\n }\n },\n preCreateRxQuery: {\n after: function (args) {\n checkQuery(args);\n }\n },\n prePrepareQuery: {\n after: (args) => {\n checkMangoQuery(args);\n }\n },\n preStorageWrite: {\n before: (args) => {\n checkWriteRows(args.storageInstance, args.rows);\n }\n },\n createRxCollection: {\n after: (args) => {\n // check ORM-methods\n checkOrmMethods(args.creator.statics);\n 
checkOrmMethods(args.creator.methods);\n checkOrmMethods(args.creator.attachments);\n\n // check migration strategies\n if (args.creator.schema && args.creator.migrationStrategies) {\n checkMigrationStrategies(\n args.creator.schema,\n args.creator.migrationStrategies\n );\n }\n }\n }\n }\n};\n"],"mappings":"AAQA,SACIA,cAAc,QACX,qBAAqB;AAC5B,SACIC,WAAW,QACR,mBAAmB;AAC1B,SACIC,uBAAuB,EACvBC,eAAe,QACZ,gBAAgB;AACvB,SAASC,wBAAwB,QAAQ,iCAAiC;AAC1E,SACIC,yBAAyB,EACzBC,yBAAyB,QACtB,2BAA2B;AAClC,SAASC,eAAe,EAAEC,UAAU,QAAQ,kBAAkB;AAC9D,SAASC,UAAU,QAAQ,mBAAmB;AAE9C,SAASC,UAAU,QAAQ,8BAA8B;AACzD,SAASC,cAAc,EAAEC,qBAAqB,QAAQ,qBAAqB;AAE3E,cAAc,mBAAmB;AACjC,cAAc,2BAA2B;AACzC,cAAc,kBAAkB;AAEhC,IAAIC,kBAAkB,GAAG,IAAI;;AAE7B;AACA;AACA;AACA;AACA,OAAO,SAASC,eAAeA,CAAA,EAAG;EAC9BD,kBAAkB,GAAG,KAAK;AAC9B;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASE,qBAAqBA,CAAIC,GAAM,EAAmB;EAC9D;EACA,IACI,CAACA,GAAG,IACJ,OAAOA,GAAG,KAAK,QAAQ,IACvB,OAAOA,GAAG,KAAK,QAAQ,EACzB;IACE,OAAOA,GAAG;EACd;EAEA,OAAON,UAAU,CAACM,GAAG,CAAC;AAC1B;AAGA,OAAO,IAAMC,oBAAoB,GAAG,UAAU;AAC9C,OAAO,IAAMC,iBAA2B,GAAG;EACvCC,IAAI,EAAEF,oBAAoB;EAC1BG,IAAI,EAAE,IAAI;EACVC,IAAI,EAAEA,CAAA,KAAM;IACR,IAAIR,kBAAkB,EAAE;MACpBS,OAAO,CAACC,IAAI,CACZ,CACI,sEAAsE,EACtE,gHAAgH,EAChH,8EAA8E,EAC9E,gEAAgE,EAChE,oFAAoF,EACpF,EAAE,EACF,qEAAqE,EACrE,8GAA8G,EAC9G,EAAE,EACF,qFAAqF;MACrF;MACA;MACA,uEAAuE,CAC1E,CAACC,IAAI,CAAC,IAAI,CACf,CAAC;IACD;EACJ,CAAC;EACDC,YAAY,EAAE;IACVC,SAASA,CAAA,EAAG;MACR,OAAO,IAAI;IACf,CAAC;IACDX,qBAAqB;IACrBY,kBAAkBA,CAACC,IAAgB,EAAE;MACjC,IAAI,CAAC5B,cAAc,CAAC4B,IAAI,CAAC,EAAE;QACvBN,OAAO,CAACO,KAAK,CAAC,8BAA8B,GAAGD,IAAI,CAAC;QACpD,MAAM,IAAIE,KAAK,CAAC,aAAa,GAAGF,IAAI,GAAG,oCAAoC,CAAC;MAChF;MACA,OAAO5B,cAAc,CAAC4B,IAAI,CAAC;IAC/B;EACJ,CAAC;EACDG,KAAK,EAAE;IACHC,iBAAiB,EAAE;MACfC,KAAK,EAAEhC;IACX,CAAC;IACDiC,mBAAmB,EAAE;MACjBD,KAAK,EAAE,SAAAA,CAAUE,IAAiC,EAAE;QAChD7B,yBAAyB,CAAC6B,IAAI,CAAC;MACnC;IACJ,CAAC;IACDC,qBAAqB,EAAE;MACnBH,KAAK,EAAE,SAAAA,CAAUE,IAAkD,EAAE;QACjE9B,yBAAyB,CAAC8B,IAAI,CAAC;QAC/BjC,uBAAuB,CAACiC,IAA
I,CAACE,MAAM,EAASF,IAAI,CAACG,OAAO,CAAC;QACzD,IAAIH,IAAI,CAAChB,IAAI,CAACoB,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;UAC7B,MAAM9B,UAAU,CAAC,KAAK,EAAE;YACpBU,IAAI,EAAEgB,IAAI,CAAChB;UACf,CAAC,CAAC;QACN;QACA,IAAI,CAACgB,IAAI,CAACE,MAAM,EAAE;UACd,MAAM5B,UAAU,CAAC,KAAK,EAAE;YACpBU,IAAI,EAAEgB,IAAI,CAAChB,IAAI;YACfgB;UACJ,CAAC,CAAC;QACN;MACJ;IACJ,CAAC;IACDK,gBAAgB,EAAE;MACdC,MAAM,EAAE,SAAAA,CAAUC,GAAe,EAAE;QAC/B9B,qBAAqB,CAAC8B,GAAG,CAACC,OAAO,EAAED,GAAG,CAACE,MAAM,CAAC,IAAI,CAAC,CAAC;MACxD;IACJ,CAAC;IACDC,gBAAgB,EAAE;MACdZ,KAAK,EAAE,SAAAA,CAAUE,IAAI,EAAE;QACnB3B,UAAU,CAAC2B,IAAI,CAAC;MACpB;IACJ,CAAC;IACDW,eAAe,EAAE;MACbb,KAAK,EAAGE,IAAI,IAAK;QACb5B,eAAe,CAAC4B,IAAI,CAAC;MACzB;IACJ,CAAC;IACDY,eAAe,EAAE;MACbN,MAAM,EAAGN,IAAI,IAAK;QACdxB,cAAc,CAACwB,IAAI,CAACa,eAAe,EAAEb,IAAI,CAACc,IAAI,CAAC;MACnD;IACJ,CAAC;IACDC,kBAAkB,EAAE;MAChBjB,KAAK,EAAGE,IAAI,IAAK;QACb;QACAhC,eAAe,CAACgC,IAAI,CAACgB,OAAO,CAACC,OAAO,CAAC;QACrCjD,eAAe,CAACgC,IAAI,CAACgB,OAAO,CAACb,OAAO,CAAC;QACrCnC,eAAe,CAACgC,IAAI,CAACgB,OAAO,CAACE,WAAW,CAAC;;QAEzC;QACA,IAAIlB,IAAI,CAACgB,OAAO,CAACd,MAAM,IAAIF,IAAI,CAACgB,OAAO,CAACG,mBAAmB,EAAE;UACzDlD,wBAAwB,CACpB+B,IAAI,CAACgB,OAAO,CAACd,MAAM,EACnBF,IAAI,CAACgB,OAAO,CAACG,mBACjB,CAAC;QACL;MACJ;IACJ;EACJ;AACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/dev-mode/unallowed-properties.js b/dist/esm/plugins/dev-mode/unallowed-properties.js deleted file mode 100644 index 42738fe7a2a..00000000000 --- a/dist/esm/plugins/dev-mode/unallowed-properties.js +++ /dev/null @@ -1,75 +0,0 @@ -import { newRxError, newRxTypeError } from "../../rx-error.js"; -import { rxDatabaseProperties } from "./entity-properties.js"; -import { isFolderPath } from "../../plugins/utils/index.js"; - -/** - * if the name of a collection - * clashes with a property of RxDatabase, - * we get problems so this function prohibits this - */ -export function ensureCollectionNameValid(args) { - if (rxDatabaseProperties().includes(args.name)) { - throw newRxError('DB5', { - name: 
args.name - }); - } - validateDatabaseName(args.name); -} -export function ensureDatabaseNameIsValid(args) { - validateDatabaseName(args.name); - if (args.name.includes('$')) { - throw newRxError('DB13', { - name: args.name - }); - } - - /** - * The server-plugin has problems when a path with and ending slash is given - * So we do not allow this. - * @link https://github.com/pubkey/rxdb/issues/2251 - */ - if (isFolderPath(args.name)) { - if (args.name.endsWith('/') || args.name.endsWith('\\')) { - throw newRxError('DB11', { - name: args.name - }); - } - } -} -var validCouchDBStringRegexStr = '^[a-z][_$a-z0-9\\-]*$'; -var validCouchDBStringRegex = new RegExp(validCouchDBStringRegexStr); - -/** - * Validates that a given string is ok to be used with couchdb-collection-names. - * We only allow these strings as database- or collection names because it ensures - * that you later do not get in trouble when you want to use the database together witch couchdb. - * - * @link https://docs.couchdb.org/en/stable/api/database/common.html - * @link https://neighbourhood.ie/blog/2020/10/13/everything-you-need-to-know-about-couchdb-database-names/ - * @throws {RxError} - */ -export function validateDatabaseName(name) { - if (typeof name !== 'string' || name.length === 0) { - throw newRxTypeError('UT1', { - name - }); - } - - // do not check, if foldername is given - if (isFolderPath(name)) { - return true; - } - if (!name.match(validCouchDBStringRegex) && - /** - * The string ':memory:' is used in the SQLite RxStorage - * to persist data into a memory state. Often used in tests. 
- */ - name !== ':memory:') { - throw newRxError('UT2', { - regex: validCouchDBStringRegexStr, - givenName: name - }); - } - return true; -} -//# sourceMappingURL=unallowed-properties.js.map \ No newline at end of file diff --git a/dist/esm/plugins/dev-mode/unallowed-properties.js.map b/dist/esm/plugins/dev-mode/unallowed-properties.js.map deleted file mode 100644 index 8830fd41a85..00000000000 --- a/dist/esm/plugins/dev-mode/unallowed-properties.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"unallowed-properties.js","names":["newRxError","newRxTypeError","rxDatabaseProperties","isFolderPath","ensureCollectionNameValid","args","includes","name","validateDatabaseName","ensureDatabaseNameIsValid","endsWith","validCouchDBStringRegexStr","validCouchDBStringRegex","RegExp","length","match","regex","givenName"],"sources":["../../../../src/plugins/dev-mode/unallowed-properties.ts"],"sourcesContent":["import type { RxCollectionCreator, RxDatabaseCreator } from '../../types/index.d.ts';\nimport { newRxError, newRxTypeError } from '../../rx-error.ts';\nimport { rxDatabaseProperties } from './entity-properties.ts';\nimport { isFolderPath } from '../../plugins/utils/index.ts';\n\n/**\n * if the name of a collection\n * clashes with a property of RxDatabase,\n * we get problems so this function prohibits this\n */\nexport function ensureCollectionNameValid(\n args: RxCollectionCreator & { name: string; }\n) {\n if (rxDatabaseProperties().includes(args.name)) {\n throw newRxError('DB5', {\n name: args.name\n });\n }\n validateDatabaseName(args.name);\n}\n\nexport function ensureDatabaseNameIsValid(args: RxDatabaseCreator) {\n\n validateDatabaseName(args.name);\n\n if(args.name.includes('$')){\n throw newRxError('DB13', {\n name: args.name,\n });\n }\n\n /**\n * The server-plugin has problems when a path with and ending slash is given\n * So we do not allow this.\n * @link https://github.com/pubkey/rxdb/issues/2251\n */\n if (isFolderPath(args.name)) {\n if 
(args.name.endsWith('/') || args.name.endsWith('\\\\')) {\n throw newRxError('DB11', {\n name: args.name,\n });\n }\n }\n}\n\n\n\nconst validCouchDBStringRegexStr = '^[a-z][_$a-z0-9\\\\-]*$';\nconst validCouchDBStringRegex = new RegExp(validCouchDBStringRegexStr);\n\n/**\n * Validates that a given string is ok to be used with couchdb-collection-names.\n * We only allow these strings as database- or collection names because it ensures\n * that you later do not get in trouble when you want to use the database together witch couchdb.\n *\n * @link https://docs.couchdb.org/en/stable/api/database/common.html\n * @link https://neighbourhood.ie/blog/2020/10/13/everything-you-need-to-know-about-couchdb-database-names/\n * @throws {RxError}\n */\nexport function validateDatabaseName(name: string): true {\n if (\n typeof name !== 'string' ||\n name.length === 0\n ) {\n throw newRxTypeError('UT1', {\n name\n });\n }\n\n\n // do not check, if foldername is given\n if (isFolderPath(name)) {\n return true;\n }\n\n if (\n !name.match(validCouchDBStringRegex) &&\n /**\n * The string ':memory:' is used in the SQLite RxStorage\n * to persist data into a memory state. 
Often used in tests.\n */\n name !== ':memory:'\n ) {\n throw newRxError('UT2', {\n regex: validCouchDBStringRegexStr,\n givenName: name,\n });\n }\n\n return true;\n}\n"],"mappings":"AACA,SAASA,UAAU,EAAEC,cAAc,QAAQ,mBAAmB;AAC9D,SAASC,oBAAoB,QAAQ,wBAAwB;AAC7D,SAASC,YAAY,QAAQ,8BAA8B;;AAE3D;AACA;AACA;AACA;AACA;AACA,OAAO,SAASC,yBAAyBA,CACrCC,IAA6C,EAC/C;EACE,IAAIH,oBAAoB,CAAC,CAAC,CAACI,QAAQ,CAACD,IAAI,CAACE,IAAI,CAAC,EAAE;IAC5C,MAAMP,UAAU,CAAC,KAAK,EAAE;MACpBO,IAAI,EAAEF,IAAI,CAACE;IACf,CAAC,CAAC;EACN;EACAC,oBAAoB,CAACH,IAAI,CAACE,IAAI,CAAC;AACnC;AAEA,OAAO,SAASE,yBAAyBA,CAACJ,IAAiC,EAAE;EAEzEG,oBAAoB,CAACH,IAAI,CAACE,IAAI,CAAC;EAE/B,IAAGF,IAAI,CAACE,IAAI,CAACD,QAAQ,CAAC,GAAG,CAAC,EAAC;IACvB,MAAMN,UAAU,CAAC,MAAM,EAAE;MACrBO,IAAI,EAAEF,IAAI,CAACE;IACf,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA;AACA;EACI,IAAIJ,YAAY,CAACE,IAAI,CAACE,IAAI,CAAC,EAAE;IACzB,IAAIF,IAAI,CAACE,IAAI,CAACG,QAAQ,CAAC,GAAG,CAAC,IAAIL,IAAI,CAACE,IAAI,CAACG,QAAQ,CAAC,IAAI,CAAC,EAAE;MACrD,MAAMV,UAAU,CAAC,MAAM,EAAE;QACrBO,IAAI,EAAEF,IAAI,CAACE;MACf,CAAC,CAAC;IACN;EACJ;AACJ;AAIA,IAAMI,0BAA0B,GAAG,uBAAuB;AAC1D,IAAMC,uBAAuB,GAAG,IAAIC,MAAM,CAACF,0BAA0B,CAAC;;AAEtE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASH,oBAAoBA,CAACD,IAAY,EAAQ;EACrD,IACI,OAAOA,IAAI,KAAK,QAAQ,IACxBA,IAAI,CAACO,MAAM,KAAK,CAAC,EACnB;IACE,MAAMb,cAAc,CAAC,KAAK,EAAE;MACxBM;IACJ,CAAC,CAAC;EACN;;EAGA;EACA,IAAIJ,YAAY,CAACI,IAAI,CAAC,EAAE;IACpB,OAAO,IAAI;EACf;EAEA,IACI,CAACA,IAAI,CAACQ,KAAK,CAACH,uBAAuB,CAAC;EACpC;AACR;AACA;AACA;EACQL,IAAI,KAAK,UAAU,EACrB;IACE,MAAMP,UAAU,CAAC,KAAK,EAAE;MACpBgB,KAAK,EAAEL,0BAA0B;MACjCM,SAAS,EAAEV;IACf,CAAC,CAAC;EACN;EAEA,OAAO,IAAI;AACf","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/electron/electron-helper.js b/dist/esm/plugins/electron/electron-helper.js deleted file mode 100644 index a9476e3b10c..00000000000 --- a/dist/esm/plugins/electron/electron-helper.js +++ /dev/null @@ -1,3 +0,0 @@ -export var IPC_RENDERER_KEY_PREFIX = 'rxdb-ipc-renderer-storage'; -export var 
IPC_RENDERER_TO_MAIN = 'rxdb-renderer-to-main'; -//# sourceMappingURL=electron-helper.js.map \ No newline at end of file diff --git a/dist/esm/plugins/electron/electron-helper.js.map b/dist/esm/plugins/electron/electron-helper.js.map deleted file mode 100644 index 23759c0edc2..00000000000 --- a/dist/esm/plugins/electron/electron-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"electron-helper.js","names":["IPC_RENDERER_KEY_PREFIX","IPC_RENDERER_TO_MAIN"],"sources":["../../../../src/plugins/electron/electron-helper.ts"],"sourcesContent":["export const IPC_RENDERER_KEY_PREFIX = 'rxdb-ipc-renderer-storage';\nexport const IPC_RENDERER_TO_MAIN = 'rxdb-renderer-to-main';\n"],"mappings":"AAAA,OAAO,IAAMA,uBAAuB,GAAG,2BAA2B;AAClE,OAAO,IAAMC,oBAAoB,GAAG,uBAAuB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/electron/index.js b/dist/esm/plugins/electron/index.js deleted file mode 100644 index e1c83b15aa5..00000000000 --- a/dist/esm/plugins/electron/index.js +++ /dev/null @@ -1,4 +0,0 @@ -export * from "./rx-storage-ipc-renderer.js"; -export * from "./rx-storage-ipc-main.js"; -export * from "./electron-helper.js"; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/electron/index.js.map b/dist/esm/plugins/electron/index.js.map deleted file mode 100644 index 17a30de33cb..00000000000 --- a/dist/esm/plugins/electron/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":[],"sources":["../../../../src/plugins/electron/index.ts"],"sourcesContent":["export * from './rx-storage-ipc-renderer.ts';\nexport * from './rx-storage-ipc-main.ts';\nexport * from './electron-helper.ts';\n"],"mappings":"AAAA,cAAc,8BAA8B;AAC5C,cAAc,0BAA0B;AACxC,cAAc,sBAAsB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/electron/rx-storage-ipc-main.js b/dist/esm/plugins/electron/rx-storage-ipc-main.js deleted file mode 100644 index 0bc59e180fe..00000000000 --- 
a/dist/esm/plugins/electron/rx-storage-ipc-main.js +++ /dev/null @@ -1,39 +0,0 @@ -/** - * This file contains everything - * that is supposed to run inside of the electron main process - */ - -import { Subject } from 'rxjs'; -import { IPC_RENDERER_KEY_PREFIX } from "./electron-helper.js"; -import { exposeRxStorageRemote } from "../storage-remote/index.js"; -export function exposeIpcMainRxStorage(args) { - var channelId = [IPC_RENDERER_KEY_PREFIX, args.key].join('|'); - var messages$ = new Subject(); - var openRenderers = new Set(); - args.ipcMain.on(channelId, (event, message) => { - addOpenRenderer(event.sender); - if (message) { - messages$.next(message); - } - }); - var addOpenRenderer = renderer => { - if (openRenderers.has(renderer)) return; - openRenderers.add(renderer); - renderer.on('destroyed', () => openRenderers.delete(renderer)); - }; - var send = msg => { - /** - * TODO we could improve performance - * by only sending the message to the 'correct' sender. - */ - openRenderers.forEach(sender => { - sender.send(channelId, msg); - }); - }; - exposeRxStorageRemote({ - storage: args.storage, - messages$, - send - }); -} -//# sourceMappingURL=rx-storage-ipc-main.js.map \ No newline at end of file diff --git a/dist/esm/plugins/electron/rx-storage-ipc-main.js.map b/dist/esm/plugins/electron/rx-storage-ipc-main.js.map deleted file mode 100644 index c578d88de0d..00000000000 --- a/dist/esm/plugins/electron/rx-storage-ipc-main.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-storage-ipc-main.js","names":["Subject","IPC_RENDERER_KEY_PREFIX","exposeRxStorageRemote","exposeIpcMainRxStorage","args","channelId","key","join","messages$","openRenderers","Set","ipcMain","on","event","message","addOpenRenderer","sender","next","renderer","has","add","delete","send","msg","forEach","storage"],"sources":["../../../../src/plugins/electron/rx-storage-ipc-main.ts"],"sourcesContent":["/**\n * This file contains everything\n * that is supposed to run inside of the 
electron main process\n */\nimport type {\n RxStorage\n} from '../../types/index.d.ts';\nimport { Subject } from 'rxjs';\nimport {\n IPC_RENDERER_KEY_PREFIX\n} from './electron-helper.ts';\nimport {\n exposeRxStorageRemote,\n RxStorageRemoteExposeSettings,\n MessageToRemote\n} from '../storage-remote/index.ts';\nexport function exposeIpcMainRxStorage(\n args: {\n key: string;\n storage: RxStorage;\n ipcMain: any;\n }\n) {\n const channelId = [\n IPC_RENDERER_KEY_PREFIX,\n args.key,\n ].join('|');\n const messages$ = new Subject();\n const openRenderers: Set = new Set();\n args.ipcMain.on(\n channelId,\n (event: any, message: any) => {\n addOpenRenderer(event.sender);\n if (message) {\n messages$.next(message);\n }\n }\n );\n const addOpenRenderer = (renderer: any) => {\n if (openRenderers.has(renderer)) return;\n openRenderers.add(renderer);\n renderer.on('destroyed', () => openRenderers.delete(renderer));\n };\n const send: RxStorageRemoteExposeSettings['send'] = (msg) => {\n /**\n * TODO we could improve performance\n * by only sending the message to the 'correct' sender.\n */\n openRenderers.forEach(sender => {\n sender.send(channelId, msg);\n });\n };\n exposeRxStorageRemote({\n storage: args.storage,\n messages$,\n send\n 
});\n}\n"],"mappings":"AAAA;AACA;AACA;AACA;;AAIA,SAASA,OAAO,QAAQ,MAAM;AAC9B,SACIC,uBAAuB,QACpB,sBAAsB;AAC7B,SACIC,qBAAqB,QAGlB,4BAA4B;AACnC,OAAO,SAASC,sBAAsBA,CAClCC,IAIC,EACH;EACE,IAAMC,SAAS,GAAG,CACdJ,uBAAuB,EACvBG,IAAI,CAACE,GAAG,CACX,CAACC,IAAI,CAAC,GAAG,CAAC;EACX,IAAMC,SAAS,GAAG,IAAIR,OAAO,CAAkB,CAAC;EAChD,IAAMS,aAAuB,GAAG,IAAIC,GAAG,CAAC,CAAC;EACzCN,IAAI,CAACO,OAAO,CAACC,EAAE,CACXP,SAAS,EACT,CAACQ,KAAU,EAAEC,OAAY,KAAK;IAC1BC,eAAe,CAACF,KAAK,CAACG,MAAM,CAAC;IAC7B,IAAIF,OAAO,EAAE;MACTN,SAAS,CAACS,IAAI,CAACH,OAAO,CAAC;IAC3B;EACJ,CACJ,CAAC;EACD,IAAMC,eAAe,GAAIG,QAAa,IAAK;IACvC,IAAIT,aAAa,CAACU,GAAG,CAACD,QAAQ,CAAC,EAAE;IACjCT,aAAa,CAACW,GAAG,CAACF,QAAQ,CAAC;IAC3BA,QAAQ,CAACN,EAAE,CAAC,WAAW,EAAE,MAAMH,aAAa,CAACY,MAAM,CAACH,QAAQ,CAAC,CAAC;EAClE,CAAC;EACD,IAAMI,IAA2C,GAAIC,GAAG,IAAK;IACzD;AACR;AACA;AACA;IACQd,aAAa,CAACe,OAAO,CAACR,MAAM,IAAI;MAC5BA,MAAM,CAACM,IAAI,CAACjB,SAAS,EAAEkB,GAAG,CAAC;IAC/B,CAAC,CAAC;EACN,CAAC;EACDrB,qBAAqB,CAAC;IAClBuB,OAAO,EAAErB,IAAI,CAACqB,OAAO;IACrBjB,SAAS;IACTc;EACJ,CAAC,CAAC;AACN","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/electron/rx-storage-ipc-renderer.js b/dist/esm/plugins/electron/rx-storage-ipc-renderer.js deleted file mode 100644 index 56b6795c897..00000000000 --- a/dist/esm/plugins/electron/rx-storage-ipc-renderer.js +++ /dev/null @@ -1,31 +0,0 @@ -import { Subject } from 'rxjs'; -import { getRxStorageRemote } from "../storage-remote/index.js"; -import { IPC_RENDERER_KEY_PREFIX } from "./electron-helper.js"; -import { PROMISE_RESOLVE_VOID } from "../utils/index.js"; -export function getRxStorageIpcRenderer(settings) { - var channelId = [IPC_RENDERER_KEY_PREFIX, settings.key].join('|'); - var storage = getRxStorageRemote({ - identifier: 'electron-ipc-renderer', - mode: settings.mode, - messageChannelCreator() { - var messages$ = new Subject(); - var listener = (_event, message) => { - messages$.next(message); - }; - settings.ipcRenderer.on(channelId, listener); - 
settings.ipcRenderer.postMessage(channelId, false); - return Promise.resolve({ - messages$, - send(msg) { - settings.ipcRenderer.postMessage(channelId, msg); - }, - close() { - settings.ipcRenderer.removeListener(channelId, listener); - return PROMISE_RESOLVE_VOID; - } - }); - } - }); - return storage; -} -//# sourceMappingURL=rx-storage-ipc-renderer.js.map \ No newline at end of file diff --git a/dist/esm/plugins/electron/rx-storage-ipc-renderer.js.map b/dist/esm/plugins/electron/rx-storage-ipc-renderer.js.map deleted file mode 100644 index 59fd41b0b9d..00000000000 --- a/dist/esm/plugins/electron/rx-storage-ipc-renderer.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-storage-ipc-renderer.js","names":["Subject","getRxStorageRemote","IPC_RENDERER_KEY_PREFIX","PROMISE_RESOLVE_VOID","getRxStorageIpcRenderer","settings","channelId","key","join","storage","identifier","mode","messageChannelCreator","messages$","listener","_event","message","next","ipcRenderer","on","postMessage","Promise","resolve","send","msg","close","removeListener"],"sources":["../../../../src/plugins/electron/rx-storage-ipc-renderer.ts"],"sourcesContent":["import { Subject } from 'rxjs';\nimport {\n getRxStorageRemote,\n RxStorageRemote,\n RxStorageRemoteSettings,\n MessageFromRemote\n} from '../storage-remote/index.ts';\nimport {\n IPC_RENDERER_KEY_PREFIX\n} from './electron-helper.ts';\nimport { PROMISE_RESOLVE_VOID } from '../utils/index.ts';\n\nexport type RxStorageIpcRendererSettings = {\n /**\n * Set the same key on both sides\n * to ensure that messages do not get mixed\n * up when you use more then one storage.\n */\n key: string;\n ipcRenderer: any;\n mode: RxStorageRemoteSettings['mode'];\n};\n\nexport type RxStorageIpcRenderer = RxStorageRemote;\nexport function getRxStorageIpcRenderer(\n settings: RxStorageIpcRendererSettings\n): RxStorageIpcRenderer {\n const channelId = [\n IPC_RENDERER_KEY_PREFIX,\n settings.key\n ].join('|');\n\n const storage = getRxStorageRemote({\n 
identifier: 'electron-ipc-renderer',\n mode: settings.mode,\n messageChannelCreator() {\n const messages$ = new Subject();\n const listener = (_event: any, message: any) => {\n messages$.next(message);\n };\n settings.ipcRenderer.on(channelId, listener);\n settings.ipcRenderer.postMessage(\n channelId,\n false\n );\n return Promise.resolve({\n messages$,\n send(msg) {\n settings.ipcRenderer.postMessage(\n channelId,\n msg\n );\n },\n close() {\n settings.ipcRenderer.removeListener(channelId, listener);\n return PROMISE_RESOLVE_VOID;\n }\n });\n },\n });\n return storage;\n}\n"],"mappings":"AAAA,SAASA,OAAO,QAAQ,MAAM;AAC9B,SACIC,kBAAkB,QAIf,4BAA4B;AACnC,SACIC,uBAAuB,QACpB,sBAAsB;AAC7B,SAASC,oBAAoB,QAAQ,mBAAmB;AAcxD,OAAO,SAASC,uBAAuBA,CACnCC,QAAsC,EAClB;EACpB,IAAMC,SAAS,GAAG,CACdJ,uBAAuB,EACvBG,QAAQ,CAACE,GAAG,CACf,CAACC,IAAI,CAAC,GAAG,CAAC;EAEX,IAAMC,OAAO,GAAGR,kBAAkB,CAAC;IAC/BS,UAAU,EAAE,uBAAuB;IACnCC,IAAI,EAAEN,QAAQ,CAACM,IAAI;IACnBC,qBAAqBA,CAAA,EAAG;MACpB,IAAMC,SAAS,GAAG,IAAIb,OAAO,CAAoB,CAAC;MAClD,IAAMc,QAAQ,GAAGA,CAACC,MAAW,EAAEC,OAAY,KAAK;QAC5CH,SAAS,CAACI,IAAI,CAACD,OAAO,CAAC;MAC3B,CAAC;MACDX,QAAQ,CAACa,WAAW,CAACC,EAAE,CAACb,SAAS,EAAEQ,QAAQ,CAAC;MAC5CT,QAAQ,CAACa,WAAW,CAACE,WAAW,CAC5Bd,SAAS,EACT,KACJ,CAAC;MACD,OAAOe,OAAO,CAACC,OAAO,CAAC;QACnBT,SAAS;QACTU,IAAIA,CAACC,GAAG,EAAE;UACNnB,QAAQ,CAACa,WAAW,CAACE,WAAW,CAC5Bd,SAAS,EACTkB,GACJ,CAAC;QACL,CAAC;QACDC,KAAKA,CAAA,EAAG;UACJpB,QAAQ,CAACa,WAAW,CAACQ,cAAc,CAACpB,SAAS,EAAEQ,QAAQ,CAAC;UACxD,OAAOX,oBAAoB;QAC/B;MACJ,CAAC,CAAC;IACN;EACJ,CAAC,CAAC;EACF,OAAOM,OAAO;AAClB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/encryption-crypto-js/index.js b/dist/esm/plugins/encryption-crypto-js/index.js deleted file mode 100644 index a15ff3103e9..00000000000 --- a/dist/esm/plugins/encryption-crypto-js/index.js +++ /dev/null @@ -1,136 +0,0 @@ -/** - * this plugin adds the encryption-capabilities to rxdb - * It's using crypto-js/aes for password-encryption - * @link https://github.com/brix/crypto-js - 
*/ -import pkg from 'crypto-js'; -var { - AES, - enc: cryptoEnc -} = pkg; -import { wrapRxStorageInstance } from "../../plugin-helpers.js"; -import { newRxError, newRxTypeError } from "../../rx-error.js"; -import { hasEncryption } from "../../rx-storage-helper.js"; -import { b64DecodeUnicode, b64EncodeUnicode, clone, ensureNotFalsy, flatClone, getProperty, setProperty } from "../../plugins/utils/index.js"; -export var MINIMUM_PASSWORD_LENGTH = 8; -export function encryptString(value, password) { - var encrypted = AES.encrypt(value, password); - return encrypted.toString(); -} -export function decryptString(cipherText, password) { - /** - * Trying to decrypt non-strings - * will cause no errors and will be hard to debug. - * So instead we do this check here. - */ - if (typeof cipherText !== 'string') { - throw newRxError('SNH', { - args: { - cipherText - } - }); - } - var decrypted = AES.decrypt(cipherText, password); - var ret = decrypted.toString(cryptoEnc.Utf8); - return ret; -} -export function wrappedKeyEncryptionCryptoJsStorage(args) { - return Object.assign({}, args.storage, { - async createStorageInstance(params) { - if (typeof params.password !== 'undefined') { - validatePassword(params.password); - } - if (!hasEncryption(params.schema)) { - var retInstance = await args.storage.createStorageInstance(params); - return retInstance; - } - if (!params.password) { - throw newRxError('EN3', { - database: params.databaseName, - collection: params.collectionName, - schema: params.schema - }); - } - var password = params.password; - var schemaWithoutEncrypted = clone(params.schema); - delete schemaWithoutEncrypted.encrypted; - if (schemaWithoutEncrypted.attachments) { - schemaWithoutEncrypted.attachments.encrypted = false; - } - var instance = await args.storage.createStorageInstance(Object.assign({}, params, { - schema: schemaWithoutEncrypted - })); - function modifyToStorage(docData) { - docData = cloneWithoutAttachments(docData); - 
ensureNotFalsy(params.schema.encrypted).forEach(path => { - var value = getProperty(docData, path); - if (typeof value === 'undefined') { - return; - } - var stringValue = JSON.stringify(value); - var encrypted = encryptString(stringValue, password); - setProperty(docData, path, encrypted); - }); - - // handle attachments - if (params.schema.attachments && params.schema.attachments.encrypted) { - var newAttachments = {}; - Object.entries(docData._attachments).forEach(([id, attachment]) => { - var useAttachment = flatClone(attachment); - if (useAttachment.data) { - var dataString = useAttachment.data; - useAttachment.data = b64EncodeUnicode(encryptString(dataString, password)); - } - newAttachments[id] = useAttachment; - }); - docData._attachments = newAttachments; - } - return docData; - } - function modifyFromStorage(docData) { - docData = cloneWithoutAttachments(docData); - ensureNotFalsy(params.schema.encrypted).forEach(path => { - var value = getProperty(docData, path); - if (typeof value === 'undefined') { - return; - } - var decrypted = decryptString(value, password); - var decryptedParsed = JSON.parse(decrypted); - setProperty(docData, path, decryptedParsed); - }); - return docData; - } - function modifyAttachmentFromStorage(attachmentData) { - if (params.schema.attachments && params.schema.attachments.encrypted) { - var decrypted = decryptString(b64DecodeUnicode(attachmentData), password); - return decrypted; - } else { - return attachmentData; - } - } - return wrapRxStorageInstance(params.schema, instance, modifyToStorage, modifyFromStorage, modifyAttachmentFromStorage); - } - }); -} -function cloneWithoutAttachments(data) { - var attachments = data._attachments; - data = flatClone(data); - delete data._attachments; - data = clone(data); - data._attachments = attachments; - return data; -} -function validatePassword(password) { - if (typeof password !== 'string') { - throw newRxTypeError('EN1', { - password - }); - } - if (password.length < 
MINIMUM_PASSWORD_LENGTH) { - throw newRxError('EN2', { - minPassLength: MINIMUM_PASSWORD_LENGTH, - password - }); - } -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/encryption-crypto-js/index.js.map b/dist/esm/plugins/encryption-crypto-js/index.js.map deleted file mode 100644 index da1db897e9b..00000000000 --- a/dist/esm/plugins/encryption-crypto-js/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["pkg","AES","enc","cryptoEnc","wrapRxStorageInstance","newRxError","newRxTypeError","hasEncryption","b64DecodeUnicode","b64EncodeUnicode","clone","ensureNotFalsy","flatClone","getProperty","setProperty","MINIMUM_PASSWORD_LENGTH","encryptString","value","password","encrypted","encrypt","toString","decryptString","cipherText","args","decrypted","decrypt","ret","Utf8","wrappedKeyEncryptionCryptoJsStorage","Object","assign","storage","createStorageInstance","params","validatePassword","schema","retInstance","database","databaseName","collection","collectionName","schemaWithoutEncrypted","attachments","instance","modifyToStorage","docData","cloneWithoutAttachments","forEach","path","stringValue","JSON","stringify","newAttachments","entries","_attachments","id","attachment","useAttachment","data","dataString","modifyFromStorage","decryptedParsed","parse","modifyAttachmentFromStorage","attachmentData","length","minPassLength"],"sources":["../../../../src/plugins/encryption-crypto-js/index.ts"],"sourcesContent":["/**\n * this plugin adds the encryption-capabilities to rxdb\n * It's using crypto-js/aes for password-encryption\n * @link https://github.com/brix/crypto-js\n */\nimport pkg from 'crypto-js';\nconst { AES, enc: cryptoEnc } = pkg;\n\nimport { wrapRxStorageInstance } from '../../plugin-helpers.ts';\nimport { newRxError, newRxTypeError } from '../../rx-error.ts';\nimport { hasEncryption } from '../../rx-storage-helper.ts';\nimport type {\n InternalStoreDocType,\n RxAttachmentWriteData,\n 
RxDocumentData,\n RxDocumentWriteData,\n RxJsonSchema,\n RxStorage,\n RxStorageInstanceCreationParams\n} from '../../types/index.d.ts';\nimport {\n b64DecodeUnicode,\n b64EncodeUnicode,\n clone,\n ensureNotFalsy,\n flatClone,\n getProperty,\n setProperty\n} from '../../plugins/utils/index.ts';\n\nexport const MINIMUM_PASSWORD_LENGTH: 8 = 8;\n\n\nexport function encryptString(value: string, password: string): string {\n const encrypted = AES.encrypt(value, password);\n return encrypted.toString();\n}\n\nexport function decryptString(cipherText: string, password: any): string {\n /**\n * Trying to decrypt non-strings\n * will cause no errors and will be hard to debug.\n * So instead we do this check here.\n */\n if (typeof cipherText !== 'string') {\n throw newRxError('SNH', {\n args: {\n cipherText\n }\n });\n }\n\n const decrypted = AES.decrypt(cipherText, password);\n const ret = decrypted.toString(cryptoEnc.Utf8);\n return ret;\n}\n\nexport type InternalStorePasswordDocType = InternalStoreDocType<{\n hash: string;\n}>;\n\nexport function wrappedKeyEncryptionCryptoJsStorage(\n args: {\n storage: RxStorage;\n }\n): RxStorage {\n return Object.assign(\n {},\n args.storage,\n {\n async createStorageInstance(\n params: RxStorageInstanceCreationParams\n ) {\n if (typeof params.password !== 'undefined') {\n validatePassword(params.password as any);\n }\n\n if (!hasEncryption(params.schema)) {\n const retInstance = await args.storage.createStorageInstance(params);\n return retInstance;\n }\n\n if (!params.password) {\n throw newRxError('EN3', {\n database: params.databaseName,\n collection: params.collectionName,\n schema: params.schema\n });\n }\n const password = params.password;\n\n const schemaWithoutEncrypted: RxJsonSchema> = clone(params.schema);\n delete schemaWithoutEncrypted.encrypted;\n if (schemaWithoutEncrypted.attachments) {\n schemaWithoutEncrypted.attachments.encrypted = false;\n }\n\n const instance = await args.storage.createStorageInstance(\n 
Object.assign(\n {},\n params,\n {\n schema: schemaWithoutEncrypted\n }\n )\n );\n\n function modifyToStorage(docData: RxDocumentWriteData) {\n docData = cloneWithoutAttachments(docData);\n ensureNotFalsy(params.schema.encrypted)\n .forEach(path => {\n const value = getProperty(docData, path);\n if (typeof value === 'undefined') {\n return;\n }\n\n const stringValue = JSON.stringify(value);\n const encrypted = encryptString(stringValue, password);\n setProperty(docData, path, encrypted);\n });\n\n // handle attachments\n if (\n params.schema.attachments &&\n params.schema.attachments.encrypted\n ) {\n const newAttachments: typeof docData._attachments = {};\n Object.entries(docData._attachments).forEach(([id, attachment]) => {\n const useAttachment: RxAttachmentWriteData = flatClone(attachment) as any;\n if (useAttachment.data) {\n const dataString = useAttachment.data;\n useAttachment.data = b64EncodeUnicode(encryptString(dataString, password));\n }\n newAttachments[id] = useAttachment;\n });\n docData._attachments = newAttachments;\n }\n return docData;\n }\n function modifyFromStorage(docData: RxDocumentData): Promise> {\n docData = cloneWithoutAttachments(docData);\n ensureNotFalsy(params.schema.encrypted)\n .forEach(path => {\n const value = getProperty(docData, path);\n if (typeof value === 'undefined') {\n return;\n }\n const decrypted = decryptString(value, password);\n const decryptedParsed = JSON.parse(decrypted);\n setProperty(docData, path, decryptedParsed);\n });\n return docData;\n }\n\n function modifyAttachmentFromStorage(attachmentData: string): string {\n if (\n params.schema.attachments &&\n params.schema.attachments.encrypted\n ) {\n const decrypted = decryptString(b64DecodeUnicode(attachmentData), password);\n return decrypted;\n } else {\n return attachmentData;\n }\n }\n\n return wrapRxStorageInstance(\n params.schema,\n instance,\n modifyToStorage,\n modifyFromStorage,\n modifyAttachmentFromStorage\n );\n }\n }\n );\n}\n\n\n\n\n\nfunction 
cloneWithoutAttachments(data: RxDocumentWriteData): RxDocumentData {\n const attachments = data._attachments;\n data = flatClone(data);\n delete (data as any)._attachments;\n data = clone(data);\n data._attachments = attachments;\n return data as any;\n}\n\nfunction validatePassword(password: string) {\n if (typeof password !== 'string') {\n throw newRxTypeError('EN1', {\n password\n });\n }\n if (password.length < MINIMUM_PASSWORD_LENGTH) {\n throw newRxError('EN2', {\n minPassLength: MINIMUM_PASSWORD_LENGTH,\n password\n });\n }\n}\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA,OAAOA,GAAG,MAAM,WAAW;AAC3B,IAAM;EAAEC,GAAG;EAAEC,GAAG,EAAEC;AAAU,CAAC,GAAGH,GAAG;AAEnC,SAASI,qBAAqB,QAAQ,yBAAyB;AAC/D,SAASC,UAAU,EAAEC,cAAc,QAAQ,mBAAmB;AAC9D,SAASC,aAAa,QAAQ,4BAA4B;AAU1D,SACIC,gBAAgB,EAChBC,gBAAgB,EAChBC,KAAK,EACLC,cAAc,EACdC,SAAS,EACTC,WAAW,EACXC,WAAW,QACR,8BAA8B;AAErC,OAAO,IAAMC,uBAA0B,GAAG,CAAC;AAG3C,OAAO,SAASC,aAAaA,CAACC,KAAa,EAAEC,QAAgB,EAAU;EACnE,IAAMC,SAAS,GAAGlB,GAAG,CAACmB,OAAO,CAACH,KAAK,EAAEC,QAAQ,CAAC;EAC9C,OAAOC,SAAS,CAACE,QAAQ,CAAC,CAAC;AAC/B;AAEA,OAAO,SAASC,aAAaA,CAACC,UAAkB,EAAEL,QAAa,EAAU;EACrE;AACJ;AACA;AACA;AACA;EACI,IAAI,OAAOK,UAAU,KAAK,QAAQ,EAAE;IAChC,MAAMlB,UAAU,CAAC,KAAK,EAAE;MACpBmB,IAAI,EAAE;QACFD;MACJ;IACJ,CAAC,CAAC;EACN;EAEA,IAAME,SAAS,GAAGxB,GAAG,CAACyB,OAAO,CAACH,UAAU,EAAEL,QAAQ,CAAC;EACnD,IAAMS,GAAG,GAAGF,SAAS,CAACJ,QAAQ,CAAClB,SAAS,CAACyB,IAAI,CAAC;EAC9C,OAAOD,GAAG;AACd;AAMA,OAAO,SAASE,mCAAmCA,CAC/CL,IAEC,EAC4C;EAC7C,OAAOM,MAAM,CAACC,MAAM,CAChB,CAAC,CAAC,EACFP,IAAI,CAACQ,OAAO,EACZ;IACI,MAAMC,qBAAqBA,CACvBC,MAAuD,EACzD;MACE,IAAI,OAAOA,MAAM,CAAChB,QAAQ,KAAK,WAAW,EAAE;QACxCiB,gBAAgB,CAACD,MAAM,CAAChB,QAAe,CAAC;MAC5C;MAEA,IAAI,CAACX,aAAa,CAAC2B,MAAM,CAACE,MAAM,CAAC,EAAE;QAC/B,IAAMC,WAAW,GAAG,MAAMb,IAAI,CAACQ,OAAO,CAACC,qBAAqB,CAACC,MAAM,CAAC;QACpE,OAAOG,WAAW;MACtB;MAEA,IAAI,CAACH,MAAM,CAAChB,QAAQ,EAAE;QAClB,MAAMb,UAAU,CAAC,KAAK,EAAE;UACpBiC,QAAQ,EAAEJ,MAAM,CAACK,YAAY;UAC7BC,UAAU,EAAEN,MAAM,CAACO,cAAc;UACjCL,MAAM,EAAEF,MAAM,CAACE;QACnB,CAAC,CAAC;MA
CN;MACA,IAAMlB,QAAQ,GAAGgB,MAAM,CAAChB,QAAQ;MAEhC,IAAMwB,sBAA+D,GAAGhC,KAAK,CAACwB,MAAM,CAACE,MAAM,CAAC;MAC5F,OAAOM,sBAAsB,CAACvB,SAAS;MACvC,IAAIuB,sBAAsB,CAACC,WAAW,EAAE;QACpCD,sBAAsB,CAACC,WAAW,CAACxB,SAAS,GAAG,KAAK;MACxD;MAEA,IAAMyB,QAAQ,GAAG,MAAMpB,IAAI,CAACQ,OAAO,CAACC,qBAAqB,CACrDH,MAAM,CAACC,MAAM,CACT,CAAC,CAAC,EACFG,MAAM,EACN;QACIE,MAAM,EAAEM;MACZ,CACJ,CACJ,CAAC;MAED,SAASG,eAAeA,CAACC,OAAuC,EAAE;QAC9DA,OAAO,GAAGC,uBAAuB,CAACD,OAAO,CAAC;QAC1CnC,cAAc,CAACuB,MAAM,CAACE,MAAM,CAACjB,SAAS,CAAC,CAClC6B,OAAO,CAACC,IAAI,IAAI;UACb,IAAMhC,KAAK,GAAGJ,WAAW,CAACiC,OAAO,EAAEG,IAAI,CAAC;UACxC,IAAI,OAAOhC,KAAK,KAAK,WAAW,EAAE;YAC9B;UACJ;UAEA,IAAMiC,WAAW,GAAGC,IAAI,CAACC,SAAS,CAACnC,KAAK,CAAC;UACzC,IAAME,SAAS,GAAGH,aAAa,CAACkC,WAAW,EAAEhC,QAAQ,CAAC;UACtDJ,WAAW,CAACgC,OAAO,EAAEG,IAAI,EAAE9B,SAAS,CAAC;QACzC,CAAC,CAAC;;QAEN;QACA,IACIe,MAAM,CAACE,MAAM,CAACO,WAAW,IACzBT,MAAM,CAACE,MAAM,CAACO,WAAW,CAACxB,SAAS,EACrC;UACE,IAAMkC,cAA2C,GAAG,CAAC,CAAC;UACtDvB,MAAM,CAACwB,OAAO,CAACR,OAAO,CAACS,YAAY,CAAC,CAACP,OAAO,CAAC,CAAC,CAACQ,EAAE,EAAEC,UAAU,CAAC,KAAK;YAC/D,IAAMC,aAAoC,GAAG9C,SAAS,CAAC6C,UAAU,CAAQ;YACzE,IAAIC,aAAa,CAACC,IAAI,EAAE;cACpB,IAAMC,UAAU,GAAGF,aAAa,CAACC,IAAI;cACrCD,aAAa,CAACC,IAAI,GAAGlD,gBAAgB,CAACO,aAAa,CAAC4C,UAAU,EAAE1C,QAAQ,CAAC,CAAC;YAC9E;YACAmC,cAAc,CAACG,EAAE,CAAC,GAAGE,aAAa;UACtC,CAAC,CAAC;UACFZ,OAAO,CAACS,YAAY,GAAGF,cAAc;QACzC;QACA,OAAOP,OAAO;MAClB;MACA,SAASe,iBAAiBA,CAACf,OAA4B,EAAsC;QACzFA,OAAO,GAAGC,uBAAuB,CAACD,OAAO,CAAC;QAC1CnC,cAAc,CAACuB,MAAM,CAACE,MAAM,CAACjB,SAAS,CAAC,CAClC6B,OAAO,CAACC,IAAI,IAAI;UACb,IAAMhC,KAAK,GAAGJ,WAAW,CAACiC,OAAO,EAAEG,IAAI,CAAC;UACxC,IAAI,OAAOhC,KAAK,KAAK,WAAW,EAAE;YAC9B;UACJ;UACA,IAAMQ,SAAS,GAAGH,aAAa,CAACL,KAAK,EAAEC,QAAQ,CAAC;UAChD,IAAM4C,eAAe,GAAGX,IAAI,CAACY,KAAK,CAACtC,SAAS,CAAC;UAC7CX,WAAW,CAACgC,OAAO,EAAEG,IAAI,EAAEa,eAAe,CAAC;QAC/C,CAAC,CAAC;QACN,OAAOhB,OAAO;MAClB;MAEA,SAASkB,2BAA2BA,CAACC,cAAsB,EAAU;QACjE,IACI/B,MAAM,CAACE,MAAM,CAACO,WAAW,IACzBT,MAAM,CAACE,MAAM,CAACO,WAAW,CAACxB,SAAS,EACrC;UACE,IAAMM,SAAS,GAAGH,aAAa,CAACd
,gBAAgB,CAACyD,cAAc,CAAC,EAAE/C,QAAQ,CAAC;UAC3E,OAAOO,SAAS;QACpB,CAAC,MAAM;UACH,OAAOwC,cAAc;QACzB;MACJ;MAEA,OAAO7D,qBAAqB,CACxB8B,MAAM,CAACE,MAAM,EACbQ,QAAQ,EACRC,eAAe,EACfgB,iBAAiB,EACjBG,2BACJ,CAAC;IACL;EACJ,CACJ,CAAC;AACL;AAMA,SAASjB,uBAAuBA,CAAIY,IAA4B,EAAqB;EACjF,IAAMhB,WAAW,GAAGgB,IAAI,CAACJ,YAAY;EACrCI,IAAI,GAAG/C,SAAS,CAAC+C,IAAI,CAAC;EACtB,OAAQA,IAAI,CAASJ,YAAY;EACjCI,IAAI,GAAGjD,KAAK,CAACiD,IAAI,CAAC;EAClBA,IAAI,CAACJ,YAAY,GAAGZ,WAAW;EAC/B,OAAOgB,IAAI;AACf;AAEA,SAASxB,gBAAgBA,CAACjB,QAAgB,EAAE;EACxC,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;IAC9B,MAAMZ,cAAc,CAAC,KAAK,EAAE;MACxBY;IACJ,CAAC,CAAC;EACN;EACA,IAAIA,QAAQ,CAACgD,MAAM,GAAGnD,uBAAuB,EAAE;IAC3C,MAAMV,UAAU,CAAC,KAAK,EAAE;MACpB8D,aAAa,EAAEpD,uBAAuB;MACtCG;IACJ,CAAC,CAAC;EACN;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/flutter/index.js b/dist/esm/plugins/flutter/index.js deleted file mode 100644 index e1778c3f691..00000000000 --- a/dist/esm/plugins/flutter/index.js +++ /dev/null @@ -1,55 +0,0 @@ -export function setFlutterRxDatabaseConnector(createDB) { - process.init = async databaseName => { - var db = await createDB(databaseName); - db.eventBulks$.subscribe(eventBulk => { - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - sendRxDBEvent(JSON.stringify(eventBulk)); - }); - process.db = db; - var collections = []; - Object.entries(db.collections).forEach(([collectionName, collection]) => { - collections.push({ - name: collectionName, - primaryKey: collection.schema.primaryPath - }); - }); - return { - databaseName, - collections - }; - }; -} - -/** - * Create a simple lokijs adapter so that we can persist string via flutter - * @link https://github.com/techfort/LokiJS/blob/master/tutorials/Persistence%20Adapters.md#creating-your-own-basic-persistence-adapter - */ -export function getLokijsAdapterFlutter() { - var ret = { - async loadDatabase(databaseName, callback) { - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore 
- var serializedDb = await readKeyValue(databaseName); - var success = true; - if (success) { - callback(serializedDb); - } else { - callback(new Error('There was a problem loading the database')); - } - }, - async saveDatabase(databaseName, dbstring, callback) { - // eslint-disable-next-line @typescript-eslint/ban-ts-comment - // @ts-ignore - await persistKeyValue(databaseName, dbstring); - var success = true; // make your own determinations - if (success) { - callback(null); - } else { - callback(new Error('An error was encountered loading " + dbname + " database.')); - } - } - }; - return ret; -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/flutter/index.js.map b/dist/esm/plugins/flutter/index.js.map deleted file mode 100644 index 1387c3e5911..00000000000 --- a/dist/esm/plugins/flutter/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["setFlutterRxDatabaseConnector","createDB","process","init","databaseName","db","eventBulks$","subscribe","eventBulk","sendRxDBEvent","JSON","stringify","collections","Object","entries","forEach","collectionName","collection","push","name","primaryKey","schema","primaryPath","getLokijsAdapterFlutter","ret","loadDatabase","callback","serializedDb","readKeyValue","success","Error","saveDatabase","dbstring","persistKeyValue"],"sources":["../../../../src/plugins/flutter/index.ts"],"sourcesContent":["import type {\n RxDatabase\n} from '../../types/index.d.ts';\n\nexport type CreateRxDatabaseFunctionType = (databaseName: string) => Promise;\n\nexport function setFlutterRxDatabaseConnector(\n createDB: CreateRxDatabaseFunctionType\n) {\n (process as any).init = async (databaseName: string) => {\n const db = await createDB(databaseName);\n db.eventBulks$.subscribe(eventBulk => {\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n // @ts-ignore\n sendRxDBEvent(JSON.stringify(eventBulk));\n });\n (process as any).db = db;\n const collections: { 
name: string; primaryKey: string; }[] = [];\n Object.entries(db.collections).forEach(([collectionName, collection]) => {\n collections.push({\n name: collectionName,\n primaryKey: collection.schema.primaryPath\n });\n });\n return {\n databaseName,\n collections\n };\n };\n}\n\n/**\n * Create a simple lokijs adapter so that we can persist string via flutter\n * @link https://github.com/techfort/LokiJS/blob/master/tutorials/Persistence%20Adapters.md#creating-your-own-basic-persistence-adapter\n */\nexport function getLokijsAdapterFlutter() {\n const ret = {\n async loadDatabase(databaseName: string, callback: (v: string | Error) => {}) {\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n // @ts-ignore\n const serializedDb: string = await readKeyValue(databaseName);\n\n const success = true;\n if (success) {\n callback(serializedDb);\n } else {\n callback(new Error('There was a problem loading the database'));\n }\n },\n async saveDatabase(databaseName: string, dbstring: string, callback: (v: string | Error | null) => {}) {\n // eslint-disable-next-line @typescript-eslint/ban-ts-comment\n // @ts-ignore\n await persistKeyValue(databaseName, dbstring);\n\n const success = true; // make your own determinations\n if (success) {\n callback(null);\n } else {\n callback(new Error('An error was encountered loading \" + dbname + \" database.'));\n }\n }\n };\n return 
ret;\n}\n"],"mappings":"AAMA,OAAO,SAASA,6BAA6BA,CACzCC,QAAsC,EACxC;EACGC,OAAO,CAASC,IAAI,GAAG,MAAOC,YAAoB,IAAK;IACpD,IAAMC,EAAE,GAAG,MAAMJ,QAAQ,CAACG,YAAY,CAAC;IACvCC,EAAE,CAACC,WAAW,CAACC,SAAS,CAACC,SAAS,IAAI;MAClC;MACA;MACAC,aAAa,CAACC,IAAI,CAACC,SAAS,CAACH,SAAS,CAAC,CAAC;IAC5C,CAAC,CAAC;IACDN,OAAO,CAASG,EAAE,GAAGA,EAAE;IACxB,IAAMO,WAAoD,GAAG,EAAE;IAC/DC,MAAM,CAACC,OAAO,CAACT,EAAE,CAACO,WAAW,CAAC,CAACG,OAAO,CAAC,CAAC,CAACC,cAAc,EAAEC,UAAU,CAAC,KAAK;MACrEL,WAAW,CAACM,IAAI,CAAC;QACbC,IAAI,EAAEH,cAAc;QACpBI,UAAU,EAAEH,UAAU,CAACI,MAAM,CAACC;MAClC,CAAC,CAAC;IACN,CAAC,CAAC;IACF,OAAO;MACHlB,YAAY;MACZQ;IACJ,CAAC;EACL,CAAC;AACL;;AAEA;AACA;AACA;AACA;AACA,OAAO,SAASW,uBAAuBA,CAAA,EAAG;EACtC,IAAMC,GAAG,GAAG;IACR,MAAMC,YAAYA,CAACrB,YAAoB,EAAEsB,QAAmC,EAAE;MAC1E;MACA;MACA,IAAMC,YAAoB,GAAG,MAAMC,YAAY,CAACxB,YAAY,CAAC;MAE7D,IAAMyB,OAAO,GAAG,IAAI;MACpB,IAAIA,OAAO,EAAE;QACTH,QAAQ,CAACC,YAAY,CAAC;MAC1B,CAAC,MAAM;QACHD,QAAQ,CAAC,IAAII,KAAK,CAAC,0CAA0C,CAAC,CAAC;MACnE;IACJ,CAAC;IACD,MAAMC,YAAYA,CAAC3B,YAAoB,EAAE4B,QAAgB,EAAEN,QAA0C,EAAE;MACnG;MACA;MACA,MAAMO,eAAe,CAAC7B,YAAY,EAAE4B,QAAQ,CAAC;MAE7C,IAAMH,OAAO,GAAG,IAAI,CAAC,CAAE;MACvB,IAAIA,OAAO,EAAE;QACTH,QAAQ,CAAC,IAAI,CAAC;MAClB,CAAC,MAAM;QACHA,QAAQ,CAAC,IAAII,KAAK,CAAC,2DAA2D,CAAC,CAAC;MACpF;IACJ;EACJ,CAAC;EACD,OAAON,GAAG;AACd","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/json-dump/index.js b/dist/esm/plugins/json-dump/index.js deleted file mode 100644 index c8af749bfb8..00000000000 --- a/dist/esm/plugins/json-dump/index.js +++ /dev/null @@ -1,87 +0,0 @@ -/** - * this plugin adds the json export/import capabilities to RxDB - */ -import { createRxQuery, queryCollection, _getDefaultQuery } from "../../rx-query.js"; -import { newRxError } from "../../rx-error.js"; -import { flatClone, getDefaultRevision, now } from "../../plugins/utils/index.js"; -function dumpRxDatabase(collections) { - var json = { - name: this.name, - instanceToken: this.token, - collections: [] - }; - var useCollections = 
Object.keys(this.collections).filter(colName => !collections || collections.includes(colName)).filter(colName => colName.charAt(0) !== '_').map(colName => this.collections[colName]); - return Promise.all(useCollections.map(col => col.exportJSON())).then(cols => { - json.collections = cols; - return json; - }); -} -var importDumpRxDatabase = function (dump) { - /** - * collections must be created before the import - * because we do not know about the other collection-settings here - */ - var missingCollections = dump.collections.filter(col => !this.collections[col.name]).map(col => col.name); - if (missingCollections.length > 0) { - throw newRxError('JD1', { - missingCollections - }); - } - return Promise.all(dump.collections.map(colDump => this.collections[colDump.name].importJSON(colDump))); -}; -var dumpRxCollection = async function () { - var json = { - name: this.name, - schemaHash: await this.schema.hash, - docs: [] - }; - var query = createRxQuery('find', _getDefaultQuery(), this); - return queryCollection(query).then(docs => { - json.docs = docs.map(docData => { - docData = flatClone(docData); - delete docData._rev; - delete docData._attachments; - return docData; - }); - return json; - }); -}; -async function importDumpRxCollection(exportedJSON) { - // check schemaHash - if (exportedJSON.schemaHash !== (await this.schema.hash)) { - throw newRxError('JD2', { - schemaHash: exportedJSON.schemaHash, - own: this.schema.hash - }); - } - var docs = exportedJSON.docs; - return this.storageInstance.bulkWrite(docs.map(docData => { - var document = Object.assign({}, docData, { - _meta: { - lwt: now() - }, - _rev: getDefaultRevision(), - _attachments: {}, - _deleted: false - }); - return { - document - }; - }), 'json-dump-import'); -} -export var RxDBJsonDumpPlugin = { - name: 'json-dump', - rxdb: true, - prototypes: { - RxDatabase: proto => { - proto.exportJSON = dumpRxDatabase; - proto.importJSON = importDumpRxDatabase; - }, - RxCollection: proto => { - 
proto.exportJSON = dumpRxCollection; - proto.importJSON = importDumpRxCollection; - } - }, - overwritable: {} -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/json-dump/index.js.map b/dist/esm/plugins/json-dump/index.js.map deleted file mode 100644 index 6ec9f4a10e3..00000000000 --- a/dist/esm/plugins/json-dump/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["createRxQuery","queryCollection","_getDefaultQuery","newRxError","flatClone","getDefaultRevision","now","dumpRxDatabase","collections","json","name","instanceToken","token","useCollections","Object","keys","filter","colName","includes","charAt","map","Promise","all","col","exportJSON","then","cols","importDumpRxDatabase","dump","missingCollections","length","colDump","importJSON","dumpRxCollection","schemaHash","schema","hash","docs","query","docData","_rev","_attachments","importDumpRxCollection","exportedJSON","own","storageInstance","bulkWrite","document","assign","_meta","lwt","_deleted","RxDBJsonDumpPlugin","rxdb","prototypes","RxDatabase","proto","RxCollection","overwritable"],"sources":["../../../../src/plugins/json-dump/index.ts"],"sourcesContent":["/**\n * this plugin adds the json export/import capabilities to RxDB\n */\nimport {\n createRxQuery,\n queryCollection,\n _getDefaultQuery\n} from '../../rx-query.ts';\nimport {\n newRxError\n} from '../../rx-error.ts';\nimport type {\n RxDatabase,\n RxCollection,\n RxPlugin,\n RxDocumentData\n} from '../../types/index.d.ts';\nimport {\n flatClone,\n getDefaultRevision,\n now\n} from '../../plugins/utils/index.ts';\n\nfunction dumpRxDatabase(\n this: RxDatabase,\n collections?: string[]\n): Promise {\n const json: any = {\n name: this.name,\n instanceToken: this.token,\n collections: []\n };\n\n const useCollections = Object.keys(this.collections)\n .filter(colName => !collections || collections.includes(colName))\n .filter(colName => colName.charAt(0) !== '_')\n .map(colName 
=> this.collections[colName]);\n\n return Promise.all(\n useCollections\n .map(col => col.exportJSON())\n ).then(cols => {\n json.collections = cols;\n return json;\n });\n}\n\nconst importDumpRxDatabase = function (\n this: RxDatabase,\n dump: any\n) {\n /**\n * collections must be created before the import\n * because we do not know about the other collection-settings here\n */\n const missingCollections = dump.collections\n .filter((col: any) => !this.collections[col.name])\n .map((col: any) => col.name);\n if (missingCollections.length > 0) {\n throw newRxError('JD1', {\n missingCollections\n });\n }\n\n return Promise.all(\n dump.collections\n .map((colDump: any) => this.collections[colDump.name].importJSON(colDump))\n );\n};\n\nconst dumpRxCollection = async function (\n this: RxCollection\n) {\n const json: any = {\n name: this.name,\n schemaHash: await this.schema.hash,\n docs: []\n };\n\n const query = createRxQuery(\n 'find',\n _getDefaultQuery(),\n this\n );\n return queryCollection(query)\n .then((docs: any) => {\n json.docs = docs.map((docData: any) => {\n docData = flatClone(docData);\n delete docData._rev;\n delete docData._attachments;\n return docData;\n });\n return json;\n });\n};\n\nasync function importDumpRxCollection(\n this: RxCollection,\n exportedJSON: any\n): Promise {\n // check schemaHash\n if (exportedJSON.schemaHash !== await this.schema.hash) {\n throw newRxError('JD2', {\n schemaHash: exportedJSON.schemaHash,\n own: this.schema.hash\n });\n }\n\n const docs: RxDocType[] = exportedJSON.docs;\n return this.storageInstance.bulkWrite(\n docs.map(docData => {\n const document: RxDocumentData = Object.assign(\n {},\n docData,\n {\n _meta: {\n lwt: now()\n },\n _rev: getDefaultRevision(),\n _attachments: {},\n _deleted: false\n }\n );\n return {\n document\n };\n }),\n 'json-dump-import'\n );\n}\n\nexport const RxDBJsonDumpPlugin: RxPlugin = {\n name: 'json-dump',\n rxdb: true,\n prototypes: {\n RxDatabase: (proto: any) => {\n 
proto.exportJSON = dumpRxDatabase;\n proto.importJSON = importDumpRxDatabase;\n },\n RxCollection: (proto: any) => {\n proto.exportJSON = dumpRxCollection;\n proto.importJSON = importDumpRxCollection;\n }\n },\n overwritable: {}\n};\n"],"mappings":"AAAA;AACA;AACA;AACA,SACIA,aAAa,EACbC,eAAe,EACfC,gBAAgB,QACb,mBAAmB;AAC1B,SACIC,UAAU,QACP,mBAAmB;AAO1B,SACIC,SAAS,EACTC,kBAAkB,EAClBC,GAAG,QACA,8BAA8B;AAErC,SAASC,cAAcA,CAEnBC,WAAsB,EACV;EACZ,IAAMC,IAAS,GAAG;IACdC,IAAI,EAAE,IAAI,CAACA,IAAI;IACfC,aAAa,EAAE,IAAI,CAACC,KAAK;IACzBJ,WAAW,EAAE;EACjB,CAAC;EAED,IAAMK,cAAc,GAAGC,MAAM,CAACC,IAAI,CAAC,IAAI,CAACP,WAAW,CAAC,CAC/CQ,MAAM,CAACC,OAAO,IAAI,CAACT,WAAW,IAAIA,WAAW,CAACU,QAAQ,CAACD,OAAO,CAAC,CAAC,CAChED,MAAM,CAACC,OAAO,IAAIA,OAAO,CAACE,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,CAC5CC,GAAG,CAACH,OAAO,IAAI,IAAI,CAACT,WAAW,CAACS,OAAO,CAAC,CAAC;EAE9C,OAAOI,OAAO,CAACC,GAAG,CACdT,cAAc,CACTO,GAAG,CAACG,GAAG,IAAIA,GAAG,CAACC,UAAU,CAAC,CAAC,CACpC,CAAC,CAACC,IAAI,CAACC,IAAI,IAAI;IACXjB,IAAI,CAACD,WAAW,GAAGkB,IAAI;IACvB,OAAOjB,IAAI;EACf,CAAC,CAAC;AACN;AAEA,IAAMkB,oBAAoB,GAAG,SAAAA,CAEzBC,IAAS,EACX;EACE;AACJ;AACA;AACA;EACI,IAAMC,kBAAkB,GAAGD,IAAI,CAACpB,WAAW,CACtCQ,MAAM,CAAEO,GAAQ,IAAK,CAAC,IAAI,CAACf,WAAW,CAACe,GAAG,CAACb,IAAI,CAAC,CAAC,CACjDU,GAAG,CAAEG,GAAQ,IAAKA,GAAG,CAACb,IAAI,CAAC;EAChC,IAAImB,kBAAkB,CAACC,MAAM,GAAG,CAAC,EAAE;IAC/B,MAAM3B,UAAU,CAAC,KAAK,EAAE;MACpB0B;IACJ,CAAC,CAAC;EACN;EAEA,OAAOR,OAAO,CAACC,GAAG,CACdM,IAAI,CAACpB,WAAW,CACXY,GAAG,CAAEW,OAAY,IAAK,IAAI,CAACvB,WAAW,CAACuB,OAAO,CAACrB,IAAI,CAAC,CAACsB,UAAU,CAACD,OAAO,CAAC,CACjF,CAAC;AACL,CAAC;AAED,IAAME,gBAAgB,GAAG,eAAAA,CAAA,EAEvB;EACE,IAAMxB,IAAS,GAAG;IACdC,IAAI,EAAE,IAAI,CAACA,IAAI;IACfwB,UAAU,EAAE,MAAM,IAAI,CAACC,MAAM,CAACC,IAAI;IAClCC,IAAI,EAAE;EACV,CAAC;EAED,IAAMC,KAAK,GAAGtC,aAAa,CACvB,MAAM,EACNE,gBAAgB,CAAC,CAAC,EAClB,IACJ,CAAC;EACD,OAAOD,eAAe,CAACqC,KAAK,CAAC,CACxBb,IAAI,CAAEY,IAAS,IAAK;IACjB5B,IAAI,CAAC4B,IAAI,GAAGA,IAAI,CAACjB,GAAG,CAAEmB,OAAY,IAAK;MACnCA,OAAO,GAAGnC,SAAS,CAACmC,OAAO,CAAC;MAC5B,OAAOA,OAAO,CAACC,IAAI;M
ACnB,OAAOD,OAAO,CAACE,YAAY;MAC3B,OAAOF,OAAO;IAClB,CAAC,CAAC;IACF,OAAO9B,IAAI;EACf,CAAC,CAAC;AACV,CAAC;AAED,eAAeiC,sBAAsBA,CAEjCC,YAAiB,EACL;EACZ;EACA,IAAIA,YAAY,CAACT,UAAU,MAAK,MAAM,IAAI,CAACC,MAAM,CAACC,IAAI,GAAE;IACpD,MAAMjC,UAAU,CAAC,KAAK,EAAE;MACpB+B,UAAU,EAAES,YAAY,CAACT,UAAU;MACnCU,GAAG,EAAE,IAAI,CAACT,MAAM,CAACC;IACrB,CAAC,CAAC;EACN;EAEA,IAAMC,IAAiB,GAAGM,YAAY,CAACN,IAAI;EAC3C,OAAO,IAAI,CAACQ,eAAe,CAACC,SAAS,CACjCT,IAAI,CAACjB,GAAG,CAACmB,OAAO,IAAI;IAChB,IAAMQ,QAAmC,GAAGjC,MAAM,CAACkC,MAAM,CACrD,CAAC,CAAC,EACFT,OAAO,EACP;MACIU,KAAK,EAAE;QACHC,GAAG,EAAE5C,GAAG,CAAC;MACb,CAAC;MACDkC,IAAI,EAAEnC,kBAAkB,CAAC,CAAC;MAC1BoC,YAAY,EAAE,CAAC,CAAC;MAChBU,QAAQ,EAAE;IACd,CACJ,CAAC;IACD,OAAO;MACHJ;IACJ,CAAC;EACL,CAAC,CAAC,EACF,kBACJ,CAAC;AACL;AAEA,OAAO,IAAMK,kBAA4B,GAAG;EACxC1C,IAAI,EAAE,WAAW;EACjB2C,IAAI,EAAE,IAAI;EACVC,UAAU,EAAE;IACRC,UAAU,EAAGC,KAAU,IAAK;MACxBA,KAAK,CAAChC,UAAU,GAAGjB,cAAc;MACjCiD,KAAK,CAACxB,UAAU,GAAGL,oBAAoB;IAC3C,CAAC;IACD8B,YAAY,EAAGD,KAAU,IAAK;MAC1BA,KAAK,CAAChC,UAAU,GAAGS,gBAAgB;MACnCuB,KAAK,CAACxB,UAAU,GAAGU,sBAAsB;IAC7C;EACJ,CAAC;EACDgB,YAAY,EAAE,CAAC;AACnB,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/key-compression/index.js b/dist/esm/plugins/key-compression/index.js deleted file mode 100644 index 0caff429220..00000000000 --- a/dist/esm/plugins/key-compression/index.js +++ /dev/null @@ -1,127 +0,0 @@ -/** - * this plugin adds the keycompression-capabilities to rxdb - * if you don't use this, ensure that you set disableKeyCompression to false in your schema - */ - -import { createCompressionTable, compressObject, decompressObject, compressedPath, DEFAULT_COMPRESSION_FLAG, createCompressedJsonSchema, compressQuery } from 'jsonschema-key-compression'; -import { overwritable } from "../../overwritable.js"; -import { wrapRxStorageInstance } from "../../plugin-helpers.js"; -import { getPrimaryFieldOfPrimaryKey } from "../../rx-schema-helper.js"; -import { flatCloneDocWithMeta } from "../../rx-storage-helper.js"; 
-import { flatClone, getFromMapOrCreate, isMaybeReadonlyArray } from "../../plugins/utils/index.js"; -import { prepareQuery } from "../../rx-query.js"; -/** - * Cache the compression table and the compressed schema - * by the storage instance for better performance. - */ -var COMPRESSION_STATE_BY_SCHEMA = new WeakMap(); -export function getCompressionStateByRxJsonSchema(schema) { - /** - * Because we cache the state by the JsonSchema, - * it must be assured that the given schema object - * is never mutated. - */ - overwritable.deepFreezeWhenDevMode(schema); - return getFromMapOrCreate(COMPRESSION_STATE_BY_SCHEMA, schema, () => { - var compressionSchema = flatClone(schema); - delete compressionSchema.primaryKey; - var table = createCompressionTable(compressionSchema, DEFAULT_COMPRESSION_FLAG, [ - /** - * Do not compress the primary field - * for easier debugging. - */ - getPrimaryFieldOfPrimaryKey(schema.primaryKey), '_rev', '_attachments', '_deleted', '_meta']); - delete compressionSchema.primaryKey; - var compressedSchema = createCompressedJsonSchema(table, compressionSchema); - - // also compress primary key - if (typeof schema.primaryKey !== 'string') { - var composedPrimary = schema.primaryKey; - var newComposedPrimary = { - key: compressedPath(table, composedPrimary.key), - fields: composedPrimary.fields.map(field => compressedPath(table, field)), - separator: composedPrimary.separator - }; - compressedSchema.primaryKey = newComposedPrimary; - } else { - compressedSchema.primaryKey = compressedPath(table, schema.primaryKey); - } - - /** - * the key compression module does not know about indexes - * in the schema, so we have to also compress them here. 
- */ - if (schema.indexes) { - var newIndexes = schema.indexes.map(idx => { - if (isMaybeReadonlyArray(idx)) { - return idx.map(subIdx => compressedPath(table, subIdx)); - } else { - return compressedPath(table, idx); - } - }); - compressedSchema.indexes = newIndexes; - } - var compressionState = { - table, - schema, - compressedSchema - }; - return compressionState; - }); -} -export function wrappedKeyCompressionStorage(args) { - return Object.assign({}, args.storage, { - async createStorageInstance(params) { - if (!params.schema.keyCompression) { - return args.storage.createStorageInstance(params); - } - var compressionState = getCompressionStateByRxJsonSchema(params.schema); - function modifyToStorage(docData) { - var ret = compressDocumentData(compressionState, docData); - return ret; - } - function modifyFromStorage(docData) { - return decompressDocumentData(compressionState, docData); - } - - /** - * Because this wrapper resolves the key-compression, - * we can set the flag to false - * which allows underlying storages to detect wrong configurations - * like when keyCompression is set to false but no key-compression module is used. 
- */ - var childSchema = flatClone(compressionState.compressedSchema); - childSchema.keyCompression = false; - var instance = await args.storage.createStorageInstance(Object.assign({}, params, { - schema: childSchema - })); - var wrappedInstance = wrapRxStorageInstance(params.schema, instance, modifyToStorage, modifyFromStorage); - var overwriteMethods = ['query', 'count']; - overwriteMethods.forEach(methodName => { - var methodBefore = wrappedInstance[methodName].bind(wrappedInstance); - wrappedInstance[methodName] = async preparedQuery => { - var compressedQuery = compressQuery(compressionState.table, preparedQuery.query); - var compressedPreparedQuery = prepareQuery(compressionState.compressedSchema, compressedQuery); - return methodBefore(compressedPreparedQuery); - }; - }); - return wrappedInstance; - } - }); -} -export function compressDocumentData(compressionState, docData) { - /** - * Do not send attachments to compressObject() - * because it will deep clone which does not work on Blob or Buffer. 
- */ - docData = flatCloneDocWithMeta(docData); - var attachments = docData._attachments; - delete docData._attachments; - docData = compressObject(compressionState.table, docData); - docData._attachments = attachments; - return docData; -} -export function decompressDocumentData(compressionState, docData) { - return decompressObject(compressionState.table, docData); -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/key-compression/index.js.map b/dist/esm/plugins/key-compression/index.js.map deleted file mode 100644 index 7d5c4a9a37f..00000000000 --- a/dist/esm/plugins/key-compression/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["createCompressionTable","compressObject","decompressObject","compressedPath","DEFAULT_COMPRESSION_FLAG","createCompressedJsonSchema","compressQuery","overwritable","wrapRxStorageInstance","getPrimaryFieldOfPrimaryKey","flatCloneDocWithMeta","flatClone","getFromMapOrCreate","isMaybeReadonlyArray","prepareQuery","COMPRESSION_STATE_BY_SCHEMA","WeakMap","getCompressionStateByRxJsonSchema","schema","deepFreezeWhenDevMode","compressionSchema","primaryKey","table","compressedSchema","composedPrimary","newComposedPrimary","key","fields","map","field","separator","indexes","newIndexes","idx","subIdx","compressionState","wrappedKeyCompressionStorage","args","Object","assign","storage","createStorageInstance","params","keyCompression","modifyToStorage","docData","ret","compressDocumentData","modifyFromStorage","decompressDocumentData","childSchema","instance","wrappedInstance","overwriteMethods","forEach","methodName","methodBefore","bind","preparedQuery","compressedQuery","query","compressedPreparedQuery","attachments","_attachments"],"sources":["../../../../src/plugins/key-compression/index.ts"],"sourcesContent":["/**\n * this plugin adds the keycompression-capabilities to rxdb\n * if you don't use this, ensure that you set disableKeyCompression to false in your 
schema\n */\n\nimport {\n createCompressionTable,\n CompressionTable,\n JsonSchema as KeyCompressionJsonSchema,\n compressObject,\n decompressObject,\n compressedPath,\n DEFAULT_COMPRESSION_FLAG,\n createCompressedJsonSchema,\n compressQuery\n} from 'jsonschema-key-compression';\nimport {\n overwritable\n} from '../../overwritable.ts';\nimport { wrapRxStorageInstance } from '../../plugin-helpers.ts';\nimport { getPrimaryFieldOfPrimaryKey } from '../../rx-schema-helper.ts';\nimport { flatCloneDocWithMeta } from '../../rx-storage-helper.ts';\n\nimport type {\n RxJsonSchema,\n CompositePrimaryKey,\n RxStorage,\n RxStorageInstanceCreationParams,\n RxDocumentData,\n FilledMangoQuery,\n PreparedQuery,\n RxDocumentWriteData\n} from '../../types/index.d.ts';\nimport {\n clone,\n flatClone,\n getFromMapOrCreate,\n isMaybeReadonlyArray\n} from '../../plugins/utils/index.ts';\nimport { prepareQuery } from '../../rx-query.ts';\n\ndeclare type CompressionState = {\n table: CompressionTable;\n schema: RxJsonSchema;\n compressedSchema: RxJsonSchema;\n};\n\n/**\n * Cache the compression table and the compressed schema\n * by the storage instance for better performance.\n */\nconst COMPRESSION_STATE_BY_SCHEMA: WeakMap<\n RxJsonSchema,\n CompressionState\n> = new WeakMap();\n\n\nexport function getCompressionStateByRxJsonSchema(\n schema: RxJsonSchema\n): CompressionState {\n /**\n * Because we cache the state by the JsonSchema,\n * it must be assured that the given schema object\n * is never mutated.\n */\n overwritable.deepFreezeWhenDevMode(schema);\n\n return getFromMapOrCreate(\n COMPRESSION_STATE_BY_SCHEMA,\n schema,\n () => {\n const compressionSchema: KeyCompressionJsonSchema = flatClone(schema) as any;\n delete (compressionSchema as any).primaryKey;\n\n const table = createCompressionTable(\n compressionSchema,\n DEFAULT_COMPRESSION_FLAG,\n [\n /**\n * Do not compress the primary field\n * for easier debugging.\n */\n getPrimaryFieldOfPrimaryKey(schema.primaryKey),\n 
'_rev',\n '_attachments',\n '_deleted',\n '_meta'\n ]\n );\n\n delete (compressionSchema as any).primaryKey;\n const compressedSchema: RxJsonSchema = createCompressedJsonSchema(\n table,\n compressionSchema\n ) as RxJsonSchema;\n\n // also compress primary key\n if (typeof schema.primaryKey !== 'string') {\n const composedPrimary: CompositePrimaryKey = schema.primaryKey;\n const newComposedPrimary: CompositePrimaryKey = {\n key: compressedPath(table, composedPrimary.key as string),\n fields: composedPrimary.fields.map(field => compressedPath(table, field as string)),\n separator: composedPrimary.separator\n };\n compressedSchema.primaryKey = newComposedPrimary;\n } else {\n compressedSchema.primaryKey = compressedPath(table, schema.primaryKey);\n }\n\n /**\n * the key compression module does not know about indexes\n * in the schema, so we have to also compress them here.\n */\n if (schema.indexes) {\n const newIndexes = schema.indexes.map(idx => {\n if (isMaybeReadonlyArray(idx)) {\n return idx.map(subIdx => compressedPath(table, subIdx));\n } else {\n return compressedPath(table, idx);\n }\n });\n compressedSchema.indexes = newIndexes;\n }\n\n const compressionState = {\n table,\n schema,\n compressedSchema\n };\n return compressionState;\n }\n );\n}\n\nexport function wrappedKeyCompressionStorage(\n args: {\n storage: RxStorage;\n }\n): RxStorage {\n return Object.assign(\n {},\n args.storage,\n {\n async createStorageInstance(\n params: RxStorageInstanceCreationParams\n ) {\n if (!params.schema.keyCompression) {\n return args.storage.createStorageInstance(params);\n }\n\n const compressionState = getCompressionStateByRxJsonSchema(params.schema);\n function modifyToStorage(docData: RxDocumentWriteData) {\n const ret = compressDocumentData(compressionState, docData);\n return ret;\n }\n function modifyFromStorage(docData: RxDocumentData): Promise> {\n return decompressDocumentData(compressionState, docData);\n }\n\n /**\n * Because this wrapper resolves the 
key-compression,\n * we can set the flag to false\n * which allows underlying storages to detect wrong configurations\n * like when keyCompression is set to false but no key-compression module is used.\n */\n const childSchema = flatClone(compressionState.compressedSchema);\n childSchema.keyCompression = false;\n\n const instance = await args.storage.createStorageInstance(\n Object.assign(\n {},\n params,\n {\n schema: childSchema\n }\n )\n );\n\n const wrappedInstance = wrapRxStorageInstance(\n params.schema,\n instance,\n modifyToStorage,\n modifyFromStorage\n );\n\n\n const overwriteMethods = ['query', 'count'] as const;\n overwriteMethods.forEach(methodName => {\n const methodBefore = wrappedInstance[methodName].bind(wrappedInstance);\n (wrappedInstance as any)[methodName] = async (preparedQuery: PreparedQuery) => {\n const compressedQuery: FilledMangoQuery = compressQuery(\n compressionState.table,\n preparedQuery.query as any\n ) as any;\n\n const compressedPreparedQuery = prepareQuery(\n compressionState.compressedSchema,\n compressedQuery\n );\n return methodBefore(compressedPreparedQuery);\n }\n });\n\n return wrappedInstance;\n }\n }\n );\n}\n\nexport function compressDocumentData(\n compressionState: CompressionState,\n docData: RxDocumentData\n): RxDocumentData {\n /**\n * Do not send attachments to compressObject()\n * because it will deep clone which does not work on Blob or Buffer.\n */\n docData = flatCloneDocWithMeta(docData);\n const attachments = docData._attachments;\n delete docData._attachments;\n\n docData = compressObject(\n compressionState.table,\n docData\n );\n docData._attachments = attachments;\n return docData;\n}\n\nexport function decompressDocumentData(\n compressionState: CompressionState,\n docData: RxDocumentData\n): RxDocumentData {\n return decompressObject(\n compressionState.table,\n docData\n 
);\n}\n"],"mappings":"AAAA;AACA;AACA;AACA;;AAEA,SACIA,sBAAsB,EAGtBC,cAAc,EACdC,gBAAgB,EAChBC,cAAc,EACdC,wBAAwB,EACxBC,0BAA0B,EAC1BC,aAAa,QACV,4BAA4B;AACnC,SACIC,YAAY,QACT,uBAAuB;AAC9B,SAASC,qBAAqB,QAAQ,yBAAyB;AAC/D,SAASC,2BAA2B,QAAQ,2BAA2B;AACvE,SAASC,oBAAoB,QAAQ,4BAA4B;AAYjE,SAEIC,SAAS,EACTC,kBAAkB,EAClBC,oBAAoB,QACjB,8BAA8B;AACrC,SAASC,YAAY,QAAQ,mBAAmB;AAQhD;AACA;AACA;AACA;AACA,IAAMC,2BAGL,GAAG,IAAIC,OAAO,CAAC,CAAC;AAGjB,OAAO,SAASC,iCAAiCA,CAC7CC,MAAyB,EACT;EAChB;AACJ;AACA;AACA;AACA;EACIX,YAAY,CAACY,qBAAqB,CAACD,MAAM,CAAC;EAE1C,OAAON,kBAAkB,CACrBG,2BAA2B,EAC3BG,MAAM,EACN,MAAM;IACF,IAAME,iBAA2C,GAAGT,SAAS,CAACO,MAAM,CAAQ;IAC5E,OAAQE,iBAAiB,CAASC,UAAU;IAE5C,IAAMC,KAAK,GAAGtB,sBAAsB,CAChCoB,iBAAiB,EACjBhB,wBAAwB,EACxB;IACI;AACpB;AACA;AACA;IACoBK,2BAA2B,CAACS,MAAM,CAACG,UAAU,CAAC,EAC9C,MAAM,EACN,cAAc,EACd,UAAU,EACV,OAAO,CAEf,CAAC;IAED,OAAQD,iBAAiB,CAASC,UAAU;IAC5C,IAAME,gBAAmC,GAAGlB,0BAA0B,CAClEiB,KAAK,EACLF,iBACJ,CAAsB;;IAEtB;IACA,IAAI,OAAOF,MAAM,CAACG,UAAU,KAAK,QAAQ,EAAE;MACvC,IAAMG,eAAyC,GAAGN,MAAM,CAACG,UAAU;MACnE,IAAMI,kBAA4C,GAAG;QACjDC,GAAG,EAAEvB,cAAc,CAACmB,KAAK,EAAEE,eAAe,CAACE,GAAa,CAAC;QACzDC,MAAM,EAAEH,eAAe,CAACG,MAAM,CAACC,GAAG,CAACC,KAAK,IAAI1B,cAAc,CAACmB,KAAK,EAAEO,KAAe,CAAC,CAAC;QACnFC,SAAS,EAAEN,eAAe,CAACM;MAC/B,CAAC;MACDP,gBAAgB,CAACF,UAAU,GAAGI,kBAAkB;IACpD,CAAC,MAAM;MACHF,gBAAgB,CAACF,UAAU,GAAGlB,cAAc,CAACmB,KAAK,EAAEJ,MAAM,CAACG,UAAU,CAAC;IAC1E;;IAEA;AACZ;AACA;AACA;IACY,IAAIH,MAAM,CAACa,OAAO,EAAE;MAChB,IAAMC,UAAU,GAAGd,MAAM,CAACa,OAAO,CAACH,GAAG,CAACK,GAAG,IAAI;QACzC,IAAIpB,oBAAoB,CAACoB,GAAG,CAAC,EAAE;UAC3B,OAAOA,GAAG,CAACL,GAAG,CAACM,MAAM,IAAI/B,cAAc,CAACmB,KAAK,EAAEY,MAAM,CAAC,CAAC;QAC3D,CAAC,MAAM;UACH,OAAO/B,cAAc,CAACmB,KAAK,EAAEW,GAAG,CAAC;QACrC;MACJ,CAAC,CAAC;MACFV,gBAAgB,CAACQ,OAAO,GAAGC,UAAU;IACzC;IAEA,IAAMG,gBAAgB,GAAG;MACrBb,KAAK;MACLJ,MAAM;MACNK;IACJ,CAAC;IACD,OAAOY,gBAAgB;EAC3B,CACJ,CAAC;AACL;AAEA,OAAO,SAASC,4BAA4BA,CACxCC,IAEC,EAC4C;EAC7C,OAAOC,MAAM,CAACC,MAAM,CAChB,CAAC,CAAC,EACFF,IAAI,CAACG,OAAO,EACZ;IACI,MAAMC,qBAAqBA,CACvBC,MAA
uD,EACzD;MACE,IAAI,CAACA,MAAM,CAACxB,MAAM,CAACyB,cAAc,EAAE;QAC/B,OAAON,IAAI,CAACG,OAAO,CAACC,qBAAqB,CAACC,MAAM,CAAC;MACrD;MAEA,IAAMP,gBAAgB,GAAGlB,iCAAiC,CAACyB,MAAM,CAACxB,MAAM,CAAC;MACzE,SAAS0B,eAAeA,CAACC,OAAuC,EAAE;QAC9D,IAAMC,GAAG,GAAGC,oBAAoB,CAACZ,gBAAgB,EAAEU,OAAO,CAAC;QAC3D,OAAOC,GAAG;MACd;MACA,SAASE,iBAAiBA,CAACH,OAA4B,EAAsC;QACzF,OAAOI,sBAAsB,CAACd,gBAAgB,EAAEU,OAAO,CAAC;MAC5D;;MAEA;AAChB;AACA;AACA;AACA;AACA;MACgB,IAAMK,WAAW,GAAGvC,SAAS,CAACwB,gBAAgB,CAACZ,gBAAgB,CAAC;MAChE2B,WAAW,CAACP,cAAc,GAAG,KAAK;MAElC,IAAMQ,QAAQ,GAAG,MAAMd,IAAI,CAACG,OAAO,CAACC,qBAAqB,CACrDH,MAAM,CAACC,MAAM,CACT,CAAC,CAAC,EACFG,MAAM,EACN;QACIxB,MAAM,EAAEgC;MACZ,CACJ,CACJ,CAAC;MAED,IAAME,eAAe,GAAG5C,qBAAqB,CACzCkC,MAAM,CAACxB,MAAM,EACbiC,QAAQ,EACRP,eAAe,EACfI,iBACJ,CAAC;MAGD,IAAMK,gBAAgB,GAAG,CAAC,OAAO,EAAE,OAAO,CAAU;MACpDA,gBAAgB,CAACC,OAAO,CAACC,UAAU,IAAI;QACnC,IAAMC,YAAY,GAAGJ,eAAe,CAACG,UAAU,CAAC,CAACE,IAAI,CAACL,eAAe,CAAC;QACrEA,eAAe,CAASG,UAAU,CAAC,GAAG,MAAOG,aAAuC,IAAK;UACtF,IAAMC,eAA4C,GAAGrD,aAAa,CAC9D6B,gBAAgB,CAACb,KAAK,EACtBoC,aAAa,CAACE,KAClB,CAAQ;UAER,IAAMC,uBAAuB,GAAG/C,YAAY,CACxCqB,gBAAgB,CAACZ,gBAAgB,EACjCoC,eACJ,CAAC;UACD,OAAOH,YAAY,CAACK,uBAAuB,CAAC;QAChD,CAAC;MACL,CAAC,CAAC;MAEF,OAAOT,eAAe;IAC1B;EACJ,CACJ,CAAC;AACL;AAEA,OAAO,SAASL,oBAAoBA,CAChCZ,gBAAkC,EAClCU,OAA4B,EACT;EACnB;AACJ;AACA;AACA;EACIA,OAAO,GAAGnC,oBAAoB,CAACmC,OAAO,CAAC;EACvC,IAAMiB,WAAW,GAAGjB,OAAO,CAACkB,YAAY;EACxC,OAAOlB,OAAO,CAACkB,YAAY;EAE3BlB,OAAO,GAAG5C,cAAc,CACpBkC,gBAAgB,CAACb,KAAK,EACtBuB,OACJ,CAAC;EACDA,OAAO,CAACkB,YAAY,GAAGD,WAAW;EAClC,OAAOjB,OAAO;AAClB;AAEA,OAAO,SAASI,sBAAsBA,CAClCd,gBAAkC,EAClCU,OAA4B,EACT;EACnB,OAAO3C,gBAAgB,CACnBiC,gBAAgB,CAACb,KAAK,EACtBuB,OACJ,CAAC;AACL","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/leader-election/index.js b/dist/esm/plugins/leader-election/index.js deleted file mode 100644 index 279a5484024..00000000000 --- a/dist/esm/plugins/leader-election/index.js +++ /dev/null @@ -1,86 +0,0 @@ -/** - * this plugin adds the 
leader-election-capabilities to rxdb - */ - -import { createLeaderElection } from 'broadcast-channel'; -import { getBroadcastChannelReference, removeBroadcastChannelReference } from "../../rx-storage-multiinstance.js"; -import { PROMISE_RESOLVE_TRUE, getFromMapOrCreate } from "../utils/index.js"; -var LEADER_ELECTORS_OF_DB = new WeakMap(); -var LEADER_ELECTOR_BY_BROADCAST_CHANNEL = new WeakMap(); - -/** - * Returns the leader elector of a broadcast channel. - * Used to ensure we reuse the same elector for the channel each time. - */ -export function getLeaderElectorByBroadcastChannel(broadcastChannel) { - return getFromMapOrCreate(LEADER_ELECTOR_BY_BROADCAST_CHANNEL, broadcastChannel, () => createLeaderElection(broadcastChannel)); -} - -/** - * @overwrites RxDatabase().leaderElector for caching - */ -export function getForDatabase() { - var broadcastChannel = getBroadcastChannelReference(this.storage.name, this.token, this.name, this); - - /** - * Clean up the reference on RxDatabase.destroy() - */ - var oldDestroy = this.destroy.bind(this); - this.destroy = function () { - removeBroadcastChannelReference(this.token, this); - return oldDestroy(); - }; - var elector = getLeaderElectorByBroadcastChannel(broadcastChannel); - if (!elector) { - elector = getLeaderElectorByBroadcastChannel(broadcastChannel); - LEADER_ELECTORS_OF_DB.set(this, elector); - } - - /** - * Overwrite for caching - */ - this.leaderElector = () => elector; - return elector; -} -export function isLeader() { - if (!this.multiInstance) { - return true; - } - return this.leaderElector().isLeader; -} -export function waitForLeadership() { - if (!this.multiInstance) { - return PROMISE_RESOLVE_TRUE; - } else { - return this.leaderElector().awaitLeadership().then(() => true); - } -} - -/** - * runs when the database gets destroyed - */ -export function onDestroy(db) { - var has = LEADER_ELECTORS_OF_DB.get(db); - if (has) { - has.die(); - } -} -export var rxdb = true; -export var prototypes = { - 
RxDatabase: proto => { - proto.leaderElector = getForDatabase; - proto.isLeader = isLeader; - proto.waitForLeadership = waitForLeadership; - } -}; -export var RxDBLeaderElectionPlugin = { - name: 'leader-election', - rxdb, - prototypes, - hooks: { - preDestroyRxDatabase: { - after: onDestroy - } - } -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/leader-election/index.js.map b/dist/esm/plugins/leader-election/index.js.map deleted file mode 100644 index 43253bfc733..00000000000 --- a/dist/esm/plugins/leader-election/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["createLeaderElection","getBroadcastChannelReference","removeBroadcastChannelReference","PROMISE_RESOLVE_TRUE","getFromMapOrCreate","LEADER_ELECTORS_OF_DB","WeakMap","LEADER_ELECTOR_BY_BROADCAST_CHANNEL","getLeaderElectorByBroadcastChannel","broadcastChannel","getForDatabase","storage","name","token","oldDestroy","destroy","bind","elector","set","leaderElector","isLeader","multiInstance","waitForLeadership","awaitLeadership","then","onDestroy","db","has","get","die","rxdb","prototypes","RxDatabase","proto","RxDBLeaderElectionPlugin","hooks","preDestroyRxDatabase","after"],"sources":["../../../../src/plugins/leader-election/index.ts"],"sourcesContent":["/**\n * this plugin adds the leader-election-capabilities to rxdb\n */\n\nimport {\n createLeaderElection,\n LeaderElector,\n BroadcastChannel\n} from 'broadcast-channel';\nimport {\n getBroadcastChannelReference,\n removeBroadcastChannelReference\n} from '../../rx-storage-multiinstance.ts';\n\nimport type {\n RxDatabase,\n RxPlugin\n} from '../../types/index.d.ts';\nimport { PROMISE_RESOLVE_TRUE, getFromMapOrCreate } from '../utils/index.ts';\n\nconst LEADER_ELECTORS_OF_DB: WeakMap = new WeakMap();\nconst LEADER_ELECTOR_BY_BROADCAST_CHANNEL: WeakMap = new WeakMap();\n\n\n/**\n * Returns the leader elector of a broadcast channel.\n * Used to ensure we reuse the same elector for 
the channel each time.\n */\nexport function getLeaderElectorByBroadcastChannel(broadcastChannel: BroadcastChannel): LeaderElector {\n return getFromMapOrCreate(\n LEADER_ELECTOR_BY_BROADCAST_CHANNEL,\n broadcastChannel,\n () => createLeaderElection(broadcastChannel)\n );\n}\n\n/**\n * @overwrites RxDatabase().leaderElector for caching\n */\nexport function getForDatabase(this: RxDatabase): LeaderElector {\n\n\n const broadcastChannel = getBroadcastChannelReference(\n this.storage.name,\n this.token,\n this.name,\n this\n );\n\n /**\n * Clean up the reference on RxDatabase.destroy()\n */\n const oldDestroy = this.destroy.bind(this);\n this.destroy = function () {\n removeBroadcastChannelReference(this.token, this);\n return oldDestroy();\n };\n\n\n let elector = getLeaderElectorByBroadcastChannel(broadcastChannel);\n if (!elector) {\n elector = getLeaderElectorByBroadcastChannel(broadcastChannel);\n LEADER_ELECTORS_OF_DB.set(\n this,\n elector\n );\n }\n\n /**\n * Overwrite for caching\n */\n this.leaderElector = () => elector;\n\n return elector;\n}\n\nexport function isLeader(this: RxDatabase): boolean {\n if (!this.multiInstance) {\n return true;\n }\n return this.leaderElector().isLeader;\n}\n\nexport function waitForLeadership(this: RxDatabase): Promise {\n if (!this.multiInstance) {\n return PROMISE_RESOLVE_TRUE;\n } else {\n return this.leaderElector()\n .awaitLeadership()\n .then(() => true);\n }\n}\n\n/**\n * runs when the database gets destroyed\n */\nexport function onDestroy(db: RxDatabase) {\n const has = LEADER_ELECTORS_OF_DB.get(db);\n if (has) {\n has.die();\n }\n}\n\nexport const rxdb = true;\nexport const prototypes = {\n RxDatabase: (proto: any) => {\n proto.leaderElector = getForDatabase;\n proto.isLeader = isLeader;\n proto.waitForLeadership = waitForLeadership;\n }\n};\n\nexport const RxDBLeaderElectionPlugin: RxPlugin = {\n name: 'leader-election',\n rxdb,\n prototypes,\n hooks: {\n preDestroyRxDatabase: {\n after: onDestroy\n }\n 
}\n};\n"],"mappings":"AAAA;AACA;AACA;;AAEA,SACIA,oBAAoB,QAGjB,mBAAmB;AAC1B,SACIC,4BAA4B,EAC5BC,+BAA+B,QAC5B,mCAAmC;AAM1C,SAASC,oBAAoB,EAAEC,kBAAkB,QAAQ,mBAAmB;AAE5E,IAAMC,qBAAyD,GAAG,IAAIC,OAAO,CAAC,CAAC;AAC/E,IAAMC,mCAA6E,GAAG,IAAID,OAAO,CAAC,CAAC;;AAGnG;AACA;AACA;AACA;AACA,OAAO,SAASE,kCAAkCA,CAACC,gBAAkC,EAAiB;EAClG,OAAOL,kBAAkB,CACrBG,mCAAmC,EACnCE,gBAAgB,EAChB,MAAMT,oBAAoB,CAACS,gBAAgB,CAC/C,CAAC;AACL;;AAEA;AACA;AACA;AACA,OAAO,SAASC,cAAcA,CAAA,EAAkC;EAG5D,IAAMD,gBAAgB,GAAGR,4BAA4B,CACjD,IAAI,CAACU,OAAO,CAACC,IAAI,EACjB,IAAI,CAACC,KAAK,EACV,IAAI,CAACD,IAAI,EACT,IACJ,CAAC;;EAED;AACJ;AACA;EACI,IAAME,UAAU,GAAG,IAAI,CAACC,OAAO,CAACC,IAAI,CAAC,IAAI,CAAC;EAC1C,IAAI,CAACD,OAAO,GAAG,YAAY;IACvBb,+BAA+B,CAAC,IAAI,CAACW,KAAK,EAAE,IAAI,CAAC;IACjD,OAAOC,UAAU,CAAC,CAAC;EACvB,CAAC;EAGD,IAAIG,OAAO,GAAGT,kCAAkC,CAACC,gBAAgB,CAAC;EAClE,IAAI,CAACQ,OAAO,EAAE;IACVA,OAAO,GAAGT,kCAAkC,CAACC,gBAAgB,CAAC;IAC9DJ,qBAAqB,CAACa,GAAG,CACrB,IAAI,EACJD,OACJ,CAAC;EACL;;EAEA;AACJ;AACA;EACI,IAAI,CAACE,aAAa,GAAG,MAAMF,OAAO;EAElC,OAAOA,OAAO;AAClB;AAEA,OAAO,SAASG,QAAQA,CAAA,EAA4B;EAChD,IAAI,CAAC,IAAI,CAACC,aAAa,EAAE;IACrB,OAAO,IAAI;EACf;EACA,OAAO,IAAI,CAACF,aAAa,CAAC,CAAC,CAACC,QAAQ;AACxC;AAEA,OAAO,SAASE,iBAAiBA,CAAA,EAAqC;EAClE,IAAI,CAAC,IAAI,CAACD,aAAa,EAAE;IACrB,OAAOlB,oBAAoB;EAC/B,CAAC,MAAM;IACH,OAAO,IAAI,CAACgB,aAAa,CAAC,CAAC,CACtBI,eAAe,CAAC,CAAC,CACjBC,IAAI,CAAC,MAAM,IAAI,CAAC;EACzB;AACJ;;AAEA;AACA;AACA;AACA,OAAO,SAASC,SAASA,CAACC,EAAc,EAAE;EACtC,IAAMC,GAAG,GAAGtB,qBAAqB,CAACuB,GAAG,CAACF,EAAE,CAAC;EACzC,IAAIC,GAAG,EAAE;IACLA,GAAG,CAACE,GAAG,CAAC,CAAC;EACb;AACJ;AAEA,OAAO,IAAMC,IAAI,GAAG,IAAI;AACxB,OAAO,IAAMC,UAAU,GAAG;EACtBC,UAAU,EAAGC,KAAU,IAAK;IACxBA,KAAK,CAACd,aAAa,GAAGT,cAAc;IACpCuB,KAAK,CAACb,QAAQ,GAAGA,QAAQ;IACzBa,KAAK,CAACX,iBAAiB,GAAGA,iBAAiB;EAC/C;AACJ,CAAC;AAED,OAAO,IAAMY,wBAAkC,GAAG;EAC9CtB,IAAI,EAAE,iBAAiB;EACvBkB,IAAI;EACJC,UAAU;EACVI,KAAK,EAAE;IACHC,oBAAoB,EAAE;MAClBC,KAAK,EAAEZ;IACX;EACJ;AACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git 
a/dist/esm/plugins/local-documents/index.js b/dist/esm/plugins/local-documents/index.js deleted file mode 100644 index 6216c52f829..00000000000 --- a/dist/esm/plugins/local-documents/index.js +++ /dev/null @@ -1,69 +0,0 @@ -import { getLocal, getLocal$, insertLocal, upsertLocal } from "./local-documents.js"; -import { closeStateByParent, createLocalDocStateByParent, removeLocalDocumentsStorageInstance } from "./local-documents-helper.js"; -export * from "./local-documents-helper.js"; -export * from "./local-documents.js"; -export * from "./rx-local-document.js"; -export var RxDBLocalDocumentsPlugin = { - name: 'local-documents', - rxdb: true, - prototypes: { - RxCollection: proto => { - proto.insertLocal = insertLocal; - proto.upsertLocal = upsertLocal; - proto.getLocal = getLocal; - proto.getLocal$ = getLocal$; - }, - RxDatabase: proto => { - proto.insertLocal = insertLocal; - proto.upsertLocal = upsertLocal; - proto.getLocal = getLocal; - proto.getLocal$ = getLocal$; - } - }, - hooks: { - createRxDatabase: { - before: args => { - if (args.creator.localDocuments) { - /** - * We do not have to await - * the creation to speed up initial page load. - */ - /* await */ - createLocalDocStateByParent(args.database); - } - } - }, - createRxCollection: { - before: args => { - if (args.creator.localDocuments) { - /** - * We do not have to await - * the creation to speed up initial page load. 
- */ - /* await */ - createLocalDocStateByParent(args.collection); - } - } - }, - preDestroyRxDatabase: { - after: db => { - return closeStateByParent(db); - } - }, - postDestroyRxCollection: { - after: collection => closeStateByParent(collection) - }, - postRemoveRxDatabase: { - after: args => { - return removeLocalDocumentsStorageInstance(args.storage, args.databaseName, ''); - } - }, - postRemoveRxCollection: { - after: args => { - return removeLocalDocumentsStorageInstance(args.storage, args.databaseName, args.collectionName); - } - } - }, - overwritable: {} -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/local-documents/index.js.map b/dist/esm/plugins/local-documents/index.js.map deleted file mode 100644 index 7cf8d832851..00000000000 --- a/dist/esm/plugins/local-documents/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["getLocal","getLocal$","insertLocal","upsertLocal","closeStateByParent","createLocalDocStateByParent","removeLocalDocumentsStorageInstance","RxDBLocalDocumentsPlugin","name","rxdb","prototypes","RxCollection","proto","RxDatabase","hooks","createRxDatabase","before","args","creator","localDocuments","database","createRxCollection","collection","preDestroyRxDatabase","after","db","postDestroyRxCollection","postRemoveRxDatabase","storage","databaseName","postRemoveRxCollection","collectionName","overwritable"],"sources":["../../../../src/plugins/local-documents/index.ts"],"sourcesContent":["import type {\n RxPlugin\n} from '../../types/index.d.ts';\nimport {\n getLocal,\n getLocal$,\n insertLocal,\n upsertLocal\n} from './local-documents.ts';\nimport {\n closeStateByParent,\n createLocalDocStateByParent,\n removeLocalDocumentsStorageInstance\n} from './local-documents-helper.ts';\n\nexport * from './local-documents-helper.ts';\nexport * from './local-documents.ts';\nexport * from './rx-local-document.ts';\nexport type {\n LocalDocumentParent,\n LocalDocumentState,\n 
RxLocalDocument,\n RxLocalDocumentData\n} from '../../types/plugins/local-documents.d.ts';\n\n\nexport const RxDBLocalDocumentsPlugin: RxPlugin = {\n name: 'local-documents',\n rxdb: true,\n prototypes: {\n RxCollection: (proto: any) => {\n proto.insertLocal = insertLocal;\n proto.upsertLocal = upsertLocal;\n proto.getLocal = getLocal;\n proto.getLocal$ = getLocal$;\n },\n RxDatabase: (proto: any) => {\n proto.insertLocal = insertLocal;\n proto.upsertLocal = upsertLocal;\n proto.getLocal = getLocal;\n proto.getLocal$ = getLocal$;\n }\n },\n hooks: {\n createRxDatabase: {\n before: args => {\n if (args.creator.localDocuments) {\n /**\n * We do not have to await\n * the creation to speed up initial page load.\n */\n /* await */ createLocalDocStateByParent(args.database);\n }\n }\n },\n createRxCollection: {\n before: args => {\n if (args.creator.localDocuments) {\n /**\n * We do not have to await\n * the creation to speed up initial page load.\n */\n /* await */ createLocalDocStateByParent(args.collection);\n }\n }\n },\n preDestroyRxDatabase: {\n after: db => {\n return closeStateByParent(db);\n }\n },\n postDestroyRxCollection: {\n after: collection => closeStateByParent(collection)\n },\n postRemoveRxDatabase: {\n after: args => {\n return removeLocalDocumentsStorageInstance(\n args.storage,\n args.databaseName,\n ''\n );\n }\n },\n postRemoveRxCollection: {\n after: args => {\n return removeLocalDocumentsStorageInstance(\n args.storage,\n args.databaseName,\n args.collectionName\n );\n }\n }\n },\n overwritable: 
{}\n};\n"],"mappings":"AAGA,SACIA,QAAQ,EACRC,SAAS,EACTC,WAAW,EACXC,WAAW,QACR,sBAAsB;AAC7B,SACIC,kBAAkB,EAClBC,2BAA2B,EAC3BC,mCAAmC,QAChC,6BAA6B;AAEpC,cAAc,6BAA6B;AAC3C,cAAc,sBAAsB;AACpC,cAAc,wBAAwB;AAStC,OAAO,IAAMC,wBAAkC,GAAG;EAC9CC,IAAI,EAAE,iBAAiB;EACvBC,IAAI,EAAE,IAAI;EACVC,UAAU,EAAE;IACRC,YAAY,EAAGC,KAAU,IAAK;MAC1BA,KAAK,CAACV,WAAW,GAAGA,WAAW;MAC/BU,KAAK,CAACT,WAAW,GAAGA,WAAW;MAC/BS,KAAK,CAACZ,QAAQ,GAAGA,QAAQ;MACzBY,KAAK,CAACX,SAAS,GAAGA,SAAS;IAC/B,CAAC;IACDY,UAAU,EAAGD,KAAU,IAAK;MACxBA,KAAK,CAACV,WAAW,GAAGA,WAAW;MAC/BU,KAAK,CAACT,WAAW,GAAGA,WAAW;MAC/BS,KAAK,CAACZ,QAAQ,GAAGA,QAAQ;MACzBY,KAAK,CAACX,SAAS,GAAGA,SAAS;IAC/B;EACJ,CAAC;EACDa,KAAK,EAAE;IACHC,gBAAgB,EAAE;MACdC,MAAM,EAAEC,IAAI,IAAI;QACZ,IAAIA,IAAI,CAACC,OAAO,CAACC,cAAc,EAAE;UAC7B;AACpB;AACA;AACA;UACoB;UAAYd,2BAA2B,CAACY,IAAI,CAACG,QAAQ,CAAC;QAC1D;MACJ;IACJ,CAAC;IACDC,kBAAkB,EAAE;MAChBL,MAAM,EAAEC,IAAI,IAAI;QACZ,IAAIA,IAAI,CAACC,OAAO,CAACC,cAAc,EAAE;UAC7B;AACpB;AACA;AACA;UACoB;UAAYd,2BAA2B,CAACY,IAAI,CAACK,UAAU,CAAC;QAC5D;MACJ;IACJ,CAAC;IACDC,oBAAoB,EAAE;MAClBC,KAAK,EAAEC,EAAE,IAAI;QACT,OAAOrB,kBAAkB,CAACqB,EAAE,CAAC;MACjC;IACJ,CAAC;IACDC,uBAAuB,EAAE;MACrBF,KAAK,EAAEF,UAAU,IAAIlB,kBAAkB,CAACkB,UAAU;IACtD,CAAC;IACDK,oBAAoB,EAAE;MAClBH,KAAK,EAAEP,IAAI,IAAI;QACX,OAAOX,mCAAmC,CACtCW,IAAI,CAACW,OAAO,EACZX,IAAI,CAACY,YAAY,EACjB,EACJ,CAAC;MACL;IACJ,CAAC;IACDC,sBAAsB,EAAE;MACpBN,KAAK,EAAEP,IAAI,IAAI;QACX,OAAOX,mCAAmC,CACtCW,IAAI,CAACW,OAAO,EACZX,IAAI,CAACY,YAAY,EACjBZ,IAAI,CAACc,cACT,CAAC;MACL;IACJ;EACJ,CAAC;EACDC,YAAY,EAAE,CAAC;AACnB,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/local-documents/local-documents-helper.js b/dist/esm/plugins/local-documents/local-documents-helper.js deleted file mode 100644 index 8bf1d1a0805..00000000000 --- a/dist/esm/plugins/local-documents/local-documents-helper.js +++ /dev/null @@ -1,127 +0,0 @@ -import { filter } from 'rxjs'; -import { DocumentCache } from "../../doc-cache.js"; -import { IncrementalWriteQueue } from 
"../../incremental-write.js"; -import { newRxError } from "../../rx-error.js"; -import { fillWithDefaultSettings } from "../../rx-schema-helper.js"; -import { getWrappedStorageInstance } from "../../rx-storage-helper.js"; -import { randomCouchString } from "../../plugins/utils/index.js"; -import { createRxLocalDocument } from "./rx-local-document.js"; -import { overwritable } from "../../overwritable.js"; -export var LOCAL_DOC_STATE_BY_PARENT = new WeakMap(); -export var LOCAL_DOC_STATE_BY_PARENT_RESOLVED = new WeakMap(); -export function createLocalDocStateByParent(parent) { - var database = parent.database ? parent.database : parent; - var collectionName = parent.database ? parent.name : ''; - var statePromise = (async () => { - var storageInstance = await createLocalDocumentStorageInstance(database.token, database.storage, database.name, collectionName, database.instanceCreationOptions, database.multiInstance); - storageInstance = getWrappedStorageInstance(database, storageInstance, RX_LOCAL_DOCUMENT_SCHEMA); - var docCache = new DocumentCache('id', parent.$.pipe(filter(cE => cE.isLocal)), docData => createRxLocalDocument(docData, parent)); - var incrementalWriteQueue = new IncrementalWriteQueue(storageInstance, 'id', () => {}, () => {}); - - /** - * Emit the changestream into the collections change stream - */ - var databaseStorageToken = await database.storageToken; - var subLocalDocs = storageInstance.changeStream().subscribe(eventBulk => { - var events = new Array(eventBulk.events.length); - var rawEvents = eventBulk.events; - var collectionName = parent.database ? 
parent.name : undefined; - for (var index = 0; index < rawEvents.length; index++) { - var event = rawEvents[index]; - events[index] = { - documentId: event.documentId, - collectionName, - isLocal: true, - operation: event.operation, - documentData: overwritable.deepFreezeWhenDevMode(event.documentData), - previousDocumentData: overwritable.deepFreezeWhenDevMode(event.previousDocumentData) - }; - } - var changeEventBulk = { - id: eventBulk.id, - internal: false, - collectionName: parent.database ? parent.name : undefined, - storageToken: databaseStorageToken, - events, - databaseToken: database.token, - checkpoint: eventBulk.checkpoint, - context: eventBulk.context, - endTime: eventBulk.endTime, - startTime: eventBulk.startTime - }; - database.$emit(changeEventBulk); - }); - parent._subs.push(subLocalDocs); - var state = { - database, - parent, - storageInstance, - docCache, - incrementalWriteQueue - }; - LOCAL_DOC_STATE_BY_PARENT_RESOLVED.set(parent, state); - return state; - })(); - LOCAL_DOC_STATE_BY_PARENT.set(parent, statePromise); -} -export function getLocalDocStateByParent(parent) { - var statePromise = LOCAL_DOC_STATE_BY_PARENT.get(parent); - if (!statePromise) { - var database = parent.database ? parent.database : parent; - var collectionName = parent.database ? parent.name : ''; - throw newRxError('LD8', { - database: database.name, - collection: collectionName - }); - } - return statePromise; -} -export function createLocalDocumentStorageInstance(databaseInstanceToken, storage, databaseName, collectionName, instanceCreationOptions, multiInstance) { - return storage.createStorageInstance({ - databaseInstanceToken, - databaseName: databaseName, - /** - * Use a different collection name for the local documents instance - * so that the local docs can be kept while deleting the normal instance - * after migration. 
- */ - collectionName: getCollectionLocalInstanceName(collectionName), - schema: RX_LOCAL_DOCUMENT_SCHEMA, - options: instanceCreationOptions, - multiInstance, - devMode: overwritable.isDevMode() - }); -} -export function closeStateByParent(parent) { - var statePromise = LOCAL_DOC_STATE_BY_PARENT.get(parent); - if (statePromise) { - LOCAL_DOC_STATE_BY_PARENT.delete(parent); - return statePromise.then(state => state.storageInstance.close()); - } -} -export async function removeLocalDocumentsStorageInstance(storage, databaseName, collectionName) { - var databaseInstanceToken = randomCouchString(10); - var storageInstance = await createLocalDocumentStorageInstance(databaseInstanceToken, storage, databaseName, collectionName, {}, false); - await storageInstance.remove(); -} -export function getCollectionLocalInstanceName(collectionName) { - return 'plugin-local-documents-' + collectionName; -} -export var RX_LOCAL_DOCUMENT_SCHEMA = fillWithDefaultSettings({ - title: 'RxLocalDocument', - version: 0, - primaryKey: 'id', - type: 'object', - properties: { - id: { - type: 'string', - maxLength: 128 - }, - data: { - type: 'object', - additionalProperties: true - } - }, - required: ['id', 'data'] -}); -//# sourceMappingURL=local-documents-helper.js.map \ No newline at end of file diff --git a/dist/esm/plugins/local-documents/local-documents-helper.js.map b/dist/esm/plugins/local-documents/local-documents-helper.js.map deleted file mode 100644 index 071afe93626..00000000000 --- a/dist/esm/plugins/local-documents/local-documents-helper.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"local-documents-helper.js","names":["filter","DocumentCache","IncrementalWriteQueue","newRxError","fillWithDefaultSettings","getWrappedStorageInstance","randomCouchString","createRxLocalDocument","overwritable","LOCAL_DOC_STATE_BY_PARENT","WeakMap","LOCAL_DOC_STATE_BY_PARENT_RESOLVED","createLocalDocStateByParent","parent","database","collectionName","name","statePromise","storageInstance","createLocalDocumentStorageInstance","token","storage","instanceCreationOptions","multiInstance","RX_LOCAL_DOCUMENT_SCHEMA","docCache","$","pipe","cE","isLocal","docData","incrementalWriteQueue","databaseStorageToken","storageToken","subLocalDocs","changeStream","subscribe","eventBulk","events","Array","length","rawEvents","undefined","index","event","documentId","operation","documentData","deepFreezeWhenDevMode","previousDocumentData","changeEventBulk","id","internal","databaseToken","checkpoint","context","endTime","startTime","$emit","_subs","push","state","set","getLocalDocStateByParent","get","collection","databaseInstanceToken","databaseName","createStorageInstance","getCollectionLocalInstanceName","schema","options","devMode","isDevMode","closeStateByParent","delete","then","close","removeLocalDocumentsStorageInstance","remove","title","version","primaryKey","type","properties","maxLength","data","additionalProperties","required"],"sources":["../../../../src/plugins/local-documents/local-documents-helper.ts"],"sourcesContent":["import { filter } from 'rxjs';\nimport { DocumentCache } from '../../doc-cache.ts';\nimport { IncrementalWriteQueue } from '../../incremental-write.ts';\nimport { newRxError } from '../../rx-error.ts';\nimport { fillWithDefaultSettings } from '../../rx-schema-helper.ts';\nimport {\n getWrappedStorageInstance\n} from '../../rx-storage-helper.ts';\nimport type {\n LocalDocumentParent,\n LocalDocumentState,\n RxChangeEvent,\n RxChangeEventBulk,\n RxDatabase,\n RxDocumentData,\n RxJsonSchema,\n RxLocalDocumentData,\n RxStorage\n} 
from '../../types/index.d.ts';\nimport { randomCouchString } from '../../plugins/utils/index.ts';\nimport { createRxLocalDocument } from './rx-local-document.ts';\nimport { overwritable } from '../../overwritable.ts';\n\nexport const LOCAL_DOC_STATE_BY_PARENT: WeakMap> = new WeakMap();\nexport const LOCAL_DOC_STATE_BY_PARENT_RESOLVED: WeakMap = new WeakMap();\n\nexport function createLocalDocStateByParent(parent: LocalDocumentParent): void {\n const database: RxDatabase = parent.database ? parent.database : parent as any;\n const collectionName = parent.database ? parent.name : '';\n const statePromise = (async () => {\n let storageInstance = await createLocalDocumentStorageInstance(\n database.token,\n database.storage,\n database.name,\n collectionName,\n database.instanceCreationOptions,\n database.multiInstance\n );\n storageInstance = getWrappedStorageInstance(\n database,\n storageInstance,\n RX_LOCAL_DOCUMENT_SCHEMA\n );\n const docCache = new DocumentCache(\n 'id',\n parent.$.pipe(\n filter(cE => (cE as RxChangeEvent).isLocal)\n ),\n docData => createRxLocalDocument(docData, parent) as any\n );\n\n const incrementalWriteQueue = new IncrementalWriteQueue(\n storageInstance,\n 'id',\n () => { },\n () => { }\n );\n\n /**\n * Emit the changestream into the collections change stream\n */\n const databaseStorageToken = await database.storageToken;\n const subLocalDocs = storageInstance.changeStream().subscribe(eventBulk => {\n const events = new Array(eventBulk.events.length);\n const rawEvents = eventBulk.events;\n const collectionName = parent.database ? 
parent.name : undefined;\n for (let index = 0; index < rawEvents.length; index++) {\n const event = rawEvents[index];\n events[index] = {\n documentId: event.documentId,\n collectionName,\n isLocal: true,\n operation: event.operation,\n documentData: overwritable.deepFreezeWhenDevMode(event.documentData) as any,\n previousDocumentData: overwritable.deepFreezeWhenDevMode(event.previousDocumentData) as any\n };\n }\n const changeEventBulk: RxChangeEventBulk = {\n id: eventBulk.id,\n internal: false,\n collectionName: parent.database ? parent.name : undefined,\n storageToken: databaseStorageToken,\n events,\n databaseToken: database.token,\n checkpoint: eventBulk.checkpoint,\n context: eventBulk.context,\n endTime: eventBulk.endTime,\n startTime: eventBulk.startTime\n };\n database.$emit(changeEventBulk);\n });\n parent._subs.push(subLocalDocs);\n\n const state = {\n database,\n parent,\n storageInstance,\n docCache,\n incrementalWriteQueue\n };\n LOCAL_DOC_STATE_BY_PARENT_RESOLVED.set(parent, state);\n return state;\n })();\n LOCAL_DOC_STATE_BY_PARENT.set(parent, statePromise);\n}\n\nexport function getLocalDocStateByParent(parent: LocalDocumentParent): Promise {\n const statePromise = LOCAL_DOC_STATE_BY_PARENT.get(parent);\n if (!statePromise) {\n const database: RxDatabase = parent.database ? parent.database : parent as any;\n const collectionName = parent.database ? 
parent.name : '';\n throw newRxError('LD8', {\n database: database.name,\n collection: collectionName\n });\n }\n return statePromise;\n}\n\nexport function createLocalDocumentStorageInstance(\n databaseInstanceToken: string,\n storage: RxStorage,\n databaseName: string,\n collectionName: string,\n instanceCreationOptions: any,\n multiInstance: boolean\n) {\n return storage.createStorageInstance({\n databaseInstanceToken,\n databaseName: databaseName,\n /**\n * Use a different collection name for the local documents instance\n * so that the local docs can be kept while deleting the normal instance\n * after migration.\n */\n collectionName: getCollectionLocalInstanceName(collectionName),\n schema: RX_LOCAL_DOCUMENT_SCHEMA,\n options: instanceCreationOptions,\n multiInstance,\n devMode: overwritable.isDevMode()\n });\n}\n\nexport function closeStateByParent(parent: LocalDocumentParent) {\n const statePromise = LOCAL_DOC_STATE_BY_PARENT.get(parent);\n if (statePromise) {\n LOCAL_DOC_STATE_BY_PARENT.delete(parent);\n return statePromise.then(state => state.storageInstance.close());\n }\n}\n\nexport async function removeLocalDocumentsStorageInstance(\n storage: RxStorage,\n databaseName: string,\n collectionName: string\n) {\n const databaseInstanceToken = randomCouchString(10);\n const storageInstance = await createLocalDocumentStorageInstance(\n databaseInstanceToken,\n storage,\n databaseName,\n collectionName,\n {},\n false\n );\n await storageInstance.remove();\n}\n\nexport function getCollectionLocalInstanceName(collectionName: string): string {\n return 'plugin-local-documents-' + collectionName;\n}\n\nexport const RX_LOCAL_DOCUMENT_SCHEMA: RxJsonSchema> = fillWithDefaultSettings({\n title: 'RxLocalDocument',\n version: 0,\n primaryKey: 'id',\n type: 'object',\n properties: {\n id: {\n type: 'string',\n maxLength: 128\n },\n data: {\n type: 'object',\n additionalProperties: true\n }\n },\n required: [\n 'id',\n 'data'\n 
]\n});\n"],"mappings":"AAAA,SAASA,MAAM,QAAQ,MAAM;AAC7B,SAASC,aAAa,QAAQ,oBAAoB;AAClD,SAASC,qBAAqB,QAAQ,4BAA4B;AAClE,SAASC,UAAU,QAAQ,mBAAmB;AAC9C,SAASC,uBAAuB,QAAQ,2BAA2B;AACnE,SACIC,yBAAyB,QACtB,4BAA4B;AAYnC,SAASC,iBAAiB,QAAQ,8BAA8B;AAChE,SAASC,qBAAqB,QAAQ,wBAAwB;AAC9D,SAASC,YAAY,QAAQ,uBAAuB;AAEpD,OAAO,IAAMC,yBAAoF,GAAG,IAAIC,OAAO,CAAC,CAAC;AACjH,OAAO,IAAMC,kCAAoF,GAAG,IAAID,OAAO,CAAC,CAAC;AAEjH,OAAO,SAASE,2BAA2BA,CAACC,MAA2B,EAAQ;EAC3E,IAAMC,QAAoB,GAAGD,MAAM,CAACC,QAAQ,GAAGD,MAAM,CAACC,QAAQ,GAAGD,MAAa;EAC9E,IAAME,cAAc,GAAGF,MAAM,CAACC,QAAQ,GAAGD,MAAM,CAACG,IAAI,GAAG,EAAE;EACzD,IAAMC,YAAY,GAAG,CAAC,YAAY;IAC9B,IAAIC,eAAe,GAAG,MAAMC,kCAAkC,CAC1DL,QAAQ,CAACM,KAAK,EACdN,QAAQ,CAACO,OAAO,EAChBP,QAAQ,CAACE,IAAI,EACbD,cAAc,EACdD,QAAQ,CAACQ,uBAAuB,EAChCR,QAAQ,CAACS,aACb,CAAC;IACDL,eAAe,GAAGb,yBAAyB,CACvCS,QAAQ,EACRI,eAAe,EACfM,wBACJ,CAAC;IACD,IAAMC,QAAQ,GAAG,IAAIxB,aAAa,CAC9B,IAAI,EACJY,MAAM,CAACa,CAAC,CAACC,IAAI,CACT3B,MAAM,CAAC4B,EAAE,IAAKA,EAAE,CAAwBC,OAAO,CACnD,CAAC,EACDC,OAAO,IAAIvB,qBAAqB,CAACuB,OAAO,EAAEjB,MAAM,CACpD,CAAC;IAED,IAAMkB,qBAAqB,GAAG,IAAI7B,qBAAqB,CACnDgB,eAAe,EACf,IAAI,EACJ,MAAM,CAAE,CAAC,EACT,MAAM,CAAE,CACZ,CAAC;;IAED;AACR;AACA;IACQ,IAAMc,oBAAoB,GAAG,MAAMlB,QAAQ,CAACmB,YAAY;IACxD,IAAMC,YAAY,GAAGhB,eAAe,CAACiB,YAAY,CAAC,CAAC,CAACC,SAAS,CAACC,SAAS,IAAI;MACvE,IAAMC,MAAM,GAAG,IAAIC,KAAK,CAACF,SAAS,CAACC,MAAM,CAACE,MAAM,CAAC;MACjD,IAAMC,SAAS,GAAGJ,SAAS,CAACC,MAAM;MAClC,IAAMvB,cAAc,GAAGF,MAAM,CAACC,QAAQ,GAAGD,MAAM,CAACG,IAAI,GAAG0B,SAAS;MAChE,KAAK,IAAIC,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGF,SAAS,CAACD,MAAM,EAAEG,KAAK,EAAE,EAAE;QACnD,IAAMC,KAAK,GAAGH,SAAS,CAACE,KAAK,CAAC;QAC9BL,MAAM,CAACK,KAAK,CAAC,GAAG;UACZE,UAAU,EAAED,KAAK,CAACC,UAAU;UAC5B9B,cAAc;UACdc,OAAO,EAAE,IAAI;UACbiB,SAAS,EAAEF,KAAK,CAACE,SAAS;UAC1BC,YAAY,EAAEvC,YAAY,CAACwC,qBAAqB,CAACJ,KAAK,CAACG,YAAY,CAAQ;UAC3EE,oBAAoB,EAAEzC,YAAY,CAACwC,qBAAqB,CAACJ,KAAK,CAACK,oBAAoB;QACvF,CAAC;MACL;MACA,IAAMC,eAAuD,GAAG;QAC5DC,EAAE,EAAEd,SAAS,CAACc,EAAE;QAChBC,QAAQ,EAAE,KAAK;QACfrC,cAAc,EAAEF,MAAM,CAACC,QAAQ,GAAGD,MAAM,CA
ACG,IAAI,GAAG0B,SAAS;QACzDT,YAAY,EAAED,oBAAoB;QAClCM,MAAM;QACNe,aAAa,EAAEvC,QAAQ,CAACM,KAAK;QAC7BkC,UAAU,EAAEjB,SAAS,CAACiB,UAAU;QAChCC,OAAO,EAAElB,SAAS,CAACkB,OAAO;QAC1BC,OAAO,EAAEnB,SAAS,CAACmB,OAAO;QAC1BC,SAAS,EAAEpB,SAAS,CAACoB;MACzB,CAAC;MACD3C,QAAQ,CAAC4C,KAAK,CAACR,eAAe,CAAC;IACnC,CAAC,CAAC;IACFrC,MAAM,CAAC8C,KAAK,CAACC,IAAI,CAAC1B,YAAY,CAAC;IAE/B,IAAM2B,KAAK,GAAG;MACV/C,QAAQ;MACRD,MAAM;MACNK,eAAe;MACfO,QAAQ;MACRM;IACJ,CAAC;IACDpB,kCAAkC,CAACmD,GAAG,CAACjD,MAAM,EAAEgD,KAAK,CAAC;IACrD,OAAOA,KAAK;EAChB,CAAC,EAAE,CAAC;EACJpD,yBAAyB,CAACqD,GAAG,CAACjD,MAAM,EAAEI,YAAY,CAAC;AACvD;AAEA,OAAO,SAAS8C,wBAAwBA,CAAClD,MAA2B,EAA+B;EAC/F,IAAMI,YAAY,GAAGR,yBAAyB,CAACuD,GAAG,CAACnD,MAAM,CAAC;EAC1D,IAAI,CAACI,YAAY,EAAE;IACf,IAAMH,QAAoB,GAAGD,MAAM,CAACC,QAAQ,GAAGD,MAAM,CAACC,QAAQ,GAAGD,MAAa;IAC9E,IAAME,cAAc,GAAGF,MAAM,CAACC,QAAQ,GAAGD,MAAM,CAACG,IAAI,GAAG,EAAE;IACzD,MAAMb,UAAU,CAAC,KAAK,EAAE;MACpBW,QAAQ,EAAEA,QAAQ,CAACE,IAAI;MACvBiD,UAAU,EAAElD;IAChB,CAAC,CAAC;EACN;EACA,OAAOE,YAAY;AACvB;AAEA,OAAO,SAASE,kCAAkCA,CAC9C+C,qBAA6B,EAC7B7C,OAA4B,EAC5B8C,YAAoB,EACpBpD,cAAsB,EACtBO,uBAA4B,EAC5BC,aAAsB,EACxB;EACE,OAAOF,OAAO,CAAC+C,qBAAqB,CAAsB;IACtDF,qBAAqB;IACrBC,YAAY,EAAEA,YAAY;IAC1B;AACR;AACA;AACA;AACA;IACQpD,cAAc,EAAEsD,8BAA8B,CAACtD,cAAc,CAAC;IAC9DuD,MAAM,EAAE9C,wBAAwB;IAChC+C,OAAO,EAAEjD,uBAAuB;IAChCC,aAAa;IACbiD,OAAO,EAAEhE,YAAY,CAACiE,SAAS,CAAC;EACpC,CAAC,CAAC;AACN;AAEA,OAAO,SAASC,kBAAkBA,CAAC7D,MAA2B,EAAE;EAC5D,IAAMI,YAAY,GAAGR,yBAAyB,CAACuD,GAAG,CAACnD,MAAM,CAAC;EAC1D,IAAII,YAAY,EAAE;IACdR,yBAAyB,CAACkE,MAAM,CAAC9D,MAAM,CAAC;IACxC,OAAOI,YAAY,CAAC2D,IAAI,CAACf,KAAK,IAAIA,KAAK,CAAC3C,eAAe,CAAC2D,KAAK,CAAC,CAAC,CAAC;EACpE;AACJ;AAEA,OAAO,eAAeC,mCAAmCA,CACrDzD,OAA4B,EAC5B8C,YAAoB,EACpBpD,cAAsB,EACxB;EACE,IAAMmD,qBAAqB,GAAG5D,iBAAiB,CAAC,EAAE,CAAC;EACnD,IAAMY,eAAe,GAAG,MAAMC,kCAAkC,CAC5D+C,qBAAqB,EACrB7C,OAAO,EACP8C,YAAY,EACZpD,cAAc,EACd,CAAC,CAAC,EACF,KACJ,CAAC;EACD,MAAMG,eAAe,CAAC6D,MAAM,CAAC,CAAC;AAClC;AAEA,OAAO,SAASV,8BAA8BA,CAACtD,cAAsB,EAAU;EAC3E,OAAO,yBAAyB,GAAGA,cAAc;AACrD;AA
EA,OAAO,IAAMS,wBAA2E,GAAGpB,uBAAuB,CAAC;EAC/G4E,KAAK,EAAE,iBAAiB;EACxBC,OAAO,EAAE,CAAC;EACVC,UAAU,EAAE,IAAI;EAChBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRjC,EAAE,EAAE;MACAgC,IAAI,EAAE,QAAQ;MACdE,SAAS,EAAE;IACf,CAAC;IACDC,IAAI,EAAE;MACFH,IAAI,EAAE,QAAQ;MACdI,oBAAoB,EAAE;IAC1B;EACJ,CAAC;EACDC,QAAQ,EAAE,CACN,IAAI,EACJ,MAAM;AAEd,CAAC,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/local-documents/local-documents.js b/dist/esm/plugins/local-documents/local-documents.js deleted file mode 100644 index 6203e512064..00000000000 --- a/dist/esm/plugins/local-documents/local-documents.js +++ /dev/null @@ -1,99 +0,0 @@ -import { getDefaultRevision, getDefaultRxDocumentMeta } from "../../plugins/utils/index.js"; -import { filter, map, startWith, mergeMap } from 'rxjs'; -import { getLocalDocStateByParent } from "./local-documents-helper.js"; -import { getSingleDocument, writeSingle } from "../../rx-storage-helper.js"; - -/** - * save the local-document-data - * throws if already exists - */ -export async function insertLocal(id, data) { - var state = await getLocalDocStateByParent(this); - - // create new one - var docData = { - id: id, - data, - _deleted: false, - _meta: getDefaultRxDocumentMeta(), - _rev: getDefaultRevision(), - _attachments: {} - }; - return writeSingle(state.storageInstance, { - document: docData - }, 'local-document-insert').then(newDocData => state.docCache.getCachedRxDocument(newDocData)); -} - -/** - * save the local-document-data - * overwrites existing if exists - */ -export function upsertLocal(id, data) { - return this.getLocal(id).then(existing => { - if (!existing) { - // create new one - var docPromise = this.insertLocal(id, data); - return docPromise; - } else { - // update existing - return existing.incrementalModify(() => { - return data; - }); - } - }); -} -export async function getLocal(id) { - var state = await getLocalDocStateByParent(this); - var docCache = state.docCache; - - // check in doc-cache - var found = 
docCache.getLatestDocumentDataIfExists(id); - if (found) { - return Promise.resolve(docCache.getCachedRxDocument(found)); - } - - // if not found, check in storage instance - return getSingleDocument(state.storageInstance, id).then(docData => { - if (!docData) { - return null; - } - return state.docCache.getCachedRxDocument(docData); - }); -} -export function getLocal$(id) { - return this.$.pipe(startWith(null), mergeMap(async cE => { - if (cE) { - return { - changeEvent: cE - }; - } else { - var doc = await this.getLocal(id); - return { - doc: doc - }; - } - }), mergeMap(async changeEventOrDoc => { - if (changeEventOrDoc.changeEvent) { - var cE = changeEventOrDoc.changeEvent; - if (!cE.isLocal || cE.documentId !== id) { - return { - use: false - }; - } else { - var doc = await this.getLocal(id); - return { - use: true, - doc: doc - }; - } - } else { - return { - use: true, - doc: changeEventOrDoc.doc - }; - } - }), filter(filterFlagged => filterFlagged.use), map(filterFlagged => { - return filterFlagged.doc; - })); -} -//# sourceMappingURL=local-documents.js.map \ No newline at end of file diff --git a/dist/esm/plugins/local-documents/local-documents.js.map b/dist/esm/plugins/local-documents/local-documents.js.map deleted file mode 100644 index e34b9ed8767..00000000000 --- a/dist/esm/plugins/local-documents/local-documents.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"local-documents.js","names":["getDefaultRevision","getDefaultRxDocumentMeta","filter","map","startWith","mergeMap","getLocalDocStateByParent","getSingleDocument","writeSingle","insertLocal","id","data","state","docData","_deleted","_meta","_rev","_attachments","storageInstance","document","then","newDocData","docCache","getCachedRxDocument","upsertLocal","getLocal","existing","docPromise","incrementalModify","found","getLatestDocumentDataIfExists","Promise","resolve","getLocal$","$","pipe","cE","changeEvent","doc","changeEventOrDoc","isLocal","documentId","use","filterFlagged"],"sources":["../../../../src/plugins/local-documents/local-documents.ts"],"sourcesContent":["import {\n getDefaultRevision,\n getDefaultRxDocumentMeta\n} from '../../plugins/utils/index.ts';\n\nimport type {\n RxChangeEvent,\n RxCollection,\n RxDatabase,\n RxDocument,\n RxDocumentWriteData,\n RxLocalDocument,\n RxLocalDocumentData\n} from '../../types/index.d.ts';\n\nimport {\n filter,\n map,\n startWith,\n mergeMap\n} from 'rxjs';\nimport { Observable } from 'rxjs';\n\nimport { getLocalDocStateByParent } from './local-documents-helper.ts';\nimport { getSingleDocument, writeSingle } from '../../rx-storage-helper.ts';\n\n\n\n/**\n * save the local-document-data\n * throws if already exists\n */\nexport async function insertLocal = any, Reactivity = unknown>(\n this: RxDatabase | RxCollection,\n id: string,\n data: DocData\n): Promise> {\n const state = await getLocalDocStateByParent(this);\n\n // create new one\n const docData: RxDocumentWriteData> = {\n id: id,\n data,\n _deleted: false,\n _meta: getDefaultRxDocumentMeta(),\n _rev: getDefaultRevision(),\n _attachments: {}\n };\n\n return writeSingle(\n state.storageInstance,\n {\n document: docData\n },\n 'local-document-insert'\n ).then(newDocData => state.docCache.getCachedRxDocument(newDocData) as any);\n}\n\n/**\n * save the local-document-data\n * overwrites existing if exists\n */\nexport function upsertLocal = 
any, Reactivity = unknown>(\n this: any,\n id: string,\n data: DocData\n): Promise> {\n return this.getLocal(id)\n .then((existing: RxDocument) => {\n if (!existing) {\n // create new one\n const docPromise = this.insertLocal(id, data);\n return docPromise;\n } else {\n // update existing\n return existing.incrementalModify(() => {\n return data;\n });\n }\n });\n}\n\nexport async function getLocal(this: any, id: string): Promise | null> {\n const state = await getLocalDocStateByParent(this);\n const docCache = state.docCache;\n\n // check in doc-cache\n const found = docCache.getLatestDocumentDataIfExists(id);\n if (found) {\n return Promise.resolve(\n docCache.getCachedRxDocument(found) as any\n );\n }\n\n // if not found, check in storage instance\n return getSingleDocument(state.storageInstance, id)\n .then((docData) => {\n if (!docData) {\n return null;\n }\n return state.docCache.getCachedRxDocument(docData) as any;\n });\n}\n\nexport function getLocal$(this: RxCollection, id: string): Observable | null> {\n return this.$.pipe(\n startWith(null),\n mergeMap(async (cE: RxChangeEvent | null) => {\n if (cE) {\n return {\n changeEvent: cE\n };\n } else {\n const doc = await this.getLocal(id);\n return {\n doc: doc\n };\n }\n }),\n mergeMap(async (changeEventOrDoc) => {\n if (changeEventOrDoc.changeEvent) {\n const cE = changeEventOrDoc.changeEvent;\n if (!cE.isLocal || cE.documentId !== id) {\n return {\n use: false\n };\n } else {\n const doc = await this.getLocal(id);\n return {\n use: true,\n doc: doc\n };\n }\n } else {\n return {\n use: true,\n doc: changeEventOrDoc.doc\n };\n }\n }),\n filter(filterFlagged => filterFlagged.use),\n map(filterFlagged => {\n return filterFlagged.doc as any;\n })\n 
);\n}\n"],"mappings":"AAAA,SACIA,kBAAkB,EAClBC,wBAAwB,QACrB,8BAA8B;AAYrC,SACIC,MAAM,EACNC,GAAG,EACHC,SAAS,EACTC,QAAQ,QACL,MAAM;AAGb,SAASC,wBAAwB,QAAQ,6BAA6B;AACtE,SAASC,iBAAiB,EAAEC,WAAW,QAAQ,4BAA4B;;AAI3E;AACA;AACA;AACA;AACA,OAAO,eAAeC,WAAWA,CAE7BC,EAAU,EACVC,IAAa,EACqC;EAClD,IAAMC,KAAK,GAAG,MAAMN,wBAAwB,CAAC,IAAI,CAAC;;EAElD;EACA,IAAMO,OAA0D,GAAG;IAC/DH,EAAE,EAAEA,EAAE;IACNC,IAAI;IACJG,QAAQ,EAAE,KAAK;IACfC,KAAK,EAAEd,wBAAwB,CAAC,CAAC;IACjCe,IAAI,EAAEhB,kBAAkB,CAAC,CAAC;IAC1BiB,YAAY,EAAE,CAAC;EACnB,CAAC;EAED,OAAOT,WAAW,CACdI,KAAK,CAACM,eAAe,EACrB;IACIC,QAAQ,EAAEN;EACd,CAAC,EACD,uBACJ,CAAC,CAACO,IAAI,CAACC,UAAU,IAAIT,KAAK,CAACU,QAAQ,CAACC,mBAAmB,CAACF,UAAU,CAAQ,CAAC;AAC/E;;AAEA;AACA;AACA;AACA;AACA,OAAO,SAASG,WAAWA,CAEvBd,EAAU,EACVC,IAAa,EACqC;EAClD,OAAO,IAAI,CAACc,QAAQ,CAACf,EAAE,CAAC,CACnBU,IAAI,CAAEM,QAAoB,IAAK;IAC5B,IAAI,CAACA,QAAQ,EAAE;MACX;MACA,IAAMC,UAAU,GAAG,IAAI,CAAClB,WAAW,CAACC,EAAE,EAAEC,IAAI,CAAC;MAC7C,OAAOgB,UAAU;IACrB,CAAC,MAAM;MACH;MACA,OAAOD,QAAQ,CAACE,iBAAiB,CAAC,MAAM;QACpC,OAAOjB,IAAI;MACf,CAAC,CAAC;IACN;EACJ,CAAC,CAAC;AACV;AAEA,OAAO,eAAec,QAAQA,CAAiDf,EAAU,EAA6D;EAClJ,IAAME,KAAK,GAAG,MAAMN,wBAAwB,CAAC,IAAI,CAAC;EAClD,IAAMgB,QAAQ,GAAGV,KAAK,CAACU,QAAQ;;EAE/B;EACA,IAAMO,KAAK,GAAGP,QAAQ,CAACQ,6BAA6B,CAACpB,EAAE,CAAC;EACxD,IAAImB,KAAK,EAAE;IACP,OAAOE,OAAO,CAACC,OAAO,CAClBV,QAAQ,CAACC,mBAAmB,CAACM,KAAK,CACtC,CAAC;EACL;;EAEA;EACA,OAAOtB,iBAAiB,CAACK,KAAK,CAACM,eAAe,EAAER,EAAE,CAAC,CAC9CU,IAAI,CAAEP,OAAO,IAAK;IACf,IAAI,CAACA,OAAO,EAAE;MACV,OAAO,IAAI;IACf;IACA,OAAOD,KAAK,CAACU,QAAQ,CAACC,mBAAmB,CAACV,OAAO,CAAC;EACtD,CAAC,CAAC;AACV;AAEA,OAAO,SAASoB,SAASA,CAA0DvB,EAAU,EAAgE;EACzJ,OAAO,IAAI,CAACwB,CAAC,CAACC,IAAI,CACd/B,SAAS,CAAC,IAAI,CAAC,EACfC,QAAQ,CAAC,MAAO+B,EAA6C,IAAK;IAC9D,IAAIA,EAAE,EAAE;MACJ,OAAO;QACHC,WAAW,EAAED;MACjB,CAAC;IACL,CAAC,MAAM;MACH,IAAME,GAAG,GAAG,MAAM,IAAI,CAACb,QAAQ,CAACf,EAAE,CAAC;MACnC,OAAO;QACH4B,GAAG,EAAEA;MACT,CAAC;IACL;EACJ,CAAC,CAAC,EACFjC,QAAQ,CAAC,MAAOkC,gBAAgB,IAAK;IACjC,IAAIA,gBAAgB,CAACF,WAAW,EAAE;MAC9B,IAAMD,EAAE,GAAGG,gBAA
gB,CAACF,WAAW;MACvC,IAAI,CAACD,EAAE,CAACI,OAAO,IAAIJ,EAAE,CAACK,UAAU,KAAK/B,EAAE,EAAE;QACrC,OAAO;UACHgC,GAAG,EAAE;QACT,CAAC;MACL,CAAC,MAAM;QACH,IAAMJ,GAAG,GAAG,MAAM,IAAI,CAACb,QAAQ,CAACf,EAAE,CAAC;QACnC,OAAO;UACHgC,GAAG,EAAE,IAAI;UACTJ,GAAG,EAAEA;QACT,CAAC;MACL;IACJ,CAAC,MAAM;MACH,OAAO;QACHI,GAAG,EAAE,IAAI;QACTJ,GAAG,EAAEC,gBAAgB,CAACD;MAC1B,CAAC;IACL;EACJ,CAAC,CAAC,EACFpC,MAAM,CAACyC,aAAa,IAAIA,aAAa,CAACD,GAAG,CAAC,EAC1CvC,GAAG,CAACwC,aAAa,IAAI;IACjB,OAAOA,aAAa,CAACL,GAAG;EAC5B,CAAC,CACL,CAAC;AACL","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/local-documents/rx-local-document.js b/dist/esm/plugins/local-documents/rx-local-document.js deleted file mode 100644 index b41fa09487b..00000000000 --- a/dist/esm/plugins/local-documents/rx-local-document.js +++ /dev/null @@ -1,178 +0,0 @@ -import _inheritsLoose from "@babel/runtime/helpers/inheritsLoose"; -import { distinctUntilChanged, filter, map, shareReplay, startWith } from 'rxjs'; -import { overwritable } from "../../overwritable.js"; -import { getDocumentDataOfRxChangeEvent } from "../../rx-change-event.js"; -import { basePrototype, createRxDocumentConstructor } from "../../rx-document.js"; -import { newRxError, newRxTypeError } from "../../rx-error.js"; -import { writeSingle } from "../../rx-storage-helper.js"; -import { flatClone, getFromMapOrThrow, getProperty, RXJS_SHARE_REPLAY_DEFAULTS } from "../../plugins/utils/index.js"; -import { getLocalDocStateByParent, LOCAL_DOC_STATE_BY_PARENT_RESOLVED } from "./local-documents-helper.js"; -import { isRxDatabase } from "../../rx-database.js"; -var RxDocumentParent = createRxDocumentConstructor(); -var RxLocalDocumentClass = /*#__PURE__*/function (_RxDocumentParent) { - function RxLocalDocumentClass(id, jsonData, parent) { - var _this2; - _this2 = _RxDocumentParent.call(this, null, jsonData) || this; - _this2.id = id; - _this2.parent = parent; - return _this2; - } - _inheritsLoose(RxLocalDocumentClass, _RxDocumentParent); - return 
RxLocalDocumentClass; -}(RxDocumentParent); -var RxLocalDocumentPrototype = { - get isLocal() { - return true; - }, - // - // overwrites - // - get allAttachments$() { - // this is overwritten here because we cannot re-set getters on the prototype - throw newRxError('LD1', { - document: this - }); - }, - get primaryPath() { - return 'id'; - }, - get primary() { - return this.id; - }, - get $() { - var _this = this; - var state = getFromMapOrThrow(LOCAL_DOC_STATE_BY_PARENT_RESOLVED, this.parent); - return _this.parent.$.pipe(filter(changeEvent => changeEvent.documentId === this.primary), filter(changeEvent => changeEvent.isLocal), map(changeEvent => getDocumentDataOfRxChangeEvent(changeEvent)), startWith(state.docCache.getLatestDocumentData(this.primary)), distinctUntilChanged((prev, curr) => prev._rev === curr._rev), map(docData => state.docCache.getCachedRxDocument(docData)), shareReplay(RXJS_SHARE_REPLAY_DEFAULTS)); - }, - get $$() { - var _this = this; - var db = getRxDatabaseFromLocalDocument(_this); - var reactivity = db.getReactivityFactory(); - return reactivity.fromObservable(_this.$, _this.getLatest()._data, db); - }, - get deleted$$() { - var _this = this; - var db = getRxDatabaseFromLocalDocument(_this); - var reactivity = db.getReactivityFactory(); - return reactivity.fromObservable(_this.deleted$, _this.getLatest().deleted, db); - }, - getLatest() { - var state = getFromMapOrThrow(LOCAL_DOC_STATE_BY_PARENT_RESOLVED, this.parent); - var latestDocData = state.docCache.getLatestDocumentData(this.primary); - return state.docCache.getCachedRxDocument(latestDocData); - }, - get(objPath) { - objPath = 'data.' + objPath; - if (!this._data) { - return undefined; - } - if (typeof objPath !== 'string') { - throw newRxTypeError('LD2', { - objPath - }); - } - var valueObj = getProperty(this._data, objPath); - valueObj = overwritable.deepFreezeWhenDevMode(valueObj); - return valueObj; - }, - get$(objPath) { - objPath = 'data.' 
+ objPath; - if (overwritable.isDevMode()) { - if (objPath.includes('.item.')) { - throw newRxError('LD3', { - objPath - }); - } - if (objPath === this.primaryPath) { - throw newRxError('LD4'); - } - } - return this.$.pipe(map(localDocument => localDocument._data), map(data => getProperty(data, objPath)), distinctUntilChanged()); - }, - get$$(objPath) { - var db = getRxDatabaseFromLocalDocument(this); - var reactivity = db.getReactivityFactory(); - return reactivity.fromObservable(this.get$(objPath), this.getLatest().get(objPath), db); - }, - async incrementalModify(mutationFunction) { - var state = await getLocalDocStateByParent(this.parent); - return state.incrementalWriteQueue.addWrite(this._data, async docData => { - docData.data = await mutationFunction(docData.data, this); - return docData; - }).then(result => state.docCache.getCachedRxDocument(result)); - }, - incrementalPatch(patch) { - return this.incrementalModify(docData => { - Object.entries(patch).forEach(([k, v]) => { - docData[k] = v; - }); - return docData; - }); - }, - async _saveData(newData) { - var state = await getLocalDocStateByParent(this.parent); - var oldData = this._data; - newData.id = this.id; - return state.storageInstance.bulkWrite([{ - previous: oldData, - document: newData - }], 'local-document-save-data').then(res => { - var docResult = res.success[0]; - if (!docResult) { - throw res.error[0]; - } - newData = flatClone(newData); - newData._rev = docResult._rev; - }); - }, - async remove() { - var state = await getLocalDocStateByParent(this.parent); - var writeData = flatClone(this._data); - writeData._deleted = true; - return writeSingle(state.storageInstance, { - previous: this._data, - document: writeData - }, 'local-document-remove').then(writeResult => state.docCache.getCachedRxDocument(writeResult)); - } -}; -var INIT_DONE = false; -var _init = () => { - if (INIT_DONE) return;else INIT_DONE = true; - - // add functions of RxDocument - var docBaseProto = basePrototype; - var 
props = Object.getOwnPropertyNames(docBaseProto); - props.forEach(key => { - var exists = Object.getOwnPropertyDescriptor(RxLocalDocumentPrototype, key); - if (exists) return; - var desc = Object.getOwnPropertyDescriptor(docBaseProto, key); - Object.defineProperty(RxLocalDocumentPrototype, key, desc); - }); - - /** - * Overwrite things that do not work on local documents - * with a throwing function. - */ - var getThrowingFun = k => () => { - throw newRxError('LD6', { - functionName: k - }); - }; - ['populate', 'update', 'putAttachment', 'getAttachment', 'allAttachments'].forEach(k => RxLocalDocumentPrototype[k] = getThrowingFun(k)); -}; -export function createRxLocalDocument(data, parent) { - _init(); - var newDoc = new RxLocalDocumentClass(data.id, data, parent); - Object.setPrototypeOf(newDoc, RxLocalDocumentPrototype); - newDoc.prototype = RxLocalDocumentPrototype; - return newDoc; -} -export function getRxDatabaseFromLocalDocument(doc) { - var parent = doc.parent; - if (isRxDatabase(parent)) { - return parent; - } else { - return parent.database; - } -} -//# sourceMappingURL=rx-local-document.js.map \ No newline at end of file diff --git a/dist/esm/plugins/local-documents/rx-local-document.js.map b/dist/esm/plugins/local-documents/rx-local-document.js.map deleted file mode 100644 index f60538419a6..00000000000 --- a/dist/esm/plugins/local-documents/rx-local-document.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"rx-local-document.js","names":["distinctUntilChanged","filter","map","shareReplay","startWith","overwritable","getDocumentDataOfRxChangeEvent","basePrototype","createRxDocumentConstructor","newRxError","newRxTypeError","writeSingle","flatClone","getFromMapOrThrow","getProperty","RXJS_SHARE_REPLAY_DEFAULTS","getLocalDocStateByParent","LOCAL_DOC_STATE_BY_PARENT_RESOLVED","isRxDatabase","RxDocumentParent","RxLocalDocumentClass","_RxDocumentParent","id","jsonData","parent","_this2","call","_inheritsLoose","RxLocalDocumentPrototype","isLocal","allAttachments$","document","primaryPath","primary","$","_this","state","pipe","changeEvent","documentId","docCache","getLatestDocumentData","prev","curr","_rev","docData","getCachedRxDocument","$$","db","getRxDatabaseFromLocalDocument","reactivity","getReactivityFactory","fromObservable","getLatest","_data","deleted$$","deleted$","deleted","latestDocData","get","objPath","undefined","valueObj","deepFreezeWhenDevMode","get$","isDevMode","includes","localDocument","data","get$$","incrementalModify","mutationFunction","incrementalWriteQueue","addWrite","then","result","incrementalPatch","patch","Object","entries","forEach","k","v","_saveData","newData","oldData","storageInstance","bulkWrite","previous","res","docResult","success","error","remove","writeData","_deleted","writeResult","INIT_DONE","_init","docBaseProto","props","getOwnPropertyNames","key","exists","getOwnPropertyDescriptor","desc","defineProperty","getThrowingFun","functionName","createRxLocalDocument","newDoc","setPrototypeOf","prototype","doc","database"],"sources":["../../../../src/plugins/local-documents/rx-local-document.ts"],"sourcesContent":["import { Observable } from 'rxjs';\nimport {\n distinctUntilChanged,\n filter,\n map,\n shareReplay,\n startWith\n} from 'rxjs';\nimport { overwritable } from '../../overwritable.ts';\nimport { getDocumentDataOfRxChangeEvent } from '../../rx-change-event.ts';\nimport {\n basePrototype,\n 
createRxDocumentConstructor\n} from '../../rx-document.ts';\nimport {\n newRxError,\n newRxTypeError\n} from '../../rx-error.ts';\nimport { writeSingle } from '../../rx-storage-helper.ts';\nimport type {\n LocalDocumentModifyFunction,\n RxCollection,\n RxDatabase,\n RxDocument,\n RxDocumentData,\n RxDocumentWriteData,\n RxLocalDocument,\n RxLocalDocumentData\n} from '../../types/index.d.ts';\nimport {\n flatClone,\n getDefaultRevision,\n getDefaultRxDocumentMeta,\n getFromMapOrThrow,\n getProperty,\n RXJS_SHARE_REPLAY_DEFAULTS\n} from '../../plugins/utils/index.ts';\nimport { getLocalDocStateByParent, LOCAL_DOC_STATE_BY_PARENT_RESOLVED } from './local-documents-helper.ts';\nimport { isRxDatabase } from '../../rx-database.ts';\n\nconst RxDocumentParent = createRxDocumentConstructor() as any;\n\nclass RxLocalDocumentClass extends RxDocumentParent {\n constructor(\n public readonly id: string,\n jsonData: DocData,\n public readonly parent: RxCollection | RxDatabase\n ) {\n super(null, jsonData);\n }\n}\n\n\n\nconst RxLocalDocumentPrototype: any = {\n get isLocal() {\n return true;\n },\n\n //\n // overwrites\n //\n get allAttachments$() {\n // this is overwritten here because we cannot re-set getters on the prototype\n throw newRxError('LD1', {\n document: this\n });\n },\n get primaryPath() {\n return 'id';\n },\n get primary() {\n return this.id;\n },\n get $(): Observable> {\n const _this: RxLocalDocumentClass = this as any;\n const state = getFromMapOrThrow(LOCAL_DOC_STATE_BY_PARENT_RESOLVED, this.parent);\n return _this.parent.$.pipe(\n filter(changeEvent => changeEvent.documentId === this.primary),\n filter(changeEvent => changeEvent.isLocal),\n map(changeEvent => getDocumentDataOfRxChangeEvent(changeEvent)),\n startWith(state.docCache.getLatestDocumentData(this.primary)),\n distinctUntilChanged((prev, curr) => prev._rev === curr._rev),\n map(docData => state.docCache.getCachedRxDocument(docData)),\n shareReplay(RXJS_SHARE_REPLAY_DEFAULTS)\n ) as Observable;\n 
},\n get $$(): any {\n const _this: RxLocalDocumentClass = this as any;\n const db = getRxDatabaseFromLocalDocument(_this);\n const reactivity = db.getReactivityFactory();\n return reactivity.fromObservable(\n _this.$,\n _this.getLatest()._data,\n db\n );\n },\n get deleted$$() {\n const _this: RxLocalDocumentClass = this as any;\n const db = getRxDatabaseFromLocalDocument(_this);\n const reactivity = db.getReactivityFactory();\n return reactivity.fromObservable(\n _this.deleted$,\n _this.getLatest().deleted,\n db\n );\n },\n getLatest(this: RxLocalDocument): RxLocalDocument {\n const state = getFromMapOrThrow(LOCAL_DOC_STATE_BY_PARENT_RESOLVED, this.parent);\n const latestDocData = state.docCache.getLatestDocumentData(this.primary);\n return state.docCache.getCachedRxDocument(latestDocData) as any;\n },\n get(this: RxDocument, objPath: string) {\n objPath = 'data.' + objPath;\n\n if (!this._data) {\n return undefined;\n }\n if (typeof objPath !== 'string') {\n throw newRxTypeError('LD2', {\n objPath\n });\n }\n\n let valueObj = getProperty(this._data, objPath);\n valueObj = overwritable.deepFreezeWhenDevMode(valueObj);\n return valueObj;\n },\n get$(this: RxDocument, objPath: string) {\n objPath = 'data.' 
+ objPath;\n\n if (overwritable.isDevMode()) {\n if (objPath.includes('.item.')) {\n throw newRxError('LD3', {\n objPath\n });\n }\n if (objPath === this.primaryPath) {\n throw newRxError('LD4');\n }\n }\n return this.$\n .pipe(\n map(localDocument => localDocument._data),\n map(data => getProperty(data, objPath)),\n distinctUntilChanged()\n );\n },\n get$$(this: RxDocument, objPath: string) {\n const db = getRxDatabaseFromLocalDocument(this as any);\n const reactivity = db.getReactivityFactory();\n return reactivity.fromObservable(\n this.get$(objPath),\n this.getLatest().get(objPath),\n db\n );\n },\n async incrementalModify(\n this: RxLocalDocument,\n mutationFunction: LocalDocumentModifyFunction\n ) {\n const state = await getLocalDocStateByParent(this.parent);\n\n return state.incrementalWriteQueue.addWrite(\n this._data as any,\n async (docData) => {\n docData.data = await mutationFunction(docData.data, this);\n return docData;\n }\n ).then(result => state.docCache.getCachedRxDocument(result as any));\n },\n incrementalPatch(patch: Partial) {\n return this.incrementalModify((docData: any) => {\n Object\n .entries(patch)\n .forEach(([k, v]) => {\n docData[k] = v;\n });\n return docData;\n });\n },\n async _saveData(this: RxLocalDocument, newData: RxDocumentData) {\n const state = await getLocalDocStateByParent(this.parent);\n const oldData: RxDocumentData = this._data;\n newData.id = (this as any).id;\n return state.storageInstance.bulkWrite([{\n previous: oldData,\n document: newData\n }], 'local-document-save-data')\n .then((res) => {\n const docResult = res.success[0];\n if (!docResult) {\n throw res.error[0];\n }\n newData = flatClone(newData);\n newData._rev = docResult._rev;\n });\n },\n\n async remove(this: RxLocalDocument): Promise> {\n const state = await getLocalDocStateByParent(this.parent);\n const writeData = flatClone(this._data);\n writeData._deleted = true;\n return writeSingle(state.storageInstance, {\n previous: this._data,\n document: 
writeData\n }, 'local-document-remove')\n .then((writeResult) => state.docCache.getCachedRxDocument(writeResult) as any);\n }\n};\n\n\n\nlet INIT_DONE = false;\nconst _init = () => {\n if (INIT_DONE) return;\n else INIT_DONE = true;\n\n // add functions of RxDocument\n const docBaseProto = basePrototype;\n const props = Object.getOwnPropertyNames(docBaseProto);\n props.forEach(key => {\n const exists = Object.getOwnPropertyDescriptor(RxLocalDocumentPrototype, key);\n if (exists) return;\n const desc: any = Object.getOwnPropertyDescriptor(docBaseProto, key);\n Object.defineProperty(RxLocalDocumentPrototype, key, desc);\n });\n\n\n /**\n * Overwrite things that do not work on local documents\n * with a throwing function.\n */\n const getThrowingFun = (k: string) => () => {\n throw newRxError('LD6', {\n functionName: k\n });\n };\n [\n 'populate',\n 'update',\n 'putAttachment',\n 'getAttachment',\n 'allAttachments'\n ].forEach((k: string) => RxLocalDocumentPrototype[k] = getThrowingFun(k));\n};\n\n\n\nexport function createRxLocalDocument(\n data: RxDocumentData>,\n parent: any\n): RxLocalDocument {\n _init();\n const newDoc = new RxLocalDocumentClass(data.id, data, parent);\n Object.setPrototypeOf(newDoc, RxLocalDocumentPrototype);\n newDoc.prototype = RxLocalDocumentPrototype;\n return newDoc as any;\n}\n\n\nexport function getRxDatabaseFromLocalDocument(doc: RxLocalDocument | RxLocalDocumentClass) {\n const parent = doc.parent;\n if (isRxDatabase(parent)) {\n return parent;\n } else {\n return (parent as RxCollection).database;\n 
}\n}\n"],"mappings":";AACA,SACIA,oBAAoB,EACpBC,MAAM,EACNC,GAAG,EACHC,WAAW,EACXC,SAAS,QACN,MAAM;AACb,SAASC,YAAY,QAAQ,uBAAuB;AACpD,SAASC,8BAA8B,QAAQ,0BAA0B;AACzE,SACIC,aAAa,EACbC,2BAA2B,QACxB,sBAAsB;AAC7B,SACIC,UAAU,EACVC,cAAc,QACX,mBAAmB;AAC1B,SAASC,WAAW,QAAQ,4BAA4B;AAWxD,SACIC,SAAS,EAGTC,iBAAiB,EACjBC,WAAW,EACXC,0BAA0B,QACvB,8BAA8B;AACrC,SAASC,wBAAwB,EAAEC,kCAAkC,QAAQ,6BAA6B;AAC1G,SAASC,YAAY,QAAQ,sBAAsB;AAEnD,IAAMC,gBAAgB,GAAGX,2BAA2B,CAAC,CAAQ;AAAC,IAExDY,oBAAoB,0BAAAC,iBAAA;EACtB,SAAAD,qBACoBE,EAAU,EAC1BC,QAAiB,EACDC,MAAiC,EACnD;IAAA,IAAAC,MAAA;IACEA,MAAA,GAAAJ,iBAAA,CAAAK,IAAA,OAAM,IAAI,EAAEH,QAAQ,CAAC;IAACE,MAAA,CAJNH,EAAU,GAAVA,EAAU;IAAAG,MAAA,CAEVD,MAAiC,GAAjCA,MAAiC;IAAA,OAAAC,MAAA;EAGrD;EAACE,cAAA,CAAAP,oBAAA,EAAAC,iBAAA;EAAA,OAAAD,oBAAA;AAAA,EAP6CD,gBAAgB;AAYlE,IAAMS,wBAA6B,GAAG;EAClC,IAAIC,OAAOA,CAAA,EAAG;IACV,OAAO,IAAI;EACf,CAAC;EAED;EACA;EACA;EACA,IAAIC,eAAeA,CAAA,EAAG;IAClB;IACA,MAAMrB,UAAU,CAAC,KAAK,EAAE;MACpBsB,QAAQ,EAAE;IACd,CAAC,CAAC;EACN,CAAC;EACD,IAAIC,WAAWA,CAAA,EAAG;IACd,OAAO,IAAI;EACf,CAAC;EACD,IAAIC,OAAOA,CAAA,EAAG;IACV,OAAO,IAAI,CAACX,EAAE;EAClB,CAAC;EACD,IAAIY,CAACA,CAAA,EAA0C;IAC3C,IAAMC,KAA2B,GAAG,IAAW;IAC/C,IAAMC,KAAK,GAAGvB,iBAAiB,CAACI,kCAAkC,EAAE,IAAI,CAACO,MAAM,CAAC;IAChF,OAAOW,KAAK,CAACX,MAAM,CAACU,CAAC,CAACG,IAAI,CACtBpC,MAAM,CAACqC,WAAW,IAAIA,WAAW,CAACC,UAAU,KAAK,IAAI,CAACN,OAAO,CAAC,EAC9DhC,MAAM,CAACqC,WAAW,IAAIA,WAAW,CAACT,OAAO,CAAC,EAC1C3B,GAAG,CAACoC,WAAW,IAAIhC,8BAA8B,CAACgC,WAAW,CAAC,CAAC,EAC/DlC,SAAS,CAACgC,KAAK,CAACI,QAAQ,CAACC,qBAAqB,CAAC,IAAI,CAACR,OAAO,CAAC,CAAC,EAC7DjC,oBAAoB,CAAC,CAAC0C,IAAI,EAAEC,IAAI,KAAKD,IAAI,CAACE,IAAI,KAAKD,IAAI,CAACC,IAAI,CAAC,EAC7D1C,GAAG,CAAC2C,OAAO,IAAIT,KAAK,CAACI,QAAQ,CAACM,mBAAmB,CAACD,OAAO,CAAC,CAAC,EAC3D1C,WAAW,CAACY,0BAA0B,CAC1C,CAAC;EACL,CAAC;EACD,IAAIgC,EAAEA,CAAA,EAAQ;IACV,IAAMZ,KAA2B,GAAG,IAAW;IAC/C,IAAMa,EAAE,GAAGC,8BAA8B,CAACd,KAAK,CAAC;IAChD,IAAMe,UAAU,GAAGF,EAAE,CAACG,oBAAoB,CAAC,CAAC;IAC5C,OAAOD,UAAU,CAACE,cAAc,CAC5BjB,KAAK,CAACD,CAAC,EACPC,KAAK,CAACkB,SAAS,CAAC,CAAC,CAACC,KAAK
,EACvBN,EACJ,CAAC;EACL,CAAC;EACD,IAAIO,SAASA,CAAA,EAAG;IACZ,IAAMpB,KAA2B,GAAG,IAAW;IAC/C,IAAMa,EAAE,GAAGC,8BAA8B,CAACd,KAAK,CAAC;IAChD,IAAMe,UAAU,GAAGF,EAAE,CAACG,oBAAoB,CAAC,CAAC;IAC5C,OAAOD,UAAU,CAACE,cAAc,CAC5BjB,KAAK,CAACqB,QAAQ,EACdrB,KAAK,CAACkB,SAAS,CAAC,CAAC,CAACI,OAAO,EACzBT,EACJ,CAAC;EACL,CAAC;EACDK,SAASA,CAAA,EAAmD;IACxD,IAAMjB,KAAK,GAAGvB,iBAAiB,CAACI,kCAAkC,EAAE,IAAI,CAACO,MAAM,CAAC;IAChF,IAAMkC,aAAa,GAAGtB,KAAK,CAACI,QAAQ,CAACC,qBAAqB,CAAC,IAAI,CAACR,OAAO,CAAC;IACxE,OAAOG,KAAK,CAACI,QAAQ,CAACM,mBAAmB,CAACY,aAAa,CAAC;EAC5D,CAAC;EACDC,GAAGA,CAAmBC,OAAe,EAAE;IACnCA,OAAO,GAAG,OAAO,GAAGA,OAAO;IAE3B,IAAI,CAAC,IAAI,CAACN,KAAK,EAAE;MACb,OAAOO,SAAS;IACpB;IACA,IAAI,OAAOD,OAAO,KAAK,QAAQ,EAAE;MAC7B,MAAMlD,cAAc,CAAC,KAAK,EAAE;QACxBkD;MACJ,CAAC,CAAC;IACN;IAEA,IAAIE,QAAQ,GAAGhD,WAAW,CAAC,IAAI,CAACwC,KAAK,EAAEM,OAAO,CAAC;IAC/CE,QAAQ,GAAGzD,YAAY,CAAC0D,qBAAqB,CAACD,QAAQ,CAAC;IACvD,OAAOA,QAAQ;EACnB,CAAC;EACDE,IAAIA,CAAmBJ,OAAe,EAAE;IACpCA,OAAO,GAAG,OAAO,GAAGA,OAAO;IAE3B,IAAIvD,YAAY,CAAC4D,SAAS,CAAC,CAAC,EAAE;MAC1B,IAAIL,OAAO,CAACM,QAAQ,CAAC,QAAQ,CAAC,EAAE;QAC5B,MAAMzD,UAAU,CAAC,KAAK,EAAE;UACpBmD;QACJ,CAAC,CAAC;MACN;MACA,IAAIA,OAAO,KAAK,IAAI,CAAC5B,WAAW,EAAE;QAC9B,MAAMvB,UAAU,CAAC,KAAK,CAAC;MAC3B;IACJ;IACA,OAAO,IAAI,CAACyB,CAAC,CACRG,IAAI,CACDnC,GAAG,CAACiE,aAAa,IAAIA,aAAa,CAACb,KAAK,CAAC,EACzCpD,GAAG,CAACkE,IAAI,IAAItD,WAAW,CAACsD,IAAI,EAAER,OAAO,CAAC,CAAC,EACvC5D,oBAAoB,CAAC,CACzB,CAAC;EACT,CAAC;EACDqE,KAAKA,CAAmBT,OAAe,EAAE;IACrC,IAAMZ,EAAE,GAAGC,8BAA8B,CAAC,IAAW,CAAC;IACtD,IAAMC,UAAU,GAAGF,EAAE,CAACG,oBAAoB,CAAC,CAAC;IAC5C,OAAOD,UAAU,CAACE,cAAc,CAC5B,IAAI,CAACY,IAAI,CAACJ,OAAO,CAAC,EAClB,IAAI,CAACP,SAAS,CAAC,CAAC,CAACM,GAAG,CAACC,OAAO,CAAC,EAC7BZ,EACJ,CAAC;EACL,CAAC;EACD,MAAMsB,iBAAiBA,CAEnBC,gBAAkD,EACpD;IACE,IAAMnC,KAAK,GAAG,MAAMpB,wBAAwB,CAAC,IAAI,CAACQ,MAAM,CAAC;IAEzD,OAAOY,KAAK,CAACoC,qBAAqB,CAACC,QAAQ,CACvC,IAAI,CAACnB,KAAK,EACV,MAAOT,OAAO,IAAK;MACfA,OAAO,CAACuB,IAAI,GAAG,MAAMG,gBAAgB,CAAC1B,OAAO,CAACuB,IAAI,EAAE,IAAI,CAAC;MACzD,OAAOvB,OAAO;IAClB,CACJ,CAAC,CAAC
6B,IAAI,CAACC,MAAM,IAAIvC,KAAK,CAACI,QAAQ,CAACM,mBAAmB,CAAC6B,MAAa,CAAC,CAAC;EACvE,CAAC;EACDC,gBAAgBA,CAACC,KAAmB,EAAE;IAClC,OAAO,IAAI,CAACP,iBAAiB,CAAEzB,OAAY,IAAK;MAC5CiC,MAAM,CACDC,OAAO,CAACF,KAAK,CAAC,CACdG,OAAO,CAAC,CAAC,CAACC,CAAC,EAAEC,CAAC,CAAC,KAAK;QACjBrC,OAAO,CAACoC,CAAC,CAAC,GAAGC,CAAC;MAClB,CAAC,CAAC;MACN,OAAOrC,OAAO;IAClB,CAAC,CAAC;EACN,CAAC;EACD,MAAMsC,SAASA,CAA6BC,OAA4C,EAAE;IACtF,IAAMhD,KAAK,GAAG,MAAMpB,wBAAwB,CAAC,IAAI,CAACQ,MAAM,CAAC;IACzD,IAAM6D,OAA4C,GAAG,IAAI,CAAC/B,KAAK;IAC/D8B,OAAO,CAAC9D,EAAE,GAAI,IAAI,CAASA,EAAE;IAC7B,OAAOc,KAAK,CAACkD,eAAe,CAACC,SAAS,CAAC,CAAC;MACpCC,QAAQ,EAAEH,OAAO;MACjBtD,QAAQ,EAAEqD;IACd,CAAC,CAAC,EAAE,0BAA0B,CAAC,CAC1BV,IAAI,CAAEe,GAAG,IAAK;MACX,IAAMC,SAAS,GAAGD,GAAG,CAACE,OAAO,CAAC,CAAC,CAAC;MAChC,IAAI,CAACD,SAAS,EAAE;QACZ,MAAMD,GAAG,CAACG,KAAK,CAAC,CAAC,CAAC;MACtB;MACAR,OAAO,GAAGxE,SAAS,CAACwE,OAAO,CAAC;MAC5BA,OAAO,CAACxC,IAAI,GAAG8C,SAAS,CAAC9C,IAAI;IACjC,CAAC,CAAC;EACV,CAAC;EAED,MAAMiD,MAAMA,CAAA,EAA4D;IACpE,IAAMzD,KAAK,GAAG,MAAMpB,wBAAwB,CAAC,IAAI,CAACQ,MAAM,CAAC;IACzD,IAAMsE,SAAS,GAAGlF,SAAS,CAAC,IAAI,CAAC0C,KAAK,CAAC;IACvCwC,SAAS,CAACC,QAAQ,GAAG,IAAI;IACzB,OAAOpF,WAAW,CAACyB,KAAK,CAACkD,eAAe,EAAE;MACtCE,QAAQ,EAAE,IAAI,CAAClC,KAAK;MACpBvB,QAAQ,EAAE+D;IACd,CAAC,EAAE,uBAAuB,CAAC,CACtBpB,IAAI,CAAEsB,WAAW,IAAK5D,KAAK,CAACI,QAAQ,CAACM,mBAAmB,CAACkD,WAAW,CAAQ,CAAC;EACtF;AACJ,CAAC;AAID,IAAIC,SAAS,GAAG,KAAK;AACrB,IAAMC,KAAK,GAAGA,CAAA,KAAM;EAChB,IAAID,SAAS,EAAE,OAAO,KACjBA,SAAS,GAAG,IAAI;;EAErB;EACA,IAAME,YAAY,GAAG5F,aAAa;EAClC,IAAM6F,KAAK,GAAGtB,MAAM,CAACuB,mBAAmB,CAACF,YAAY,CAAC;EACtDC,KAAK,CAACpB,OAAO,CAACsB,GAAG,IAAI;IACjB,IAAMC,MAAM,GAAGzB,MAAM,CAAC0B,wBAAwB,CAAC5E,wBAAwB,EAAE0E,GAAG,CAAC;IAC7E,IAAIC,MAAM,EAAE;IACZ,IAAME,IAAS,GAAG3B,MAAM,CAAC0B,wBAAwB,CAACL,YAAY,EAAEG,GAAG,CAAC;IACpExB,MAAM,CAAC4B,cAAc,CAAC9E,wBAAwB,EAAE0E,GAAG,EAAEG,IAAI,CAAC;EAC9D,CAAC,CAAC;;EAGF;AACJ;AACA;AACA;EACI,IAAME,cAAc,GAAI1B,CAAS,IAAK,MAAM;IACxC,MAAMxE,UAAU,CAAC,KAAK,EAAE;MACpBmG,YAAY,EAAE3B;IAClB,CAAC,CAAC;EACN,CAAC;EACD,CACI,UAAU,EACV,Q
AAQ,EACR,eAAe,EACf,eAAe,EACf,gBAAgB,CACnB,CAACD,OAAO,CAAEC,CAAS,IAAKrD,wBAAwB,CAACqD,CAAC,CAAC,GAAG0B,cAAc,CAAC1B,CAAC,CAAC,CAAC;AAC7E,CAAC;AAID,OAAO,SAAS4B,qBAAqBA,CACjCzC,IAAkD,EAClD5C,MAAW,EACa;EACxB0E,KAAK,CAAC,CAAC;EACP,IAAMY,MAAM,GAAG,IAAI1F,oBAAoB,CAACgD,IAAI,CAAC9C,EAAE,EAAE8C,IAAI,EAAE5C,MAAM,CAAC;EAC9DsD,MAAM,CAACiC,cAAc,CAACD,MAAM,EAAElF,wBAAwB,CAAC;EACvDkF,MAAM,CAACE,SAAS,GAAGpF,wBAAwB;EAC3C,OAAOkF,MAAM;AACjB;AAGA,OAAO,SAAS7D,8BAA8BA,CAACgE,GAAgD,EAAE;EAC7F,IAAMzF,MAAM,GAAGyF,GAAG,CAACzF,MAAM;EACzB,IAAIN,YAAY,CAACM,MAAM,CAAC,EAAE;IACtB,OAAOA,MAAM;EACjB,CAAC,MAAM;IACH,OAAQA,MAAM,CAAkB0F,QAAQ;EAC5C;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/migration-schema/index.js b/dist/esm/plugins/migration-schema/index.js deleted file mode 100644 index d6b4096acb3..00000000000 --- a/dist/esm/plugins/migration-schema/index.js +++ /dev/null @@ -1,42 +0,0 @@ -import { shareReplay } from 'rxjs'; -import { getFromMapOrCreate, PROMISE_RESOLVE_FALSE, RXJS_SHARE_REPLAY_DEFAULTS } from "../../plugins/utils/index.js"; -import { RxMigrationState } from "./rx-migration-state.js"; -import { getMigrationStateByDatabase, mustMigrate, onDatabaseDestroy } from "./migration-helpers.js"; -import { addRxPlugin } from "../../plugin.js"; -import { RxDBLocalDocumentsPlugin } from "../local-documents/index.js"; -export var DATA_MIGRATOR_BY_COLLECTION = new WeakMap(); -export var RxDBMigrationPlugin = { - name: 'migration-schema', - rxdb: true, - init() { - addRxPlugin(RxDBLocalDocumentsPlugin); - }, - hooks: { - preDestroyRxDatabase: { - after: onDatabaseDestroy - } - }, - prototypes: { - RxDatabase: proto => { - proto.migrationStates = function () { - return getMigrationStateByDatabase(this).pipe(shareReplay(RXJS_SHARE_REPLAY_DEFAULTS)); - }; - }, - RxCollection: proto => { - proto.getMigrationState = function () { - return getFromMapOrCreate(DATA_MIGRATOR_BY_COLLECTION, this, () => new RxMigrationState(this.asRxCollection, this.migrationStrategies)); 
- }; - proto.migrationNeeded = function () { - if (this.schema.version === 0) { - return PROMISE_RESOLVE_FALSE; - } - return mustMigrate(this.getMigrationState()); - }; - } - } -}; -export var RxDBMigrationSchemaPlugin = RxDBMigrationPlugin; -export * from "./rx-migration-state.js"; -export * from "./migration-helpers.js"; -export * from "./migration-types.js"; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/migration-schema/index.js.map b/dist/esm/plugins/migration-schema/index.js.map deleted file mode 100644 index f01f1c5901b..00000000000 --- a/dist/esm/plugins/migration-schema/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["shareReplay","getFromMapOrCreate","PROMISE_RESOLVE_FALSE","RXJS_SHARE_REPLAY_DEFAULTS","RxMigrationState","getMigrationStateByDatabase","mustMigrate","onDatabaseDestroy","addRxPlugin","RxDBLocalDocumentsPlugin","DATA_MIGRATOR_BY_COLLECTION","WeakMap","RxDBMigrationPlugin","name","rxdb","init","hooks","preDestroyRxDatabase","after","prototypes","RxDatabase","proto","migrationStates","pipe","RxCollection","getMigrationState","asRxCollection","migrationStrategies","migrationNeeded","schema","version","RxDBMigrationSchemaPlugin"],"sources":["../../../../src/plugins/migration-schema/index.ts"],"sourcesContent":["import {\n Observable\n} from 'rxjs';\nimport {\n shareReplay\n} from 'rxjs';\nimport type {\n RxPlugin,\n RxCollection,\n RxDatabase\n} from '../../types/index.ts';\nimport {\n getFromMapOrCreate,\n PROMISE_RESOLVE_FALSE,\n RXJS_SHARE_REPLAY_DEFAULTS\n} from '../../plugins/utils/index.ts';\nimport {\n RxMigrationState\n} from './rx-migration-state.ts';\nimport {\n getMigrationStateByDatabase,\n mustMigrate,\n onDatabaseDestroy\n} from './migration-helpers.ts';\nimport { addRxPlugin } from '../../plugin.ts';\nimport { RxDBLocalDocumentsPlugin } from '../local-documents/index.ts';\n\nexport const DATA_MIGRATOR_BY_COLLECTION: WeakMap = new WeakMap();\n\nexport 
const RxDBMigrationPlugin: RxPlugin = {\n name: 'migration-schema',\n rxdb: true,\n init() {\n addRxPlugin(RxDBLocalDocumentsPlugin);\n },\n hooks: {\n preDestroyRxDatabase: {\n after: onDatabaseDestroy\n }\n },\n prototypes: {\n RxDatabase: (proto: any) => {\n proto.migrationStates = function (this: RxDatabase): Observable {\n return getMigrationStateByDatabase(this).pipe(\n shareReplay(RXJS_SHARE_REPLAY_DEFAULTS)\n );\n };\n },\n RxCollection: (proto: any) => {\n proto.getMigrationState = function (this: RxCollection): RxMigrationState {\n return getFromMapOrCreate(\n DATA_MIGRATOR_BY_COLLECTION,\n this,\n () => new RxMigrationState(\n this.asRxCollection,\n this.migrationStrategies\n )\n );\n };\n proto.migrationNeeded = function (this: RxCollection) {\n if (this.schema.version === 0) {\n return PROMISE_RESOLVE_FALSE;\n }\n return mustMigrate(this.getMigrationState());\n };\n }\n }\n};\n\nexport const RxDBMigrationSchemaPlugin = RxDBMigrationPlugin;\n\n\nexport * from './rx-migration-state.ts';\nexport * from './migration-helpers.ts';\nexport * from 
'./migration-types.ts';\n"],"mappings":"AAGA,SACIA,WAAW,QACR,MAAM;AAMb,SACIC,kBAAkB,EAClBC,qBAAqB,EACrBC,0BAA0B,QACvB,8BAA8B;AACrC,SACIC,gBAAgB,QACb,yBAAyB;AAChC,SACIC,2BAA2B,EAC3BC,WAAW,EACXC,iBAAiB,QACd,wBAAwB;AAC/B,SAASC,WAAW,QAAQ,iBAAiB;AAC7C,SAASC,wBAAwB,QAAQ,6BAA6B;AAEtE,OAAO,IAAMC,2BAAoE,GAAG,IAAIC,OAAO,CAAC,CAAC;AAEjG,OAAO,IAAMC,mBAA6B,GAAG;EACzCC,IAAI,EAAE,kBAAkB;EACxBC,IAAI,EAAE,IAAI;EACVC,IAAIA,CAAA,EAAG;IACHP,WAAW,CAACC,wBAAwB,CAAC;EACzC,CAAC;EACDO,KAAK,EAAE;IACHC,oBAAoB,EAAE;MAClBC,KAAK,EAAEX;IACX;EACJ,CAAC;EACDY,UAAU,EAAE;IACRC,UAAU,EAAGC,KAAU,IAAK;MACxBA,KAAK,CAACC,eAAe,GAAG,YAA4D;QAChF,OAAOjB,2BAA2B,CAAC,IAAI,CAAC,CAACkB,IAAI,CACzCvB,WAAW,CAACG,0BAA0B,CAC1C,CAAC;MACL,CAAC;IACL,CAAC;IACDqB,YAAY,EAAGH,KAAU,IAAK;MAC1BA,KAAK,CAACI,iBAAiB,GAAG,YAAgD;QACtE,OAAOxB,kBAAkB,CACrBS,2BAA2B,EAC3B,IAAI,EACJ,MAAM,IAAIN,gBAAgB,CACtB,IAAI,CAACsB,cAAc,EACnB,IAAI,CAACC,mBACT,CACJ,CAAC;MACL,CAAC;MACDN,KAAK,CAACO,eAAe,GAAG,YAA8B;QAClD,IAAI,IAAI,CAACC,MAAM,CAACC,OAAO,KAAK,CAAC,EAAE;UAC3B,OAAO5B,qBAAqB;QAChC;QACA,OAAOI,WAAW,CAAC,IAAI,CAACmB,iBAAiB,CAAC,CAAC,CAAC;MAChD,CAAC;IACL;EACJ;AACJ,CAAC;AAED,OAAO,IAAMM,yBAAyB,GAAGnB,mBAAmB;AAG5D,cAAc,yBAAyB;AACvC,cAAc,wBAAwB;AACtC,cAAc,sBAAsB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/migration-schema/migration-helpers.js b/dist/esm/plugins/migration-schema/migration-helpers.js deleted file mode 100644 index 088cd1d0b58..00000000000 --- a/dist/esm/plugins/migration-schema/migration-helpers.js +++ /dev/null @@ -1,93 +0,0 @@ -import { BehaviorSubject } from 'rxjs'; -import { INTERNAL_CONTEXT_COLLECTION, getPrimaryKeyOfInternalDocument } from "../../rx-database-internal-store.js"; -import { getPreviousVersions } from "../../rx-schema.js"; -import { PROMISE_RESOLVE_FALSE, PROMISE_RESOLVE_NULL, clone, flatClone, getFromMapOrCreate, toPromise } from "../utils/index.js"; -export async function getOldCollectionMeta(migrationState) { - var collectionDocKeys = 
getPreviousVersions(migrationState.collection.schema.jsonSchema).map(version => migrationState.collection.name + '-' + version); - var found = await migrationState.database.internalStore.findDocumentsById(collectionDocKeys.map(key => getPrimaryKeyOfInternalDocument(key, INTERNAL_CONTEXT_COLLECTION)), false); - if (found.length > 1) { - throw new Error('more than one old collection meta found'); - } - return found[0]; -} - -/** - * runs the doc-data through all following migrationStrategies - * so it will match the newest schema. - * @throws Error if final doc does not match final schema or migrationStrategy crashes - * @return final object or null if migrationStrategy deleted it - */ -export function migrateDocumentData(collection, docSchemaVersion, docData) { - /** - * We cannot deep-clone Blob or Buffer - * so we just flat clone it here - * and attach it to the deep cloned document data. - */ - var attachmentsBefore = flatClone(docData._attachments); - var mutateableDocData = clone(docData); - var meta = mutateableDocData._meta; - delete mutateableDocData._meta; - mutateableDocData._attachments = attachmentsBefore; - var nextVersion = docSchemaVersion + 1; - - // run the document through migrationStrategies - var currentPromise = Promise.resolve(mutateableDocData); - var _loop = function () { - var version = nextVersion; - currentPromise = currentPromise.then(docOrNull => runStrategyIfNotNull(collection, version, docOrNull)); - nextVersion++; - }; - while (nextVersion <= collection.schema.version) { - _loop(); - } - return currentPromise.then(doc => { - if (doc === null) { - return PROMISE_RESOLVE_NULL; - } - doc._meta = meta; - return doc; - }); -} -export function runStrategyIfNotNull(collection, version, docOrNull) { - if (docOrNull === null) { - return PROMISE_RESOLVE_NULL; - } else { - var ret = collection.migrationStrategies[version](docOrNull, collection); - var retPromise = toPromise(ret); - return retPromise; - } -} - -/** - * returns true if a migration 
is needed - */ -export async function mustMigrate(migrationState) { - if (migrationState.collection.schema.version === 0) { - return PROMISE_RESOLVE_FALSE; - } - var oldColDoc = await getOldCollectionMeta(migrationState); - return !!oldColDoc; -} -export var MIGRATION_DEFAULT_BATCH_SIZE = 200; -export var DATA_MIGRATION_STATE_SUBJECT_BY_DATABASE = new WeakMap(); -export function addMigrationStateToDatabase(migrationState) { - var allSubject = getMigrationStateByDatabase(migrationState.database); - var allList = allSubject.getValue().slice(0); - allList.push(migrationState); - allSubject.next(allList); -} -export function getMigrationStateByDatabase(database) { - return getFromMapOrCreate(DATA_MIGRATION_STATE_SUBJECT_BY_DATABASE, database, () => new BehaviorSubject([])); -} - -/** - * Complete on database destroy - * so people do not have to unsubscribe - */ -export function onDatabaseDestroy(database) { - var subject = DATA_MIGRATION_STATE_SUBJECT_BY_DATABASE.get(database); - if (subject) { - subject.complete(); - } -} -//# sourceMappingURL=migration-helpers.js.map \ No newline at end of file diff --git a/dist/esm/plugins/migration-schema/migration-helpers.js.map b/dist/esm/plugins/migration-schema/migration-helpers.js.map deleted file mode 100644 index 23331389eaf..00000000000 --- a/dist/esm/plugins/migration-schema/migration-helpers.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"migration-helpers.js","names":["BehaviorSubject","INTERNAL_CONTEXT_COLLECTION","getPrimaryKeyOfInternalDocument","getPreviousVersions","PROMISE_RESOLVE_FALSE","PROMISE_RESOLVE_NULL","clone","flatClone","getFromMapOrCreate","toPromise","getOldCollectionMeta","migrationState","collectionDocKeys","collection","schema","jsonSchema","map","version","name","found","database","internalStore","findDocumentsById","key","length","Error","migrateDocumentData","docSchemaVersion","docData","attachmentsBefore","_attachments","mutateableDocData","meta","_meta","nextVersion","currentPromise","Promise","resolve","_loop","then","docOrNull","runStrategyIfNotNull","doc","ret","migrationStrategies","retPromise","mustMigrate","oldColDoc","MIGRATION_DEFAULT_BATCH_SIZE","DATA_MIGRATION_STATE_SUBJECT_BY_DATABASE","WeakMap","addMigrationStateToDatabase","allSubject","getMigrationStateByDatabase","allList","getValue","slice","push","next","onDatabaseDestroy","subject","get","complete"],"sources":["../../../../src/plugins/migration-schema/migration-helpers.ts"],"sourcesContent":["import { BehaviorSubject } from 'rxjs';\nimport {\n INTERNAL_CONTEXT_COLLECTION,\n getPrimaryKeyOfInternalDocument\n} from '../../rx-database-internal-store.ts';\nimport { getPreviousVersions } from '../../rx-schema.ts';\nimport type {\n InternalStoreCollectionDocType,\n RxCollection,\n RxDatabase,\n RxDocumentData\n} from '../../types/index.d.ts';\nimport {\n PROMISE_RESOLVE_FALSE,\n PROMISE_RESOLVE_NULL,\n clone,\n flatClone,\n getFromMapOrCreate,\n toPromise\n} from '../utils/index.ts';\nimport { RxMigrationState } from './rx-migration-state.ts';\n\nexport async function getOldCollectionMeta(\n migrationState: RxMigrationState\n): Promise> {\n\n const collectionDocKeys = getPreviousVersions(migrationState.collection.schema.jsonSchema)\n .map(version => migrationState.collection.name + '-' + version);\n\n const found = await migrationState.database.internalStore.findDocumentsById(\n 
collectionDocKeys.map(key => getPrimaryKeyOfInternalDocument(\n key,\n INTERNAL_CONTEXT_COLLECTION\n )),\n false\n );\n if (found.length > 1) {\n throw new Error('more than one old collection meta found');\n }\n return found[0];\n}\n\n\n/**\n * runs the doc-data through all following migrationStrategies\n * so it will match the newest schema.\n * @throws Error if final doc does not match final schema or migrationStrategy crashes\n * @return final object or null if migrationStrategy deleted it\n */\nexport function migrateDocumentData(\n collection: RxCollection,\n docSchemaVersion: number,\n docData: any\n): Promise {\n /**\n * We cannot deep-clone Blob or Buffer\n * so we just flat clone it here\n * and attach it to the deep cloned document data.\n */\n const attachmentsBefore = flatClone(docData._attachments);\n const mutateableDocData = clone(docData);\n const meta = mutateableDocData._meta;\n delete mutateableDocData._meta;\n mutateableDocData._attachments = attachmentsBefore;\n\n let nextVersion = docSchemaVersion + 1;\n\n // run the document through migrationStrategies\n let currentPromise = Promise.resolve(mutateableDocData);\n while (nextVersion <= collection.schema.version) {\n const version = nextVersion;\n currentPromise = currentPromise.then(docOrNull => runStrategyIfNotNull(\n collection,\n version,\n docOrNull\n ));\n nextVersion++;\n }\n\n return currentPromise.then(doc => {\n if (doc === null) {\n return PROMISE_RESOLVE_NULL;\n }\n doc._meta = meta;\n return doc;\n });\n}\n\nexport function runStrategyIfNotNull(\n collection: RxCollection,\n version: number,\n docOrNull: any | null\n): Promise {\n if (docOrNull === null) {\n return PROMISE_RESOLVE_NULL;\n } else {\n const ret = collection.migrationStrategies[version](docOrNull, collection);\n const retPromise = toPromise(ret);\n return retPromise;\n }\n}\n\n/**\n * returns true if a migration is needed\n */\nexport async function mustMigrate(\n migrationState: RxMigrationState\n): Promise {\n if 
(migrationState.collection.schema.version === 0) {\n return PROMISE_RESOLVE_FALSE;\n }\n const oldColDoc = await getOldCollectionMeta(migrationState);\n return !!oldColDoc;\n}\nexport const MIGRATION_DEFAULT_BATCH_SIZE = 200;\n\n\nexport type MigrationStateWithCollection = {\n collection: RxCollection;\n migrationState: RxMigrationState;\n};\n\nexport const DATA_MIGRATION_STATE_SUBJECT_BY_DATABASE = new WeakMap>();\nexport function addMigrationStateToDatabase(\n migrationState: RxMigrationState\n) {\n const allSubject = getMigrationStateByDatabase(migrationState.database);\n const allList = allSubject.getValue().slice(0);\n allList.push(migrationState);\n allSubject.next(allList);\n}\nexport function getMigrationStateByDatabase(database: RxDatabase): BehaviorSubject {\n return getFromMapOrCreate(\n DATA_MIGRATION_STATE_SUBJECT_BY_DATABASE,\n database,\n () => new BehaviorSubject([])\n );\n}\n\n/**\n * Complete on database destroy\n * so people do not have to unsubscribe\n */\nexport function onDatabaseDestroy(database: RxDatabase) {\n const subject = DATA_MIGRATION_STATE_SUBJECT_BY_DATABASE.get(database);\n if (subject) {\n subject.complete();\n 
}\n}\n"],"mappings":"AAAA,SAASA,eAAe,QAAQ,MAAM;AACtC,SACIC,2BAA2B,EAC3BC,+BAA+B,QAC5B,qCAAqC;AAC5C,SAASC,mBAAmB,QAAQ,oBAAoB;AAOxD,SACIC,qBAAqB,EACrBC,oBAAoB,EACpBC,KAAK,EACLC,SAAS,EACTC,kBAAkB,EAClBC,SAAS,QACN,mBAAmB;AAG1B,OAAO,eAAeC,oBAAoBA,CACtCC,cAAgC,EACuB;EAEvD,IAAMC,iBAAiB,GAAGT,mBAAmB,CAACQ,cAAc,CAACE,UAAU,CAACC,MAAM,CAACC,UAAU,CAAC,CACrFC,GAAG,CAACC,OAAO,IAAIN,cAAc,CAACE,UAAU,CAACK,IAAI,GAAG,GAAG,GAAGD,OAAO,CAAC;EAEnE,IAAME,KAAK,GAAG,MAAMR,cAAc,CAACS,QAAQ,CAACC,aAAa,CAACC,iBAAiB,CACvEV,iBAAiB,CAACI,GAAG,CAACO,GAAG,IAAIrB,+BAA+B,CACxDqB,GAAG,EACHtB,2BACJ,CAAC,CAAC,EACF,KACJ,CAAC;EACD,IAAIkB,KAAK,CAACK,MAAM,GAAG,CAAC,EAAE;IAClB,MAAM,IAAIC,KAAK,CAAC,yCAAyC,CAAC;EAC9D;EACA,OAAON,KAAK,CAAC,CAAC,CAAC;AACnB;;AAGA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASO,mBAAmBA,CAC/Bb,UAAwB,EACxBc,gBAAwB,EACxBC,OAAY,EACO;EACnB;AACJ;AACA;AACA;AACA;EACI,IAAMC,iBAAiB,GAAGtB,SAAS,CAACqB,OAAO,CAACE,YAAY,CAAC;EACzD,IAAMC,iBAAiB,GAAGzB,KAAK,CAACsB,OAAO,CAAC;EACxC,IAAMI,IAAI,GAAGD,iBAAiB,CAACE,KAAK;EACpC,OAAOF,iBAAiB,CAACE,KAAK;EAC9BF,iBAAiB,CAACD,YAAY,GAAGD,iBAAiB;EAElD,IAAIK,WAAW,GAAGP,gBAAgB,GAAG,CAAC;;EAEtC;EACA,IAAIQ,cAAc,GAAGC,OAAO,CAACC,OAAO,CAACN,iBAAiB,CAAC;EAAC,IAAAO,KAAA,YAAAA,CAAA,EACP;IAC7C,IAAMrB,OAAO,GAAGiB,WAAW;IAC3BC,cAAc,GAAGA,cAAc,CAACI,IAAI,CAACC,SAAS,IAAIC,oBAAoB,CAClE5B,UAAU,EACVI,OAAO,EACPuB,SACJ,CAAC,CAAC;IACFN,WAAW,EAAE;EACjB,CAAC;EARD,OAAOA,WAAW,IAAIrB,UAAU,CAACC,MAAM,CAACG,OAAO;IAAAqB,KAAA;EAAA;EAU/C,OAAOH,cAAc,CAACI,IAAI,CAACG,GAAG,IAAI;IAC9B,IAAIA,GAAG,KAAK,IAAI,EAAE;MACd,OAAOrC,oBAAoB;IAC/B;IACAqC,GAAG,CAACT,KAAK,GAAGD,IAAI;IAChB,OAAOU,GAAG;EACd,CAAC,CAAC;AACN;AAEA,OAAO,SAASD,oBAAoBA,CAChC5B,UAAwB,EACxBI,OAAe,EACfuB,SAAqB,EACF;EACnB,IAAIA,SAAS,KAAK,IAAI,EAAE;IACpB,OAAOnC,oBAAoB;EAC/B,CAAC,MAAM;IACH,IAAMsC,GAAG,GAAG9B,UAAU,CAAC+B,mBAAmB,CAAC3B,OAAO,CAAC,CAACuB,SAAS,EAAE3B,UAAU,CAAC;IAC1E,IAAMgC,UAAU,GAAGpC,SAAS,CAACkC,GAAG,CAAC;IACjC,OAAOE,UAAU;EACrB;AACJ;;AAEA;AACA;AACA;AACA,OAAO,eAAeC,WAAWA,CAC7BnC,cAAgC,EAChB;EAChB,IAAIA,cAAc,CAACE,UAAU,CAACC,MAAM,CAACG,OAAO
,KAAK,CAAC,EAAE;IAChD,OAAOb,qBAAqB;EAChC;EACA,IAAM2C,SAAS,GAAG,MAAMrC,oBAAoB,CAACC,cAAc,CAAC;EAC5D,OAAO,CAAC,CAACoC,SAAS;AACtB;AACA,OAAO,IAAMC,4BAA4B,GAAG,GAAG;AAQ/C,OAAO,IAAMC,wCAAwC,GAAG,IAAIC,OAAO,CAAkD,CAAC;AACtH,OAAO,SAASC,2BAA2BA,CACvCxC,cAAgC,EAClC;EACE,IAAMyC,UAAU,GAAGC,2BAA2B,CAAC1C,cAAc,CAACS,QAAQ,CAAC;EACvE,IAAMkC,OAAO,GAAGF,UAAU,CAACG,QAAQ,CAAC,CAAC,CAACC,KAAK,CAAC,CAAC,CAAC;EAC9CF,OAAO,CAACG,IAAI,CAAC9C,cAAc,CAAC;EAC5ByC,UAAU,CAACM,IAAI,CAACJ,OAAO,CAAC;AAC5B;AACA,OAAO,SAASD,2BAA2BA,CAACjC,QAAoB,EAAuC;EACnG,OAAOZ,kBAAkB,CACrByC,wCAAwC,EACxC7B,QAAQ,EACR,MAAM,IAAIpB,eAAe,CAAqB,EAAE,CACpD,CAAC;AACL;;AAEA;AACA;AACA;AACA;AACA,OAAO,SAAS2D,iBAAiBA,CAACvC,QAAoB,EAAE;EACpD,IAAMwC,OAAO,GAAGX,wCAAwC,CAACY,GAAG,CAACzC,QAAQ,CAAC;EACtE,IAAIwC,OAAO,EAAE;IACTA,OAAO,CAACE,QAAQ,CAAC,CAAC;EACtB;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/migration-schema/migration-types.js b/dist/esm/plugins/migration-schema/migration-types.js deleted file mode 100644 index fbc02c9c316..00000000000 --- a/dist/esm/plugins/migration-schema/migration-types.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=migration-types.js.map \ No newline at end of file diff --git a/dist/esm/plugins/migration-schema/migration-types.js.map b/dist/esm/plugins/migration-schema/migration-types.js.map deleted file mode 100644 index 9fd87e6a5e9..00000000000 --- a/dist/esm/plugins/migration-schema/migration-types.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"migration-types.js","names":[],"sources":["../../../../src/plugins/migration-schema/migration-types.ts"],"sourcesContent":["import type {\n InternalStoreDocType,\n PlainJsonError\n} from '../../types/index.d.ts';\n\nexport type RxMigrationStatus = {\n collectionName: string;\n status: 'RUNNING' | 'DONE' | 'ERROR';\n error?: PlainJsonError;\n\n /**\n * Counters so that you can display\n * the migration state to your user in the UI\n * and show a loading bar.\n */\n count: {\n /**\n * 
Total amount of documents that\n * have to be migrated\n */\n total: number;\n /**\n * Amount of documents that have been migrated already\n * = success + purged\n */\n handled: number;\n /**\n * Total percentage [0-100]\n */\n percent: number;\n };\n};\n\n\n/**\n * To be shared between browser tabs,\n * the migration status is written into a document in the internal storage of the database.\n */\nexport type RxMigrationStatusDocument = InternalStoreDocType;\n\n\nexport type MigrationStatusUpdate = (before: RxMigrationStatus) => RxMigrationStatus;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/migration-schema/rx-migration-state.js b/dist/esm/plugins/migration-schema/rx-migration-state.js deleted file mode 100644 index 00619b96a8a..00000000000 --- a/dist/esm/plugins/migration-schema/rx-migration-state.js +++ /dev/null @@ -1,347 +0,0 @@ -import { Subject, filter, firstValueFrom, map, shareReplay } from 'rxjs'; -import { isBulkWriteConflictError, newRxError } from "../../rx-error.js"; -import { MIGRATION_DEFAULT_BATCH_SIZE, addMigrationStateToDatabase, getOldCollectionMeta, migrateDocumentData, mustMigrate } from "./migration-helpers.js"; -import { PROMISE_RESOLVE_TRUE, RXJS_SHARE_REPLAY_DEFAULTS, clone, deepEqual, ensureNotFalsy, errorToPlainJson, getDefaultRevision, getDefaultRxDocumentMeta } from "../utils/index.js"; -import { getSingleDocument, hasEncryption, observeSingle, writeSingle } from "../../rx-storage-helper.js"; -import { BroadcastChannel, createLeaderElection } from 'broadcast-channel'; -import { META_INSTANCE_SCHEMA_TITLE, awaitRxStorageReplicationFirstInSync, cancelRxStorageReplication, defaultConflictHandler, getRxReplicationMetaInstanceSchema, replicateRxStorageInstance, rxStorageInstanceToReplicationHandler } from "../../replication-protocol/index.js"; -import { overwritable } from "../../overwritable.js"; -import { INTERNAL_CONTEXT_MIGRATION_STATUS, addConnectedStorageToCollection, 
getPrimaryKeyOfInternalDocument } from "../../rx-database-internal-store.js"; -import { prepareQuery } from "../../rx-query.js"; -import { normalizeMangoQuery } from "../../rx-query-helper.js"; -export var RxMigrationState = /*#__PURE__*/function () { - function RxMigrationState(collection, migrationStrategies, statusDocKey = [collection.name, 'v', collection.schema.version].join('-')) { - this.started = false; - this.updateStatusHandlers = []; - this.updateStatusQueue = PROMISE_RESOLVE_TRUE; - this.collection = collection; - this.migrationStrategies = migrationStrategies; - this.statusDocKey = statusDocKey; - this.database = collection.database; - this.oldCollectionMeta = getOldCollectionMeta(this); - this.mustMigrate = mustMigrate(this); - this.statusDocId = getPrimaryKeyOfInternalDocument(this.statusDocKey, INTERNAL_CONTEXT_MIGRATION_STATUS); - addMigrationStateToDatabase(this); - this.$ = observeSingle(this.database.internalStore, this.statusDocId).pipe(filter(d => !!d), map(d => ensureNotFalsy(d).data), shareReplay(RXJS_SHARE_REPLAY_DEFAULTS)); - } - var _proto = RxMigrationState.prototype; - _proto.getStatus = function getStatus() { - return firstValueFrom(this.$); - } - - /** - * Starts the migration. - * Returns void so that people to not get the idea to await - * this function. - * Instead use migratePromise() if you want to await - * the migration. This ensures it works even if the migration - * is run on a different browser tab. - */; - _proto.startMigration = async function startMigration(batchSize = MIGRATION_DEFAULT_BATCH_SIZE) { - var must = await this.mustMigrate; - if (!must) { - return; - } - if (this.started) { - throw newRxError('DM1'); - } - this.started = true; - var broadcastChannel = undefined; - /** - * To ensure that multiple tabs do not migrate the same collection, - * we use a new broadcastChannel/leaderElector for each collection. 
- * This is required because collections can be added dynamically and - * not all tabs might know about this collection. - */ - if (this.database.multiInstance) { - broadcastChannel = new BroadcastChannel(['rx-migration-state', this.database.name, this.collection.name, this.collection.schema.version].join('|')); - var leaderElector = createLeaderElection(broadcastChannel); - await leaderElector.awaitLeadership(); - } - - /** - * Instead of writing a custom migration protocol, - * we do a push-only replication from the old collection data to the new one. - * This also ensure that restarting the replication works without problems. - */ - var oldCollectionMeta = await this.oldCollectionMeta; - var oldStorageInstance = await this.database.storage.createStorageInstance({ - databaseName: this.database.name, - collectionName: this.collection.name, - databaseInstanceToken: this.database.token, - multiInstance: this.database.multiInstance, - options: {}, - schema: oldCollectionMeta.data.schema, - password: this.database.password, - devMode: overwritable.isDevMode() - }); - var connectedInstances = await this.getConnectedStorageInstances(); - - /** - * Initially write the migration status into a meta document. - */ - var totalCount = await this.countAllDoucments([oldStorageInstance].concat(connectedInstances.map(r => r.oldStorage))); - await this.updateStatus(s => { - s.count.total = totalCount; - return s; - }); - try { - /** - * First migrate the connected storages, - * afterwards migrate the normal collection. 
- */ - await Promise.all(connectedInstances.map(async connectedInstance => { - await addConnectedStorageToCollection(this.collection, connectedInstance.newStorage.collectionName, connectedInstance.newStorage.schema); - await this.migrateStorage(connectedInstance.oldStorage, connectedInstance.newStorage, batchSize); - await connectedInstance.newStorage.close(); - })); - await this.migrateStorage(oldStorageInstance, - /** - * Use the originalStorageInstance here - * so that the _meta.lwt time keeps the same - * and our replication checkpoints still point to the - * correct checkpoint. - */ - this.collection.storageInstance.originalStorageInstance, batchSize); - } catch (err) { - await oldStorageInstance.close(); - await this.updateStatus(s => { - s.status = 'ERROR'; - s.error = errorToPlainJson(err); - return s; - }); - return; - } - - // remove old collection meta doc - await writeSingle(this.database.internalStore, { - previous: oldCollectionMeta, - document: Object.assign({}, oldCollectionMeta, { - _deleted: true - }) - }, 'rx-migration-remove-collection-meta'); - await this.updateStatus(s => { - s.status = 'DONE'; - return s; - }); - if (broadcastChannel) { - await broadcastChannel.close(); - } - }; - _proto.updateStatus = function updateStatus(handler) { - this.updateStatusHandlers.push(handler); - this.updateStatusQueue = this.updateStatusQueue.then(async () => { - if (this.updateStatusHandlers.length === 0) { - return; - } - // re-run until no conflict - var useHandlers = this.updateStatusHandlers; - this.updateStatusHandlers = []; - while (true) { - var previous = await getSingleDocument(this.database.internalStore, this.statusDocId); - var newDoc = clone(previous); - if (!previous) { - newDoc = { - id: this.statusDocId, - key: this.statusDocKey, - context: INTERNAL_CONTEXT_MIGRATION_STATUS, - data: { - collectionName: this.collection.name, - status: 'RUNNING', - count: { - total: 0, - handled: 0, - percent: 0 - } - }, - _deleted: false, - _meta: 
getDefaultRxDocumentMeta(), - _rev: getDefaultRevision(), - _attachments: {} - }; - } - var status = ensureNotFalsy(newDoc).data; - for (var oneHandler of useHandlers) { - status = oneHandler(status); - } - status.count.percent = Math.round(status.count.handled / status.count.total * 100); - if (newDoc && previous && deepEqual(newDoc.data, previous.data)) { - break; - } - try { - await writeSingle(this.database.internalStore, { - previous, - document: ensureNotFalsy(newDoc) - }, INTERNAL_CONTEXT_MIGRATION_STATUS); - - // write successful - break; - } catch (err) { - // ignore conflicts - if (!isBulkWriteConflictError(err)) { - throw err; - } - } - } - }); - return this.updateStatusQueue; - }; - _proto.migrateStorage = async function migrateStorage(oldStorage, newStorage, batchSize) { - var replicationMetaStorageInstance = await this.database.storage.createStorageInstance({ - databaseName: this.database.name, - collectionName: 'rx-migration-state-meta-' + this.collection.name + '-' + this.collection.schema.version, - databaseInstanceToken: this.database.token, - multiInstance: this.database.multiInstance, - options: {}, - schema: getRxReplicationMetaInstanceSchema(oldStorage.schema, hasEncryption(oldStorage.schema)), - password: this.database.password, - devMode: overwritable.isDevMode() - }); - var replicationHandlerBase = rxStorageInstanceToReplicationHandler(newStorage, - /** - * Ignore push-conflicts. - * If this happens we drop the 'old' document state. 
- */ - defaultConflictHandler, this.database.token, true); - var replicationState = replicateRxStorageInstance({ - keepMeta: true, - identifier: ['rx-migration-state', this.collection.name, oldStorage.schema.version, this.collection.schema.version].join('-'), - replicationHandler: { - masterChangesSince() { - return Promise.resolve({ - checkpoint: null, - documents: [] - }); - }, - masterWrite: async rows => { - rows = await Promise.all(rows.map(async row => { - var newDocData = row.newDocumentState; - if (newStorage.schema.title === META_INSTANCE_SCHEMA_TITLE) { - newDocData = row.newDocumentState.docData; - if (row.newDocumentState.isCheckpoint === '1') { - return { - assumedMasterState: undefined, - newDocumentState: row.newDocumentState - }; - } - } - var migratedDocData = await migrateDocumentData(this.collection, oldStorage.schema.version, newDocData); - var newRow = { - // drop the assumed master state, we do not have to care about conflicts here. - assumedMasterState: undefined, - newDocumentState: newStorage.schema.title === META_INSTANCE_SCHEMA_TITLE ? 
Object.assign({}, row.newDocumentState, { - docData: migratedDocData - }) : migratedDocData - }; - return newRow; - })); - - // filter out the documents where the migration strategy returned null - rows = rows.filter(row => !!row.newDocumentState); - var result = await replicationHandlerBase.masterWrite(rows); - return result; - }, - masterChangeStream$: new Subject().asObservable() - }, - forkInstance: oldStorage, - metaInstance: replicationMetaStorageInstance, - pushBatchSize: batchSize, - pullBatchSize: 0, - conflictHandler: defaultConflictHandler, - hashFunction: this.database.hashFunction - }); - var hasError = false; - replicationState.events.error.subscribe(err => hasError = err); - - // update replication status on each change - replicationState.events.processed.up.subscribe(() => { - this.updateStatus(status => { - status.count.handled = status.count.handled + 1; - return status; - }); - }); - await awaitRxStorageReplicationFirstInSync(replicationState); - await cancelRxStorageReplication(replicationState); - await this.updateStatusQueue; - if (hasError) { - await replicationMetaStorageInstance.close(); - throw hasError; - } - - // cleanup old storages - await Promise.all([oldStorage.remove(), replicationMetaStorageInstance.remove()]); - }; - _proto.countAllDoucments = async function countAllDoucments(storageInstances) { - var ret = 0; - await Promise.all(storageInstances.map(async instance => { - var preparedQuery = prepareQuery(instance.schema, normalizeMangoQuery(instance.schema, { - selector: {} - })); - var countResult = await instance.count(preparedQuery); - ret += countResult.count; - })); - return ret; - }; - _proto.getConnectedStorageInstances = async function getConnectedStorageInstances() { - var oldCollectionMeta = await this.oldCollectionMeta; - var ret = []; - await Promise.all(await Promise.all(oldCollectionMeta.data.connectedStorages.map(async connectedStorage => { - // atm we can only migrate replication states. 
- if (connectedStorage.schema.title !== META_INSTANCE_SCHEMA_TITLE) { - throw new Error('unknown migration handling for schema'); - } - var newSchema = getRxReplicationMetaInstanceSchema(clone(this.collection.schema.jsonSchema), hasEncryption(connectedStorage.schema)); - newSchema.version = this.collection.schema.version; - var [oldStorage, newStorage] = await Promise.all([this.database.storage.createStorageInstance({ - databaseInstanceToken: this.database.token, - databaseName: this.database.name, - devMode: overwritable.isDevMode(), - multiInstance: this.database.multiInstance, - options: {}, - schema: connectedStorage.schema, - password: this.database.password, - collectionName: connectedStorage.collectionName - }), this.database.storage.createStorageInstance({ - databaseInstanceToken: this.database.token, - databaseName: this.database.name, - devMode: overwritable.isDevMode(), - multiInstance: this.database.multiInstance, - options: {}, - schema: newSchema, - password: this.database.password, - collectionName: connectedStorage.collectionName - })]); - ret.push({ - oldStorage, - newStorage - }); - }))); - return ret; - }; - _proto.migratePromise = async function migratePromise(batchSize) { - this.startMigration(batchSize); - var must = await this.mustMigrate; - if (!must) { - return { - status: 'DONE', - collectionName: this.collection.name, - count: { - handled: 0, - percent: 0, - total: 0 - } - }; - } - var result = await Promise.race([firstValueFrom(this.$.pipe(filter(d => d.status === 'DONE'))), firstValueFrom(this.$.pipe(filter(d => d.status === 'ERROR')))]); - if (result.status === 'ERROR') { - throw newRxError('DM4', { - collection: this.collection.name, - error: result.error - }); - } else { - return result; - } - }; - return RxMigrationState; -}(); -//# sourceMappingURL=rx-migration-state.js.map \ No newline at end of file diff --git a/dist/esm/plugins/migration-schema/rx-migration-state.js.map 
b/dist/esm/plugins/migration-schema/rx-migration-state.js.map deleted file mode 100644 index a80cf260185..00000000000 --- a/dist/esm/plugins/migration-schema/rx-migration-state.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-migration-state.js","names":["Subject","filter","firstValueFrom","map","shareReplay","isBulkWriteConflictError","newRxError","MIGRATION_DEFAULT_BATCH_SIZE","addMigrationStateToDatabase","getOldCollectionMeta","migrateDocumentData","mustMigrate","PROMISE_RESOLVE_TRUE","RXJS_SHARE_REPLAY_DEFAULTS","clone","deepEqual","ensureNotFalsy","errorToPlainJson","getDefaultRevision","getDefaultRxDocumentMeta","getSingleDocument","hasEncryption","observeSingle","writeSingle","BroadcastChannel","createLeaderElection","META_INSTANCE_SCHEMA_TITLE","awaitRxStorageReplicationFirstInSync","cancelRxStorageReplication","defaultConflictHandler","getRxReplicationMetaInstanceSchema","replicateRxStorageInstance","rxStorageInstanceToReplicationHandler","overwritable","INTERNAL_CONTEXT_MIGRATION_STATUS","addConnectedStorageToCollection","getPrimaryKeyOfInternalDocument","prepareQuery","normalizeMangoQuery","RxMigrationState","collection","migrationStrategies","statusDocKey","name","schema","version","join","started","updateStatusHandlers","updateStatusQueue","database","oldCollectionMeta","statusDocId","$","internalStore","pipe","d","data","_proto","prototype","getStatus","startMigration","batchSize","must","broadcastChannel","undefined","multiInstance","leaderElector","awaitLeadership","oldStorageInstance","storage","createStorageInstance","databaseName","collectionName","databaseInstanceToken","token","options","password","devMode","isDevMode","connectedInstances","getConnectedStorageInstances","totalCount","countAllDoucments","concat","r","oldStorage","updateStatus","s","count","total","Promise","all","connectedInstance","newStorage","migrateStorage","close","storageInstance","originalStorageInstance","err","status","error","previous","document","Object","a
ssign","_deleted","handler","push","then","length","useHandlers","newDoc","id","key","context","handled","percent","_meta","_rev","_attachments","oneHandler","Math","round","replicationMetaStorageInstance","replicationHandlerBase","replicationState","keepMeta","identifier","replicationHandler","masterChangesSince","resolve","checkpoint","documents","masterWrite","rows","row","newDocData","newDocumentState","title","docData","isCheckpoint","assumedMasterState","migratedDocData","newRow","result","masterChangeStream$","asObservable","forkInstance","metaInstance","pushBatchSize","pullBatchSize","conflictHandler","hashFunction","hasError","events","subscribe","processed","up","remove","storageInstances","ret","instance","preparedQuery","selector","countResult","connectedStorages","connectedStorage","Error","newSchema","jsonSchema","migratePromise","race"],"sources":["../../../../src/plugins/migration-schema/rx-migration-state.ts"],"sourcesContent":["import {\n Observable,\n Subject,\n filter,\n firstValueFrom,\n map,\n shareReplay\n} from 'rxjs';\nimport {\n isBulkWriteConflictError,\n newRxError\n} from '../../rx-error.ts';\nimport type {\n NumberFunctionMap,\n RxCollection,\n RxDatabase,\n RxError,\n RxReplicationWriteToMasterRow,\n RxStorageInstance,\n RxTypeError\n} from '../../types/index.d.ts';\nimport {\n MIGRATION_DEFAULT_BATCH_SIZE,\n addMigrationStateToDatabase,\n getOldCollectionMeta,\n migrateDocumentData,\n mustMigrate\n} from './migration-helpers.ts';\nimport {\n PROMISE_RESOLVE_TRUE,\n RXJS_SHARE_REPLAY_DEFAULTS,\n clone,\n deepEqual,\n ensureNotFalsy,\n errorToPlainJson,\n getDefaultRevision,\n getDefaultRxDocumentMeta\n} from '../utils/index.ts';\nimport type {\n MigrationStatusUpdate,\n RxMigrationStatus,\n RxMigrationStatusDocument\n} from './migration-types.ts';\nimport {\n getSingleDocument,\n hasEncryption,\n observeSingle,\n writeSingle\n} from '../../rx-storage-helper.ts';\nimport {\n BroadcastChannel,\n createLeaderElection\n} from 
'broadcast-channel';\nimport {\n META_INSTANCE_SCHEMA_TITLE,\n awaitRxStorageReplicationFirstInSync,\n cancelRxStorageReplication,\n defaultConflictHandler,\n getRxReplicationMetaInstanceSchema,\n replicateRxStorageInstance,\n rxStorageInstanceToReplicationHandler\n} from '../../replication-protocol/index.ts';\nimport { overwritable } from '../../overwritable.ts';\nimport {\n INTERNAL_CONTEXT_MIGRATION_STATUS,\n addConnectedStorageToCollection,\n getPrimaryKeyOfInternalDocument\n} from '../../rx-database-internal-store.ts';\nimport { prepareQuery } from '../../rx-query.ts';\nimport { normalizeMangoQuery } from '../../rx-query-helper.ts';\n\n\n\nexport class RxMigrationState {\n\n public database: RxDatabase;\n\n\n private started: boolean = false;\n public readonly oldCollectionMeta: ReturnType;\n public readonly mustMigrate: ReturnType;\n public readonly statusDocId: string;\n public readonly $: Observable;\n\n constructor(\n public readonly collection: RxCollection,\n public readonly migrationStrategies: NumberFunctionMap,\n public readonly statusDocKey = [\n collection.name,\n 'v',\n collection.schema.version\n ].join('-'),\n ) {\n this.database = collection.database;\n this.oldCollectionMeta = getOldCollectionMeta(this);\n this.mustMigrate = mustMigrate(this);\n this.statusDocId = getPrimaryKeyOfInternalDocument(\n this.statusDocKey,\n INTERNAL_CONTEXT_MIGRATION_STATUS\n );\n addMigrationStateToDatabase(this);\n\n this.$ = observeSingle(\n this.database.internalStore,\n this.statusDocId\n ).pipe(\n filter(d => !!d),\n map(d => ensureNotFalsy(d).data),\n shareReplay(RXJS_SHARE_REPLAY_DEFAULTS)\n );\n }\n\n getStatus() {\n return firstValueFrom(this.$);\n }\n\n\n /**\n * Starts the migration.\n * Returns void so that people to not get the idea to await\n * this function.\n * Instead use migratePromise() if you want to await\n * the migration. 
This ensures it works even if the migration\n * is run on a different browser tab.\n */\n async startMigration(batchSize: number = MIGRATION_DEFAULT_BATCH_SIZE): Promise {\n const must = await this.mustMigrate;\n if (!must) {\n return;\n }\n if (this.started) {\n throw newRxError('DM1');\n }\n this.started = true;\n\n\n let broadcastChannel: BroadcastChannel | undefined = undefined;\n /**\n * To ensure that multiple tabs do not migrate the same collection,\n * we use a new broadcastChannel/leaderElector for each collection.\n * This is required because collections can be added dynamically and\n * not all tabs might know about this collection.\n */\n if (this.database.multiInstance) {\n broadcastChannel = new BroadcastChannel([\n 'rx-migration-state',\n this.database.name,\n this.collection.name,\n this.collection.schema.version\n ].join('|'));\n const leaderElector = createLeaderElection(broadcastChannel);\n await leaderElector.awaitLeadership();\n }\n\n /**\n * Instead of writing a custom migration protocol,\n * we do a push-only replication from the old collection data to the new one.\n * This also ensure that restarting the replication works without problems.\n */\n const oldCollectionMeta = await this.oldCollectionMeta;\n const oldStorageInstance = await this.database.storage.createStorageInstance({\n databaseName: this.database.name,\n collectionName: this.collection.name,\n databaseInstanceToken: this.database.token,\n multiInstance: this.database.multiInstance,\n options: {},\n schema: oldCollectionMeta.data.schema,\n password: this.database.password,\n devMode: overwritable.isDevMode()\n });\n\n\n const connectedInstances = await this.getConnectedStorageInstances();\n\n\n /**\n * Initially write the migration status into a meta document.\n */\n const totalCount = await this.countAllDoucments(\n [oldStorageInstance].concat(connectedInstances.map(r => r.oldStorage))\n );\n await this.updateStatus(s => {\n s.count.total = totalCount;\n return s;\n });\n\n\n 
try {\n /**\n * First migrate the connected storages,\n * afterwards migrate the normal collection.\n */\n await Promise.all(\n connectedInstances.map(async (connectedInstance) => {\n await addConnectedStorageToCollection(\n this.collection,\n connectedInstance.newStorage.collectionName,\n connectedInstance.newStorage.schema\n );\n await this.migrateStorage(\n connectedInstance.oldStorage,\n connectedInstance.newStorage,\n batchSize\n );\n await connectedInstance.newStorage.close();\n })\n );\n\n await this.migrateStorage(\n oldStorageInstance,\n /**\n * Use the originalStorageInstance here\n * so that the _meta.lwt time keeps the same\n * and our replication checkpoints still point to the\n * correct checkpoint.\n */\n this.collection.storageInstance.originalStorageInstance,\n batchSize\n );\n } catch (err) {\n await oldStorageInstance.close();\n await this.updateStatus(s => {\n s.status = 'ERROR';\n s.error = errorToPlainJson(err as Error);\n return s;\n });\n return;\n }\n\n\n // remove old collection meta doc\n await writeSingle(\n this.database.internalStore,\n {\n previous: oldCollectionMeta,\n document: Object.assign(\n {},\n oldCollectionMeta,\n {\n _deleted: true\n }\n )\n },\n 'rx-migration-remove-collection-meta'\n );\n\n await this.updateStatus(s => {\n s.status = 'DONE';\n return s;\n });\n if (broadcastChannel) {\n await broadcastChannel.close();\n }\n }\n\n public updateStatusHandlers: MigrationStatusUpdate[] = [];\n public updateStatusQueue: Promise = PROMISE_RESOLVE_TRUE;\n public updateStatus(\n handler: MigrationStatusUpdate\n ) {\n this.updateStatusHandlers.push(handler);\n this.updateStatusQueue = this.updateStatusQueue.then(async () => {\n if (this.updateStatusHandlers.length === 0) {\n return;\n }\n // re-run until no conflict\n const useHandlers = this.updateStatusHandlers;\n this.updateStatusHandlers = [];\n while (true) {\n const previous = await getSingleDocument(\n this.database.internalStore,\n this.statusDocId\n );\n let newDoc = 
clone(previous);\n if (!previous) {\n newDoc = {\n id: this.statusDocId,\n key: this.statusDocKey,\n context: INTERNAL_CONTEXT_MIGRATION_STATUS,\n data: {\n collectionName: this.collection.name,\n status: 'RUNNING',\n count: {\n total: 0,\n handled: 0,\n percent: 0\n }\n },\n _deleted: false,\n _meta: getDefaultRxDocumentMeta(),\n _rev: getDefaultRevision(),\n _attachments: {}\n };\n }\n\n let status = ensureNotFalsy(newDoc).data;\n for (const oneHandler of useHandlers) {\n status = oneHandler(status);\n }\n status.count.percent = Math.round((status.count.handled / status.count.total) * 100);\n\n if (\n newDoc && previous &&\n deepEqual(newDoc.data, previous.data)\n ) {\n break;\n }\n\n\n try {\n await writeSingle(\n this.database.internalStore,\n {\n previous,\n document: ensureNotFalsy(newDoc)\n },\n INTERNAL_CONTEXT_MIGRATION_STATUS\n );\n\n // write successful\n break;\n } catch (err) {\n // ignore conflicts\n if (!isBulkWriteConflictError(err)) {\n throw err;\n }\n }\n }\n });\n return this.updateStatusQueue;\n }\n\n\n public async migrateStorage(\n oldStorage: RxStorageInstance,\n newStorage: RxStorageInstance,\n batchSize: number\n ) {\n const replicationMetaStorageInstance = await this.database.storage.createStorageInstance({\n databaseName: this.database.name,\n collectionName: 'rx-migration-state-meta-' + this.collection.name + '-' + this.collection.schema.version,\n databaseInstanceToken: this.database.token,\n multiInstance: this.database.multiInstance,\n options: {},\n schema: getRxReplicationMetaInstanceSchema(oldStorage.schema, hasEncryption(oldStorage.schema)),\n password: this.database.password,\n devMode: overwritable.isDevMode()\n });\n\n const replicationHandlerBase = rxStorageInstanceToReplicationHandler(\n newStorage,\n /**\n * Ignore push-conflicts.\n * If this happens we drop the 'old' document state.\n */\n defaultConflictHandler,\n this.database.token,\n true\n );\n\n const replicationState = replicateRxStorageInstance({\n keepMeta: 
true,\n identifier: [\n 'rx-migration-state',\n this.collection.name,\n oldStorage.schema.version,\n this.collection.schema.version\n ].join('-'),\n replicationHandler: {\n masterChangesSince() {\n return Promise.resolve({\n checkpoint: null,\n documents: []\n });\n },\n masterWrite: async (rows) => {\n rows = await Promise.all(\n rows\n .map(async (row) => {\n let newDocData = row.newDocumentState;\n if (newStorage.schema.title === META_INSTANCE_SCHEMA_TITLE) {\n newDocData = row.newDocumentState.docData;\n if (row.newDocumentState.isCheckpoint === '1') {\n return {\n assumedMasterState: undefined,\n newDocumentState: row.newDocumentState\n };\n }\n }\n const migratedDocData: RxReplicationWriteToMasterRow = await migrateDocumentData(\n this.collection,\n oldStorage.schema.version,\n newDocData\n );\n const newRow: RxReplicationWriteToMasterRow = {\n // drop the assumed master state, we do not have to care about conflicts here.\n assumedMasterState: undefined,\n newDocumentState: newStorage.schema.title === META_INSTANCE_SCHEMA_TITLE\n ? 
Object.assign({}, row.newDocumentState, { docData: migratedDocData })\n : migratedDocData\n };\n return newRow;\n })\n );\n\n // filter out the documents where the migration strategy returned null\n rows = rows.filter(row => !!row.newDocumentState);\n\n const result = await replicationHandlerBase.masterWrite(rows);\n return result;\n },\n masterChangeStream$: new Subject().asObservable()\n },\n forkInstance: oldStorage,\n metaInstance: replicationMetaStorageInstance,\n pushBatchSize: batchSize,\n pullBatchSize: 0,\n conflictHandler: defaultConflictHandler,\n hashFunction: this.database.hashFunction\n });\n\n\n let hasError: RxError | RxTypeError | false = false;\n replicationState.events.error.subscribe(err => hasError = err);\n\n // update replication status on each change\n replicationState.events.processed.up.subscribe(() => {\n this.updateStatus(status => {\n status.count.handled = status.count.handled + 1;\n return status;\n });\n });\n\n await awaitRxStorageReplicationFirstInSync(replicationState);\n await cancelRxStorageReplication(replicationState);\n\n await this.updateStatusQueue;\n if (hasError) {\n await replicationMetaStorageInstance.close();\n throw hasError;\n }\n\n // cleanup old storages\n await Promise.all([\n oldStorage.remove(),\n replicationMetaStorageInstance.remove()\n ]);\n }\n\n public async countAllDoucments(\n storageInstances: RxStorageInstance[]\n ): Promise {\n let ret = 0;\n await Promise.all(\n storageInstances.map(async (instance) => {\n\n const preparedQuery = prepareQuery(\n instance.schema,\n normalizeMangoQuery(\n instance.schema,\n {\n selector: {}\n }\n )\n );\n const countResult = await instance.count(preparedQuery);\n ret += countResult.count;\n })\n );\n return ret;\n }\n\n public async getConnectedStorageInstances() {\n const oldCollectionMeta = await this.oldCollectionMeta;\n const ret: {\n oldStorage: RxStorageInstance;\n newStorage: RxStorageInstance;\n }[] = [];\n\n await Promise.all(\n await Promise.all(\n 
oldCollectionMeta\n .data\n .connectedStorages\n .map(async (connectedStorage) => {\n\n // atm we can only migrate replication states.\n if (connectedStorage.schema.title !== META_INSTANCE_SCHEMA_TITLE) {\n throw new Error('unknown migration handling for schema');\n }\n\n const newSchema = getRxReplicationMetaInstanceSchema(\n clone(this.collection.schema.jsonSchema),\n hasEncryption(connectedStorage.schema)\n );\n newSchema.version = this.collection.schema.version;\n const [oldStorage, newStorage] = await Promise.all([\n this.database.storage.createStorageInstance({\n databaseInstanceToken: this.database.token,\n databaseName: this.database.name,\n devMode: overwritable.isDevMode(),\n multiInstance: this.database.multiInstance,\n options: {},\n schema: connectedStorage.schema,\n password: this.database.password,\n collectionName: connectedStorage.collectionName\n }),\n this.database.storage.createStorageInstance({\n databaseInstanceToken: this.database.token,\n databaseName: this.database.name,\n devMode: overwritable.isDevMode(),\n multiInstance: this.database.multiInstance,\n options: {},\n schema: newSchema,\n password: this.database.password,\n collectionName: connectedStorage.collectionName\n })\n ]);\n ret.push({ oldStorage, newStorage });\n })\n )\n );\n\n return ret;\n }\n\n\n\n async migratePromise(batchSize?: number): Promise {\n this.startMigration(batchSize);\n const must = await this.mustMigrate;\n if (!must) {\n return {\n status: 'DONE',\n collectionName: this.collection.name,\n count: {\n handled: 0,\n percent: 0,\n total: 0\n }\n };\n }\n\n const result = await Promise.race([\n firstValueFrom(\n this.$.pipe(\n filter(d => d.status === 'DONE')\n )\n ),\n firstValueFrom(\n this.$.pipe(\n filter(d => d.status === 'ERROR')\n )\n )\n ]);\n\n if (result.status === 'ERROR') {\n throw newRxError('DM4', {\n collection: this.collection.name,\n error: result.error\n });\n } else {\n return result;\n }\n\n 
}\n}\n"],"mappings":"AAAA,SAEIA,OAAO,EACPC,MAAM,EACNC,cAAc,EACdC,GAAG,EACHC,WAAW,QACR,MAAM;AACb,SACIC,wBAAwB,EACxBC,UAAU,QACP,mBAAmB;AAU1B,SACIC,4BAA4B,EAC5BC,2BAA2B,EAC3BC,oBAAoB,EACpBC,mBAAmB,EACnBC,WAAW,QACR,wBAAwB;AAC/B,SACIC,oBAAoB,EACpBC,0BAA0B,EAC1BC,KAAK,EACLC,SAAS,EACTC,cAAc,EACdC,gBAAgB,EAChBC,kBAAkB,EAClBC,wBAAwB,QACrB,mBAAmB;AAM1B,SACIC,iBAAiB,EACjBC,aAAa,EACbC,aAAa,EACbC,WAAW,QACR,4BAA4B;AACnC,SACIC,gBAAgB,EAChBC,oBAAoB,QACjB,mBAAmB;AAC1B,SACIC,0BAA0B,EAC1BC,oCAAoC,EACpCC,0BAA0B,EAC1BC,sBAAsB,EACtBC,kCAAkC,EAClCC,0BAA0B,EAC1BC,qCAAqC,QAClC,qCAAqC;AAC5C,SAASC,YAAY,QAAQ,uBAAuB;AACpD,SACIC,iCAAiC,EACjCC,+BAA+B,EAC/BC,+BAA+B,QAC5B,qCAAqC;AAC5C,SAASC,YAAY,QAAQ,mBAAmB;AAChD,SAASC,mBAAmB,QAAQ,0BAA0B;AAI9D,WAAaC,gBAAgB;EAWzB,SAAAA,iBACoBC,UAAwB,EACxBC,mBAAsC,EACtCC,YAAY,GAAG,CAC3BF,UAAU,CAACG,IAAI,EACf,GAAG,EACHH,UAAU,CAACI,MAAM,CAACC,OAAO,CAC5B,CAACC,IAAI,CAAC,GAAG,CAAC,EACb;IAAA,KAdMC,OAAO,GAAY,KAAK;IAAA,KAiLzBC,oBAAoB,GAA4B,EAAE;IAAA,KAClDC,iBAAiB,GAAiBrC,oBAAoB;IAAA,KA3KzC4B,UAAwB,GAAxBA,UAAwB;IAAA,KACxBC,mBAAsC,GAAtCA,mBAAsC;IAAA,KACtCC,YAAY,GAAZA,YAAY;IAM5B,IAAI,CAACQ,QAAQ,GAAGV,UAAU,CAACU,QAAQ;IACnC,IAAI,CAACC,iBAAiB,GAAG1C,oBAAoB,CAAC,IAAI,CAAC;IACnD,IAAI,CAACE,WAAW,GAAGA,WAAW,CAAC,IAAI,CAAC;IACpC,IAAI,CAACyC,WAAW,GAAGhB,+BAA+B,CAC9C,IAAI,CAACM,YAAY,EACjBR,iCACJ,CAAC;IACD1B,2BAA2B,CAAC,IAAI,CAAC;IAEjC,IAAI,CAAC6C,CAAC,GAAG/B,aAAa,CAClB,IAAI,CAAC4B,QAAQ,CAACI,aAAa,EAC3B,IAAI,CAACF,WACT,CAAC,CAACG,IAAI,CACFtD,MAAM,CAACuD,CAAC,IAAI,CAAC,CAACA,CAAC,CAAC,EAChBrD,GAAG,CAACqD,CAAC,IAAIxC,cAAc,CAACwC,CAAC,CAAC,CAACC,IAAI,CAAC,EAChCrD,WAAW,CAACS,0BAA0B,CAC1C,CAAC;EACL;EAAC,IAAA6C,MAAA,GAAAnB,gBAAA,CAAAoB,SAAA;EAAAD,MAAA,CAEDE,SAAS,GAAT,SAAAA,UAAA,EAAY;IACR,OAAO1D,cAAc,CAAC,IAAI,CAACmD,CAAC,CAAC;EACjC;;EAGA;AACJ;AACA;AACA;AACA;AACA;AACA;AACA,KAPI;EAAAK,MAAA,CAQMG,cAAc,GAApB,eAAAA,eAAqBC,SAAiB,GAAGvD,4BAA4B,EAAiB;IAClF,IAAMwD,IAAI,GAAG,MAAM,IAAI,CAACpD,WAAW;IACnC,IAAI,CAACoD,IAAI,EAAE;MACP;IACJ;IACA,IAAI,IAAI,CAAChB,OAAO,EAAE;MACd,MAAMzC,UAAU,CAAC,KAAK,CAAC;IAC3B;
IACA,IAAI,CAACyC,OAAO,GAAG,IAAI;IAGnB,IAAIiB,gBAA8C,GAAGC,SAAS;IAC9D;AACR;AACA;AACA;AACA;AACA;IACQ,IAAI,IAAI,CAACf,QAAQ,CAACgB,aAAa,EAAE;MAC7BF,gBAAgB,GAAG,IAAIxC,gBAAgB,CAAC,CACpC,oBAAoB,EACpB,IAAI,CAAC0B,QAAQ,CAACP,IAAI,EAClB,IAAI,CAACH,UAAU,CAACG,IAAI,EACpB,IAAI,CAACH,UAAU,CAACI,MAAM,CAACC,OAAO,CACjC,CAACC,IAAI,CAAC,GAAG,CAAC,CAAC;MACZ,IAAMqB,aAAa,GAAG1C,oBAAoB,CAACuC,gBAAgB,CAAC;MAC5D,MAAMG,aAAa,CAACC,eAAe,CAAC,CAAC;IACzC;;IAEA;AACR;AACA;AACA;AACA;IACQ,IAAMjB,iBAAiB,GAAG,MAAM,IAAI,CAACA,iBAAiB;IACtD,IAAMkB,kBAAkB,GAAG,MAAM,IAAI,CAACnB,QAAQ,CAACoB,OAAO,CAACC,qBAAqB,CAAC;MACzEC,YAAY,EAAE,IAAI,CAACtB,QAAQ,CAACP,IAAI;MAChC8B,cAAc,EAAE,IAAI,CAACjC,UAAU,CAACG,IAAI;MACpC+B,qBAAqB,EAAE,IAAI,CAACxB,QAAQ,CAACyB,KAAK;MAC1CT,aAAa,EAAE,IAAI,CAAChB,QAAQ,CAACgB,aAAa;MAC1CU,OAAO,EAAE,CAAC,CAAC;MACXhC,MAAM,EAAEO,iBAAiB,CAACM,IAAI,CAACb,MAAM;MACrCiC,QAAQ,EAAE,IAAI,CAAC3B,QAAQ,CAAC2B,QAAQ;MAChCC,OAAO,EAAE7C,YAAY,CAAC8C,SAAS,CAAC;IACpC,CAAC,CAAC;IAGF,IAAMC,kBAAkB,GAAG,MAAM,IAAI,CAACC,4BAA4B,CAAC,CAAC;;IAGpE;AACR;AACA;IACQ,IAAMC,UAAU,GAAG,MAAM,IAAI,CAACC,iBAAiB,CAC3C,CAACd,kBAAkB,CAAC,CAACe,MAAM,CAACJ,kBAAkB,CAAC7E,GAAG,CAACkF,CAAC,IAAIA,CAAC,CAACC,UAAU,CAAC,CACzE,CAAC;IACD,MAAM,IAAI,CAACC,YAAY,CAACC,CAAC,IAAI;MACzBA,CAAC,CAACC,KAAK,CAACC,KAAK,GAAGR,UAAU;MAC1B,OAAOM,CAAC;IACZ,CAAC,CAAC;IAGF,IAAI;MACA;AACZ;AACA;AACA;MACY,MAAMG,OAAO,CAACC,GAAG,CACbZ,kBAAkB,CAAC7E,GAAG,CAAC,MAAO0F,iBAAiB,IAAK;QAChD,MAAM1D,+BAA+B,CACjC,IAAI,CAACK,UAAU,EACfqD,iBAAiB,CAACC,UAAU,CAACrB,cAAc,EAC3CoB,iBAAiB,CAACC,UAAU,CAAClD,MACjC,CAAC;QACD,MAAM,IAAI,CAACmD,cAAc,CACrBF,iBAAiB,CAACP,UAAU,EAC5BO,iBAAiB,CAACC,UAAU,EAC5BhC,SACJ,CAAC;QACD,MAAM+B,iBAAiB,CAACC,UAAU,CAACE,KAAK,CAAC,CAAC;MAC9C,CAAC,CACL,CAAC;MAED,MAAM,IAAI,CAACD,cAAc,CACrB1B,kBAAkB;MAClB;AAChB;AACA;AACA;AACA;AACA;MACgB,IAAI,CAAC7B,UAAU,CAACyD,eAAe,CAACC,uBAAuB,EACvDpC,SACJ,CAAC;IACL,CAAC,CAAC,OAAOqC,GAAG,EAAE;MACV,MAAM9B,kBAAkB,CAAC2B,KAAK,CAAC,CAAC;MAChC,MAAM,IAAI,CAACT,YAAY,CAACC,CAAC,IAAI;QACzBA,CAAC,CAACY,MAAM,GAAG,OAAO;QAClBZ,CAAC,CAACa,KAAK,GAA
GpF,gBAAgB,CAACkF,GAAY,CAAC;QACxC,OAAOX,CAAC;MACZ,CAAC,CAAC;MACF;IACJ;;IAGA;IACA,MAAMjE,WAAW,CACb,IAAI,CAAC2B,QAAQ,CAACI,aAAa,EAC3B;MACIgD,QAAQ,EAAEnD,iBAAiB;MAC3BoD,QAAQ,EAAEC,MAAM,CAACC,MAAM,CACnB,CAAC,CAAC,EACFtD,iBAAiB,EACjB;QACIuD,QAAQ,EAAE;MACd,CACJ;IACJ,CAAC,EACD,qCACJ,CAAC;IAED,MAAM,IAAI,CAACnB,YAAY,CAACC,CAAC,IAAI;MACzBA,CAAC,CAACY,MAAM,GAAG,MAAM;MACjB,OAAOZ,CAAC;IACZ,CAAC,CAAC;IACF,IAAIxB,gBAAgB,EAAE;MAClB,MAAMA,gBAAgB,CAACgC,KAAK,CAAC,CAAC;IAClC;EACJ,CAAC;EAAAtC,MAAA,CAIM6B,YAAY,GAAnB,SAAAA,aACIoB,OAA8B,EAChC;IACE,IAAI,CAAC3D,oBAAoB,CAAC4D,IAAI,CAACD,OAAO,CAAC;IACvC,IAAI,CAAC1D,iBAAiB,GAAG,IAAI,CAACA,iBAAiB,CAAC4D,IAAI,CAAC,YAAY;MAC7D,IAAI,IAAI,CAAC7D,oBAAoB,CAAC8D,MAAM,KAAK,CAAC,EAAE;QACxC;MACJ;MACA;MACA,IAAMC,WAAW,GAAG,IAAI,CAAC/D,oBAAoB;MAC7C,IAAI,CAACA,oBAAoB,GAAG,EAAE;MAC9B,OAAO,IAAI,EAAE;QACT,IAAMsD,QAAQ,GAAG,MAAMlF,iBAAiB,CACpC,IAAI,CAAC8B,QAAQ,CAACI,aAAa,EAC3B,IAAI,CAACF,WACT,CAAC;QACD,IAAI4D,MAAM,GAAGlG,KAAK,CAACwF,QAAQ,CAAC;QAC5B,IAAI,CAACA,QAAQ,EAAE;UACXU,MAAM,GAAG;YACLC,EAAE,EAAE,IAAI,CAAC7D,WAAW;YACpB8D,GAAG,EAAE,IAAI,CAACxE,YAAY;YACtByE,OAAO,EAAEjF,iCAAiC;YAC1CuB,IAAI,EAAE;cACFgB,cAAc,EAAE,IAAI,CAACjC,UAAU,CAACG,IAAI;cACpCyD,MAAM,EAAE,SAAS;cACjBX,KAAK,EAAE;gBACHC,KAAK,EAAE,CAAC;gBACR0B,OAAO,EAAE,CAAC;gBACVC,OAAO,EAAE;cACb;YACJ,CAAC;YACDX,QAAQ,EAAE,KAAK;YACfY,KAAK,EAAEnG,wBAAwB,CAAC,CAAC;YACjCoG,IAAI,EAAErG,kBAAkB,CAAC,CAAC;YAC1BsG,YAAY,EAAE,CAAC;UACnB,CAAC;QACL;QAEA,IAAIpB,MAAM,GAAGpF,cAAc,CAACgG,MAAM,CAAC,CAACvD,IAAI;QACxC,KAAK,IAAMgE,UAAU,IAAIV,WAAW,EAAE;UAClCX,MAAM,GAAGqB,UAAU,CAACrB,MAAM,CAAC;QAC/B;QACAA,MAAM,CAACX,KAAK,CAAC4B,OAAO,GAAGK,IAAI,CAACC,KAAK,CAAEvB,MAAM,CAACX,KAAK,CAAC2B,OAAO,GAAGhB,MAAM,CAACX,KAAK,CAACC,KAAK,GAAI,GAAG,CAAC;QAEpF,IACIsB,MAAM,IAAIV,QAAQ,IAClBvF,SAAS,CAACiG,MAAM,CAACvD,IAAI,EAAE6C,QAAQ,CAAC7C,IAAI,CAAC,EACvC;UACE;QACJ;QAGA,IAAI;UACA,MAAMlC,WAAW,CACb,IAAI,CAAC2B,QAAQ,CAACI,aAAa,EAC3B;YACIgD,QAAQ;YACRC,QAAQ,EAAEvF,cAAc,CAACgG,MAAM;UACnC,CAAC,EACD9E,iCACJ,CAAC;;UAED;UACA;QACJ,CAAC,CAAC,OAAOiE,GAAG,EAAE;UACV;UACA,I
AAI,CAAC9F,wBAAwB,CAAC8F,GAAG,CAAC,EAAE;YAChC,MAAMA,GAAG;UACb;QACJ;MACJ;IACJ,CAAC,CAAC;IACF,OAAO,IAAI,CAAClD,iBAAiB;EACjC,CAAC;EAAAS,MAAA,CAGYqC,cAAc,GAA3B,eAAAA,eACIT,UAA4C,EAC5CQ,UAA4C,EAC5ChC,SAAiB,EACnB;IACE,IAAM8D,8BAA8B,GAAG,MAAM,IAAI,CAAC1E,QAAQ,CAACoB,OAAO,CAACC,qBAAqB,CAAC;MACrFC,YAAY,EAAE,IAAI,CAACtB,QAAQ,CAACP,IAAI;MAChC8B,cAAc,EAAE,0BAA0B,GAAG,IAAI,CAACjC,UAAU,CAACG,IAAI,GAAG,GAAG,GAAG,IAAI,CAACH,UAAU,CAACI,MAAM,CAACC,OAAO;MACxG6B,qBAAqB,EAAE,IAAI,CAACxB,QAAQ,CAACyB,KAAK;MAC1CT,aAAa,EAAE,IAAI,CAAChB,QAAQ,CAACgB,aAAa;MAC1CU,OAAO,EAAE,CAAC,CAAC;MACXhC,MAAM,EAAEd,kCAAkC,CAACwD,UAAU,CAAC1C,MAAM,EAAEvB,aAAa,CAACiE,UAAU,CAAC1C,MAAM,CAAC,CAAC;MAC/FiC,QAAQ,EAAE,IAAI,CAAC3B,QAAQ,CAAC2B,QAAQ;MAChCC,OAAO,EAAE7C,YAAY,CAAC8C,SAAS,CAAC;IACpC,CAAC,CAAC;IAEF,IAAM8C,sBAAsB,GAAG7F,qCAAqC,CAChE8D,UAAU;IACV;AACZ;AACA;AACA;IACYjE,sBAAsB,EACtB,IAAI,CAACqB,QAAQ,CAACyB,KAAK,EACnB,IACJ,CAAC;IAED,IAAMmD,gBAAgB,GAAG/F,0BAA0B,CAAC;MAChDgG,QAAQ,EAAE,IAAI;MACdC,UAAU,EAAE,CACR,oBAAoB,EACpB,IAAI,CAACxF,UAAU,CAACG,IAAI,EACpB2C,UAAU,CAAC1C,MAAM,CAACC,OAAO,EACzB,IAAI,CAACL,UAAU,CAACI,MAAM,CAACC,OAAO,CACjC,CAACC,IAAI,CAAC,GAAG,CAAC;MACXmF,kBAAkB,EAAE;QAChBC,kBAAkBA,CAAA,EAAG;UACjB,OAAOvC,OAAO,CAACwC,OAAO,CAAC;YACnBC,UAAU,EAAE,IAAI;YAChBC,SAAS,EAAE;UACf,CAAC,CAAC;QACN,CAAC;QACDC,WAAW,EAAE,MAAOC,IAAI,IAAK;UACzBA,IAAI,GAAG,MAAM5C,OAAO,CAACC,GAAG,CACpB2C,IAAI,CACCpI,GAAG,CAAC,MAAOqI,GAAG,IAAK;YAChB,IAAIC,UAAU,GAAGD,GAAG,CAACE,gBAAgB;YACrC,IAAI5C,UAAU,CAAClD,MAAM,CAAC+F,KAAK,KAAKjH,0BAA0B,EAAE;cACxD+G,UAAU,GAAGD,GAAG,CAACE,gBAAgB,CAACE,OAAO;cACzC,IAAIJ,GAAG,CAACE,gBAAgB,CAACG,YAAY,KAAK,GAAG,EAAE;gBAC3C,OAAO;kBACHC,kBAAkB,EAAE7E,SAAS;kBAC7ByE,gBAAgB,EAAEF,GAAG,CAACE;gBAC1B,CAAC;cACL;YACJ;YACA,IAAMK,eAAmD,GAAG,MAAMrI,mBAAmB,CACjF,IAAI,CAAC8B,UAAU,EACf8C,UAAU,CAAC1C,MAAM,CAACC,OAAO,EACzB4F,UACJ,CAAC;YACD,IAAMO,MAA0C,GAAG;cAC/C;cACAF,kBAAkB,EAAE7E,SAAS;cAC7ByE,gBAAgB,EAAE5C,UAAU,CAAClD,MAAM,CAAC+F,KAAK,KAAKjH,0BAA0B,GAClE8E,MAAM,CAACC,MAAM,CAAC,CAAC,CAAC,EAAE+B,GAAG,CAACE,gBAAgB,EAAE;gBAAEE,OAAO,EA
AEG;cAAgB,CAAC,CAAC,GACrEA;YACV,CAAC;YACD,OAAOC,MAAM;UACjB,CAAC,CACT,CAAC;;UAED;UACAT,IAAI,GAAGA,IAAI,CAACtI,MAAM,CAACuI,GAAG,IAAI,CAAC,CAACA,GAAG,CAACE,gBAAgB,CAAC;UAEjD,IAAMO,MAAM,GAAG,MAAMpB,sBAAsB,CAACS,WAAW,CAACC,IAAI,CAAC;UAC7D,OAAOU,MAAM;QACjB,CAAC;QACDC,mBAAmB,EAAE,IAAIlJ,OAAO,CAAM,CAAC,CAACmJ,YAAY,CAAC;MACzD,CAAC;MACDC,YAAY,EAAE9D,UAAU;MACxB+D,YAAY,EAAEzB,8BAA8B;MAC5C0B,aAAa,EAAExF,SAAS;MACxByF,aAAa,EAAE,CAAC;MAChBC,eAAe,EAAE3H,sBAAsB;MACvC4H,YAAY,EAAE,IAAI,CAACvG,QAAQ,CAACuG;IAChC,CAAC,CAAC;IAGF,IAAIC,QAAuC,GAAG,KAAK;IACnD5B,gBAAgB,CAAC6B,MAAM,CAACtD,KAAK,CAACuD,SAAS,CAACzD,GAAG,IAAIuD,QAAQ,GAAGvD,GAAG,CAAC;;IAE9D;IACA2B,gBAAgB,CAAC6B,MAAM,CAACE,SAAS,CAACC,EAAE,CAACF,SAAS,CAAC,MAAM;MACjD,IAAI,CAACrE,YAAY,CAACa,MAAM,IAAI;QACxBA,MAAM,CAACX,KAAK,CAAC2B,OAAO,GAAGhB,MAAM,CAACX,KAAK,CAAC2B,OAAO,GAAG,CAAC;QAC/C,OAAOhB,MAAM;MACjB,CAAC,CAAC;IACN,CAAC,CAAC;IAEF,MAAMzE,oCAAoC,CAACmG,gBAAgB,CAAC;IAC5D,MAAMlG,0BAA0B,CAACkG,gBAAgB,CAAC;IAElD,MAAM,IAAI,CAAC7E,iBAAiB;IAC5B,IAAIyG,QAAQ,EAAE;MACV,MAAM9B,8BAA8B,CAAC5B,KAAK,CAAC,CAAC;MAC5C,MAAM0D,QAAQ;IAClB;;IAEA;IACA,MAAM/D,OAAO,CAACC,GAAG,CAAC,CACdN,UAAU,CAACyE,MAAM,CAAC,CAAC,EACnBnC,8BAA8B,CAACmC,MAAM,CAAC,CAAC,CAC1C,CAAC;EACN,CAAC;EAAArG,MAAA,CAEYyB,iBAAiB,GAA9B,eAAAA,kBACI6E,gBAAoD,EACrC;IACf,IAAIC,GAAG,GAAG,CAAC;IACX,MAAMtE,OAAO,CAACC,GAAG,CACboE,gBAAgB,CAAC7J,GAAG,CAAC,MAAO+J,QAAQ,IAAK;MAErC,IAAMC,aAAa,GAAG9H,YAAY,CAC9B6H,QAAQ,CAACtH,MAAM,EACfN,mBAAmB,CACf4H,QAAQ,CAACtH,MAAM,EACf;QACIwH,QAAQ,EAAE,CAAC;MACf,CACJ,CACJ,CAAC;MACD,IAAMC,WAAW,GAAG,MAAMH,QAAQ,CAACzE,KAAK,CAAC0E,aAAa,CAAC;MACvDF,GAAG,IAAII,WAAW,CAAC5E,KAAK;IAC5B,CAAC,CACL,CAAC;IACD,OAAOwE,GAAG;EACd,CAAC;EAAAvG,MAAA,CAEYuB,4BAA4B,GAAzC,eAAAA,6BAAA,EAA4C;IACxC,IAAM9B,iBAAiB,GAAG,MAAM,IAAI,CAACA,iBAAiB;IACtD,IAAM8G,GAGH,GAAG,EAAE;IAER,MAAMtE,OAAO,CAACC,GAAG,CACb,MAAMD,OAAO,CAACC,GAAG,CACbzC,iBAAiB,CACZM,IAAI,CACJ6G,iBAAiB,CACjBnK,GAAG,CAAC,MAAOoK,gBAAgB,IAAK;MAE7B;MACA,IAAIA,gBAAgB,CAAC3H,MAAM,CAAC+F,KAAK,KAAKjH,0BAA0B,EAAE;QAC9D,MAAM,IAAI8I,KAAK,CAAC,uCAAuC,CAAC
;MAC5D;MAEA,IAAMC,SAAS,GAAG3I,kCAAkC,CAChDhB,KAAK,CAAC,IAAI,CAAC0B,UAAU,CAACI,MAAM,CAAC8H,UAAU,CAAC,EACxCrJ,aAAa,CAACkJ,gBAAgB,CAAC3H,MAAM,CACzC,CAAC;MACD6H,SAAS,CAAC5H,OAAO,GAAG,IAAI,CAACL,UAAU,CAACI,MAAM,CAACC,OAAO;MAClD,IAAM,CAACyC,UAAU,EAAEQ,UAAU,CAAC,GAAG,MAAMH,OAAO,CAACC,GAAG,CAAC,CAC/C,IAAI,CAAC1C,QAAQ,CAACoB,OAAO,CAACC,qBAAqB,CAAC;QACxCG,qBAAqB,EAAE,IAAI,CAACxB,QAAQ,CAACyB,KAAK;QAC1CH,YAAY,EAAE,IAAI,CAACtB,QAAQ,CAACP,IAAI;QAChCmC,OAAO,EAAE7C,YAAY,CAAC8C,SAAS,CAAC,CAAC;QACjCb,aAAa,EAAE,IAAI,CAAChB,QAAQ,CAACgB,aAAa;QAC1CU,OAAO,EAAE,CAAC,CAAC;QACXhC,MAAM,EAAE2H,gBAAgB,CAAC3H,MAAM;QAC/BiC,QAAQ,EAAE,IAAI,CAAC3B,QAAQ,CAAC2B,QAAQ;QAChCJ,cAAc,EAAE8F,gBAAgB,CAAC9F;MACrC,CAAC,CAAC,EACF,IAAI,CAACvB,QAAQ,CAACoB,OAAO,CAACC,qBAAqB,CAAC;QACxCG,qBAAqB,EAAE,IAAI,CAACxB,QAAQ,CAACyB,KAAK;QAC1CH,YAAY,EAAE,IAAI,CAACtB,QAAQ,CAACP,IAAI;QAChCmC,OAAO,EAAE7C,YAAY,CAAC8C,SAAS,CAAC,CAAC;QACjCb,aAAa,EAAE,IAAI,CAAChB,QAAQ,CAACgB,aAAa;QAC1CU,OAAO,EAAE,CAAC,CAAC;QACXhC,MAAM,EAAE6H,SAAS;QACjB5F,QAAQ,EAAE,IAAI,CAAC3B,QAAQ,CAAC2B,QAAQ;QAChCJ,cAAc,EAAE8F,gBAAgB,CAAC9F;MACrC,CAAC,CAAC,CACL,CAAC;MACFwF,GAAG,CAACrD,IAAI,CAAC;QAAEtB,UAAU;QAAEQ;MAAW,CAAC,CAAC;IACxC,CAAC,CACT,CACJ,CAAC;IAED,OAAOmE,GAAG;EACd,CAAC;EAAAvG,MAAA,CAIKiH,cAAc,GAApB,eAAAA,eAAqB7G,SAAkB,EAA8B;IACjE,IAAI,CAACD,cAAc,CAACC,SAAS,CAAC;IAC9B,IAAMC,IAAI,GAAG,MAAM,IAAI,CAACpD,WAAW;IACnC,IAAI,CAACoD,IAAI,EAAE;MACP,OAAO;QACHqC,MAAM,EAAE,MAAM;QACd3B,cAAc,EAAE,IAAI,CAACjC,UAAU,CAACG,IAAI;QACpC8C,KAAK,EAAE;UACH2B,OAAO,EAAE,CAAC;UACVC,OAAO,EAAE,CAAC;UACV3B,KAAK,EAAE;QACX;MACJ,CAAC;IACL;IAEA,IAAMuD,MAAM,GAAG,MAAMtD,OAAO,CAACiF,IAAI,CAAC,CAC9B1K,cAAc,CACV,IAAI,CAACmD,CAAC,CAACE,IAAI,CACPtD,MAAM,CAACuD,CAAC,IAAIA,CAAC,CAAC4C,MAAM,KAAK,MAAM,CACnC,CACJ,CAAC,EACDlG,cAAc,CACV,IAAI,CAACmD,CAAC,CAACE,IAAI,CACPtD,MAAM,CAACuD,CAAC,IAAIA,CAAC,CAAC4C,MAAM,KAAK,OAAO,CACpC,CACJ,CAAC,CACJ,CAAC;IAEF,IAAI6C,MAAM,CAAC7C,MAAM,KAAK,OAAO,EAAE;MAC3B,MAAM9F,UAAU,CAAC,KAAK,EAAE;QACpBkC,UAAU,EAAE,IAAI,CAACA,UAAU,CAACG,IAAI;QAChC0D,KAAK,EAAE4C,MAAM,CAAC5C;MAClB,CAA
C,CAAC;IACN,CAAC,MAAM;MACH,OAAO4C,MAAM;IACjB;EAEJ,CAAC;EAAA,OAAA1G,gBAAA;AAAA","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/migration-storage/index.js b/dist/esm/plugins/migration-storage/index.js deleted file mode 100644 index 25a48d4350a..00000000000 --- a/dist/esm/plugins/migration-storage/index.js +++ /dev/null @@ -1,209 +0,0 @@ -import { createRevision, clone, randomCouchString, blobToBase64String, prepareQuery, ensureNotFalsy, toArray, arrayFilterNotEmpty } from "../../index.js"; -/** - * Migrates collections of RxDB version A and puts them - * into a RxDatabase that is created with version B. - * This function only works from the previous major version upwards. - * Do not use it to migrate like rxdb v9 to v14. - */ -export async function migrateStorage(params) { - var collections = Object.values(params.database.collections); - var batchSize = params.batchSize ? params.batchSize : 10; - if (params.parallel) { - await Promise.all(collections.map(collection => migrateCollection(collection, params.oldDatabaseName, params.oldStorage, batchSize, params.afterMigrateBatch, params.logFunction))); - } else { - for (var collection of collections) { - await migrateCollection(collection, params.oldDatabaseName, params.oldStorage, batchSize, params.afterMigrateBatch, params.logFunction); - } - } -} -export async function migrateCollection(collection, oldDatabaseName, oldStorage, batchSize, afterMigrateBatch, -// to log each step, pass console.log.bind(console) here. -logFunction) { - function log(message) { - if (logFunction) { - logFunction('migrateCollection(' + collection.name + ')' + message); - } - } - log('start migrateCollection()'); - var schema = collection.schema.jsonSchema; - var primaryPath = collection.schema.primaryPath; - var oldDatabaseInstanceToken = randomCouchString(10); - - /** - * In RxDB v15 we changed how the indexes are created. - * Before (v14), the storage prepended the _deleted field - * to all indexes. 
- * In v15, RxDB will prepend the _deleted field BEFORE sending - * it to the storage. Therefore we have to strip these fields - * when crating v14 storage instances. - */ - if (!oldStorage.rxdbVersion && schema.indexes) { - schema = clone(schema); - schema.indexes = ensureNotFalsy(schema.indexes).map(index => { - index = toArray(index).filter(field => field !== '_deleted'); - if (index.includes('_meta.lwt')) { - return null; - } - return index; - }).filter(arrayFilterNotEmpty); - } - var oldStorageInstance = await oldStorage.createStorageInstance({ - databaseName: oldDatabaseName, - collectionName: collection.name, - multiInstance: false, - options: {}, - schema: schema, - databaseInstanceToken: oldDatabaseInstanceToken, - devMode: false - }); - var plainQuery = { - selector: { - _deleted: { - $eq: false - } - }, - limit: batchSize, - sort: [{ - [primaryPath]: 'asc' - }], - skip: 0 - }; - - /** - * In RxDB v15 we removed statics.prepareQuery() - * But to be downwards compatible, still use that - * when migrating from an old storage. - * TODO remove this in the next major version. v16. - */ - var preparedQuery; - if (oldStorage.statics && oldStorage.statics.prepareQuery) { - preparedQuery = oldStorage.statics.prepareQuery(schema, plainQuery); - } else { - preparedQuery = prepareQuery(schema, plainQuery); - } - var _loop = async function () { - log('loop once'); - /** - * Get a batch of documents - */ - var queryResult = await oldStorageInstance.query(preparedQuery); - var docs = queryResult.documents; - if (docs.length === 0) { - /** - * No more documents to migrate - */ - log('migration of collection done'); - await oldStorageInstance.remove(); - return { - v: void 0 - }; - } - var docsNonMutated = clone(docs); - - /** - * Get attachments - * if defined in the schema. 
- */ - if (schema.attachments) { - await Promise.all(docs.map(async doc => { - var docId = doc[primaryPath]; - await Promise.all(Object.entries(doc._attachments).map(async ([attachmentId, attachmentMeta]) => { - var attachmentData = await oldStorageInstance.getAttachmentData(docId, attachmentId, attachmentMeta.digest); - var attachmentDataString = await blobToBase64String(attachmentData); - doc._attachments[attachmentId] = { - data: attachmentDataString, - digest: attachmentMeta.digest, - length: attachmentMeta.length, - type: attachmentMeta.type - }; - })); - })); - log('got attachments'); - } - - /** - * Insert the documents to the new storage - */ - var insertToNewWriteRows = docs.map(document => { - return { - document - }; - }); - var writeToNewResult = await collection.storageInstance.bulkWrite(insertToNewWriteRows, 'migrate-storage'); - log('written batch to new storage'); - - // TODO we should throw on non-conflict errors here. - // if (Object.keys(writeToNewResult.error).length > 0) { - // throw new Error('could not write to new storage'); - // } - - /** - * Remove the docs from the old storage - */ - var writeToOldRows = docs.map((_doc, idx) => { - var previous = docsNonMutated[idx]; - if (!previous._meta) { - previous._meta = { - lwt: new Date().getTime() - }; - } - var newDoc = clone(previous); - newDoc._deleted = true; - if (!newDoc._meta) { - newDoc._meta = { - lwt: new Date().getTime() - }; - } - newDoc._meta.lwt = new Date().getTime() + 1; - newDoc._rev = createRevision(oldDatabaseInstanceToken, previous); - return { - previous, - document: newDoc - }; - }); - try { - var writeToOldResult = await oldStorageInstance.bulkWrite(writeToOldRows, 'migrate-between-rxdb-versions'); - if (Object.keys(writeToOldResult.error).length > 0) { - console.dir({ - writeToOldRows, - errors: writeToOldResult.error - }); - throw new Error('got error while deleting migrated documents on the old storage'); - } - } catch (err) { - log('could not delete on old instance'); - 
console.dir(err); - throw err; - } - log('deleted batch on old storage'); - await oldStorageInstance.cleanup(0).catch(() => { - /** - * Migration from RxDB v14 to v15 had problem running the cleanup() - * on the old storage because the indexing structure changed. - * Because the periodic cleanup during migration - * is an optional step, we just log instead of throwing an error. - * @link https://github.com/pubkey/rxdb/issues/5565 - * - * TODO remove this in the next major version - */ - log('oldStorageInstance.cleanup(0) has thrown'); - }); - - // run the handler if provided - if (afterMigrateBatch) { - await afterMigrateBatch({ - databaseName: collection.database.name, - collectionName: collection.name, - oldDatabaseName, - insertToNewWriteRows, - writeToNewResult - }); - } - }, - _ret; - while (true) { - _ret = await _loop(); - if (_ret) return _ret.v; - } -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/migration-storage/index.js.map b/dist/esm/plugins/migration-storage/index.js.map deleted file mode 100644 index 4e85355f296..00000000000 --- a/dist/esm/plugins/migration-storage/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","names":["createRevision","clone","randomCouchString","blobToBase64String","prepareQuery","ensureNotFalsy","toArray","arrayFilterNotEmpty","migrateStorage","params","collections","Object","values","database","batchSize","parallel","Promise","all","map","collection","migrateCollection","oldDatabaseName","oldStorage","afterMigrateBatch","logFunction","log","message","name","schema","jsonSchema","primaryPath","oldDatabaseInstanceToken","rxdbVersion","indexes","index","filter","field","includes","oldStorageInstance","createStorageInstance","databaseName","collectionName","multiInstance","options","databaseInstanceToken","devMode","plainQuery","selector","_deleted","$eq","limit","sort","skip","preparedQuery","statics","_loop","queryResult","query","docs","documents","length","remove","v","docsNonMutated","attachments","doc","docId","entries","_attachments","attachmentId","attachmentMeta","attachmentData","getAttachmentData","digest","attachmentDataString","data","type","insertToNewWriteRows","document","writeToNewResult","storageInstance","bulkWrite","writeToOldRows","_doc","idx","previous","_meta","lwt","Date","getTime","newDoc","_rev","writeToOldResult","keys","error","console","dir","errors","Error","err","cleanup","catch","_ret"],"sources":["../../../../src/plugins/migration-storage/index.ts"],"sourcesContent":["import {\n RxDatabase,\n RxCollection,\n createRevision,\n clone,\n BulkWriteRow,\n RxStorageBulkWriteResponse,\n randomCouchString,\n RxStorage,\n blobToBase64String,\n prepareQuery,\n PreparedQuery,\n FilledMangoQuery,\n ensureNotFalsy,\n toArray,\n arrayFilterNotEmpty\n} from '../../index.ts';\n\nexport type RxStorageOld = RxStorage | any;\n\nexport type AfterMigrateBatchHandlerInput = {\n databaseName: string;\n collectionName: string;\n oldDatabaseName: string;\n insertToNewWriteRows: BulkWriteRow[];\n writeToNewResult: RxStorageBulkWriteResponse;\n};\nexport type AfterMigrateBatchHandler = (\n input: 
AfterMigrateBatchHandlerInput\n) => any | Promise;\n\n\nexport type MigrateStorageParams = {\n database: RxDatabase;\n /**\n * Using the migration plugin requires you\n * to rename your new old database.\n * The original name of the v11 database must be provided here.\n */\n oldDatabaseName: string;\n oldStorage: RxStorageOld;\n batchSize?: number;\n parallel?: boolean;\n afterMigrateBatch?: AfterMigrateBatchHandler;\n // to log each step, pass console.log.bind(console) here.\n logFunction?: (message: string) => void;\n}\n\n/**\n * Migrates collections of RxDB version A and puts them\n * into a RxDatabase that is created with version B.\n * This function only works from the previous major version upwards.\n * Do not use it to migrate like rxdb v9 to v14. \n */\nexport async function migrateStorage(\n params: MigrateStorageParams\n): Promise {\n const collections = Object.values(params.database.collections);\n const batchSize = params.batchSize ? params.batchSize : 10;\n if (params.parallel) {\n await Promise.all(\n collections.map(collection => migrateCollection(\n collection,\n params.oldDatabaseName,\n params.oldStorage,\n batchSize,\n params.afterMigrateBatch,\n params.logFunction\n ))\n );\n } else {\n for (const collection of collections) {\n await migrateCollection(\n collection,\n params.oldDatabaseName,\n params.oldStorage,\n batchSize,\n params.afterMigrateBatch,\n params.logFunction\n );\n }\n }\n}\n\nexport async function migrateCollection(\n collection: RxCollection,\n oldDatabaseName: string,\n oldStorage: RxStorageOld,\n batchSize: number,\n afterMigrateBatch?: AfterMigrateBatchHandler,\n // to log each step, pass console.log.bind(console) here.\n logFunction?: (message: string) => void\n) {\n function log(message: string) {\n if (logFunction) {\n logFunction('migrateCollection(' + collection.name + ')' + message);\n }\n }\n log('start migrateCollection()');\n let schema = collection.schema.jsonSchema;\n const primaryPath = 
collection.schema.primaryPath;\n const oldDatabaseInstanceToken = randomCouchString(10);\n\n\n /**\n * In RxDB v15 we changed how the indexes are created.\n * Before (v14), the storage prepended the _deleted field\n * to all indexes.\n * In v15, RxDB will prepend the _deleted field BEFORE sending\n * it to the storage. Therefore we have to strip these fields\n * when crating v14 storage instances.\n */\n if (!oldStorage.rxdbVersion && schema.indexes) {\n schema = clone(schema);\n schema.indexes = ensureNotFalsy(schema.indexes).map(index => {\n index = toArray(index).filter(field => field !== '_deleted');\n if (index.includes('_meta.lwt')) {\n return null;\n }\n return index;\n }).filter(arrayFilterNotEmpty);\n\n }\n\n const oldStorageInstance = await oldStorage.createStorageInstance({\n databaseName: oldDatabaseName,\n collectionName: collection.name,\n multiInstance: false,\n options: {},\n schema: schema,\n databaseInstanceToken: oldDatabaseInstanceToken,\n devMode: false\n });\n\n\n const plainQuery: FilledMangoQuery = {\n selector: {\n _deleted: {\n $eq: false\n }\n } as any,\n limit: batchSize,\n sort: [{ [primaryPath]: 'asc' } as any],\n skip: 0\n };\n\n /**\n * In RxDB v15 we removed statics.prepareQuery()\n * But to be downwards compatible, still use that\n * when migrating from an old storage.\n * TODO remove this in the next major version. 
v16.\n */\n let preparedQuery: PreparedQuery;\n if (oldStorage.statics && oldStorage.statics.prepareQuery) {\n preparedQuery = oldStorage.statics.prepareQuery(\n schema,\n plainQuery\n );\n } else {\n preparedQuery = prepareQuery(\n schema,\n plainQuery\n );\n }\n\n while (true) {\n log('loop once');\n /**\n * Get a batch of documents\n */\n const queryResult = await oldStorageInstance.query(preparedQuery);\n const docs = queryResult.documents;\n if (docs.length === 0) {\n /**\n * No more documents to migrate\n */\n log('migration of collection done');\n await oldStorageInstance.remove();\n return;\n }\n\n const docsNonMutated = clone(docs);\n\n /**\n * Get attachments\n * if defined in the schema.\n */\n if (schema.attachments) {\n await Promise.all(\n docs.map(async (doc: any) => {\n const docId: string = (doc as any)[primaryPath];\n await Promise.all(\n Object.entries(doc._attachments).map(async ([attachmentId, attachmentMeta]) => {\n const attachmentData = await oldStorageInstance.getAttachmentData(\n docId,\n attachmentId,\n (attachmentMeta as any).digest\n );\n const attachmentDataString = await blobToBase64String(attachmentData);\n (doc as any)._attachments[attachmentId] = {\n data: attachmentDataString,\n digest: (attachmentMeta as any).digest,\n length: (attachmentMeta as any).length,\n type: (attachmentMeta as any).type\n }\n })\n );\n })\n );\n log('got attachments');\n }\n\n /**\n * Insert the documents to the new storage\n */\n const insertToNewWriteRows: BulkWriteRow[] = docs.map((document: any) => {\n return { document };\n });\n const writeToNewResult: RxStorageBulkWriteResponse = await collection.storageInstance.bulkWrite(\n insertToNewWriteRows,\n 'migrate-storage'\n );\n log('written batch to new storage');\n\n // TODO we should throw on non-conflict errors here.\n // if (Object.keys(writeToNewResult.error).length > 0) {\n // throw new Error('could not write to new storage');\n // }\n\n /**\n * Remove the docs from the old storage\n */\n const 
writeToOldRows = docs.map((_doc: any, idx: number) => {\n const previous = docsNonMutated[idx];\n if (!previous._meta) {\n previous._meta = {\n lwt: new Date().getTime()\n };\n }\n\n const newDoc: typeof previous = clone(previous);\n newDoc._deleted = true;\n if (!newDoc._meta) {\n newDoc._meta = {\n lwt: new Date().getTime()\n };\n }\n newDoc._meta.lwt = new Date().getTime() + 1;\n newDoc._rev = createRevision(\n oldDatabaseInstanceToken,\n previous\n );\n\n return {\n previous,\n document: newDoc,\n }\n });\n try {\n const writeToOldResult = await oldStorageInstance.bulkWrite(\n writeToOldRows,\n 'migrate-between-rxdb-versions'\n );\n if (Object.keys(writeToOldResult.error).length > 0) {\n console.dir({\n writeToOldRows,\n errors: writeToOldResult.error\n });\n throw new Error('got error while deleting migrated documents on the old storage');\n }\n } catch (err) {\n log('could not delete on old instance');\n console.dir(err);\n throw err;\n }\n log('deleted batch on old storage');\n await oldStorageInstance.cleanup(0)\n .catch(() => {\n /**\n * Migration from RxDB v14 to v15 had problem running the cleanup()\n * on the old storage because the indexing structure changed.\n * Because the periodic cleanup during migration\n * is an optional step, we just log instead of throwing an error.\n * @link https://github.com/pubkey/rxdb/issues/5565\n * \n * TODO remove this in the next major version\n */\n log('oldStorageInstance.cleanup(0) has thrown');\n });\n\n // run the handler if provided\n if (afterMigrateBatch) {\n await afterMigrateBatch({\n databaseName: collection.database.name,\n collectionName: collection.name,\n oldDatabaseName,\n insertToNewWriteRows,\n writeToNewResult\n });\n }\n 
}\n}\n\n\n\n\n\n"],"mappings":"AAAA,SAGIA,cAAc,EACdC,KAAK,EAGLC,iBAAiB,EAEjBC,kBAAkB,EAClBC,YAAY,EAGZC,cAAc,EACdC,OAAO,EACPC,mBAAmB,QAChB,gBAAgB;AAgCvB;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,eAAeC,cAAcA,CAChCC,MAA4B,EACf;EACb,IAAMC,WAAW,GAAGC,MAAM,CAACC,MAAM,CAACH,MAAM,CAACI,QAAQ,CAACH,WAAW,CAAC;EAC9D,IAAMI,SAAS,GAAGL,MAAM,CAACK,SAAS,GAAGL,MAAM,CAACK,SAAS,GAAG,EAAE;EAC1D,IAAIL,MAAM,CAACM,QAAQ,EAAE;IACjB,MAAMC,OAAO,CAACC,GAAG,CACbP,WAAW,CAACQ,GAAG,CAACC,UAAU,IAAIC,iBAAiB,CAC3CD,UAAU,EACVV,MAAM,CAACY,eAAe,EACtBZ,MAAM,CAACa,UAAU,EACjBR,SAAS,EACTL,MAAM,CAACc,iBAAiB,EACxBd,MAAM,CAACe,WACX,CAAC,CACL,CAAC;EACL,CAAC,MAAM;IACH,KAAK,IAAML,UAAU,IAAIT,WAAW,EAAE;MAClC,MAAMU,iBAAiB,CACnBD,UAAU,EACVV,MAAM,CAACY,eAAe,EACtBZ,MAAM,CAACa,UAAU,EACjBR,SAAS,EACTL,MAAM,CAACc,iBAAiB,EACxBd,MAAM,CAACe,WACX,CAAC;IACL;EACJ;AACJ;AAEA,OAAO,eAAeJ,iBAAiBA,CACnCD,UAAmC,EACnCE,eAAuB,EACvBC,UAAkC,EAClCR,SAAiB,EACjBS,iBAA4C;AAC5C;AACAC,WAAuC,EACzC;EACE,SAASC,GAAGA,CAACC,OAAe,EAAE;IAC1B,IAAIF,WAAW,EAAE;MACbA,WAAW,CAAC,oBAAoB,GAAGL,UAAU,CAACQ,IAAI,GAAG,GAAG,GAAGD,OAAO,CAAC;IACvE;EACJ;EACAD,GAAG,CAAC,2BAA2B,CAAC;EAChC,IAAIG,MAAM,GAAGT,UAAU,CAACS,MAAM,CAACC,UAAU;EACzC,IAAMC,WAAW,GAAGX,UAAU,CAACS,MAAM,CAACE,WAAW;EACjD,IAAMC,wBAAwB,GAAG7B,iBAAiB,CAAC,EAAE,CAAC;;EAGtD;AACJ;AACA;AACA;AACA;AACA;AACA;AACA;EACI,IAAI,CAACoB,UAAU,CAACU,WAAW,IAAIJ,MAAM,CAACK,OAAO,EAAE;IAC3CL,MAAM,GAAG3B,KAAK,CAAC2B,MAAM,CAAC;IACtBA,MAAM,CAACK,OAAO,GAAG5B,cAAc,CAACuB,MAAM,CAACK,OAAO,CAAC,CAACf,GAAG,CAACgB,KAAK,IAAI;MACzDA,KAAK,GAAG5B,OAAO,CAAC4B,KAAK,CAAC,CAACC,MAAM,CAACC,KAAK,IAAIA,KAAK,KAAK,UAAU,CAAC;MAC5D,IAAIF,KAAK,CAACG,QAAQ,CAAC,WAAW,CAAC,EAAE;QAC7B,OAAO,IAAI;MACf;MACA,OAAOH,KAAK;IAChB,CAAC,CAAC,CAACC,MAAM,CAAC5B,mBAAmB,CAAC;EAElC;EAEA,IAAM+B,kBAAkB,GAAG,MAAMhB,UAAU,CAACiB,qBAAqB,CAAC;IAC9DC,YAAY,EAAEnB,eAAe;IAC7BoB,cAAc,EAAEtB,UAAU,CAACQ,IAAI;IAC/Be,aAAa,EAAE,KAAK;IACpBC,OAAO,EAAE,CAAC,CAAC;IACXf,MAAM,EAAEA,MAAM;IACdgB,qBAAqB,EAAEb,wBAAwB;IAC/Cc,OAAO,EAAE;EACb,CAAC,CAAC;EAGF,IAAMC,UAAuC,GAAG;IAC5CC,QAAQ,EAAE;MACNC,QAAQ,EAAE;QACN
C,GAAG,EAAE;MACT;IACJ,CAAQ;IACRC,KAAK,EAAEpC,SAAS;IAChBqC,IAAI,EAAE,CAAC;MAAE,CAACrB,WAAW,GAAG;IAAM,CAAC,CAAQ;IACvCsB,IAAI,EAAE;EACV,CAAC;;EAED;AACJ;AACA;AACA;AACA;AACA;EACI,IAAIC,aAAuC;EAC3C,IAAI/B,UAAU,CAACgC,OAAO,IAAIhC,UAAU,CAACgC,OAAO,CAAClD,YAAY,EAAE;IACvDiD,aAAa,GAAG/B,UAAU,CAACgC,OAAO,CAAClD,YAAY,CAC3CwB,MAAM,EACNkB,UACJ,CAAC;EACL,CAAC,MAAM;IACHO,aAAa,GAAGjD,YAAY,CACxBwB,MAAM,EACNkB,UACJ,CAAC;EACL;EAAC,IAAAS,KAAA,kBAAAA,CAAA,EAEY;MACT9B,GAAG,CAAC,WAAW,CAAC;MAChB;AACR;AACA;MACQ,IAAM+B,WAAW,GAAG,MAAMlB,kBAAkB,CAACmB,KAAK,CAACJ,aAAa,CAAC;MACjE,IAAMK,IAAI,GAAGF,WAAW,CAACG,SAAS;MAClC,IAAID,IAAI,CAACE,MAAM,KAAK,CAAC,EAAE;QACnB;AACZ;AACA;QACYnC,GAAG,CAAC,8BAA8B,CAAC;QACnC,MAAMa,kBAAkB,CAACuB,MAAM,CAAC,CAAC;QAAC;UAAAC,CAAA;QAAA;MAEtC;MAEA,IAAMC,cAAc,GAAG9D,KAAK,CAACyD,IAAI,CAAC;;MAElC;AACR;AACA;AACA;MACQ,IAAI9B,MAAM,CAACoC,WAAW,EAAE;QACpB,MAAMhD,OAAO,CAACC,GAAG,CACbyC,IAAI,CAACxC,GAAG,CAAC,MAAO+C,GAAQ,IAAK;UACzB,IAAMC,KAAa,GAAID,GAAG,CAASnC,WAAW,CAAC;UAC/C,MAAMd,OAAO,CAACC,GAAG,CACbN,MAAM,CAACwD,OAAO,CAACF,GAAG,CAACG,YAAY,CAAC,CAAClD,GAAG,CAAC,OAAO,CAACmD,YAAY,EAAEC,cAAc,CAAC,KAAK;YAC3E,IAAMC,cAAc,GAAG,MAAMjC,kBAAkB,CAACkC,iBAAiB,CAC7DN,KAAK,EACLG,YAAY,EACXC,cAAc,CAASG,MAC5B,CAAC;YACD,IAAMC,oBAAoB,GAAG,MAAMvE,kBAAkB,CAACoE,cAAc,CAAC;YACpEN,GAAG,CAASG,YAAY,CAACC,YAAY,CAAC,GAAG;cACtCM,IAAI,EAAED,oBAAoB;cAC1BD,MAAM,EAAGH,cAAc,CAASG,MAAM;cACtCb,MAAM,EAAGU,cAAc,CAASV,MAAM;cACtCgB,IAAI,EAAGN,cAAc,CAASM;YAClC,CAAC;UACL,CAAC,CACL,CAAC;QACL,CAAC,CACL,CAAC;QACDnD,GAAG,CAAC,iBAAiB,CAAC;MAC1B;;MAEA;AACR;AACA;MACQ,IAAMoD,oBAAyC,GAAGnB,IAAI,CAACxC,GAAG,CAAE4D,QAAa,IAAK;QAC1E,OAAO;UAAEA;QAAS,CAAC;MACvB,CAAC,CAAC;MACF,IAAMC,gBAAiD,GAAG,MAAM5D,UAAU,CAAC6D,eAAe,CAACC,SAAS,CAChGJ,oBAAoB,EACpB,iBACJ,CAAC;MACDpD,GAAG,CAAC,8BAA8B,CAAC;;MAEnC;MACA;MACA;MACA;;MAEA;AACR;AACA;MACQ,IAAMyD,cAAc,GAAGxB,IAAI,CAACxC,GAAG,CAAC,CAACiE,IAAS,EAAEC,GAAW,KAAK;QACxD,IAAMC,QAAQ,GAAGtB,cAAc,CAACqB,GAAG,CAAC;QACpC,IAAI,CAACC,QAAQ,CAACC,KAAK,EAAE;UACjBD,QAAQ,CAACC,KAAK,GAAG;YACbC,GAAG,EAAE,IAAIC,IAAI,CAAC,C
AAC,CAACC,OAAO,CAAC;UAC5B,CAAC;QACL;QAEA,IAAMC,MAAuB,GAAGzF,KAAK,CAACoF,QAAQ,CAAC;QAC/CK,MAAM,CAAC1C,QAAQ,GAAG,IAAI;QACtB,IAAI,CAAC0C,MAAM,CAACJ,KAAK,EAAE;UACfI,MAAM,CAACJ,KAAK,GAAG;YACXC,GAAG,EAAE,IAAIC,IAAI,CAAC,CAAC,CAACC,OAAO,CAAC;UAC5B,CAAC;QACL;QACAC,MAAM,CAACJ,KAAK,CAACC,GAAG,GAAG,IAAIC,IAAI,CAAC,CAAC,CAACC,OAAO,CAAC,CAAC,GAAG,CAAC;QAC3CC,MAAM,CAACC,IAAI,GAAG3F,cAAc,CACxB+B,wBAAwB,EACxBsD,QACJ,CAAC;QAED,OAAO;UACHA,QAAQ;UACRP,QAAQ,EAAEY;QACd,CAAC;MACL,CAAC,CAAC;MACF,IAAI;QACA,IAAME,gBAAgB,GAAG,MAAMtD,kBAAkB,CAAC2C,SAAS,CACvDC,cAAc,EACd,+BACJ,CAAC;QACD,IAAIvE,MAAM,CAACkF,IAAI,CAACD,gBAAgB,CAACE,KAAK,CAAC,CAAClC,MAAM,GAAG,CAAC,EAAE;UAChDmC,OAAO,CAACC,GAAG,CAAC;YACRd,cAAc;YACde,MAAM,EAAEL,gBAAgB,CAACE;UAC7B,CAAC,CAAC;UACF,MAAM,IAAII,KAAK,CAAC,gEAAgE,CAAC;QACrF;MACJ,CAAC,CAAC,OAAOC,GAAG,EAAE;QACV1E,GAAG,CAAC,kCAAkC,CAAC;QACvCsE,OAAO,CAACC,GAAG,CAACG,GAAG,CAAC;QAChB,MAAMA,GAAG;MACb;MACA1E,GAAG,CAAC,8BAA8B,CAAC;MACnC,MAAMa,kBAAkB,CAAC8D,OAAO,CAAC,CAAC,CAAC,CAC9BC,KAAK,CAAC,MAAM;QACT;AAChB;AACA;AACA;AACA;AACA;AACA;AACA;AACA;QACgB5E,GAAG,CAAC,0CAA0C,CAAC;MACnD,CAAC,CAAC;;MAEN;MACA,IAAIF,iBAAiB,EAAE;QACnB,MAAMA,iBAAiB,CAAC;UACpBiB,YAAY,EAAErB,UAAU,CAACN,QAAQ,CAACc,IAAI;UACtCc,cAAc,EAAEtB,UAAU,CAACQ,IAAI;UAC/BN,eAAe;UACfwD,oBAAoB;UACpBE;QACJ,CAAC,CAAC;MACN;IACJ,CAAC;IAAAuB,IAAA;EAvID,OAAO,IAAI;IAAAA,IAAA,SAAA/C,KAAA;IAAA,IAAA+C,IAAA,SAAAA,IAAA,CAAAxC,CAAA;EAAA;AAwIf","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/query-builder/index.js b/dist/esm/plugins/query-builder/index.js deleted file mode 100644 index 4c81cecab18..00000000000 --- a/dist/esm/plugins/query-builder/index.js +++ /dev/null @@ -1,40 +0,0 @@ -import { createQueryBuilder, OTHER_MANGO_ATTRIBUTES, OTHER_MANGO_OPERATORS } from "./mquery/nosql-query-builder.js"; -import { createRxQuery } from "../../rx-query.js"; -import { clone } from "../../plugins/utils/index.js"; - -// if the query-builder plugin is used, we have to save its last path -var RXQUERY_OTHER_FLAG = 
'queryBuilderPath'; -export function runBuildingStep(rxQuery, functionName, value) { - var queryBuilder = createQueryBuilder(clone(rxQuery.mangoQuery), rxQuery.other[RXQUERY_OTHER_FLAG]); - queryBuilder[functionName](value); // run - - var queryBuilderJson = queryBuilder.toJSON(); - return createRxQuery(rxQuery.op, queryBuilderJson.query, rxQuery.collection, { - ...rxQuery.other, - [RXQUERY_OTHER_FLAG]: queryBuilderJson.path - }); -} -export function applyBuildingStep(proto, functionName) { - proto[functionName] = function (value) { - return runBuildingStep(this, functionName, value); - }; -} -export * from "./mquery/nosql-query-builder.js"; -export var RxDBQueryBuilderPlugin = { - name: 'query-builder', - rxdb: true, - prototypes: { - RxQuery(proto) { - ['where', 'equals', 'eq', 'or', 'nor', 'and', 'mod', 'exists', 'elemMatch', 'sort'].forEach(attribute => { - applyBuildingStep(proto, attribute); - }); - OTHER_MANGO_ATTRIBUTES.forEach(attribute => { - applyBuildingStep(proto, attribute); - }); - OTHER_MANGO_OPERATORS.forEach(operator => { - applyBuildingStep(proto, operator); - }); - } - } -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/query-builder/index.js.map b/dist/esm/plugins/query-builder/index.js.map deleted file mode 100644 index 382af24ac8b..00000000000 --- a/dist/esm/plugins/query-builder/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["createQueryBuilder","OTHER_MANGO_ATTRIBUTES","OTHER_MANGO_OPERATORS","createRxQuery","clone","RXQUERY_OTHER_FLAG","runBuildingStep","rxQuery","functionName","value","queryBuilder","mangoQuery","other","queryBuilderJson","toJSON","op","query","collection","path","applyBuildingStep","proto","RxDBQueryBuilderPlugin","name","rxdb","prototypes","RxQuery","forEach","attribute","operator"],"sources":["../../../../src/plugins/query-builder/index.ts"],"sourcesContent":["import {\n createQueryBuilder,\n OTHER_MANGO_ATTRIBUTES,\n 
OTHER_MANGO_OPERATORS\n} from './mquery/nosql-query-builder.ts';\nimport type { RxPlugin, RxQuery } from '../../types/index.d.ts';\nimport { createRxQuery } from '../../rx-query.ts';\nimport { clone } from '../../plugins/utils/index.ts';\n\n// if the query-builder plugin is used, we have to save its last path\nconst RXQUERY_OTHER_FLAG = 'queryBuilderPath';\n\nexport function runBuildingStep(\n rxQuery: RxQuery,\n functionName: string,\n value: any\n): RxQuery {\n const queryBuilder = createQueryBuilder(clone(rxQuery.mangoQuery), rxQuery.other[RXQUERY_OTHER_FLAG]);\n\n (queryBuilder as any)[functionName](value); // run\n\n const queryBuilderJson = queryBuilder.toJSON();\n\n return createRxQuery(\n rxQuery.op,\n queryBuilderJson.query,\n rxQuery.collection,\n {\n ...rxQuery.other,\n [RXQUERY_OTHER_FLAG]: queryBuilderJson.path\n }\n ) as RxQuery;\n}\n\nexport function applyBuildingStep(\n proto: any,\n functionName: string\n): void {\n proto[functionName] = function (this: RxQuery, value: any) {\n return runBuildingStep(this, functionName, value);\n };\n}\n\nexport * from './mquery/nosql-query-builder.ts';\n\nexport const RxDBQueryBuilderPlugin: RxPlugin = {\n name: 'query-builder',\n rxdb: true,\n prototypes: {\n RxQuery(proto: any) {\n [\n 'where',\n 'equals',\n 'eq',\n 'or',\n 'nor',\n 'and',\n 'mod',\n 'exists',\n 'elemMatch',\n 'sort'\n ].forEach(attribute => {\n applyBuildingStep(proto, attribute);\n });\n OTHER_MANGO_ATTRIBUTES.forEach(attribute => {\n applyBuildingStep(proto, attribute);\n });\n OTHER_MANGO_OPERATORS.forEach(operator => {\n applyBuildingStep(proto, operator);\n });\n }\n 
}\n};\n"],"mappings":"AAAA,SACIA,kBAAkB,EAClBC,sBAAsB,EACtBC,qBAAqB,QAClB,iCAAiC;AAExC,SAASC,aAAa,QAAQ,mBAAmB;AACjD,SAASC,KAAK,QAAQ,8BAA8B;;AAEpD;AACA,IAAMC,kBAAkB,GAAG,kBAAkB;AAE7C,OAAO,SAASC,eAAeA,CAC3BC,OAA+C,EAC/CC,YAAoB,EACpBC,KAAU,EAC4B;EACtC,IAAMC,YAAY,GAAGV,kBAAkB,CAACI,KAAK,CAACG,OAAO,CAACI,UAAU,CAAC,EAAEJ,OAAO,CAACK,KAAK,CAACP,kBAAkB,CAAC,CAAC;EAEpGK,YAAY,CAASF,YAAY,CAAC,CAACC,KAAK,CAAC,CAAC,CAAC;;EAE5C,IAAMI,gBAAgB,GAAGH,YAAY,CAACI,MAAM,CAAC,CAAC;EAE9C,OAAOX,aAAa,CAChBI,OAAO,CAACQ,EAAE,EACVF,gBAAgB,CAACG,KAAK,EACtBT,OAAO,CAACU,UAAU,EAClB;IACI,GAAGV,OAAO,CAACK,KAAK;IAChB,CAACP,kBAAkB,GAAGQ,gBAAgB,CAACK;EAC3C,CACJ,CAAC;AACL;AAEA,OAAO,SAASC,iBAAiBA,CAC7BC,KAAU,EACVZ,YAAoB,EAChB;EACJY,KAAK,CAACZ,YAAY,CAAC,GAAG,UAAyBC,KAAU,EAAE;IACvD,OAAOH,eAAe,CAAC,IAAI,EAAEE,YAAY,EAAEC,KAAK,CAAC;EACrD,CAAC;AACL;AAEA,cAAc,iCAAiC;AAE/C,OAAO,IAAMY,sBAAgC,GAAG;EAC5CC,IAAI,EAAE,eAAe;EACrBC,IAAI,EAAE,IAAI;EACVC,UAAU,EAAE;IACRC,OAAOA,CAACL,KAAU,EAAE;MAChB,CACI,OAAO,EACP,QAAQ,EACR,IAAI,EACJ,IAAI,EACJ,KAAK,EACL,KAAK,EACL,KAAK,EACL,QAAQ,EACR,WAAW,EACX,MAAM,CACT,CAACM,OAAO,CAACC,SAAS,IAAI;QACnBR,iBAAiB,CAACC,KAAK,EAAEO,SAAS,CAAC;MACvC,CAAC,CAAC;MACF1B,sBAAsB,CAACyB,OAAO,CAACC,SAAS,IAAI;QACxCR,iBAAiB,CAACC,KAAK,EAAEO,SAAS,CAAC;MACvC,CAAC,CAAC;MACFzB,qBAAqB,CAACwB,OAAO,CAACE,QAAQ,IAAI;QACtCT,iBAAiB,CAACC,KAAK,EAAEQ,QAAQ,CAAC;MACtC,CAAC,CAAC;IACN;EACJ;AACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/query-builder/mquery/mquery-utils.js b/dist/esm/plugins/query-builder/mquery/mquery-utils.js deleted file mode 100644 index 18842d7e183..00000000000 --- a/dist/esm/plugins/query-builder/mquery/mquery-utils.js +++ /dev/null @@ -1,33 +0,0 @@ -/** - * this is copied from - * @link https://github.com/aheckmann/mquery/blob/master/lib/utils.js - */ - -/** - * @link https://github.com/aheckmann/mquery/commit/792e69fd0a7281a0300be5cade5a6d7c1d468ad4 - */ -var SPECIAL_PROPERTIES = ['__proto__', 'constructor', 'prototype']; - -/** - * Merges 'from' into 'to' without 
overwriting existing properties. - */ -export function merge(to, from) { - Object.keys(from).forEach(key => { - if (SPECIAL_PROPERTIES.includes(key)) { - return; - } - if (typeof to[key] === 'undefined') { - to[key] = from[key]; - } else { - if (isObject(from[key])) merge(to[key], from[key]);else to[key] = from[key]; - } - }); -} - -/** - * Determines if `arg` is an object. - */ -export function isObject(arg) { - return '[object Object]' === arg.toString(); -} -//# sourceMappingURL=mquery-utils.js.map \ No newline at end of file diff --git a/dist/esm/plugins/query-builder/mquery/mquery-utils.js.map b/dist/esm/plugins/query-builder/mquery/mquery-utils.js.map deleted file mode 100644 index cea6dfe6352..00000000000 --- a/dist/esm/plugins/query-builder/mquery/mquery-utils.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"mquery-utils.js","names":["SPECIAL_PROPERTIES","merge","to","from","Object","keys","forEach","key","includes","isObject","arg","toString"],"sources":["../../../../../src/plugins/query-builder/mquery/mquery-utils.ts"],"sourcesContent":["/**\n * this is copied from\n * @link https://github.com/aheckmann/mquery/blob/master/lib/utils.js\n */\n\n\n/**\n * @link https://github.com/aheckmann/mquery/commit/792e69fd0a7281a0300be5cade5a6d7c1d468ad4\n */\nconst SPECIAL_PROPERTIES = ['__proto__', 'constructor', 'prototype'];\n\n/**\n * Merges 'from' into 'to' without overwriting existing properties.\n */\nexport function merge(to: any, from: any): any {\n Object.keys(from)\n .forEach(key => {\n if (SPECIAL_PROPERTIES.includes(key)) {\n return;\n }\n if (typeof to[key] === 'undefined') {\n to[key] = from[key];\n } else {\n if (isObject(from[key]))\n merge(to[key], from[key]);\n else\n to[key] = from[key];\n }\n });\n}\n\n/**\n * Determines if `arg` is an object.\n */\nexport function isObject(arg: Object | any[] | String | Function | RegExp | any): boolean {\n return '[object Object]' === 
arg.toString();\n}\n"],"mappings":"AAAA;AACA;AACA;AACA;;AAGA;AACA;AACA;AACA,IAAMA,kBAAkB,GAAG,CAAC,WAAW,EAAE,aAAa,EAAE,WAAW,CAAC;;AAEpE;AACA;AACA;AACA,OAAO,SAASC,KAAKA,CAACC,EAAO,EAAEC,IAAS,EAAO;EAC3CC,MAAM,CAACC,IAAI,CAACF,IAAI,CAAC,CACZG,OAAO,CAACC,GAAG,IAAI;IACZ,IAAIP,kBAAkB,CAACQ,QAAQ,CAACD,GAAG,CAAC,EAAE;MAClC;IACJ;IACA,IAAI,OAAOL,EAAE,CAACK,GAAG,CAAC,KAAK,WAAW,EAAE;MAChCL,EAAE,CAACK,GAAG,CAAC,GAAGJ,IAAI,CAACI,GAAG,CAAC;IACvB,CAAC,MAAM;MACH,IAAIE,QAAQ,CAACN,IAAI,CAACI,GAAG,CAAC,CAAC,EACnBN,KAAK,CAACC,EAAE,CAACK,GAAG,CAAC,EAAEJ,IAAI,CAACI,GAAG,CAAC,CAAC,CAAC,KAE1BL,EAAE,CAACK,GAAG,CAAC,GAAGJ,IAAI,CAACI,GAAG,CAAC;IAC3B;EACJ,CAAC,CAAC;AACV;;AAEA;AACA;AACA;AACA,OAAO,SAASE,QAAQA,CAACC,GAAsD,EAAW;EACtF,OAAO,iBAAiB,KAAKA,GAAG,CAACC,QAAQ,CAAC,CAAC;AAC/C","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/query-builder/mquery/nosql-query-builder.js b/dist/esm/plugins/query-builder/mquery/nosql-query-builder.js deleted file mode 100644 index 3c4cc067518..00000000000 --- a/dist/esm/plugins/query-builder/mquery/nosql-query-builder.js +++ /dev/null @@ -1,477 +0,0 @@ -/** - * this is based on - * @link https://github.com/aheckmann/mquery/blob/master/lib/mquery.js - */ -import { isObject, merge as _merge } from "./mquery-utils.js"; -import { newRxTypeError, newRxError } from "../../../rx-error.js"; -export var NoSqlQueryBuilderClass = /*#__PURE__*/function () { - /** - * MQuery constructor used for building queries. 
- * - * ####Example: - * var query = new MQuery({ name: 'mquery' }); - * query.where('age').gte(21).exec(callback); - * - */ - function NoSqlQueryBuilderClass(mangoQuery, _path) { - this.options = {}; - this._conditions = {}; - this._fields = {}; - this._path = _path; - if (mangoQuery) { - var queryBuilder = this; - if (mangoQuery.selector) { - queryBuilder.find(mangoQuery.selector); - } - if (mangoQuery.limit) { - queryBuilder.limit(mangoQuery.limit); - } - if (mangoQuery.skip) { - queryBuilder.skip(mangoQuery.skip); - } - if (mangoQuery.sort) { - mangoQuery.sort.forEach(s => queryBuilder.sort(s)); - } - } - } - - /** - * Specifies a `path` for use with chaining. - */ - var _proto = NoSqlQueryBuilderClass.prototype; - _proto.where = function where(_path, _val) { - if (!arguments.length) return this; - var type = typeof arguments[0]; - if ('string' === type) { - this._path = arguments[0]; - if (2 === arguments.length) { - this._conditions[this._path] = arguments[1]; - } - return this; - } - if ('object' === type && !Array.isArray(arguments[0])) { - return this.merge(arguments[0]); - } - throw newRxTypeError('MQ1', { - path: arguments[0] - }); - } - - /** - * Specifies the complementary comparison value for paths specified with `where()` - * ####Example - * User.where('age').equals(49); - */; - _proto.equals = function equals(val) { - this._ensurePath('equals'); - var path = this._path; - this._conditions[path] = val; - return this; - } - - /** - * Specifies the complementary comparison value for paths specified with `where()` - * This is alias of `equals` - */; - _proto.eq = function eq(val) { - this._ensurePath('eq'); - var path = this._path; - this._conditions[path] = val; - return this; - } - - /** - * Specifies arguments for an `$or` condition. 
- * ####Example - * query.or([{ color: 'red' }, { status: 'emergency' }]) - */; - _proto.or = function or(array) { - var or = this._conditions.$or || (this._conditions.$or = []); - if (!Array.isArray(array)) array = [array]; - or.push.apply(or, array); - return this; - } - - /** - * Specifies arguments for a `$nor` condition. - * ####Example - * query.nor([{ color: 'green' }, { status: 'ok' }]) - */; - _proto.nor = function nor(array) { - var nor = this._conditions.$nor || (this._conditions.$nor = []); - if (!Array.isArray(array)) array = [array]; - nor.push.apply(nor, array); - return this; - } - - /** - * Specifies arguments for a `$and` condition. - * ####Example - * query.and([{ color: 'green' }, { status: 'ok' }]) - * @see $and http://docs.mongodb.org/manual/reference/operator/and/ - */; - _proto.and = function and(array) { - var and = this._conditions.$and || (this._conditions.$and = []); - if (!Array.isArray(array)) array = [array]; - and.push.apply(and, array); - return this; - } - - /** - * Specifies a `$mod` condition - */; - _proto.mod = function mod(_path, _val) { - var val; - var path; - if (1 === arguments.length) { - this._ensurePath('mod'); - val = arguments[0]; - path = this._path; - } else if (2 === arguments.length && !Array.isArray(arguments[1])) { - this._ensurePath('mod'); - val = arguments.slice(); - path = this._path; - } else if (3 === arguments.length) { - val = arguments.slice(1); - path = arguments[0]; - } else { - val = arguments[1]; - path = arguments[0]; - } - var conds = this._conditions[path] || (this._conditions[path] = {}); - conds.$mod = val; - return this; - } - - /** - * Specifies an `$exists` condition - * ####Example - * // { name: { $exists: true }} - * Thing.where('name').exists() - * Thing.where('name').exists(true) - * Thing.find().exists('name') - */; - _proto.exists = function exists(_path, _val) { - var path; - var val; - if (0 === arguments.length) { - this._ensurePath('exists'); - path = this._path; - val = true; - } 
else if (1 === arguments.length) { - if ('boolean' === typeof arguments[0]) { - this._ensurePath('exists'); - path = this._path; - val = arguments[0]; - } else { - path = arguments[0]; - val = true; - } - } else if (2 === arguments.length) { - path = arguments[0]; - val = arguments[1]; - } - var conds = this._conditions[path] || (this._conditions[path] = {}); - conds.$exists = val; - return this; - } - - /** - * Specifies an `$elemMatch` condition - * ####Example - * query.elemMatch('comment', { author: 'autobot', votes: {$gte: 5}}) - * query.where('comment').elemMatch({ author: 'autobot', votes: {$gte: 5}}) - * query.elemMatch('comment', function (elem) { - * elem.where('author').equals('autobot'); - * elem.where('votes').gte(5); - * }) - * query.where('comment').elemMatch(function (elem) { - * elem.where({ author: 'autobot' }); - * elem.where('votes').gte(5); - * }) - */; - _proto.elemMatch = function elemMatch(_path, _criteria) { - if (null === arguments[0]) throw newRxTypeError('MQ2'); - var fn; - var path; - var criteria; - if ('function' === typeof arguments[0]) { - this._ensurePath('elemMatch'); - path = this._path; - fn = arguments[0]; - } else if (isObject(arguments[0])) { - this._ensurePath('elemMatch'); - path = this._path; - criteria = arguments[0]; - } else if ('function' === typeof arguments[1]) { - path = arguments[0]; - fn = arguments[1]; - } else if (arguments[1] && isObject(arguments[1])) { - path = arguments[0]; - criteria = arguments[1]; - } else throw newRxTypeError('MQ2'); - if (fn) { - criteria = new NoSqlQueryBuilderClass(); - fn(criteria); - criteria = criteria._conditions; - } - var conds = this._conditions[path] || (this._conditions[path] = {}); - conds.$elemMatch = criteria; - return this; - } - - /** - * Sets the sort order - * If an object is passed, values allowed are 'asc', 'desc', 'ascending', 'descending', 1, and -1. - * If a string is passed, it must be a space delimited list of path names. 
- * The sort order of each path is ascending unless the path name is prefixed with `-` which will be treated as descending. - * ####Example - * query.sort({ field: 'asc', test: -1 }); - * query.sort('field -test'); - * query.sort([['field', 1], ['test', -1]]); - */; - _proto.sort = function sort(arg) { - if (!arg) return this; - var len; - var type = typeof arg; - // .sort([['field', 1], ['test', -1]]) - if (Array.isArray(arg)) { - len = arg.length; - for (var i = 0; i < arg.length; ++i) { - _pushArr(this.options, arg[i][0], arg[i][1]); - } - return this; - } - - // .sort('field -test') - if (1 === arguments.length && 'string' === type) { - arg = arg.split(/\s+/); - len = arg.length; - for (var _i = 0; _i < len; ++_i) { - var field = arg[_i]; - if (!field) continue; - var ascend = '-' === field[0] ? -1 : 1; - if (ascend === -1) field = field.substring(1); - push(this.options, field, ascend); - } - return this; - } - - // .sort({ field: 1, test: -1 }) - if (isObject(arg)) { - var keys = Object.keys(arg); - keys.forEach(field => push(this.options, field, arg[field])); - return this; - } - throw newRxTypeError('MQ3', { - args: arguments - }); - } - - /** - * Merges another MQuery or conditions object into this one. - * - * When a MQuery is passed, conditions, field selection and options are merged. 
- * - */; - _proto.merge = function merge(source) { - if (!source) { - return this; - } - if (!canMerge(source)) { - throw newRxTypeError('MQ4', { - source - }); - } - if (source instanceof NoSqlQueryBuilderClass) { - // if source has a feature, apply it to ourselves - - if (source._conditions) _merge(this._conditions, source._conditions); - if (source._fields) { - if (!this._fields) this._fields = {}; - _merge(this._fields, source._fields); - } - if (source.options) { - if (!this.options) this.options = {}; - _merge(this.options, source.options); - } - if (source._distinct) this._distinct = source._distinct; - return this; - } - - // plain object - _merge(this._conditions, source); - return this; - } - - /** - * Finds documents. - * ####Example - * query.find() - * query.find({ name: 'Burning Lights' }) - */; - _proto.find = function find(criteria) { - if (canMerge(criteria)) { - this.merge(criteria); - } - return this; - } - - /** - * Make sure _path is set. - * - * @param {String} method - */; - _proto._ensurePath = function _ensurePath(method) { - if (!this._path) { - throw newRxError('MQ5', { - method - }); - } - }; - _proto.toJSON = function toJSON() { - var query = { - selector: this._conditions - }; - if (this.options.skip) { - query.skip = this.options.skip; - } - if (this.options.limit) { - query.limit = this.options.limit; - } - if (this.options.sort) { - query.sort = mQuerySortToRxDBSort(this.options.sort); - } - return { - query, - path: this._path - }; - }; - return NoSqlQueryBuilderClass; -}(); -export function mQuerySortToRxDBSort(sort) { - return Object.entries(sort).map(([k, v]) => { - var direction = v === 1 ? 'asc' : 'desc'; - var part = { - [k]: direction - }; - return part; - }); -} - -/** - * Because some prototype-methods are generated, - * we have to define the type of NoSqlQueryBuilder here - */ - -/** - * limit, skip, maxScan, batchSize, comment - * - * Sets these associated options. 
- * - * query.comment('feed query'); - */ -export var OTHER_MANGO_ATTRIBUTES = ['limit', 'skip', 'maxScan', 'batchSize', 'comment']; -OTHER_MANGO_ATTRIBUTES.forEach(function (method) { - NoSqlQueryBuilderClass.prototype[method] = function (v) { - this.options[method] = v; - return this; - }; -}); - -/** - * gt, gte, lt, lte, ne, in, nin, all, regex, size, maxDistance - * - * Thing.where('type').nin(array) - */ -export var OTHER_MANGO_OPERATORS = ['gt', 'gte', 'lt', 'lte', 'ne', 'in', 'nin', 'all', 'regex', 'size']; -OTHER_MANGO_OPERATORS.forEach(function ($conditional) { - NoSqlQueryBuilderClass.prototype[$conditional] = function () { - var path; - var val; - if (1 === arguments.length) { - this._ensurePath($conditional); - val = arguments[0]; - path = this._path; - } else { - val = arguments[1]; - path = arguments[0]; - } - var conds = this._conditions[path] === null || typeof this._conditions[path] === 'object' ? this._conditions[path] : this._conditions[path] = {}; - if ($conditional === 'regex') { - if (val instanceof RegExp) { - throw newRxError('QU16', { - field: path, - query: this._conditions - }); - } - if (typeof val === 'string') { - conds['$' + $conditional] = val; - } else { - conds['$' + $conditional] = val.$regex; - if (val.$options) { - conds.$options = val.$options; - } - } - } else { - conds['$' + $conditional] = val; - } - return this; - }; -}); -function push(opts, field, value) { - if (Array.isArray(opts.sort)) { - throw newRxTypeError('MQ6', { - opts, - field, - value - }); - } - if (value && value.$meta) { - var sort = opts.sort || (opts.sort = {}); - sort[field] = { - $meta: value.$meta - }; - return; - } - var val = String(value || 1).toLowerCase(); - if (!/^(?:ascending|asc|descending|desc|1|-1)$/.test(val)) { - if (Array.isArray(value)) value = '[' + value + ']'; - throw newRxTypeError('MQ7', { - field, - value - }); - } - // store `sort` in a sane format - var s = opts.sort || (opts.sort = {}); - var valueStr = 
value.toString().replace('asc', '1').replace('ascending', '1').replace('desc', '-1').replace('descending', '-1'); - s[field] = parseInt(valueStr, 10); -} -function _pushArr(opts, field, value) { - opts.sort = opts.sort || []; - if (!Array.isArray(opts.sort)) { - throw newRxTypeError('MQ8', { - opts, - field, - value - }); - } - - /* const valueStr = value.toString() - .replace('asc', '1') - .replace('ascending', '1') - .replace('desc', '-1') - .replace('descending', '-1');*/ - opts.sort.push([field, value]); -} - -/** - * Determines if `conds` can be merged using `mquery().merge()` - */ -export function canMerge(conds) { - return conds instanceof NoSqlQueryBuilderClass || isObject(conds); -} -export function createQueryBuilder(query, path) { - return new NoSqlQueryBuilderClass(query, path); -} -//# sourceMappingURL=nosql-query-builder.js.map \ No newline at end of file diff --git a/dist/esm/plugins/query-builder/mquery/nosql-query-builder.js.map b/dist/esm/plugins/query-builder/mquery/nosql-query-builder.js.map deleted file mode 100644 index b046eb220c1..00000000000 --- a/dist/esm/plugins/query-builder/mquery/nosql-query-builder.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"nosql-query-builder.js","names":["isObject","merge","newRxTypeError","newRxError","NoSqlQueryBuilderClass","mangoQuery","_path","options","_conditions","_fields","queryBuilder","selector","find","limit","skip","sort","forEach","s","_proto","prototype","where","_val","arguments","length","type","Array","isArray","path","equals","val","_ensurePath","eq","or","array","$or","push","apply","nor","$nor","and","$and","mod","slice","conds","$mod","exists","$exists","elemMatch","_criteria","fn","criteria","$elemMatch","arg","len","i","_pushArr","split","field","ascend","substring","keys","Object","args","source","canMerge","_distinct","method","toJSON","query","mQuerySortToRxDBSort","entries","map","k","v","direction","part","OTHER_MANGO_ATTRIBUTES","OTHER_MANGO_OPERATORS","$conditional","RegExp","$regex","$options","opts","value","$meta","String","toLowerCase","test","valueStr","toString","replace","parseInt","createQueryBuilder"],"sources":["../../../../../src/plugins/query-builder/mquery/nosql-query-builder.ts"],"sourcesContent":["/**\n * this is based on\n * @link https://github.com/aheckmann/mquery/blob/master/lib/mquery.js\n */\nimport {\n isObject,\n merge\n} from './mquery-utils.ts';\nimport {\n newRxTypeError,\n newRxError\n} from '../../../rx-error.ts';\nimport type {\n MangoQuery,\n MangoQuerySelector,\n MangoQuerySortPart,\n MangoQuerySortDirection\n} from '../../../types/index.d.ts';\n\n\ndeclare type MQueryOptions = {\n limit?: number;\n skip?: number;\n sort?: any;\n};\n\nexport class NoSqlQueryBuilderClass {\n\n public options: MQueryOptions = {};\n public _conditions: MangoQuerySelector = {};\n public _fields: any = {};\n private _distinct: any;\n\n /**\n * MQuery constructor used for building queries.\n *\n * ####Example:\n * var query = new MQuery({ name: 'mquery' });\n * query.where('age').gte(21).exec(callback);\n *\n */\n constructor(\n mangoQuery?: MangoQuery,\n public _path?: any\n ) {\n if (mangoQuery) {\n const queryBuilder: 
NoSqlQueryBuilder = this as any;\n\n if (mangoQuery.selector) {\n queryBuilder.find(mangoQuery.selector);\n }\n if (mangoQuery.limit) {\n queryBuilder.limit(mangoQuery.limit);\n }\n if (mangoQuery.skip) {\n queryBuilder.skip(mangoQuery.skip);\n }\n if (mangoQuery.sort) {\n mangoQuery.sort.forEach(s => queryBuilder.sort(s));\n }\n }\n }\n\n /**\n * Specifies a `path` for use with chaining.\n */\n where(_path: string, _val?: MangoQuerySelector): NoSqlQueryBuilder {\n if (!arguments.length) return this as any;\n const type = typeof arguments[0];\n if ('string' === type) {\n this._path = arguments[0];\n if (2 === arguments.length) {\n (this._conditions as any)[this._path] = arguments[1];\n }\n return this as any;\n }\n\n if ('object' === type && !Array.isArray(arguments[0])) {\n return this.merge(arguments[0]);\n }\n\n throw newRxTypeError('MQ1', {\n path: arguments[0]\n });\n }\n\n /**\n * Specifies the complementary comparison value for paths specified with `where()`\n * ####Example\n * User.where('age').equals(49);\n */\n equals(val: any): NoSqlQueryBuilder {\n this._ensurePath('equals');\n const path = this._path;\n (this._conditions as any)[path] = val;\n return this as any;\n }\n\n /**\n * Specifies the complementary comparison value for paths specified with `where()`\n * This is alias of `equals`\n */\n eq(val: any): NoSqlQueryBuilder {\n this._ensurePath('eq');\n const path = this._path;\n (this._conditions as any)[path] = val;\n return this as any;\n }\n\n /**\n * Specifies arguments for an `$or` condition.\n * ####Example\n * query.or([{ color: 'red' }, { status: 'emergency' }])\n */\n or(array: any[]): NoSqlQueryBuilder {\n const or = this._conditions.$or || (this._conditions.$or = []);\n if (!Array.isArray(array)) array = [array];\n or.push.apply(or, array);\n return this as any;\n }\n\n /**\n * Specifies arguments for a `$nor` condition.\n * ####Example\n * query.nor([{ color: 'green' }, { status: 'ok' }])\n */\n nor(array: any[]): NoSqlQueryBuilder {\n 
const nor = this._conditions.$nor || (this._conditions.$nor = []);\n if (!Array.isArray(array)) array = [array];\n nor.push.apply(nor, array);\n return this as any;\n }\n\n /**\n * Specifies arguments for a `$and` condition.\n * ####Example\n * query.and([{ color: 'green' }, { status: 'ok' }])\n * @see $and http://docs.mongodb.org/manual/reference/operator/and/\n */\n and(array: any[]): NoSqlQueryBuilder {\n const and = this._conditions.$and || (this._conditions.$and = []);\n if (!Array.isArray(array)) array = [array];\n and.push.apply(and, array);\n return this as any;\n }\n\n /**\n * Specifies a `$mod` condition\n */\n mod(_path: string, _val: number): NoSqlQueryBuilder {\n let val;\n let path;\n\n if (1 === arguments.length) {\n this._ensurePath('mod');\n val = arguments[0];\n path = this._path;\n } else if (2 === arguments.length && !Array.isArray(arguments[1])) {\n this._ensurePath('mod');\n val = (arguments as any).slice();\n path = this._path;\n } else if (3 === arguments.length) {\n val = (arguments as any).slice(1);\n path = arguments[0];\n } else {\n val = arguments[1];\n path = arguments[0];\n }\n\n const conds = (this._conditions as any)[path] || ((this._conditions as any)[path] = {});\n conds.$mod = val;\n return this as any;\n }\n\n /**\n * Specifies an `$exists` condition\n * ####Example\n * // { name: { $exists: true }}\n * Thing.where('name').exists()\n * Thing.where('name').exists(true)\n * Thing.find().exists('name')\n */\n exists(_path: string, _val: number): NoSqlQueryBuilder {\n let path;\n let val;\n if (0 === arguments.length) {\n this._ensurePath('exists');\n path = this._path;\n val = true;\n } else if (1 === arguments.length) {\n if ('boolean' === typeof arguments[0]) {\n this._ensurePath('exists');\n path = this._path;\n val = arguments[0];\n } else {\n path = arguments[0];\n val = true;\n }\n } else if (2 === arguments.length) {\n path = arguments[0];\n val = arguments[1];\n }\n\n const conds = (this._conditions as any)[path] || 
((this._conditions as any)[path] = {});\n conds.$exists = val;\n return this as any;\n }\n\n /**\n * Specifies an `$elemMatch` condition\n * ####Example\n * query.elemMatch('comment', { author: 'autobot', votes: {$gte: 5}})\n * query.where('comment').elemMatch({ author: 'autobot', votes: {$gte: 5}})\n * query.elemMatch('comment', function (elem) {\n * elem.where('author').equals('autobot');\n * elem.where('votes').gte(5);\n * })\n * query.where('comment').elemMatch(function (elem) {\n * elem.where({ author: 'autobot' });\n * elem.where('votes').gte(5);\n * })\n */\n elemMatch(_path: string, _criteria: any): NoSqlQueryBuilder {\n if (null === arguments[0])\n throw newRxTypeError('MQ2');\n\n let fn;\n let path;\n let criteria;\n\n if ('function' === typeof arguments[0]) {\n this._ensurePath('elemMatch');\n path = this._path;\n fn = arguments[0];\n } else if (isObject(arguments[0])) {\n this._ensurePath('elemMatch');\n path = this._path;\n criteria = arguments[0];\n } else if ('function' === typeof arguments[1]) {\n path = arguments[0];\n fn = arguments[1];\n } else if (arguments[1] && isObject(arguments[1])) {\n path = arguments[0];\n criteria = arguments[1];\n } else\n throw newRxTypeError('MQ2');\n\n if (fn) {\n criteria = new NoSqlQueryBuilderClass;\n fn(criteria);\n criteria = criteria._conditions;\n }\n\n const conds = (this._conditions as any)[path] || ((this._conditions as any)[path] = {});\n conds.$elemMatch = criteria;\n return this as any;\n }\n\n /**\n * Sets the sort order\n * If an object is passed, values allowed are 'asc', 'desc', 'ascending', 'descending', 1, and -1.\n * If a string is passed, it must be a space delimited list of path names.\n * The sort order of each path is ascending unless the path name is prefixed with `-` which will be treated as descending.\n * ####Example\n * query.sort({ field: 'asc', test: -1 });\n * query.sort('field -test');\n * query.sort([['field', 1], ['test', -1]]);\n */\n sort(arg: any): NoSqlQueryBuilder {\n if (!arg) 
return this as any;\n let len;\n const type = typeof arg;\n // .sort([['field', 1], ['test', -1]])\n if (Array.isArray(arg)) {\n len = arg.length;\n for (let i = 0; i < arg.length; ++i) {\n _pushArr(this.options, arg[i][0], arg[i][1]);\n }\n\n return this as any;\n }\n\n // .sort('field -test')\n if (1 === arguments.length && 'string' === type) {\n arg = arg.split(/\\s+/);\n len = arg.length;\n for (let i = 0; i < len; ++i) {\n let field = arg[i];\n if (!field) continue;\n const ascend = '-' === field[0] ? -1 : 1;\n if (ascend === -1) field = field.substring(1);\n push(this.options, field, ascend);\n }\n\n return this as any;\n }\n\n // .sort({ field: 1, test: -1 })\n if (isObject(arg)) {\n const keys = Object.keys(arg);\n keys.forEach(field => push(this.options, field, arg[field]));\n return this as any;\n }\n\n throw newRxTypeError('MQ3', {\n args: arguments\n });\n }\n\n /**\n * Merges another MQuery or conditions object into this one.\n *\n * When a MQuery is passed, conditions, field selection and options are merged.\n *\n */\n merge(source: any): NoSqlQueryBuilder {\n if (!source) {\n return this as any;\n }\n\n if (!canMerge(source)) {\n throw newRxTypeError('MQ4', {\n source\n });\n }\n\n if (source instanceof NoSqlQueryBuilderClass) {\n // if source has a feature, apply it to ourselves\n\n if (source._conditions)\n merge(this._conditions, source._conditions);\n\n if (source._fields) {\n if (!this._fields) this._fields = {};\n merge(this._fields, source._fields);\n }\n\n if (source.options) {\n if (!this.options) this.options = {};\n merge(this.options, source.options);\n }\n\n if (source._distinct)\n this._distinct = source._distinct;\n\n return this as any;\n }\n\n // plain object\n merge(this._conditions, source);\n\n return this as any;\n }\n\n /**\n * Finds documents.\n * ####Example\n * query.find()\n * query.find({ name: 'Burning Lights' })\n */\n find(criteria: any): NoSqlQueryBuilder {\n if (canMerge(criteria)) {\n this.merge(criteria);\n }\n\n 
return this as any;\n }\n\n /**\n * Make sure _path is set.\n *\n * @param {String} method\n */\n _ensurePath(method: any) {\n if (!this._path) {\n throw newRxError('MQ5', {\n method\n });\n }\n }\n\n toJSON(): {\n query: MangoQuery;\n path?: string;\n } {\n const query: MangoQuery = {\n selector: this._conditions,\n };\n\n if (this.options.skip) {\n query.skip = this.options.skip;\n }\n if (this.options.limit) {\n query.limit = this.options.limit;\n }\n if (this.options.sort) {\n query.sort = mQuerySortToRxDBSort(this.options.sort);\n }\n\n return {\n query,\n path: this._path\n };\n }\n}\n\nexport function mQuerySortToRxDBSort(\n sort: { [k: string]: 1 | -1; }\n): MangoQuerySortPart[] {\n return Object.entries(sort).map(([k, v]) => {\n const direction: MangoQuerySortDirection = v === 1 ? 'asc' : 'desc';\n const part: MangoQuerySortPart = { [k]: direction } as any;\n return part;\n });\n}\n\n/**\n * Because some prototype-methods are generated,\n * we have to define the type of NoSqlQueryBuilder here\n */\n\nexport interface NoSqlQueryBuilder extends NoSqlQueryBuilderClass {\n maxScan: ReturnSelfNumberFunction;\n batchSize: ReturnSelfNumberFunction;\n limit: ReturnSelfNumberFunction;\n skip: ReturnSelfNumberFunction;\n comment: ReturnSelfFunction;\n\n gt: ReturnSelfFunction;\n gte: ReturnSelfFunction;\n lt: ReturnSelfFunction;\n lte: ReturnSelfFunction;\n ne: ReturnSelfFunction;\n in: ReturnSelfFunction;\n nin: ReturnSelfFunction;\n all: ReturnSelfFunction;\n regex: ReturnSelfFunction;\n size: ReturnSelfFunction;\n\n}\n\ndeclare type ReturnSelfFunction = (v: any) => NoSqlQueryBuilder;\ndeclare type ReturnSelfNumberFunction = (v: number | null) => NoSqlQueryBuilder;\n\n/**\n * limit, skip, maxScan, batchSize, comment\n *\n * Sets these associated options.\n *\n * query.comment('feed query');\n */\nexport const OTHER_MANGO_ATTRIBUTES = ['limit', 'skip', 'maxScan', 'batchSize', 'comment'];\nOTHER_MANGO_ATTRIBUTES.forEach(function (method) {\n 
(NoSqlQueryBuilderClass.prototype as any)[method] = function (v: any) {\n this.options[method] = v;\n return this;\n };\n});\n\n\n/**\n * gt, gte, lt, lte, ne, in, nin, all, regex, size, maxDistance\n *\n * Thing.where('type').nin(array)\n */\nexport const OTHER_MANGO_OPERATORS = [\n 'gt', 'gte', 'lt', 'lte', 'ne',\n 'in', 'nin', 'all', 'regex', 'size'\n];\nOTHER_MANGO_OPERATORS.forEach(function ($conditional) {\n (NoSqlQueryBuilderClass.prototype as any)[$conditional] = function () {\n let path;\n let val;\n\n if (1 === arguments.length) {\n this._ensurePath($conditional);\n val = arguments[0];\n path = this._path;\n } else {\n val = arguments[1];\n path = arguments[0];\n }\n\n const conds = this._conditions[path] === null || typeof this._conditions[path] === 'object' ?\n this._conditions[path] :\n (this._conditions[path] = {});\n\n\n\n if ($conditional === 'regex') {\n if (val instanceof RegExp) {\n throw newRxError('QU16', {\n field: path,\n query: this._conditions,\n });\n }\n if (typeof val === 'string') {\n conds['$' + $conditional] = val;\n } else {\n conds['$' + $conditional] = val.$regex;\n if (val.$options) {\n conds.$options = val.$options;\n }\n }\n } else {\n conds['$' + $conditional] = val;\n }\n\n return this;\n };\n});\n\n\nfunction push(opts: any, field: string, value: any) {\n if (Array.isArray(opts.sort)) {\n throw newRxTypeError('MQ6', {\n opts,\n field,\n value\n });\n }\n\n if (value && value.$meta) {\n const sort = opts.sort || (opts.sort = {});\n sort[field] = {\n $meta: value.$meta\n };\n return;\n }\n\n const val = String(value || 1).toLowerCase();\n if (!/^(?:ascending|asc|descending|desc|1|-1)$/.test(val)) {\n if (Array.isArray(value)) value = '[' + value + ']';\n throw newRxTypeError('MQ7', {\n field,\n value\n });\n }\n // store `sort` in a sane format\n const s = opts.sort || (opts.sort = {});\n const valueStr = value.toString()\n .replace('asc', '1')\n .replace('ascending', '1')\n .replace('desc', '-1')\n .replace('descending', 
'-1');\n s[field] = parseInt(valueStr, 10);\n}\n\nfunction _pushArr(opts: any, field: string, value: any) {\n opts.sort = opts.sort || [];\n if (!Array.isArray(opts.sort)) {\n throw newRxTypeError('MQ8', {\n opts,\n field,\n value\n });\n }\n\n /* const valueStr = value.toString()\n .replace('asc', '1')\n .replace('ascending', '1')\n .replace('desc', '-1')\n .replace('descending', '-1');*/\n opts.sort.push([field, value]);\n}\n\n\n/**\n * Determines if `conds` can be merged using `mquery().merge()`\n */\nexport function canMerge(conds: any): boolean {\n return conds instanceof NoSqlQueryBuilderClass || isObject(conds);\n}\n\n\nexport function createQueryBuilder(query?: MangoQuery, path?: any): NoSqlQueryBuilder {\n return new NoSqlQueryBuilderClass(query, path) as NoSqlQueryBuilder;\n}\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA,SACIA,QAAQ,EACRC,KAAK,IAALA,MAAK,QACF,mBAAmB;AAC1B,SACIC,cAAc,EACdC,UAAU,QACP,sBAAsB;AAe7B,WAAaC,sBAAsB;EAO/B;AACJ;AACA;AACA;AACA;AACA;AACA;AACA;EACI,SAAAA,uBACIC,UAAgC,EACzBC,KAAW,EACpB;IAAA,KAhBKC,OAAO,GAAkB,CAAC,CAAC;IAAA,KAC3BC,WAAW,GAAgC,CAAC,CAAC;IAAA,KAC7CC,OAAO,GAAQ,CAAC,CAAC;IAAA,KAabH,KAAW,GAAXA,KAAW;IAElB,IAAID,UAAU,EAAE;MACZ,IAAMK,YAAwC,GAAG,IAAW;MAE5D,IAAIL,UAAU,CAACM,QAAQ,EAAE;QACrBD,YAAY,CAACE,IAAI,CAACP,UAAU,CAACM,QAAQ,CAAC;MAC1C;MACA,IAAIN,UAAU,CAACQ,KAAK,EAAE;QAClBH,YAAY,CAACG,KAAK,CAACR,UAAU,CAACQ,KAAK,CAAC;MACxC;MACA,IAAIR,UAAU,CAACS,IAAI,EAAE;QACjBJ,YAAY,CAACI,IAAI,CAACT,UAAU,CAACS,IAAI,CAAC;MACtC;MACA,IAAIT,UAAU,CAACU,IAAI,EAAE;QACjBV,UAAU,CAACU,IAAI,CAACC,OAAO,CAACC,CAAC,IAAIP,YAAY,CAACK,IAAI,CAACE,CAAC,CAAC,CAAC;MACtD;IACJ;EACJ;;EAEA;AACJ;AACA;EAFI,IAAAC,MAAA,GAAAd,sBAAA,CAAAe,SAAA;EAAAD,MAAA,CAGAE,KAAK,GAAL,SAAAA,MAAMd,KAAa,EAAEe,IAAkC,EAA8B;IACjF,IAAI,CAACC,SAAS,CAACC,MAAM,EAAE,OAAO,IAAI;IAClC,IAAMC,IAAI,GAAG,OAAOF,SAAS,CAAC,CAAC,CAAC;IAChC,IAAI,QAAQ,KAAKE,IAAI,EAAE;MACnB,IAAI,CAAClB,KAAK,GAAGgB,SAAS,CAAC,CAAC,CAAC;MACzB,IAAI,CAAC,KAAKA,SAAS,CAACC,MAAM,EAAE;QACvB,IAAI,CAACf,WAAW,CAAS,IAAI,CAACF,KAAK,CAAC,GAAGgB,SAAS,CA
AC,CAAC,CAAC;MACxD;MACA,OAAO,IAAI;IACf;IAEA,IAAI,QAAQ,KAAKE,IAAI,IAAI,CAACC,KAAK,CAACC,OAAO,CAACJ,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;MACnD,OAAO,IAAI,CAACrB,KAAK,CAACqB,SAAS,CAAC,CAAC,CAAC,CAAC;IACnC;IAEA,MAAMpB,cAAc,CAAC,KAAK,EAAE;MACxByB,IAAI,EAAEL,SAAS,CAAC,CAAC;IACrB,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA;AACA,KAJI;EAAAJ,MAAA,CAKAU,MAAM,GAAN,SAAAA,OAAOC,GAAQ,EAA8B;IACzC,IAAI,CAACC,WAAW,CAAC,QAAQ,CAAC;IAC1B,IAAMH,IAAI,GAAG,IAAI,CAACrB,KAAK;IACtB,IAAI,CAACE,WAAW,CAASmB,IAAI,CAAC,GAAGE,GAAG;IACrC,OAAO,IAAI;EACf;;EAEA;AACJ;AACA;AACA,KAHI;EAAAX,MAAA,CAIAa,EAAE,GAAF,SAAAA,GAAGF,GAAQ,EAA8B;IACrC,IAAI,CAACC,WAAW,CAAC,IAAI,CAAC;IACtB,IAAMH,IAAI,GAAG,IAAI,CAACrB,KAAK;IACtB,IAAI,CAACE,WAAW,CAASmB,IAAI,CAAC,GAAGE,GAAG;IACrC,OAAO,IAAI;EACf;;EAEA;AACJ;AACA;AACA;AACA,KAJI;EAAAX,MAAA,CAKAc,EAAE,GAAF,SAAAA,GAAGC,KAAY,EAA8B;IACzC,IAAMD,EAAE,GAAG,IAAI,CAACxB,WAAW,CAAC0B,GAAG,KAAK,IAAI,CAAC1B,WAAW,CAAC0B,GAAG,GAAG,EAAE,CAAC;IAC9D,IAAI,CAACT,KAAK,CAACC,OAAO,CAACO,KAAK,CAAC,EAAEA,KAAK,GAAG,CAACA,KAAK,CAAC;IAC1CD,EAAE,CAACG,IAAI,CAACC,KAAK,CAACJ,EAAE,EAAEC,KAAK,CAAC;IACxB,OAAO,IAAI;EACf;;EAEA;AACJ;AACA;AACA;AACA,KAJI;EAAAf,MAAA,CAKAmB,GAAG,GAAH,SAAAA,IAAIJ,KAAY,EAA8B;IAC1C,IAAMI,GAAG,GAAG,IAAI,CAAC7B,WAAW,CAAC8B,IAAI,KAAK,IAAI,CAAC9B,WAAW,CAAC8B,IAAI,GAAG,EAAE,CAAC;IACjE,IAAI,CAACb,KAAK,CAACC,OAAO,CAACO,KAAK,CAAC,EAAEA,KAAK,GAAG,CAACA,KAAK,CAAC;IAC1CI,GAAG,CAACF,IAAI,CAACC,KAAK,CAACC,GAAG,EAAEJ,KAAK,CAAC;IAC1B,OAAO,IAAI;EACf;;EAEA;AACJ;AACA;AACA;AACA;AACA,KALI;EAAAf,MAAA,CAMAqB,GAAG,GAAH,SAAAA,IAAIN,KAAY,EAA8B;IAC1C,IAAMM,GAAG,GAAG,IAAI,CAAC/B,WAAW,CAACgC,IAAI,KAAK,IAAI,CAAChC,WAAW,CAACgC,IAAI,GAAG,EAAE,CAAC;IACjE,IAAI,CAACf,KAAK,CAACC,OAAO,CAACO,KAAK,CAAC,EAAEA,KAAK,GAAG,CAACA,KAAK,CAAC;IAC1CM,GAAG,CAACJ,IAAI,CAACC,KAAK,CAACG,GAAG,EAAEN,KAAK,CAAC;IAC1B,OAAO,IAAI;EACf;;EAEA;AACJ;AACA,KAFI;EAAAf,MAAA,CAGAuB,GAAG,GAAH,SAAAA,IAAInC,KAAa,EAAEe,IAAY,EAA8B;IACzD,IAAIQ,GAAG;IACP,IAAIF,IAAI;IAER,IAAI,CAAC,KAAKL,SAAS,CAACC,MAAM,EAAE;MACxB,IAAI,CAACO,WAAW,CAAC,KAAK,CAAC;MACvBD,GAAG,GAAGP,SAAS,CAAC,CAAC,
CAAC;MAClBK,IAAI,GAAG,IAAI,CAACrB,KAAK;IACrB,CAAC,MAAM,IAAI,CAAC,KAAKgB,SAAS,CAACC,MAAM,IAAI,CAACE,KAAK,CAACC,OAAO,CAACJ,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;MAC/D,IAAI,CAACQ,WAAW,CAAC,KAAK,CAAC;MACvBD,GAAG,GAAIP,SAAS,CAASoB,KAAK,CAAC,CAAC;MAChCf,IAAI,GAAG,IAAI,CAACrB,KAAK;IACrB,CAAC,MAAM,IAAI,CAAC,KAAKgB,SAAS,CAACC,MAAM,EAAE;MAC/BM,GAAG,GAAIP,SAAS,CAASoB,KAAK,CAAC,CAAC,CAAC;MACjCf,IAAI,GAAGL,SAAS,CAAC,CAAC,CAAC;IACvB,CAAC,MAAM;MACHO,GAAG,GAAGP,SAAS,CAAC,CAAC,CAAC;MAClBK,IAAI,GAAGL,SAAS,CAAC,CAAC,CAAC;IACvB;IAEA,IAAMqB,KAAK,GAAI,IAAI,CAACnC,WAAW,CAASmB,IAAI,CAAC,KAAM,IAAI,CAACnB,WAAW,CAASmB,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;IACvFgB,KAAK,CAACC,IAAI,GAAGf,GAAG;IAChB,OAAO,IAAI;EACf;;EAEA;AACJ;AACA;AACA;AACA;AACA;AACA;AACA,KAPI;EAAAX,MAAA,CAQA2B,MAAM,GAAN,SAAAA,OAAOvC,KAAa,EAAEe,IAAY,EAA8B;IAC5D,IAAIM,IAAI;IACR,IAAIE,GAAG;IACP,IAAI,CAAC,KAAKP,SAAS,CAACC,MAAM,EAAE;MACxB,IAAI,CAACO,WAAW,CAAC,QAAQ,CAAC;MAC1BH,IAAI,GAAG,IAAI,CAACrB,KAAK;MACjBuB,GAAG,GAAG,IAAI;IACd,CAAC,MAAM,IAAI,CAAC,KAAKP,SAAS,CAACC,MAAM,EAAE;MAC/B,IAAI,SAAS,KAAK,OAAOD,SAAS,CAAC,CAAC,CAAC,EAAE;QACnC,IAAI,CAACQ,WAAW,CAAC,QAAQ,CAAC;QAC1BH,IAAI,GAAG,IAAI,CAACrB,KAAK;QACjBuB,GAAG,GAAGP,SAAS,CAAC,CAAC,CAAC;MACtB,CAAC,MAAM;QACHK,IAAI,GAAGL,SAAS,CAAC,CAAC,CAAC;QACnBO,GAAG,GAAG,IAAI;MACd;IACJ,CAAC,MAAM,IAAI,CAAC,KAAKP,SAAS,CAACC,MAAM,EAAE;MAC/BI,IAAI,GAAGL,SAAS,CAAC,CAAC,CAAC;MACnBO,GAAG,GAAGP,SAAS,CAAC,CAAC,CAAC;IACtB;IAEA,IAAMqB,KAAK,GAAI,IAAI,CAACnC,WAAW,CAASmB,IAAI,CAAC,KAAM,IAAI,CAACnB,WAAW,CAASmB,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;IACvFgB,KAAK,CAACG,OAAO,GAAGjB,GAAG;IACnB,OAAO,IAAI;EACf;;EAEA;AACJ;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,KAbI;EAAAX,MAAA,CAcA6B,SAAS,GAAT,SAAAA,UAAUzC,KAAa,EAAE0C,SAAc,EAA8B;IACjE,IAAI,IAAI,KAAK1B,SAAS,CAAC,CAAC,CAAC,EACrB,MAAMpB,cAAc,CAAC,KAAK,CAAC;IAE/B,IAAI+C,EAAE;IACN,IAAItB,IAAI;IACR,IAAIuB,QAAQ;IAEZ,IAAI,UAAU,KAAK,OAAO5B,SAAS,CAAC,CAAC,CAAC,EAAE;MACpC,IAAI,CAACQ,WAAW,CAAC,WAAW,CAAC;MAC7BH,IAAI,GAAG,IAAI,CAACrB,KAAK;MACjB2C,EAAE,GAAG3B,SAAS,CAAC,CAAC,CAAC;IACrB,CAAC,MAA
M,IAAItB,QAAQ,CAACsB,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;MAC/B,IAAI,CAACQ,WAAW,CAAC,WAAW,CAAC;MAC7BH,IAAI,GAAG,IAAI,CAACrB,KAAK;MACjB4C,QAAQ,GAAG5B,SAAS,CAAC,CAAC,CAAC;IAC3B,CAAC,MAAM,IAAI,UAAU,KAAK,OAAOA,SAAS,CAAC,CAAC,CAAC,EAAE;MAC3CK,IAAI,GAAGL,SAAS,CAAC,CAAC,CAAC;MACnB2B,EAAE,GAAG3B,SAAS,CAAC,CAAC,CAAC;IACrB,CAAC,MAAM,IAAIA,SAAS,CAAC,CAAC,CAAC,IAAItB,QAAQ,CAACsB,SAAS,CAAC,CAAC,CAAC,CAAC,EAAE;MAC/CK,IAAI,GAAGL,SAAS,CAAC,CAAC,CAAC;MACnB4B,QAAQ,GAAG5B,SAAS,CAAC,CAAC,CAAC;IAC3B,CAAC,MACG,MAAMpB,cAAc,CAAC,KAAK,CAAC;IAE/B,IAAI+C,EAAE,EAAE;MACJC,QAAQ,GAAG,IAAI9C,sBAAsB,CAAD,CAAC;MACrC6C,EAAE,CAACC,QAAQ,CAAC;MACZA,QAAQ,GAAGA,QAAQ,CAAC1C,WAAW;IACnC;IAEA,IAAMmC,KAAK,GAAI,IAAI,CAACnC,WAAW,CAASmB,IAAI,CAAC,KAAM,IAAI,CAACnB,WAAW,CAASmB,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC;IACvFgB,KAAK,CAACQ,UAAU,GAAGD,QAAQ;IAC3B,OAAO,IAAI;EACf;;EAEA;AACJ;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,KATI;EAAAhC,MAAA,CAUAH,IAAI,GAAJ,SAAAA,KAAKqC,GAAQ,EAA8B;IACvC,IAAI,CAACA,GAAG,EAAE,OAAO,IAAI;IACrB,IAAIC,GAAG;IACP,IAAM7B,IAAI,GAAG,OAAO4B,GAAG;IACvB;IACA,IAAI3B,KAAK,CAACC,OAAO,CAAC0B,GAAG,CAAC,EAAE;MACpBC,GAAG,GAAGD,GAAG,CAAC7B,MAAM;MAChB,KAAK,IAAI+B,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGF,GAAG,CAAC7B,MAAM,EAAE,EAAE+B,CAAC,EAAE;QACjCC,QAAQ,CAAC,IAAI,CAAChD,OAAO,EAAE6C,GAAG,CAACE,CAAC,CAAC,CAAC,CAAC,CAAC,EAAEF,GAAG,CAACE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;MAChD;MAEA,OAAO,IAAI;IACf;;IAEA;IACA,IAAI,CAAC,KAAKhC,SAAS,CAACC,MAAM,IAAI,QAAQ,KAAKC,IAAI,EAAE;MAC7C4B,GAAG,GAAGA,GAAG,CAACI,KAAK,CAAC,KAAK,CAAC;MACtBH,GAAG,GAAGD,GAAG,CAAC7B,MAAM;MAChB,KAAK,IAAI+B,EAAC,GAAG,CAAC,EAAEA,EAAC,GAAGD,GAAG,EAAE,EAAEC,EAAC,EAAE;QAC1B,IAAIG,KAAK,GAAGL,GAAG,CAACE,EAAC,CAAC;QAClB,IAAI,CAACG,KAAK,EAAE;QACZ,IAAMC,MAAM,GAAG,GAAG,KAAKD,KAAK,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC;QACxC,IAAIC,MAAM,KAAK,CAAC,CAAC,EAAED,KAAK,GAAGA,KAAK,CAACE,SAAS,CAAC,CAAC,CAAC;QAC7CxB,IAAI,CAAC,IAAI,CAAC5B,OAAO,EAAEkD,KAAK,EAAEC,MAAM,CAAC;MACrC;MAEA,OAAO,IAAI;IACf;;IAEA;IACA,IAAI1D,QAAQ,CAACoD,GAAG,CAAC,EAAE;MACf,IAAMQ,IAAI,GAAGC,MAAM,CAACD,IAAI,CAACR,GAAG,CAAC;MAC7BQ,IAA
I,CAAC5C,OAAO,CAACyC,KAAK,IAAItB,IAAI,CAAC,IAAI,CAAC5B,OAAO,EAAEkD,KAAK,EAAEL,GAAG,CAACK,KAAK,CAAC,CAAC,CAAC;MAC5D,OAAO,IAAI;IACf;IAEA,MAAMvD,cAAc,CAAC,KAAK,EAAE;MACxB4D,IAAI,EAAExC;IACV,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA;AACA;AACA,KALI;EAAAJ,MAAA,CAMAjB,KAAK,GAAL,SAAAA,MAAM8D,MAAW,EAA8B;IAC3C,IAAI,CAACA,MAAM,EAAE;MACT,OAAO,IAAI;IACf;IAEA,IAAI,CAACC,QAAQ,CAACD,MAAM,CAAC,EAAE;MACnB,MAAM7D,cAAc,CAAC,KAAK,EAAE;QACxB6D;MACJ,CAAC,CAAC;IACN;IAEA,IAAIA,MAAM,YAAY3D,sBAAsB,EAAE;MAC1C;;MAEA,IAAI2D,MAAM,CAACvD,WAAW,EAClBP,MAAK,CAAC,IAAI,CAACO,WAAW,EAAEuD,MAAM,CAACvD,WAAW,CAAC;MAE/C,IAAIuD,MAAM,CAACtD,OAAO,EAAE;QAChB,IAAI,CAAC,IAAI,CAACA,OAAO,EAAE,IAAI,CAACA,OAAO,GAAG,CAAC,CAAC;QACpCR,MAAK,CAAC,IAAI,CAACQ,OAAO,EAAEsD,MAAM,CAACtD,OAAO,CAAC;MACvC;MAEA,IAAIsD,MAAM,CAACxD,OAAO,EAAE;QAChB,IAAI,CAAC,IAAI,CAACA,OAAO,EAAE,IAAI,CAACA,OAAO,GAAG,CAAC,CAAC;QACpCN,MAAK,CAAC,IAAI,CAACM,OAAO,EAAEwD,MAAM,CAACxD,OAAO,CAAC;MACvC;MAEA,IAAIwD,MAAM,CAACE,SAAS,EAChB,IAAI,CAACA,SAAS,GAAGF,MAAM,CAACE,SAAS;MAErC,OAAO,IAAI;IACf;;IAEA;IACAhE,MAAK,CAAC,IAAI,CAACO,WAAW,EAAEuD,MAAM,CAAC;IAE/B,OAAO,IAAI;EACf;;EAEA;AACJ;AACA;AACA;AACA;AACA,KALI;EAAA7C,MAAA,CAMAN,IAAI,GAAJ,SAAAA,KAAKsC,QAAa,EAA8B;IAC5C,IAAIc,QAAQ,CAACd,QAAQ,CAAC,EAAE;MACpB,IAAI,CAACjD,KAAK,CAACiD,QAAQ,CAAC;IACxB;IAEA,OAAO,IAAI;EACf;;EAEA;AACJ;AACA;AACA;AACA,KAJI;EAAAhC,MAAA,CAKAY,WAAW,GAAX,SAAAA,YAAYoC,MAAW,EAAE;IACrB,IAAI,CAAC,IAAI,CAAC5D,KAAK,EAAE;MACb,MAAMH,UAAU,CAAC,KAAK,EAAE;QACpB+D;MACJ,CAAC,CAAC;IACN;EACJ,CAAC;EAAAhD,MAAA,CAEDiD,MAAM,GAAN,SAAAA,OAAA,EAGE;IACE,IAAMC,KAA0B,GAAG;MAC/BzD,QAAQ,EAAE,IAAI,CAACH;IACnB,CAAC;IAED,IAAI,IAAI,CAACD,OAAO,CAACO,IAAI,EAAE;MACnBsD,KAAK,CAACtD,IAAI,GAAG,IAAI,CAACP,OAAO,CAACO,IAAI;IAClC;IACA,IAAI,IAAI,CAACP,OAAO,CAACM,KAAK,EAAE;MACpBuD,KAAK,CAACvD,KAAK,GAAG,IAAI,CAACN,OAAO,CAACM,KAAK;IACpC;IACA,IAAI,IAAI,CAACN,OAAO,CAACQ,IAAI,EAAE;MACnBqD,KAAK,CAACrD,IAAI,GAAGsD,oBAAoB,CAAC,IAAI,CAAC9D,OAAO,CAACQ,IAAI,CAAC;IACxD;IAEA,OAAO;MACHqD,KAAK;MACLzC,IAAI,EAAE,IAAI,CAACrB;IACf,CAAC;EACL,CAAC;EAAA,OAAAF,sBAAA;AAAA;
AAGL,OAAO,SAASiE,oBAAoBA,CAChCtD,IAA8B,EACD;EAC7B,OAAO8C,MAAM,CAACS,OAAO,CAACvD,IAAI,CAAC,CAACwD,GAAG,CAAC,CAAC,CAACC,CAAC,EAAEC,CAAC,CAAC,KAAK;IACxC,IAAMC,SAAkC,GAAGD,CAAC,KAAK,CAAC,GAAG,KAAK,GAAG,MAAM;IACnE,IAAME,IAAiC,GAAG;MAAE,CAACH,CAAC,GAAGE;IAAU,CAAQ;IACnE,OAAOC,IAAI;EACf,CAAC,CAAC;AACN;;AAEA;AACA;AACA;AACA;;AAyBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,IAAMC,sBAAsB,GAAG,CAAC,OAAO,EAAE,MAAM,EAAE,SAAS,EAAE,WAAW,EAAE,SAAS,CAAC;AAC1FA,sBAAsB,CAAC5D,OAAO,CAAC,UAAUkD,MAAM,EAAE;EAC5C9D,sBAAsB,CAACe,SAAS,CAAS+C,MAAM,CAAC,GAAG,UAAUO,CAAM,EAAE;IAClE,IAAI,CAAClE,OAAO,CAAC2D,MAAM,CAAC,GAAGO,CAAC;IACxB,OAAO,IAAI;EACf,CAAC;AACL,CAAC,CAAC;;AAGF;AACA;AACA;AACA;AACA;AACA,OAAO,IAAMI,qBAAqB,GAAG,CACjC,IAAI,EAAE,KAAK,EAAE,IAAI,EAAE,KAAK,EAAE,IAAI,EAC9B,IAAI,EAAE,KAAK,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,CACtC;AACDA,qBAAqB,CAAC7D,OAAO,CAAC,UAAU8D,YAAY,EAAE;EACjD1E,sBAAsB,CAACe,SAAS,CAAS2D,YAAY,CAAC,GAAG,YAAY;IAClE,IAAInD,IAAI;IACR,IAAIE,GAAG;IAEP,IAAI,CAAC,KAAKP,SAAS,CAACC,MAAM,EAAE;MACxB,IAAI,CAACO,WAAW,CAACgD,YAAY,CAAC;MAC9BjD,GAAG,GAAGP,SAAS,CAAC,CAAC,CAAC;MAClBK,IAAI,GAAG,IAAI,CAACrB,KAAK;IACrB,CAAC,MAAM;MACHuB,GAAG,GAAGP,SAAS,CAAC,CAAC,CAAC;MAClBK,IAAI,GAAGL,SAAS,CAAC,CAAC,CAAC;IACvB;IAEA,IAAMqB,KAAK,GAAG,IAAI,CAACnC,WAAW,CAACmB,IAAI,CAAC,KAAK,IAAI,IAAI,OAAO,IAAI,CAACnB,WAAW,CAACmB,IAAI,CAAC,KAAK,QAAQ,GACvF,IAAI,CAACnB,WAAW,CAACmB,IAAI,CAAC,GACrB,IAAI,CAACnB,WAAW,CAACmB,IAAI,CAAC,GAAG,CAAC,CAAE;IAIjC,IAAImD,YAAY,KAAK,OAAO,EAAE;MAC1B,IAAIjD,GAAG,YAAYkD,MAAM,EAAE;QACvB,MAAM5E,UAAU,CAAC,MAAM,EAAE;UACrBsD,KAAK,EAAE9B,IAAI;UACXyC,KAAK,EAAE,IAAI,CAAC5D;QAChB,CAAC,CAAC;MACN;MACA,IAAI,OAAOqB,GAAG,KAAK,QAAQ,EAAE;QACzBc,KAAK,CAAC,GAAG,GAAGmC,YAAY,CAAC,GAAGjD,GAAG;MACnC,CAAC,MAAM;QACHc,KAAK,CAAC,GAAG,GAAGmC,YAAY,CAAC,GAAGjD,GAAG,CAACmD,MAAM;QACtC,IAAInD,GAAG,CAACoD,QAAQ,EAAE;UACdtC,KAAK,CAACsC,QAAQ,GAAGpD,GAAG,CAACoD,QAAQ;QACjC;MACJ;IACJ,CAAC,MAAM;MACHtC,KAAK,CAAC,GAAG,GAAGmC,YAAY,CAAC,GAAGjD,GAAG;IACnC;IAEA,OAAO,IAAI;EACf,CAAC;AACL,CAAC,CAAC;AAGF,SAASM,IAAIA,CAAC+C,IAAS,EAAEzB,KAAa,EAAE0B,K
AAU,EAAE;EAChD,IAAI1D,KAAK,CAACC,OAAO,CAACwD,IAAI,CAACnE,IAAI,CAAC,EAAE;IAC1B,MAAMb,cAAc,CAAC,KAAK,EAAE;MACxBgF,IAAI;MACJzB,KAAK;MACL0B;IACJ,CAAC,CAAC;EACN;EAEA,IAAIA,KAAK,IAAIA,KAAK,CAACC,KAAK,EAAE;IACtB,IAAMrE,IAAI,GAAGmE,IAAI,CAACnE,IAAI,KAAKmE,IAAI,CAACnE,IAAI,GAAG,CAAC,CAAC,CAAC;IAC1CA,IAAI,CAAC0C,KAAK,CAAC,GAAG;MACV2B,KAAK,EAAED,KAAK,CAACC;IACjB,CAAC;IACD;EACJ;EAEA,IAAMvD,GAAG,GAAGwD,MAAM,CAACF,KAAK,IAAI,CAAC,CAAC,CAACG,WAAW,CAAC,CAAC;EAC5C,IAAI,CAAC,0CAA0C,CAACC,IAAI,CAAC1D,GAAG,CAAC,EAAE;IACvD,IAAIJ,KAAK,CAACC,OAAO,CAACyD,KAAK,CAAC,EAAEA,KAAK,GAAG,GAAG,GAAGA,KAAK,GAAG,GAAG;IACnD,MAAMjF,cAAc,CAAC,KAAK,EAAE;MACxBuD,KAAK;MACL0B;IACJ,CAAC,CAAC;EACN;EACA;EACA,IAAMlE,CAAC,GAAGiE,IAAI,CAACnE,IAAI,KAAKmE,IAAI,CAACnE,IAAI,GAAG,CAAC,CAAC,CAAC;EACvC,IAAMyE,QAAQ,GAAGL,KAAK,CAACM,QAAQ,CAAC,CAAC,CAC5BC,OAAO,CAAC,KAAK,EAAE,GAAG,CAAC,CACnBA,OAAO,CAAC,WAAW,EAAE,GAAG,CAAC,CACzBA,OAAO,CAAC,MAAM,EAAE,IAAI,CAAC,CACrBA,OAAO,CAAC,YAAY,EAAE,IAAI,CAAC;EAChCzE,CAAC,CAACwC,KAAK,CAAC,GAAGkC,QAAQ,CAACH,QAAQ,EAAE,EAAE,CAAC;AACrC;AAEA,SAASjC,QAAQA,CAAC2B,IAAS,EAAEzB,KAAa,EAAE0B,KAAU,EAAE;EACpDD,IAAI,CAACnE,IAAI,GAAGmE,IAAI,CAACnE,IAAI,IAAI,EAAE;EAC3B,IAAI,CAACU,KAAK,CAACC,OAAO,CAACwD,IAAI,CAACnE,IAAI,CAAC,EAAE;IAC3B,MAAMb,cAAc,CAAC,KAAK,EAAE;MACxBgF,IAAI;MACJzB,KAAK;MACL0B;IACJ,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA;AACA;EACID,IAAI,CAACnE,IAAI,CAACoB,IAAI,CAAC,CAACsB,KAAK,EAAE0B,KAAK,CAAC,CAAC;AAClC;;AAGA;AACA;AACA;AACA,OAAO,SAASnB,QAAQA,CAACrB,KAAU,EAAW;EAC1C,OAAOA,KAAK,YAAYvC,sBAAsB,IAAIJ,QAAQ,CAAC2C,KAAK,CAAC;AACrE;AAGA,OAAO,SAASiD,kBAAkBA,CAAUxB,KAA2B,EAAEzC,IAAU,EAA8B;EAC7G,OAAO,IAAIvB,sBAAsB,CAACgE,KAAK,EAAEzC,IAAI,CAAC;AAClD","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-couchdb/couchdb-helper.js b/dist/esm/plugins/replication-couchdb/couchdb-helper.js deleted file mode 100644 index 241ddd5f87c..00000000000 --- a/dist/esm/plugins/replication-couchdb/couchdb-helper.js +++ /dev/null @@ -1,65 +0,0 @@ -import { b64EncodeUnicode, flatClone } from 
"../../plugins/utils/index.js"; -export var COUCHDB_NEW_REPLICATION_PLUGIN_IDENTITY_PREFIX = 'couchdb'; -export function mergeUrlQueryParams(params) { - return Object.entries(params).filter(([_k, value]) => typeof value !== 'undefined').map(([key, value]) => key + '=' + value).join('&'); -} -export function couchDBDocToRxDocData(primaryPath, couchDocData) { - var doc = couchSwapIdToPrimary(primaryPath, couchDocData); - - // ensure deleted flag is set. - doc._deleted = !!doc._deleted; - delete doc._rev; - return doc; -} -export function couchSwapIdToPrimary(primaryKey, docData) { - if (primaryKey === '_id' || docData[primaryKey]) { - return flatClone(docData); - } - docData = flatClone(docData); - docData[primaryKey] = docData._id; - delete docData._id; - return docData; -} - -/** - * Swaps the primaryKey of the document - * to the _id property. - */ -export function couchSwapPrimaryToId(primaryKey, docData) { - // optimisation shortcut - if (primaryKey === '_id') { - return docData; - } - var idValue = docData[primaryKey]; - var ret = flatClone(docData); - delete ret[primaryKey]; - ret._id = idValue; - return ret; -} -export function getDefaultFetch() { - if (typeof window === 'object' && window['fetch']) { - /** - * @link https://stackoverflow.com/a/47180009/3443137 - */ - return window.fetch.bind(window); - } else { - return fetch; - } -} - -/** - * Returns a fetch handler that contains the username and password - * in the Authorization header - */ -export function getFetchWithCouchDBAuthorization(username, password) { - var ret = (url, options) => { - options = Object.assign({}, options); - if (!options.headers) { - options.headers = {}; - } - options.headers['Authorization'] = 'Basic ' + b64EncodeUnicode(username + ':' + password); - return fetch(url, options); - }; - return ret; -} -//# sourceMappingURL=couchdb-helper.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication-couchdb/couchdb-helper.js.map 
b/dist/esm/plugins/replication-couchdb/couchdb-helper.js.map deleted file mode 100644 index f1323dc55b8..00000000000 --- a/dist/esm/plugins/replication-couchdb/couchdb-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"couchdb-helper.js","names":["b64EncodeUnicode","flatClone","COUCHDB_NEW_REPLICATION_PLUGIN_IDENTITY_PREFIX","mergeUrlQueryParams","params","Object","entries","filter","_k","value","map","key","join","couchDBDocToRxDocData","primaryPath","couchDocData","doc","couchSwapIdToPrimary","_deleted","_rev","primaryKey","docData","_id","couchSwapPrimaryToId","idValue","ret","getDefaultFetch","window","fetch","bind","getFetchWithCouchDBAuthorization","username","password","url","options","assign","headers"],"sources":["../../../../src/plugins/replication-couchdb/couchdb-helper.ts"],"sourcesContent":["import type {\n RxDocumentData,\n StringKeys,\n WithDeleted\n} from '../../types/index.d.ts';\nimport { b64EncodeUnicode, flatClone } from '../../plugins/utils/index.ts';\nimport { URLQueryParams } from './couchdb-types.ts';\n\n\nexport const COUCHDB_NEW_REPLICATION_PLUGIN_IDENTITY_PREFIX = 'couchdb';\n\n\nexport function mergeUrlQueryParams(\n params: URLQueryParams\n): string {\n return Object.entries(params)\n .filter(([_k, value]) => typeof value !== 'undefined')\n .map(([key, value]) => key + '=' + value)\n .join('&');\n}\n\nexport function couchDBDocToRxDocData(\n primaryPath: string,\n couchDocData: any\n): WithDeleted {\n const doc = couchSwapIdToPrimary(primaryPath as any, couchDocData);\n\n // ensure deleted flag is set.\n doc._deleted = !!doc._deleted;\n\n delete doc._rev;\n\n return doc;\n}\n\n\nexport function couchSwapIdToPrimary(\n primaryKey: StringKeys>,\n docData: any\n): any {\n if (primaryKey === '_id' || docData[primaryKey]) {\n return flatClone(docData);\n }\n docData = flatClone(docData);\n docData[primaryKey] = docData._id;\n delete docData._id;\n\n return docData;\n}\n\n/**\n * Swaps the primaryKey of the document\n * to the 
_id property.\n */\nexport function couchSwapPrimaryToId(\n primaryKey: StringKeys>,\n docData: any\n): RxDocType & { _id: string; } {\n // optimisation shortcut\n if (primaryKey === '_id') {\n return docData;\n }\n\n const idValue = docData[primaryKey];\n const ret = flatClone(docData);\n delete ret[primaryKey];\n ret._id = idValue;\n return ret;\n}\n\n\nexport function getDefaultFetch() {\n if (\n typeof window === 'object' &&\n (window as any)['fetch']\n ) {\n /**\n * @link https://stackoverflow.com/a/47180009/3443137\n */\n return window.fetch.bind(window);\n } else {\n return fetch;\n }\n}\n\n/**\n * Returns a fetch handler that contains the username and password\n * in the Authorization header\n */\nexport function getFetchWithCouchDBAuthorization(username: string, password: string): typeof fetch {\n const ret: typeof fetch = (url, options) => {\n options = Object.assign({}, options);\n if (!options.headers) {\n options.headers = {};\n }\n (options as any).headers['Authorization'] = 'Basic ' + b64EncodeUnicode(username + ':' + password);\n return fetch(url as any, options);\n };\n return 
ret;\n}\n"],"mappings":"AAKA,SAASA,gBAAgB,EAAEC,SAAS,QAAQ,8BAA8B;AAI1E,OAAO,IAAMC,8CAA8C,GAAG,SAAS;AAGvE,OAAO,SAASC,mBAAmBA,CAC/BC,MAAsB,EAChB;EACN,OAAOC,MAAM,CAACC,OAAO,CAACF,MAAM,CAAC,CACxBG,MAAM,CAAC,CAAC,CAACC,EAAE,EAAEC,KAAK,CAAC,KAAK,OAAOA,KAAK,KAAK,WAAW,CAAC,CACrDC,GAAG,CAAC,CAAC,CAACC,GAAG,EAAEF,KAAK,CAAC,KAAKE,GAAG,GAAG,GAAG,GAAGF,KAAK,CAAC,CACxCG,IAAI,CAAC,GAAG,CAAC;AAClB;AAEA,OAAO,SAASC,qBAAqBA,CACjCC,WAAmB,EACnBC,YAAiB,EACK;EACtB,IAAMC,GAAG,GAAGC,oBAAoB,CAACH,WAAW,EAASC,YAAY,CAAC;;EAElE;EACAC,GAAG,CAACE,QAAQ,GAAG,CAAC,CAACF,GAAG,CAACE,QAAQ;EAE7B,OAAOF,GAAG,CAACG,IAAI;EAEf,OAAOH,GAAG;AACd;AAGA,OAAO,SAASC,oBAAoBA,CAChCG,UAAyC,EACzCC,OAAY,EACT;EACH,IAAID,UAAU,KAAK,KAAK,IAAIC,OAAO,CAACD,UAAU,CAAC,EAAE;IAC7C,OAAOnB,SAAS,CAACoB,OAAO,CAAC;EAC7B;EACAA,OAAO,GAAGpB,SAAS,CAACoB,OAAO,CAAC;EAC5BA,OAAO,CAACD,UAAU,CAAC,GAAGC,OAAO,CAACC,GAAG;EACjC,OAAOD,OAAO,CAACC,GAAG;EAElB,OAAOD,OAAO;AAClB;;AAEA;AACA;AACA;AACA;AACA,OAAO,SAASE,oBAAoBA,CAChCH,UAAiD,EACjDC,OAAY,EACgB;EAC5B;EACA,IAAID,UAAU,KAAK,KAAK,EAAE;IACtB,OAAOC,OAAO;EAClB;EAEA,IAAMG,OAAO,GAAGH,OAAO,CAACD,UAAU,CAAC;EACnC,IAAMK,GAAG,GAAGxB,SAAS,CAACoB,OAAO,CAAC;EAC9B,OAAOI,GAAG,CAACL,UAAU,CAAC;EACtBK,GAAG,CAACH,GAAG,GAAGE,OAAO;EACjB,OAAOC,GAAG;AACd;AAGA,OAAO,SAASC,eAAeA,CAAA,EAAG;EAC9B,IACI,OAAOC,MAAM,KAAK,QAAQ,IACzBA,MAAM,CAAS,OAAO,CAAC,EAC1B;IACE;AACR;AACA;IACQ,OAAOA,MAAM,CAACC,KAAK,CAACC,IAAI,CAACF,MAAM,CAAC;EACpC,CAAC,MAAM;IACH,OAAOC,KAAK;EAChB;AACJ;;AAEA;AACA;AACA;AACA;AACA,OAAO,SAASE,gCAAgCA,CAACC,QAAgB,EAAEC,QAAgB,EAAgB;EAC/F,IAAMP,GAAiB,GAAGA,CAACQ,GAAG,EAAEC,OAAO,KAAK;IACxCA,OAAO,GAAG7B,MAAM,CAAC8B,MAAM,CAAC,CAAC,CAAC,EAAED,OAAO,CAAC;IACpC,IAAI,CAACA,OAAO,CAACE,OAAO,EAAE;MAClBF,OAAO,CAACE,OAAO,GAAG,CAAC,CAAC;IACxB;IACCF,OAAO,CAASE,OAAO,CAAC,eAAe,CAAC,GAAG,QAAQ,GAAGpC,gBAAgB,CAAC+B,QAAQ,GAAG,GAAG,GAAGC,QAAQ,CAAC;IAClG,OAAOJ,KAAK,CAACK,GAAG,EAASC,OAAO,CAAC;EACrC,CAAC;EACD,OAAOT,GAAG;AACd","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-couchdb/couchdb-types.js 
b/dist/esm/plugins/replication-couchdb/couchdb-types.js deleted file mode 100644 index cc2f5eb3fda..00000000000 --- a/dist/esm/plugins/replication-couchdb/couchdb-types.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=couchdb-types.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication-couchdb/couchdb-types.js.map b/dist/esm/plugins/replication-couchdb/couchdb-types.js.map deleted file mode 100644 index fc2fde9f850..00000000000 --- a/dist/esm/plugins/replication-couchdb/couchdb-types.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"couchdb-types.js","names":[],"sources":["../../../../src/plugins/replication-couchdb/couchdb-types.ts"],"sourcesContent":["import type {\n ById,\n ReplicationOptions,\n ReplicationPullOptions,\n ReplicationPushOptions\n} from '../../types/index.d.ts';\n\nexport type CouchDBCheckpointType = {\n sequence: number;\n};\n\nexport type FetchMethodType = typeof fetch;\nexport type SyncOptionsCouchDB = Omit<\n ReplicationOptions,\n 'pull' | 'push'\n> & {\n url: string;\n /**\n * Here you can set a custom fetch method\n * to use http headers or credentials when doing requests.\n */\n fetch?: FetchMethodType;\n pull?: Omit, 'handler' | 'stream$'> & {\n /**\n * Heartbeat time in milliseconds\n * for the long polling of the changestream.\n */\n heartbeat?: number;\n };\n push?: Omit, 'handler'>;\n};\n\n\nexport type URLQueryParams = ById;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-couchdb/index.js b/dist/esm/plugins/replication-couchdb/index.js deleted file mode 100644 index 303b15f91df..00000000000 --- a/dist/esm/plugins/replication-couchdb/index.js +++ /dev/null @@ -1,257 +0,0 @@ -import _inheritsLoose from "@babel/runtime/helpers/inheritsLoose"; -/** - * This plugin can be used to sync collections with a remote CouchDB endpoint. 
- */ -import { ensureNotFalsy, errorToPlainJson, flatClone, getFromMapOrThrow, now, promiseWait } from "../../plugins/utils/index.js"; -import { RxDBLeaderElectionPlugin } from "../leader-election/index.js"; -import { RxReplicationState, startReplicationOnLeaderShip } from "../replication/index.js"; -import { addRxPlugin, newRxError } from "../../index.js"; -import { Subject } from 'rxjs'; -import { couchDBDocToRxDocData, mergeUrlQueryParams, couchSwapPrimaryToId, getDefaultFetch } from "./couchdb-helper.js"; -import { awaitRetry } from "../replication/replication-helper.js"; -export * from "./couchdb-helper.js"; -export * from "./couchdb-types.js"; -export var RxCouchDBReplicationState = /*#__PURE__*/function (_RxReplicationState) { - function RxCouchDBReplicationState(url, fetch, replicationIdentifier, collection, pull, push, live = true, retryTime = 1000 * 5, autoStart = true) { - var _this; - _this = _RxReplicationState.call(this, replicationIdentifier, collection, '_deleted', pull, push, live, retryTime, autoStart) || this; - _this.url = url; - _this.fetch = fetch; - _this.replicationIdentifier = replicationIdentifier; - _this.collection = collection; - _this.pull = pull; - _this.push = push; - _this.live = live; - _this.retryTime = retryTime; - _this.autoStart = autoStart; - return _this; - } - _inheritsLoose(RxCouchDBReplicationState, _RxReplicationState); - return RxCouchDBReplicationState; -}(RxReplicationState); -export function replicateCouchDB(options) { - var collection = options.collection; - var conflictHandler = collection.conflictHandler; - addRxPlugin(RxDBLeaderElectionPlugin); - var primaryPath = options.collection.schema.primaryPath; - if (!options.url.endsWith('/')) { - throw newRxError('RC_COUCHDB_1', { - args: { - collection: options.collection.name, - url: options.url - } - }); - } - options = flatClone(options); - if (!options.url.endsWith('/')) { - options.url = options.url + '/'; - } - options.waitForLeadership = typeof 
options.waitForLeadership === 'undefined' ? true : options.waitForLeadership; - var pullStream$ = new Subject(); - var replicationPrimitivesPull; - if (options.pull) { - replicationPrimitivesPull = { - async handler(lastPulledCheckpoint, batchSize) { - /** - * @link https://docs.couchdb.org/en/3.2.2-docs/api/database/changes.html - */ - var url = options.url + '_changes?' + mergeUrlQueryParams({ - style: 'all_docs', - feed: 'normal', - include_docs: true, - since: lastPulledCheckpoint ? lastPulledCheckpoint.sequence : 0, - heartbeat: options.pull && options.pull.heartbeat ? options.pull.heartbeat : 60000, - limit: batchSize, - seq_interval: batchSize - }); - var response = await replicationState.fetch(url); - var jsonResponse = await response.json(); - if (!jsonResponse.results) { - throw newRxError('RC_COUCHDB_2', { - args: { - jsonResponse - } - }); - } - var documents = jsonResponse.results.map(row => couchDBDocToRxDocData(collection.schema.primaryPath, ensureNotFalsy(row.doc))); - return { - documents, - checkpoint: { - sequence: jsonResponse.last_seq - } - }; - }, - batchSize: ensureNotFalsy(options.pull).batchSize, - modifier: ensureNotFalsy(options.pull).modifier, - stream$: pullStream$.asObservable(), - initialCheckpoint: options.pull.initialCheckpoint - }; - } - var replicationPrimitivesPush; - if (options.push) { - replicationPrimitivesPush = { - async handler(rows) { - var conflicts = []; - var pushRowsById = new Map(); - rows.forEach(row => { - var id = row.newDocumentState[primaryPath]; - pushRowsById.set(id, row); - }); - - /** - * First get the current master state from the remote - * to check for conflicts - */ - var docsByIdResponse = await replicationState.fetch(options.url + '_all_docs?' 
+ mergeUrlQueryParams({}), { - method: 'POST', - headers: { - 'content-type': 'application/json' - }, - body: JSON.stringify({ - keys: rows.map(row => row.newDocumentState[primaryPath]), - include_docs: true, - deleted: 'ok' - }) - }); - var docsByIdRows = await docsByIdResponse.json(); - var nonConflictRows = []; - var remoteRevById = new Map(); - await Promise.all(docsByIdRows.rows.map(async row => { - if (!row.doc) { - nonConflictRows.push(getFromMapOrThrow(pushRowsById, row.key)); - return; - } - var realMasterState = couchDBDocToRxDocData(primaryPath, row.doc); - var pushRow = getFromMapOrThrow(pushRowsById, row.id); - if (pushRow.assumedMasterState && (await conflictHandler({ - realMasterState, - newDocumentState: pushRow.assumedMasterState - }, 'couchdb-push-1')).isEqual) { - remoteRevById.set(row.id, row.doc._rev); - nonConflictRows.push(pushRow); - } else { - conflicts.push(realMasterState); - } - })); - - /** - * @link https://docs.couchdb.org/en/3.2.2-docs/api/database/bulk-api.html#db-bulk-docs - */ - var url = options.url + '_bulk_docs?' 
+ mergeUrlQueryParams({}); - var body = { - docs: nonConflictRows.map(row => { - var docId = row.newDocumentState[primaryPath]; - var sendDoc = flatClone(row.newDocumentState); - if (remoteRevById.has(docId)) { - sendDoc._rev = getFromMapOrThrow(remoteRevById, docId); - } - return couchSwapPrimaryToId(collection.schema.primaryPath, sendDoc); - }) - }; - var response = await replicationState.fetch(url, { - method: 'POST', - headers: { - 'content-type': 'application/json' - }, - body: JSON.stringify(body) - }); - var responseJson = await response.json(); - - // get conflicting writes - var conflictAgainIds = []; - responseJson.forEach(writeResultRow => { - var isConflict = writeResultRow.error === 'conflict'; - if (!writeResultRow.ok && !isConflict) { - throw newRxError('SNH', { - args: { - writeResultRow - } - }); - } - if (isConflict) { - conflictAgainIds.push(writeResultRow.id); - } - }); - if (conflictAgainIds.length === 0) { - return conflicts; - } - var getConflictDocsUrl = options.url + '_all_docs?' + mergeUrlQueryParams({ - include_docs: true, - keys: JSON.stringify(conflictAgainIds) - }); - var conflictResponse = await replicationState.fetch(getConflictDocsUrl); - var conflictResponseJson = await conflictResponse.json(); - conflictResponseJson.rows.forEach(conflictAgainRow => { - conflicts.push(couchDBDocToRxDocData(collection.schema.primaryPath, conflictAgainRow.doc)); - }); - return conflicts; - }, - batchSize: options.push.batchSize, - modifier: options.push.modifier, - initialCheckpoint: options.push.initialCheckpoint - }; - } - var replicationState = new RxCouchDBReplicationState(options.url, options.fetch ? 
options.fetch : getDefaultFetch(), options.replicationIdentifier, collection, replicationPrimitivesPull, replicationPrimitivesPush, options.live, options.retryTime, options.autoStart); - - /** - * Use long polling to get live changes for the pull.stream$ - */ - if (options.live && options.pull) { - var startBefore = replicationState.start.bind(replicationState); - replicationState.start = () => { - var since = 'now'; - var batchSize = options.pull && options.pull.batchSize ? options.pull.batchSize : 20; - (async () => { - var lastRequestStartTime = now(); - while (!replicationState.isStopped()) { - var _url = options.url + '_changes?' + mergeUrlQueryParams({ - style: 'all_docs', - feed: 'longpoll', - since, - include_docs: true, - heartbeat: options.pull && options.pull.heartbeat ? options.pull.heartbeat : 60000, - limit: batchSize, - seq_interval: batchSize - }); - var jsonResponse = void 0; - try { - lastRequestStartTime = now(); - jsonResponse = await (await replicationState.fetch(_url)).json(); - } catch (err) { - replicationState.subjects.error.next(newRxError('RC_STREAM', { - args: { - url: _url - }, - error: errorToPlainJson(err) - })); - if (lastRequestStartTime < now() - replicationState.retryTime) { - /** - * Last request start was long ago, - * so we directly retry. - * This mostly happens on timeouts - * which are normal behavior for long polling requests. - */ - await promiseWait(0); - } else { - // await next tick here otherwise we could go in to a 100% CPU blocking cycle. 
- await awaitRetry(collection, replicationState.retryTime); - } - continue; - } - var documents = jsonResponse.results.map(row => couchDBDocToRxDocData(collection.schema.primaryPath, ensureNotFalsy(row.doc))); - since = jsonResponse.last_seq; - pullStream$.next({ - documents, - checkpoint: { - sequence: jsonResponse.last_seq - } - }); - } - })(); - return startBefore(); - }; - } - startReplicationOnLeaderShip(options.waitForLeadership, replicationState); - return replicationState; -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication-couchdb/index.js.map b/dist/esm/plugins/replication-couchdb/index.js.map deleted file mode 100644 index c8856004615..00000000000 --- a/dist/esm/plugins/replication-couchdb/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["ensureNotFalsy","errorToPlainJson","flatClone","getFromMapOrThrow","now","promiseWait","RxDBLeaderElectionPlugin","RxReplicationState","startReplicationOnLeaderShip","addRxPlugin","newRxError","Subject","couchDBDocToRxDocData","mergeUrlQueryParams","couchSwapPrimaryToId","getDefaultFetch","awaitRetry","RxCouchDBReplicationState","_RxReplicationState","url","fetch","replicationIdentifier","collection","pull","push","live","retryTime","autoStart","_this","call","_inheritsLoose","replicateCouchDB","options","conflictHandler","primaryPath","schema","endsWith","args","name","waitForLeadership","pullStream$","replicationPrimitivesPull","handler","lastPulledCheckpoint","batchSize","style","feed","include_docs","since","sequence","heartbeat","limit","seq_interval","response","replicationState","jsonResponse","json","results","documents","map","row","doc","checkpoint","last_seq","modifier","stream$","asObservable","initialCheckpoint","replicationPrimitivesPush","rows","conflicts","pushRowsById","Map","forEach","id","newDocumentState","set","docsByIdResponse","method","headers","body","JSON","stringify","keys","deleted","docsByIdRows","nonConf
lictRows","remoteRevById","Promise","all","key","realMasterState","pushRow","assumedMasterState","isEqual","_rev","docs","docId","sendDoc","has","responseJson","conflictAgainIds","writeResultRow","isConflict","error","ok","length","getConflictDocsUrl","conflictResponse","conflictResponseJson","conflictAgainRow","startBefore","start","bind","lastRequestStartTime","isStopped","err","subjects","next"],"sources":["../../../../src/plugins/replication-couchdb/index.ts"],"sourcesContent":["/**\n * This plugin can be used to sync collections with a remote CouchDB endpoint.\n */\nimport {\n ensureNotFalsy,\n errorToPlainJson,\n flatClone,\n getFromMapOrThrow,\n now,\n promiseWait\n} from '../../plugins/utils/index.ts';\n\nimport { RxDBLeaderElectionPlugin } from '../leader-election/index.ts';\nimport type {\n RxCollection,\n ReplicationPullOptions,\n ReplicationPushOptions,\n RxReplicationWriteToMasterRow,\n RxReplicationPullStreamItem,\n CouchdbChangesResult,\n CouchBulkDocResultRow,\n CouchAllDocsResponse,\n RxConflictHandler\n} from '../../types/index.d.ts';\nimport {\n RxReplicationState,\n startReplicationOnLeaderShip\n} from '../replication/index.ts';\nimport {\n addRxPlugin,\n newRxError,\n WithDeleted\n} from '../../index.ts';\n\nimport { Subject } from 'rxjs';\nimport type {\n CouchDBCheckpointType,\n FetchMethodType,\n SyncOptionsCouchDB\n} from './couchdb-types.ts';\nimport {\n couchDBDocToRxDocData,\n mergeUrlQueryParams,\n couchSwapPrimaryToId,\n getDefaultFetch\n} from './couchdb-helper.ts';\nimport { awaitRetry } from '../replication/replication-helper.ts';\n\nexport * from './couchdb-helper.ts';\nexport * from './couchdb-types.ts';\n\nexport class RxCouchDBReplicationState extends RxReplicationState {\n constructor(\n public readonly url: string,\n public fetch: FetchMethodType,\n public readonly replicationIdentifier: string,\n public readonly collection: RxCollection,\n public readonly pull?: ReplicationPullOptions,\n public readonly push?: 
ReplicationPushOptions,\n public readonly live: boolean = true,\n public retryTime: number = 1000 * 5,\n public autoStart: boolean = true\n ) {\n super(\n replicationIdentifier,\n collection,\n '_deleted',\n pull,\n push,\n live,\n retryTime,\n autoStart\n );\n }\n}\n\nexport function replicateCouchDB(\n options: SyncOptionsCouchDB\n) {\n const collection = options.collection;\n const conflictHandler: RxConflictHandler = collection.conflictHandler;\n addRxPlugin(RxDBLeaderElectionPlugin);\n const primaryPath = options.collection.schema.primaryPath;\n\n if (!options.url.endsWith('/')) {\n throw newRxError('RC_COUCHDB_1', {\n args: {\n collection: options.collection.name,\n url: options.url\n }\n });\n }\n\n options = flatClone(options);\n if (!options.url.endsWith('/')) {\n options.url = options.url + '/';\n }\n options.waitForLeadership = typeof options.waitForLeadership === 'undefined' ? true : options.waitForLeadership;\n const pullStream$: Subject> = new Subject();\n let replicationPrimitivesPull: ReplicationPullOptions | undefined;\n if (options.pull) {\n replicationPrimitivesPull = {\n async handler(\n lastPulledCheckpoint: CouchDBCheckpointType | undefined,\n batchSize: number\n ) {\n /**\n * @link https://docs.couchdb.org/en/3.2.2-docs/api/database/changes.html\n */\n const url = options.url + '_changes?' + mergeUrlQueryParams({\n style: 'all_docs',\n feed: 'normal',\n include_docs: true,\n since: lastPulledCheckpoint ? lastPulledCheckpoint.sequence : 0,\n heartbeat: options.pull && options.pull.heartbeat ? 
options.pull.heartbeat : 60000,\n limit: batchSize,\n seq_interval: batchSize\n });\n\n const response = await replicationState.fetch(url);\n const jsonResponse: CouchdbChangesResult = await response.json();\n if (!jsonResponse.results) {\n throw newRxError('RC_COUCHDB_2', {\n args: { jsonResponse }\n });\n }\n const documents: WithDeleted[] = jsonResponse.results\n .map(row => couchDBDocToRxDocData(collection.schema.primaryPath, ensureNotFalsy(row.doc)));\n return {\n documents,\n checkpoint: {\n sequence: jsonResponse.last_seq\n }\n };\n },\n batchSize: ensureNotFalsy(options.pull).batchSize,\n modifier: ensureNotFalsy(options.pull).modifier,\n stream$: pullStream$.asObservable(),\n initialCheckpoint: options.pull.initialCheckpoint\n };\n }\n\n let replicationPrimitivesPush: ReplicationPushOptions | undefined;\n if (options.push) {\n replicationPrimitivesPush = {\n async handler(\n rows: RxReplicationWriteToMasterRow[]\n ) {\n const conflicts: WithDeleted[] = [];\n const pushRowsById = new Map>();\n rows.forEach(row => {\n const id = (row.newDocumentState as any)[primaryPath];\n pushRowsById.set(id, row);\n });\n\n /**\n * First get the current master state from the remote\n * to check for conflicts\n */\n const docsByIdResponse = await replicationState.fetch(\n options.url + '_all_docs?' 
+ mergeUrlQueryParams({}),\n {\n method: 'POST',\n headers: {\n 'content-type': 'application/json'\n },\n body: JSON.stringify({\n keys: rows.map(row => (row.newDocumentState as any)[primaryPath]),\n include_docs: true,\n deleted: 'ok'\n })\n }\n );\n const docsByIdRows: CouchAllDocsResponse = await docsByIdResponse.json();\n const nonConflictRows: typeof rows = [];\n const remoteRevById = new Map();\n await Promise.all(\n docsByIdRows.rows.map(async (row) => {\n if (!row.doc) {\n nonConflictRows.push(getFromMapOrThrow(pushRowsById, row.key));\n return;\n }\n const realMasterState: WithDeleted = couchDBDocToRxDocData(primaryPath, row.doc);\n const pushRow = getFromMapOrThrow(pushRowsById, row.id);\n\n if (\n pushRow.assumedMasterState &&\n (await conflictHandler({\n realMasterState,\n newDocumentState: pushRow.assumedMasterState\n }, 'couchdb-push-1')).isEqual\n ) {\n remoteRevById.set(row.id, row.doc._rev);\n nonConflictRows.push(pushRow);\n } else {\n conflicts.push(realMasterState);\n }\n })\n );\n\n /**\n * @link https://docs.couchdb.org/en/3.2.2-docs/api/database/bulk-api.html#db-bulk-docs\n */\n const url = options.url + '_bulk_docs?' 
+ mergeUrlQueryParams({});\n const body = {\n docs: nonConflictRows.map(row => {\n const docId = (row.newDocumentState as any)[primaryPath];\n const sendDoc = flatClone(row.newDocumentState);\n if (remoteRevById.has(docId)) {\n (sendDoc as any)._rev = getFromMapOrThrow(remoteRevById, docId);\n }\n return couchSwapPrimaryToId(collection.schema.primaryPath, sendDoc);\n })\n };\n\n const response = await replicationState.fetch(\n url,\n {\n method: 'POST',\n headers: {\n 'content-type': 'application/json'\n },\n body: JSON.stringify(body)\n }\n );\n const responseJson: CouchBulkDocResultRow[] = await response.json();\n\n // get conflicting writes\n const conflictAgainIds: string[] = [];\n responseJson.forEach(writeResultRow => {\n const isConflict = writeResultRow.error === 'conflict';\n if (!writeResultRow.ok && !isConflict) {\n throw newRxError('SNH', { args: { writeResultRow } });\n }\n if (isConflict) {\n conflictAgainIds.push(writeResultRow.id);\n }\n });\n\n if (conflictAgainIds.length === 0) {\n return conflicts;\n }\n\n const getConflictDocsUrl = options.url + '_all_docs?' + mergeUrlQueryParams({\n include_docs: true,\n keys: JSON.stringify(conflictAgainIds)\n });\n const conflictResponse = await replicationState.fetch(getConflictDocsUrl);\n const conflictResponseJson: CouchAllDocsResponse = await conflictResponse.json();\n conflictResponseJson.rows.forEach(conflictAgainRow => {\n conflicts.push(couchDBDocToRxDocData(collection.schema.primaryPath, conflictAgainRow.doc));\n });\n\n return conflicts;\n },\n batchSize: options.push.batchSize,\n modifier: options.push.modifier,\n initialCheckpoint: options.push.initialCheckpoint\n };\n }\n\n const replicationState = new RxCouchDBReplicationState(\n options.url,\n options.fetch ? 
options.fetch : getDefaultFetch(),\n options.replicationIdentifier,\n collection,\n replicationPrimitivesPull,\n replicationPrimitivesPush,\n options.live,\n options.retryTime,\n options.autoStart\n );\n\n /**\n * Use long polling to get live changes for the pull.stream$\n */\n if (options.live && options.pull) {\n const startBefore = replicationState.start.bind(replicationState);\n replicationState.start = () => {\n let since: string | number = 'now';\n const batchSize = options.pull && options.pull.batchSize ? options.pull.batchSize : 20;\n\n (async () => {\n let lastRequestStartTime = now();\n while (!replicationState.isStopped()) {\n const url = options.url + '_changes?' + mergeUrlQueryParams({\n style: 'all_docs',\n feed: 'longpoll',\n since,\n include_docs: true,\n heartbeat: options.pull && options.pull.heartbeat ? options.pull.heartbeat : 60000,\n limit: batchSize,\n seq_interval: batchSize\n });\n\n let jsonResponse: CouchdbChangesResult;\n try {\n lastRequestStartTime = now();\n jsonResponse = await (await replicationState.fetch(url)).json();\n } catch (err: any) {\n replicationState.subjects.error.next(\n newRxError('RC_STREAM', {\n args: { url },\n error: errorToPlainJson(err)\n })\n );\n\n if (lastRequestStartTime < (now() - replicationState.retryTime)) {\n /**\n * Last request start was long ago,\n * so we directly retry.\n * This mostly happens on timeouts\n * which are normal behavior for long polling requests.\n */\n await promiseWait(0);\n } else {\n // await next tick here otherwise we could go in to a 100% CPU blocking cycle.\n await awaitRetry(\n collection,\n replicationState.retryTime\n );\n }\n continue;\n }\n const documents: WithDeleted[] = jsonResponse.results\n .map(row => couchDBDocToRxDocData(collection.schema.primaryPath, ensureNotFalsy(row.doc)));\n since = jsonResponse.last_seq;\n\n pullStream$.next({\n documents,\n checkpoint: {\n sequence: jsonResponse.last_seq\n }\n });\n }\n })();\n return startBefore();\n };\n }\n\n 
startReplicationOnLeaderShip(options.waitForLeadership, replicationState);\n\n return replicationState;\n}\n"],"mappings":";AAAA;AACA;AACA;AACA,SACIA,cAAc,EACdC,gBAAgB,EAChBC,SAAS,EACTC,iBAAiB,EACjBC,GAAG,EACHC,WAAW,QACR,8BAA8B;AAErC,SAASC,wBAAwB,QAAQ,6BAA6B;AAYtE,SACIC,kBAAkB,EAClBC,4BAA4B,QACzB,yBAAyB;AAChC,SACIC,WAAW,EACXC,UAAU,QAEP,gBAAgB;AAEvB,SAASC,OAAO,QAAQ,MAAM;AAM9B,SACIC,qBAAqB,EACrBC,mBAAmB,EACnBC,oBAAoB,EACpBC,eAAe,QACZ,qBAAqB;AAC5B,SAASC,UAAU,QAAQ,sCAAsC;AAEjE,cAAc,qBAAqB;AACnC,cAAc,oBAAoB;AAElC,WAAaC,yBAAyB,0BAAAC,mBAAA;EAClC,SAAAD,0BACoBE,GAAW,EACpBC,KAAsB,EACbC,qBAA6B,EAC7BC,UAAmC,EACnCC,IAA+D,EAC/DC,IAAwC,EACxCC,IAAa,GAAG,IAAI,EAC7BC,SAAiB,GAAG,IAAI,GAAG,CAAC,EAC5BC,SAAkB,GAAG,IAAI,EAClC;IAAA,IAAAC,KAAA;IACEA,KAAA,GAAAV,mBAAA,CAAAW,IAAA,OACIR,qBAAqB,EACrBC,UAAU,EACV,UAAU,EACVC,IAAI,EACJC,IAAI,EACJC,IAAI,EACJC,SAAS,EACTC,SACJ,CAAC;IAACC,KAAA,CAnBcT,GAAW,GAAXA,GAAW;IAAAS,KAAA,CACpBR,KAAsB,GAAtBA,KAAsB;IAAAQ,KAAA,CACbP,qBAA6B,GAA7BA,qBAA6B;IAAAO,KAAA,CAC7BN,UAAmC,GAAnCA,UAAmC;IAAAM,KAAA,CACnCL,IAA+D,GAA/DA,IAA+D;IAAAK,KAAA,CAC/DJ,IAAwC,GAAxCA,IAAwC;IAAAI,KAAA,CACxCH,IAAa,GAAbA,IAAa;IAAAG,KAAA,CACtBF,SAAiB,GAAjBA,SAAiB;IAAAE,KAAA,CACjBD,SAAkB,GAAlBA,SAAkB;IAAA,OAAAC,KAAA;EAY7B;EAACE,cAAA,CAAAb,yBAAA,EAAAC,mBAAA;EAAA,OAAAD,yBAAA;AAAA,EAtBqDV,kBAAkB;AAyB5E,OAAO,SAASwB,gBAAgBA,CAC5BC,OAAsC,EACxC;EACE,IAAMV,UAAU,GAAGU,OAAO,CAACV,UAAU;EACrC,IAAMW,eAA2C,GAAGX,UAAU,CAACW,eAAe;EAC9ExB,WAAW,CAACH,wBAAwB,CAAC;EACrC,IAAM4B,WAAW,GAAGF,OAAO,CAACV,UAAU,CAACa,MAAM,CAACD,WAAW;EAEzD,IAAI,CAACF,OAAO,CAACb,GAAG,CAACiB,QAAQ,CAAC,GAAG,CAAC,EAAE;IAC5B,MAAM1B,UAAU,CAAC,cAAc,EAAE;MAC7B2B,IAAI,EAAE;QACFf,UAAU,EAAEU,OAAO,CAACV,UAAU,CAACgB,IAAI;QACnCnB,GAAG,EAAEa,OAAO,CAACb;MACjB;IACJ,CAAC,CAAC;EACN;EAEAa,OAAO,GAAG9B,SAAS,CAAC8B,OAAO,CAAC;EAC5B,IAAI,CAACA,OAAO,CAACb,GAAG,CAACiB,QAAQ,CAAC,GAAG,CAAC,EAAE;IAC5BJ,OAAO,CAACb,GAAG,GAAGa,OAAO,CAACb,GAAG,GAAG,GAAG;EACnC;EACAa,OAAO,CAACO,iBAAiB,GAAG,OAAOP,OAAO,CAACO,iBAAiB,KAAK,WAAW,GAAG,IAAI,GAAGP,OAAO,CAACO,iBAAiB;EAC/G,IAAMC,WAAmF,GAAG
,IAAI7B,OAAO,CAAC,CAAC;EACzG,IAAI8B,yBAA+F;EACnG,IAAIT,OAAO,CAACT,IAAI,EAAE;IACdkB,yBAAyB,GAAG;MACxB,MAAMC,OAAOA,CACTC,oBAAuD,EACvDC,SAAiB,EACnB;QACE;AAChB;AACA;QACgB,IAAMzB,GAAG,GAAGa,OAAO,CAACb,GAAG,GAAG,WAAW,GAAGN,mBAAmB,CAAC;UACxDgC,KAAK,EAAE,UAAU;UACjBC,IAAI,EAAE,QAAQ;UACdC,YAAY,EAAE,IAAI;UAClBC,KAAK,EAAEL,oBAAoB,GAAGA,oBAAoB,CAACM,QAAQ,GAAG,CAAC;UAC/DC,SAAS,EAAElB,OAAO,CAACT,IAAI,IAAIS,OAAO,CAACT,IAAI,CAAC2B,SAAS,GAAGlB,OAAO,CAACT,IAAI,CAAC2B,SAAS,GAAG,KAAK;UAClFC,KAAK,EAAEP,SAAS;UAChBQ,YAAY,EAAER;QAClB,CAAC,CAAC;QAEF,IAAMS,QAAQ,GAAG,MAAMC,gBAAgB,CAAClC,KAAK,CAACD,GAAG,CAAC;QAClD,IAAMoC,YAAkC,GAAG,MAAMF,QAAQ,CAACG,IAAI,CAAC,CAAC;QAChE,IAAI,CAACD,YAAY,CAACE,OAAO,EAAE;UACvB,MAAM/C,UAAU,CAAC,cAAc,EAAE;YAC7B2B,IAAI,EAAE;cAAEkB;YAAa;UACzB,CAAC,CAAC;QACN;QACA,IAAMG,SAAmC,GAAGH,YAAY,CAACE,OAAO,CAC3DE,GAAG,CAACC,GAAG,IAAIhD,qBAAqB,CAACU,UAAU,CAACa,MAAM,CAACD,WAAW,EAAElC,cAAc,CAAC4D,GAAG,CAACC,GAAG,CAAC,CAAC,CAAC;QAC9F,OAAO;UACHH,SAAS;UACTI,UAAU,EAAE;YACRb,QAAQ,EAAEM,YAAY,CAACQ;UAC3B;QACJ,CAAC;MACL,CAAC;MACDnB,SAAS,EAAE5C,cAAc,CAACgC,OAAO,CAACT,IAAI,CAAC,CAACqB,SAAS;MACjDoB,QAAQ,EAAEhE,cAAc,CAACgC,OAAO,CAACT,IAAI,CAAC,CAACyC,QAAQ;MAC/CC,OAAO,EAAEzB,WAAW,CAAC0B,YAAY,CAAC,CAAC;MACnCC,iBAAiB,EAAEnC,OAAO,CAACT,IAAI,CAAC4C;IACpC,CAAC;EACL;EAEA,IAAIC,yBAAwE;EAC5E,IAAIpC,OAAO,CAACR,IAAI,EAAE;IACd4C,yBAAyB,GAAG;MACxB,MAAM1B,OAAOA,CACT2B,IAAgD,EAClD;QACE,IAAMC,SAAmC,GAAG,EAAE;QAC9C,IAAMC,YAAY,GAAG,IAAIC,GAAG,CAAmD,CAAC;QAChFH,IAAI,CAACI,OAAO,CAACb,GAAG,IAAI;UAChB,IAAMc,EAAE,GAAId,GAAG,CAACe,gBAAgB,CAASzC,WAAW,CAAC;UACrDqC,YAAY,CAACK,GAAG,CAACF,EAAE,EAAEd,GAAG,CAAC;QAC7B,CAAC,CAAC;;QAEF;AAChB;AACA;AACA;QACgB,IAAMiB,gBAAgB,GAAG,MAAMvB,gBAAgB,CAAClC,KAAK,CACjDY,OAAO,CAACb,GAAG,GAAG,YAAY,GAAGN,mBAAmB,CAAC,CAAC,CAAC,CAAC,EACpD;UACIiE,MAAM,EAAE,MAAM;UACdC,OAAO,EAAE;YACL,cAAc,EAAE;UACpB,CAAC;UACDC,IAAI,EAAEC,IAAI,CAACC,SAAS,CAAC;YACjBC,IAAI,EAAEd,IAAI,CAACV,GAAG,CAACC,GAAG,IAAKA,GAAG,CAACe,gBAAgB,CAASzC,WAAW,CAAC,CAAC;YACjEa,YAAY,EAAE,IAAI;YAClBqC,OAAO,EAAE;UACb,CAAC;QACL,CACJ,CAAC;QA
CD,IAAMC,YAAkC,GAAG,MAAMR,gBAAgB,CAACrB,IAAI,CAAC,CAAC;QACxE,IAAM8B,eAA4B,GAAG,EAAE;QACvC,IAAMC,aAAa,GAAG,IAAIf,GAAG,CAAiB,CAAC;QAC/C,MAAMgB,OAAO,CAACC,GAAG,CACbJ,YAAY,CAAChB,IAAI,CAACV,GAAG,CAAC,MAAOC,GAAG,IAAK;UACjC,IAAI,CAACA,GAAG,CAACC,GAAG,EAAE;YACVyB,eAAe,CAAC9D,IAAI,CAACrB,iBAAiB,CAACoE,YAAY,EAAEX,GAAG,CAAC8B,GAAG,CAAC,CAAC;YAC9D;UACJ;UACA,IAAMC,eAAuC,GAAG/E,qBAAqB,CAACsB,WAAW,EAAE0B,GAAG,CAACC,GAAG,CAAC;UAC3F,IAAM+B,OAAO,GAAGzF,iBAAiB,CAACoE,YAAY,EAAEX,GAAG,CAACc,EAAE,CAAC;UAEvD,IACIkB,OAAO,CAACC,kBAAkB,IAC1B,CAAC,MAAM5D,eAAe,CAAC;YACnB0D,eAAe;YACfhB,gBAAgB,EAAEiB,OAAO,CAACC;UAC9B,CAAC,EAAE,gBAAgB,CAAC,EAAEC,OAAO,EAC/B;YACEP,aAAa,CAACX,GAAG,CAAChB,GAAG,CAACc,EAAE,EAAEd,GAAG,CAACC,GAAG,CAACkC,IAAI,CAAC;YACvCT,eAAe,CAAC9D,IAAI,CAACoE,OAAO,CAAC;UACjC,CAAC,MAAM;YACHtB,SAAS,CAAC9C,IAAI,CAACmE,eAAe,CAAC;UACnC;QACJ,CAAC,CACL,CAAC;;QAED;AAChB;AACA;QACgB,IAAMxE,GAAG,GAAGa,OAAO,CAACb,GAAG,GAAG,aAAa,GAAGN,mBAAmB,CAAC,CAAC,CAAC,CAAC;QACjE,IAAMmE,IAAI,GAAG;UACTgB,IAAI,EAAEV,eAAe,CAAC3B,GAAG,CAACC,GAAG,IAAI;YAC7B,IAAMqC,KAAK,GAAIrC,GAAG,CAACe,gBAAgB,CAASzC,WAAW,CAAC;YACxD,IAAMgE,OAAO,GAAGhG,SAAS,CAAC0D,GAAG,CAACe,gBAAgB,CAAC;YAC/C,IAAIY,aAAa,CAACY,GAAG,CAACF,KAAK,CAAC,EAAE;cACzBC,OAAO,CAASH,IAAI,GAAG5F,iBAAiB,CAACoF,aAAa,EAAEU,KAAK,CAAC;YACnE;YACA,OAAOnF,oBAAoB,CAACQ,UAAU,CAACa,MAAM,CAACD,WAAW,EAAEgE,OAAO,CAAC;UACvE,CAAC;QACL,CAAC;QAED,IAAM7C,QAAQ,GAAG,MAAMC,gBAAgB,CAAClC,KAAK,CACzCD,GAAG,EACH;UACI2D,MAAM,EAAE,MAAM;UACdC,OAAO,EAAE;YACL,cAAc,EAAE;UACpB,CAAC;UACDC,IAAI,EAAEC,IAAI,CAACC,SAAS,CAACF,IAAI;QAC7B,CACJ,CAAC;QACD,IAAMoB,YAAqC,GAAG,MAAM/C,QAAQ,CAACG,IAAI,CAAC,CAAC;;QAEnE;QACA,IAAM6C,gBAA0B,GAAG,EAAE;QACrCD,YAAY,CAAC3B,OAAO,CAAC6B,cAAc,IAAI;UACnC,IAAMC,UAAU,GAAGD,cAAc,CAACE,KAAK,KAAK,UAAU;UACtD,IAAI,CAACF,cAAc,CAACG,EAAE,IAAI,CAACF,UAAU,EAAE;YACnC,MAAM7F,UAAU,CAAC,KAAK,EAAE;cAAE2B,IAAI,EAAE;gBAAEiE;cAAe;YAAE,CAAC,CAAC;UACzD;UACA,IAAIC,UAAU,EAAE;YACZF,gBAAgB,CAAC7E,IAAI,CAAC8E,cAAc,CAAC5B,EAAE,CAAC;UAC5C;QACJ,CAAC,CAAC;QAEF,IAAI2B,gBAAgB,CAACK,MAAM,KAAK,CAAC,EAAE;UAC/
B,OAAOpC,SAAS;QACpB;QAEA,IAAMqC,kBAAkB,GAAG3E,OAAO,CAACb,GAAG,GAAG,YAAY,GAAGN,mBAAmB,CAAC;UACxEkC,YAAY,EAAE,IAAI;UAClBoC,IAAI,EAAEF,IAAI,CAACC,SAAS,CAACmB,gBAAgB;QACzC,CAAC,CAAC;QACF,IAAMO,gBAAgB,GAAG,MAAMtD,gBAAgB,CAAClC,KAAK,CAACuF,kBAAkB,CAAC;QACzE,IAAME,oBAA0C,GAAG,MAAMD,gBAAgB,CAACpD,IAAI,CAAC,CAAC;QAChFqD,oBAAoB,CAACxC,IAAI,CAACI,OAAO,CAACqC,gBAAgB,IAAI;UAClDxC,SAAS,CAAC9C,IAAI,CAACZ,qBAAqB,CAACU,UAAU,CAACa,MAAM,CAACD,WAAW,EAAE4E,gBAAgB,CAACjD,GAAG,CAAC,CAAC;QAC9F,CAAC,CAAC;QAEF,OAAOS,SAAS;MACpB,CAAC;MACD1B,SAAS,EAAEZ,OAAO,CAACR,IAAI,CAACoB,SAAS;MACjCoB,QAAQ,EAAEhC,OAAO,CAACR,IAAI,CAACwC,QAAQ;MAC/BG,iBAAiB,EAAEnC,OAAO,CAACR,IAAI,CAAC2C;IACpC,CAAC;EACL;EAEA,IAAMb,gBAAgB,GAAG,IAAIrC,yBAAyB,CAClDe,OAAO,CAACb,GAAG,EACXa,OAAO,CAACZ,KAAK,GAAGY,OAAO,CAACZ,KAAK,GAAGL,eAAe,CAAC,CAAC,EACjDiB,OAAO,CAACX,qBAAqB,EAC7BC,UAAU,EACVmB,yBAAyB,EACzB2B,yBAAyB,EACzBpC,OAAO,CAACP,IAAI,EACZO,OAAO,CAACN,SAAS,EACjBM,OAAO,CAACL,SACZ,CAAC;;EAED;AACJ;AACA;EACI,IAAIK,OAAO,CAACP,IAAI,IAAIO,OAAO,CAACT,IAAI,EAAE;IAC9B,IAAMwF,WAAW,GAAGzD,gBAAgB,CAAC0D,KAAK,CAACC,IAAI,CAAC3D,gBAAgB,CAAC;IACjEA,gBAAgB,CAAC0D,KAAK,GAAG,MAAM;MAC3B,IAAIhE,KAAsB,GAAG,KAAK;MAClC,IAAMJ,SAAS,GAAGZ,OAAO,CAACT,IAAI,IAAIS,OAAO,CAACT,IAAI,CAACqB,SAAS,GAAGZ,OAAO,CAACT,IAAI,CAACqB,SAAS,GAAG,EAAE;MAEtF,CAAC,YAAY;QACT,IAAIsE,oBAAoB,GAAG9G,GAAG,CAAC,CAAC;QAChC,OAAO,CAACkD,gBAAgB,CAAC6D,SAAS,CAAC,CAAC,EAAE;UAClC,IAAMhG,IAAG,GAAGa,OAAO,CAACb,GAAG,GAAG,WAAW,GAAGN,mBAAmB,CAAC;YACxDgC,KAAK,EAAE,UAAU;YACjBC,IAAI,EAAE,UAAU;YAChBE,KAAK;YACLD,YAAY,EAAE,IAAI;YAClBG,SAAS,EAAElB,OAAO,CAACT,IAAI,IAAIS,OAAO,CAACT,IAAI,CAAC2B,SAAS,GAAGlB,OAAO,CAACT,IAAI,CAAC2B,SAAS,GAAG,KAAK;YAClFC,KAAK,EAAEP,SAAS;YAChBQ,YAAY,EAAER;UAClB,CAAC,CAAC;UAEF,IAAIW,YAAkC;UACtC,IAAI;YACA2D,oBAAoB,GAAG9G,GAAG,CAAC,CAAC;YAC5BmD,YAAY,GAAG,MAAM,CAAC,MAAMD,gBAAgB,CAAClC,KAAK,CAACD,IAAG,CAAC,EAAEqC,IAAI,CAAC,CAAC;UACnE,CAAC,CAAC,OAAO4D,GAAQ,EAAE;YACf9D,gBAAgB,CAAC+D,QAAQ,CAACb,KAAK,CAACc,IAAI,CAChC5G,UAAU,CAAC,WAAW,EAAE;cACpB2B,IAAI,EAAE;gBAAElB,GAAG,EAAHA;cAAI,CAAC;cACbq
F,KAAK,EAAEvG,gBAAgB,CAACmH,GAAG;YAC/B,CAAC,CACL,CAAC;YAED,IAAIF,oBAAoB,GAAI9G,GAAG,CAAC,CAAC,GAAGkD,gBAAgB,CAAC5B,SAAU,EAAE;cAC7D;AAC5B;AACA;AACA;AACA;AACA;cAC4B,MAAMrB,WAAW,CAAC,CAAC,CAAC;YACxB,CAAC,MAAM;cACH;cACA,MAAMW,UAAU,CACZM,UAAU,EACVgC,gBAAgB,CAAC5B,SACrB,CAAC;YACL;YACA;UACJ;UACA,IAAMgC,SAAmC,GAAGH,YAAY,CAACE,OAAO,CAC3DE,GAAG,CAACC,GAAG,IAAIhD,qBAAqB,CAACU,UAAU,CAACa,MAAM,CAACD,WAAW,EAAElC,cAAc,CAAC4D,GAAG,CAACC,GAAG,CAAC,CAAC,CAAC;UAC9Fb,KAAK,GAAGO,YAAY,CAACQ,QAAQ;UAE7BvB,WAAW,CAAC8E,IAAI,CAAC;YACb5D,SAAS;YACTI,UAAU,EAAE;cACRb,QAAQ,EAAEM,YAAY,CAACQ;YAC3B;UACJ,CAAC,CAAC;QACN;MACJ,CAAC,EAAE,CAAC;MACJ,OAAOgD,WAAW,CAAC,CAAC;IACxB,CAAC;EACL;EAEAvG,4BAA4B,CAACwB,OAAO,CAACO,iBAAiB,EAAEe,gBAAgB,CAAC;EAEzE,OAAOA,gBAAgB;AAC3B","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-firestore/firestore-helper.js b/dist/esm/plugins/replication-firestore/firestore-helper.js deleted file mode 100644 index 148afd7141b..00000000000 --- a/dist/esm/plugins/replication-firestore/firestore-helper.js +++ /dev/null @@ -1,46 +0,0 @@ -import { Timestamp } from 'firebase/firestore'; -import { flatClone, now } from "../../plugins/utils/index.js"; -export function getFirestoreSortFieldValue(docData, primaryKey) { - var timeString = now() + ''; - return 'rxdb-' + timeString.padStart(15, '0') + '-' + docData[primaryKey]; -} -export function stripServerTimestampField(serverTimestampField, docData) { - var data = flatClone(docData); - delete data[serverTimestampField]; - return data; -} -export function serverTimestampToIsoString(serverTimestampField, docData) { - var timestamp = docData[serverTimestampField]; - var date = timestamp.toDate(); - return date.toISOString(); -} -export function isoStringToServerTimestamp(isoString) { - var date = new Date(isoString); - return Timestamp.fromDate(date); -} -export function firestoreRowToDocData(serverTimestampField, primaryPath, row) { - var docData = stripServerTimestampField(serverTimestampField, row.data()); 
- docData[primaryPath] = row.id; - return docData; -} -export function stripPrimaryKey(primaryPath, docData) { - docData = flatClone(docData); - delete docData[primaryPath]; - return docData; -} - -// https://stackoverflow.com/questions/61354866/is-there-a-workaround-for-the-firebase-query-in-limit-to-10 -export function getContentByIds(ids, getQuery) { - var batches = []; - while (ids.length) { - // firestore limits batches to 10 - var batch = ids.splice(0, 10); - - // add the batch request to to a queue - batches.push(getQuery(batch)); - } - - // after all of the data is fetched, return it - return Promise.all(batches).then(content => content.map(i => i.docs).flat()); -} -//# sourceMappingURL=firestore-helper.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication-firestore/firestore-helper.js.map b/dist/esm/plugins/replication-firestore/firestore-helper.js.map deleted file mode 100644 index 651d3bcd821..00000000000 --- a/dist/esm/plugins/replication-firestore/firestore-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"firestore-helper.js","names":["Timestamp","flatClone","now","getFirestoreSortFieldValue","docData","primaryKey","timeString","padStart","stripServerTimestampField","serverTimestampField","data","serverTimestampToIsoString","timestamp","date","toDate","toISOString","isoStringToServerTimestamp","isoString","Date","fromDate","firestoreRowToDocData","primaryPath","row","id","stripPrimaryKey","getContentByIds","ids","getQuery","batches","length","batch","splice","push","Promise","all","then","content","map","i","docs","flat"],"sources":["../../../../src/plugins/replication-firestore/firestore-helper.ts"],"sourcesContent":["import {\n QueryDocumentSnapshot,\n Timestamp\n} from 'firebase/firestore';\nimport type {\n WithDeleted\n} from '../../types/index.d.ts';\nimport { flatClone, now } from '../../plugins/utils/index.ts';\nimport type { GetQuery } from './firestore-types.ts';\n\n\nexport function 
getFirestoreSortFieldValue(docData: any, primaryKey: string): string {\n const timeString = now() + '';\n return 'rxdb-' + timeString.padStart(15, '0') + '-' + docData[primaryKey];\n}\n\nexport function stripServerTimestampField(\n serverTimestampField: string,\n docData: RxDocType\n): WithDeleted {\n const data = flatClone(docData);\n delete (data as any)[serverTimestampField];\n return data as any;\n}\n\n\nexport function serverTimestampToIsoString(serverTimestampField: string, docData: any): string {\n const timestamp = (docData as any)[serverTimestampField];\n const date: Date = timestamp.toDate();\n return date.toISOString();\n}\n\nexport function isoStringToServerTimestamp(isoString: string): Timestamp {\n const date = new Date(isoString);\n return Timestamp.fromDate(date);\n}\n\nexport function firestoreRowToDocData(\n serverTimestampField: string,\n primaryPath: string,\n row: QueryDocumentSnapshot\n): WithDeleted {\n const docData = stripServerTimestampField(\n serverTimestampField,\n row.data()\n );\n (docData as any)[primaryPath] = row.id;\n return docData;\n}\n\nexport function stripPrimaryKey(\n primaryPath: string,\n docData: any\n): any {\n docData = flatClone(docData);\n delete (docData as any)[primaryPath];\n return docData;\n}\n\n// https://stackoverflow.com/questions/61354866/is-there-a-workaround-for-the-firebase-query-in-limit-to-10\nexport function getContentByIds(ids: string[], getQuery: GetQuery): Promise[]> {\n const batches = [];\n\n while (ids.length) {\n // firestore limits batches to 10\n const batch = ids.splice(0, 10);\n\n // add the batch request to to a queue\n batches.push(getQuery(batch));\n }\n\n // after all of the data is fetched, return it\n return Promise.all(batches).then((content) => content.map(i => 
i.docs).flat());\n}\n"],"mappings":"AAAA,SAEIA,SAAS,QACN,oBAAoB;AAI3B,SAASC,SAAS,EAAEC,GAAG,QAAQ,8BAA8B;AAI7D,OAAO,SAASC,0BAA0BA,CAACC,OAAY,EAAEC,UAAkB,EAAU;EACjF,IAAMC,UAAU,GAAGJ,GAAG,CAAC,CAAC,GAAG,EAAE;EAC7B,OAAO,OAAO,GAAGI,UAAU,CAACC,QAAQ,CAAC,EAAE,EAAE,GAAG,CAAC,GAAG,GAAG,GAAGH,OAAO,CAACC,UAAU,CAAC;AAC7E;AAEA,OAAO,SAASG,yBAAyBA,CACrCC,oBAA4B,EAC5BL,OAAkB,EACI;EACtB,IAAMM,IAAI,GAAGT,SAAS,CAACG,OAAO,CAAC;EAC/B,OAAQM,IAAI,CAASD,oBAAoB,CAAC;EAC1C,OAAOC,IAAI;AACf;AAGA,OAAO,SAASC,0BAA0BA,CAACF,oBAA4B,EAAEL,OAAY,EAAU;EAC3F,IAAMQ,SAAS,GAAIR,OAAO,CAASK,oBAAoB,CAAC;EACxD,IAAMI,IAAU,GAAGD,SAAS,CAACE,MAAM,CAAC,CAAC;EACrC,OAAOD,IAAI,CAACE,WAAW,CAAC,CAAC;AAC7B;AAEA,OAAO,SAASC,0BAA0BA,CAACC,SAAiB,EAAa;EACrE,IAAMJ,IAAI,GAAG,IAAIK,IAAI,CAACD,SAAS,CAAC;EAChC,OAAOjB,SAAS,CAACmB,QAAQ,CAACN,IAAI,CAAC;AACnC;AAEA,OAAO,SAASO,qBAAqBA,CACjCX,oBAA4B,EAC5BY,WAAmB,EACnBC,GAAqC,EACf;EACtB,IAAMlB,OAAO,GAAGI,yBAAyB,CACrCC,oBAAoB,EACpBa,GAAG,CAACZ,IAAI,CAAC,CACb,CAAC;EACAN,OAAO,CAASiB,WAAW,CAAC,GAAGC,GAAG,CAACC,EAAE;EACtC,OAAOnB,OAAO;AAClB;AAEA,OAAO,SAASoB,eAAeA,CAC3BH,WAAmB,EACnBjB,OAAY,EACT;EACHA,OAAO,GAAGH,SAAS,CAACG,OAAO,CAAC;EAC5B,OAAQA,OAAO,CAASiB,WAAW,CAAC;EACpC,OAAOjB,OAAO;AAClB;;AAEA;AACA,OAAO,SAASqB,eAAeA,CAAYC,GAAa,EAAEC,QAA6B,EAA+C;EAClI,IAAMC,OAAO,GAAG,EAAE;EAElB,OAAOF,GAAG,CAACG,MAAM,EAAE;IACf;IACA,IAAMC,KAAK,GAAGJ,GAAG,CAACK,MAAM,CAAC,CAAC,EAAE,EAAE,CAAC;;IAE/B;IACAH,OAAO,CAACI,IAAI,CAACL,QAAQ,CAACG,KAAK,CAAC,CAAC;EACjC;;EAEA;EACA,OAAOG,OAAO,CAACC,GAAG,CAACN,OAAO,CAAC,CAACO,IAAI,CAAEC,OAAO,IAAKA,OAAO,CAACC,GAAG,CAACC,CAAC,IAAIA,CAAC,CAACC,IAAI,CAAC,CAACC,IAAI,CAAC,CAAC,CAAC;AAClF","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-firestore/firestore-types.js b/dist/esm/plugins/replication-firestore/firestore-types.js deleted file mode 100644 index 84d820d033e..00000000000 --- a/dist/esm/plugins/replication-firestore/firestore-types.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=firestore-types.js.map \ No 
newline at end of file diff --git a/dist/esm/plugins/replication-firestore/firestore-types.js.map b/dist/esm/plugins/replication-firestore/firestore-types.js.map deleted file mode 100644 index 4af79d887b5..00000000000 --- a/dist/esm/plugins/replication-firestore/firestore-types.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"firestore-types.js","names":[],"sources":["../../../../src/plugins/replication-firestore/firestore-types.ts"],"sourcesContent":["import type {\n MaybePromise,\n ReplicationOptions,\n ReplicationPullOptions,\n ReplicationPushOptions,\n WithDeleted\n} from '../../types/index.d.ts';\n\nimport type {\n CollectionReference,\n Firestore,\n QueryFieldFilterConstraint,\n QuerySnapshot\n} from 'firebase/firestore';\n\nexport type FirestoreCheckpointType = {\n id: string;\n /**\n * Firestore internally sets the time to an object like\n * {\n * \"seconds\": 1669807105,\n * \"nanoseconds\": 476000000\n * }\n * But to be able to query that, we have to use a date string\n * like '2022-11-30T11:18:25.141Z'\n * so we store that string instead.\n */\n serverTimestamp: string;\n};\nexport type FirestoreCollection = CollectionReference;\n\nexport type FirestoreOptions = {\n projectId: string;\n collection: FirestoreCollection;\n database: Firestore;\n};\n\nexport type FirestoreSyncPullOptions =\n Omit, 'handler' | 'stream$'>\n & {\n filter?: QueryFieldFilterConstraint | QueryFieldFilterConstraint[];\n };\n\nexport type FirestoreSyncPushOptions = Omit, 'handler'>\n & {\n filter?(item: WithDeleted): MaybePromise;\n };\n\nexport type SyncOptionsFirestore = Omit<\n ReplicationOptions,\n 'pull' | 'push'\n> & {\n firestore: FirestoreOptions;\n /**\n * In firestore it is not possible to read out\n * the internally used write timestamp.\n * Even if we could read it out, it is not indexed which\n * is required for fetch 'changes-since-x'.\n * So instead we have to rely on a custom user defined field\n * that contains the server time which is set by firestore via 
serverTimestamp()\n * IMPORTANT: The serverTimestampField MUST NOT be part of the collections RxJsonSchema!\n * [default='serverTimestamp']\n * @link https://groups.google.com/g/firebase-talk/c/tAmPzPei-mE\n */\n serverTimestampField?: string;\n pull?: FirestoreSyncPullOptions;\n push?: FirestoreSyncPushOptions;\n};\n\nexport type GetQuery = (ids: string[]) => Promise>;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-firestore/index.js b/dist/esm/plugins/replication-firestore/index.js deleted file mode 100644 index b7c5834fa59..00000000000 --- a/dist/esm/plugins/replication-firestore/index.js +++ /dev/null @@ -1,228 +0,0 @@ -import _inheritsLoose from "@babel/runtime/helpers/inheritsLoose"; -import { appendToArray, asyncFilter, ensureNotFalsy, errorToPlainJson, flatClone, lastOfArray, toArray } from "../../plugins/utils/index.js"; -import { doc, query, where, orderBy, limit, getDocs, onSnapshot, runTransaction, writeBatch, serverTimestamp, waitForPendingWrites, documentId } from 'firebase/firestore'; -import { RxDBLeaderElectionPlugin } from "../leader-election/index.js"; -import { RxReplicationState, startReplicationOnLeaderShip } from "../replication/index.js"; -import { addRxPlugin, getSchemaByObjectPath, newRxError } from "../../index.js"; -import { Subject } from 'rxjs'; -import { firestoreRowToDocData, getContentByIds, isoStringToServerTimestamp, serverTimestampToIsoString, stripPrimaryKey, stripServerTimestampField } from "./firestore-helper.js"; -export * from "./firestore-helper.js"; -export * from "./firestore-types.js"; -export var RxFirestoreReplicationState = /*#__PURE__*/function (_RxReplicationState) { - function RxFirestoreReplicationState(firestore, replicationIdentifierHash, collection, pull, push, live = true, retryTime = 1000 * 5, autoStart = true) { - var _this; - _this = _RxReplicationState.call(this, replicationIdentifierHash, collection, '_deleted', pull, push, live, retryTime, 
autoStart) || this; - _this.firestore = firestore; - _this.replicationIdentifierHash = replicationIdentifierHash; - _this.collection = collection; - _this.pull = pull; - _this.push = push; - _this.live = live; - _this.retryTime = retryTime; - _this.autoStart = autoStart; - return _this; - } - _inheritsLoose(RxFirestoreReplicationState, _RxReplicationState); - return RxFirestoreReplicationState; -}(RxReplicationState); -export function replicateFirestore(options) { - var collection = options.collection; - addRxPlugin(RxDBLeaderElectionPlugin); - var pullStream$ = new Subject(); - var replicationPrimitivesPull; - options.live = typeof options.live === 'undefined' ? true : options.live; - options.waitForLeadership = typeof options.waitForLeadership === 'undefined' ? true : options.waitForLeadership; - var serverTimestampField = typeof options.serverTimestampField === 'undefined' ? 'serverTimestamp' : options.serverTimestampField; - options.serverTimestampField = serverTimestampField; - var primaryPath = collection.schema.primaryPath; - - /** - * The serverTimestampField MUST NOT be part of the collections RxJsonSchema. - */ - var schemaPart = getSchemaByObjectPath(collection.schema.jsonSchema, serverTimestampField); - if (schemaPart || - // also must not be nested. - serverTimestampField.includes('.')) { - throw newRxError('RC6', { - field: serverTimestampField, - schema: collection.schema.jsonSchema - }); - } - var pullFilters = options.pull?.filter !== undefined ? 
toArray(options.pull.filter) : []; - var pullQuery = query(options.firestore.collection, ...pullFilters); - if (options.pull) { - replicationPrimitivesPull = { - async handler(lastPulledCheckpoint, batchSize) { - var newerQuery; - var sameTimeQuery; - if (lastPulledCheckpoint) { - var lastServerTimestamp = isoStringToServerTimestamp(lastPulledCheckpoint.serverTimestamp); - newerQuery = query(pullQuery, where(serverTimestampField, '>', lastServerTimestamp), orderBy(serverTimestampField, 'asc'), limit(batchSize)); - sameTimeQuery = query(pullQuery, where(serverTimestampField, '==', lastServerTimestamp), where(primaryPath, '>', lastPulledCheckpoint.id), orderBy(primaryPath, 'asc'), limit(batchSize)); - } else { - newerQuery = query(pullQuery, orderBy(serverTimestampField, 'asc'), limit(batchSize)); - } - var mustsReRun = true; - var useDocs = []; - while (mustsReRun) { - /** - * Local writes that have not been persisted to the server - * are in pending state and do not have a correct serverTimestamp set. - * We have to ensure we only use document states that are in sync with the server. - * @link https://medium.com/firebase-developers/the-secrets-of-firestore-fieldvalue-servertimestamp-revealed-29dd7a38a82b - */ - await waitForPendingWrites(options.firestore.database); - await runTransaction(options.firestore.database, async _tx => { - useDocs = []; - var [newerQueryResult, sameTimeQueryResult] = await Promise.all([getDocs(newerQuery), sameTimeQuery ? 
getDocs(sameTimeQuery) : undefined]); - if (newerQueryResult.metadata.hasPendingWrites || sameTimeQuery && ensureNotFalsy(sameTimeQueryResult).metadata.hasPendingWrites) { - return; - } else { - mustsReRun = false; - if (sameTimeQuery) { - useDocs = ensureNotFalsy(sameTimeQueryResult).docs; - } - var missingAmount = batchSize - useDocs.length; - if (missingAmount > 0) { - var additionalDocs = newerQueryResult.docs.slice(0, missingAmount).filter(x => !!x); - appendToArray(useDocs, additionalDocs); - } - } - }); - } - if (useDocs.length === 0) { - return { - checkpoint: lastPulledCheckpoint ?? null, - documents: [] - }; - } - var lastDoc = ensureNotFalsy(lastOfArray(useDocs)); - var documents = useDocs.map(row => firestoreRowToDocData(serverTimestampField, primaryPath, row)); - var newCheckpoint = { - id: lastDoc.id, - serverTimestamp: serverTimestampToIsoString(serverTimestampField, lastDoc.data()) - }; - var ret = { - documents: documents, - checkpoint: newCheckpoint - }; - return ret; - }, - batchSize: ensureNotFalsy(options.pull).batchSize, - modifier: ensureNotFalsy(options.pull).modifier, - stream$: pullStream$.asObservable() - }; - } - var replicationPrimitivesPush; - if (options.push) { - var pushFilter = options.push?.filter; - replicationPrimitivesPush = { - async handler(rows) { - if (pushFilter !== undefined) { - rows = await asyncFilter(rows, row => pushFilter(row.newDocumentState)); - } - var writeRowsById = {}; - var docIds = rows.map(row => { - var docId = row.newDocumentState[primaryPath]; - writeRowsById[docId] = row; - return docId; - }); - await waitForPendingWrites(options.firestore.database); - var conflicts = []; - - /** - * Everything must run INSIDE of the transaction - * because on tx-errors, firebase will re-run the transaction on some cases. 
- * @link https://firebase.google.com/docs/firestore/manage-data/transactions#transaction_failure - * @link https://firebase.google.com/docs/firestore/manage-data/transactions - */ - await runTransaction(options.firestore.database, async _tx => { - conflicts = []; // reset in case the tx has re-run. - /** - * @link https://stackoverflow.com/a/48423626/3443137 - */ - - var getQuery = ids => { - return getDocs(query(options.firestore.collection, where(documentId(), 'in', ids))); - }; - var docsInDbResult = await getContentByIds(docIds, getQuery); - var docsInDbById = {}; - docsInDbResult.forEach(row => { - var docDataInDb = stripServerTimestampField(serverTimestampField, row.data()); - var docId = row.id; - docDataInDb[primaryPath] = docId; - docsInDbById[docId] = docDataInDb; - }); - - /** - * @link https://firebase.google.com/docs/firestore/manage-data/transactions#batched-writes - */ - var batch = writeBatch(options.firestore.database); - var hasWrite = false; - await Promise.all(Object.entries(writeRowsById).map(async ([docId, writeRow]) => { - var docInDb = docsInDbById[docId]; - if (docInDb && (!writeRow.assumedMasterState || (await collection.conflictHandler({ - newDocumentState: docInDb, - realMasterState: writeRow.assumedMasterState - }, 'replication-firestore-push')).isEqual === false)) { - // conflict - conflicts.push(docInDb); - } else { - // no conflict - hasWrite = true; - var docRef = doc(options.firestore.collection, docId); - var writeDocData = flatClone(writeRow.newDocumentState); - writeDocData[serverTimestampField] = serverTimestamp(); - if (!docInDb) { - // insert - batch.set(docRef, stripPrimaryKey(primaryPath, writeDocData)); - } else { - // update - batch.update(docRef, stripPrimaryKey(primaryPath, writeDocData)); - } - } - })); - if (hasWrite) { - await batch.commit(); - } - }); - await waitForPendingWrites(options.firestore.database); - return conflicts; - }, - batchSize: options.push.batchSize, - modifier: options.push.modifier - }; - } - 
var replicationState = new RxFirestoreReplicationState(options.firestore, options.replicationIdentifier, collection, replicationPrimitivesPull, replicationPrimitivesPush, options.live, options.retryTime, options.autoStart); - - /** - * Use long polling to get live changes for the pull.stream$ - */ - if (options.live && options.pull) { - var startBefore = replicationState.start.bind(replicationState); - var cancelBefore = replicationState.cancel.bind(replicationState); - replicationState.start = () => { - var lastChangeQuery = query(pullQuery, orderBy(serverTimestampField, 'desc'), limit(1)); - var unsubscribe = onSnapshot(lastChangeQuery, _querySnapshot => { - /** - * There is no good way to observe the event stream in firestore. - * So instead we listen to any write to the collection - * and then emit a 'RESYNC' flag. - */ - replicationState.reSync(); - }, error => { - replicationState.subjects.error.next(newRxError('RC_STREAM', { - error: errorToPlainJson(error) - })); - }); - replicationState.cancel = () => { - unsubscribe(); - return cancelBefore(); - }; - return startBefore(); - }; - } - startReplicationOnLeaderShip(options.waitForLeadership, replicationState); - return replicationState; -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication-firestore/index.js.map b/dist/esm/plugins/replication-firestore/index.js.map deleted file mode 100644 index 3c209d61716..00000000000 --- a/dist/esm/plugins/replication-firestore/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","names":["appendToArray","asyncFilter","ensureNotFalsy","errorToPlainJson","flatClone","lastOfArray","toArray","doc","query","where","orderBy","limit","getDocs","onSnapshot","runTransaction","writeBatch","serverTimestamp","waitForPendingWrites","documentId","RxDBLeaderElectionPlugin","RxReplicationState","startReplicationOnLeaderShip","addRxPlugin","getSchemaByObjectPath","newRxError","Subject","firestoreRowToDocData","getContentByIds","isoStringToServerTimestamp","serverTimestampToIsoString","stripPrimaryKey","stripServerTimestampField","RxFirestoreReplicationState","_RxReplicationState","firestore","replicationIdentifierHash","collection","pull","push","live","retryTime","autoStart","_this","call","_inheritsLoose","replicateFirestore","options","pullStream$","replicationPrimitivesPull","waitForLeadership","serverTimestampField","primaryPath","schema","schemaPart","jsonSchema","includes","field","pullFilters","filter","undefined","pullQuery","handler","lastPulledCheckpoint","batchSize","newerQuery","sameTimeQuery","lastServerTimestamp","id","mustsReRun","useDocs","database","_tx","newerQueryResult","sameTimeQueryResult","Promise","all","metadata","hasPendingWrites","docs","missingAmount","length","additionalDocs","slice","x","checkpoint","documents","lastDoc","map","row","newCheckpoint","data","ret","modifier","stream$","asObservable","replicationPrimitivesPush","pushFilter","rows","newDocumentState","writeRowsById","docIds","docId","conflicts","getQuery","ids","docsInDbResult","docsInDbById","forEach","docDataInDb","batch","hasWrite","Object","entries","writeRow","docInDb","assumedMasterState","conflictHandler","realMasterState","isEqual","docRef","writeDocData","set","update","commit","replicationState","replicationIdentifier","startBefore","start","bind","cancelBefore","cancel","lastChangeQuery","unsubscribe","_querySnapshot","reSync","error","subjects","next"],"sources":["../../../../src/plugins/replication-firestore/index.ts"],"
sourcesContent":["import {\n appendToArray,\n asyncFilter,\n ensureNotFalsy,\n errorToPlainJson,\n flatClone,\n lastOfArray,\n toArray\n} from '../../plugins/utils/index.ts';\n\nimport {\n doc,\n query,\n where,\n orderBy,\n limit,\n getDocs,\n onSnapshot,\n runTransaction,\n writeBatch,\n serverTimestamp,\n QueryDocumentSnapshot,\n waitForPendingWrites,\n documentId\n} from 'firebase/firestore';\n\nimport { RxDBLeaderElectionPlugin } from '../leader-election/index.ts';\nimport type {\n RxCollection,\n ReplicationPullOptions,\n ReplicationPushOptions,\n RxReplicationWriteToMasterRow,\n RxReplicationPullStreamItem\n} from '../../types/index.d.ts';\nimport {\n RxReplicationState,\n startReplicationOnLeaderShip\n} from '../replication/index.ts';\nimport {\n addRxPlugin,\n ById,\n getSchemaByObjectPath,\n newRxError,\n WithDeleted\n} from '../../index.ts';\n\nimport type {\n FirestoreCheckpointType,\n FirestoreOptions,\n SyncOptionsFirestore\n} from './firestore-types.ts';\nimport { Subject } from 'rxjs';\nimport {\n firestoreRowToDocData,\n getContentByIds,\n isoStringToServerTimestamp,\n serverTimestampToIsoString,\n stripPrimaryKey,\n stripServerTimestampField\n} from './firestore-helper.ts';\n\nexport * from './firestore-helper.ts';\nexport * from './firestore-types.ts';\n\nexport class RxFirestoreReplicationState extends RxReplicationState {\n constructor(\n public readonly firestore: FirestoreOptions,\n public readonly replicationIdentifierHash: string,\n public readonly collection: RxCollection,\n public readonly pull?: ReplicationPullOptions,\n public readonly push?: ReplicationPushOptions,\n public readonly live: boolean = true,\n public retryTime: number = 1000 * 5,\n public autoStart: boolean = true\n ) {\n super(\n replicationIdentifierHash,\n collection,\n '_deleted',\n pull,\n push,\n live,\n retryTime,\n autoStart\n );\n }\n}\n\nexport function replicateFirestore(\n options: SyncOptionsFirestore\n): RxFirestoreReplicationState {\n const collection = 
options.collection;\n addRxPlugin(RxDBLeaderElectionPlugin);\n const pullStream$: Subject> = new Subject();\n let replicationPrimitivesPull: ReplicationPullOptions | undefined;\n options.live = typeof options.live === 'undefined' ? true : options.live;\n options.waitForLeadership = typeof options.waitForLeadership === 'undefined' ? true : options.waitForLeadership;\n const serverTimestampField = typeof options.serverTimestampField === 'undefined' ? 'serverTimestamp' : options.serverTimestampField;\n options.serverTimestampField = serverTimestampField;\n const primaryPath = collection.schema.primaryPath;\n\n /**\n * The serverTimestampField MUST NOT be part of the collections RxJsonSchema.\n */\n const schemaPart = getSchemaByObjectPath(collection.schema.jsonSchema, serverTimestampField);\n if (\n schemaPart ||\n // also must not be nested.\n serverTimestampField.includes('.')\n ) {\n throw newRxError('RC6', {\n field: serverTimestampField,\n schema: collection.schema.jsonSchema\n });\n }\n\n const pullFilters = options.pull?.filter !== undefined\n ? 
toArray(options.pull.filter)\n : [];\n\n const pullQuery = query(options.firestore.collection, ...pullFilters);\n\n if (options.pull) {\n replicationPrimitivesPull = {\n async handler(\n lastPulledCheckpoint: FirestoreCheckpointType | undefined,\n batchSize: number\n ) {\n let newerQuery: ReturnType;\n let sameTimeQuery: ReturnType | undefined;\n\n if (lastPulledCheckpoint) {\n const lastServerTimestamp = isoStringToServerTimestamp(lastPulledCheckpoint.serverTimestamp);\n newerQuery = query(pullQuery,\n where(serverTimestampField, '>', lastServerTimestamp),\n orderBy(serverTimestampField, 'asc'),\n limit(batchSize)\n );\n sameTimeQuery = query(pullQuery,\n where(serverTimestampField, '==', lastServerTimestamp),\n where(primaryPath, '>', lastPulledCheckpoint.id),\n orderBy(primaryPath, 'asc'),\n limit(batchSize)\n );\n } else {\n newerQuery = query(pullQuery,\n orderBy(serverTimestampField, 'asc'),\n limit(batchSize)\n );\n }\n\n let mustsReRun = true;\n let useDocs: QueryDocumentSnapshot[] = [];\n while (mustsReRun) {\n /**\n * Local writes that have not been persisted to the server\n * are in pending state and do not have a correct serverTimestamp set.\n * We have to ensure we only use document states that are in sync with the server.\n * @link https://medium.com/firebase-developers/the-secrets-of-firestore-fieldvalue-servertimestamp-revealed-29dd7a38a82b\n */\n await waitForPendingWrites(options.firestore.database);\n await runTransaction(options.firestore.database, async (_tx) => {\n useDocs = [];\n const [\n newerQueryResult,\n sameTimeQueryResult\n ] = await Promise.all([\n getDocs(newerQuery),\n sameTimeQuery ? 
getDocs(sameTimeQuery) : undefined\n ]);\n\n if (\n newerQueryResult.metadata.hasPendingWrites ||\n (sameTimeQuery && ensureNotFalsy(sameTimeQueryResult).metadata.hasPendingWrites)\n ) {\n return;\n } else {\n mustsReRun = false;\n\n if (sameTimeQuery) {\n useDocs = ensureNotFalsy(sameTimeQueryResult).docs as any;\n }\n const missingAmount = batchSize - useDocs.length;\n if (missingAmount > 0) {\n const additionalDocs = newerQueryResult.docs.slice(0, missingAmount).filter(x => !!x);\n appendToArray(useDocs, additionalDocs);\n }\n }\n });\n }\n\n if (useDocs.length === 0) {\n return {\n checkpoint: lastPulledCheckpoint ?? null,\n documents: []\n };\n }\n const lastDoc = ensureNotFalsy(lastOfArray(useDocs));\n const documents: WithDeleted[] = useDocs\n .map(row => firestoreRowToDocData(\n serverTimestampField,\n primaryPath,\n row\n ));\n const newCheckpoint: FirestoreCheckpointType = {\n id: lastDoc.id,\n serverTimestamp: serverTimestampToIsoString(serverTimestampField, lastDoc.data())\n };\n const ret = {\n documents: documents,\n checkpoint: newCheckpoint\n };\n return ret;\n },\n batchSize: ensureNotFalsy(options.pull).batchSize,\n modifier: ensureNotFalsy(options.pull).modifier,\n stream$: pullStream$.asObservable()\n };\n }\n\n let replicationPrimitivesPush: ReplicationPushOptions | undefined;\n if (options.push) {\n const pushFilter = options.push?.filter;\n replicationPrimitivesPush = {\n async handler(\n rows: RxReplicationWriteToMasterRow[]\n ) {\n if (pushFilter !== undefined) {\n rows = await asyncFilter(rows, (row) => pushFilter(row.newDocumentState));\n }\n\n const writeRowsById: ById> = {};\n const docIds: string[] = rows.map(row => {\n const docId = (row.newDocumentState as any)[primaryPath];\n writeRowsById[docId] = row;\n return docId;\n });\n await waitForPendingWrites(options.firestore.database);\n let conflicts: WithDeleted[] = [];\n\n /**\n * Everything must run INSIDE of the transaction\n * because on tx-errors, firebase will re-run the 
transaction on some cases.\n * @link https://firebase.google.com/docs/firestore/manage-data/transactions#transaction_failure\n * @link https://firebase.google.com/docs/firestore/manage-data/transactions\n */\n await runTransaction(options.firestore.database, async (_tx) => {\n conflicts = []; // reset in case the tx has re-run.\n /**\n * @link https://stackoverflow.com/a/48423626/3443137\n */\n\n const getQuery = (ids: string[]) => {\n return getDocs(\n query(\n options.firestore.collection,\n where(documentId(), 'in', ids)\n )\n );\n };\n\n const docsInDbResult = await getContentByIds(docIds, getQuery);\n\n const docsInDbById: ById = {};\n docsInDbResult.forEach(row => {\n const docDataInDb = stripServerTimestampField(serverTimestampField, row.data());\n const docId = row.id;\n (docDataInDb as any)[primaryPath] = docId;\n docsInDbById[docId] = docDataInDb;\n });\n\n /**\n * @link https://firebase.google.com/docs/firestore/manage-data/transactions#batched-writes\n */\n const batch = writeBatch(options.firestore.database);\n let hasWrite = false;\n await Promise.all(\n Object.entries(writeRowsById).map(async ([docId, writeRow]) => {\n const docInDb: RxDocType | undefined = docsInDbById[docId];\n\n if (\n docInDb &&\n (\n !writeRow.assumedMasterState ||\n (await collection.conflictHandler({\n newDocumentState: docInDb as any,\n realMasterState: writeRow.assumedMasterState\n }, 'replication-firestore-push')).isEqual === false\n )\n ) {\n // conflict\n conflicts.push(docInDb as any);\n } else {\n // no conflict\n hasWrite = true;\n const docRef = doc(options.firestore.collection, docId);\n const writeDocData = flatClone(writeRow.newDocumentState);\n (writeDocData as any)[serverTimestampField] = serverTimestamp();\n if (!docInDb) {\n // insert\n batch.set(docRef, stripPrimaryKey(primaryPath, writeDocData));\n } else {\n // update\n batch.update(docRef, stripPrimaryKey(primaryPath, writeDocData));\n }\n }\n })\n );\n\n if (hasWrite) {\n await batch.commit();\n }\n });\n 
await waitForPendingWrites(options.firestore.database);\n return conflicts;\n },\n batchSize: options.push.batchSize,\n modifier: options.push.modifier\n };\n }\n\n\n const replicationState = new RxFirestoreReplicationState(\n options.firestore,\n options.replicationIdentifier,\n collection,\n replicationPrimitivesPull,\n replicationPrimitivesPush,\n options.live,\n options.retryTime,\n options.autoStart\n );\n\n /**\n * Use long polling to get live changes for the pull.stream$\n */\n if (options.live && options.pull) {\n const startBefore = replicationState.start.bind(replicationState);\n const cancelBefore = replicationState.cancel.bind(replicationState);\n replicationState.start = () => {\n const lastChangeQuery = query(\n pullQuery,\n orderBy(serverTimestampField, 'desc'),\n limit(1)\n );\n const unsubscribe = onSnapshot(\n lastChangeQuery,\n (_querySnapshot) => {\n /**\n * There is no good way to observe the event stream in firestore.\n * So instead we listen to any write to the collection\n * and then emit a 'RESYNC' flag.\n */\n replicationState.reSync();\n },\n (error) => {\n replicationState.subjects.error.next(\n newRxError('RC_STREAM', { error: errorToPlainJson(error) })\n );\n }\n );\n replicationState.cancel = () => {\n unsubscribe();\n return cancelBefore();\n };\n return startBefore();\n };\n }\n\n startReplicationOnLeaderShip(options.waitForLeadership, replicationState);\n\n return 
replicationState;\n}\n"],"mappings":";AAAA,SACIA,aAAa,EACbC,WAAW,EACXC,cAAc,EACdC,gBAAgB,EAChBC,SAAS,EACTC,WAAW,EACXC,OAAO,QACJ,8BAA8B;AAErC,SACIC,GAAG,EACHC,KAAK,EACLC,KAAK,EACLC,OAAO,EACPC,KAAK,EACLC,OAAO,EACPC,UAAU,EACVC,cAAc,EACdC,UAAU,EACVC,eAAe,EAEfC,oBAAoB,EACpBC,UAAU,QACP,oBAAoB;AAE3B,SAASC,wBAAwB,QAAQ,6BAA6B;AAQtE,SACIC,kBAAkB,EAClBC,4BAA4B,QACzB,yBAAyB;AAChC,SACIC,WAAW,EAEXC,qBAAqB,EACrBC,UAAU,QAEP,gBAAgB;AAOvB,SAASC,OAAO,QAAQ,MAAM;AAC9B,SACIC,qBAAqB,EACrBC,eAAe,EACfC,0BAA0B,EAC1BC,0BAA0B,EAC1BC,eAAe,EACfC,yBAAyB,QACtB,uBAAuB;AAE9B,cAAc,uBAAuB;AACrC,cAAc,sBAAsB;AAEpC,WAAaC,2BAA2B,0BAAAC,mBAAA;EACpC,SAAAD,4BACoBE,SAAsC,EACtCC,yBAAiC,EACjCC,UAAmC,EACnCC,IAAiE,EACjEC,IAAwC,EACxCC,IAAa,GAAG,IAAI,EAC7BC,SAAiB,GAAG,IAAI,GAAG,CAAC,EAC5BC,SAAkB,GAAG,IAAI,EAClC;IAAA,IAAAC,KAAA;IACEA,KAAA,GAAAT,mBAAA,CAAAU,IAAA,OACIR,yBAAyB,EACzBC,UAAU,EACV,UAAU,EACVC,IAAI,EACJC,IAAI,EACJC,IAAI,EACJC,SAAS,EACTC,SACJ,CAAC;IAACC,KAAA,CAlBcR,SAAsC,GAAtCA,SAAsC;IAAAQ,KAAA,CACtCP,yBAAiC,GAAjCA,yBAAiC;IAAAO,KAAA,CACjCN,UAAmC,GAAnCA,UAAmC;IAAAM,KAAA,CACnCL,IAAiE,GAAjEA,IAAiE;IAAAK,KAAA,CACjEJ,IAAwC,GAAxCA,IAAwC;IAAAI,KAAA,CACxCH,IAAa,GAAbA,IAAa;IAAAG,KAAA,CACtBF,SAAiB,GAAjBA,SAAiB;IAAAE,KAAA,CACjBD,SAAkB,GAAlBA,SAAkB;IAAA,OAAAC,KAAA;EAY7B;EAACE,cAAA,CAAAZ,2BAAA,EAAAC,mBAAA;EAAA,OAAAD,2BAAA;AAAA,EArBuDZ,kBAAkB;AAwB9E,OAAO,SAASyB,kBAAkBA,CAC9BC,OAAwC,EACF;EACtC,IAAMV,UAAU,GAAGU,OAAO,CAACV,UAAU;EACrCd,WAAW,CAACH,wBAAwB,CAAC;EACrC,IAAM4B,WAAqF,GAAG,IAAItB,OAAO,CAAC,CAAC;EAC3G,IAAIuB,yBAAiG;EACrGF,OAAO,CAACP,IAAI,GAAG,OAAOO,OAAO,CAACP,IAAI,KAAK,WAAW,GAAG,IAAI,GAAGO,OAAO,CAACP,IAAI;EACxEO,OAAO,CAACG,iBAAiB,GAAG,OAAOH,OAAO,CAACG,iBAAiB,KAAK,WAAW,GAAG,IAAI,GAAGH,OAAO,CAACG,iBAAiB;EAC/G,IAAMC,oBAAoB,GAAG,OAAOJ,OAAO,CAACI,oBAAoB,KAAK,WAAW,GAAG,iBAAiB,GAAGJ,OAAO,CAACI,oBAAoB;EACnIJ,OAAO,CAACI,oBAAoB,GAAGA,oBAAoB;EACnD,IAAMC,WAAW,GAAGf,UAAU,CAACgB,MAAM,CAACD,WAAW;;EAEjD;AACJ;AACA;EACI,IAAME,UAAU,GAAG9B,qBAAqB,CAACa,UAAU,CAACgB,MAAM,CAACE,UAAU,EAAEJ,oBAAoB,CAAC;EAC5F,IACIG,UAAU;EACV;EACAH,oBAAoB,CAACK,
QAAQ,CAAC,GAAG,CAAC,EACpC;IACE,MAAM/B,UAAU,CAAC,KAAK,EAAE;MACpBgC,KAAK,EAAEN,oBAAoB;MAC3BE,MAAM,EAAEhB,UAAU,CAACgB,MAAM,CAACE;IAC9B,CAAC,CAAC;EACN;EAEA,IAAMG,WAAW,GAAGX,OAAO,CAACT,IAAI,EAAEqB,MAAM,KAAKC,SAAS,GAChDrD,OAAO,CAACwC,OAAO,CAACT,IAAI,CAACqB,MAAM,CAAC,GAC5B,EAAE;EAER,IAAME,SAAS,GAAGpD,KAAK,CAACsC,OAAO,CAACZ,SAAS,CAACE,UAAU,EAAE,GAAGqB,WAAW,CAAC;EAErE,IAAIX,OAAO,CAACT,IAAI,EAAE;IACdW,yBAAyB,GAAG;MACxB,MAAMa,OAAOA,CACTC,oBAAyD,EACzDC,SAAiB,EACnB;QACE,IAAIC,UAAoC;QACxC,IAAIC,aAAmD;QAEvD,IAAIH,oBAAoB,EAAE;UACtB,IAAMI,mBAAmB,GAAGtC,0BAA0B,CAACkC,oBAAoB,CAAC9C,eAAe,CAAC;UAC5FgD,UAAU,GAAGxD,KAAK,CAACoD,SAAS,EACxBnD,KAAK,CAACyC,oBAAoB,EAAE,GAAG,EAAEgB,mBAAmB,CAAC,EACrDxD,OAAO,CAACwC,oBAAoB,EAAE,KAAK,CAAC,EACpCvC,KAAK,CAACoD,SAAS,CACnB,CAAC;UACDE,aAAa,GAAGzD,KAAK,CAACoD,SAAS,EAC3BnD,KAAK,CAACyC,oBAAoB,EAAE,IAAI,EAAEgB,mBAAmB,CAAC,EACtDzD,KAAK,CAAC0C,WAAW,EAAE,GAAG,EAAEW,oBAAoB,CAACK,EAAE,CAAC,EAChDzD,OAAO,CAACyC,WAAW,EAAE,KAAK,CAAC,EAC3BxC,KAAK,CAACoD,SAAS,CACnB,CAAC;QACL,CAAC,MAAM;UACHC,UAAU,GAAGxD,KAAK,CAACoD,SAAS,EACxBlD,OAAO,CAACwC,oBAAoB,EAAE,KAAK,CAAC,EACpCvC,KAAK,CAACoD,SAAS,CACnB,CAAC;QACL;QAEA,IAAIK,UAAU,GAAG,IAAI;QACrB,IAAIC,OAA2C,GAAG,EAAE;QACpD,OAAOD,UAAU,EAAE;UACf;AACpB;AACA;AACA;AACA;AACA;UACoB,MAAMnD,oBAAoB,CAAC6B,OAAO,CAACZ,SAAS,CAACoC,QAAQ,CAAC;UACtD,MAAMxD,cAAc,CAACgC,OAAO,CAACZ,SAAS,CAACoC,QAAQ,EAAE,MAAOC,GAAG,IAAK;YAC5DF,OAAO,GAAG,EAAE;YACZ,IAAM,CACFG,gBAAgB,EAChBC,mBAAmB,CACtB,GAAG,MAAMC,OAAO,CAACC,GAAG,CAAC,CAClB/D,OAAO,CAACoD,UAAU,CAAC,EACnBC,aAAa,GAAGrD,OAAO,CAACqD,aAAa,CAAC,GAAGN,SAAS,CACrD,CAAC;YAEF,IACIa,gBAAgB,CAACI,QAAQ,CAACC,gBAAgB,IACzCZ,aAAa,IAAI/D,cAAc,CAACuE,mBAAmB,CAAC,CAACG,QAAQ,CAACC,gBAAiB,EAClF;cACE;YACJ,CAAC,MAAM;cACHT,UAAU,GAAG,KAAK;cAElB,IAAIH,aAAa,EAAE;gBACfI,OAAO,GAAGnE,cAAc,CAACuE,mBAAmB,CAAC,CAACK,IAAW;cAC7D;cACA,IAAMC,aAAa,GAAGhB,SAAS,GAAGM,OAAO,CAACW,MAAM;cAChD,IAAID,aAAa,GAAG,CAAC,EAAE;gBACnB,IAAME,cAAc,GAAGT,gBAAgB,CAACM,IAAI,CAACI,KAAK,CAAC,CAAC,EAAEH,aAAa,CAAC,CAACrB,MAAM,CAACyB,CAAC,IAAI,CAAC,CAACA,CAAC,CAAC;gBACr
FnF,aAAa,CAACqE,OAAO,EAAEY,cAAc,CAAC;cAC1C;YACJ;UACJ,CAAC,CAAC;QACN;QAEA,IAAIZ,OAAO,CAACW,MAAM,KAAK,CAAC,EAAE;UACtB,OAAO;YACHI,UAAU,EAAEtB,oBAAoB,IAAI,IAAI;YACxCuB,SAAS,EAAE;UACf,CAAC;QACL;QACA,IAAMC,OAAO,GAAGpF,cAAc,CAACG,WAAW,CAACgE,OAAO,CAAC,CAAC;QACpD,IAAMgB,SAAmC,GAAGhB,OAAO,CAC9CkB,GAAG,CAACC,GAAG,IAAI9D,qBAAqB,CAC7BwB,oBAAoB,EACpBC,WAAW,EACXqC,GACJ,CAAC,CAAC;QACN,IAAMC,aAAsC,GAAG;UAC3CtB,EAAE,EAAEmB,OAAO,CAACnB,EAAE;UACdnD,eAAe,EAAEa,0BAA0B,CAACqB,oBAAoB,EAAEoC,OAAO,CAACI,IAAI,CAAC,CAAC;QACpF,CAAC;QACD,IAAMC,GAAG,GAAG;UACRN,SAAS,EAAEA,SAAS;UACpBD,UAAU,EAAEK;QAChB,CAAC;QACD,OAAOE,GAAG;MACd,CAAC;MACD5B,SAAS,EAAE7D,cAAc,CAAC4C,OAAO,CAACT,IAAI,CAAC,CAAC0B,SAAS;MACjD6B,QAAQ,EAAE1F,cAAc,CAAC4C,OAAO,CAACT,IAAI,CAAC,CAACuD,QAAQ;MAC/CC,OAAO,EAAE9C,WAAW,CAAC+C,YAAY,CAAC;IACtC,CAAC;EACL;EAEA,IAAIC,yBAAwE;EAC5E,IAAIjD,OAAO,CAACR,IAAI,EAAE;IACd,IAAM0D,UAAU,GAAGlD,OAAO,CAACR,IAAI,EAAEoB,MAAM;IACvCqC,yBAAyB,GAAG;MACxB,MAAMlC,OAAOA,CACToC,IAAgD,EAClD;QACE,IAAID,UAAU,KAAKrC,SAAS,EAAE;UAC1BsC,IAAI,GAAG,MAAMhG,WAAW,CAACgG,IAAI,EAAGT,GAAG,IAAKQ,UAAU,CAACR,GAAG,CAACU,gBAAgB,CAAC,CAAC;QAC7E;QAEA,IAAMC,aAA6D,GAAG,CAAC,CAAC;QACxE,IAAMC,MAAgB,GAAGH,IAAI,CAACV,GAAG,CAACC,GAAG,IAAI;UACrC,IAAMa,KAAK,GAAIb,GAAG,CAACU,gBAAgB,CAAS/C,WAAW,CAAC;UACxDgD,aAAa,CAACE,KAAK,CAAC,GAAGb,GAAG;UAC1B,OAAOa,KAAK;QAChB,CAAC,CAAC;QACF,MAAMpF,oBAAoB,CAAC6B,OAAO,CAACZ,SAAS,CAACoC,QAAQ,CAAC;QACtD,IAAIgC,SAAmC,GAAG,EAAE;;QAE5C;AAChB;AACA;AACA;AACA;AACA;QACgB,MAAMxF,cAAc,CAACgC,OAAO,CAACZ,SAAS,CAACoC,QAAQ,EAAE,MAAOC,GAAG,IAAK;UAC5D+B,SAAS,GAAG,EAAE,CAAC,CAAC;UAChB;AACpB;AACA;;UAEoB,IAAMC,QAAQ,GAAIC,GAAa,IAAK;YAChC,OAAO5F,OAAO,CACVJ,KAAK,CACDsC,OAAO,CAACZ,SAAS,CAACE,UAAU,EAC5B3B,KAAK,CAACS,UAAU,CAAC,CAAC,EAAE,IAAI,EAAEsF,GAAG,CACjC,CACJ,CAAC;UACL,CAAC;UAED,IAAMC,cAAc,GAAG,MAAM9E,eAAe,CAAYyE,MAAM,EAAEG,QAAQ,CAAC;UAEzE,IAAMG,YAA6B,GAAG,CAAC,CAAC;UACxCD,cAAc,CAACE,OAAO,CAACnB,GAAG,IAAI;YAC1B,IAAMoB,WAAW,GAAG7E,yBAAyB,CAACmB,oBAAoB,EAAEsC,GAAG,CAACE,IAAI,CAAC,CAAC,CAAC;YAC/E,IAAMW,KAAK,GAAGb,GAAG,CAACrB,EAAE;YACnBy
C,WAAW,CAASzD,WAAW,CAAC,GAAGkD,KAAK;YACzCK,YAAY,CAACL,KAAK,CAAC,GAAGO,WAAW;UACrC,CAAC,CAAC;;UAEF;AACpB;AACA;UACoB,IAAMC,KAAK,GAAG9F,UAAU,CAAC+B,OAAO,CAACZ,SAAS,CAACoC,QAAQ,CAAC;UACpD,IAAIwC,QAAQ,GAAG,KAAK;UACpB,MAAMpC,OAAO,CAACC,GAAG,CACboC,MAAM,CAACC,OAAO,CAACb,aAAa,CAAC,CAACZ,GAAG,CAAC,OAAO,CAACc,KAAK,EAAEY,QAAQ,CAAC,KAAK;YAC3D,IAAMC,OAA8B,GAAGR,YAAY,CAACL,KAAK,CAAC;YAE1D,IACIa,OAAO,KAEH,CAACD,QAAQ,CAACE,kBAAkB,IAC5B,CAAC,MAAM/E,UAAU,CAACgF,eAAe,CAAC;cAC9BlB,gBAAgB,EAAEgB,OAAc;cAChCG,eAAe,EAAEJ,QAAQ,CAACE;YAC9B,CAAC,EAAE,4BAA4B,CAAC,EAAEG,OAAO,KAAK,KAAK,CACtD,EACH;cACE;cACAhB,SAAS,CAAChE,IAAI,CAAC4E,OAAc,CAAC;YAClC,CAAC,MAAM;cACH;cACAJ,QAAQ,GAAG,IAAI;cACf,IAAMS,MAAM,GAAGhH,GAAG,CAACuC,OAAO,CAACZ,SAAS,CAACE,UAAU,EAAEiE,KAAK,CAAC;cACvD,IAAMmB,YAAY,GAAGpH,SAAS,CAAC6G,QAAQ,CAACf,gBAAgB,CAAC;cACxDsB,YAAY,CAAStE,oBAAoB,CAAC,GAAGlC,eAAe,CAAC,CAAC;cAC/D,IAAI,CAACkG,OAAO,EAAE;gBACV;gBACAL,KAAK,CAACY,GAAG,CAACF,MAAM,EAAEzF,eAAe,CAACqB,WAAW,EAAEqE,YAAY,CAAC,CAAC;cACjE,CAAC,MAAM;gBACH;gBACAX,KAAK,CAACa,MAAM,CAACH,MAAM,EAAEzF,eAAe,CAACqB,WAAW,EAAEqE,YAAY,CAAC,CAAC;cACpE;YACJ;UACJ,CAAC,CACL,CAAC;UAED,IAAIV,QAAQ,EAAE;YACV,MAAMD,KAAK,CAACc,MAAM,CAAC,CAAC;UACxB;QACJ,CAAC,CAAC;QACF,MAAM1G,oBAAoB,CAAC6B,OAAO,CAACZ,SAAS,CAACoC,QAAQ,CAAC;QACtD,OAAOgC,SAAS;MACpB,CAAC;MACDvC,SAAS,EAAEjB,OAAO,CAACR,IAAI,CAACyB,SAAS;MACjC6B,QAAQ,EAAE9C,OAAO,CAACR,IAAI,CAACsD;IAC3B,CAAC;EACL;EAGA,IAAMgC,gBAAgB,GAAG,IAAI5F,2BAA2B,CACpDc,OAAO,CAACZ,SAAS,EACjBY,OAAO,CAAC+E,qBAAqB,EAC7BzF,UAAU,EACVY,yBAAyB,EACzB+C,yBAAyB,EACzBjD,OAAO,CAACP,IAAI,EACZO,OAAO,CAACN,SAAS,EACjBM,OAAO,CAACL,SACZ,CAAC;;EAED;AACJ;AACA;EACI,IAAIK,OAAO,CAACP,IAAI,IAAIO,OAAO,CAACT,IAAI,EAAE;IAC9B,IAAMyF,WAAW,GAAGF,gBAAgB,CAACG,KAAK,CAACC,IAAI,CAACJ,gBAAgB,CAAC;IACjE,IAAMK,YAAY,GAAGL,gBAAgB,CAACM,MAAM,CAACF,IAAI,CAACJ,gBAAgB,CAAC;IACnEA,gBAAgB,CAACG,KAAK,GAAG,MAAM;MAC3B,IAAMI,eAAe,GAAG3H,KAAK,CACzBoD,SAAS,EACTlD,OAAO,CAACwC,oBAAoB,EAAE,MAAM,CAAC,EACrCvC,KAAK,CAAC,CAAC,CACX,CAAC;MACD,IAAMyH,WAAW,GAAGvH,UAAU,CAC1BsH,eAAe,EACdE,cAAc,IAAK;QACh
B;AACpB;AACA;AACA;AACA;QACoBT,gBAAgB,CAACU,MAAM,CAAC,CAAC;MAC7B,CAAC,EACAC,KAAK,IAAK;QACPX,gBAAgB,CAACY,QAAQ,CAACD,KAAK,CAACE,IAAI,CAChCjH,UAAU,CAAC,WAAW,EAAE;UAAE+G,KAAK,EAAEpI,gBAAgB,CAACoI,KAAK;QAAE,CAAC,CAC9D,CAAC;MACL,CACJ,CAAC;MACDX,gBAAgB,CAACM,MAAM,GAAG,MAAM;QAC5BE,WAAW,CAAC,CAAC;QACb,OAAOH,YAAY,CAAC,CAAC;MACzB,CAAC;MACD,OAAOH,WAAW,CAAC,CAAC;IACxB,CAAC;EACL;EAEAzG,4BAA4B,CAACyB,OAAO,CAACG,iBAAiB,EAAE2E,gBAAgB,CAAC;EAEzE,OAAOA,gBAAgB;AAC3B","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-graphql/graphql-schema-from-rx-schema.js b/dist/esm/plugins/replication-graphql/graphql-schema-from-rx-schema.js deleted file mode 100644 index 8a6e274107d..00000000000 --- a/dist/esm/plugins/replication-graphql/graphql-schema-from-rx-schema.js +++ /dev/null @@ -1,228 +0,0 @@ -import { getGraphqlSchemaFromJsonSchema } from 'get-graphql-from-jsonschema'; -import { fillWithDefaultSettings } from "../../rx-schema-helper.js"; -import { clone, ensureNotFalsy, flatClone, ucfirst } from "../../plugins/utils/index.js"; - -/** - * just type some common types - * to have better IDE autocomplete, - * all strings are allowed - */ - -// we use two spaces because get-graphql-from-jsonschema does also -export var SPACING = ' '; - -/** - * Create a GraphQL schema from a given RxJsonSchema - */ -export function graphQLSchemaFromRxSchema(input) { - var ret = { - asString: '', - queries: [], - mutations: [], - subscriptions: [], - inputs: [], - types: [] - }; - Object.entries(input).forEach(([collectionName, collectionSettings]) => { - collectionSettings = fillUpOptionals(collectionSettings); - var schema = collectionSettings.schema; - var prefixes = ensureNotFalsy(collectionSettings.prefixes); - var ucCollectionName = ucfirst(collectionName); - var collectionNameInput = ucfirst(collectionName) + 'Input'; - - // input - var inputSchema = stripKeysFromSchema(schema, ensureNotFalsy(collectionSettings.ignoreInputKeys)); - var inputGraphQL = 
getGraphqlSchemaFromJsonSchema({ - rootName: collectionNameInput, - schema: inputSchema, - direction: 'input' - }); - var pushRowGraphQL = getGraphqlSchemaFromJsonSchema({ - rootName: collectionNameInput + prefixes.pushRow, - schema: { - type: 'object', - properties: { - assumedMasterState: inputSchema, - newDocumentState: inputSchema - }, - required: ['newDocumentState'], - additionalProperties: false - }, - direction: 'input' - }); - var checkpointSchema = { - type: 'object', - properties: {}, - required: [], - additionalProperties: false - }; - collectionSettings.checkpointFields.forEach(key => { - var subSchema = schema.properties[key]; - checkpointSchema.properties[key] = subSchema; - checkpointSchema.required.push(key); - }); - var checkpointInputGraphQL = getGraphqlSchemaFromJsonSchema({ - rootName: collectionNameInput + prefixes.checkpoint, - schema: checkpointSchema, - direction: 'input' - }); - ret.inputs = ret.inputs.concat(inputGraphQL.typeDefinitions.map(str => replaceTopLevelTypeName(str, collectionNameInput))).concat(pushRowGraphQL.typeDefinitions.map(str => replaceTopLevelTypeName(str, collectionNameInput + prefixes.pushRow))).concat(checkpointInputGraphQL.typeDefinitions.map(str => replaceTopLevelTypeName(str, collectionNameInput + prefixes.checkpoint))); - var headersSchema = { - type: 'object', - additionalProperties: false, - properties: {}, - required: [] - }; - ensureNotFalsy(collectionSettings.headerFields).forEach(headerField => { - headersSchema.properties[headerField] = { - type: 'string' - }; - headersSchema.required.push(headerField); - }); - var headersInputName = collectionNameInput + prefixes.headers; - var headersInputGraphQL = getGraphqlSchemaFromJsonSchema({ - rootName: headersInputName, - schema: headersSchema, - direction: 'input' - }); - if (ensureNotFalsy(collectionSettings.headerFields).length > 0) { - ret.inputs = ret.inputs.concat(headersInputGraphQL.typeDefinitions.map(str => replaceTopLevelTypeName(str, 
headersInputName))); - } - - // output - var outputSchema = stripKeysFromSchema(schema, ensureNotFalsy(collectionSettings.ignoreOutputKeys)); - var outputGraphQL = getGraphqlSchemaFromJsonSchema({ - rootName: collectionName, - schema: outputSchema, - direction: 'output' - }); - var checkpointOutputGraphQL = getGraphqlSchemaFromJsonSchema({ - rootName: ucCollectionName + prefixes.checkpoint, - schema: checkpointSchema, - direction: 'output' - }); - var pullBulkOutputGraphQL = getGraphqlSchemaFromJsonSchema({ - rootName: ucCollectionName + prefixes.pullBulk, - schema: { - type: 'object', - properties: { - documents: { - type: 'array', - items: inputSchema - }, - checkpoint: checkpointSchema - }, - required: ['documents', 'checkpoint'], - additionalProperties: false - }, - direction: 'output' - }); - ret.types = ret.types.concat(outputGraphQL.typeDefinitions.map(str => replaceTopLevelTypeName(str, ucCollectionName))).concat(checkpointOutputGraphQL.typeDefinitions.map(str => replaceTopLevelTypeName(str, ucCollectionName + prefixes.checkpoint))).concat(pullBulkOutputGraphQL.typeDefinitions.map(str => replaceTopLevelTypeName(str, ucCollectionName + prefixes.pullBulk))); - - // query - var queryName = prefixes.pull + ucCollectionName; - var queryKeys = ['checkpoint: ' + collectionNameInput + prefixes.checkpoint, 'limit: Int!']; - var queryString = queryName + '(' + queryKeys.join(', ') + '): ' + ucCollectionName + prefixes.pullBulk + '!'; - ret.queries.push(SPACING + queryString); - - // mutation - var mutationName = prefixes.push + ucCollectionName; - var mutationString = mutationName + '(' + collectionName + prefixes.pushRow + ': [' + collectionNameInput + prefixes.pushRow + ']): [' + ucCollectionName + '!]!'; - ret.mutations.push(SPACING + mutationString); - - // subscription - var subscriptionHeaderInputString = ''; - if (collectionSettings.headerFields && collectionSettings.headerFields.length > 0) { - subscriptionHeaderInputString = '(headers: ' + headersInputName + 
')'; - } - var subscriptionName = prefixes.stream + ucCollectionName; - var subscriptionString = subscriptionName + subscriptionHeaderInputString + ': ' + ucCollectionName + prefixes.pullBulk + '!'; - ret.subscriptions.push(SPACING + subscriptionString); - }); - - // build full string - var fullQueryString = 'type Query {\n' + ret.queries.join('\n') + '\n}\n'; - var fullMutationString = 'type Mutation {\n' + ret.mutations.join('\n') + '\n}\n'; - var fullSubscriptionString = 'type Subscription {\n' + ret.subscriptions.join('\n') + '\n}\n'; - var fullTypeString = ret.types.join('\n'); - var fullInputString = ret.inputs.join('\n'); - var fullSchemaString = '' + 'schema {\n' + SPACING + 'query: Query\n' + SPACING + 'mutation: Mutation\n' + SPACING + 'subscription: Subscription\n' + '}\n'; - ret.asString = '' + fullQueryString + '\n' + fullMutationString + '\n' + fullSubscriptionString + '\n' + fullTypeString + '\n' + fullInputString + '\n' + fullSchemaString; - return ret; -} -export function fillUpOptionals(input) { - input = flatClone(input); - var schema = fillWithDefaultSettings(input.schema); - // strip internal attributes - Object.keys(schema.properties).forEach(key => { - if (key.startsWith('_')) { - delete schema.properties[key]; - } - }); - input.schema = schema; - - // add deleted field to schema - if (!input.deletedField) { - input.deletedField = '_deleted'; - } - schema.properties[input.deletedField] = { - type: 'boolean' - }; - schema.required.push(input.deletedField); - - // fill up prefixes - if (!input.prefixes) { - input.prefixes = {}; - } - var prefixes = input.prefixes; - if (!prefixes.push) { - prefixes.push = 'push'; - } - if (!prefixes.pushRow) { - prefixes.pushRow = 'PushRow'; - } - if (!prefixes.checkpoint) { - prefixes.checkpoint = 'Checkpoint'; - } - if (!prefixes.pull) { - prefixes.pull = 'pull'; - } - if (!prefixes.pullBulk) { - prefixes.pullBulk = 'PullBulk'; - } - if (!prefixes.stream) { - prefixes.stream = 'stream'; - } - if 
(!prefixes.headers) { - prefixes.headers = 'Headers'; - } - if (!input.headerFields) { - input.headerFields = []; - } - if (!input.withRevisions) { - input.withRevisions = false; - } - if (!input.ignoreInputKeys) { - input.ignoreInputKeys = []; - } - if (!input.ignoreOutputKeys) { - input.ignoreOutputKeys = []; - } - return input; -} -function stripKeysFromSchema(schema, strip) { - var cloned = clone(schema); - strip.forEach(key => { - delete cloned.properties[key]; - }); - return cloned; -} - -/** - * get-graphql-from-jsonschema add a T0-suffix - * that we do not want for the top level type - */ -function replaceTopLevelTypeName(str, ucCollectionName) { - return str.replace(' ' + ucCollectionName + 'T0 ', ' ' + ucCollectionName + ' '); -} -//# sourceMappingURL=graphql-schema-from-rx-schema.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication-graphql/graphql-schema-from-rx-schema.js.map b/dist/esm/plugins/replication-graphql/graphql-schema-from-rx-schema.js.map deleted file mode 100644 index 883a17e17ce..00000000000 --- a/dist/esm/plugins/replication-graphql/graphql-schema-from-rx-schema.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"graphql-schema-from-rx-schema.js","names":["getGraphqlSchemaFromJsonSchema","fillWithDefaultSettings","clone","ensureNotFalsy","flatClone","ucfirst","SPACING","graphQLSchemaFromRxSchema","input","ret","asString","queries","mutations","subscriptions","inputs","types","Object","entries","forEach","collectionName","collectionSettings","fillUpOptionals","schema","prefixes","ucCollectionName","collectionNameInput","inputSchema","stripKeysFromSchema","ignoreInputKeys","inputGraphQL","rootName","direction","pushRowGraphQL","pushRow","type","properties","assumedMasterState","newDocumentState","required","additionalProperties","checkpointSchema","checkpointFields","key","subSchema","push","checkpointInputGraphQL","checkpoint","concat","typeDefinitions","map","str","replaceTopLevelTypeName","headersSchema","headerFields","headerField","headersInputName","headers","headersInputGraphQL","length","outputSchema","ignoreOutputKeys","outputGraphQL","checkpointOutputGraphQL","pullBulkOutputGraphQL","pullBulk","documents","items","queryName","pull","queryKeys","queryString","join","mutationName","mutationString","subscriptionHeaderInputString","subscriptionName","stream","subscriptionString","fullQueryString","fullMutationString","fullSubscriptionString","fullTypeString","fullInputString","fullSchemaString","keys","startsWith","deletedField","withRevisions","strip","cloned","replace"],"sources":["../../../../src/plugins/replication-graphql/graphql-schema-from-rx-schema.ts"],"sourcesContent":["import { getGraphqlSchemaFromJsonSchema } from 'get-graphql-from-jsonschema';\n\nimport { fillWithDefaultSettings } from '../../rx-schema-helper.ts';\n\nimport type { RxJsonSchema } from '../../types/index.d.ts';\nimport { clone, ensureNotFalsy, flatClone, ucfirst } from '../../plugins/utils/index.ts';\n\nexport type Prefixes = {\n push?: string;\n pushRow?: string;\n checkpoint?: string;\n pull?: string;\n pullBulk?: string;\n stream?: string;\n headers?: 
string;\n};\n\n/**\n * just type some common types\n * to have better IDE autocomplete,\n * all strings are allowed\n */\nexport type GraphQLParamType = 'ID' | 'ID!' |\n 'String' | 'String!' |\n 'Int' | 'Int!' |\n 'Float' | 'Float!' |\n string;\n\nexport type GraphQLSchemaFromRxSchemaInputSingleCollection = {\n schema: RxJsonSchema;\n /**\n * These fields of the document data\n * will be used for the checkpoint.\n */\n checkpointFields: string[];\n ignoreInputKeys?: string[];\n ignoreOutputKeys?: string[];\n withRevisions?: boolean;\n prefixes?: Prefixes;\n headerFields?: string[];\n /**\n * Name of the boolean field that marks deleted documents.\n * [default='_deleted']\n */\n deletedField?: string;\n};\n\nexport type GraphQLSchemaFromRxSchemaInput = {\n [collectionName: string]: GraphQLSchemaFromRxSchemaInputSingleCollection;\n};\nexport type GraphQLSchemaFromRxSchemaOutput = {\n asString: string;\n queries: string[];\n mutations: string[];\n subscriptions: string[];\n inputs: string[];\n types: string[];\n};\n\n// we use two spaces because get-graphql-from-jsonschema does also\nexport const SPACING = ' ';\n\n/**\n * Create a GraphQL schema from a given RxJsonSchema\n */\nexport function graphQLSchemaFromRxSchema(\n input: GraphQLSchemaFromRxSchemaInput\n): GraphQLSchemaFromRxSchemaOutput {\n const ret: GraphQLSchemaFromRxSchemaOutput = {\n asString: '',\n queries: [],\n mutations: [],\n subscriptions: [],\n inputs: [],\n types: []\n };\n\n Object.entries(input).forEach(([collectionName, collectionSettings]) => {\n collectionSettings = fillUpOptionals(collectionSettings);\n\n const schema = collectionSettings.schema;\n const prefixes: Prefixes = ensureNotFalsy(collectionSettings.prefixes);\n const ucCollectionName = ucfirst(collectionName);\n const collectionNameInput = ucfirst(collectionName) + 'Input';\n\n // input\n const inputSchema = stripKeysFromSchema(schema, ensureNotFalsy(collectionSettings.ignoreInputKeys));\n\n const inputGraphQL = 
getGraphqlSchemaFromJsonSchema({\n rootName: collectionNameInput,\n schema: inputSchema as any,\n direction: 'input'\n });\n const pushRowGraphQL = getGraphqlSchemaFromJsonSchema({\n rootName: collectionNameInput + prefixes.pushRow,\n schema: {\n type: 'object',\n properties: {\n assumedMasterState: inputSchema as any,\n newDocumentState: inputSchema as any\n },\n required: ['newDocumentState'],\n additionalProperties: false\n },\n direction: 'input'\n });\n\n const checkpointSchema = {\n type: 'object',\n properties: {},\n required: [],\n additionalProperties: false\n } as any;\n collectionSettings.checkpointFields.forEach(key => {\n const subSchema: any = schema.properties[key];\n checkpointSchema.properties[key] = subSchema;\n checkpointSchema.required.push(key);\n });\n const checkpointInputGraphQL = getGraphqlSchemaFromJsonSchema({\n rootName: collectionNameInput + prefixes.checkpoint,\n schema: checkpointSchema as any,\n direction: 'input'\n });\n\n ret.inputs = ret.inputs.concat(\n inputGraphQL\n .typeDefinitions\n .map(str => replaceTopLevelTypeName(str, collectionNameInput))\n ).concat(\n pushRowGraphQL\n .typeDefinitions\n .map(str => replaceTopLevelTypeName(str, collectionNameInput + prefixes.pushRow))\n ).concat(\n checkpointInputGraphQL\n .typeDefinitions\n .map(str => replaceTopLevelTypeName(str, collectionNameInput + prefixes.checkpoint))\n );\n\n const headersSchema: any = {\n type: 'object',\n additionalProperties: false,\n properties: {},\n required: []\n };\n ensureNotFalsy(collectionSettings.headerFields).forEach(headerField => {\n headersSchema.properties[headerField] = {\n type: 'string'\n };\n headersSchema.required.push(headerField);\n });\n const headersInputName = collectionNameInput + prefixes.headers;\n const headersInputGraphQL = getGraphqlSchemaFromJsonSchema({\n rootName: headersInputName,\n schema: headersSchema,\n direction: 'input'\n });\n if (ensureNotFalsy(collectionSettings.headerFields).length > 0) {\n ret.inputs = 
ret.inputs.concat(\n headersInputGraphQL\n .typeDefinitions\n .map(str => replaceTopLevelTypeName(str, headersInputName))\n );\n }\n\n // output\n const outputSchema = stripKeysFromSchema(schema, ensureNotFalsy(collectionSettings.ignoreOutputKeys));\n const outputGraphQL = getGraphqlSchemaFromJsonSchema({\n rootName: collectionName,\n schema: outputSchema as any,\n direction: 'output'\n });\n const checkpointOutputGraphQL = getGraphqlSchemaFromJsonSchema({\n rootName: ucCollectionName + prefixes.checkpoint,\n schema: checkpointSchema as any,\n direction: 'output'\n });\n const pullBulkOutputGraphQL = getGraphqlSchemaFromJsonSchema({\n rootName: ucCollectionName + prefixes.pullBulk,\n schema: {\n type: 'object',\n properties: {\n documents: {\n type: 'array',\n items: inputSchema as any\n },\n checkpoint: checkpointSchema as any\n },\n required: ['documents', 'checkpoint'],\n additionalProperties: false\n },\n direction: 'output'\n });\n ret.types = ret.types.concat(\n outputGraphQL.typeDefinitions\n .map(str => replaceTopLevelTypeName(str, ucCollectionName))\n ).concat(\n checkpointOutputGraphQL.typeDefinitions\n .map(str => replaceTopLevelTypeName(str, ucCollectionName + prefixes.checkpoint))\n ).concat(\n pullBulkOutputGraphQL.typeDefinitions\n .map(str => replaceTopLevelTypeName(str, ucCollectionName + prefixes.pullBulk))\n );\n\n // query\n const queryName = prefixes.pull + ucCollectionName;\n const queryKeys = [\n 'checkpoint: ' + collectionNameInput + prefixes.checkpoint,\n 'limit: Int!'\n ];\n const queryString = queryName + '(' + queryKeys.join(', ') + '): ' + ucCollectionName + prefixes.pullBulk + '!';\n ret.queries.push(SPACING + queryString);\n\n // mutation\n const mutationName = prefixes.push + ucCollectionName;\n const mutationString = mutationName + '(' + collectionName + prefixes.pushRow + ': [' + collectionNameInput + prefixes.pushRow + ']): [' + ucCollectionName + '!]!';\n ret.mutations.push(SPACING + mutationString);\n\n // subscription\n let 
subscriptionHeaderInputString = '';\n if (collectionSettings.headerFields && collectionSettings.headerFields.length > 0) {\n subscriptionHeaderInputString = '(headers: ' + headersInputName + ')';\n }\n const subscriptionName = prefixes.stream + ucCollectionName;\n const subscriptionString = subscriptionName + subscriptionHeaderInputString + ': ' + ucCollectionName + prefixes.pullBulk + '!';\n ret.subscriptions.push(SPACING + subscriptionString);\n });\n\n // build full string\n const fullQueryString = 'type Query {\\n' + ret.queries.join('\\n') + '\\n}\\n';\n const fullMutationString = 'type Mutation {\\n' + ret.mutations.join('\\n') + '\\n}\\n';\n const fullSubscriptionString = 'type Subscription {\\n' + ret.subscriptions.join('\\n') + '\\n}\\n';\n\n const fullTypeString = ret.types.join('\\n');\n const fullInputString = ret.inputs.join('\\n');\n\n const fullSchemaString = '' +\n 'schema {\\n' +\n SPACING + 'query: Query\\n' +\n SPACING + 'mutation: Mutation\\n' +\n SPACING + 'subscription: Subscription\\n' +\n '}\\n';\n\n ret.asString = '' +\n fullQueryString + '\\n' +\n fullMutationString + '\\n' +\n fullSubscriptionString + '\\n' +\n fullTypeString + '\\n' +\n fullInputString + '\\n' +\n fullSchemaString;\n\n return ret;\n}\n\n\nexport function fillUpOptionals(\n input: GraphQLSchemaFromRxSchemaInputSingleCollection\n): GraphQLSchemaFromRxSchemaInputSingleCollection {\n input = flatClone(input);\n\n const schema = fillWithDefaultSettings(input.schema);\n // strip internal attributes\n Object.keys(schema.properties).forEach(key => {\n if (key.startsWith('_')) {\n delete schema.properties[key];\n }\n });\n input.schema = schema;\n\n // add deleted field to schema\n if (!input.deletedField) {\n input.deletedField = '_deleted';\n }\n schema.properties[input.deletedField] = {\n type: 'boolean'\n };\n (schema.required as string[]).push(input.deletedField);\n\n // fill up prefixes\n if (!input.prefixes) {\n input.prefixes = {} as any;\n }\n const prefixes: Prefixes = 
input.prefixes as any;\n if (!prefixes.push) {\n prefixes.push = 'push';\n }\n if (!prefixes.pushRow) {\n prefixes.pushRow = 'PushRow';\n }\n if (!prefixes.checkpoint) {\n prefixes.checkpoint = 'Checkpoint';\n }\n if (!prefixes.pull) {\n prefixes.pull = 'pull';\n }\n if (!prefixes.pullBulk) {\n prefixes.pullBulk = 'PullBulk';\n }\n if (!prefixes.stream) {\n prefixes.stream = 'stream';\n }\n if (!prefixes.headers) {\n prefixes.headers = 'Headers';\n }\n if (!input.headerFields) {\n input.headerFields = [];\n }\n\n\n if (!input.withRevisions) {\n input.withRevisions = false;\n }\n\n if (!input.ignoreInputKeys) {\n input.ignoreInputKeys = [];\n }\n if (!input.ignoreOutputKeys) {\n input.ignoreOutputKeys = [];\n }\n\n return input;\n}\n\nfunction stripKeysFromSchema(schema: RxJsonSchema, strip: string[]): RxJsonSchema> {\n const cloned: any = clone(schema);\n strip.forEach(key => {\n delete cloned.properties[key];\n });\n return cloned;\n}\n\n/**\n * get-graphql-from-jsonschema add a T0-suffix\n * that we do not want for the top level type\n */\nfunction replaceTopLevelTypeName(str: string, ucCollectionName: string): string {\n return str.replace(' ' + ucCollectionName + 'T0 ', ' ' + ucCollectionName + ' 
');\n}\n"],"mappings":"AAAA,SAASA,8BAA8B,QAAQ,6BAA6B;AAE5E,SAASC,uBAAuB,QAAQ,2BAA2B;AAGnE,SAASC,KAAK,EAAEC,cAAc,EAAEC,SAAS,EAAEC,OAAO,QAAQ,8BAA8B;;AAYxF;AACA;AACA;AACA;AACA;;AAsCA;AACA,OAAO,IAAMC,OAAO,GAAG,IAAI;;AAE3B;AACA;AACA;AACA,OAAO,SAASC,yBAAyBA,CACrCC,KAAqC,EACN;EAC/B,IAAMC,GAAoC,GAAG;IACzCC,QAAQ,EAAE,EAAE;IACZC,OAAO,EAAE,EAAE;IACXC,SAAS,EAAE,EAAE;IACbC,aAAa,EAAE,EAAE;IACjBC,MAAM,EAAE,EAAE;IACVC,KAAK,EAAE;EACX,CAAC;EAEDC,MAAM,CAACC,OAAO,CAACT,KAAK,CAAC,CAACU,OAAO,CAAC,CAAC,CAACC,cAAc,EAAEC,kBAAkB,CAAC,KAAK;IACpEA,kBAAkB,GAAGC,eAAe,CAACD,kBAAkB,CAAC;IAExD,IAAME,MAAM,GAAGF,kBAAkB,CAACE,MAAM;IACxC,IAAMC,QAAkB,GAAGpB,cAAc,CAACiB,kBAAkB,CAACG,QAAQ,CAAC;IACtE,IAAMC,gBAAgB,GAAGnB,OAAO,CAACc,cAAc,CAAC;IAChD,IAAMM,mBAAmB,GAAGpB,OAAO,CAACc,cAAc,CAAC,GAAG,OAAO;;IAE7D;IACA,IAAMO,WAAW,GAAGC,mBAAmB,CAACL,MAAM,EAAEnB,cAAc,CAACiB,kBAAkB,CAACQ,eAAe,CAAC,CAAC;IAEnG,IAAMC,YAAY,GAAG7B,8BAA8B,CAAC;MAChD8B,QAAQ,EAAEL,mBAAmB;MAC7BH,MAAM,EAAEI,WAAkB;MAC1BK,SAAS,EAAE;IACf,CAAC,CAAC;IACF,IAAMC,cAAc,GAAGhC,8BAA8B,CAAC;MAClD8B,QAAQ,EAAEL,mBAAmB,GAAGF,QAAQ,CAACU,OAAO;MAChDX,MAAM,EAAE;QACJY,IAAI,EAAE,QAAQ;QACdC,UAAU,EAAE;UACRC,kBAAkB,EAAEV,WAAkB;UACtCW,gBAAgB,EAAEX;QACtB,CAAC;QACDY,QAAQ,EAAE,CAAC,kBAAkB,CAAC;QAC9BC,oBAAoB,EAAE;MAC1B,CAAC;MACDR,SAAS,EAAE;IACf,CAAC,CAAC;IAEF,IAAMS,gBAAgB,GAAG;MACrBN,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE,CAAC,CAAC;MACdG,QAAQ,EAAE,EAAE;MACZC,oBAAoB,EAAE;IAC1B,CAAQ;IACRnB,kBAAkB,CAACqB,gBAAgB,CAACvB,OAAO,CAACwB,GAAG,IAAI;MAC/C,IAAMC,SAAc,GAAGrB,MAAM,CAACa,UAAU,CAACO,GAAG,CAAC;MAC7CF,gBAAgB,CAACL,UAAU,CAACO,GAAG,CAAC,GAAGC,SAAS;MAC5CH,gBAAgB,CAACF,QAAQ,CAACM,IAAI,CAACF,GAAG,CAAC;IACvC,CAAC,CAAC;IACF,IAAMG,sBAAsB,GAAG7C,8BAA8B,CAAC;MAC1D8B,QAAQ,EAAEL,mBAAmB,GAAGF,QAAQ,CAACuB,UAAU;MACnDxB,MAAM,EAAEkB,gBAAuB;MAC/BT,SAAS,EAAE;IACf,CAAC,CAAC;IAEFtB,GAAG,CAACK,MAAM,GAAGL,GAAG,CAACK,MAAM,CAACiC,MAAM,CAC1BlB,YAAY,CACPmB,eAAe,CACfC,GAAG,CAACC,GAAG,IAAIC,uBAAuB,CAACD,GAAG,EAAEzB,mBAAmB,CAAC,CACrE,CAAC,CAACsB,MAAM,CACJf,cAAc,CACTgB,eAAe,CACfC,GAAG,CAACC,GAAG,IAAIC,uBAAuB,CAACD,G
AAG,EAAEzB,mBAAmB,GAAGF,QAAQ,CAACU,OAAO,CAAC,CACxF,CAAC,CAACc,MAAM,CACJF,sBAAsB,CACjBG,eAAe,CACfC,GAAG,CAACC,GAAG,IAAIC,uBAAuB,CAACD,GAAG,EAAEzB,mBAAmB,GAAGF,QAAQ,CAACuB,UAAU,CAAC,CAC3F,CAAC;IAED,IAAMM,aAAkB,GAAG;MACvBlB,IAAI,EAAE,QAAQ;MACdK,oBAAoB,EAAE,KAAK;MAC3BJ,UAAU,EAAE,CAAC,CAAC;MACdG,QAAQ,EAAE;IACd,CAAC;IACDnC,cAAc,CAACiB,kBAAkB,CAACiC,YAAY,CAAC,CAACnC,OAAO,CAACoC,WAAW,IAAI;MACnEF,aAAa,CAACjB,UAAU,CAACmB,WAAW,CAAC,GAAG;QACpCpB,IAAI,EAAE;MACV,CAAC;MACDkB,aAAa,CAACd,QAAQ,CAACM,IAAI,CAACU,WAAW,CAAC;IAC5C,CAAC,CAAC;IACF,IAAMC,gBAAgB,GAAG9B,mBAAmB,GAAGF,QAAQ,CAACiC,OAAO;IAC/D,IAAMC,mBAAmB,GAAGzD,8BAA8B,CAAC;MACvD8B,QAAQ,EAAEyB,gBAAgB;MAC1BjC,MAAM,EAAE8B,aAAa;MACrBrB,SAAS,EAAE;IACf,CAAC,CAAC;IACF,IAAI5B,cAAc,CAACiB,kBAAkB,CAACiC,YAAY,CAAC,CAACK,MAAM,GAAG,CAAC,EAAE;MAC5DjD,GAAG,CAACK,MAAM,GAAGL,GAAG,CAACK,MAAM,CAACiC,MAAM,CAC1BU,mBAAmB,CACdT,eAAe,CACfC,GAAG,CAACC,GAAG,IAAIC,uBAAuB,CAACD,GAAG,EAAEK,gBAAgB,CAAC,CAClE,CAAC;IACL;;IAEA;IACA,IAAMI,YAAY,GAAGhC,mBAAmB,CAACL,MAAM,EAAEnB,cAAc,CAACiB,kBAAkB,CAACwC,gBAAgB,CAAC,CAAC;IACrG,IAAMC,aAAa,GAAG7D,8BAA8B,CAAC;MACjD8B,QAAQ,EAAEX,cAAc;MACxBG,MAAM,EAAEqC,YAAmB;MAC3B5B,SAAS,EAAE;IACf,CAAC,CAAC;IACF,IAAM+B,uBAAuB,GAAG9D,8BAA8B,CAAC;MAC3D8B,QAAQ,EAAEN,gBAAgB,GAAGD,QAAQ,CAACuB,UAAU;MAChDxB,MAAM,EAAEkB,gBAAuB;MAC/BT,SAAS,EAAE;IACf,CAAC,CAAC;IACF,IAAMgC,qBAAqB,GAAG/D,8BAA8B,CAAC;MACzD8B,QAAQ,EAAEN,gBAAgB,GAAGD,QAAQ,CAACyC,QAAQ;MAC9C1C,MAAM,EAAE;QACJY,IAAI,EAAE,QAAQ;QACdC,UAAU,EAAE;UACR8B,SAAS,EAAE;YACP/B,IAAI,EAAE,OAAO;YACbgC,KAAK,EAAExC;UACX,CAAC;UACDoB,UAAU,EAAEN;QAChB,CAAC;QACDF,QAAQ,EAAE,CAAC,WAAW,EAAE,YAAY,CAAC;QACrCC,oBAAoB,EAAE;MAC1B,CAAC;MACDR,SAAS,EAAE;IACf,CAAC,CAAC;IACFtB,GAAG,CAACM,KAAK,GAAGN,GAAG,CAACM,KAAK,CAACgC,MAAM,CACxBc,aAAa,CAACb,eAAe,CACxBC,GAAG,CAACC,GAAG,IAAIC,uBAAuB,CAACD,GAAG,EAAE1B,gBAAgB,CAAC,CAClE,CAAC,CAACuB,MAAM,CACJe,uBAAuB,CAACd,eAAe,CAClCC,GAAG,CAACC,GAAG,IAAIC,uBAAuB,CAACD,GAAG,EAAE1B,gBAAgB,GAAGD,QAAQ,CAACuB,UAAU,CAAC,CACxF,CAAC,CAACC,MAAM,CACJgB,qBAAqB,CAACf,eAAe,CAChCC,GAAG,CAACC,GAAG,IAAIC,u
BAAuB,CAACD,GAAG,EAAE1B,gBAAgB,GAAGD,QAAQ,CAACyC,QAAQ,CAAC,CACtF,CAAC;;IAED;IACA,IAAMG,SAAS,GAAG5C,QAAQ,CAAC6C,IAAI,GAAG5C,gBAAgB;IAClD,IAAM6C,SAAS,GAAG,CACd,cAAc,GAAG5C,mBAAmB,GAAGF,QAAQ,CAACuB,UAAU,EAC1D,aAAa,CAChB;IACD,IAAMwB,WAAW,GAAGH,SAAS,GAAG,GAAG,GAAGE,SAAS,CAACE,IAAI,CAAC,IAAI,CAAC,GAAG,KAAK,GAAG/C,gBAAgB,GAAGD,QAAQ,CAACyC,QAAQ,GAAG,GAAG;IAC/GvD,GAAG,CAACE,OAAO,CAACiC,IAAI,CAACtC,OAAO,GAAGgE,WAAW,CAAC;;IAEvC;IACA,IAAME,YAAY,GAAGjD,QAAQ,CAACqB,IAAI,GAAGpB,gBAAgB;IACrD,IAAMiD,cAAc,GAAGD,YAAY,GAAG,GAAG,GAAGrD,cAAc,GAAGI,QAAQ,CAACU,OAAO,GAAG,KAAK,GAAGR,mBAAmB,GAAGF,QAAQ,CAACU,OAAO,GAAG,OAAO,GAAGT,gBAAgB,GAAG,KAAK;IACnKf,GAAG,CAACG,SAAS,CAACgC,IAAI,CAACtC,OAAO,GAAGmE,cAAc,CAAC;;IAE5C;IACA,IAAIC,6BAA6B,GAAG,EAAE;IACtC,IAAItD,kBAAkB,CAACiC,YAAY,IAAIjC,kBAAkB,CAACiC,YAAY,CAACK,MAAM,GAAG,CAAC,EAAE;MAC/EgB,6BAA6B,GAAG,YAAY,GAAGnB,gBAAgB,GAAG,GAAG;IACzE;IACA,IAAMoB,gBAAgB,GAAGpD,QAAQ,CAACqD,MAAM,GAAGpD,gBAAgB;IAC3D,IAAMqD,kBAAkB,GAAGF,gBAAgB,GAAGD,6BAA6B,GAAG,IAAI,GAAGlD,gBAAgB,GAAGD,QAAQ,CAACyC,QAAQ,GAAG,GAAG;IAC/HvD,GAAG,CAACI,aAAa,CAAC+B,IAAI,CAACtC,OAAO,GAAGuE,kBAAkB,CAAC;EACxD,CAAC,CAAC;;EAEF;EACA,IAAMC,eAAe,GAAG,gBAAgB,GAAGrE,GAAG,CAACE,OAAO,CAAC4D,IAAI,CAAC,IAAI,CAAC,GAAG,OAAO;EAC3E,IAAMQ,kBAAkB,GAAG,mBAAmB,GAAGtE,GAAG,CAACG,SAAS,CAAC2D,IAAI,CAAC,IAAI,CAAC,GAAG,OAAO;EACnF,IAAMS,sBAAsB,GAAG,uBAAuB,GAAGvE,GAAG,CAACI,aAAa,CAAC0D,IAAI,CAAC,IAAI,CAAC,GAAG,OAAO;EAE/F,IAAMU,cAAc,GAAGxE,GAAG,CAACM,KAAK,CAACwD,IAAI,CAAC,IAAI,CAAC;EAC3C,IAAMW,eAAe,GAAGzE,GAAG,CAACK,MAAM,CAACyD,IAAI,CAAC,IAAI,CAAC;EAE7C,IAAMY,gBAAgB,GAAG,EAAE,GACvB,YAAY,GACZ7E,OAAO,GAAG,gBAAgB,GAC1BA,OAAO,GAAG,sBAAsB,GAChCA,OAAO,GAAG,8BAA8B,GACxC,KAAK;EAETG,GAAG,CAACC,QAAQ,GAAG,EAAE,GACboE,eAAe,GAAG,IAAI,GACtBC,kBAAkB,GAAG,IAAI,GACzBC,sBAAsB,GAAG,IAAI,GAC7BC,cAAc,GAAG,IAAI,GACrBC,eAAe,GAAG,IAAI,GACtBC,gBAAgB;EAEpB,OAAO1E,GAAG;AACd;AAGA,OAAO,SAASY,eAAeA,CAC3Bb,KAAqD,EACP;EAC9CA,KAAK,GAAGJ,SAAS,CAACI,KAAK,CAAC;EAExB,IAAMc,MAAM,GAAGrB,uBAAuB,CAACO,KAAK,CAACc,MAAM,CAAC;EACpD;EACAN,MAAM,CAACoE,IAAI,CAAC9D,MAAM
,CAACa,UAAU,CAAC,CAACjB,OAAO,CAACwB,GAAG,IAAI;IAC1C,IAAIA,GAAG,CAAC2C,UAAU,CAAC,GAAG,CAAC,EAAE;MACrB,OAAO/D,MAAM,CAACa,UAAU,CAACO,GAAG,CAAC;IACjC;EACJ,CAAC,CAAC;EACFlC,KAAK,CAACc,MAAM,GAAGA,MAAM;;EAErB;EACA,IAAI,CAACd,KAAK,CAAC8E,YAAY,EAAE;IACrB9E,KAAK,CAAC8E,YAAY,GAAG,UAAU;EACnC;EACAhE,MAAM,CAACa,UAAU,CAAC3B,KAAK,CAAC8E,YAAY,CAAC,GAAG;IACpCpD,IAAI,EAAE;EACV,CAAC;EACAZ,MAAM,CAACgB,QAAQ,CAAcM,IAAI,CAACpC,KAAK,CAAC8E,YAAY,CAAC;;EAEtD;EACA,IAAI,CAAC9E,KAAK,CAACe,QAAQ,EAAE;IACjBf,KAAK,CAACe,QAAQ,GAAG,CAAC,CAAQ;EAC9B;EACA,IAAMA,QAAkB,GAAGf,KAAK,CAACe,QAAe;EAChD,IAAI,CAACA,QAAQ,CAACqB,IAAI,EAAE;IAChBrB,QAAQ,CAACqB,IAAI,GAAG,MAAM;EAC1B;EACA,IAAI,CAACrB,QAAQ,CAACU,OAAO,EAAE;IACnBV,QAAQ,CAACU,OAAO,GAAG,SAAS;EAChC;EACA,IAAI,CAACV,QAAQ,CAACuB,UAAU,EAAE;IACtBvB,QAAQ,CAACuB,UAAU,GAAG,YAAY;EACtC;EACA,IAAI,CAACvB,QAAQ,CAAC6C,IAAI,EAAE;IAChB7C,QAAQ,CAAC6C,IAAI,GAAG,MAAM;EAC1B;EACA,IAAI,CAAC7C,QAAQ,CAACyC,QAAQ,EAAE;IACpBzC,QAAQ,CAACyC,QAAQ,GAAG,UAAU;EAClC;EACA,IAAI,CAACzC,QAAQ,CAACqD,MAAM,EAAE;IAClBrD,QAAQ,CAACqD,MAAM,GAAG,QAAQ;EAC9B;EACA,IAAI,CAACrD,QAAQ,CAACiC,OAAO,EAAE;IACnBjC,QAAQ,CAACiC,OAAO,GAAG,SAAS;EAChC;EACA,IAAI,CAAChD,KAAK,CAAC6C,YAAY,EAAE;IACrB7C,KAAK,CAAC6C,YAAY,GAAG,EAAE;EAC3B;EAGA,IAAI,CAAC7C,KAAK,CAAC+E,aAAa,EAAE;IACtB/E,KAAK,CAAC+E,aAAa,GAAG,KAAK;EAC/B;EAEA,IAAI,CAAC/E,KAAK,CAACoB,eAAe,EAAE;IACxBpB,KAAK,CAACoB,eAAe,GAAG,EAAE;EAC9B;EACA,IAAI,CAACpB,KAAK,CAACoD,gBAAgB,EAAE;IACzBpD,KAAK,CAACoD,gBAAgB,GAAG,EAAE;EAC/B;EAEA,OAAOpD,KAAK;AAChB;AAEA,SAASmB,mBAAmBA,CAAIL,MAAuB,EAAEkE,KAAe,EAA4B;EAChG,IAAMC,MAAW,GAAGvF,KAAK,CAACoB,MAAM,CAAC;EACjCkE,KAAK,CAACtE,OAAO,CAACwB,GAAG,IAAI;IACjB,OAAO+C,MAAM,CAACtD,UAAU,CAACO,GAAG,CAAC;EACjC,CAAC,CAAC;EACF,OAAO+C,MAAM;AACjB;;AAEA;AACA;AACA;AACA;AACA,SAAStC,uBAAuBA,CAACD,GAAW,EAAE1B,gBAAwB,EAAU;EAC5E,OAAO0B,GAAG,CAACwC,OAAO,CAAC,GAAG,GAAGlE,gBAAgB,GAAG,KAAK,EAAE,GAAG,GAAGA,gBAAgB,GAAG,GAAG,CAAC;AACpF","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-graphql/graphql-websocket.js 
b/dist/esm/plugins/replication-graphql/graphql-websocket.js deleted file mode 100644 index de07736444e..00000000000 --- a/dist/esm/plugins/replication-graphql/graphql-websocket.js +++ /dev/null @@ -1,36 +0,0 @@ -import { createClient } from 'graphql-ws'; -import { getFromMapOrCreate, getFromMapOrThrow } from "../../plugins/utils/index.js"; -import ws from 'isomorphic-ws'; -var { - WebSocket: IsomorphicWebSocket -} = ws; -export var GRAPHQL_WEBSOCKET_BY_URL = new Map(); -export function getGraphQLWebSocket(url, headers) { - var has = getFromMapOrCreate(GRAPHQL_WEBSOCKET_BY_URL, url, () => { - var wsClient = createClient({ - url, - shouldRetry: () => true, - webSocketImpl: IsomorphicWebSocket, - connectionParams: headers ? { - headers - } : undefined - }); - return { - url, - socket: wsClient, - refCount: 1 - }; - }, value => { - value.refCount = value.refCount + 1; - }); - return has.socket; -} -export function removeGraphQLWebSocketRef(url) { - var obj = getFromMapOrThrow(GRAPHQL_WEBSOCKET_BY_URL, url); - obj.refCount = obj.refCount - 1; - if (obj.refCount === 0) { - GRAPHQL_WEBSOCKET_BY_URL.delete(url); - obj.socket.dispose(); - } -} -//# sourceMappingURL=graphql-websocket.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication-graphql/graphql-websocket.js.map b/dist/esm/plugins/replication-graphql/graphql-websocket.js.map deleted file mode 100644 index ccefef4c7e0..00000000000 --- a/dist/esm/plugins/replication-graphql/graphql-websocket.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"graphql-websocket.js","names":["createClient","getFromMapOrCreate","getFromMapOrThrow","ws","WebSocket","IsomorphicWebSocket","GRAPHQL_WEBSOCKET_BY_URL","Map","getGraphQLWebSocket","url","headers","has","wsClient","shouldRetry","webSocketImpl","connectionParams","undefined","socket","refCount","value","removeGraphQLWebSocketRef","obj","delete","dispose"],"sources":["../../../../src/plugins/replication-graphql/graphql-websocket.ts"],"sourcesContent":["import { Client, createClient } from 'graphql-ws';\nimport { getFromMapOrCreate, getFromMapOrThrow } from '../../plugins/utils/index.ts';\nimport ws from 'isomorphic-ws';\n\nconst { WebSocket: IsomorphicWebSocket } = ws;\n\nexport type WebsocketWithRefCount = {\n url: string;\n socket: Client;\n refCount: number;\n};\n\nexport const GRAPHQL_WEBSOCKET_BY_URL: Map = new Map();\n\n\nexport function getGraphQLWebSocket(\n url: string,\n headers?: { [k: string]: string; }\n): Client {\n\n const has = getFromMapOrCreate(\n GRAPHQL_WEBSOCKET_BY_URL,\n url,\n () => {\n const wsClient = createClient({\n url,\n shouldRetry: () => true,\n webSocketImpl: IsomorphicWebSocket,\n connectionParams: headers ? 
{ headers } : undefined,\n });\n return {\n url,\n socket: wsClient,\n refCount: 1\n };\n },\n (value) => {\n value.refCount = value.refCount + 1;\n }\n );\n return has.socket;\n}\n\n\nexport function removeGraphQLWebSocketRef(\n url: string\n) {\n const obj = getFromMapOrThrow(GRAPHQL_WEBSOCKET_BY_URL, url);\n obj.refCount = obj.refCount - 1;\n if (obj.refCount === 0) {\n GRAPHQL_WEBSOCKET_BY_URL.delete(url);\n obj.socket.dispose();\n }\n}\n"],"mappings":"AAAA,SAAiBA,YAAY,QAAQ,YAAY;AACjD,SAASC,kBAAkB,EAAEC,iBAAiB,QAAQ,8BAA8B;AACpF,OAAOC,EAAE,MAAM,eAAe;AAE9B,IAAM;EAAEC,SAAS,EAAEC;AAAoB,CAAC,GAAGF,EAAE;AAQ7C,OAAO,IAAMG,wBAA4D,GAAG,IAAIC,GAAG,CAAC,CAAC;AAGrF,OAAO,SAASC,mBAAmBA,CAC/BC,GAAW,EACXC,OAAkC,EAC5B;EAEN,IAAMC,GAAG,GAAGV,kBAAkB,CAC1BK,wBAAwB,EACxBG,GAAG,EACH,MAAM;IACF,IAAMG,QAAQ,GAAGZ,YAAY,CAAC;MAC1BS,GAAG;MACHI,WAAW,EAAEA,CAAA,KAAM,IAAI;MACvBC,aAAa,EAAET,mBAAmB;MAClCU,gBAAgB,EAAEL,OAAO,GAAG;QAAEA;MAAQ,CAAC,GAAGM;IAC9C,CAAC,CAAC;IACF,OAAO;MACHP,GAAG;MACHQ,MAAM,EAAEL,QAAQ;MAChBM,QAAQ,EAAE;IACd,CAAC;EACL,CAAC,EACAC,KAAK,IAAK;IACPA,KAAK,CAACD,QAAQ,GAAGC,KAAK,CAACD,QAAQ,GAAG,CAAC;EACvC,CACJ,CAAC;EACD,OAAOP,GAAG,CAACM,MAAM;AACrB;AAGA,OAAO,SAASG,yBAAyBA,CACrCX,GAAW,EACb;EACE,IAAMY,GAAG,GAAGnB,iBAAiB,CAACI,wBAAwB,EAAEG,GAAG,CAAC;EAC5DY,GAAG,CAACH,QAAQ,GAAGG,GAAG,CAACH,QAAQ,GAAG,CAAC;EAC/B,IAAIG,GAAG,CAACH,QAAQ,KAAK,CAAC,EAAE;IACpBZ,wBAAwB,CAACgB,MAAM,CAACb,GAAG,CAAC;IACpCY,GAAG,CAACJ,MAAM,CAACM,OAAO,CAAC,CAAC;EACxB;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-graphql/helper.js b/dist/esm/plugins/replication-graphql/helper.js deleted file mode 100644 index 24da304d453..00000000000 --- a/dist/esm/plugins/replication-graphql/helper.js +++ /dev/null @@ -1,16 +0,0 @@ -import { ensureNotFalsy } from "../../plugins/utils/index.js"; -export var GRAPHQL_REPLICATION_PLUGIN_IDENTITY_PREFIX = 'graphql'; -export function graphQLRequest(fetchRequest, httpUrl, clientState, queryParams) { - var headers = new Headers(clientState.headers 
|| {}); - headers.append('Content-Type', 'application/json'); - var req = new Request(ensureNotFalsy(httpUrl), { - method: 'POST', - body: JSON.stringify(queryParams), - headers, - credentials: clientState.credentials - }); - return fetchRequest(req).then(res => res.json()).then(body => { - return body; - }); -} -//# sourceMappingURL=helper.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication-graphql/helper.js.map b/dist/esm/plugins/replication-graphql/helper.js.map deleted file mode 100644 index 7adfcbd54cf..00000000000 --- a/dist/esm/plugins/replication-graphql/helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"helper.js","names":["ensureNotFalsy","GRAPHQL_REPLICATION_PLUGIN_IDENTITY_PREFIX","graphQLRequest","fetchRequest","httpUrl","clientState","queryParams","headers","Headers","append","req","Request","method","body","JSON","stringify","credentials","then","res","json"],"sources":["../../../../src/plugins/replication-graphql/helper.ts"],"sourcesContent":["import type { RxGraphQLReplicationClientState, RxGraphQLReplicationQueryBuilderResponseObject } from '../../types/index.d.ts';\nimport { ensureNotFalsy } from '../../plugins/utils/index.ts';\n\nexport const GRAPHQL_REPLICATION_PLUGIN_IDENTITY_PREFIX = 'graphql';\n\nexport interface GraphQLError {\n message: string;\n locations: Array<{\n line: number;\n column: number;\n }>;\n path: string[];\n}\nexport type GraphQLErrors = Array;\n\n\n\nexport function graphQLRequest(\n fetchRequest: WindowOrWorkerGlobalScope['fetch'],\n httpUrl: string,\n clientState: RxGraphQLReplicationClientState,\n queryParams: RxGraphQLReplicationQueryBuilderResponseObject\n) {\n\n const headers = new Headers(clientState.headers || {});\n headers.append('Content-Type', 'application/json');\n\n const req = new Request(\n ensureNotFalsy(httpUrl),\n {\n method: 'POST',\n body: JSON.stringify(queryParams),\n headers,\n credentials: clientState.credentials,\n }\n );\n \n return fetchRequest(req)\n 
.then((res) => res.json())\n .then((body) => {\n return body;\n });\n}\n"],"mappings":"AACA,SAASA,cAAc,QAAQ,8BAA8B;AAE7D,OAAO,IAAMC,0CAA0C,GAAG,SAAS;AAcnE,OAAO,SAASC,cAAcA,CAC1BC,YAAgD,EAChDC,OAAe,EACfC,WAA4C,EAC5CC,WAA2D,EAC7D;EAEE,IAAMC,OAAO,GAAG,IAAIC,OAAO,CAACH,WAAW,CAACE,OAAO,IAAI,CAAC,CAAC,CAAC;EACtDA,OAAO,CAACE,MAAM,CAAC,cAAc,EAAE,kBAAkB,CAAC;EAElD,IAAMC,GAAG,GAAG,IAAIC,OAAO,CACnBX,cAAc,CAACI,OAAO,CAAC,EACvB;IACIQ,MAAM,EAAE,MAAM;IACdC,IAAI,EAAEC,IAAI,CAACC,SAAS,CAACT,WAAW,CAAC;IACjCC,OAAO;IACPS,WAAW,EAAEX,WAAW,CAACW;EAC7B,CACJ,CAAC;EAED,OAAOb,YAAY,CAACO,GAAG,CAAC,CACnBO,IAAI,CAAEC,GAAG,IAAKA,GAAG,CAACC,IAAI,CAAC,CAAC,CAAC,CACzBF,IAAI,CAAEJ,IAAI,IAAK;IACZ,OAAOA,IAAI;EACf,CAAC,CAAC;AACV","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-graphql/index.js b/dist/esm/plugins/replication-graphql/index.js deleted file mode 100644 index 56d6c7bfac0..00000000000 --- a/dist/esm/plugins/replication-graphql/index.js +++ /dev/null @@ -1,163 +0,0 @@ -import _inheritsLoose from "@babel/runtime/helpers/inheritsLoose"; -/** - * this plugin adds the RxCollection.syncGraphQl()-function to rxdb - * you can use it to sync collections with a remote graphql endpoint. 
- */ -import { ensureNotFalsy, flatClone, getProperty } from "../../plugins/utils/index.js"; -import { graphQLRequest as _graphQLRequest } from "./helper.js"; -import { RxDBLeaderElectionPlugin } from "../leader-election/index.js"; -import { RxReplicationState, startReplicationOnLeaderShip } from "../replication/index.js"; -import { addRxPlugin } from "../../index.js"; -import { removeGraphQLWebSocketRef, getGraphQLWebSocket } from "./graphql-websocket.js"; -import { Subject } from 'rxjs'; -export var RxGraphQLReplicationState = /*#__PURE__*/function (_RxReplicationState) { - function RxGraphQLReplicationState(url, clientState, replicationIdentifier, collection, deletedField, pull, push, live, retryTime, autoStart, customFetch) { - var _this; - _this = _RxReplicationState.call(this, replicationIdentifier, collection, deletedField, pull, push, live, retryTime, autoStart) || this; - _this.url = url; - _this.clientState = clientState; - _this.replicationIdentifier = replicationIdentifier; - _this.collection = collection; - _this.deletedField = deletedField; - _this.pull = pull; - _this.push = push; - _this.live = live; - _this.retryTime = retryTime; - _this.autoStart = autoStart; - _this.customFetch = customFetch; - return _this; - } - _inheritsLoose(RxGraphQLReplicationState, _RxReplicationState); - var _proto = RxGraphQLReplicationState.prototype; - _proto.setHeaders = function setHeaders(headers) { - this.clientState.headers = flatClone(headers); - }; - _proto.setCredentials = function setCredentials(credentials) { - this.clientState.credentials = credentials; - }; - _proto.graphQLRequest = function graphQLRequest(queryParams) { - return _graphQLRequest(this.customFetch ?? 
fetch, ensureNotFalsy(this.url.http), this.clientState, queryParams); - }; - return RxGraphQLReplicationState; -}(RxReplicationState); -export function replicateGraphQL({ - collection, - url, - headers = {}, - credentials, - deletedField = '_deleted', - waitForLeadership = true, - pull, - push, - live = true, - fetch: customFetch, - retryTime = 1000 * 5, - // in ms - autoStart = true, - replicationIdentifier -}) { - addRxPlugin(RxDBLeaderElectionPlugin); - /** - * We use this object to store the GraphQL client - * so we can later swap out the client inside of the replication handlers. - */ - var mutateableClientState = { - headers, - credentials - }; - var pullStream$ = new Subject(); - var replicationPrimitivesPull; - if (pull) { - var pullBatchSize = pull.batchSize ? pull.batchSize : 20; - replicationPrimitivesPull = { - async handler(lastPulledCheckpoint) { - var pullGraphQL = await pull.queryBuilder(lastPulledCheckpoint, pullBatchSize); - var result = await graphqlReplicationState.graphQLRequest(pullGraphQL); - if (result.errors) { - throw result.errors; - } - var dataPath = pull.dataPath || ['data', Object.keys(result.data)[0]]; - var data = getProperty(result, dataPath); - if (pull.responseModifier) { - data = await pull.responseModifier(data, 'handler', lastPulledCheckpoint); - } - var docsData = data.documents; - var newCheckpoint = data.checkpoint; - return { - documents: docsData, - checkpoint: newCheckpoint - }; - }, - batchSize: pull.batchSize, - modifier: pull.modifier, - stream$: pullStream$.asObservable() - }; - } - var replicationPrimitivesPush; - if (push) { - replicationPrimitivesPush = { - async handler(rows) { - var pushObj = await push.queryBuilder(rows); - var result = await graphqlReplicationState.graphQLRequest(pushObj); - if (result.errors) { - throw result.errors; - } - var dataPath = push.dataPath || Object.keys(result.data)[0]; - var data = getProperty(result.data, dataPath); - if (push.responseModifier) { - data = await 
push.responseModifier(data); - } - return data; - }, - batchSize: push.batchSize, - modifier: push.modifier - }; - } - var graphqlReplicationState = new RxGraphQLReplicationState(url, mutateableClientState, replicationIdentifier, collection, deletedField, replicationPrimitivesPull, replicationPrimitivesPush, live, retryTime, autoStart, customFetch); - var mustUseSocket = url.ws && pull && pull.streamQueryBuilder && live; - var startBefore = graphqlReplicationState.start.bind(graphqlReplicationState); - graphqlReplicationState.start = () => { - if (mustUseSocket) { - var httpHeaders = pull.includeWsHeaders ? mutateableClientState.headers : undefined; - var wsClient = getGraphQLWebSocket(ensureNotFalsy(url.ws), httpHeaders); - wsClient.on('connected', () => { - pullStream$.next('RESYNC'); - }); - var query = ensureNotFalsy(pull.streamQueryBuilder)(mutateableClientState.headers); - wsClient.subscribe(query, { - next: async streamResponse => { - var firstField = Object.keys(streamResponse.data)[0]; - var data = streamResponse.data[firstField]; - if (pull.responseModifier) { - data = await pull.responseModifier(data, 'stream'); - } - pullStream$.next(data); - }, - error: error => { - pullStream$.error(error); - }, - complete: () => { - pullStream$.complete(); - } - }); - } - return startBefore(); - }; - var cancelBefore = graphqlReplicationState.cancel.bind(graphqlReplicationState); - graphqlReplicationState.cancel = () => { - if (!graphqlReplicationState.isStopped()) { - pullStream$.complete(); - if (mustUseSocket) { - removeGraphQLWebSocketRef(ensureNotFalsy(url.ws)); - } - } - return cancelBefore(); - }; - startReplicationOnLeaderShip(waitForLeadership, graphqlReplicationState); - return graphqlReplicationState; -} -export * from "./helper.js"; -export * from "./graphql-schema-from-rx-schema.js"; -export * from "./query-builder-from-rx-schema.js"; -export * from "./graphql-websocket.js"; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git 
a/dist/esm/plugins/replication-graphql/index.js.map b/dist/esm/plugins/replication-graphql/index.js.map deleted file mode 100644 index a7637e04775..00000000000 --- a/dist/esm/plugins/replication-graphql/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["ensureNotFalsy","flatClone","getProperty","graphQLRequest","RxDBLeaderElectionPlugin","RxReplicationState","startReplicationOnLeaderShip","addRxPlugin","removeGraphQLWebSocketRef","getGraphQLWebSocket","Subject","RxGraphQLReplicationState","_RxReplicationState","url","clientState","replicationIdentifier","collection","deletedField","pull","push","live","retryTime","autoStart","customFetch","_this","call","_inheritsLoose","_proto","prototype","setHeaders","headers","setCredentials","credentials","queryParams","fetch","http","replicateGraphQL","waitForLeadership","mutateableClientState","pullStream$","replicationPrimitivesPull","pullBatchSize","batchSize","handler","lastPulledCheckpoint","pullGraphQL","queryBuilder","result","graphqlReplicationState","errors","dataPath","Object","keys","data","responseModifier","docsData","documents","newCheckpoint","checkpoint","modifier","stream$","asObservable","replicationPrimitivesPush","rows","pushObj","mustUseSocket","ws","streamQueryBuilder","startBefore","start","bind","httpHeaders","includeWsHeaders","undefined","wsClient","on","next","query","subscribe","streamResponse","firstField","error","complete","cancelBefore","cancel","isStopped"],"sources":["../../../../src/plugins/replication-graphql/index.ts"],"sourcesContent":["/**\n * this plugin adds the RxCollection.syncGraphQl()-function to rxdb\n * you can use it to sync collections with a remote graphql endpoint.\n */\nimport {\n ensureNotFalsy,\n flatClone,\n getProperty\n} from '../../plugins/utils/index.ts';\n\nimport {\n graphQLRequest\n} from './helper.ts';\n\nimport { RxDBLeaderElectionPlugin } from '../leader-election/index.ts';\nimport type {\n RxCollection,\n 
ReplicationPullOptions,\n ReplicationPushOptions,\n RxReplicationWriteToMasterRow,\n GraphQLServerUrl,\n RxReplicationPullStreamItem,\n RxGraphQLReplicationQueryBuilderResponseObject,\n RxGraphQLReplicationClientState,\n ById\n} from '../../types/index.d.ts';\nimport {\n RxReplicationState,\n startReplicationOnLeaderShip\n} from '../replication/index.ts';\nimport {\n addRxPlugin,\n SyncOptionsGraphQL,\n WithDeleted\n} from '../../index.ts';\n\nimport {\n removeGraphQLWebSocketRef,\n getGraphQLWebSocket\n} from './graphql-websocket.ts';\nimport { Subject } from 'rxjs';\n\n\n\n\nexport class RxGraphQLReplicationState extends RxReplicationState {\n constructor(\n public readonly url: GraphQLServerUrl,\n public readonly clientState: RxGraphQLReplicationClientState,\n public readonly replicationIdentifier: string,\n public readonly collection: RxCollection,\n public readonly deletedField: string,\n public readonly pull?: ReplicationPullOptions,\n public readonly push?: ReplicationPushOptions,\n public readonly live?: boolean,\n public retryTime?: number,\n public autoStart?: boolean,\n public readonly customFetch?: WindowOrWorkerGlobalScope['fetch']\n ) {\n super(\n replicationIdentifier,\n collection,\n deletedField,\n pull,\n push,\n live,\n retryTime,\n autoStart\n );\n }\n\n setHeaders(headers: ById): void {\n this.clientState.headers = flatClone(headers);\n }\n\n setCredentials(credentials: RequestCredentials | undefined) {\n this.clientState.credentials = credentials;\n }\n\n graphQLRequest(\n queryParams: RxGraphQLReplicationQueryBuilderResponseObject\n ) {\n return graphQLRequest(\n this.customFetch ?? 
fetch,\n ensureNotFalsy(this.url.http),\n this.clientState,\n queryParams\n );\n }\n}\n\nexport function replicateGraphQL(\n {\n collection,\n url,\n headers = {},\n credentials,\n deletedField = '_deleted',\n waitForLeadership = true,\n pull,\n push,\n live = true,\n fetch: customFetch,\n retryTime = 1000 * 5, // in ms\n autoStart = true,\n replicationIdentifier\n }: SyncOptionsGraphQL\n): RxGraphQLReplicationState {\n addRxPlugin(RxDBLeaderElectionPlugin);\n /**\n * We use this object to store the GraphQL client\n * so we can later swap out the client inside of the replication handlers.\n */\n const mutateableClientState = {\n headers,\n credentials\n };\n\n\n const pullStream$: Subject> = new Subject();\n\n let replicationPrimitivesPull: ReplicationPullOptions | undefined;\n if (pull) {\n const pullBatchSize = pull.batchSize ? pull.batchSize : 20;\n replicationPrimitivesPull = {\n async handler(\n lastPulledCheckpoint: CheckpointType | undefined\n ) {\n const pullGraphQL = await pull.queryBuilder(lastPulledCheckpoint, pullBatchSize);\n const result = await graphqlReplicationState.graphQLRequest(pullGraphQL);\n if (result.errors) {\n throw result.errors;\n }\n const dataPath = pull.dataPath || ['data', Object.keys(result.data)[0]];\n let data: any = getProperty(result, dataPath);\n if (pull.responseModifier) {\n data = await pull.responseModifier(\n data,\n 'handler',\n lastPulledCheckpoint\n );\n }\n\n const docsData: WithDeleted[] = data.documents;\n const newCheckpoint = data.checkpoint;\n\n return {\n documents: docsData,\n checkpoint: newCheckpoint\n };\n },\n batchSize: pull.batchSize,\n modifier: pull.modifier,\n stream$: pullStream$.asObservable()\n };\n }\n let replicationPrimitivesPush: ReplicationPushOptions | undefined;\n if (push) {\n replicationPrimitivesPush = {\n async handler(\n rows: RxReplicationWriteToMasterRow[]\n ) {\n const pushObj = await push.queryBuilder(rows);\n const result = await graphqlReplicationState.graphQLRequest(pushObj);\n\n 
if (result.errors) {\n throw result.errors;\n }\n const dataPath = push.dataPath || Object.keys(result.data)[0];\n let data: any = getProperty(result.data, dataPath);\n\n if (push.responseModifier) {\n data = await push.responseModifier(\n data,\n );\n }\n\n return data;\n },\n batchSize: push.batchSize,\n modifier: push.modifier\n };\n }\n\n const graphqlReplicationState = new RxGraphQLReplicationState(\n url,\n mutateableClientState,\n replicationIdentifier,\n collection,\n deletedField,\n replicationPrimitivesPull,\n replicationPrimitivesPush,\n live,\n retryTime,\n autoStart,\n customFetch\n );\n\n const mustUseSocket = url.ws &&\n pull &&\n pull.streamQueryBuilder &&\n live;\n\n const startBefore = graphqlReplicationState.start.bind(graphqlReplicationState);\n graphqlReplicationState.start = () => {\n if (mustUseSocket) {\n const httpHeaders = pull.includeWsHeaders ? mutateableClientState.headers : undefined;\n const wsClient = getGraphQLWebSocket(ensureNotFalsy(url.ws), httpHeaders);\n\n wsClient.on('connected', () => {\n pullStream$.next('RESYNC');\n });\n\n const query: any = ensureNotFalsy(pull.streamQueryBuilder)(mutateableClientState.headers);\n\n wsClient.subscribe(\n query,\n {\n next: async (streamResponse: any) => {\n const firstField = Object.keys(streamResponse.data)[0];\n let data = streamResponse.data[firstField];\n if (pull.responseModifier) {\n data = await pull.responseModifier(\n data,\n 'stream'\n );\n }\n pullStream$.next(data);\n },\n error: (error: any) => {\n pullStream$.error(error);\n },\n complete: () => {\n pullStream$.complete();\n }\n });\n }\n return startBefore();\n };\n\n const cancelBefore = graphqlReplicationState.cancel.bind(graphqlReplicationState);\n graphqlReplicationState.cancel = () => {\n if (!graphqlReplicationState.isStopped()) {\n pullStream$.complete();\n if (mustUseSocket) {\n removeGraphQLWebSocketRef(ensureNotFalsy(url.ws));\n }\n }\n return cancelBefore();\n };\n\n startReplicationOnLeaderShip(waitForLeadership, 
graphqlReplicationState);\n return graphqlReplicationState;\n}\n\nexport * from './helper.ts';\nexport * from './graphql-schema-from-rx-schema.ts';\nexport * from './query-builder-from-rx-schema.ts';\nexport * from './graphql-websocket.ts';\n"],"mappings":";AAAA;AACA;AACA;AACA;AACA,SACIA,cAAc,EACdC,SAAS,EACTC,WAAW,QACR,8BAA8B;AAErC,SACIC,cAAc,IAAdA,eAAc,QACX,aAAa;AAEpB,SAASC,wBAAwB,QAAQ,6BAA6B;AAYtE,SACIC,kBAAkB,EAClBC,4BAA4B,QACzB,yBAAyB;AAChC,SACIC,WAAW,QAGR,gBAAgB;AAEvB,SACIC,yBAAyB,EACzBC,mBAAmB,QAChB,wBAAwB;AAC/B,SAASC,OAAO,QAAQ,MAAM;AAK9B,WAAaC,yBAAyB,0BAAAC,mBAAA;EAClC,SAAAD,0BACoBE,GAAqB,EACrBC,WAA4C,EAC5CC,qBAA6B,EAC7BC,UAAmC,EACnCC,YAAoB,EACpBC,IAAwD,EACxDC,IAAwC,EACxCC,IAAc,EACvBC,SAAkB,EAClBC,SAAmB,EACVC,WAAgD,EAClE;IAAA,IAAAC,KAAA;IACEA,KAAA,GAAAZ,mBAAA,CAAAa,IAAA,OACIV,qBAAqB,EACrBC,UAAU,EACVC,YAAY,EACZC,IAAI,EACJC,IAAI,EACJC,IAAI,EACJC,SAAS,EACTC,SACJ,CAAC;IAACE,KAAA,CArBcX,GAAqB,GAArBA,GAAqB;IAAAW,KAAA,CACrBV,WAA4C,GAA5CA,WAA4C;IAAAU,KAAA,CAC5CT,qBAA6B,GAA7BA,qBAA6B;IAAAS,KAAA,CAC7BR,UAAmC,GAAnCA,UAAmC;IAAAQ,KAAA,CACnCP,YAAoB,GAApBA,YAAoB;IAAAO,KAAA,CACpBN,IAAwD,GAAxDA,IAAwD;IAAAM,KAAA,CACxDL,IAAwC,GAAxCA,IAAwC;IAAAK,KAAA,CACxCJ,IAAc,GAAdA,IAAc;IAAAI,KAAA,CACvBH,SAAkB,GAAlBA,SAAkB;IAAAG,KAAA,CAClBF,SAAmB,GAAnBA,SAAmB;IAAAE,KAAA,CACVD,WAAgD,GAAhDA,WAAgD;IAAA,OAAAC,KAAA;EAYpE;EAACE,cAAA,CAAAf,yBAAA,EAAAC,mBAAA;EAAA,IAAAe,MAAA,GAAAhB,yBAAA,CAAAiB,SAAA;EAAAD,MAAA,CAEDE,UAAU,GAAV,SAAAA,WAAWC,OAAqB,EAAQ;IACpC,IAAI,CAAChB,WAAW,CAACgB,OAAO,GAAG7B,SAAS,CAAC6B,OAAO,CAAC;EACjD,CAAC;EAAAH,MAAA,CAEDI,cAAc,GAAd,SAAAA,eAAeC,WAA2C,EAAE;IACxD,IAAI,CAAClB,WAAW,CAACkB,WAAW,GAAGA,WAAW;EAC9C,CAAC;EAAAL,MAAA,CAEDxB,cAAc,GAAd,SAAAA,eACI8B,WAA2D,EAC7D;IACE,OAAO9B,eAAc,CACjB,IAAI,CAACoB,WAAW,IAAIW,KAAK,EACzBlC,cAAc,CAAC,IAAI,CAACa,GAAG,CAACsB,IAAI,CAAC,EAC7B,IAAI,CAACrB,WAAW,EAChBmB,WACJ,CAAC;EACL,CAAC;EAAA,OAAAtB,yBAAA;AAAA,EA3CqEN,kBAAkB;AA8C5F,OAAO,SAAS+B,gBAAgBA,CAC5B;EACIpB,UAAU;EACVH,GAAG;EACHiB,OAAO,GAAG,CAAC,CAAC;EACZE,WAAW;EACXf,YAAY,GAAG,UAAU;EACzBoB,iBAAiB,GAAG,IA
AI;EACxBnB,IAAI;EACJC,IAAI;EACJC,IAAI,GAAG,IAAI;EACXc,KAAK,EAAEX,WAAW;EAClBF,SAAS,GAAG,IAAI,GAAG,CAAC;EAAE;EACtBC,SAAS,GAAG,IAAI;EAChBP;AAC2C,CAAC,EACI;EACpDR,WAAW,CAACH,wBAAwB,CAAC;EACrC;AACJ;AACA;AACA;EACI,IAAMkC,qBAAqB,GAAG;IAC1BR,OAAO;IACPE;EACJ,CAAC;EAGD,IAAMO,WAA4E,GAAG,IAAI7B,OAAO,CAAC,CAAC;EAElG,IAAI8B,yBAAwF;EAC5F,IAAItB,IAAI,EAAE;IACN,IAAMuB,aAAa,GAAGvB,IAAI,CAACwB,SAAS,GAAGxB,IAAI,CAACwB,SAAS,GAAG,EAAE;IAC1DF,yBAAyB,GAAG;MACxB,MAAMG,OAAOA,CACTC,oBAAgD,EAClD;QACE,IAAMC,WAAW,GAAG,MAAM3B,IAAI,CAAC4B,YAAY,CAACF,oBAAoB,EAAEH,aAAa,CAAC;QAChF,IAAMM,MAAM,GAAG,MAAMC,uBAAuB,CAAC7C,cAAc,CAAC0C,WAAW,CAAC;QACxE,IAAIE,MAAM,CAACE,MAAM,EAAE;UACf,MAAMF,MAAM,CAACE,MAAM;QACvB;QACA,IAAMC,QAAQ,GAAGhC,IAAI,CAACgC,QAAQ,IAAI,CAAC,MAAM,EAAEC,MAAM,CAACC,IAAI,CAACL,MAAM,CAACM,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;QACvE,IAAIA,IAAS,GAAGnD,WAAW,CAAC6C,MAAM,EAAEG,QAAQ,CAAC;QAC7C,IAAIhC,IAAI,CAACoC,gBAAgB,EAAE;UACvBD,IAAI,GAAG,MAAMnC,IAAI,CAACoC,gBAAgB,CAC9BD,IAAI,EACJ,SAAS,EACTT,oBACJ,CAAC;QACL;QAEA,IAAMW,QAAkC,GAAGF,IAAI,CAACG,SAAS;QACzD,IAAMC,aAAa,GAAGJ,IAAI,CAACK,UAAU;QAErC,OAAO;UACHF,SAAS,EAAED,QAAQ;UACnBG,UAAU,EAAED;QAChB,CAAC;MACL,CAAC;MACDf,SAAS,EAAExB,IAAI,CAACwB,SAAS;MACzBiB,QAAQ,EAAEzC,IAAI,CAACyC,QAAQ;MACvBC,OAAO,EAAErB,WAAW,CAACsB,YAAY,CAAC;IACtC,CAAC;EACL;EACA,IAAIC,yBAAwE;EAC5E,IAAI3C,IAAI,EAAE;IACN2C,yBAAyB,GAAG;MACxB,MAAMnB,OAAOA,CACToB,IAAgD,EAClD;QACE,IAAMC,OAAO,GAAG,MAAM7C,IAAI,CAAC2B,YAAY,CAACiB,IAAI,CAAC;QAC7C,IAAMhB,MAAM,GAAG,MAAMC,uBAAuB,CAAC7C,cAAc,CAAC6D,OAAO,CAAC;QAEpE,IAAIjB,MAAM,CAACE,MAAM,EAAE;UACf,MAAMF,MAAM,CAACE,MAAM;QACvB;QACA,IAAMC,QAAQ,GAAG/B,IAAI,CAAC+B,QAAQ,IAAIC,MAAM,CAACC,IAAI,CAACL,MAAM,CAACM,IAAI,CAAC,CAAC,CAAC,CAAC;QAC7D,IAAIA,IAAS,GAAGnD,WAAW,CAAC6C,MAAM,CAACM,IAAI,EAAEH,QAAQ,CAAC;QAElD,IAAI/B,IAAI,CAACmC,gBAAgB,EAAE;UACvBD,IAAI,GAAG,MAAMlC,IAAI,CAACmC,gBAAgB,CAC9BD,IACJ,CAAC;QACL;QAEA,OAAOA,IAAI;MACf,CAAC;MACDX,SAAS,EAAEvB,IAAI,CAACuB,SAAS;MACzBiB,QAAQ,EAAExC,IAAI,CAACwC;IACnB,CAAC;EACL;EAEA,IAAMX,uBAAuB,GAAG,IAAIrC,yBAAyB,CACzDE,GAAG,EACHyB,qBAAqB,E
ACrBvB,qBAAqB,EACrBC,UAAU,EACVC,YAAY,EACZuB,yBAAyB,EACzBsB,yBAAyB,EACzB1C,IAAI,EACJC,SAAS,EACTC,SAAS,EACTC,WACJ,CAAC;EAED,IAAM0C,aAAa,GAAGpD,GAAG,CAACqD,EAAE,IACxBhD,IAAI,IACJA,IAAI,CAACiD,kBAAkB,IACvB/C,IAAI;EAER,IAAMgD,WAAW,GAAGpB,uBAAuB,CAACqB,KAAK,CAACC,IAAI,CAACtB,uBAAuB,CAAC;EAC/EA,uBAAuB,CAACqB,KAAK,GAAG,MAAM;IAClC,IAAIJ,aAAa,EAAE;MACf,IAAMM,WAAW,GAAGrD,IAAI,CAACsD,gBAAgB,GAAGlC,qBAAqB,CAACR,OAAO,GAAG2C,SAAS;MACrF,IAAMC,QAAQ,GAAGjE,mBAAmB,CAACT,cAAc,CAACa,GAAG,CAACqD,EAAE,CAAC,EAAEK,WAAW,CAAC;MAEzEG,QAAQ,CAACC,EAAE,CAAC,WAAW,EAAE,MAAM;QAC3BpC,WAAW,CAACqC,IAAI,CAAC,QAAQ,CAAC;MAC9B,CAAC,CAAC;MAEF,IAAMC,KAAU,GAAG7E,cAAc,CAACkB,IAAI,CAACiD,kBAAkB,CAAC,CAAC7B,qBAAqB,CAACR,OAAO,CAAC;MAEzF4C,QAAQ,CAACI,SAAS,CACdD,KAAK,EACL;QACID,IAAI,EAAE,MAAOG,cAAmB,IAAK;UACjC,IAAMC,UAAU,GAAG7B,MAAM,CAACC,IAAI,CAAC2B,cAAc,CAAC1B,IAAI,CAAC,CAAC,CAAC,CAAC;UACtD,IAAIA,IAAI,GAAG0B,cAAc,CAAC1B,IAAI,CAAC2B,UAAU,CAAC;UAC1C,IAAI9D,IAAI,CAACoC,gBAAgB,EAAE;YACvBD,IAAI,GAAG,MAAMnC,IAAI,CAACoC,gBAAgB,CAC9BD,IAAI,EACJ,QACJ,CAAC;UACL;UACAd,WAAW,CAACqC,IAAI,CAACvB,IAAI,CAAC;QAC1B,CAAC;QACD4B,KAAK,EAAGA,KAAU,IAAK;UACnB1C,WAAW,CAAC0C,KAAK,CAACA,KAAK,CAAC;QAC5B,CAAC;QACDC,QAAQ,EAAEA,CAAA,KAAM;UACZ3C,WAAW,CAAC2C,QAAQ,CAAC,CAAC;QAC1B;MACJ,CAAC,CAAC;IACV;IACA,OAAOd,WAAW,CAAC,CAAC;EACxB,CAAC;EAED,IAAMe,YAAY,GAAGnC,uBAAuB,CAACoC,MAAM,CAACd,IAAI,CAACtB,uBAAuB,CAAC;EACjFA,uBAAuB,CAACoC,MAAM,GAAG,MAAM;IACnC,IAAI,CAACpC,uBAAuB,CAACqC,SAAS,CAAC,CAAC,EAAE;MACtC9C,WAAW,CAAC2C,QAAQ,CAAC,CAAC;MACtB,IAAIjB,aAAa,EAAE;QACfzD,yBAAyB,CAACR,cAAc,CAACa,GAAG,CAACqD,EAAE,CAAC,CAAC;MACrD;IACJ;IACA,OAAOiB,YAAY,CAAC,CAAC;EACzB,CAAC;EAED7E,4BAA4B,CAAC+B,iBAAiB,EAAEW,uBAAuB,CAAC;EACxE,OAAOA,uBAAuB;AAClC;AAEA,cAAc,aAAa;AAC3B,cAAc,oCAAoC;AAClD,cAAc,mCAAmC;AACjD,cAAc,wBAAwB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-graphql/query-builder-from-rx-schema.js b/dist/esm/plugins/replication-graphql/query-builder-from-rx-schema.js deleted file mode 100644 index 3dc67b10664..00000000000 --- 
a/dist/esm/plugins/replication-graphql/query-builder-from-rx-schema.js +++ /dev/null @@ -1,126 +0,0 @@ -import { fillUpOptionals, SPACING } from "./graphql-schema-from-rx-schema.js"; -import { ensureNotFalsy, ucfirst } from "../../plugins/utils/index.js"; -export function pullQueryBuilderFromRxSchema(collectionName, input) { - input = fillUpOptionals(input); - var schema = input.schema; - var prefixes = input.prefixes; - var ucCollectionName = ucfirst(collectionName); - var queryName = prefixes.pull + ucCollectionName; - var operationName = ucfirst(queryName); - var outputFields = generateGQLOutputFields({ - schema, - ignoreOutputKeys: input.ignoreOutputKeys - }); - // outputFields.push(input.deletedField); - - var checkpointInputName = ucCollectionName + 'Input' + prefixes.checkpoint; - var builder = (checkpoint, limit) => { - var query = 'query ' + operationName + '($checkpoint: ' + checkpointInputName + ', $limit: Int!) {\n' + SPACING + SPACING + queryName + '(checkpoint: $checkpoint, limit: $limit) {\n' + SPACING + SPACING + SPACING + 'documents {\n' + outputFields + '\n' + SPACING + SPACING + SPACING + '}\n' + SPACING + SPACING + SPACING + 'checkpoint {\n' + SPACING + SPACING + SPACING + SPACING + input.checkpointFields.join('\n' + SPACING + SPACING + SPACING + SPACING) + '\n' + SPACING + SPACING + SPACING + '}\n' + SPACING + SPACING + '}\n' + '}'; - return { - query, - operationName, - variables: { - checkpoint, - limit - } - }; - }; - return builder; -} -export function pullStreamBuilderFromRxSchema(collectionName, input) { - input = fillUpOptionals(input); - var schema = input.schema; - var prefixes = input.prefixes; - var ucCollectionName = ucfirst(collectionName); - var queryName = prefixes.stream + ucCollectionName; - var outputFields = generateGQLOutputFields({ - schema, - ignoreOutputKeys: input.ignoreOutputKeys - }); - var headersName = ucCollectionName + 'Input' + prefixes.headers; - var query = 'subscription on' + 
ucfirst(ensureNotFalsy(prefixes.stream)) + '($headers: ' + headersName + ') {\n' + SPACING + queryName + '(headers: $headers) {\n' + SPACING + SPACING + SPACING + 'documents {\n' + outputFields + '\n' + SPACING + SPACING + SPACING + '}\n' + SPACING + SPACING + SPACING + 'checkpoint {\n' + SPACING + SPACING + SPACING + SPACING + input.checkpointFields.join('\n' + SPACING + SPACING + SPACING + SPACING) + '\n' + SPACING + SPACING + SPACING + '}\n' + SPACING + '}' + '}'; - var builder = headers => { - return { - query, - variables: { - headers - } - }; - }; - return builder; -} -export function pushQueryBuilderFromRxSchema(collectionName, input) { - input = fillUpOptionals(input); - var prefixes = input.prefixes; - var ucCollectionName = ucfirst(collectionName); - var queryName = prefixes.push + ucCollectionName; - var operationName = ucfirst(queryName); - var variableName = collectionName + prefixes.pushRow; - var returnFields = generateGQLOutputFields({ - schema: input.schema, - spaceCount: 2 - }); - var builder = pushRows => { - var query = '' + 'mutation ' + operationName + '($' + variableName + ': [' + ucCollectionName + 'Input' + prefixes.pushRow + '!]) {\n' + SPACING + queryName + '(' + variableName + ': $' + variableName + ') {\n' + returnFields + '\n' + SPACING + '}\n' + '}'; - var sendRows = []; - function transformPushDoc(doc) { - var sendDoc = {}; - Object.entries(doc).forEach(([k, v]) => { - if ( - // skip if in ignoreInputKeys list - !input.ignoreInputKeys.includes(k) && - // only use properties that are in the schema - input.schema.properties[k]) { - sendDoc[k] = v; - } - }); - return sendDoc; - } - pushRows.forEach(pushRow => { - var newRow = { - newDocumentState: transformPushDoc(pushRow.newDocumentState), - assumedMasterState: pushRow.assumedMasterState ? 
transformPushDoc(pushRow.assumedMasterState) : undefined - }; - sendRows.push(newRow); - }); - var variables = { - [variableName]: sendRows - }; - return { - query, - operationName, - variables - }; - }; - return builder; -} -function generateGQLOutputFields(options) { - var { - schema, - spaceCount = 4, - depth = 0, - ignoreOutputKeys = [] - } = options; - var outputFields = []; - var properties = schema.properties; - var NESTED_SPACING = SPACING.repeat(depth); - var LINE_SPACING = SPACING.repeat(spaceCount); - for (var key in properties) { - //only skipping top level keys that are in ignoreOutputKeys list - if (ignoreOutputKeys.includes(key)) { - continue; - } - var value = properties[key]; - if (value.type === "object") { - outputFields.push(LINE_SPACING + NESTED_SPACING + key + " {", generateGQLOutputFields({ - schema: value, - spaceCount, - depth: depth + 1 - }), LINE_SPACING + NESTED_SPACING + "}"); - } else { - outputFields.push(LINE_SPACING + NESTED_SPACING + key); - } - } - return outputFields.join('\n'); -} -//# sourceMappingURL=query-builder-from-rx-schema.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication-graphql/query-builder-from-rx-schema.js.map b/dist/esm/plugins/replication-graphql/query-builder-from-rx-schema.js.map deleted file mode 100644 index dbcfa08f117..00000000000 --- a/dist/esm/plugins/replication-graphql/query-builder-from-rx-schema.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"query-builder-from-rx-schema.js","names":["fillUpOptionals","SPACING","ensureNotFalsy","ucfirst","pullQueryBuilderFromRxSchema","collectionName","input","schema","prefixes","ucCollectionName","queryName","pull","operationName","outputFields","generateGQLOutputFields","ignoreOutputKeys","checkpointInputName","checkpoint","builder","limit","query","checkpointFields","join","variables","pullStreamBuilderFromRxSchema","stream","headersName","headers","pushQueryBuilderFromRxSchema","push","variableName","pushRow","returnFields","spaceCount","pushRows","sendRows","transformPushDoc","doc","sendDoc","Object","entries","forEach","k","v","ignoreInputKeys","includes","properties","newRow","newDocumentState","assumedMasterState","undefined","options","depth","NESTED_SPACING","repeat","LINE_SPACING","key","value","type"],"sources":["../../../../src/plugins/replication-graphql/query-builder-from-rx-schema.ts"],"sourcesContent":["import {\n GraphQLSchemaFromRxSchemaInputSingleCollection,\n fillUpOptionals,\n Prefixes,\n SPACING\n} from './graphql-schema-from-rx-schema.ts';\nimport { ensureNotFalsy, ucfirst } from '../../plugins/utils/index.ts';\nimport type {\n RxGraphQLReplicationPullQueryBuilder,\n RxGraphQLReplicationPullStreamQueryBuilder,\n RxGraphQLReplicationPushQueryBuilder,\n RxJsonSchema,\n TopLevelProperty,\n WithDeleted\n} from '../../types/index.d.ts';\n\nexport function pullQueryBuilderFromRxSchema(\n collectionName: string,\n input: GraphQLSchemaFromRxSchemaInputSingleCollection,\n): RxGraphQLReplicationPullQueryBuilder {\n input = fillUpOptionals(input);\n const schema = input.schema\n const prefixes: Prefixes = input.prefixes as any;\n\n const ucCollectionName = ucfirst(collectionName);\n const queryName = prefixes.pull + ucCollectionName;\n const operationName = ucfirst(queryName);\n\n const outputFields = generateGQLOutputFields({ schema, ignoreOutputKeys: input.ignoreOutputKeys })\n // outputFields.push(input.deletedField); \n \n const 
checkpointInputName = ucCollectionName + 'Input' + prefixes.checkpoint;\n const builder: RxGraphQLReplicationPullQueryBuilder = (checkpoint: any, limit: number) => {\n const query = 'query ' + operationName + '($checkpoint: ' + checkpointInputName + ', $limit: Int!) {\\n' +\n SPACING + SPACING + queryName + '(checkpoint: $checkpoint, limit: $limit) {\\n' +\n SPACING + SPACING + SPACING + 'documents {\\n' + \n outputFields + '\\n' +\n SPACING + SPACING + SPACING + '}\\n' +\n SPACING + SPACING + SPACING + 'checkpoint {\\n' +\n SPACING + SPACING + SPACING + SPACING + input.checkpointFields.join('\\n' + SPACING + SPACING + SPACING + SPACING) + '\\n' +\n SPACING + SPACING + SPACING + '}\\n' +\n SPACING + SPACING + '}\\n' +\n '}';\n return {\n query,\n operationName,\n variables: {\n checkpoint,\n limit\n }\n };\n };\n\n return builder;\n}\n\nexport function pullStreamBuilderFromRxSchema(\n collectionName: string,\n input: GraphQLSchemaFromRxSchemaInputSingleCollection,\n) {\n input = fillUpOptionals(input);\n const schema = input.schema;\n const prefixes: Prefixes = input.prefixes as any;\n\n const ucCollectionName = ucfirst(collectionName);\n const queryName = prefixes.stream + ucCollectionName;\n const outputFields = generateGQLOutputFields({ schema, ignoreOutputKeys: input.ignoreOutputKeys })\n\n const headersName = ucCollectionName + 'Input' + prefixes.headers;\n\n const query = 'subscription on' + ucfirst(ensureNotFalsy(prefixes.stream)) + '($headers: ' + headersName + ') {\\n' +\n SPACING + queryName + '(headers: $headers) {\\n' +\n SPACING + SPACING + SPACING + 'documents {\\n' +\n outputFields + '\\n' +\n SPACING + SPACING + SPACING + '}\\n' +\n SPACING + SPACING + SPACING + 'checkpoint {\\n' +\n SPACING + SPACING + SPACING + SPACING + input.checkpointFields.join('\\n' + SPACING + SPACING + SPACING + SPACING) + '\\n' +\n SPACING + SPACING + SPACING + '}\\n' +\n SPACING + '}' +\n '}';\n\n const builder: RxGraphQLReplicationPullStreamQueryBuilder = (headers: any) 
=> {\n return {\n query,\n variables: {\n headers\n }\n };\n };\n return builder;\n}\n\n\nexport function pushQueryBuilderFromRxSchema(\n collectionName: string,\n input: GraphQLSchemaFromRxSchemaInputSingleCollection\n): RxGraphQLReplicationPushQueryBuilder {\n input = fillUpOptionals(input);\n const prefixes: Prefixes = input.prefixes as any;\n\n const ucCollectionName = ucfirst(collectionName);\n const queryName = prefixes.push + ucCollectionName;\n const operationName = ucfirst(queryName);\n\n const variableName = collectionName + prefixes.pushRow;\n const returnFields = generateGQLOutputFields({ schema: input.schema, spaceCount: 2 })\n \n const builder: RxGraphQLReplicationPushQueryBuilder = (pushRows) => {\n const query = '' +\n 'mutation ' + operationName + '($' + variableName + ': [' + ucCollectionName + 'Input' + prefixes.pushRow + '!]) {\\n' +\n SPACING + queryName + '(' + variableName + ': $' + variableName + ') {\\n' +\n returnFields + '\\n' +\n SPACING + '}\\n' +\n '}';\n\n const sendRows: typeof pushRows = [];\n function transformPushDoc(doc: WithDeleted) {\n const sendDoc: any = {};\n Object.entries(doc).forEach(([k, v]) => {\n if (\n // skip if in ignoreInputKeys list\n !(input.ignoreInputKeys as string[]).includes(k) &&\n // only use properties that are in the schema\n input.schema.properties[k]\n ) {\n sendDoc[k] = v;\n }\n });\n return sendDoc;\n }\n pushRows.forEach(pushRow => {\n const newRow: typeof pushRow = {\n newDocumentState: transformPushDoc(pushRow.newDocumentState),\n assumedMasterState: pushRow.assumedMasterState ? 
transformPushDoc(pushRow.assumedMasterState) : undefined\n };\n sendRows.push(newRow);\n });\n const variables = {\n [variableName]: sendRows\n };\n return {\n query,\n operationName,\n variables\n };\n };\n\n return builder;\n}\n\ntype GenerateGQLOutputFieldsOptions = {\n schema: RxJsonSchema | TopLevelProperty,\n spaceCount?: number,\n depth?: number\n ignoreOutputKeys?: string[]\n}\n\nfunction generateGQLOutputFields(options: GenerateGQLOutputFieldsOptions) {\n const { schema, spaceCount = 4, depth = 0, ignoreOutputKeys = [] } = options;\n\n const outputFields: string[] = [];\n const properties = schema.properties \n const NESTED_SPACING = SPACING.repeat(depth);\n const LINE_SPACING = SPACING.repeat(spaceCount);\n \n for (const key in properties) {\n //only skipping top level keys that are in ignoreOutputKeys list\n if (ignoreOutputKeys.includes(key)) {\n continue;\n }\n\n const value = properties[key];\n if (value.type === \"object\") {\n outputFields.push(\n LINE_SPACING + NESTED_SPACING + key + \" {\",\n generateGQLOutputFields({ schema: value, spaceCount, depth: depth + 1 }),\n LINE_SPACING + NESTED_SPACING + \"}\"\n );\n } else {\n outputFields.push(LINE_SPACING + NESTED_SPACING + key);\n }\n }\n \n return 
outputFields.join('\\n');\n}\n\n\n\n"],"mappings":"AAAA,SAEIA,eAAe,EAEfC,OAAO,QACJ,oCAAoC;AAC3C,SAASC,cAAc,EAAEC,OAAO,QAAQ,8BAA8B;AAUtE,OAAO,SAASC,4BAA4BA,CACxCC,cAAsB,EACtBC,KAAqD,EACZ;EACzCA,KAAK,GAAGN,eAAe,CAACM,KAAK,CAAC;EAC9B,IAAMC,MAAM,GAAGD,KAAK,CAACC,MAAM;EAC3B,IAAMC,QAAkB,GAAGF,KAAK,CAACE,QAAe;EAEhD,IAAMC,gBAAgB,GAAGN,OAAO,CAACE,cAAc,CAAC;EAChD,IAAMK,SAAS,GAAGF,QAAQ,CAACG,IAAI,GAAGF,gBAAgB;EAClD,IAAMG,aAAa,GAAGT,OAAO,CAACO,SAAS,CAAC;EAExC,IAAMG,YAAY,GAAGC,uBAAuB,CAAC;IAAEP,MAAM;IAAEQ,gBAAgB,EAAET,KAAK,CAACS;EAAiB,CAAC,CAAC;EAClG;;EAEA,IAAMC,mBAAmB,GAAGP,gBAAgB,GAAG,OAAO,GAAGD,QAAQ,CAACS,UAAU;EAC5E,IAAMC,OAAkD,GAAGA,CAACD,UAAe,EAAEE,KAAa,KAAK;IAC3F,IAAMC,KAAK,GAAG,QAAQ,GAAGR,aAAa,GAAG,gBAAgB,GAAGI,mBAAmB,GAAG,qBAAqB,GACnGf,OAAO,GAAGA,OAAO,GAAGS,SAAS,GAAG,8CAA8C,GAC9ET,OAAO,GAAGA,OAAO,GAAGA,OAAO,GAAG,eAAe,GAC7CY,YAAY,GAAI,IAAI,GACpBZ,OAAO,GAAGA,OAAO,GAAGA,OAAO,GAAG,KAAK,GACnCA,OAAO,GAAGA,OAAO,GAAGA,OAAO,GAAG,gBAAgB,GAC9CA,OAAO,GAAGA,OAAO,GAAGA,OAAO,GAAGA,OAAO,GAAGK,KAAK,CAACe,gBAAgB,CAACC,IAAI,CAAC,IAAI,GAAGrB,OAAO,GAAGA,OAAO,GAAGA,OAAO,GAAGA,OAAO,CAAC,GAAG,IAAI,GACxHA,OAAO,GAAGA,OAAO,GAAGA,OAAO,GAAG,KAAK,GACnCA,OAAO,GAAGA,OAAO,GAAG,KAAK,GACzB,GAAG;IACP,OAAO;MACHmB,KAAK;MACLR,aAAa;MACbW,SAAS,EAAE;QACPN,UAAU;QACVE;MACJ;IACJ,CAAC;EACL,CAAC;EAED,OAAOD,OAAO;AAClB;AAEA,OAAO,SAASM,6BAA6BA,CACzCnB,cAAsB,EACtBC,KAAqD,EACvD;EACEA,KAAK,GAAGN,eAAe,CAACM,KAAK,CAAC;EAC9B,IAAMC,MAAM,GAAGD,KAAK,CAACC,MAAM;EAC3B,IAAMC,QAAkB,GAAGF,KAAK,CAACE,QAAe;EAEhD,IAAMC,gBAAgB,GAAGN,OAAO,CAACE,cAAc,CAAC;EAChD,IAAMK,SAAS,GAAGF,QAAQ,CAACiB,MAAM,GAAGhB,gBAAgB;EACpD,IAAMI,YAAY,GAAGC,uBAAuB,CAAC;IAAEP,MAAM;IAAEQ,gBAAgB,EAAET,KAAK,CAACS;EAAiB,CAAC,CAAC;EAElG,IAAMW,WAAW,GAAGjB,gBAAgB,GAAG,OAAO,GAAGD,QAAQ,CAACmB,OAAO;EAEjE,IAAMP,KAAK,GAAG,iBAAiB,GAAGjB,OAAO,CAACD,cAAc,CAACM,QAAQ,CAACiB,MAAM,CAAC,CAAC,GAAG,aAAa,GAAGC,WAAW,GAAG,OAAO,GAC9GzB,OAAO,GAAGS,SAAS,GAAG,yBAAyB,GAC/CT,OAAO,GAAGA,OAAO,GAAGA,OAAO,GAAG,eAAe,GAC7CY,YAAY,GAAI,IAAI,GACpBZ,OAAO,GAAGA,OAAO,GAAGA,OAAO,GAAG,KAAK,GACnCA,OAAO,GAAGA,OAA
O,GAAGA,OAAO,GAAG,gBAAgB,GAC9CA,OAAO,GAAGA,OAAO,GAAGA,OAAO,GAAGA,OAAO,GAAGK,KAAK,CAACe,gBAAgB,CAACC,IAAI,CAAC,IAAI,GAAGrB,OAAO,GAAGA,OAAO,GAAGA,OAAO,GAAGA,OAAO,CAAC,GAAG,IAAI,GACxHA,OAAO,GAAGA,OAAO,GAAGA,OAAO,GAAG,KAAK,GACnCA,OAAO,GAAG,GAAG,GACb,GAAG;EAEP,IAAMiB,OAAmD,GAAIS,OAAY,IAAK;IAC1E,OAAO;MACHP,KAAK;MACLG,SAAS,EAAE;QACPI;MACJ;IACJ,CAAC;EACL,CAAC;EACD,OAAOT,OAAO;AAClB;AAGA,OAAO,SAASU,4BAA4BA,CACxCvB,cAAsB,EACtBC,KAAqD,EACjB;EACpCA,KAAK,GAAGN,eAAe,CAACM,KAAK,CAAC;EAC9B,IAAME,QAAkB,GAAGF,KAAK,CAACE,QAAe;EAEhD,IAAMC,gBAAgB,GAAGN,OAAO,CAACE,cAAc,CAAC;EAChD,IAAMK,SAAS,GAAGF,QAAQ,CAACqB,IAAI,GAAGpB,gBAAgB;EAClD,IAAMG,aAAa,GAAGT,OAAO,CAACO,SAAS,CAAC;EAExC,IAAMoB,YAAY,GAAGzB,cAAc,GAAGG,QAAQ,CAACuB,OAAO;EACtD,IAAMC,YAAY,GAAGlB,uBAAuB,CAAC;IAAEP,MAAM,EAAED,KAAK,CAACC,MAAM;IAAE0B,UAAU,EAAE;EAAE,CAAC,CAAC;EAErF,IAAMf,OAA6C,GAAIgB,QAAQ,IAAK;IAChE,IAAMd,KAAK,GAAG,EAAE,GACZ,WAAW,GAAGR,aAAa,GAAG,IAAI,GAAGkB,YAAY,GAAG,KAAK,GAAGrB,gBAAgB,GAAG,OAAO,GAAGD,QAAQ,CAACuB,OAAO,GAAG,SAAS,GACrH9B,OAAO,GAAGS,SAAS,GAAG,GAAG,GAAGoB,YAAY,GAAG,KAAK,GAAGA,YAAY,GAAG,OAAO,GACzEE,YAAY,GAAI,IAAI,GACpB/B,OAAO,GAAG,KAAK,GACf,GAAG;IAEP,IAAMkC,QAAyB,GAAG,EAAE;IACpC,SAASC,gBAAgBA,CAACC,GAAqB,EAAE;MAC7C,IAAMC,OAAY,GAAG,CAAC,CAAC;MACvBC,MAAM,CAACC,OAAO,CAACH,GAAG,CAAC,CAACI,OAAO,CAAC,CAAC,CAACC,CAAC,EAAEC,CAAC,CAAC,KAAK;QACpC;QACI;QACA,CAAErC,KAAK,CAACsC,eAAe,CAAcC,QAAQ,CAACH,CAAC,CAAC;QAChD;QACApC,KAAK,CAACC,MAAM,CAACuC,UAAU,CAACJ,CAAC,CAAC,EAC5B;UACEJ,OAAO,CAACI,CAAC,CAAC,GAAGC,CAAC;QAClB;MACJ,CAAC,CAAC;MACF,OAAOL,OAAO;IAClB;IACAJ,QAAQ,CAACO,OAAO,CAACV,OAAO,IAAI;MACxB,IAAMgB,MAAsB,GAAG;QAC3BC,gBAAgB,EAAEZ,gBAAgB,CAACL,OAAO,CAACiB,gBAAgB,CAAC;QAC5DC,kBAAkB,EAAElB,OAAO,CAACkB,kBAAkB,GAAGb,gBAAgB,CAACL,OAAO,CAACkB,kBAAkB,CAAC,GAAGC;MACpG,CAAC;MACDf,QAAQ,CAACN,IAAI,CAACkB,MAAM,CAAC;IACzB,CAAC,CAAC;IACF,IAAMxB,SAAS,GAAG;MACd,CAACO,YAAY,GAAGK;IACpB,CAAC;IACD,OAAO;MACHf,KAAK;MACLR,aAAa;MACbW;IACJ,CAAC;EACL,CAAC;EAED,OAAOL,OAAO;AAClB;AASA,SAASJ,uBAAuBA,CAACqC,OAAuC,EAAE;EACtE,IAAM;IAAE5C,MAAM;IAAE0B,UAAU,GAAG,
CAAC;IAAEmB,KAAK,GAAG,CAAC;IAAErC,gBAAgB,GAAG;EAAG,CAAC,GAAGoC,OAAO;EAE5E,IAAMtC,YAAsB,GAAG,EAAE;EACjC,IAAMiC,UAAU,GAAGvC,MAAM,CAACuC,UAAU;EACpC,IAAMO,cAAc,GAAGpD,OAAO,CAACqD,MAAM,CAACF,KAAK,CAAC;EAC5C,IAAMG,YAAY,GAAGtD,OAAO,CAACqD,MAAM,CAACrB,UAAU,CAAC;EAE/C,KAAK,IAAMuB,GAAG,IAAIV,UAAU,EAAE;IAC1B;IACA,IAAI/B,gBAAgB,CAAC8B,QAAQ,CAACW,GAAG,CAAC,EAAE;MAChC;IACJ;IAEA,IAAMC,KAAK,GAAGX,UAAU,CAACU,GAAG,CAAC;IAC7B,IAAIC,KAAK,CAACC,IAAI,KAAK,QAAQ,EAAE;MAC3B7C,YAAY,CAACgB,IAAI,CACf0B,YAAY,GAAGF,cAAc,GAAGG,GAAG,GAAG,IAAI,EAC1C1C,uBAAuB,CAAC;QAAEP,MAAM,EAAEkD,KAAK;QAAExB,UAAU;QAAEmB,KAAK,EAAEA,KAAK,GAAG;MAAE,CAAC,CAAC,EACxEG,YAAY,GAAGF,cAAc,GAAG,GAClC,CAAC;IACH,CAAC,MAAM;MACHxC,YAAY,CAACgB,IAAI,CAAC0B,YAAY,GAAGF,cAAc,GAAGG,GAAG,CAAC;IAC1D;EACJ;EAEA,OAAO3C,YAAY,CAACS,IAAI,CAAC,IAAI,CAAC;AAClC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-nats/index.js b/dist/esm/plugins/replication-nats/index.js deleted file mode 100644 index dce8fa43318..00000000000 --- a/dist/esm/plugins/replication-nats/index.js +++ /dev/null @@ -1,202 +0,0 @@ -import _inheritsLoose from "@babel/runtime/helpers/inheritsLoose"; -import { ensureNotFalsy, errorToPlainJson } from "../../plugins/utils/index.js"; -import { RxDBLeaderElectionPlugin } from "../leader-election/index.js"; -import { RxReplicationState, startReplicationOnLeaderShip } from "../replication/index.js"; -import { addRxPlugin, newRxError } from "../../index.js"; -import { Subject } from 'rxjs'; -import { connect, DeliverPolicy, JSONCodec, ReplayPolicy } from 'nats'; -import { getNatsServerDocumentState } from "./nats-helper.js"; -import { awaitRetry } from "../replication/replication-helper.js"; -export * from "./nats-types.js"; -export * from "./nats-helper.js"; -export var RxNatsReplicationState = /*#__PURE__*/function (_RxReplicationState) { - function RxNatsReplicationState(replicationIdentifier, collection, pull, push, live = true, retryTime = 1000 * 5, autoStart = true) { - var _this; - _this = 
_RxReplicationState.call(this, replicationIdentifier, collection, '_deleted', pull, push, live, retryTime, autoStart) || this; - _this.replicationIdentifier = replicationIdentifier; - _this.collection = collection; - _this.pull = pull; - _this.push = push; - _this.live = live; - _this.retryTime = retryTime; - _this.autoStart = autoStart; - return _this; - } - _inheritsLoose(RxNatsReplicationState, _RxReplicationState); - return RxNatsReplicationState; -}(RxReplicationState); -export function replicateNats(options) { - options.live = typeof options.live === 'undefined' ? true : options.live; - options.waitForLeadership = typeof options.waitForLeadership === 'undefined' ? true : options.waitForLeadership; - var collection = options.collection; - var primaryPath = collection.schema.primaryPath; - addRxPlugin(RxDBLeaderElectionPlugin); - var jc = JSONCodec(); - var connectionStatePromise = (async () => { - var nc = await connect(options.connection); - var jetstreamClient = nc.jetstream(); - var jsm = await nc.jetstreamManager(); - await jsm.streams.add({ - name: options.streamName, - subjects: [options.subjectPrefix + '.*'] - }); - var natsStream = await jetstreamClient.streams.get(options.streamName); - return { - nc, - jetstreamClient, - jsm, - natsStream - }; - })(); - var pullStream$ = new Subject(); - var replicationPrimitivesPull; - if (options.pull) { - replicationPrimitivesPull = { - async handler(lastPulledCheckpoint, batchSize) { - var cn = await connectionStatePromise; - var newCheckpoint = { - sequence: lastPulledCheckpoint ? lastPulledCheckpoint.sequence : 0 - }; - var consumer = await cn.natsStream.getConsumer({ - opt_start_seq: lastPulledCheckpoint ? 
lastPulledCheckpoint.sequence : 0, - deliver_policy: DeliverPolicy.LastPerSubject, - replay_policy: ReplayPolicy.Instant - }); - var fetchedMessages = await consumer.fetch({ - max_messages: batchSize - }); - await fetchedMessages.signal; - await fetchedMessages.close(); - var useMessages = []; - for await (var m of fetchedMessages) { - useMessages.push(m.json()); - newCheckpoint.sequence = m.seq; - m.ack(); - } - return { - documents: useMessages, - checkpoint: newCheckpoint - }; - }, - batchSize: ensureNotFalsy(options.pull).batchSize, - modifier: ensureNotFalsy(options.pull).modifier, - stream$: pullStream$.asObservable() - }; - } - var replicationPrimitivesPush; - if (options.push) { - replicationPrimitivesPush = { - async handler(rows) { - var cn = await connectionStatePromise; - var conflicts = []; - await Promise.all(rows.map(async writeRow => { - var docId = writeRow.newDocumentState[primaryPath]; - - /** - * first get the current state of the documents from the server - * so that we have the sequence number for conflict detection. - */ - var remoteDocState; - try { - remoteDocState = await getNatsServerDocumentState(cn.natsStream, options.subjectPrefix, docId); - } catch (err) { - if (!err.message.includes('no message found')) { - throw err; - } - } - if (remoteDocState && (!writeRow.assumedMasterState || (await collection.conflictHandler({ - newDocumentState: remoteDocState.json(), - realMasterState: writeRow.assumedMasterState - }, 'replication-firestore-push')).isEqual === false)) { - // conflict - conflicts.push(remoteDocState.json()); - } else { - // no conflict (yet) - var pushDone = false; - while (!pushDone) { - try { - await cn.jetstreamClient.publish(options.subjectPrefix + '.' + docId, jc.encode(writeRow.newDocumentState), { - expect: remoteDocState ? 
{ - streamName: options.streamName, - lastSubjectSequence: remoteDocState.seq - } : undefined - }); - pushDone = true; - } catch (err) { - if (err.message.includes('wrong last sequence')) { - // A write happened while we are doing our write -> handle conflict - var newServerState = await getNatsServerDocumentState(cn.natsStream, options.subjectPrefix, docId); - conflicts.push(ensureNotFalsy(newServerState).json()); - pushDone = true; - } else { - replicationState.subjects.error.next(newRxError('RC_STREAM', { - document: writeRow.newDocumentState, - error: errorToPlainJson(err) - })); - - // -> retry after wait - await awaitRetry(collection, replicationState.retryTime); - } - } - } - } - })); - return conflicts; - }, - batchSize: options.push.batchSize, - modifier: options.push.modifier - }; - } - var replicationState = new RxNatsReplicationState(options.replicationIdentifier, collection, replicationPrimitivesPull, replicationPrimitivesPush, options.live, options.retryTime, options.autoStart); - - /** - * Use long polling to get live changes for the pull.stream$ - */ - if (options.live && options.pull) { - var startBefore = replicationState.start.bind(replicationState); - var cancelBefore = replicationState.cancel.bind(replicationState); - replicationState.start = async () => { - var cn = await connectionStatePromise; - - /** - * First get the last sequence so that we can - * laster only fetch 'newer' messages. 
- */ - var lastSeq = 0; - try { - var lastDocState = await cn.natsStream.getMessage({ - last_by_subj: options.subjectPrefix + '.*' - }); - lastSeq = lastDocState.seq; - } catch (err) { - if (!err.message.includes('no message found')) { - throw err; - } - } - var consumer = await cn.natsStream.getConsumer({ - opt_start_seq: lastSeq - }); - var newMessages = await consumer.consume(); - (async () => { - for await (var m of newMessages) { - var docData = m.json(); - pullStream$.next({ - documents: [docData], - checkpoint: { - sequence: m.seq - } - }); - m.ack(); - } - })(); - replicationState.cancel = () => { - newMessages.close(); - return cancelBefore(); - }; - return startBefore(); - }; - } - startReplicationOnLeaderShip(options.waitForLeadership, replicationState); - return replicationState; -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication-nats/index.js.map b/dist/esm/plugins/replication-nats/index.js.map deleted file mode 100644 index 86ff036ec1e..00000000000 --- a/dist/esm/plugins/replication-nats/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","names":["ensureNotFalsy","errorToPlainJson","RxDBLeaderElectionPlugin","RxReplicationState","startReplicationOnLeaderShip","addRxPlugin","newRxError","Subject","connect","DeliverPolicy","JSONCodec","ReplayPolicy","getNatsServerDocumentState","awaitRetry","RxNatsReplicationState","_RxReplicationState","replicationIdentifier","collection","pull","push","live","retryTime","autoStart","_this","call","_inheritsLoose","replicateNats","options","waitForLeadership","primaryPath","schema","jc","connectionStatePromise","nc","connection","jetstreamClient","jetstream","jsm","jetstreamManager","streams","add","name","streamName","subjects","subjectPrefix","natsStream","get","pullStream$","replicationPrimitivesPull","handler","lastPulledCheckpoint","batchSize","cn","newCheckpoint","sequence","consumer","getConsumer","opt_start_seq","deliver_policy","LastPerSubject","replay_policy","Instant","fetchedMessages","fetch","max_messages","signal","close","useMessages","m","json","seq","ack","documents","checkpoint","modifier","stream$","asObservable","replicationPrimitivesPush","rows","conflicts","Promise","all","map","writeRow","docId","newDocumentState","remoteDocState","err","message","includes","assumedMasterState","conflictHandler","realMasterState","isEqual","pushDone","publish","encode","expect","lastSubjectSequence","undefined","newServerState","replicationState","error","next","document","startBefore","start","bind","cancelBefore","cancel","lastSeq","lastDocState","getMessage","last_by_subj","newMessages","consume","docData"],"sources":["../../../../src/plugins/replication-nats/index.ts"],"sourcesContent":["import {\n ensureNotFalsy,\n errorToPlainJson\n} from '../../plugins/utils/index.ts';\n\n\nimport { RxDBLeaderElectionPlugin } from '../leader-election/index.ts';\nimport type {\n RxCollection,\n ReplicationPullOptions,\n ReplicationPushOptions,\n RxReplicationWriteToMasterRow,\n RxReplicationPullStreamItem\n} from 
'../../types/index.d.ts';\nimport {\n RxReplicationState,\n startReplicationOnLeaderShip\n} from '../replication/index.ts';\nimport {\n addRxPlugin,\n newRxError,\n WithDeleted\n} from '../../index.ts';\n\nimport { Subject } from 'rxjs';\nimport type {\n NatsCheckpointType,\n NatsSyncOptions\n} from './nats-types.ts';\nimport { connect, DeliverPolicy, JSONCodec, ReplayPolicy } from 'nats';\nimport { getNatsServerDocumentState } from './nats-helper.ts';\nimport { awaitRetry } from '../replication/replication-helper.ts';\n\nexport * from './nats-types.ts';\nexport * from './nats-helper.ts';\n\n\nexport class RxNatsReplicationState extends RxReplicationState {\n constructor(\n public readonly replicationIdentifier: string,\n public readonly collection: RxCollection,\n public readonly pull?: ReplicationPullOptions,\n public readonly push?: ReplicationPushOptions,\n public readonly live: boolean = true,\n public retryTime: number = 1000 * 5,\n public autoStart: boolean = true\n ) {\n super(\n replicationIdentifier,\n collection,\n '_deleted',\n pull,\n push,\n live,\n retryTime,\n autoStart\n );\n }\n}\n\n\n\nexport function replicateNats(\n options: NatsSyncOptions\n): RxNatsReplicationState {\n options.live = typeof options.live === 'undefined' ? true : options.live;\n options.waitForLeadership = typeof options.waitForLeadership === 'undefined' ? 
true : options.waitForLeadership;\n\n const collection = options.collection;\n const primaryPath = collection.schema.primaryPath;\n addRxPlugin(RxDBLeaderElectionPlugin);\n\n const jc = JSONCodec();\n\n\n const connectionStatePromise = (async () => {\n const nc = await connect(options.connection);\n const jetstreamClient = nc.jetstream();\n const jsm = await nc.jetstreamManager();\n await jsm.streams.add({\n name: options.streamName, subjects: [\n options.subjectPrefix + '.*'\n ]\n });\n const natsStream = await jetstreamClient.streams.get(options.streamName);\n return {\n nc,\n jetstreamClient,\n jsm,\n natsStream\n };\n })();\n const pullStream$: Subject> = new Subject();\n\n let replicationPrimitivesPull: ReplicationPullOptions | undefined;\n if (options.pull) {\n replicationPrimitivesPull = {\n async handler(\n lastPulledCheckpoint: NatsCheckpointType | undefined,\n batchSize: number\n ) {\n const cn = await connectionStatePromise;\n const newCheckpoint: NatsCheckpointType = {\n sequence: lastPulledCheckpoint ? lastPulledCheckpoint.sequence : 0\n };\n const consumer = await cn.natsStream.getConsumer({\n opt_start_seq: lastPulledCheckpoint ? 
lastPulledCheckpoint.sequence : 0,\n deliver_policy: DeliverPolicy.LastPerSubject,\n replay_policy: ReplayPolicy.Instant\n });\n\n const fetchedMessages = await consumer.fetch({\n max_messages: batchSize\n });\n await (fetchedMessages as any).signal;\n await fetchedMessages.close();\n\n const useMessages: WithDeleted[] = [];\n for await (const m of fetchedMessages) {\n useMessages.push(m.json());\n newCheckpoint.sequence = m.seq;\n m.ack();\n }\n return {\n documents: useMessages,\n checkpoint: newCheckpoint\n };\n },\n batchSize: ensureNotFalsy(options.pull).batchSize,\n modifier: ensureNotFalsy(options.pull).modifier,\n stream$: pullStream$.asObservable()\n };\n }\n\n\n let replicationPrimitivesPush: ReplicationPushOptions | undefined;\n if (options.push) {\n replicationPrimitivesPush = {\n async handler(\n rows: RxReplicationWriteToMasterRow[]\n ) {\n const cn = await connectionStatePromise;\n const conflicts: WithDeleted[] = [];\n await Promise.all(\n rows.map(async (writeRow) => {\n const docId = (writeRow.newDocumentState as any)[primaryPath];\n\n /**\n * first get the current state of the documents from the server\n * so that we have the sequence number for conflict detection.\n */\n let remoteDocState;\n try {\n remoteDocState = await getNatsServerDocumentState(\n cn.natsStream,\n options.subjectPrefix,\n docId\n );\n } catch (err: Error | any) {\n if (!err.message.includes('no message found')) {\n throw err;\n }\n }\n\n if (\n remoteDocState &&\n (\n !writeRow.assumedMasterState ||\n (await collection.conflictHandler({\n newDocumentState: remoteDocState.json(),\n realMasterState: writeRow.assumedMasterState\n }, 'replication-firestore-push')).isEqual === false\n )\n ) {\n // conflict\n conflicts.push(remoteDocState.json());\n } else {\n // no conflict (yet)\n let pushDone = false;\n while (!pushDone) {\n try {\n await cn.jetstreamClient.publish(\n options.subjectPrefix + '.' + docId,\n jc.encode(writeRow.newDocumentState),\n {\n expect: remoteDocState ? 
{\n streamName: options.streamName,\n lastSubjectSequence: remoteDocState.seq\n } : undefined\n }\n );\n pushDone = true;\n } catch (err: Error | any) {\n if (err.message.includes('wrong last sequence')) {\n // A write happened while we are doing our write -> handle conflict\n const newServerState = await getNatsServerDocumentState(\n cn.natsStream,\n options.subjectPrefix,\n docId\n );\n conflicts.push(ensureNotFalsy(newServerState).json());\n pushDone = true;\n } else {\n replicationState.subjects.error.next(\n newRxError('RC_STREAM', {\n document: writeRow.newDocumentState,\n error: errorToPlainJson(err)\n })\n );\n\n // -> retry after wait\n await awaitRetry(\n collection,\n replicationState.retryTime\n );\n }\n }\n }\n }\n })\n );\n return conflicts;\n },\n batchSize: options.push.batchSize,\n modifier: options.push.modifier\n };\n }\n\n\n const replicationState = new RxNatsReplicationState(\n options.replicationIdentifier,\n collection,\n replicationPrimitivesPull,\n replicationPrimitivesPush,\n options.live,\n options.retryTime,\n options.autoStart\n );\n\n /**\n * Use long polling to get live changes for the pull.stream$\n */\n if (options.live && options.pull) {\n const startBefore = replicationState.start.bind(replicationState);\n const cancelBefore = replicationState.cancel.bind(replicationState);\n replicationState.start = async () => {\n const cn = await connectionStatePromise;\n\n /**\n * First get the last sequence so that we can\n * laster only fetch 'newer' messages.\n */\n let lastSeq = 0;\n try {\n const lastDocState = await cn.natsStream.getMessage({\n last_by_subj: options.subjectPrefix + '.*'\n });\n lastSeq = lastDocState.seq;\n } catch (err: any | Error) {\n if (!err.message.includes('no message found')) {\n throw err;\n }\n }\n\n const consumer = await cn.natsStream.getConsumer({\n opt_start_seq: lastSeq\n });\n const newMessages = await consumer.consume();\n (async () => {\n for await (const m of newMessages) {\n const docData: WithDeleted 
= m.json();\n pullStream$.next({\n documents: [docData],\n checkpoint: {\n sequence: m.seq\n }\n });\n m.ack();\n }\n })();\n replicationState.cancel = () => {\n newMessages.close();\n return cancelBefore();\n };\n return startBefore();\n };\n }\n\n startReplicationOnLeaderShip(options.waitForLeadership, replicationState);\n\n return replicationState;\n}\n"],"mappings":";AAAA,SACIA,cAAc,EACdC,gBAAgB,QACb,8BAA8B;AAGrC,SAASC,wBAAwB,QAAQ,6BAA6B;AAQtE,SACIC,kBAAkB,EAClBC,4BAA4B,QACzB,yBAAyB;AAChC,SACIC,WAAW,EACXC,UAAU,QAEP,gBAAgB;AAEvB,SAASC,OAAO,QAAQ,MAAM;AAK9B,SAASC,OAAO,EAAEC,aAAa,EAAEC,SAAS,EAAEC,YAAY,QAAQ,MAAM;AACtE,SAASC,0BAA0B,QAAQ,kBAAkB;AAC7D,SAASC,UAAU,QAAQ,sCAAsC;AAEjE,cAAc,iBAAiB;AAC/B,cAAc,kBAAkB;AAGhC,WAAaC,sBAAsB,0BAAAC,mBAAA;EAC/B,SAAAD,uBACoBE,qBAA6B,EAC7BC,UAAmC,EACnCC,IAA4D,EAC5DC,IAAwC,EACxCC,IAAa,GAAG,IAAI,EAC7BC,SAAiB,GAAG,IAAI,GAAG,CAAC,EAC5BC,SAAkB,GAAG,IAAI,EAClC;IAAA,IAAAC,KAAA;IACEA,KAAA,GAAAR,mBAAA,CAAAS,IAAA,OACIR,qBAAqB,EACrBC,UAAU,EACV,UAAU,EACVC,IAAI,EACJC,IAAI,EACJC,IAAI,EACJC,SAAS,EACTC,SACJ,CAAC;IAACC,KAAA,CAjBcP,qBAA6B,GAA7BA,qBAA6B;IAAAO,KAAA,CAC7BN,UAAmC,GAAnCA,UAAmC;IAAAM,KAAA,CACnCL,IAA4D,GAA5DA,IAA4D;IAAAK,KAAA,CAC5DJ,IAAwC,GAAxCA,IAAwC;IAAAI,KAAA,CACxCH,IAAa,GAAbA,IAAa;IAAAG,KAAA,CACtBF,SAAiB,GAAjBA,SAAiB;IAAAE,KAAA,CACjBD,SAAkB,GAAlBA,SAAkB;IAAA,OAAAC,KAAA;EAY7B;EAACE,cAAA,CAAAX,sBAAA,EAAAC,mBAAA;EAAA,OAAAD,sBAAA;AAAA,EApBkDX,kBAAkB;AAyBzE,OAAO,SAASuB,aAAaA,CACzBC,OAAmC,EACF;EACjCA,OAAO,CAACP,IAAI,GAAG,OAAOO,OAAO,CAACP,IAAI,KAAK,WAAW,GAAG,IAAI,GAAGO,OAAO,CAACP,IAAI;EACxEO,OAAO,CAACC,iBAAiB,GAAG,OAAOD,OAAO,CAACC,iBAAiB,KAAK,WAAW,GAAG,IAAI,GAAGD,OAAO,CAACC,iBAAiB;EAE/G,IAAMX,UAAU,GAAGU,OAAO,CAACV,UAAU;EACrC,IAAMY,WAAW,GAAGZ,UAAU,CAACa,MAAM,CAACD,WAAW;EACjDxB,WAAW,CAACH,wBAAwB,CAAC;EAErC,IAAM6B,EAAE,GAAGrB,SAAS,CAAC,CAAC;EAGtB,IAAMsB,sBAAsB,GAAG,CAAC,YAAY;IACxC,IAAMC,EAAE,GAAG,MAAMzB,OAAO,CAACmB,OAAO,CAACO,UAAU,CAAC;IAC5C,IAAMC,eAAe,GAAGF,EAAE,CAACG,SAAS,CAAC,CAAC;IACtC,IAAMC,GAAG,GAAG,MAAMJ,EAAE,CAACK,gBAAgB,CAAC,CAAC;IACvC,MAAMD
,GAAG,CAACE,OAAO,CAACC,GAAG,CAAC;MAClBC,IAAI,EAAEd,OAAO,CAACe,UAAU;MAAEC,QAAQ,EAAE,CAChChB,OAAO,CAACiB,aAAa,GAAG,IAAI;IAEpC,CAAC,CAAC;IACF,IAAMC,UAAU,GAAG,MAAMV,eAAe,CAACI,OAAO,CAACO,GAAG,CAACnB,OAAO,CAACe,UAAU,CAAC;IACxE,OAAO;MACHT,EAAE;MACFE,eAAe;MACfE,GAAG;MACHQ;IACJ,CAAC;EACL,CAAC,EAAE,CAAC;EACJ,IAAME,WAAgF,GAAG,IAAIxC,OAAO,CAAC,CAAC;EAEtG,IAAIyC,yBAA4F;EAChG,IAAIrB,OAAO,CAACT,IAAI,EAAE;IACd8B,yBAAyB,GAAG;MACxB,MAAMC,OAAOA,CACTC,oBAAoD,EACpDC,SAAiB,EACnB;QACE,IAAMC,EAAE,GAAG,MAAMpB,sBAAsB;QACvC,IAAMqB,aAAiC,GAAG;UACtCC,QAAQ,EAAEJ,oBAAoB,GAAGA,oBAAoB,CAACI,QAAQ,GAAG;QACrE,CAAC;QACD,IAAMC,QAAQ,GAAG,MAAMH,EAAE,CAACP,UAAU,CAACW,WAAW,CAAC;UAC7CC,aAAa,EAAEP,oBAAoB,GAAGA,oBAAoB,CAACI,QAAQ,GAAG,CAAC;UACvEI,cAAc,EAAEjD,aAAa,CAACkD,cAAc;UAC5CC,aAAa,EAAEjD,YAAY,CAACkD;QAChC,CAAC,CAAC;QAEF,IAAMC,eAAe,GAAG,MAAMP,QAAQ,CAACQ,KAAK,CAAC;UACzCC,YAAY,EAAEb;QAClB,CAAC,CAAC;QACF,MAAOW,eAAe,CAASG,MAAM;QACrC,MAAMH,eAAe,CAACI,KAAK,CAAC,CAAC;QAE7B,IAAMC,WAAqC,GAAG,EAAE;QAChD,WAAW,IAAMC,CAAC,IAAIN,eAAe,EAAE;UACnCK,WAAW,CAAChD,IAAI,CAACiD,CAAC,CAACC,IAAI,CAAC,CAAC,CAAC;UAC1BhB,aAAa,CAACC,QAAQ,GAAGc,CAAC,CAACE,GAAG;UAC9BF,CAAC,CAACG,GAAG,CAAC,CAAC;QACX;QACA,OAAO;UACHC,SAAS,EAAEL,WAAW;UACtBM,UAAU,EAAEpB;QAChB,CAAC;MACL,CAAC;MACDF,SAAS,EAAEnD,cAAc,CAAC2B,OAAO,CAACT,IAAI,CAAC,CAACiC,SAAS;MACjDuB,QAAQ,EAAE1E,cAAc,CAAC2B,OAAO,CAACT,IAAI,CAAC,CAACwD,QAAQ;MAC/CC,OAAO,EAAE5B,WAAW,CAAC6B,YAAY,CAAC;IACtC,CAAC;EACL;EAGA,IAAIC,yBAAwE;EAC5E,IAAIlD,OAAO,CAACR,IAAI,EAAE;IACd0D,yBAAyB,GAAG;MACxB,MAAM5B,OAAOA,CACT6B,IAAgD,EAClD;QACE,IAAM1B,EAAE,GAAG,MAAMpB,sBAAsB;QACvC,IAAM+C,SAAmC,GAAG,EAAE;QAC9C,MAAMC,OAAO,CAACC,GAAG,CACbH,IAAI,CAACI,GAAG,CAAC,MAAOC,QAAQ,IAAK;UACzB,IAAMC,KAAK,GAAID,QAAQ,CAACE,gBAAgB,CAASxD,WAAW,CAAC;;UAE7D;AACxB;AACA;AACA;UACwB,IAAIyD,cAAc;UAClB,IAAI;YACAA,cAAc,GAAG,MAAM1E,0BAA0B,CAC7CwC,EAAE,CAACP,UAAU,EACblB,OAAO,CAACiB,aAAa,EACrBwC,KACJ,CAAC;UACL,CAAC,CAAC,OAAOG,GAAgB,EAAE;YACvB,IAAI,CAACA,GAAG,CAACC,OAAO,CAACC,QAAQ,CAAC,kBAAkB,CAAC,EAAE;cAC3C,MAAMF,GAAG;YACb;UACJ;UAEA,IACID,cAAc,KAEV
,CAACH,QAAQ,CAACO,kBAAkB,IAC5B,CAAC,MAAMzE,UAAU,CAAC0E,eAAe,CAAC;YAC9BN,gBAAgB,EAAEC,cAAc,CAACjB,IAAI,CAAC,CAAC;YACvCuB,eAAe,EAAET,QAAQ,CAACO;UAC9B,CAAC,EAAE,4BAA4B,CAAC,EAAEG,OAAO,KAAK,KAAK,CACtD,EACH;YACE;YACAd,SAAS,CAAC5D,IAAI,CAACmE,cAAc,CAACjB,IAAI,CAAC,CAAC,CAAC;UACzC,CAAC,MAAM;YACH;YACA,IAAIyB,QAAQ,GAAG,KAAK;YACpB,OAAO,CAACA,QAAQ,EAAE;cACd,IAAI;gBACA,MAAM1C,EAAE,CAACjB,eAAe,CAAC4D,OAAO,CAC5BpE,OAAO,CAACiB,aAAa,GAAG,GAAG,GAAGwC,KAAK,EACnCrD,EAAE,CAACiE,MAAM,CAACb,QAAQ,CAACE,gBAAgB,CAAC,EACpC;kBACIY,MAAM,EAAEX,cAAc,GAAG;oBACrB5C,UAAU,EAAEf,OAAO,CAACe,UAAU;oBAC9BwD,mBAAmB,EAAEZ,cAAc,CAAChB;kBACxC,CAAC,GAAG6B;gBACR,CACJ,CAAC;gBACDL,QAAQ,GAAG,IAAI;cACnB,CAAC,CAAC,OAAOP,GAAgB,EAAE;gBACvB,IAAIA,GAAG,CAACC,OAAO,CAACC,QAAQ,CAAC,qBAAqB,CAAC,EAAE;kBAC7C;kBACA,IAAMW,cAAc,GAAG,MAAMxF,0BAA0B,CACnDwC,EAAE,CAACP,UAAU,EACblB,OAAO,CAACiB,aAAa,EACrBwC,KACJ,CAAC;kBACDL,SAAS,CAAC5D,IAAI,CAACnB,cAAc,CAACoG,cAAc,CAAC,CAAC/B,IAAI,CAAC,CAAC,CAAC;kBACrDyB,QAAQ,GAAG,IAAI;gBACnB,CAAC,MAAM;kBACHO,gBAAgB,CAAC1D,QAAQ,CAAC2D,KAAK,CAACC,IAAI,CAChCjG,UAAU,CAAC,WAAW,EAAE;oBACpBkG,QAAQ,EAAErB,QAAQ,CAACE,gBAAgB;oBACnCiB,KAAK,EAAErG,gBAAgB,CAACsF,GAAG;kBAC/B,CAAC,CACL,CAAC;;kBAED;kBACA,MAAM1E,UAAU,CACZI,UAAU,EACVoF,gBAAgB,CAAChF,SACrB,CAAC;gBACL;cACJ;YACJ;UACJ;QACJ,CAAC,CACL,CAAC;QACD,OAAO0D,SAAS;MACpB,CAAC;MACD5B,SAAS,EAAExB,OAAO,CAACR,IAAI,CAACgC,SAAS;MACjCuB,QAAQ,EAAE/C,OAAO,CAACR,IAAI,CAACuD;IAC3B,CAAC;EACL;EAGA,IAAM2B,gBAAgB,GAAG,IAAIvF,sBAAsB,CAC/Ca,OAAO,CAACX,qBAAqB,EAC7BC,UAAU,EACV+B,yBAAyB,EACzB6B,yBAAyB,EACzBlD,OAAO,CAACP,IAAI,EACZO,OAAO,CAACN,SAAS,EACjBM,OAAO,CAACL,SACZ,CAAC;;EAED;AACJ;AACA;EACI,IAAIK,OAAO,CAACP,IAAI,IAAIO,OAAO,CAACT,IAAI,EAAE;IAC9B,IAAMuF,WAAW,GAAGJ,gBAAgB,CAACK,KAAK,CAACC,IAAI,CAACN,gBAAgB,CAAC;IACjE,IAAMO,YAAY,GAAGP,gBAAgB,CAACQ,MAAM,CAACF,IAAI,CAACN,gBAAgB,CAAC;IACnEA,gBAAgB,CAACK,KAAK,GAAG,YAAY;MACjC,IAAMtD,EAAE,GAAG,MAAMpB,sBAAsB;;MAEvC;AACZ;AACA;AACA;MACY,IAAI8E,OAAO,GAAG,CAAC;MACf,IAAI;QACA,IAAMC,YAAY,GAAG,MAAM3D,EAAE,CAACP,UAAU,CAACmE,UAAU,CAAC;UAChDC,YAAY,EAA
EtF,OAAO,CAACiB,aAAa,GAAG;QAC1C,CAAC,CAAC;QACFkE,OAAO,GAAGC,YAAY,CAACzC,GAAG;MAC9B,CAAC,CAAC,OAAOiB,GAAgB,EAAE;QACvB,IAAI,CAACA,GAAG,CAACC,OAAO,CAACC,QAAQ,CAAC,kBAAkB,CAAC,EAAE;UAC3C,MAAMF,GAAG;QACb;MACJ;MAEA,IAAMhC,QAAQ,GAAG,MAAMH,EAAE,CAACP,UAAU,CAACW,WAAW,CAAC;QAC7CC,aAAa,EAAEqD;MACnB,CAAC,CAAC;MACF,IAAMI,WAAW,GAAG,MAAM3D,QAAQ,CAAC4D,OAAO,CAAC,CAAC;MAC5C,CAAC,YAAY;QACT,WAAW,IAAM/C,CAAC,IAAI8C,WAAW,EAAE;UAC/B,IAAME,OAA+B,GAAGhD,CAAC,CAACC,IAAI,CAAC,CAAC;UAChDtB,WAAW,CAACwD,IAAI,CAAC;YACb/B,SAAS,EAAE,CAAC4C,OAAO,CAAC;YACpB3C,UAAU,EAAE;cACRnB,QAAQ,EAAEc,CAAC,CAACE;YAChB;UACJ,CAAC,CAAC;UACFF,CAAC,CAACG,GAAG,CAAC,CAAC;QACX;MACJ,CAAC,EAAE,CAAC;MACJ8B,gBAAgB,CAACQ,MAAM,GAAG,MAAM;QAC5BK,WAAW,CAAChD,KAAK,CAAC,CAAC;QACnB,OAAO0C,YAAY,CAAC,CAAC;MACzB,CAAC;MACD,OAAOH,WAAW,CAAC,CAAC;IACxB,CAAC;EACL;EAEArG,4BAA4B,CAACuB,OAAO,CAACC,iBAAiB,EAAEyE,gBAAgB,CAAC;EAEzE,OAAOA,gBAAgB;AAC3B","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-nats/nats-helper.js b/dist/esm/plugins/replication-nats/nats-helper.js deleted file mode 100644 index 6c1f8e0c576..00000000000 --- a/dist/esm/plugins/replication-nats/nats-helper.js +++ /dev/null @@ -1,7 +0,0 @@ -export async function getNatsServerDocumentState(natsStream, subjectPrefix, docId) { - var remoteDocState = await natsStream.getMessage({ - last_by_subj: subjectPrefix + '.' 
+ docId - }); - return remoteDocState; -} -//# sourceMappingURL=nats-helper.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication-nats/nats-helper.js.map b/dist/esm/plugins/replication-nats/nats-helper.js.map deleted file mode 100644 index ad13488ec40..00000000000 --- a/dist/esm/plugins/replication-nats/nats-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"nats-helper.js","names":["getNatsServerDocumentState","natsStream","subjectPrefix","docId","remoteDocState","getMessage","last_by_subj"],"sources":["../../../../src/plugins/replication-nats/nats-helper.ts"],"sourcesContent":["import type {\n StoredMsg,\n Stream\n} from 'nats';\n\nexport async function getNatsServerDocumentState(\n natsStream: Stream,\n subjectPrefix: string,\n docId: string\n): Promise {\n const remoteDocState = await natsStream.getMessage({\n last_by_subj: subjectPrefix + '.' + docId\n });\n return remoteDocState;\n}\n"],"mappings":"AAKA,OAAO,eAAeA,0BAA0BA,CAC5CC,UAAkB,EAClBC,aAAqB,EACrBC,KAAa,EACiB;EAC9B,IAAMC,cAAc,GAAG,MAAMH,UAAU,CAACI,UAAU,CAAC;IAC/CC,YAAY,EAAEJ,aAAa,GAAG,GAAG,GAAGC;EACxC,CAAC,CAAC;EACF,OAAOC,cAAc;AACzB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-nats/nats-types.js b/dist/esm/plugins/replication-nats/nats-types.js deleted file mode 100644 index 6224eb5db19..00000000000 --- a/dist/esm/plugins/replication-nats/nats-types.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=nats-types.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication-nats/nats-types.js.map b/dist/esm/plugins/replication-nats/nats-types.js.map deleted file mode 100644 index f94a79adae4..00000000000 --- a/dist/esm/plugins/replication-nats/nats-types.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"nats-types.js","names":[],"sources":["../../../../src/plugins/replication-nats/nats-types.ts"],"sourcesContent":["import type {\n ReplicationOptions,\n ReplicationPullOptions,\n 
ReplicationPushOptions\n} from '../../types/index.d.ts';\n\n\nimport {\n ConnectionOptions\n} from 'nats';\n\n\nexport type NatsCheckpointType = {\n sequence: number;\n};\n\nexport type NatsSyncPullOptions =\n Omit, 'handler' | 'stream$'>\n & {\n };\n\nexport type NatsSyncPushOptions = Omit, 'handler'>\n & {\n};\n\nexport type NatsSyncOptions = Omit<\n ReplicationOptions,\n 'pull' | 'push'\n> & {\n\n connection: ConnectionOptions;\n streamName: string;\n /**\n * NATS subject prefix like 'foo.bar'\n * which means a message for a document would have the subject\n * 'foo.bar.myDoc' where the last part 'myDoc' would be the primaryKey in\n * the RxDB document.\n * @link https://docs.nats.io/nats-concepts/subjects\n */\n subjectPrefix: string;\n pull?: NatsSyncPullOptions;\n push?: NatsSyncPushOptions;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-webrtc/connection-handler-p2pcf.js b/dist/esm/plugins/replication-webrtc/connection-handler-p2pcf.js deleted file mode 100644 index 947c1c3fe48..00000000000 --- a/dist/esm/plugins/replication-webrtc/connection-handler-p2pcf.js +++ /dev/null @@ -1,78 +0,0 @@ -// import { Subject } from 'rxjs'; -// import { PROMISE_RESOLVE_VOID, randomCouchString } from '../../util'; -// import type { -// P2PConnectionHandler, -// P2PConnectionHandlerCreator, -// P2PMessage, -// P2PPeer, -// PeerWithMessage, -// PeerWithResponse -// } from './p2p-types'; - -// import P2PCF from 'p2pcf'; - -// /** -// * Returns a connection handler that uses the Cloudflare worker signaling server -// * @link https://github.com/gfodor/p2pcf -// */ -// export function getConnectionHandlerP2PCF( -// p2pCFOptions: { -// workerUrl?: string -// } = {} -// ): P2PConnectionHandlerCreator { -// // const P2PCF = require('p2pcf'); - -// const creator: P2PConnectionHandlerCreator = (options) => { -// const clientId = randomCouchString(10); -// const p2p2 = new P2PCF(clientId, options.topic, p2pCFOptions); - -// 
const connect$ = new Subject(); -// p2p2.on('peerconnect', (peer) => connect$.next(peer as any)); - -// const disconnect$ = new Subject(); -// p2p2.on('peerclose', (peer) => disconnect$.next(peer as any)); - -// const message$ = new Subject(); -// const response$ = new Subject(); -// p2p2.on('msg', (peer, messageOrResponse) => { -// if (messageOrResponse.result) { -// response$.next({ -// peer: peer as any, -// response: messageOrResponse -// }); -// } else { -// message$.next({ -// peer: peer as any, -// message: messageOrResponse -// }); -// } - -// }); - -// const handler: P2PConnectionHandler = { -// connect$, -// disconnect$, -// message$, -// response$, -// async send(peer: P2PPeer, message: P2PMessage) { -// const [responsePeer, response] = await p2p2.send(peer as any, message); -// return { -// peer: responsePeer, -// response -// } as any; -// }, -// destroy() { -// p2p2.destroy(); -// connect$.complete(); -// disconnect$.complete(); -// message$.complete(); -// response$.complete(); -// return PROMISE_RESOLVE_VOID; -// } -// } -// p2p2.start(); -// return handler; -// }; -// return creator; -// } -//# sourceMappingURL=connection-handler-p2pcf.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication-webrtc/connection-handler-p2pcf.js.map b/dist/esm/plugins/replication-webrtc/connection-handler-p2pcf.js.map deleted file mode 100644 index 6b907c9b115..00000000000 --- a/dist/esm/plugins/replication-webrtc/connection-handler-p2pcf.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"connection-handler-p2pcf.js","names":[],"sources":["../../../../src/plugins/replication-webrtc/connection-handler-p2pcf.ts"],"sourcesContent":["// import { Subject } from 'rxjs';\n// import { PROMISE_RESOLVE_VOID, randomCouchString } from '../../util';\n// import type {\n// P2PConnectionHandler,\n// P2PConnectionHandlerCreator,\n// P2PMessage,\n// P2PPeer,\n// PeerWithMessage,\n// PeerWithResponse\n// } from './p2p-types';\n\n// import P2PCF from 
'p2pcf';\n\n// /**\n// * Returns a connection handler that uses the Cloudflare worker signaling server\n// * @link https://github.com/gfodor/p2pcf\n// */\n// export function getConnectionHandlerP2PCF(\n// p2pCFOptions: {\n// workerUrl?: string\n// } = {}\n// ): P2PConnectionHandlerCreator {\n// // const P2PCF = require('p2pcf');\n\n// const creator: P2PConnectionHandlerCreator = (options) => {\n// const clientId = randomCouchString(10);\n// const p2p2 = new P2PCF(clientId, options.topic, p2pCFOptions);\n\n// const connect$ = new Subject();\n// p2p2.on('peerconnect', (peer) => connect$.next(peer as any));\n\n// const disconnect$ = new Subject();\n// p2p2.on('peerclose', (peer) => disconnect$.next(peer as any));\n\n// const message$ = new Subject();\n// const response$ = new Subject();\n// p2p2.on('msg', (peer, messageOrResponse) => {\n// if (messageOrResponse.result) {\n// response$.next({\n// peer: peer as any,\n// response: messageOrResponse\n// });\n// } else {\n// message$.next({\n// peer: peer as any,\n// message: messageOrResponse\n// });\n// }\n\n// });\n\n// const handler: P2PConnectionHandler = {\n// connect$,\n// disconnect$,\n// message$,\n// response$,\n// async send(peer: P2PPeer, message: P2PMessage) {\n// const [responsePeer, response] = await p2p2.send(peer as any, message);\n// return {\n// peer: responsePeer,\n// response\n// } as any;\n// },\n// destroy() {\n// p2p2.destroy();\n// connect$.complete();\n// disconnect$.complete();\n// message$.complete();\n// response$.complete();\n// return PROMISE_RESOLVE_VOID;\n// }\n// }\n// p2p2.start();\n// return handler;\n// };\n// return creator;\n// 
}\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;;AAEA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-webrtc/connection-handler-simple-peer.js b/dist/esm/plugins/replication-webrtc/connection-handler-simple-peer.js deleted file mode 100644 index fe75d6ee02f..00000000000 --- a/dist/esm/plugins/replication-webrtc/connection-handler-simple-peer.js +++ /dev/null @@ -1,194 +0,0 @@ -import { Subject } from 'rxjs'; -import { ensureNotFalsy, getFromMapOrThrow, PROMISE_RESOLVE_VOID, promiseWait, randomCouchString } from "../../plugins/utils/index.js"; -import { default as _Peer -// @ts-ignore -} from 'simple-peer/simplepeer.min.js'; -var Peer = _Peer; -import { newRxError } from "../../rx-error.js"; -function sendMessage(ws, msg) { - ws.send(JSON.stringify(msg)); -} -var DEFAULT_SIGNALING_SERVER_HOSTNAME = 'signaling.rxdb.info'; -export var DEFAULT_SIGNALING_SERVER = 'wss://' + DEFAULT_SIGNALING_SERVER_HOSTNAME + '/'; -var defaultServerWarningShown = false; -export var SIMPLE_PEER_PING_INTERVAL = 1000 * 60 * 2; - -/** - * Returns a connection handler that uses simple-peer and the signaling server. - */ -export function getConnectionHandlerSimplePeer({ - signalingServerUrl, - wrtc, - config, - webSocketConstructor -}) { - ensureProcessNextTickIsSet(); - signalingServerUrl = signalingServerUrl ? signalingServerUrl : DEFAULT_SIGNALING_SERVER; - webSocketConstructor = webSocketConstructor ? 
webSocketConstructor : WebSocket; - if (signalingServerUrl.includes(DEFAULT_SIGNALING_SERVER_HOSTNAME) && !defaultServerWarningShown) { - defaultServerWarningShown = true; - console.warn(['RxDB Warning: You are using the RxDB WebRTC replication plugin', 'but you did not specify your own signaling server url.', 'By default it will use a signaling server provided by RxDB at ' + DEFAULT_SIGNALING_SERVER, 'This server is made for demonstration purposes and tryouts. It is not reliable and might be offline at any time.', 'In production you must always use your own signaling server instead.', 'Learn how to run your own server at https://rxdb.info/replication-webrtc.html', 'Also leave a ⭐ at the RxDB github repo 🙏 https://github.com/pubkey/rxdb 🙏'].join(' ')); - } - var creator = async options => { - var connect$ = new Subject(); - var disconnect$ = new Subject(); - var message$ = new Subject(); - var response$ = new Subject(); - var error$ = new Subject(); - var peers = new Map(); - var closed = false; - var ownPeerId; - var socket = undefined; - createSocket(); - - /** - * Send ping signals to the server. - */ - (async () => { - while (true) { - await promiseWait(SIMPLE_PEER_PING_INTERVAL / 2); - if (closed) { - break; - } - if (socket) { - sendMessage(socket, { - type: 'ping' - }); - } - } - })(); - - /** - * @recursive calls it self on socket disconnects - * so that when the user goes offline and online - * again, it will recreate the WebSocket connection. 
- */ - function createSocket() { - if (closed) { - return; - } - socket = new webSocketConstructor(signalingServerUrl); - socket.onclose = () => createSocket(); - socket.onopen = () => { - ensureNotFalsy(socket).onmessage = msgEvent => { - var msg = JSON.parse(msgEvent.data); - switch (msg.type) { - case 'init': - ownPeerId = msg.yourPeerId; - sendMessage(ensureNotFalsy(socket), { - type: 'join', - room: options.topic - }); - break; - case 'joined': - /** - * PeerId is created by the signaling server - * to prevent spoofing it. - */ - var createPeerConnection = function (remotePeerId) { - var disconnected = false; - var newSimplePeer = new Peer({ - initiator: remotePeerId > ownPeerId, - wrtc, - config, - trickle: true - }); - newSimplePeer.id = randomCouchString(10); - peers.set(remotePeerId, newSimplePeer); - newSimplePeer.on('signal', signal => { - sendMessage(ensureNotFalsy(socket), { - type: 'signal', - senderPeerId: ownPeerId, - receiverPeerId: remotePeerId, - room: options.topic, - data: signal - }); - }); - newSimplePeer.on('data', messageOrResponse => { - messageOrResponse = JSON.parse(messageOrResponse.toString()); - if (messageOrResponse.result) { - response$.next({ - peer: newSimplePeer, - response: messageOrResponse - }); - } else { - message$.next({ - peer: newSimplePeer, - message: messageOrResponse - }); - } - }); - newSimplePeer.on('error', error => { - error$.next(newRxError('RC_WEBRTC_PEER', { - error - })); - newSimplePeer.destroy(); - if (!disconnected) { - disconnected = true; - disconnect$.next(newSimplePeer); - } - }); - newSimplePeer.on('connect', () => { - connect$.next(newSimplePeer); - }); - newSimplePeer.on('close', () => { - if (!disconnected) { - disconnected = true; - disconnect$.next(newSimplePeer); - } - createPeerConnection(remotePeerId); - }); - }; - msg.otherPeerIds.forEach(remotePeerId => { - if (remotePeerId === ownPeerId || peers.has(remotePeerId)) { - return; - } else { - createPeerConnection(remotePeerId); - } - }); - break; 
- case 'signal': - var peer = getFromMapOrThrow(peers, msg.senderPeerId); - peer.signal(msg.data); - break; - } - }; - }; - } - ; - var handler = { - error$, - connect$, - disconnect$, - message$, - response$, - async send(peer, message) { - await peer.send(JSON.stringify(message)); - }, - destroy() { - closed = true; - ensureNotFalsy(socket).close(); - error$.complete(); - connect$.complete(); - disconnect$.complete(); - message$.complete(); - response$.complete(); - return PROMISE_RESOLVE_VOID; - } - }; - return handler; - }; - return creator; -} - -/** - * Multiple people had problems because it requires to have - * the nextTick() method in the runtime. So we check here and - * throw a helpful error. - */ -export function ensureProcessNextTickIsSet() { - if (typeof process === 'undefined' || typeof process.nextTick !== 'function') { - throw newRxError('RC7'); - } -} -//# sourceMappingURL=connection-handler-simple-peer.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication-webrtc/connection-handler-simple-peer.js.map b/dist/esm/plugins/replication-webrtc/connection-handler-simple-peer.js.map deleted file mode 100644 index 5d22e8e0391..00000000000 --- a/dist/esm/plugins/replication-webrtc/connection-handler-simple-peer.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"connection-handler-simple-peer.js","names":["Subject","ensureNotFalsy","getFromMapOrThrow","PROMISE_RESOLVE_VOID","promiseWait","randomCouchString","default","_Peer","Peer","newRxError","sendMessage","ws","msg","send","JSON","stringify","DEFAULT_SIGNALING_SERVER_HOSTNAME","DEFAULT_SIGNALING_SERVER","defaultServerWarningShown","SIMPLE_PEER_PING_INTERVAL","getConnectionHandlerSimplePeer","signalingServerUrl","wrtc","config","webSocketConstructor","ensureProcessNextTickIsSet","WebSocket","includes","console","warn","join","creator","options","connect$","disconnect$","message$","response$","error$","peers","Map","closed","ownPeerId","socket","undefined","createSocket","type","onclose","onopen","onmessage","msgEvent","parse","data","yourPeerId","room","topic","createPeerConnection","remotePeerId","disconnected","newSimplePeer","initiator","trickle","id","set","on","signal","senderPeerId","receiverPeerId","messageOrResponse","toString","result","next","peer","response","message","error","destroy","otherPeerIds","forEach","has","handler","close","complete","process","nextTick"],"sources":["../../../../src/plugins/replication-webrtc/connection-handler-simple-peer.ts"],"sourcesContent":["import { Subject } from 'rxjs';\nimport {\n ensureNotFalsy,\n getFromMapOrThrow,\n PROMISE_RESOLVE_VOID,\n promiseWait,\n randomCouchString\n} from '../../plugins/utils/index.ts';\nimport type {\n WebRTCConnectionHandler,\n WebRTCConnectionHandlerCreator,\n WebRTCMessage,\n PeerWithMessage,\n PeerWithResponse,\n SyncOptionsWebRTC\n} from './webrtc-types.ts';\n\nimport type { \n SimplePeer as Peer, \n Instance as SimplePeerInstance, \n Options as SimplePeerOptions \n} from 'simple-peer';\nimport {\n default as _Peer\n // @ts-ignore\n} from 'simple-peer/simplepeer.min.js';\n\nconst Peer = _Peer as Peer\n\nimport type { RxError, RxTypeError } from '../../types/index.d.ts';\nimport { newRxError } from '../../rx-error.ts';\n\nexport type SimplePeer = SimplePeerInstance & 
{\n // add id to make debugging easier\n id: string;\n};\n\nexport type SimplePeerInitMessage = {\n type: 'init';\n yourPeerId: string;\n};\nexport type SimplePeerJoinMessage = {\n type: 'join';\n room: string;\n};\nexport type SimplePeerJoinedMessage = {\n type: 'joined';\n otherPeerIds: string[];\n};\nexport type SimplePeerSignalMessage = {\n type: 'signal';\n room: string;\n senderPeerId: string;\n receiverPeerId: string;\n data: string;\n};\nexport type SimplePeerPingMessage = {\n type: 'ping';\n};\n\nexport type PeerMessage =\n SimplePeerInitMessage |\n SimplePeerJoinMessage |\n SimplePeerJoinedMessage |\n SimplePeerSignalMessage |\n SimplePeerPingMessage;\n\n\nfunction sendMessage(ws: WebSocket, msg: PeerMessage) {\n ws.send(JSON.stringify(msg));\n}\n\nconst DEFAULT_SIGNALING_SERVER_HOSTNAME = 'signaling.rxdb.info';\nexport const DEFAULT_SIGNALING_SERVER = 'wss://' + DEFAULT_SIGNALING_SERVER_HOSTNAME + '/';\nlet defaultServerWarningShown = false;\n\nexport type SimplePeerWrtc = SimplePeerOptions['wrtc'];\nexport type SimplePeerConfig = SimplePeerOptions['config'];\n\nexport type SimplePeerConnectionHandlerOptions = {\n /**\n * If no server is specified, the default signaling server\n * from signaling.rxdb.info is used.\n * This server is not reliable and you should use\n * your own signaling server instead.\n */\n signalingServerUrl?: string;\n wrtc?: SimplePeerWrtc;\n config?: SimplePeerConfig;\n webSocketConstructor?: WebSocket;\n};\n\nexport const SIMPLE_PEER_PING_INTERVAL = 1000 * 60 * 2;\n\n/**\n * Returns a connection handler that uses simple-peer and the signaling server.\n */\nexport function getConnectionHandlerSimplePeer({\n signalingServerUrl,\n wrtc,\n config,\n webSocketConstructor\n}: SimplePeerConnectionHandlerOptions): WebRTCConnectionHandlerCreator {\n ensureProcessNextTickIsSet();\n\n signalingServerUrl = signalingServerUrl ? signalingServerUrl : DEFAULT_SIGNALING_SERVER;\n webSocketConstructor = webSocketConstructor ? 
webSocketConstructor as any : WebSocket;\n\n if (\n signalingServerUrl.includes(DEFAULT_SIGNALING_SERVER_HOSTNAME) &&\n !defaultServerWarningShown\n ) {\n defaultServerWarningShown = true;\n console.warn(\n [\n 'RxDB Warning: You are using the RxDB WebRTC replication plugin',\n 'but you did not specify your own signaling server url.',\n 'By default it will use a signaling server provided by RxDB at ' + DEFAULT_SIGNALING_SERVER,\n 'This server is made for demonstration purposes and tryouts. It is not reliable and might be offline at any time.',\n 'In production you must always use your own signaling server instead.',\n 'Learn how to run your own server at https://rxdb.info/replication-webrtc.html',\n 'Also leave a ⭐ at the RxDB github repo 🙏 https://github.com/pubkey/rxdb 🙏'\n ].join(' ')\n );\n }\n\n const creator: WebRTCConnectionHandlerCreator = async (options: SyncOptionsWebRTC) => {\n\n const connect$ = new Subject();\n const disconnect$ = new Subject();\n const message$ = new Subject>();\n const response$ = new Subject>();\n const error$ = new Subject();\n\n const peers = new Map();\n let closed = false;\n let ownPeerId: string;\n let socket: WebSocket | undefined = undefined;\n createSocket();\n\n\n /**\n * Send ping signals to the server.\n */\n (async () => {\n while (true) {\n await promiseWait(SIMPLE_PEER_PING_INTERVAL / 2);\n if (closed) {\n break;\n }\n if (socket) {\n sendMessage(socket, { type: 'ping' });\n }\n }\n })();\n\n\n /**\n * @recursive calls it self on socket disconnects\n * so that when the user goes offline and online\n * again, it will recreate the WebSocket connection.\n */\n function createSocket() {\n if (closed) {\n return;\n }\n socket = new (webSocketConstructor as any)(signalingServerUrl) as WebSocket;\n socket.onclose = () => createSocket();\n socket.onopen = () => {\n ensureNotFalsy(socket).onmessage = (msgEvent: any) => {\n const msg: PeerMessage = JSON.parse(msgEvent.data as any);\n switch (msg.type) {\n case 'init':\n 
ownPeerId = msg.yourPeerId;\n sendMessage(ensureNotFalsy(socket), {\n type: 'join',\n room: options.topic\n });\n break;\n case 'joined':\n /**\n * PeerId is created by the signaling server\n * to prevent spoofing it.\n */\n function createPeerConnection(remotePeerId: string) {\n let disconnected = false;\n const newSimplePeer: SimplePeer = new Peer({\n initiator: remotePeerId > ownPeerId,\n wrtc,\n config,\n trickle: true\n }) as any;\n newSimplePeer.id = randomCouchString(10);\n peers.set(remotePeerId, newSimplePeer);\n\n\n newSimplePeer.on('signal', (signal: any) => {\n sendMessage(ensureNotFalsy(socket), {\n type: 'signal',\n senderPeerId: ownPeerId,\n receiverPeerId: remotePeerId,\n room: options.topic,\n data: signal\n });\n });\n\n newSimplePeer.on('data', (messageOrResponse: any) => {\n messageOrResponse = JSON.parse(messageOrResponse.toString());\n if (messageOrResponse.result) {\n response$.next({\n peer: newSimplePeer,\n response: messageOrResponse\n });\n } else {\n message$.next({\n peer: newSimplePeer,\n message: messageOrResponse\n });\n }\n });\n\n newSimplePeer.on('error', (error) => {\n error$.next(newRxError('RC_WEBRTC_PEER', {\n error\n }));\n newSimplePeer.destroy();\n if (!disconnected) {\n disconnected = true;\n disconnect$.next(newSimplePeer);\n }\n });\n\n newSimplePeer.on('connect', () => {\n connect$.next(newSimplePeer);\n });\n\n newSimplePeer.on('close', () => {\n if (!disconnected) {\n disconnected = true;\n disconnect$.next(newSimplePeer);\n }\n createPeerConnection(remotePeerId);\n });\n }\n msg.otherPeerIds.forEach(remotePeerId => {\n if (\n remotePeerId === ownPeerId ||\n peers.has(remotePeerId)\n ) {\n return;\n } else {\n createPeerConnection(remotePeerId);\n }\n\n });\n break;\n case 'signal':\n const peer = getFromMapOrThrow(peers, msg.senderPeerId);\n peer.signal(msg.data);\n break;\n }\n }\n }\n };\n\n const handler: WebRTCConnectionHandler = {\n error$,\n connect$,\n disconnect$,\n message$,\n response$,\n async send(peer: 
SimplePeer, message: WebRTCMessage) {\n await peer.send(JSON.stringify(message));\n },\n destroy() {\n closed = true;\n ensureNotFalsy(socket).close();\n error$.complete();\n connect$.complete();\n disconnect$.complete();\n message$.complete();\n response$.complete();\n return PROMISE_RESOLVE_VOID;\n }\n };\n return handler;\n };\n return creator;\n}\n\n\n/**\n * Multiple people had problems because it requires to have\n * the nextTick() method in the runtime. So we check here and\n * throw a helpful error.\n */\nexport function ensureProcessNextTickIsSet() {\n if (\n typeof process === 'undefined' ||\n typeof process.nextTick !== 'function'\n ) {\n throw newRxError('RC7');\n }\n}\n"],"mappings":"AAAA,SAASA,OAAO,QAAQ,MAAM;AAC9B,SACIC,cAAc,EACdC,iBAAiB,EACjBC,oBAAoB,EACpBC,WAAW,EACXC,iBAAiB,QACd,8BAA8B;AAerC,SACIC,OAAO,IAAIC;AACX;AAAA,OACG,+BAA+B;AAEtC,IAAMC,IAAI,GAAGD,KAAa;AAG1B,SAASE,UAAU,QAAQ,mBAAmB;AAsC9C,SAASC,WAAWA,CAACC,EAAa,EAAEC,GAAgB,EAAE;EAClDD,EAAE,CAACE,IAAI,CAACC,IAAI,CAACC,SAAS,CAACH,GAAG,CAAC,CAAC;AAChC;AAEA,IAAMI,iCAAiC,GAAG,qBAAqB;AAC/D,OAAO,IAAMC,wBAAwB,GAAG,QAAQ,GAAGD,iCAAiC,GAAG,GAAG;AAC1F,IAAIE,yBAAyB,GAAG,KAAK;AAkBrC,OAAO,IAAMC,yBAAyB,GAAG,IAAI,GAAG,EAAE,GAAG,CAAC;;AAEtD;AACA;AACA;AACA,OAAO,SAASC,8BAA8BA,CAAC;EAC3CC,kBAAkB;EAClBC,IAAI;EACJC,MAAM;EACNC;AACgC,CAAC,EAA8C;EAC/EC,0BAA0B,CAAC,CAAC;EAE5BJ,kBAAkB,GAAGA,kBAAkB,GAAGA,kBAAkB,GAAGJ,wBAAwB;EACvFO,oBAAoB,GAAGA,oBAAoB,GAAGA,oBAAoB,GAAUE,SAAS;EAErF,IACIL,kBAAkB,CAACM,QAAQ,CAACX,iCAAiC,CAAC,IAC9D,CAACE,yBAAyB,EAC5B;IACEA,yBAAyB,GAAG,IAAI;IAChCU,OAAO,CAACC,IAAI,CACR,CACI,gEAAgE,EAChE,wDAAwD,EACxD,gEAAgE,GAAGZ,wBAAwB,EAC3F,kHAAkH,EAClH,sEAAsE,EACtE,+EAA+E,EAC/E,6EAA6E,CAChF,CAACa,IAAI,CAAC,GAAG,CACd,CAAC;EACL;EAEA,IAAMC,OAAmD,GAAG,MAAOC,OAA2C,IAAK;IAE/G,IAAMC,QAAQ,GAAG,IAAIjC,OAAO,CAAa,CAAC;IAC1C,IAAMkC,WAAW,GAAG,IAAIlC,OAAO,CAAa,CAAC;IAC7C,IAAMmC,QAAQ,GAAG,IAAInC,OAAO,CAA8B,CAAC;IAC3D,IAAMoC,SAAS,GAAG,IAAIpC,OAAO,CAA+B,CAAC;IAC7D,IAAMqC,MAAM,GAAG,IAAIrC,OAAO,CAAwB,CAAC;IAEnD,IAAMsC,KAAK,GAAG,IAA
IC,GAAG,CAAqB,CAAC;IAC3C,IAAIC,MAAM,GAAG,KAAK;IAClB,IAAIC,SAAiB;IACrB,IAAIC,MAA6B,GAAGC,SAAS;IAC7CC,YAAY,CAAC,CAAC;;IAGd;AACR;AACA;IACQ,CAAC,YAAY;MACT,OAAO,IAAI,EAAE;QACT,MAAMxC,WAAW,CAACe,yBAAyB,GAAG,CAAC,CAAC;QAChD,IAAIqB,MAAM,EAAE;UACR;QACJ;QACA,IAAIE,MAAM,EAAE;UACRhC,WAAW,CAACgC,MAAM,EAAE;YAAEG,IAAI,EAAE;UAAO,CAAC,CAAC;QACzC;MACJ;IACJ,CAAC,EAAE,CAAC;;IAGJ;AACR;AACA;AACA;AACA;IACQ,SAASD,YAAYA,CAAA,EAAG;MACpB,IAAIJ,MAAM,EAAE;QACR;MACJ;MACAE,MAAM,GAAG,IAAKlB,oBAAoB,CAASH,kBAAkB,CAAc;MAC3EqB,MAAM,CAACI,OAAO,GAAG,MAAMF,YAAY,CAAC,CAAC;MACrCF,MAAM,CAACK,MAAM,GAAG,MAAM;QAClB9C,cAAc,CAACyC,MAAM,CAAC,CAACM,SAAS,GAAIC,QAAa,IAAK;UAClD,IAAMrC,GAAgB,GAAGE,IAAI,CAACoC,KAAK,CAACD,QAAQ,CAACE,IAAW,CAAC;UACzD,QAAQvC,GAAG,CAACiC,IAAI;YACZ,KAAK,MAAM;cACPJ,SAAS,GAAG7B,GAAG,CAACwC,UAAU;cAC1B1C,WAAW,CAACT,cAAc,CAACyC,MAAM,CAAC,EAAE;gBAChCG,IAAI,EAAE,MAAM;gBACZQ,IAAI,EAAErB,OAAO,CAACsB;cAClB,CAAC,CAAC;cACF;YACJ,KAAK,QAAQ;cACT;AAC5B;AACA;AACA;cAH4B,IAISC,oBAAoB,GAA7B,SAAAA,CAA8BC,YAAoB,EAAE;gBAChD,IAAIC,YAAY,GAAG,KAAK;gBACxB,IAAMC,aAAyB,GAAG,IAAIlD,IAAI,CAAC;kBACvCmD,SAAS,EAAEH,YAAY,GAAGf,SAAS;kBACnCnB,IAAI;kBACJC,MAAM;kBACNqC,OAAO,EAAE;gBACb,CAAC,CAAQ;gBACTF,aAAa,CAACG,EAAE,GAAGxD,iBAAiB,CAAC,EAAE,CAAC;gBACxCiC,KAAK,CAACwB,GAAG,CAACN,YAAY,EAAEE,aAAa,CAAC;gBAGtCA,aAAa,CAACK,EAAE,CAAC,QAAQ,EAAGC,MAAW,IAAK;kBACxCtD,WAAW,CAACT,cAAc,CAACyC,MAAM,CAAC,EAAE;oBAChCG,IAAI,EAAE,QAAQ;oBACdoB,YAAY,EAAExB,SAAS;oBACvByB,cAAc,EAAEV,YAAY;oBAC5BH,IAAI,EAAErB,OAAO,CAACsB,KAAK;oBACnBH,IAAI,EAAEa;kBACV,CAAC,CAAC;gBACN,CAAC,CAAC;gBAEFN,aAAa,CAACK,EAAE,CAAC,MAAM,EAAGI,iBAAsB,IAAK;kBACjDA,iBAAiB,GAAGrD,IAAI,CAACoC,KAAK,CAACiB,iBAAiB,CAACC,QAAQ,CAAC,CAAC,CAAC;kBAC5D,IAAID,iBAAiB,CAACE,MAAM,EAAE;oBAC1BjC,SAAS,CAACkC,IAAI,CAAC;sBACXC,IAAI,EAAEb,aAAa;sBACnBc,QAAQ,EAAEL;oBACd,CAAC,CAAC;kBACN,CAAC,MAAM;oBACHhC,QAAQ,CAACmC,IAAI,CAAC;sBACVC,IAAI,EAAEb,aAAa;sBACnBe,OAAO,EAAEN;oBACb,CAAC,CAAC;kBACN;gBACJ,CAAC,CAAC;gBAEFT,aAAa,CAACK,EAAE,CAAC,OAAO,EAAGW,KAAK,IAAK;kBACjCrC,MAAM,CAACiC,IAAI,CAAC7D,UAAU,CAAC,gBAAgB,EAAE;oBA
CrCiE;kBACJ,CAAC,CAAC,CAAC;kBACHhB,aAAa,CAACiB,OAAO,CAAC,CAAC;kBACvB,IAAI,CAAClB,YAAY,EAAE;oBACfA,YAAY,GAAG,IAAI;oBACnBvB,WAAW,CAACoC,IAAI,CAACZ,aAAa,CAAC;kBACnC;gBACJ,CAAC,CAAC;gBAEFA,aAAa,CAACK,EAAE,CAAC,SAAS,EAAE,MAAM;kBAC9B9B,QAAQ,CAACqC,IAAI,CAACZ,aAAa,CAAC;gBAChC,CAAC,CAAC;gBAEFA,aAAa,CAACK,EAAE,CAAC,OAAO,EAAE,MAAM;kBAC5B,IAAI,CAACN,YAAY,EAAE;oBACfA,YAAY,GAAG,IAAI;oBACnBvB,WAAW,CAACoC,IAAI,CAACZ,aAAa,CAAC;kBACnC;kBACAH,oBAAoB,CAACC,YAAY,CAAC;gBACtC,CAAC,CAAC;cACN,CAAC;cACD5C,GAAG,CAACgE,YAAY,CAACC,OAAO,CAACrB,YAAY,IAAI;gBACrC,IACIA,YAAY,KAAKf,SAAS,IAC1BH,KAAK,CAACwC,GAAG,CAACtB,YAAY,CAAC,EACzB;kBACE;gBACJ,CAAC,MAAM;kBACHD,oBAAoB,CAACC,YAAY,CAAC;gBACtC;cAEJ,CAAC,CAAC;cACF;YACJ,KAAK,QAAQ;cACT,IAAMe,IAAI,GAAGrE,iBAAiB,CAACoC,KAAK,EAAE1B,GAAG,CAACqD,YAAY,CAAC;cACvDM,IAAI,CAACP,MAAM,CAACpD,GAAG,CAACuC,IAAI,CAAC;cACrB;UACR;QACJ,CAAC;MACL,CAAC;IACL;IAAC;IAED,IAAM4B,OAA4C,GAAG;MACjD1C,MAAM;MACNJ,QAAQ;MACRC,WAAW;MACXC,QAAQ;MACRC,SAAS;MACT,MAAMvB,IAAIA,CAAC0D,IAAgB,EAAEE,OAAsB,EAAE;QACjD,MAAMF,IAAI,CAAC1D,IAAI,CAACC,IAAI,CAACC,SAAS,CAAC0D,OAAO,CAAC,CAAC;MAC5C,CAAC;MACDE,OAAOA,CAAA,EAAG;QACNnC,MAAM,GAAG,IAAI;QACbvC,cAAc,CAACyC,MAAM,CAAC,CAACsC,KAAK,CAAC,CAAC;QAC9B3C,MAAM,CAAC4C,QAAQ,CAAC,CAAC;QACjBhD,QAAQ,CAACgD,QAAQ,CAAC,CAAC;QACnB/C,WAAW,CAAC+C,QAAQ,CAAC,CAAC;QACtB9C,QAAQ,CAAC8C,QAAQ,CAAC,CAAC;QACnB7C,SAAS,CAAC6C,QAAQ,CAAC,CAAC;QACpB,OAAO9E,oBAAoB;MAC/B;IACJ,CAAC;IACD,OAAO4E,OAAO;EAClB,CAAC;EACD,OAAOhD,OAAO;AAClB;;AAGA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASN,0BAA0BA,CAAA,EAAG;EACzC,IACI,OAAOyD,OAAO,KAAK,WAAW,IAC9B,OAAOA,OAAO,CAACC,QAAQ,KAAK,UAAU,EACxC;IACE,MAAM1E,UAAU,CAAC,KAAK,CAAC;EAC3B;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-webrtc/connection-handler-webtorrent.js b/dist/esm/plugins/replication-webrtc/connection-handler-webtorrent.js deleted file mode 100644 index 00ea583db38..00000000000 --- a/dist/esm/plugins/replication-webrtc/connection-handler-webtorrent.js +++ /dev/null @@ -1,136 +0,0 @@ -// /** -// * Uses the 
Webtorrent servers as signaling server, works similar to p2pt. -// * We could not use p2pt directly because it has so many bugs and behaves wrong in -// * cases with more then 2 peers. -// * @link https://github.com/subins2000/p2pt/blob/master/p2pt.js -// */ - -// import { Subject } from 'rxjs'; -// import { PROMISE_RESOLVE_VOID, randomCouchString } from '../../util'; -// import { P2PConnectionHandler, P2PConnectionHandlerCreator, P2PMessage, P2PPeer, PeerWithMessage, PeerWithResponse } from './p2p-types'; -// const wrtc = require('wrtc'); - -// const WebSocketTracker = require('bittorrent-tracker/lib/client/websocket-tracker'); -// const Client = require('bittorrent-tracker'); -// const randombytes = require('randombytes'); -// const EventEmitter = require('events'); -// const sha1 = require('simple-sha1'); -// const debug = require('debug')('p2pt'); - -// export const P2PT_DEFAULT_TRACKERS = [ -// 'wss://tracker.files.fm:7073/announce', -// 'wss://tracker.btorrent.xyz', -// 'wss://spacetradersapi-chatbox.herokuapp.com:443/announce', -// 'wss://qot.abiir.top:443/announce' -// ]; - -// export function getConnectionHandlerWebtorrent( -// trackers: string[] = P2PT_DEFAULT_TRACKERS, -// /** -// * Port is only required in Node.js, -// * not on browsers. -// */ -// torrentClientPort = 18669 -// ): P2PConnectionHandlerCreator { -// const creator: P2PConnectionHandlerCreator = (options) => { -// /** -// * @link https://github.com/webtorrent/bittorrent-tracker#client -// */ -// const requiredOpts = { -// infoHash: sha1.sync(options.topic).toLowerCase(), -// peerId: randombytes(20), -// announce: trackers, -// port: torrentClientPort, -// wrtc -// } -// const client = new Client(requiredOpts); - -// const connect$ = new Subject(); -// const disconnect$ = new Subject(); -// const message$ = new Subject(); -// const response$ = new Subject(); - -// client.on('error', function (err) { -// console.error('fatal client error! 
' + requiredOpts.peerId.toString('hex')); -// console.log(err.message) -// }) - -// client.on('warning', function (err) { -// // a tracker was unavailable or sent bad data to the client. you can probably ignore it -// console.log(err.message) -// }) - -// client.on('update', function (data) { -// console.log('got an announce response from tracker: ' + data.announce) -// console.log('number of seeders in the swarm: ' + data.complete) -// console.log('number of leechers in the swarm: ' + data.incomplete) -// }); - -// const knownPeers = new Set(); -// client.on('peer', function (peer: P2PPeer) { -// console.log('found a peer: ' + peer.id + ' ' + requiredOpts.peerId.toString('hex')) // 85.10.239.191:48623 -// if (knownPeers.has(peer.id)) { -// return; -// } -// knownPeers.add(peer.id); -// peer.once('connect', () => { -// connect$.next(peer); -// }); -// peer.on('data', (data: Buffer) => { -// console.log('# GOT DATA FROM PEER:'); -// const messageOrResponse = JSON.parse(data as any); -// console.dir(messageOrResponse); -// if (messageOrResponse.result) { -// response$.next({ -// peer: peer as any, -// response: messageOrResponse -// }); -// } else { -// message$.next({ -// peer, -// message: JSON.parse(data) -// }); -// } -// }); -// peer.on('signal', (signal) => { -// console.log('GOT SIGNAL: ' + requiredOpts.peerId.toString('hex')); -// console.dir(signal); -// client.signal(signal); -// client.update(); -// client.scrape(); -// }); -// }); - -// client.on('scrape', function (data) { -// console.log('number of leechers in the swarm: ' + data.incomplete) -// }) - -// const handler: P2PConnectionHandler = { -// connect$, -// disconnect$, -// message$, -// response$, -// async send(peer: P2PPeer, message: P2PMessage) { -// await peer.send(JSON.stringify(message)); -// }, -// destroy() { -// client.destroy(); -// connect$.complete(); -// disconnect$.complete(); -// message$.complete(); -// response$.complete(); -// return PROMISE_RESOLVE_VOID; -// } -// } -// 
client.start(); -// client.update(); -// client.scrape(); -// setInterval(() => { -// // client.update(); -// }, 10000); -// return handler; -// }; - -// return creator; -// } -//# sourceMappingURL=connection-handler-webtorrent.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication-webrtc/connection-handler-webtorrent.js.map b/dist/esm/plugins/replication-webrtc/connection-handler-webtorrent.js.map deleted file mode 100644 index 59f21070cdd..00000000000 --- a/dist/esm/plugins/replication-webrtc/connection-handler-webtorrent.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"connection-handler-webtorrent.js","names":[],"sources":["../../../../src/plugins/replication-webrtc/connection-handler-webtorrent.ts"],"sourcesContent":["// /**\n// * Uses the Webtorrent servers as signaling server, works similar to p2pt.\n// * We could not use p2pt directly because it has so many bugs and behaves wrong in\n// * cases with more then 2 peers.\n// * @link https://github.com/subins2000/p2pt/blob/master/p2pt.js\n// */\n\n// import { Subject } from 'rxjs';\n// import { PROMISE_RESOLVE_VOID, randomCouchString } from '../../util';\n// import { P2PConnectionHandler, P2PConnectionHandlerCreator, P2PMessage, P2PPeer, PeerWithMessage, PeerWithResponse } from './p2p-types';\n// const wrtc = require('wrtc');\n\n// const WebSocketTracker = require('bittorrent-tracker/lib/client/websocket-tracker');\n// const Client = require('bittorrent-tracker');\n// const randombytes = require('randombytes');\n// const EventEmitter = require('events');\n// const sha1 = require('simple-sha1');\n// const debug = require('debug')('p2pt');\n\n\n// export const P2PT_DEFAULT_TRACKERS = [\n// 'wss://tracker.files.fm:7073/announce',\n// 'wss://tracker.btorrent.xyz',\n// 'wss://spacetradersapi-chatbox.herokuapp.com:443/announce',\n// 'wss://qot.abiir.top:443/announce'\n// ];\n\n// export function getConnectionHandlerWebtorrent(\n// trackers: string[] = P2PT_DEFAULT_TRACKERS,\n// /**\n// * 
Port is only required in Node.js,\n// * not on browsers.\n// */\n// torrentClientPort = 18669\n// ): P2PConnectionHandlerCreator {\n// const creator: P2PConnectionHandlerCreator = (options) => {\n// /**\n// * @link https://github.com/webtorrent/bittorrent-tracker#client\n// */\n// const requiredOpts = {\n// infoHash: sha1.sync(options.topic).toLowerCase(),\n// peerId: randombytes(20),\n// announce: trackers,\n// port: torrentClientPort,\n// wrtc\n// }\n// const client = new Client(requiredOpts);\n\n// const connect$ = new Subject();\n// const disconnect$ = new Subject();\n// const message$ = new Subject();\n// const response$ = new Subject();\n\n\n// client.on('error', function (err) {\n// console.error('fatal client error! ' + requiredOpts.peerId.toString('hex'));\n// console.log(err.message)\n// })\n\n// client.on('warning', function (err) {\n// // a tracker was unavailable or sent bad data to the client. you can probably ignore it\n// console.log(err.message)\n// })\n\n// client.on('update', function (data) {\n// console.log('got an announce response from tracker: ' + data.announce)\n// console.log('number of seeders in the swarm: ' + data.complete)\n// console.log('number of leechers in the swarm: ' + data.incomplete)\n// });\n\n// const knownPeers = new Set();\n// client.on('peer', function (peer: P2PPeer) {\n// console.log('found a peer: ' + peer.id + ' ' + requiredOpts.peerId.toString('hex')) // 85.10.239.191:48623\n// if (knownPeers.has(peer.id)) {\n// return;\n// }\n// knownPeers.add(peer.id);\n// peer.once('connect', () => {\n// connect$.next(peer);\n// });\n// peer.on('data', (data: Buffer) => {\n// console.log('# GOT DATA FROM PEER:');\n// const messageOrResponse = JSON.parse(data as any);\n// console.dir(messageOrResponse);\n// if (messageOrResponse.result) {\n// response$.next({\n// peer: peer as any,\n// response: messageOrResponse\n// });\n// } else {\n// message$.next({\n// peer,\n// message: JSON.parse(data)\n// });\n// }\n// });\n// 
peer.on('signal', (signal) => {\n// console.log('GOT SIGNAL: ' + requiredOpts.peerId.toString('hex'));\n// console.dir(signal);\n// client.signal(signal);\n// client.update();\n// client.scrape();\n// });\n// });\n\n// client.on('scrape', function (data) {\n// console.log('number of leechers in the swarm: ' + data.incomplete)\n// })\n\n// const handler: P2PConnectionHandler = {\n// connect$,\n// disconnect$,\n// message$,\n// response$,\n// async send(peer: P2PPeer, message: P2PMessage) {\n// await peer.send(JSON.stringify(message));\n// },\n// destroy() {\n// client.destroy();\n// connect$.complete();\n// disconnect$.complete();\n// message$.complete();\n// response$.complete();\n// return PROMISE_RESOLVE_VOID;\n// }\n// }\n// client.start();\n// client.update();\n// client.scrape();\n// setInterval(() => {\n// // client.update();\n// }, 10000);\n// return handler;\n// };\n\n// return creator;\n// }\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;;AAGA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAGA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-webrtc/index.js b/dist/esm/plugins/replication-webrtc/index.js deleted file mode 100644 index 2529a5d518b..00000000000 --- a/dist/esm/plugins/replication-webrtc/index.js +++ /dev/null @@ -1,207 +0,0 @@ -import { BehaviorSubject, filter, firstValueFrom, map, Subject } from 'rxjs'; -import { addRxPlugin } from "../../plugin.js"; -import { 
rxStorageInstanceToReplicationHandler } from "../../replication-protocol/index.js"; -import { ensureNotFalsy, getFromMapOrThrow, randomCouchString } from "../../plugins/utils/index.js"; -import { RxDBLeaderElectionPlugin } from "../leader-election/index.js"; -import { replicateRxCollection } from "../replication/index.js"; -import { isMasterInWebRTCReplication, sendMessageAndAwaitAnswer } from "./webrtc-helper.js"; -import { newRxError } from "../../rx-error.js"; -export async function replicateWebRTC(options) { - var collection = options.collection; - addRxPlugin(RxDBLeaderElectionPlugin); - - // fill defaults - if (options.pull) { - if (!options.pull.batchSize) { - options.pull.batchSize = 20; - } - } - if (options.push) { - if (!options.push.batchSize) { - options.push.batchSize = 20; - } - } - if (collection.database.multiInstance) { - await collection.database.waitForLeadership(); - } - - // used to easier debug stuff - var requestCounter = 0; - var requestFlag = randomCouchString(10); - function getRequestId() { - var count = requestCounter++; - return collection.database.token + '|' + requestFlag + '|' + count; - } - var storageToken = await collection.database.storageToken; - var pool = new RxWebRTCReplicationPool(collection, options, await options.connectionHandlerCreator(options)); - pool.subs.push(pool.connectionHandler.error$.subscribe(err => pool.error$.next(err)), pool.connectionHandler.disconnect$.subscribe(peer => pool.removePeer(peer))); - - /** - * Answer if someone requests our storage token - */ - pool.subs.push(pool.connectionHandler.message$.pipe(filter(data => data.message.method === 'token')).subscribe(data => { - pool.connectionHandler.send(data.peer, { - id: data.message.id, - result: storageToken - }); - })); - var connectSub = pool.connectionHandler.connect$.pipe(filter(() => !pool.canceled)).subscribe(async peer => { - if (options.isPeerValid) { - var isValid = await options.isPeerValid(peer); - if (!isValid) { - return; - } - } - var 
peerToken; - try { - var tokenResponse = await sendMessageAndAwaitAnswer(pool.connectionHandler, peer, { - id: getRequestId(), - method: 'token', - params: [] - }); - peerToken = tokenResponse.result; - } catch (error) { - /** - * If could not get the tokenResponse, - * just ignore that peer. - */ - pool.error$.next(newRxError('RC_WEBRTC_PEER', { - error - })); - return; - } - var isMaster = await isMasterInWebRTCReplication(collection.database.hashFunction, storageToken, peerToken); - var replicationState; - if (isMaster) { - var masterHandler = pool.masterReplicationHandler; - var masterChangeStreamSub = masterHandler.masterChangeStream$.subscribe(ev => { - var streamResponse = { - id: 'masterChangeStream$', - result: ev - }; - pool.connectionHandler.send(peer, streamResponse); - }); - - // clean up the subscription - pool.subs.push(masterChangeStreamSub, pool.connectionHandler.disconnect$.pipe(filter(p => p === peer)).subscribe(() => masterChangeStreamSub.unsubscribe())); - var messageSub = pool.connectionHandler.message$.pipe(filter(data => data.peer === peer), filter(data => data.message.method !== 'token')).subscribe(async data => { - var { - peer: msgPeer, - message - } = data; - /** - * If it is not a function, - * it means that the client requested the masterChangeStream$ - */ - var method = masterHandler[message.method].bind(masterHandler); - var result = await method(...message.params); - var response = { - id: message.id, - result - }; - pool.connectionHandler.send(msgPeer, response); - }); - pool.subs.push(messageSub); - } else { - replicationState = replicateRxCollection({ - replicationIdentifier: [collection.name, options.topic, peerToken].join('||'), - collection: collection, - autoStart: true, - deletedField: '_deleted', - live: true, - retryTime: options.retryTime, - waitForLeadership: false, - pull: options.pull ? 
Object.assign({}, options.pull, { - async handler(lastPulledCheckpoint) { - var answer = await sendMessageAndAwaitAnswer(pool.connectionHandler, peer, { - method: 'masterChangesSince', - params: [lastPulledCheckpoint, ensureNotFalsy(options.pull).batchSize], - id: getRequestId() - }); - return answer.result; - }, - stream$: pool.connectionHandler.response$.pipe(filter(m => m.response.id === 'masterChangeStream$'), map(m => m.response.result)) - }) : undefined, - push: options.push ? Object.assign({}, options.push, { - async handler(docs) { - var answer = await sendMessageAndAwaitAnswer(pool.connectionHandler, peer, { - method: 'masterWrite', - params: [docs], - id: getRequestId() - }); - return answer.result; - } - }) : undefined - }); - } - pool.addPeer(peer, replicationState); - }); - pool.subs.push(connectSub); - return pool; -} - -/** - * Because the WebRTC replication runs between many instances, - * we use a Pool instead of returning a single replication state. - */ -export var RxWebRTCReplicationPool = /*#__PURE__*/function () { - function RxWebRTCReplicationPool(collection, options, connectionHandler) { - this.peerStates$ = new BehaviorSubject(new Map()); - this.canceled = false; - this.subs = []; - this.error$ = new Subject(); - this.collection = collection; - this.options = options; - this.connectionHandler = connectionHandler; - this.collection.onDestroy.push(() => this.cancel()); - this.masterReplicationHandler = rxStorageInstanceToReplicationHandler(collection.storageInstance, collection.conflictHandler, collection.database.token); - } - var _proto = RxWebRTCReplicationPool.prototype; - _proto.addPeer = function addPeer(peer, - // only if isMaster=false it has a replicationState - replicationState) { - var peerState = { - peer, - replicationState, - subs: [] - }; - this.peerStates$.next(this.peerStates$.getValue().set(peer, peerState)); - if (replicationState) { - peerState.subs.push(replicationState.error$.subscribe(ev => this.error$.next(ev))); - } - 
}; - _proto.removePeer = function removePeer(peer) { - var peerState = getFromMapOrThrow(this.peerStates$.getValue(), peer); - this.peerStates$.getValue().delete(peer); - this.peerStates$.next(this.peerStates$.getValue()); - peerState.subs.forEach(sub => sub.unsubscribe()); - if (peerState.replicationState) { - peerState.replicationState.cancel(); - } - } - - // often used in unit tests - ; - _proto.awaitFirstPeer = function awaitFirstPeer() { - return firstValueFrom(this.peerStates$.pipe(filter(peerStates => peerStates.size > 0))); - }; - _proto.cancel = async function cancel() { - if (this.canceled) { - return; - } - this.canceled = true; - this.subs.forEach(sub => sub.unsubscribe()); - Array.from(this.peerStates$.getValue().keys()).forEach(peer => { - this.removePeer(peer); - }); - await this.connectionHandler.destroy(); - }; - return RxWebRTCReplicationPool; -}(); -export * from "./webrtc-helper.js"; -export * from "./signaling-server.js"; -export * from "./webrtc-types.js"; -// export * from './connection-handler-webtorrent'; -// export * from './connection-handler-p2pcf'; -export * from "./connection-handler-simple-peer.js"; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication-webrtc/index.js.map b/dist/esm/plugins/replication-webrtc/index.js.map deleted file mode 100644 index 79a881a4eca..00000000000 --- a/dist/esm/plugins/replication-webrtc/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","names":["BehaviorSubject","filter","firstValueFrom","map","Subject","addRxPlugin","rxStorageInstanceToReplicationHandler","ensureNotFalsy","getFromMapOrThrow","randomCouchString","RxDBLeaderElectionPlugin","replicateRxCollection","isMasterInWebRTCReplication","sendMessageAndAwaitAnswer","newRxError","replicateWebRTC","options","collection","pull","batchSize","push","database","multiInstance","waitForLeadership","requestCounter","requestFlag","getRequestId","count","token","storageToken","pool","RxWebRTCReplicationPool","connectionHandlerCreator","subs","connectionHandler","error$","subscribe","err","next","disconnect$","peer","removePeer","message$","pipe","data","message","method","send","id","result","connectSub","connect$","canceled","isPeerValid","isValid","peerToken","tokenResponse","params","error","isMaster","hashFunction","replicationState","masterHandler","masterReplicationHandler","masterChangeStreamSub","masterChangeStream$","ev","streamResponse","p","unsubscribe","messageSub","msgPeer","bind","response","replicationIdentifier","name","topic","join","autoStart","deletedField","live","retryTime","Object","assign","handler","lastPulledCheckpoint","answer","stream$","response$","m","undefined","docs","addPeer","peerStates$","Map","onDestroy","cancel","storageInstance","conflictHandler","_proto","prototype","peerState","getValue","set","delete","forEach","sub","awaitFirstPeer","peerStates","size","Array","from","keys","destroy"],"sources":["../../../../src/plugins/replication-webrtc/index.ts"],"sourcesContent":["import {\n BehaviorSubject,\n filter,\n firstValueFrom,\n map,\n Subject,\n Subscription\n} from 'rxjs';\nimport { addRxPlugin } from '../../plugin.ts';\nimport { rxStorageInstanceToReplicationHandler } from '../../replication-protocol/index.ts';\nimport type {\n RxCollection,\n RxError,\n RxReplicationHandler,\n RxReplicationWriteToMasterRow,\n RxTypeError\n} from '../../types/index.d.ts';\nimport {\n 
ensureNotFalsy,\n getFromMapOrThrow,\n randomCouchString\n} from '../../plugins/utils/index.ts';\nimport { RxDBLeaderElectionPlugin } from '../leader-election/index.ts';\nimport { replicateRxCollection } from '../replication/index.ts';\nimport {\n isMasterInWebRTCReplication,\n sendMessageAndAwaitAnswer\n} from './webrtc-helper.ts';\nimport type {\n WebRTCConnectionHandler,\n WebRTCPeerState,\n WebRTCReplicationCheckpoint,\n WebRTCResponse,\n RxWebRTCReplicationState,\n SyncOptionsWebRTC\n} from './webrtc-types.ts';\nimport { newRxError } from '../../rx-error.ts';\n\n\nexport async function replicateWebRTC(\n options: SyncOptionsWebRTC\n): Promise> {\n const collection = options.collection;\n addRxPlugin(RxDBLeaderElectionPlugin);\n\n // fill defaults\n if (options.pull) {\n if (!options.pull.batchSize) {\n options.pull.batchSize = 20;\n }\n }\n if (options.push) {\n if (!options.push.batchSize) {\n options.push.batchSize = 20;\n }\n }\n\n if (collection.database.multiInstance) {\n await collection.database.waitForLeadership();\n }\n\n // used to easier debug stuff\n let requestCounter = 0;\n const requestFlag = randomCouchString(10);\n function getRequestId() {\n const count = requestCounter++;\n return collection.database.token + '|' + requestFlag + '|' + count;\n }\n\n const storageToken = await collection.database.storageToken;\n const pool = new RxWebRTCReplicationPool(\n collection,\n options,\n await options.connectionHandlerCreator(options)\n );\n\n\n pool.subs.push(\n pool.connectionHandler.error$.subscribe(err => pool.error$.next(err)),\n pool.connectionHandler.disconnect$.subscribe(peer => pool.removePeer(peer))\n );\n\n /**\n * Answer if someone requests our storage token\n */\n pool.subs.push(\n pool.connectionHandler.message$.pipe(\n filter(data => data.message.method === 'token')\n ).subscribe(data => {\n pool.connectionHandler.send(data.peer, {\n id: data.message.id,\n result: storageToken\n });\n })\n );\n\n const connectSub = 
pool.connectionHandler.connect$\n .pipe(\n filter(() => !pool.canceled)\n )\n .subscribe(async (peer) => {\n if (options.isPeerValid) {\n const isValid = await options.isPeerValid(peer);\n if (!isValid) {\n return;\n }\n }\n\n let peerToken: string;\n try {\n const tokenResponse = await sendMessageAndAwaitAnswer(\n pool.connectionHandler,\n peer,\n {\n id: getRequestId(),\n method: 'token',\n params: []\n }\n );\n peerToken = tokenResponse.result;\n } catch (error: any) {\n /**\n * If could not get the tokenResponse,\n * just ignore that peer.\n */\n pool.error$.next(newRxError('RC_WEBRTC_PEER', {\n error\n }));\n return;\n }\n const isMaster = await isMasterInWebRTCReplication(collection.database.hashFunction, storageToken, peerToken);\n\n let replicationState: RxWebRTCReplicationState | undefined;\n if (isMaster) {\n const masterHandler = pool.masterReplicationHandler;\n const masterChangeStreamSub = masterHandler.masterChangeStream$.subscribe(ev => {\n const streamResponse: WebRTCResponse = {\n id: 'masterChangeStream$',\n result: ev\n };\n pool.connectionHandler.send(peer, streamResponse);\n });\n\n // clean up the subscription\n pool.subs.push(\n masterChangeStreamSub,\n pool.connectionHandler.disconnect$.pipe(\n filter(p => p === peer)\n ).subscribe(() => masterChangeStreamSub.unsubscribe())\n );\n\n const messageSub = pool.connectionHandler.message$\n .pipe(\n filter(data => data.peer === peer),\n filter(data => data.message.method !== 'token')\n )\n .subscribe(async (data) => {\n const { peer: msgPeer, message } = data;\n /**\n * If it is not a function,\n * it means that the client requested the masterChangeStream$\n */\n const method = (masterHandler as any)[message.method].bind(masterHandler);\n const result = await (method as any)(...message.params);\n const response: WebRTCResponse = {\n id: message.id,\n result\n };\n pool.connectionHandler.send(msgPeer, response);\n });\n pool.subs.push(messageSub);\n } else {\n replicationState = 
replicateRxCollection({\n replicationIdentifier: [collection.name, options.topic, peerToken].join('||'),\n collection: collection,\n autoStart: true,\n deletedField: '_deleted',\n live: true,\n retryTime: options.retryTime,\n waitForLeadership: false,\n pull: options.pull ? Object.assign({}, options.pull, {\n async handler(lastPulledCheckpoint: WebRTCReplicationCheckpoint | undefined) {\n const answer = await sendMessageAndAwaitAnswer(\n pool.connectionHandler,\n peer,\n {\n method: 'masterChangesSince',\n params: [\n lastPulledCheckpoint,\n ensureNotFalsy(options.pull).batchSize\n ],\n id: getRequestId()\n }\n );\n return answer.result;\n },\n stream$: pool.connectionHandler.response$.pipe(\n filter(m => m.response.id === 'masterChangeStream$'),\n map(m => m.response.result)\n )\n\n }) : undefined,\n push: options.push ? Object.assign({}, options.push, {\n async handler(docs: RxReplicationWriteToMasterRow[]) {\n const answer = await sendMessageAndAwaitAnswer(\n pool.connectionHandler,\n peer,\n {\n method: 'masterWrite',\n params: [docs],\n id: getRequestId()\n }\n );\n return answer.result;\n }\n }) : undefined\n });\n }\n pool.addPeer(peer, replicationState);\n });\n pool.subs.push(connectSub);\n return pool;\n}\n\n\n/**\n * Because the WebRTC replication runs between many instances,\n * we use a Pool instead of returning a single replication state.\n */\nexport class RxWebRTCReplicationPool {\n peerStates$: BehaviorSubject>> = new BehaviorSubject(new Map());\n canceled: boolean = false;\n masterReplicationHandler: RxReplicationHandler;\n subs: Subscription[] = [];\n\n public error$ = new Subject();\n\n constructor(\n public readonly collection: RxCollection,\n public readonly options: SyncOptionsWebRTC,\n public readonly connectionHandler: WebRTCConnectionHandler\n ) {\n this.collection.onDestroy.push(() => this.cancel());\n this.masterReplicationHandler = rxStorageInstanceToReplicationHandler(\n collection.storageInstance,\n collection.conflictHandler,\n 
collection.database.token,\n );\n }\n\n addPeer(\n peer: PeerType,\n // only if isMaster=false it has a replicationState\n replicationState?: RxWebRTCReplicationState\n ) {\n const peerState: WebRTCPeerState = {\n peer,\n replicationState,\n subs: []\n };\n this.peerStates$.next(this.peerStates$.getValue().set(peer, peerState));\n if (replicationState) {\n peerState.subs.push(\n replicationState.error$.subscribe(ev => this.error$.next(ev))\n );\n }\n }\n removePeer(peer: PeerType) {\n const peerState = getFromMapOrThrow(this.peerStates$.getValue(), peer);\n this.peerStates$.getValue().delete(peer);\n this.peerStates$.next(this.peerStates$.getValue());\n peerState.subs.forEach(sub => sub.unsubscribe());\n if (peerState.replicationState) {\n peerState.replicationState.cancel();\n }\n }\n\n // often used in unit tests\n awaitFirstPeer() {\n return firstValueFrom(\n this.peerStates$.pipe(\n filter(peerStates => peerStates.size > 0)\n )\n );\n }\n\n public async cancel() {\n if (this.canceled) {\n return;\n }\n this.canceled = true;\n this.subs.forEach(sub => sub.unsubscribe());\n Array.from(this.peerStates$.getValue().keys()).forEach(peer => {\n this.removePeer(peer);\n });\n await this.connectionHandler.destroy();\n }\n}\n\nexport * from './webrtc-helper.ts';\nexport * from './signaling-server.ts';\nexport * from './webrtc-types.ts';\n// export * from './connection-handler-webtorrent';\n// export * from './connection-handler-p2pcf';\nexport * from 
'./connection-handler-simple-peer.ts';\n"],"mappings":"AAAA,SACIA,eAAe,EACfC,MAAM,EACNC,cAAc,EACdC,GAAG,EACHC,OAAO,QAEJ,MAAM;AACb,SAASC,WAAW,QAAQ,iBAAiB;AAC7C,SAASC,qCAAqC,QAAQ,qCAAqC;AAQ3F,SACIC,cAAc,EACdC,iBAAiB,EACjBC,iBAAiB,QACd,8BAA8B;AACrC,SAASC,wBAAwB,QAAQ,6BAA6B;AACtE,SAASC,qBAAqB,QAAQ,yBAAyB;AAC/D,SACIC,2BAA2B,EAC3BC,yBAAyB,QACtB,oBAAoB;AAS3B,SAASC,UAAU,QAAQ,mBAAmB;AAG9C,OAAO,eAAeC,eAAeA,CACjCC,OAA+C,EACM;EACrD,IAAMC,UAAU,GAAGD,OAAO,CAACC,UAAU;EACrCZ,WAAW,CAACK,wBAAwB,CAAC;;EAErC;EACA,IAAIM,OAAO,CAACE,IAAI,EAAE;IACd,IAAI,CAACF,OAAO,CAACE,IAAI,CAACC,SAAS,EAAE;MACzBH,OAAO,CAACE,IAAI,CAACC,SAAS,GAAG,EAAE;IAC/B;EACJ;EACA,IAAIH,OAAO,CAACI,IAAI,EAAE;IACd,IAAI,CAACJ,OAAO,CAACI,IAAI,CAACD,SAAS,EAAE;MACzBH,OAAO,CAACI,IAAI,CAACD,SAAS,GAAG,EAAE;IAC/B;EACJ;EAEA,IAAIF,UAAU,CAACI,QAAQ,CAACC,aAAa,EAAE;IACnC,MAAML,UAAU,CAACI,QAAQ,CAACE,iBAAiB,CAAC,CAAC;EACjD;;EAEA;EACA,IAAIC,cAAc,GAAG,CAAC;EACtB,IAAMC,WAAW,GAAGhB,iBAAiB,CAAC,EAAE,CAAC;EACzC,SAASiB,YAAYA,CAAA,EAAG;IACpB,IAAMC,KAAK,GAAGH,cAAc,EAAE;IAC9B,OAAOP,UAAU,CAACI,QAAQ,CAACO,KAAK,GAAG,GAAG,GAAGH,WAAW,GAAG,GAAG,GAAGE,KAAK;EACtE;EAEA,IAAME,YAAY,GAAG,MAAMZ,UAAU,CAACI,QAAQ,CAACQ,YAAY;EAC3D,IAAMC,IAAI,GAAG,IAAIC,uBAAuB,CACpCd,UAAU,EACVD,OAAO,EACP,MAAMA,OAAO,CAACgB,wBAAwB,CAAChB,OAAO,CAClD,CAAC;EAGDc,IAAI,CAACG,IAAI,CAACb,IAAI,CACVU,IAAI,CAACI,iBAAiB,CAACC,MAAM,CAACC,SAAS,CAACC,GAAG,IAAIP,IAAI,CAACK,MAAM,CAACG,IAAI,CAACD,GAAG,CAAC,CAAC,EACrEP,IAAI,CAACI,iBAAiB,CAACK,WAAW,CAACH,SAAS,CAACI,IAAI,IAAIV,IAAI,CAACW,UAAU,CAACD,IAAI,CAAC,CAC9E,CAAC;;EAED;AACJ;AACA;EACIV,IAAI,CAACG,IAAI,CAACb,IAAI,CACVU,IAAI,CAACI,iBAAiB,CAACQ,QAAQ,CAACC,IAAI,CAChC1C,MAAM,CAAC2C,IAAI,IAAIA,IAAI,CAACC,OAAO,CAACC,MAAM,KAAK,OAAO,CAClD,CAAC,CAACV,SAAS,CAACQ,IAAI,IAAI;IAChBd,IAAI,CAACI,iBAAiB,CAACa,IAAI,CAACH,IAAI,CAACJ,IAAI,EAAE;MACnCQ,EAAE,EAAEJ,IAAI,CAACC,OAAO,CAACG,EAAE;MACnBC,MAAM,EAAEpB;IACZ,CAAC,CAAC;EACN,CAAC,CACL,CAAC;EAED,IAAMqB,UAAU,GAAGpB,IAAI,CAACI,iBAAiB,CAACiB,QAAQ,CAC7CR,IAAI,CACD1C,MAAM,CAAC,MAAM,CAAC6B,IAAI,CAACsB,QAAQ,CAC/B,CAAC,CACAhB,
SAAS,CAAC,MAAOI,IAAI,IAAK;IACvB,IAAIxB,OAAO,CAACqC,WAAW,EAAE;MACrB,IAAMC,OAAO,GAAG,MAAMtC,OAAO,CAACqC,WAAW,CAACb,IAAI,CAAC;MAC/C,IAAI,CAACc,OAAO,EAAE;QACV;MACJ;IACJ;IAEA,IAAIC,SAAiB;IACrB,IAAI;MACA,IAAMC,aAAa,GAAG,MAAM3C,yBAAyB,CACjDiB,IAAI,CAACI,iBAAiB,EACtBM,IAAI,EACJ;QACIQ,EAAE,EAAEtB,YAAY,CAAC,CAAC;QAClBoB,MAAM,EAAE,OAAO;QACfW,MAAM,EAAE;MACZ,CACJ,CAAC;MACDF,SAAS,GAAGC,aAAa,CAACP,MAAM;IACpC,CAAC,CAAC,OAAOS,KAAU,EAAE;MACjB;AAChB;AACA;AACA;MACgB5B,IAAI,CAACK,MAAM,CAACG,IAAI,CAACxB,UAAU,CAAC,gBAAgB,EAAE;QAC1C4C;MACJ,CAAC,CAAC,CAAC;MACH;IACJ;IACA,IAAMC,QAAQ,GAAG,MAAM/C,2BAA2B,CAACK,UAAU,CAACI,QAAQ,CAACuC,YAAY,EAAE/B,YAAY,EAAE0B,SAAS,CAAC;IAE7G,IAAIM,gBAAiE;IACrE,IAAIF,QAAQ,EAAE;MACV,IAAMG,aAAa,GAAGhC,IAAI,CAACiC,wBAAwB;MACnD,IAAMC,qBAAqB,GAAGF,aAAa,CAACG,mBAAmB,CAAC7B,SAAS,CAAC8B,EAAE,IAAI;QAC5E,IAAMC,cAA8B,GAAG;UACnCnB,EAAE,EAAE,qBAAqB;UACzBC,MAAM,EAAEiB;QACZ,CAAC;QACDpC,IAAI,CAACI,iBAAiB,CAACa,IAAI,CAACP,IAAI,EAAE2B,cAAc,CAAC;MACrD,CAAC,CAAC;;MAEF;MACArC,IAAI,CAACG,IAAI,CAACb,IAAI,CACV4C,qBAAqB,EACrBlC,IAAI,CAACI,iBAAiB,CAACK,WAAW,CAACI,IAAI,CACnC1C,MAAM,CAACmE,CAAC,IAAIA,CAAC,KAAK5B,IAAI,CAC1B,CAAC,CAACJ,SAAS,CAAC,MAAM4B,qBAAqB,CAACK,WAAW,CAAC,CAAC,CACzD,CAAC;MAED,IAAMC,UAAU,GAAGxC,IAAI,CAACI,iBAAiB,CAACQ,QAAQ,CAC7CC,IAAI,CACD1C,MAAM,CAAC2C,IAAI,IAAIA,IAAI,CAACJ,IAAI,KAAKA,IAAI,CAAC,EAClCvC,MAAM,CAAC2C,IAAI,IAAIA,IAAI,CAACC,OAAO,CAACC,MAAM,KAAK,OAAO,CAClD,CAAC,CACAV,SAAS,CAAC,MAAOQ,IAAI,IAAK;QACvB,IAAM;UAAEJ,IAAI,EAAE+B,OAAO;UAAE1B;QAAQ,CAAC,GAAGD,IAAI;QACvC;AACxB;AACA;AACA;QACwB,IAAME,MAAM,GAAIgB,aAAa,CAASjB,OAAO,CAACC,MAAM,CAAC,CAAC0B,IAAI,CAACV,aAAa,CAAC;QACzE,IAAMb,MAAM,GAAG,MAAOH,MAAM,CAAS,GAAGD,OAAO,CAACY,MAAM,CAAC;QACvD,IAAMgB,QAAwB,GAAG;UAC7BzB,EAAE,EAAEH,OAAO,CAACG,EAAE;UACdC;QACJ,CAAC;QACDnB,IAAI,CAACI,iBAAiB,CAACa,IAAI,CAACwB,OAAO,EAAEE,QAAQ,CAAC;MAClD,CAAC,CAAC;MACN3C,IAAI,CAACG,IAAI,CAACb,IAAI,CAACkD,UAAU,CAAC;IAC9B,CAAC,MAAM;MACHT,gBAAgB,GAAGlD,qBAAqB,CAAC;QACrC+D,qBAAqB,EAAE,CAACzD,UAAU,CAAC0D,IAAI,EAAE3D,OAAO,CAAC4D,KAAK,EAAErB,SAAS,CAAC,CAACsB,IAA
I,CAAC,IAAI,CAAC;QAC7E5D,UAAU,EAAEA,UAAU;QACtB6D,SAAS,EAAE,IAAI;QACfC,YAAY,EAAE,UAAU;QACxBC,IAAI,EAAE,IAAI;QACVC,SAAS,EAAEjE,OAAO,CAACiE,SAAS;QAC5B1D,iBAAiB,EAAE,KAAK;QACxBL,IAAI,EAAEF,OAAO,CAACE,IAAI,GAAGgE,MAAM,CAACC,MAAM,CAAC,CAAC,CAAC,EAAEnE,OAAO,CAACE,IAAI,EAAE;UACjD,MAAMkE,OAAOA,CAACC,oBAA6D,EAAE;YACzE,IAAMC,MAAM,GAAG,MAAMzE,yBAAyB,CAC1CiB,IAAI,CAACI,iBAAiB,EACtBM,IAAI,EACJ;cACIM,MAAM,EAAE,oBAAoB;cAC5BW,MAAM,EAAE,CACJ4B,oBAAoB,EACpB9E,cAAc,CAACS,OAAO,CAACE,IAAI,CAAC,CAACC,SAAS,CACzC;cACD6B,EAAE,EAAEtB,YAAY,CAAC;YACrB,CACJ,CAAC;YACD,OAAO4D,MAAM,CAACrC,MAAM;UACxB,CAAC;UACDsC,OAAO,EAAEzD,IAAI,CAACI,iBAAiB,CAACsD,SAAS,CAAC7C,IAAI,CAC1C1C,MAAM,CAACwF,CAAC,IAAIA,CAAC,CAAChB,QAAQ,CAACzB,EAAE,KAAK,qBAAqB,CAAC,EACpD7C,GAAG,CAACsF,CAAC,IAAIA,CAAC,CAAChB,QAAQ,CAACxB,MAAM,CAC9B;QAEJ,CAAC,CAAC,GAAGyC,SAAS;QACdtE,IAAI,EAAEJ,OAAO,CAACI,IAAI,GAAG8D,MAAM,CAACC,MAAM,CAAC,CAAC,CAAC,EAAEnE,OAAO,CAACI,IAAI,EAAE;UACjD,MAAMgE,OAAOA,CAACO,IAAgD,EAAE;YAC5D,IAAML,MAAM,GAAG,MAAMzE,yBAAyB,CAC1CiB,IAAI,CAACI,iBAAiB,EACtBM,IAAI,EACJ;cACIM,MAAM,EAAE,aAAa;cACrBW,MAAM,EAAE,CAACkC,IAAI,CAAC;cACd3C,EAAE,EAAEtB,YAAY,CAAC;YACrB,CACJ,CAAC;YACD,OAAO4D,MAAM,CAACrC,MAAM;UACxB;QACJ,CAAC,CAAC,GAAGyC;MACT,CAAC,CAAC;IACN;IACA5D,IAAI,CAAC8D,OAAO,CAACpD,IAAI,EAAEqB,gBAAgB,CAAC;EACxC,CAAC,CAAC;EACN/B,IAAI,CAACG,IAAI,CAACb,IAAI,CAAC8B,UAAU,CAAC;EAC1B,OAAOpB,IAAI;AACf;;AAGA;AACA;AACA;AACA;AACA,WAAaC,uBAAuB;EAQhC,SAAAA,wBACoBd,UAAmC,EACnCD,OAA+C,EAC/CkB,iBAAoD,EACtE;IAAA,KAXF2D,WAAW,GAAyE,IAAI7F,eAAe,CAAC,IAAI8F,GAAG,CAAC,CAAC,CAAC;IAAA,KAClH1C,QAAQ,GAAY,KAAK;IAAA,KAEzBnB,IAAI,GAAmB,EAAE;IAAA,KAElBE,MAAM,GAAG,IAAI/B,OAAO,CAAwB,CAAC;IAAA,KAGhCa,UAAmC,GAAnCA,UAAmC;IAAA,KACnCD,OAA+C,GAA/CA,OAA+C;IAAA,KAC/CkB,iBAAoD,GAApDA,iBAAoD;IAEpE,IAAI,CAACjB,UAAU,CAAC8E,SAAS,CAAC3E,IAAI,CAAC,MAAM,IAAI,CAAC4E,MAAM,CAAC,CAAC,CAAC;IACnD,IAAI,CAACjC,wBAAwB,GAAGzD,qCAAqC,CACjEW,UAAU,CAACgF,eAAe,EAC1BhF,UAAU,CAACiF,eAAe,EAC1BjF,UAAU,CAACI,QAAQ,CAACO,KACxB,CAAC;EACL;EAAC,IAAAuE,MAAA,GAAApE,uBAAA,CAAAqE,SAAA;EAAAD,MAAA,CAEDP,OAAO,GA
AP,SAAAA,QACIpD,IAAc;EACd;EACAqB,gBAAsD,EACxD;IACE,IAAMwC,SAA+C,GAAG;MACpD7D,IAAI;MACJqB,gBAAgB;MAChB5B,IAAI,EAAE;IACV,CAAC;IACD,IAAI,CAAC4D,WAAW,CAACvD,IAAI,CAAC,IAAI,CAACuD,WAAW,CAACS,QAAQ,CAAC,CAAC,CAACC,GAAG,CAAC/D,IAAI,EAAE6D,SAAS,CAAC,CAAC;IACvE,IAAIxC,gBAAgB,EAAE;MAClBwC,SAAS,CAACpE,IAAI,CAACb,IAAI,CACfyC,gBAAgB,CAAC1B,MAAM,CAACC,SAAS,CAAC8B,EAAE,IAAI,IAAI,CAAC/B,MAAM,CAACG,IAAI,CAAC4B,EAAE,CAAC,CAChE,CAAC;IACL;EACJ,CAAC;EAAAiC,MAAA,CACD1D,UAAU,GAAV,SAAAA,WAAWD,IAAc,EAAE;IACvB,IAAM6D,SAAS,GAAG7F,iBAAiB,CAAC,IAAI,CAACqF,WAAW,CAACS,QAAQ,CAAC,CAAC,EAAE9D,IAAI,CAAC;IACtE,IAAI,CAACqD,WAAW,CAACS,QAAQ,CAAC,CAAC,CAACE,MAAM,CAAChE,IAAI,CAAC;IACxC,IAAI,CAACqD,WAAW,CAACvD,IAAI,CAAC,IAAI,CAACuD,WAAW,CAACS,QAAQ,CAAC,CAAC,CAAC;IAClDD,SAAS,CAACpE,IAAI,CAACwE,OAAO,CAACC,GAAG,IAAIA,GAAG,CAACrC,WAAW,CAAC,CAAC,CAAC;IAChD,IAAIgC,SAAS,CAACxC,gBAAgB,EAAE;MAC5BwC,SAAS,CAACxC,gBAAgB,CAACmC,MAAM,CAAC,CAAC;IACvC;EACJ;;EAEA;EAAA;EAAAG,MAAA,CACAQ,cAAc,GAAd,SAAAA,eAAA,EAAiB;IACb,OAAOzG,cAAc,CACjB,IAAI,CAAC2F,WAAW,CAAClD,IAAI,CACjB1C,MAAM,CAAC2G,UAAU,IAAIA,UAAU,CAACC,IAAI,GAAG,CAAC,CAC5C,CACJ,CAAC;EACL,CAAC;EAAAV,MAAA,CAEYH,MAAM,GAAnB,eAAAA,OAAA,EAAsB;IAClB,IAAI,IAAI,CAAC5C,QAAQ,EAAE;MACf;IACJ;IACA,IAAI,CAACA,QAAQ,GAAG,IAAI;IACpB,IAAI,CAACnB,IAAI,CAACwE,OAAO,CAACC,GAAG,IAAIA,GAAG,CAACrC,WAAW,CAAC,CAAC,CAAC;IAC3CyC,KAAK,CAACC,IAAI,CAAC,IAAI,CAAClB,WAAW,CAACS,QAAQ,CAAC,CAAC,CAACU,IAAI,CAAC,CAAC,CAAC,CAACP,OAAO,CAACjE,IAAI,IAAI;MAC3D,IAAI,CAACC,UAAU,CAACD,IAAI,CAAC;IACzB,CAAC,CAAC;IACF,MAAM,IAAI,CAACN,iBAAiB,CAAC+E,OAAO,CAAC,CAAC;EAC1C,CAAC;EAAA,OAAAlF,uBAAA;AAAA;AAGL,cAAc,oBAAoB;AAClC,cAAc,uBAAuB;AACrC,cAAc,mBAAmB;AACjC;AACA;AACA,cAAc,qCAAqC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-webrtc/signaling-server.js b/dist/esm/plugins/replication-webrtc/signaling-server.js deleted file mode 100644 index 32888cbaf6a..00000000000 --- a/dist/esm/plugins/replication-webrtc/signaling-server.js +++ /dev/null @@ -1,144 +0,0 @@ -import { getFromMapOrCreate, 
promiseWait, randomCouchString } from "../utils/index.js"; -import { SIMPLE_PEER_PING_INTERVAL } from "./connection-handler-simple-peer.js"; -export var PEER_ID_LENGTH = 12; -/** - * Starts a WebRTC signaling server - * that can be used in tests. -*/ -export async function startSignalingServerSimplePeer(serverOptions) { - var { - WebSocketServer - } = await import('ws'); - var wss = new WebSocketServer(serverOptions); - var peerById = new Map(); - var peersByRoom = new Map(); - var serverClosed = false; - wss.on('close', () => { - serverClosed = true; - peerById.clear(); - peersByRoom.clear(); - }); - - /** - * Clients can disconnect without telling that to the - * server. Therefore we have to automatically disconnect clients that - * have not send a ping message in the last 2 minutes. - */ - (async () => { - var _loop = async function () { - await promiseWait(1000 * 5); - var minTime = Date.now() - SIMPLE_PEER_PING_INTERVAL; - Array.from(peerById.values()).forEach(peer => { - if (peer.lastPing < minTime) { - disconnectSocket(peer.id, 'no ping for 2 minutes'); - } - }); - }; - while (!serverClosed) { - await _loop(); - } - })(); - function disconnectSocket(peerId, reason) { - console.log('# disconnect peer ' + peerId + ' reason: ' + reason); - var peer = peerById.get(peerId); - if (peer) { - peer.socket.close && peer.socket.close(undefined, reason); - peer.rooms.forEach(roomId => { - var room = peersByRoom.get(roomId); - room?.delete(peerId); - if (room && room.size === 0) { - peersByRoom.delete(roomId); - } - }); - } - peerById.delete(peerId); - } - wss.on('connection', function (ws) { - /** - * PeerID is created by the server to prevent malicious - * actors from falsy claiming other peoples ids. 
- */ - var peerId = randomCouchString(PEER_ID_LENGTH); - var peer = { - id: peerId, - socket: ws, - rooms: new Set(), - lastPing: Date.now() - }; - peerById.set(peerId, peer); - sendMessage(ws, { - type: 'init', - yourPeerId: peerId - }); - ws.on('error', err => { - console.error('SERVER ERROR:'); - console.dir(err); - disconnectSocket(peerId, 'socket errored'); - }); - ws.on('close', () => { - disconnectSocket(peerId, 'socket disconnected'); - }); - ws.on('message', msgEvent => { - peer.lastPing = Date.now(); - var message = JSON.parse(msgEvent.toString()); - var type = message.type; - switch (type) { - case 'join': - var roomId = message.room; - if (!validateIdString(roomId) || !validateIdString(peerId)) { - disconnectSocket(peerId, 'invalid ids'); - return; - } - if (peer.rooms.has(peerId)) { - return; - } - peer.rooms.add(roomId); - var room = getFromMapOrCreate(peersByRoom, message.room, () => new Set()); - room.add(peerId); - - // tell everyone about new room state - room.forEach(otherPeerId => { - var otherPeer = peerById.get(otherPeerId); - if (otherPeer) { - sendMessage(otherPeer.socket, { - type: 'joined', - otherPeerIds: Array.from(room) - }); - } - }); - break; - case 'signal': - if (message.senderPeerId !== peerId) { - disconnectSocket(peerId, 'spoofed sender'); - return; - } - var receiver = peerById.get(message.receiverPeerId); - if (receiver) { - sendMessage(receiver.socket, message); - } - break; - case 'ping': - break; - default: - disconnectSocket(peerId, 'unknown message type ' + type); - } - }); - }); - return { - port: serverOptions.port, - server: wss, - localUrl: 'ws://localhost:' + serverOptions.port - }; -} -function sendMessage(ws, message) { - var msgString = JSON.stringify(message); - ws.send(msgString); -} -function validateIdString(roomId) { - if (typeof roomId === 'string' && roomId.length > 5 && roomId.length < 100) { - return true; - } else { - return false; - } -} -//# sourceMappingURL=signaling-server.js.map \ No newline at end 
of file diff --git a/dist/esm/plugins/replication-webrtc/signaling-server.js.map b/dist/esm/plugins/replication-webrtc/signaling-server.js.map deleted file mode 100644 index 9b60b84f336..00000000000 --- a/dist/esm/plugins/replication-webrtc/signaling-server.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"signaling-server.js","names":["getFromMapOrCreate","promiseWait","randomCouchString","SIMPLE_PEER_PING_INTERVAL","PEER_ID_LENGTH","startSignalingServerSimplePeer","serverOptions","WebSocketServer","wss","peerById","Map","peersByRoom","serverClosed","on","clear","_loop","minTime","Date","now","Array","from","values","forEach","peer","lastPing","disconnectSocket","id","peerId","reason","console","log","get","socket","close","undefined","rooms","roomId","room","delete","size","ws","Set","set","sendMessage","type","yourPeerId","err","error","dir","msgEvent","message","JSON","parse","toString","validateIdString","has","add","otherPeerId","otherPeer","otherPeerIds","senderPeerId","receiver","receiverPeerId","port","server","localUrl","msgString","stringify","send","length"],"sources":["../../../../src/plugins/replication-webrtc/signaling-server.ts"],"sourcesContent":["import {\n getFromMapOrCreate,\n promiseWait,\n randomCouchString\n} from '../utils/index.ts';\nimport {\n SIMPLE_PEER_PING_INTERVAL,\n type PeerMessage\n} from './connection-handler-simple-peer.ts';\nimport type {\n WebSocket,\n ServerOptions\n} from 'ws';\n\nexport const PEER_ID_LENGTH = 12;\nexport type ServerPeer = {\n id: string;\n socket: WebSocket;\n rooms: Set;\n lastPing: number;\n};\n\n\n/**\n * Starts a WebRTC signaling server\n * that can be used in tests.\n*/\nexport async function startSignalingServerSimplePeer(\n serverOptions: ServerOptions\n) {\n const { WebSocketServer } = await import('ws');\n const wss = new WebSocketServer(serverOptions);\n\n const peerById = new Map();\n const peersByRoom = new Map>();\n\n let serverClosed = false;\n wss.on('close', () => {\n serverClosed = 
true\n peerById.clear();\n peersByRoom.clear();\n });\n\n /**\n * Clients can disconnect without telling that to the\n * server. Therefore we have to automatically disconnect clients that\n * have not send a ping message in the last 2 minutes.\n */\n (async () => {\n while (!serverClosed) {\n await promiseWait(1000 * 5);\n const minTime = Date.now() - SIMPLE_PEER_PING_INTERVAL;\n Array.from(peerById.values()).forEach(peer => {\n if (peer.lastPing < minTime) {\n disconnectSocket(peer.id, 'no ping for 2 minutes');\n }\n });\n }\n })();\n\n function disconnectSocket(peerId: string, reason: string) {\n console.log('# disconnect peer ' + peerId + ' reason: ' + reason);\n const peer = peerById.get(peerId);\n if (peer) {\n peer.socket.close && peer.socket.close(undefined, reason);\n peer.rooms.forEach(roomId => {\n const room = peersByRoom.get(roomId);\n room?.delete(peerId);\n if (room && room.size === 0) {\n peersByRoom.delete(roomId);\n }\n });\n }\n peerById.delete(peerId);\n }\n\n wss.on('connection', function (ws) {\n /**\n * PeerID is created by the server to prevent malicious\n * actors from falsy claiming other peoples ids.\n */\n const peerId = randomCouchString(PEER_ID_LENGTH);\n const peer: ServerPeer = {\n id: peerId,\n socket: ws,\n rooms: new Set(),\n lastPing: Date.now()\n };\n peerById.set(peerId, peer);\n\n sendMessage(ws, { type: 'init', yourPeerId: peerId });\n\n\n ws.on('error', err => {\n console.error('SERVER ERROR:');\n console.dir(err);\n disconnectSocket(peerId, 'socket errored');\n });\n ws.on('close', () => {\n disconnectSocket(peerId, 'socket disconnected');\n });\n\n ws.on('message', msgEvent => {\n peer.lastPing = Date.now();\n const message = JSON.parse(msgEvent.toString());\n const type = message.type;\n switch (type) {\n case 'join':\n const roomId = message.room;\n if (\n !validateIdString(roomId) ||\n !validateIdString(peerId)\n ) {\n disconnectSocket(peerId, 'invalid ids');\n return;\n }\n\n if (peer.rooms.has(peerId)) {\n return;\n 
}\n peer.rooms.add(roomId);\n\n\n const room = getFromMapOrCreate(\n peersByRoom,\n message.room,\n () => new Set()\n );\n\n room.add(peerId);\n\n // tell everyone about new room state\n room.forEach(otherPeerId => {\n const otherPeer = peerById.get(otherPeerId);\n if (otherPeer) {\n sendMessage(\n otherPeer.socket,\n {\n type: 'joined',\n otherPeerIds: Array.from(room)\n }\n );\n }\n });\n break;\n case 'signal':\n if (\n message.senderPeerId !== peerId\n ) {\n disconnectSocket(peerId, 'spoofed sender');\n return;\n }\n const receiver = peerById.get(message.receiverPeerId);\n if (receiver) {\n sendMessage(\n receiver.socket,\n message\n );\n }\n break;\n case 'ping':\n break;\n default:\n disconnectSocket(peerId, 'unknown message type ' + type);\n }\n\n });\n });\n\n return {\n port: serverOptions.port,\n server: wss,\n localUrl: 'ws://localhost:' + serverOptions.port\n };\n}\n\n\nfunction sendMessage(ws: WebSocket, message: PeerMessage) {\n const msgString = JSON.stringify(message);\n ws.send(msgString);\n}\n\nfunction validateIdString(roomId: string): boolean {\n if (\n typeof roomId === 'string' &&\n roomId.length > 5 &&\n roomId.length < 100\n ) {\n return true;\n } else {\n return false;\n 
}\n}\n"],"mappings":"AAAA,SACIA,kBAAkB,EAClBC,WAAW,EACXC,iBAAiB,QACd,mBAAmB;AAC1B,SACIC,yBAAyB,QAEtB,qCAAqC;AAM5C,OAAO,IAAMC,cAAc,GAAG,EAAE;AAShC;AACA;AACA;AACA;AACA,OAAO,eAAeC,8BAA8BA,CAChDC,aAA4B,EAC9B;EACE,IAAM;IAAEC;EAAgB,CAAC,GAAG,MAAM,MAAM,CAAC,IAAI,CAAC;EAC9C,IAAMC,GAAG,GAAG,IAAID,eAAe,CAACD,aAAa,CAAC;EAE9C,IAAMG,QAAQ,GAAG,IAAIC,GAAG,CAAqB,CAAC;EAC9C,IAAMC,WAAW,GAAG,IAAID,GAAG,CAAsB,CAAC;EAElD,IAAIE,YAAY,GAAG,KAAK;EACxBJ,GAAG,CAACK,EAAE,CAAC,OAAO,EAAE,MAAM;IAClBD,YAAY,GAAG,IAAI;IACnBH,QAAQ,CAACK,KAAK,CAAC,CAAC;IAChBH,WAAW,CAACG,KAAK,CAAC,CAAC;EACvB,CAAC,CAAC;;EAEF;AACJ;AACA;AACA;AACA;EACI,CAAC,YAAY;IAAA,IAAAC,KAAA,kBAAAA,CAAA,EACa;MAClB,MAAMd,WAAW,CAAC,IAAI,GAAG,CAAC,CAAC;MAC3B,IAAMe,OAAO,GAAGC,IAAI,CAACC,GAAG,CAAC,CAAC,GAAGf,yBAAyB;MACtDgB,KAAK,CAACC,IAAI,CAACX,QAAQ,CAACY,MAAM,CAAC,CAAC,CAAC,CAACC,OAAO,CAACC,IAAI,IAAI;QAC1C,IAAIA,IAAI,CAACC,QAAQ,GAAGR,OAAO,EAAE;UACzBS,gBAAgB,CAACF,IAAI,CAACG,EAAE,EAAE,uBAAuB,CAAC;QACtD;MACJ,CAAC,CAAC;IACN,CAAC;IARD,OAAO,CAACd,YAAY;MAAA,MAAAG,KAAA;IAAA;EASxB,CAAC,EAAE,CAAC;EAEJ,SAASU,gBAAgBA,CAACE,MAAc,EAAEC,MAAc,EAAE;IACtDC,OAAO,CAACC,GAAG,CAAC,oBAAoB,GAAGH,MAAM,GAAG,WAAW,GAAGC,MAAM,CAAC;IACjE,IAAML,IAAI,GAAGd,QAAQ,CAACsB,GAAG,CAACJ,MAAM,CAAC;IACjC,IAAIJ,IAAI,EAAE;MACNA,IAAI,CAACS,MAAM,CAACC,KAAK,IAAIV,IAAI,CAACS,MAAM,CAACC,KAAK,CAACC,SAAS,EAAEN,MAAM,CAAC;MACzDL,IAAI,CAACY,KAAK,CAACb,OAAO,CAACc,MAAM,IAAI;QACzB,IAAMC,IAAI,GAAG1B,WAAW,CAACoB,GAAG,CAACK,MAAM,CAAC;QACpCC,IAAI,EAAEC,MAAM,CAACX,MAAM,CAAC;QACpB,IAAIU,IAAI,IAAIA,IAAI,CAACE,IAAI,KAAK,CAAC,EAAE;UACzB5B,WAAW,CAAC2B,MAAM,CAACF,MAAM,CAAC;QAC9B;MACJ,CAAC,CAAC;IACN;IACA3B,QAAQ,CAAC6B,MAAM,CAACX,MAAM,CAAC;EAC3B;EAEAnB,GAAG,CAACK,EAAE,CAAC,YAAY,EAAE,UAAU2B,EAAE,EAAE;IAC/B;AACR;AACA;AACA;IACQ,IAAMb,MAAM,GAAGzB,iBAAiB,CAACE,cAAc,CAAC;IAChD,IAAMmB,IAAgB,GAAG;MACrBG,EAAE,EAAEC,MAAM;MACVK,MAAM,EAAEQ,EAAE;MACVL,KAAK,EAAE,IAAIM,GAAG,CAAC,CAAC;MAChBjB,QAAQ,EAAEP,IAAI,CAACC,GAAG,CAAC;IACvB,CAAC;IACDT,QAAQ,CAACiC,GAAG,CAACf,MAAM,EAAEJ,IAAI,CAAC;IAE1BoB,WAAW,CAACH,EAAE,EAAE;MAAEI,IAAI,
EAAE,MAAM;MAAEC,UAAU,EAAElB;IAAO,CAAC,CAAC;IAGrDa,EAAE,CAAC3B,EAAE,CAAC,OAAO,EAAEiC,GAAG,IAAI;MAClBjB,OAAO,CAACkB,KAAK,CAAC,eAAe,CAAC;MAC9BlB,OAAO,CAACmB,GAAG,CAACF,GAAG,CAAC;MAChBrB,gBAAgB,CAACE,MAAM,EAAE,gBAAgB,CAAC;IAC9C,CAAC,CAAC;IACFa,EAAE,CAAC3B,EAAE,CAAC,OAAO,EAAE,MAAM;MACjBY,gBAAgB,CAACE,MAAM,EAAE,qBAAqB,CAAC;IACnD,CAAC,CAAC;IAEFa,EAAE,CAAC3B,EAAE,CAAC,SAAS,EAAEoC,QAAQ,IAAI;MACzB1B,IAAI,CAACC,QAAQ,GAAGP,IAAI,CAACC,GAAG,CAAC,CAAC;MAC1B,IAAMgC,OAAO,GAAGC,IAAI,CAACC,KAAK,CAACH,QAAQ,CAACI,QAAQ,CAAC,CAAC,CAAC;MAC/C,IAAMT,IAAI,GAAGM,OAAO,CAACN,IAAI;MACzB,QAAQA,IAAI;QACR,KAAK,MAAM;UACP,IAAMR,MAAM,GAAGc,OAAO,CAACb,IAAI;UAC3B,IACI,CAACiB,gBAAgB,CAAClB,MAAM,CAAC,IACzB,CAACkB,gBAAgB,CAAC3B,MAAM,CAAC,EAC3B;YACEF,gBAAgB,CAACE,MAAM,EAAE,aAAa,CAAC;YACvC;UACJ;UAEA,IAAIJ,IAAI,CAACY,KAAK,CAACoB,GAAG,CAAC5B,MAAM,CAAC,EAAE;YACxB;UACJ;UACAJ,IAAI,CAACY,KAAK,CAACqB,GAAG,CAACpB,MAAM,CAAC;UAGtB,IAAMC,IAAI,GAAGrC,kBAAkB,CAC3BW,WAAW,EACXuC,OAAO,CAACb,IAAI,EACZ,MAAM,IAAII,GAAG,CAAC,CAClB,CAAC;UAEDJ,IAAI,CAACmB,GAAG,CAAC7B,MAAM,CAAC;;UAEhB;UACAU,IAAI,CAACf,OAAO,CAACmC,WAAW,IAAI;YACxB,IAAMC,SAAS,GAAGjD,QAAQ,CAACsB,GAAG,CAAC0B,WAAW,CAAC;YAC3C,IAAIC,SAAS,EAAE;cACXf,WAAW,CACPe,SAAS,CAAC1B,MAAM,EAChB;gBACIY,IAAI,EAAE,QAAQ;gBACde,YAAY,EAAExC,KAAK,CAACC,IAAI,CAACiB,IAAI;cACjC,CACJ,CAAC;YACL;UACJ,CAAC,CAAC;UACF;QACJ,KAAK,QAAQ;UACT,IACIa,OAAO,CAACU,YAAY,KAAKjC,MAAM,EACjC;YACEF,gBAAgB,CAACE,MAAM,EAAE,gBAAgB,CAAC;YAC1C;UACJ;UACA,IAAMkC,QAAQ,GAAGpD,QAAQ,CAACsB,GAAG,CAACmB,OAAO,CAACY,cAAc,CAAC;UACrD,IAAID,QAAQ,EAAE;YACVlB,WAAW,CACPkB,QAAQ,CAAC7B,MAAM,EACfkB,OACJ,CAAC;UACL;UACA;QACJ,KAAK,MAAM;UACP;QACJ;UACIzB,gBAAgB,CAACE,MAAM,EAAE,uBAAuB,GAAGiB,IAAI,CAAC;MAChE;IAEJ,CAAC,CAAC;EACN,CAAC,CAAC;EAEF,OAAO;IACHmB,IAAI,EAAEzD,aAAa,CAACyD,IAAI;IACxBC,MAAM,EAAExD,GAAG;IACXyD,QAAQ,EAAE,iBAAiB,GAAG3D,aAAa,CAACyD;EAChD,CAAC;AACL;AAGA,SAASpB,WAAWA,CAACH,EAAa,EAAEU,OAAoB,EAAE;EACtD,IAAMgB,SAAS,GAAGf,IAAI,CAACgB,SAAS,CAACjB,OAAO,CAAC;EACzCV,EAAE,CAAC4B,IAAI,CAACF,SAAS,CAAC;AACtB;AAEA,SAASZ,gBAAgBA,CAAClB,MAAc,EAAW;
EAC/C,IACI,OAAOA,MAAM,KAAK,QAAQ,IAC1BA,MAAM,CAACiC,MAAM,GAAG,CAAC,IACjBjC,MAAM,CAACiC,MAAM,GAAG,GAAG,EACrB;IACE,OAAO,IAAI;EACf,CAAC,MAAM;IACH,OAAO,KAAK;EAChB;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-webrtc/webrtc-helper.js b/dist/esm/plugins/replication-webrtc/webrtc-helper.js deleted file mode 100644 index 00b612796f6..00000000000 --- a/dist/esm/plugins/replication-webrtc/webrtc-helper.js +++ /dev/null @@ -1,26 +0,0 @@ -import { filter, firstValueFrom, map } from 'rxjs'; - -/** - * To deterministically define which peer is master and - * which peer is fork, we compare the storage tokens. - * But we have to hash them before, to ensure that - * a storageToken like 'aaaaaa' is not always the master - * for all peers. - */ -export async function isMasterInWebRTCReplication(hashFunction, ownStorageToken, otherStorageToken) { - var isMaster = (await hashFunction([ownStorageToken, otherStorageToken].join('|'))) > (await hashFunction([otherStorageToken, ownStorageToken].join('|'))); - return isMaster; -} - -/** - * Send a message to the peer and await the answer. - * @throws with an EmptyErrorImpl if the peer connection - * was closed before an answer was received. 
- */ -export function sendMessageAndAwaitAnswer(handler, peer, message) { - var requestId = message.id; - var answerPromise = firstValueFrom(handler.response$.pipe(filter(d => d.peer === peer), filter(d => d.response.id === requestId), map(d => d.response))); - handler.send(peer, message); - return answerPromise; -} -//# sourceMappingURL=webrtc-helper.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication-webrtc/webrtc-helper.js.map b/dist/esm/plugins/replication-webrtc/webrtc-helper.js.map deleted file mode 100644 index 1121f1f10b4..00000000000 --- a/dist/esm/plugins/replication-webrtc/webrtc-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"webrtc-helper.js","names":["filter","firstValueFrom","map","isMasterInWebRTCReplication","hashFunction","ownStorageToken","otherStorageToken","isMaster","join","sendMessageAndAwaitAnswer","handler","peer","message","requestId","id","answerPromise","response$","pipe","d","response","send"],"sources":["../../../../src/plugins/replication-webrtc/webrtc-helper.ts"],"sourcesContent":["import type {\n HashFunction\n} from '../../types/index.d.ts';\nimport type {\n WebRTCConnectionHandler,\n WebRTCMessage,\n WebRTCResponse\n} from './webrtc-types.ts';\nimport { filter, firstValueFrom, map } from 'rxjs';\n\n\n\n/**\n * To deterministically define which peer is master and\n * which peer is fork, we compare the storage tokens.\n * But we have to hash them before, to ensure that\n * a storageToken like 'aaaaaa' is not always the master\n * for all peers.\n */\nexport async function isMasterInWebRTCReplication(\n hashFunction: HashFunction,\n ownStorageToken: string,\n otherStorageToken: string\n): Promise {\n const isMaster =\n await hashFunction([ownStorageToken, otherStorageToken].join('|'))\n >\n await hashFunction([otherStorageToken, ownStorageToken].join('|'));\n return isMaster;\n}\n\n/**\n * Send a message to the peer and await the answer.\n * @throws with an EmptyErrorImpl if the peer 
connection\n * was closed before an answer was received.\n */\nexport function sendMessageAndAwaitAnswer(\n handler: WebRTCConnectionHandler,\n peer: PeerType,\n message: WebRTCMessage\n): Promise {\n\n\n const requestId = message.id;\n const answerPromise = firstValueFrom(\n handler.response$.pipe(\n filter(d => d.peer === peer),\n filter(d => d.response.id === requestId),\n map(d => d.response)\n )\n );\n handler.send(peer, message);\n return answerPromise;\n}\n"],"mappings":"AAQA,SAASA,MAAM,EAAEC,cAAc,EAAEC,GAAG,QAAQ,MAAM;;AAIlD;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,eAAeC,2BAA2BA,CAC7CC,YAA0B,EAC1BC,eAAuB,EACvBC,iBAAyB,EACT;EAChB,IAAMC,QAAQ,GACV,OAAMH,YAAY,CAAC,CAACC,eAAe,EAAEC,iBAAiB,CAAC,CAACE,IAAI,CAAC,GAAG,CAAC,CAAC,KAElE,MAAMJ,YAAY,CAAC,CAACE,iBAAiB,EAAED,eAAe,CAAC,CAACG,IAAI,CAAC,GAAG,CAAC,CAAC;EACtE,OAAOD,QAAQ;AACnB;;AAEA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASE,yBAAyBA,CACrCC,OAA0C,EAC1CC,IAAc,EACdC,OAAsB,EACC;EAGvB,IAAMC,SAAS,GAAGD,OAAO,CAACE,EAAE;EAC5B,IAAMC,aAAa,GAAGd,cAAc,CAChCS,OAAO,CAACM,SAAS,CAACC,IAAI,CAClBjB,MAAM,CAACkB,CAAC,IAAIA,CAAC,CAACP,IAAI,KAAKA,IAAI,CAAC,EAC5BX,MAAM,CAACkB,CAAC,IAAIA,CAAC,CAACC,QAAQ,CAACL,EAAE,KAAKD,SAAS,CAAC,EACxCX,GAAG,CAACgB,CAAC,IAAIA,CAAC,CAACC,QAAQ,CACvB,CACJ,CAAC;EACDT,OAAO,CAACU,IAAI,CAACT,IAAI,EAAEC,OAAO,CAAC;EAC3B,OAAOG,aAAa;AACxB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-webrtc/webrtc-types.js b/dist/esm/plugins/replication-webrtc/webrtc-types.js deleted file mode 100644 index e9db5f9f8e1..00000000000 --- a/dist/esm/plugins/replication-webrtc/webrtc-types.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=webrtc-types.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication-webrtc/webrtc-types.js.map b/dist/esm/plugins/replication-webrtc/webrtc-types.js.map deleted file mode 100644 index 5d19fb332c8..00000000000 --- a/dist/esm/plugins/replication-webrtc/webrtc-types.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"webrtc-types.js","names":[],"sources":["../../../../src/plugins/replication-webrtc/webrtc-types.ts"],"sourcesContent":["import { Observable, Subscription } from 'rxjs';\nimport type {\n MaybePromise,\n ReplicationOptions,\n ReplicationPullOptions,\n ReplicationPushOptions,\n RxError,\n RxReplicationHandler,\n RxStorageDefaultCheckpoint,\n RxTypeError,\n StringKeys\n} from '../../types/index.d.ts';\nimport { RxReplicationState } from '../replication/index.ts';\nimport { WebsocketMessageResponseType, WebsocketMessageType } from '../replication-websocket/index.ts';\n\nexport type WebRTCReplicationCheckpoint = RxStorageDefaultCheckpoint;\n\n\nexport type WebRTCMessage = Omit & {\n method: StringKeys> | 'token';\n};\nexport type WebRTCResponse = Omit;\nexport type PeerWithMessage = {\n peer: PeerType;\n message: WebRTCMessage;\n};\nexport type PeerWithResponse = {\n peer: PeerType;\n response: WebRTCResponse;\n};\n\nexport type WebRTCConnectionHandler = {\n connect$: Observable;\n disconnect$: Observable;\n message$: Observable>;\n response$: Observable>;\n error$: Observable;\n send(peer: PeerType, message: WebRTCMessage | WebRTCResponse): Promise;\n destroy(): Promise;\n};\n\nexport type WebRTCConnectionHandlerCreator = (\n opts: SyncOptionsWebRTC\n) => Promise>;\n\nexport type WebRTCSyncPushOptions = Omit<\n ReplicationPushOptions,\n 'handler'\n> & {};\n\nexport type WebRTCSyncPullOptions = Omit<\n ReplicationPullOptions,\n 'handler' | 'stream$'\n> & {};\n\nexport type SyncOptionsWebRTC = Omit<\n ReplicationOptions,\n 'pull' |\n 'push' |\n 'replicationIdentifier' |\n 'deletedField' |\n 'live' |\n 'autostart' |\n 'waitForLeadership'\n> & {\n /**\n * It will only replicate with other instances\n * that use the same topic.\n */\n topic: string;\n connectionHandlerCreator: WebRTCConnectionHandlerCreator;\n /**\n * Run on new peers so that bad peers can be blocked.\n * If returns true, the peer is valid and it will replicate.\n * If returns false, it 
will drop the peer.\n */\n isPeerValid?: (peer: PeerType) => MaybePromise;\n pull?: WebRTCSyncPullOptions;\n push?: WebRTCSyncPushOptions;\n};\n\nexport type RxWebRTCReplicationState = RxReplicationState;\n\n\nexport type WebRTCPeerState = {\n peer: PeerType;\n // only exists when the peer was picked as master and the own client was picked as fork.\n replicationState?: RxWebRTCReplicationState;\n // clean this up when removing the peer\n subs: Subscription[];\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-websocket/index.js b/dist/esm/plugins/replication-websocket/index.js deleted file mode 100644 index eaff6e147f3..00000000000 --- a/dist/esm/plugins/replication-websocket/index.js +++ /dev/null @@ -1,4 +0,0 @@ -export * from "./websocket-client.js"; -export * from "./websocket-server.js"; -export * from "./websocket-types.js"; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication-websocket/index.js.map b/dist/esm/plugins/replication-websocket/index.js.map deleted file mode 100644 index dfb3f9bda36..00000000000 --- a/dist/esm/plugins/replication-websocket/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":[],"sources":["../../../../src/plugins/replication-websocket/index.ts"],"sourcesContent":["export * from './websocket-client.ts';\nexport * from './websocket-server.ts';\nexport * from './websocket-types.ts';\n"],"mappings":"AAAA,cAAc,uBAAuB;AACrC,cAAc,uBAAuB;AACrC,cAAc,sBAAsB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-websocket/websocket-client.js b/dist/esm/plugins/replication-websocket/websocket-client.js deleted file mode 100644 index abad1100a21..00000000000 --- a/dist/esm/plugins/replication-websocket/websocket-client.js +++ /dev/null @@ -1,139 +0,0 @@ -import { replicateRxCollection } from "../replication/index.js"; -import ReconnectingWebSocket from 
'reconnecting-websocket'; -import IsomorphicWebSocket from 'isomorphic-ws'; -import { errorToPlainJson, randomCouchString, toArray } from "../../plugins/utils/index.js"; -import { filter, map, Subject, firstValueFrom, BehaviorSubject } from 'rxjs'; -import { newRxError } from "../../rx-error.js"; -/** - * Copied and adapted from the 'reconnecting-websocket' npm module. - * Some bundlers have problems with bundling the isomorphic-ws plugin - * so we directly check the correctness in RxDB to ensure that we can - * throw a helpful error. - */ -export function ensureIsWebsocket(w) { - var is = typeof w !== 'undefined' && !!w && w.CLOSING === 2; - if (!is) { - console.dir(w); - throw new Error('websocket not valid'); - } -} -export async function createWebSocketClient(options) { - ensureIsWebsocket(IsomorphicWebSocket); - var wsClient = new ReconnectingWebSocket(options.url, [], { - WebSocket: IsomorphicWebSocket - }); - var connected$ = new BehaviorSubject(false); - var message$ = new Subject(); - var error$ = new Subject(); - wsClient.onerror = err => { - console.log('--- WAS CLIENT GOT ERROR:'); - console.log(err.error.message); - var emitError = newRxError('RC_STREAM', { - errors: toArray(err).map(er => errorToPlainJson(er)), - direction: 'pull' - }); - error$.next(emitError); - }; - await new Promise(res => { - wsClient.onopen = () => { - if (options.headers) { - var authMessage = { - collection: options.collection.name, - id: randomCouchString(10), - params: [options.headers], - method: 'auth' - }; - wsClient.send(JSON.stringify(authMessage)); - } - connected$.next(true); - res(); - }; - }); - wsClient.onclose = () => { - connected$.next(false); - }; - wsClient.onmessage = messageObj => { - var message = JSON.parse(messageObj.data); - message$.next(message); - }; - return { - url: options.url, - socket: wsClient, - connected$, - message$, - error$ - }; -} -export async function replicateWithWebsocketServer(options) { - var websocketClient = await 
createWebSocketClient(options); - var wsClient = websocketClient.socket; - var messages$ = websocketClient.message$; - var requestCounter = 0; - var requestFlag = randomCouchString(10); - function getRequestId() { - var count = requestCounter++; - return options.collection.database.token + '|' + requestFlag + '|' + count; - } - var replicationState = replicateRxCollection({ - collection: options.collection, - replicationIdentifier: options.replicationIdentifier, - live: options.live, - pull: { - batchSize: options.batchSize, - stream$: messages$.pipe(filter(msg => msg.id === 'stream' && msg.collection === options.collection.name), map(msg => msg.result)), - async handler(lastPulledCheckpoint, batchSize) { - var requestId = getRequestId(); - var request = { - id: requestId, - collection: options.collection.name, - method: 'masterChangesSince', - params: [lastPulledCheckpoint, batchSize] - }; - wsClient.send(JSON.stringify(request)); - var result = await firstValueFrom(messages$.pipe(filter(msg => msg.id === requestId), map(msg => msg.result))); - return result; - } - }, - push: { - batchSize: options.batchSize, - handler(docs) { - var requestId = getRequestId(); - var request = { - id: requestId, - collection: options.collection.name, - method: 'masterWrite', - params: [docs] - }; - wsClient.send(JSON.stringify(request)); - return firstValueFrom(messages$.pipe(filter(msg => msg.id === requestId), map(msg => msg.result))); - } - } - }); - websocketClient.error$.subscribe(err => replicationState.subjects.error.next(err)); - websocketClient.connected$.subscribe(isConnected => { - if (isConnected) { - /** - * When the client goes offline and online again, - * we have to send a 'RESYNC' signal because the client - * might have missed out events while being offline. - */ - replicationState.reSync(); - - /** - * Because reconnecting creates a new websocket-instance, - * we have to start the changestream from the remote again - * each time. 
- */ - var streamRequest = { - id: 'stream', - collection: options.collection.name, - method: 'masterChangeStream$', - params: [] - }; - wsClient.send(JSON.stringify(streamRequest)); - } - }); - options.collection.onDestroy.push(() => websocketClient.socket.close()); - return replicationState; -} -//# sourceMappingURL=websocket-client.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication-websocket/websocket-client.js.map b/dist/esm/plugins/replication-websocket/websocket-client.js.map deleted file mode 100644 index 10b5698415e..00000000000 --- a/dist/esm/plugins/replication-websocket/websocket-client.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"websocket-client.js","names":["replicateRxCollection","ReconnectingWebSocket","IsomorphicWebSocket","errorToPlainJson","randomCouchString","toArray","filter","map","Subject","firstValueFrom","BehaviorSubject","newRxError","ensureIsWebsocket","w","is","CLOSING","console","dir","Error","createWebSocketClient","options","wsClient","url","WebSocket","connected$","message$","error$","onerror","err","log","error","message","emitError","errors","er","direction","next","Promise","res","onopen","headers","authMessage","collection","name","id","params","method","send","JSON","stringify","onclose","onmessage","messageObj","parse","data","socket","replicateWithWebsocketServer","websocketClient","messages$","requestCounter","requestFlag","getRequestId","count","database","token","replicationState","replicationIdentifier","live","pull","batchSize","stream$","pipe","msg","result","handler","lastPulledCheckpoint","requestId","request","push","docs","subscribe","subjects","isConnected","reSync","streamRequest","onDestroy","close"],"sources":["../../../../src/plugins/replication-websocket/websocket-client.ts"],"sourcesContent":["import {\n replicateRxCollection,\n RxReplicationState\n} from '../replication/index.ts';\nimport {\n WebsocketClientOptions,\n WebsocketMessageType\n} from 
'./websocket-types.ts';\n\nimport ReconnectingWebSocket from 'reconnecting-websocket';\n\nimport IsomorphicWebSocket from 'isomorphic-ws';\nimport {\n errorToPlainJson,\n randomCouchString,\n toArray\n} from '../../plugins/utils/index.ts';\nimport {\n filter,\n map,\n Subject,\n firstValueFrom,\n BehaviorSubject\n} from 'rxjs';\nimport type {\n RxError,\n RxReplicationWriteToMasterRow\n} from '../../types/index.d.ts';\nimport { newRxError } from '../../rx-error.ts';\n\nexport type WebsocketClient = {\n url: string;\n socket: any;\n connected$: BehaviorSubject;\n message$: Subject;\n error$: Subject;\n};\n\n\n/**\n * Copied and adapted from the 'reconnecting-websocket' npm module.\n * Some bundlers have problems with bundling the isomorphic-ws plugin\n * so we directly check the correctness in RxDB to ensure that we can\n * throw a helpful error.\n */\nexport function ensureIsWebsocket(w: typeof IsomorphicWebSocket) {\n const is = typeof w !== 'undefined' && !!w && w.CLOSING === 2;\n if (!is) {\n console.dir(w);\n throw new Error('websocket not valid');\n }\n}\n\n\nexport async function createWebSocketClient(options: WebsocketClientOptions): Promise {\n ensureIsWebsocket(IsomorphicWebSocket);\n const wsClient = new ReconnectingWebSocket(\n options.url,\n [],\n {\n WebSocket: IsomorphicWebSocket\n }\n );\n const connected$ = new BehaviorSubject(false);\n const message$ = new Subject();\n const error$ = new Subject();\n wsClient.onerror = (err) => {\n\n console.log('--- WAS CLIENT GOT ERROR:');\n console.log(err.error.message);\n\n const emitError = newRxError('RC_STREAM', {\n errors: toArray(err).map((er: any) => errorToPlainJson(er)),\n direction: 'pull'\n });\n error$.next(emitError);\n };\n await new Promise(res => {\n wsClient.onopen = () => {\n\n if (options.headers) {\n const authMessage: WebsocketMessageType = {\n collection: options.collection.name,\n id: randomCouchString(10),\n params: [options.headers],\n method: 'auth'\n };\n 
wsClient.send(JSON.stringify(authMessage));\n }\n\n connected$.next(true);\n res();\n };\n });\n wsClient.onclose = () => {\n connected$.next(false);\n };\n\n wsClient.onmessage = (messageObj) => {\n const message = JSON.parse(messageObj.data);\n message$.next(message);\n };\n\n return {\n url: options.url,\n socket: wsClient,\n connected$,\n message$,\n error$\n };\n\n}\n\nexport async function replicateWithWebsocketServer(\n options: WebsocketClientOptions\n): Promise> {\n const websocketClient = await createWebSocketClient(options);\n const wsClient = websocketClient.socket;\n const messages$ = websocketClient.message$;\n\n let requestCounter = 0;\n const requestFlag = randomCouchString(10);\n function getRequestId() {\n const count = requestCounter++;\n return options.collection.database.token + '|' + requestFlag + '|' + count;\n }\n const replicationState = replicateRxCollection({\n collection: options.collection,\n replicationIdentifier: options.replicationIdentifier,\n live: options.live,\n pull: {\n batchSize: options.batchSize,\n stream$: messages$.pipe(\n filter(msg => msg.id === 'stream' && msg.collection === options.collection.name),\n map(msg => msg.result)\n ),\n async handler(lastPulledCheckpoint: CheckpointType | undefined, batchSize: number) {\n const requestId = getRequestId();\n const request: WebsocketMessageType = {\n id: requestId,\n collection: options.collection.name,\n method: 'masterChangesSince',\n params: [lastPulledCheckpoint, batchSize]\n };\n wsClient.send(JSON.stringify(request));\n const result = await firstValueFrom(\n messages$.pipe(\n filter(msg => msg.id === requestId),\n map(msg => msg.result)\n )\n );\n return result;\n }\n },\n push: {\n batchSize: options.batchSize,\n handler(docs: RxReplicationWriteToMasterRow[]) {\n const requestId = getRequestId();\n const request: WebsocketMessageType = {\n id: requestId,\n collection: options.collection.name,\n method: 'masterWrite',\n params: [docs]\n };\n 
wsClient.send(JSON.stringify(request));\n return firstValueFrom(\n messages$.pipe(\n filter(msg => msg.id === requestId),\n map(msg => msg.result)\n )\n );\n }\n }\n });\n\n websocketClient.error$.subscribe(err => replicationState.subjects.error.next(err));\n\n websocketClient.connected$.subscribe(isConnected => {\n if (isConnected) {\n /**\n * When the client goes offline and online again,\n * we have to send a 'RESYNC' signal because the client\n * might have missed out events while being offline.\n */\n replicationState.reSync();\n\n /**\n * Because reconnecting creates a new websocket-instance,\n * we have to start the changestream from the remote again\n * each time.\n */\n const streamRequest: WebsocketMessageType = {\n id: 'stream',\n collection: options.collection.name,\n method: 'masterChangeStream$',\n params: []\n };\n wsClient.send(JSON.stringify(streamRequest));\n }\n });\n\n options.collection.onDestroy.push(() => websocketClient.socket.close());\n return replicationState;\n}\n"],"mappings":"AAAA,SACIA,qBAAqB,QAElB,yBAAyB;AAMhC,OAAOC,qBAAqB,MAAM,wBAAwB;AAE1D,OAAOC,mBAAmB,MAAM,eAAe;AAC/C,SACIC,gBAAgB,EAChBC,iBAAiB,EACjBC,OAAO,QACJ,8BAA8B;AACrC,SACIC,MAAM,EACNC,GAAG,EACHC,OAAO,EACPC,cAAc,EACdC,eAAe,QACZ,MAAM;AAKb,SAASC,UAAU,QAAQ,mBAAmB;AAW9C;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASC,iBAAiBA,CAACC,CAA6B,EAAE;EAC7D,IAAMC,EAAE,GAAG,OAAOD,CAAC,KAAK,WAAW,IAAI,CAAC,CAACA,CAAC,IAAIA,CAAC,CAACE,OAAO,KAAK,CAAC;EAC7D,IAAI,CAACD,EAAE,EAAE;IACLE,OAAO,CAACC,GAAG,CAACJ,CAAC,CAAC;IACd,MAAM,IAAIK,KAAK,CAAC,qBAAqB,CAAC;EAC1C;AACJ;AAGA,OAAO,eAAeC,qBAAqBA,CAAYC,OAA0C,EAA4B;EACzHR,iBAAiB,CAACV,mBAAmB,CAAC;EACtC,IAAMmB,QAAQ,GAAG,IAAIpB,qBAAqB,CACtCmB,OAAO,CAACE,GAAG,EACX,EAAE,EACF;IACIC,SAAS,EAAErB;EACf,CACJ,CAAC;EACD,IAAMsB,UAAU,GAAG,IAAId,eAAe,CAAU,KAAK,CAAC;EACtD,IAAMe,QAAQ,GAAG,IAAIjB,OAAO,CAAM,CAAC;EACnC,IAAMkB,MAAM,GAAG,IAAIlB,OAAO,CAAM,CAAC;EACjCa,QAAQ,CAACM,OAAO,GAAIC,GAAG,IAAK;IAExBZ,OAAO,CAACa,GAAG,CAAC,2BAA2B,CAAC;IACxCb,OAAO,CAACa,GAAG,CAACD,GAAG,CAACE,KAAK,CAACC,
OAAO,CAAC;IAE9B,IAAMC,SAAS,GAAGrB,UAAU,CAAC,WAAW,EAAE;MACtCsB,MAAM,EAAE5B,OAAO,CAACuB,GAAG,CAAC,CAACrB,GAAG,CAAE2B,EAAO,IAAK/B,gBAAgB,CAAC+B,EAAE,CAAC,CAAC;MAC3DC,SAAS,EAAE;IACf,CAAC,CAAC;IACFT,MAAM,CAACU,IAAI,CAACJ,SAAS,CAAC;EAC1B,CAAC;EACD,MAAM,IAAIK,OAAO,CAAOC,GAAG,IAAI;IAC3BjB,QAAQ,CAACkB,MAAM,GAAG,MAAM;MAEpB,IAAInB,OAAO,CAACoB,OAAO,EAAE;QACjB,IAAMC,WAAiC,GAAG;UACtCC,UAAU,EAAEtB,OAAO,CAACsB,UAAU,CAACC,IAAI;UACnCC,EAAE,EAAExC,iBAAiB,CAAC,EAAE,CAAC;UACzByC,MAAM,EAAE,CAACzB,OAAO,CAACoB,OAAO,CAAC;UACzBM,MAAM,EAAE;QACZ,CAAC;QACDzB,QAAQ,CAAC0B,IAAI,CAACC,IAAI,CAACC,SAAS,CAACR,WAAW,CAAC,CAAC;MAC9C;MAEAjB,UAAU,CAACY,IAAI,CAAC,IAAI,CAAC;MACrBE,GAAG,CAAC,CAAC;IACT,CAAC;EACL,CAAC,CAAC;EACFjB,QAAQ,CAAC6B,OAAO,GAAG,MAAM;IACrB1B,UAAU,CAACY,IAAI,CAAC,KAAK,CAAC;EAC1B,CAAC;EAEDf,QAAQ,CAAC8B,SAAS,GAAIC,UAAU,IAAK;IACjC,IAAMrB,OAAO,GAAGiB,IAAI,CAACK,KAAK,CAACD,UAAU,CAACE,IAAI,CAAC;IAC3C7B,QAAQ,CAACW,IAAI,CAACL,OAAO,CAAC;EAC1B,CAAC;EAED,OAAO;IACHT,GAAG,EAAEF,OAAO,CAACE,GAAG;IAChBiC,MAAM,EAAElC,QAAQ;IAChBG,UAAU;IACVC,QAAQ;IACRC;EACJ,CAAC;AAEL;AAEA,OAAO,eAAe8B,4BAA4BA,CAC9CpC,OAA0C,EACY;EACtD,IAAMqC,eAAe,GAAG,MAAMtC,qBAAqB,CAACC,OAAO,CAAC;EAC5D,IAAMC,QAAQ,GAAGoC,eAAe,CAACF,MAAM;EACvC,IAAMG,SAAS,GAAGD,eAAe,CAAChC,QAAQ;EAE1C,IAAIkC,cAAc,GAAG,CAAC;EACtB,IAAMC,WAAW,GAAGxD,iBAAiB,CAAC,EAAE,CAAC;EACzC,SAASyD,YAAYA,CAAA,EAAG;IACpB,IAAMC,KAAK,GAAGH,cAAc,EAAE;IAC9B,OAAOvC,OAAO,CAACsB,UAAU,CAACqB,QAAQ,CAACC,KAAK,GAAG,GAAG,GAAGJ,WAAW,GAAG,GAAG,GAAGE,KAAK;EAC9E;EACA,IAAMG,gBAAgB,GAAGjE,qBAAqB,CAA4B;IACtE0C,UAAU,EAAEtB,OAAO,CAACsB,UAAU;IAC9BwB,qBAAqB,EAAE9C,OAAO,CAAC8C,qBAAqB;IACpDC,IAAI,EAAE/C,OAAO,CAAC+C,IAAI;IAClBC,IAAI,EAAE;MACFC,SAAS,EAAEjD,OAAO,CAACiD,SAAS;MAC5BC,OAAO,EAAEZ,SAAS,CAACa,IAAI,CACnBjE,MAAM,CAACkE,GAAG,IAAIA,GAAG,CAAC5B,EAAE,KAAK,QAAQ,IAAI4B,GAAG,CAAC9B,UAAU,KAAKtB,OAAO,CAACsB,UAAU,CAACC,IAAI,CAAC,EAChFpC,GAAG,CAACiE,GAAG,IAAIA,GAAG,CAACC,MAAM,CACzB,CAAC;MACD,MAAMC,OAAOA,CAACC,oBAAgD,EAAEN,SAAiB,EAAE;QAC/E,IAAMO,SAAS,GAAGf,YAAY,CAAC,CAAC;QAChC,IAAMgB,OAA6B,GAAG;UAClCjC,EAAE,EAAEg
C,SAAS;UACblC,UAAU,EAAEtB,OAAO,CAACsB,UAAU,CAACC,IAAI;UACnCG,MAAM,EAAE,oBAAoB;UAC5BD,MAAM,EAAE,CAAC8B,oBAAoB,EAAEN,SAAS;QAC5C,CAAC;QACDhD,QAAQ,CAAC0B,IAAI,CAACC,IAAI,CAACC,SAAS,CAAC4B,OAAO,CAAC,CAAC;QACtC,IAAMJ,MAAM,GAAG,MAAMhE,cAAc,CAC/BiD,SAAS,CAACa,IAAI,CACVjE,MAAM,CAACkE,GAAG,IAAIA,GAAG,CAAC5B,EAAE,KAAKgC,SAAS,CAAC,EACnCrE,GAAG,CAACiE,GAAG,IAAIA,GAAG,CAACC,MAAM,CACzB,CACJ,CAAC;QACD,OAAOA,MAAM;MACjB;IACJ,CAAC;IACDK,IAAI,EAAE;MACFT,SAAS,EAAEjD,OAAO,CAACiD,SAAS;MAC5BK,OAAOA,CAACK,IAAgD,EAAE;QACtD,IAAMH,SAAS,GAAGf,YAAY,CAAC,CAAC;QAChC,IAAMgB,OAA6B,GAAG;UAClCjC,EAAE,EAAEgC,SAAS;UACblC,UAAU,EAAEtB,OAAO,CAACsB,UAAU,CAACC,IAAI;UACnCG,MAAM,EAAE,aAAa;UACrBD,MAAM,EAAE,CAACkC,IAAI;QACjB,CAAC;QACD1D,QAAQ,CAAC0B,IAAI,CAACC,IAAI,CAACC,SAAS,CAAC4B,OAAO,CAAC,CAAC;QACtC,OAAOpE,cAAc,CACjBiD,SAAS,CAACa,IAAI,CACVjE,MAAM,CAACkE,GAAG,IAAIA,GAAG,CAAC5B,EAAE,KAAKgC,SAAS,CAAC,EACnCrE,GAAG,CAACiE,GAAG,IAAIA,GAAG,CAACC,MAAM,CACzB,CACJ,CAAC;MACL;IACJ;EACJ,CAAC,CAAC;EAEFhB,eAAe,CAAC/B,MAAM,CAACsD,SAAS,CAACpD,GAAG,IAAIqC,gBAAgB,CAACgB,QAAQ,CAACnD,KAAK,CAACM,IAAI,CAACR,GAAG,CAAC,CAAC;EAElF6B,eAAe,CAACjC,UAAU,CAACwD,SAAS,CAACE,WAAW,IAAI;IAChD,IAAIA,WAAW,EAAE;MACb;AACZ;AACA;AACA;AACA;MACYjB,gBAAgB,CAACkB,MAAM,CAAC,CAAC;;MAEzB;AACZ;AACA;AACA;AACA;MACY,IAAMC,aAAmC,GAAG;QACxCxC,EAAE,EAAE,QAAQ;QACZF,UAAU,EAAEtB,OAAO,CAACsB,UAAU,CAACC,IAAI;QACnCG,MAAM,EAAE,qBAAqB;QAC7BD,MAAM,EAAE;MACZ,CAAC;MACDxB,QAAQ,CAAC0B,IAAI,CAACC,IAAI,CAACC,SAAS,CAACmC,aAAa,CAAC,CAAC;IAChD;EACJ,CAAC,CAAC;EAEFhE,OAAO,CAACsB,UAAU,CAAC2C,SAAS,CAACP,IAAI,CAAC,MAAMrB,eAAe,CAACF,MAAM,CAAC+B,KAAK,CAAC,CAAC,CAAC;EACvE,OAAOrB,gBAAgB;AAC3B","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-websocket/websocket-server.js b/dist/esm/plugins/replication-websocket/websocket-server.js deleted file mode 100644 index ecd81df167b..00000000000 --- a/dist/esm/plugins/replication-websocket/websocket-server.js +++ /dev/null @@ -1,103 +0,0 @@ -import pkg from 'isomorphic-ws'; -var { - WebSocketServer -} = pkg; 
-import { rxStorageInstanceToReplicationHandler } from "../../replication-protocol/index.js"; -import { PROMISE_RESOLVE_VOID, getFromMapOrCreate } from "../../plugins/utils/index.js"; -import { Subject } from 'rxjs'; -export function startSocketServer(options) { - var wss = new WebSocketServer(options); - var closed = false; - function closeServer() { - if (closed) { - return PROMISE_RESOLVE_VOID; - } - closed = true; - onConnection$.complete(); - return new Promise((res, rej) => { - /** - * We have to close all client connections, - * otherwise wss.close() will never call the callback. - * @link https://github.com/websockets/ws/issues/1288#issuecomment-360594458 - */ - for (var ws of wss.clients) { - ws.close(); - } - wss.close(err => { - if (err) { - rej(err); - } else { - res(); - } - }); - }); - } - var onConnection$ = new Subject(); - wss.on('connection', ws => onConnection$.next(ws)); - return { - server: wss, - close: closeServer, - onConnection$: onConnection$.asObservable() - }; -} -var REPLICATION_HANDLER_BY_COLLECTION = new Map(); -export function getReplicationHandlerByCollection(database, collectionName) { - if (!database.collections[collectionName]) { - throw new Error('collection ' + collectionName + ' does not exist'); - } - var collection = database.collections[collectionName]; - var handler = getFromMapOrCreate(REPLICATION_HANDLER_BY_COLLECTION, collection, () => { - return rxStorageInstanceToReplicationHandler(collection.storageInstance, collection.conflictHandler, database.token); - }); - return handler; -} -export function startWebsocketServer(options) { - var { - database, - ...wsOptions - } = options; - var serverState = startSocketServer(wsOptions); - - // auto close when the database gets destroyed - database.onDestroy.push(() => serverState.close()); - serverState.onConnection$.subscribe(ws => { - var onCloseHandlers = []; - ws.onclose = () => { - onCloseHandlers.map(fn => fn()); - }; - ws.on('message', async messageString => { - var 
message = JSON.parse(messageString); - var handler = getReplicationHandlerByCollection(database, message.collection); - if (message.method === 'auth') { - return; - } - var method = handler[message.method]; - - /** - * If it is not a function, - * it means that the client requested the masterChangeStream$ - */ - if (typeof method !== 'function') { - var changeStreamSub = handler.masterChangeStream$.subscribe(ev => { - var streamResponse = { - id: 'stream', - collection: message.collection, - result: ev - }; - ws.send(JSON.stringify(streamResponse)); - }); - onCloseHandlers.push(() => changeStreamSub.unsubscribe()); - return; - } - var result = await method(...message.params); - var response = { - id: message.id, - collection: message.collection, - result - }; - ws.send(JSON.stringify(response)); - }); - }); - return serverState; -} -//# sourceMappingURL=websocket-server.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication-websocket/websocket-server.js.map b/dist/esm/plugins/replication-websocket/websocket-server.js.map deleted file mode 100644 index 0dc5049cfa2..00000000000 --- a/dist/esm/plugins/replication-websocket/websocket-server.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"websocket-server.js","names":["pkg","WebSocketServer","rxStorageInstanceToReplicationHandler","PROMISE_RESOLVE_VOID","getFromMapOrCreate","Subject","startSocketServer","options","wss","closed","closeServer","onConnection$","complete","Promise","res","rej","ws","clients","close","err","on","next","server","asObservable","REPLICATION_HANDLER_BY_COLLECTION","Map","getReplicationHandlerByCollection","database","collectionName","collections","Error","collection","handler","storageInstance","conflictHandler","token","startWebsocketServer","wsOptions","serverState","onDestroy","push","subscribe","onCloseHandlers","onclose","map","fn","messageString","message","JSON","parse","method","changeStreamSub","masterChangeStream$","ev","streamResponse","id","result","send","stringify","unsubscribe","params","response"],"sources":["../../../../src/plugins/replication-websocket/websocket-server.ts"],"sourcesContent":["import type {\n RxCollection,\n RxDatabase,\n RxReplicationHandler\n} from '../../types/index.d.ts';\n\nimport type {\n WebSocket,\n ServerOptions\n} from 'isomorphic-ws';\nimport pkg from 'isomorphic-ws';\nconst { WebSocketServer } = pkg;\n\nimport type {\n WebsocketMessageResponseType,\n WebsocketMessageType,\n WebsocketServerOptions,\n WebsocketServerState\n} from './websocket-types.ts';\nimport { rxStorageInstanceToReplicationHandler } from '../../replication-protocol/index.ts';\nimport {\n PROMISE_RESOLVE_VOID, getFromMapOrCreate\n} from '../../plugins/utils/index.ts';\nimport { Subject } from 'rxjs';\n\nexport function startSocketServer(options: ServerOptions): WebsocketServerState {\n const wss = new WebSocketServer(options);\n let closed = false;\n function closeServer() {\n if (closed) {\n return PROMISE_RESOLVE_VOID;\n }\n closed = true;\n onConnection$.complete();\n return new Promise((res, rej) => {\n /**\n * We have to close all client connections,\n * otherwise wss.close() will never call the callback.\n * @link 
https://github.com/websockets/ws/issues/1288#issuecomment-360594458\n */\n for (const ws of wss.clients) {\n ws.close();\n }\n wss.close((err: any) => {\n if (err) {\n rej(err);\n } else {\n res();\n }\n });\n });\n }\n\n const onConnection$ = new Subject();\n wss.on('connection', (ws: any) => onConnection$.next(ws));\n\n return {\n server: wss,\n close: closeServer,\n onConnection$: onConnection$.asObservable()\n };\n}\n\nconst REPLICATION_HANDLER_BY_COLLECTION: WeakMap> = new Map();\nexport function getReplicationHandlerByCollection(\n database: RxDatabase,\n collectionName: string\n): RxReplicationHandler {\n if (!database.collections[collectionName]) {\n throw new Error('collection ' + collectionName + ' does not exist');\n }\n\n const collection = database.collections[collectionName];\n const handler = getFromMapOrCreate>(\n REPLICATION_HANDLER_BY_COLLECTION,\n collection,\n () => {\n return rxStorageInstanceToReplicationHandler(\n collection.storageInstance,\n collection.conflictHandler,\n database.token\n );\n }\n );\n return handler;\n}\n\nexport function startWebsocketServer(options: WebsocketServerOptions): WebsocketServerState {\n const { database, ...wsOptions } = options;\n const serverState = startSocketServer(wsOptions);\n\n // auto close when the database gets destroyed\n database.onDestroy.push(() => serverState.close());\n\n serverState.onConnection$.subscribe(ws => {\n const onCloseHandlers: Function[] = [];\n ws.onclose = () => {\n onCloseHandlers.map(fn => fn());\n };\n ws.on('message', async (messageString: string) => {\n const message: WebsocketMessageType = JSON.parse(messageString);\n const handler = getReplicationHandlerByCollection(database, message.collection);\n if (message.method === 'auth') {\n return;\n }\n const method = handler[message.method];\n\n /**\n * If it is not a function,\n * it means that the client requested the masterChangeStream$\n */\n if (typeof method !== 'function') {\n const changeStreamSub = 
handler.masterChangeStream$.subscribe(ev => {\n const streamResponse: WebsocketMessageResponseType = {\n id: 'stream',\n collection: message.collection,\n result: ev\n };\n ws.send(JSON.stringify(streamResponse));\n });\n onCloseHandlers.push(() => changeStreamSub.unsubscribe());\n return;\n }\n const result = await (method as any)(...message.params);\n const response: WebsocketMessageResponseType = {\n id: message.id,\n collection: message.collection,\n result\n };\n ws.send(JSON.stringify(response));\n });\n });\n\n\n return serverState;\n}\n"],"mappings":"AAUA,OAAOA,GAAG,MAAM,eAAe;AAC/B,IAAM;EAAEC;AAAgB,CAAC,GAAGD,GAAG;AAQ/B,SAASE,qCAAqC,QAAQ,qCAAqC;AAC3F,SACIC,oBAAoB,EAAEC,kBAAkB,QACrC,8BAA8B;AACrC,SAASC,OAAO,QAAQ,MAAM;AAE9B,OAAO,SAASC,iBAAiBA,CAACC,OAAsB,EAAwB;EAC5E,IAAMC,GAAG,GAAG,IAAIP,eAAe,CAACM,OAAO,CAAC;EACxC,IAAIE,MAAM,GAAG,KAAK;EAClB,SAASC,WAAWA,CAAA,EAAG;IACnB,IAAID,MAAM,EAAE;MACR,OAAON,oBAAoB;IAC/B;IACAM,MAAM,GAAG,IAAI;IACbE,aAAa,CAACC,QAAQ,CAAC,CAAC;IACxB,OAAO,IAAIC,OAAO,CAAO,CAACC,GAAG,EAAEC,GAAG,KAAK;MACnC;AACZ;AACA;AACA;AACA;MACY,KAAK,IAAMC,EAAE,IAAIR,GAAG,CAACS,OAAO,EAAE;QAC1BD,EAAE,CAACE,KAAK,CAAC,CAAC;MACd;MACAV,GAAG,CAACU,KAAK,CAAEC,GAAQ,IAAK;QACpB,IAAIA,GAAG,EAAE;UACLJ,GAAG,CAACI,GAAG,CAAC;QACZ,CAAC,MAAM;UACHL,GAAG,CAAC,CAAC;QACT;MACJ,CAAC,CAAC;IACN,CAAC,CAAC;EACN;EAEA,IAAMH,aAAa,GAAG,IAAIN,OAAO,CAAY,CAAC;EAC9CG,GAAG,CAACY,EAAE,CAAC,YAAY,EAAGJ,EAAO,IAAKL,aAAa,CAACU,IAAI,CAACL,EAAE,CAAC,CAAC;EAEzD,OAAO;IACHM,MAAM,EAAEd,GAAG;IACXU,KAAK,EAAER,WAAW;IAClBC,aAAa,EAAEA,aAAa,CAACY,YAAY,CAAC;EAC9C,CAAC;AACL;AAEA,IAAMC,iCAAwF,GAAG,IAAIC,GAAG,CAAC,CAAC;AAC1G,OAAO,SAASC,iCAAiCA,CAC7CC,QAAyB,EACzBC,cAAsB,EACc;EACpC,IAAI,CAACD,QAAQ,CAACE,WAAW,CAACD,cAAc,CAAC,EAAE;IACvC,MAAM,IAAIE,KAAK,CAAC,aAAa,GAAGF,cAAc,GAAG,iBAAiB,CAAC;EACvE;EAEA,IAAMG,UAAU,GAAGJ,QAAQ,CAACE,WAAW,CAACD,cAAc,CAAC;EACvD,IAAMI,OAAO,GAAG5B,kBAAkB,CAC9BoB,iCAAiC,EACjCO,UAAU,EACV,MAAM;IACF,OAAO7B,qCAAqC,CACxC6B,UAAU,CAACE,eAAe,EAC1BF,UAAU,CAACG,eAAe,EAC1BP,QAAQ,CAACQ,KACb,CAAC;EACL,CACJ,CAAC;EA
CD,OAAOH,OAAO;AAClB;AAEA,OAAO,SAASI,oBAAoBA,CAAC7B,OAA+B,EAAwB;EACxF,IAAM;IAAEoB,QAAQ;IAAE,GAAGU;EAAU,CAAC,GAAG9B,OAAO;EAC1C,IAAM+B,WAAW,GAAGhC,iBAAiB,CAAC+B,SAAS,CAAC;;EAEhD;EACAV,QAAQ,CAACY,SAAS,CAACC,IAAI,CAAC,MAAMF,WAAW,CAACpB,KAAK,CAAC,CAAC,CAAC;EAElDoB,WAAW,CAAC3B,aAAa,CAAC8B,SAAS,CAACzB,EAAE,IAAI;IACtC,IAAM0B,eAA2B,GAAG,EAAE;IACtC1B,EAAE,CAAC2B,OAAO,GAAG,MAAM;MACfD,eAAe,CAACE,GAAG,CAACC,EAAE,IAAIA,EAAE,CAAC,CAAC,CAAC;IACnC,CAAC;IACD7B,EAAE,CAACI,EAAE,CAAC,SAAS,EAAE,MAAO0B,aAAqB,IAAK;MAC9C,IAAMC,OAA6B,GAAGC,IAAI,CAACC,KAAK,CAACH,aAAa,CAAC;MAC/D,IAAMd,OAAO,GAAGN,iCAAiC,CAACC,QAAQ,EAAEoB,OAAO,CAAChB,UAAU,CAAC;MAC/E,IAAIgB,OAAO,CAACG,MAAM,KAAK,MAAM,EAAE;QAC3B;MACJ;MACA,IAAMA,MAAM,GAAGlB,OAAO,CAACe,OAAO,CAACG,MAAM,CAAC;;MAEtC;AACZ;AACA;AACA;MACY,IAAI,OAAOA,MAAM,KAAK,UAAU,EAAE;QAC9B,IAAMC,eAAe,GAAGnB,OAAO,CAACoB,mBAAmB,CAACX,SAAS,CAACY,EAAE,IAAI;UAChE,IAAMC,cAA4C,GAAG;YACjDC,EAAE,EAAE,QAAQ;YACZxB,UAAU,EAAEgB,OAAO,CAAChB,UAAU;YAC9ByB,MAAM,EAAEH;UACZ,CAAC;UACDrC,EAAE,CAACyC,IAAI,CAACT,IAAI,CAACU,SAAS,CAACJ,cAAc,CAAC,CAAC;QAC3C,CAAC,CAAC;QACFZ,eAAe,CAACF,IAAI,CAAC,MAAMW,eAAe,CAACQ,WAAW,CAAC,CAAC,CAAC;QACzD;MACJ;MACA,IAAMH,MAAM,GAAG,MAAON,MAAM,CAAS,GAAGH,OAAO,CAACa,MAAM,CAAC;MACvD,IAAMC,QAAsC,GAAG;QAC3CN,EAAE,EAAER,OAAO,CAACQ,EAAE;QACdxB,UAAU,EAAEgB,OAAO,CAAChB,UAAU;QAC9ByB;MACJ,CAAC;MACDxC,EAAE,CAACyC,IAAI,CAACT,IAAI,CAACU,SAAS,CAACG,QAAQ,CAAC,CAAC;IACrC,CAAC,CAAC;EACN,CAAC,CAAC;EAGF,OAAOvB,WAAW;AACtB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication-websocket/websocket-types.js b/dist/esm/plugins/replication-websocket/websocket-types.js deleted file mode 100644 index 32586dd58b6..00000000000 --- a/dist/esm/plugins/replication-websocket/websocket-types.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=websocket-types.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication-websocket/websocket-types.js.map b/dist/esm/plugins/replication-websocket/websocket-types.js.map deleted file 
mode 100644 index f15c7a897a0..00000000000 --- a/dist/esm/plugins/replication-websocket/websocket-types.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"websocket-types.js","names":[],"sources":["../../../../src/plugins/replication-websocket/websocket-types.ts"],"sourcesContent":["import type {\n Observable,\n} from 'rxjs';\nimport type {\n ServerOptions,\n ClientOptions,\n WebSocketServer,\n WebSocket\n} from 'ws';\nimport type {\n RxCollection,\n RxDatabase,\n RxReplicationHandler,\n StringKeys\n} from '../../types/index.d.ts';\n\nexport type WebsocketServerOptions = {\n database: RxDatabase;\n} & ServerOptions;\n\nexport type WebsocketServerState = {\n server: WebSocketServer;\n close: () => Promise;\n onConnection$: Observable;\n};\n\nexport type WebsocketClientOptions = {\n replicationIdentifier: string;\n collection: RxCollection;\n url: string;\n batchSize?: number;\n live?: boolean;\n headers?: { [k: string]: string; };\n} & ClientOptions;\n\nexport type WebsocketMessageType = {\n id: string;\n collection: string;\n method: StringKeys> | 'auth';\n params: any[];\n};\n\nexport type WebsocketMessageResponseType = {\n id: string;\n collection: string;\n result: any;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication/index.js b/dist/esm/plugins/replication/index.js deleted file mode 100644 index 592a619f56a..00000000000 --- a/dist/esm/plugins/replication/index.js +++ /dev/null @@ -1,399 +0,0 @@ -/** - * This plugin contains the primitives to create - * a RxDB client-server replication. - * It is used in the other replication plugins - * but also can be used as standalone with a custom replication handler. 
- */ - -import { BehaviorSubject, combineLatest, filter, mergeMap, Subject } from 'rxjs'; -import { RxDBLeaderElectionPlugin } from "../leader-election/index.js"; -import { arrayFilterNotEmpty, ensureNotFalsy, errorToPlainJson, flatClone, getFromMapOrCreate, PROMISE_RESOLVE_FALSE, PROMISE_RESOLVE_TRUE, toArray, toPromise } from "../../plugins/utils/index.js"; -import { awaitRxStorageReplicationFirstInSync, awaitRxStorageReplicationInSync, cancelRxStorageReplication, getRxReplicationMetaInstanceSchema, replicateRxStorageInstance } from "../../replication-protocol/index.js"; -import { newRxError } from "../../rx-error.js"; -import { awaitRetry, DEFAULT_MODIFIER, swapDefaultDeletedTodeletedField, handlePulledDocuments } from "./replication-helper.js"; -import { addConnectedStorageToCollection, removeConnectedStorageFromCollection } from "../../rx-database-internal-store.js"; -import { addRxPlugin } from "../../plugin.js"; -import { hasEncryption } from "../../rx-storage-helper.js"; -import { overwritable } from "../../overwritable.js"; -import { runAsyncPluginHooks } from "../../hooks.js"; -export var REPLICATION_STATE_BY_COLLECTION = new WeakMap(); -export var RxReplicationState = /*#__PURE__*/function () { - function RxReplicationState( - /** - * The identifier, used to flag revisions - * and to identify which documents state came from the remote. 
- */ - replicationIdentifier, collection, deletedField, pull, push, live, retryTime, autoStart) { - this.subs = []; - this.subjects = { - received: new Subject(), - // all documents that are received from the endpoint - sent: new Subject(), - // all documents that are send to the endpoint - error: new Subject(), - // all errors that are received from the endpoint, emits new Error() objects - canceled: new BehaviorSubject(false), - // true when the replication was canceled - active: new BehaviorSubject(false) // true when something is running, false when not - }; - this.received$ = this.subjects.received.asObservable(); - this.sent$ = this.subjects.sent.asObservable(); - this.error$ = this.subjects.error.asObservable(); - this.canceled$ = this.subjects.canceled.asObservable(); - this.active$ = this.subjects.active.asObservable(); - this.onCancel = []; - this.callOnStart = undefined; - this.remoteEvents$ = new Subject(); - this.replicationIdentifier = replicationIdentifier; - this.collection = collection; - this.deletedField = deletedField; - this.pull = pull; - this.push = push; - this.live = live; - this.retryTime = retryTime; - this.autoStart = autoStart; - this.metaInfoPromise = (async () => { - var metaInstanceCollectionName = 'rx-replication-meta-' + (await collection.database.hashFunction([this.collection.name, this.replicationIdentifier].join('-'))); - var metaInstanceSchema = getRxReplicationMetaInstanceSchema(this.collection.schema.jsonSchema, hasEncryption(this.collection.schema.jsonSchema)); - return { - collectionName: metaInstanceCollectionName, - schema: metaInstanceSchema - }; - })(); - var replicationStates = getFromMapOrCreate(REPLICATION_STATE_BY_COLLECTION, collection, () => []); - replicationStates.push(this); - - // stop the replication when the collection gets destroyed - this.collection.onDestroy.push(() => this.cancel()); - - // create getters for the observables - Object.keys(this.subjects).forEach(key => { - Object.defineProperty(this, key 
+ '$', { - get: function () { - return this.subjects[key].asObservable(); - } - }); - }); - var startPromise = new Promise(res => { - this.callOnStart = res; - }); - this.startPromise = startPromise; - } - var _proto = RxReplicationState.prototype; - _proto.start = async function start() { - if (this.isStopped()) { - return; - } - - // fill in defaults for pull & push - var pullModifier = this.pull && this.pull.modifier ? this.pull.modifier : DEFAULT_MODIFIER; - var pushModifier = this.push && this.push.modifier ? this.push.modifier : DEFAULT_MODIFIER; - var database = this.collection.database; - var metaInfo = await this.metaInfoPromise; - var [metaInstance] = await Promise.all([this.collection.database.storage.createStorageInstance({ - databaseName: database.name, - collectionName: metaInfo.collectionName, - databaseInstanceToken: database.token, - multiInstance: database.multiInstance, - // TODO is this always false? - options: {}, - schema: metaInfo.schema, - password: database.password, - devMode: overwritable.isDevMode() - }), addConnectedStorageToCollection(this.collection, metaInfo.collectionName, metaInfo.schema)]); - this.metaInstance = metaInstance; - this.internalReplicationState = replicateRxStorageInstance({ - pushBatchSize: this.push && this.push.batchSize ? this.push.batchSize : 100, - pullBatchSize: this.pull && this.pull.batchSize ? this.pull.batchSize : 100, - initialCheckpoint: { - upstream: this.push ? this.push.initialCheckpoint : undefined, - downstream: this.pull ? 
this.pull.initialCheckpoint : undefined - }, - forkInstance: this.collection.storageInstance, - metaInstance: this.metaInstance, - hashFunction: database.hashFunction, - identifier: 'rxdbreplication' + this.replicationIdentifier, - conflictHandler: this.collection.conflictHandler, - replicationHandler: { - masterChangeStream$: this.remoteEvents$.asObservable().pipe(filter(_v => !!this.pull), mergeMap(async ev => { - if (ev === 'RESYNC') { - return ev; - } - var useEv = flatClone(ev); - useEv.documents = handlePulledDocuments(this.collection, this.deletedField, useEv.documents); - useEv.documents = await Promise.all(useEv.documents.map(d => pullModifier(d))); - return useEv; - })), - masterChangesSince: async (checkpoint, batchSize) => { - if (!this.pull) { - return { - checkpoint: null, - documents: [] - }; - } - /** - * Retries must be done here in the replication primitives plugin, - * because the replication protocol itself has no - * error handling. - */ - var done = false; - var result = {}; - while (!done && !this.isStopped()) { - try { - result = await this.pull.handler(checkpoint, batchSize); - done = true; - } catch (err) { - var emitError = newRxError('RC_PULL', { - checkpoint, - errors: toArray(err).map(er => errorToPlainJson(er)), - direction: 'pull' - }); - this.subjects.error.next(emitError); - await awaitRetry(this.collection, ensureNotFalsy(this.retryTime)); - } - } - if (this.isStopped()) { - return { - checkpoint: null, - documents: [] - }; - } - var useResult = flatClone(result); - useResult.documents = handlePulledDocuments(this.collection, this.deletedField, useResult.documents); - useResult.documents = await Promise.all(useResult.documents.map(d => pullModifier(d))); - return useResult; - }, - masterWrite: async rows => { - if (!this.push) { - return []; - } - var done = false; - await runAsyncPluginHooks('preReplicationMasterWrite', { - rows, - collection: this.collection - }); - var useRowsOrNull = await Promise.all(rows.map(async row => { - 
row.newDocumentState = await pushModifier(row.newDocumentState); - if (row.newDocumentState === null) { - return null; - } - if (row.assumedMasterState) { - row.assumedMasterState = await pushModifier(row.assumedMasterState); - } - if (this.deletedField !== '_deleted') { - row.newDocumentState = swapDefaultDeletedTodeletedField(this.deletedField, row.newDocumentState); - if (row.assumedMasterState) { - row.assumedMasterState = swapDefaultDeletedTodeletedField(this.deletedField, row.assumedMasterState); - } - } - return row; - })); - var useRows = useRowsOrNull.filter(arrayFilterNotEmpty); - var result = null; - - // In case all the rows have been filtered and nothing has to be sent - if (useRows.length === 0) { - done = true; - result = []; - } - while (!done && !this.isStopped()) { - try { - result = await this.push.handler(useRows); - /** - * It is a common problem that people have wrongly behaving backend - * that do not return an array with the conflicts on push requests. - * So we run this check here to make it easier to debug. - * @link https://github.com/pubkey/rxdb/issues/4103 - */ - if (!Array.isArray(result)) { - throw newRxError('RC_PUSH_NO_AR', { - pushRows: rows, - direction: 'push', - args: { - result - } - }); - } - done = true; - } catch (err) { - var emitError = err.rxdb ? 
err : newRxError('RC_PUSH', { - pushRows: rows, - errors: toArray(err).map(er => errorToPlainJson(er)), - direction: 'push' - }); - this.subjects.error.next(emitError); - await awaitRetry(this.collection, ensureNotFalsy(this.retryTime)); - } - } - if (this.isStopped()) { - return []; - } - await runAsyncPluginHooks('preReplicationMasterWriteDocumentsHandle', { - result, - collection: this.collection - }); - var conflicts = handlePulledDocuments(this.collection, this.deletedField, ensureNotFalsy(result)); - return conflicts; - } - } - }); - this.subs.push(this.internalReplicationState.events.error.subscribe(err => { - this.subjects.error.next(err); - }), this.internalReplicationState.events.processed.down.subscribe(row => this.subjects.received.next(row.document)), this.internalReplicationState.events.processed.up.subscribe(writeToMasterRow => { - this.subjects.sent.next(writeToMasterRow.newDocumentState); - }), combineLatest([this.internalReplicationState.events.active.down, this.internalReplicationState.events.active.up]).subscribe(([down, up]) => { - var isActive = down || up; - this.subjects.active.next(isActive); - })); - if (this.pull && this.pull.stream$ && this.live) { - this.subs.push(this.pull.stream$.subscribe({ - next: ev => { - this.remoteEvents$.next(ev); - }, - error: err => { - this.subjects.error.next(err); - } - })); - } - - /** - * Non-live replications run once - * and then automatically get canceled. 
- */ - if (!this.live) { - await awaitRxStorageReplicationFirstInSync(this.internalReplicationState); - await awaitRxStorageReplicationInSync(this.internalReplicationState); - await this.cancel(); - } - this.callOnStart(); - }; - _proto.isStopped = function isStopped() { - if (this.subjects.canceled.getValue()) { - return true; - } - return false; - }; - _proto.awaitInitialReplication = async function awaitInitialReplication() { - await this.startPromise; - return awaitRxStorageReplicationFirstInSync(ensureNotFalsy(this.internalReplicationState)); - } - - /** - * Returns a promise that resolves when: - * - All local data is replicated with the remote - * - No replication cycle is running or in retry-state - * - * WARNING: USing this function directly in a multi-tab browser application - * is dangerous because only the leading instance will ever be replicated, - * so this promise will not resolve in the other tabs. - * For multi-tab support you should set and observe a flag in a local document. - */; - _proto.awaitInSync = async function awaitInSync() { - await this.startPromise; - await awaitRxStorageReplicationFirstInSync(ensureNotFalsy(this.internalReplicationState)); - - /** - * To reduce the amount of re-renders and make testing - * and to make the whole behavior more predictable, - * we await these things multiple times. - * For example the state might be in sync already and at the - * exact same time a pull.stream$ event comes in and we want to catch - * that in the same call to awaitInSync() instead of resolving - * while actually the state is not in sync. - */ - var t = 2; - while (t > 0) { - t--; - - /** - * Often awaitInSync() is called directly after a document write, - * like in the unit tests. - * So we first have to await the idleness to ensure that all RxChangeEvents - * are processed already. 
- */ - await this.collection.database.requestIdlePromise(); - await awaitRxStorageReplicationInSync(ensureNotFalsy(this.internalReplicationState)); - } - return true; - }; - _proto.reSync = function reSync() { - this.remoteEvents$.next('RESYNC'); - }; - _proto.emitEvent = function emitEvent(ev) { - this.remoteEvents$.next(ev); - }; - _proto.cancel = async function cancel() { - if (this.isStopped()) { - return PROMISE_RESOLVE_FALSE; - } - var promises = this.onCancel.map(fn => toPromise(fn())); - if (this.internalReplicationState) { - await cancelRxStorageReplication(this.internalReplicationState); - } - if (this.metaInstance) { - promises.push(ensureNotFalsy(this.internalReplicationState).checkpointQueue.then(() => ensureNotFalsy(this.metaInstance).close())); - } - this.subs.forEach(sub => sub.unsubscribe()); - this.subjects.canceled.next(true); - this.subjects.active.complete(); - this.subjects.canceled.complete(); - this.subjects.error.complete(); - this.subjects.received.complete(); - this.subjects.sent.complete(); - return Promise.all(promises); - }; - _proto.remove = async function remove() { - await ensureNotFalsy(this.metaInstance).remove(); - var metaInfo = await this.metaInfoPromise; - await this.cancel(); - await removeConnectedStorageFromCollection(this.collection, metaInfo.collectionName, metaInfo.schema); - }; - return RxReplicationState; -}(); -export function replicateRxCollection({ - replicationIdentifier, - collection, - deletedField = '_deleted', - pull, - push, - live = true, - retryTime = 1000 * 5, - waitForLeadership = true, - autoStart = true -}) { - addRxPlugin(RxDBLeaderElectionPlugin); - - /** - * It is a common error to forget to add these config - * objects. So we check here because it makes no sense - * to start a replication with neither push nor pull. 
- */ - if (!pull && !push) { - throw newRxError('UT3', { - collection: collection.name, - args: { - replicationIdentifier - } - }); - } - var replicationState = new RxReplicationState(replicationIdentifier, collection, deletedField, pull, push, live, retryTime, autoStart); - startReplicationOnLeaderShip(waitForLeadership, replicationState); - return replicationState; -} -export function startReplicationOnLeaderShip(waitForLeadership, replicationState) { - /** - * Always await this Promise to ensure that the current instance - * is leader when waitForLeadership=true - */ - var mustWaitForLeadership = waitForLeadership && replicationState.collection.database.multiInstance; - var waitTillRun = mustWaitForLeadership ? replicationState.collection.database.waitForLeadership() : PROMISE_RESOLVE_TRUE; - return waitTillRun.then(() => { - if (replicationState.isStopped()) { - return; - } - if (replicationState.autoStart) { - replicationState.start(); - } - }); -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication/index.js.map b/dist/esm/plugins/replication/index.js.map deleted file mode 100644 index 49c1dcccf32..00000000000 --- a/dist/esm/plugins/replication/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","names":["BehaviorSubject","combineLatest","filter","mergeMap","Subject","RxDBLeaderElectionPlugin","arrayFilterNotEmpty","ensureNotFalsy","errorToPlainJson","flatClone","getFromMapOrCreate","PROMISE_RESOLVE_FALSE","PROMISE_RESOLVE_TRUE","toArray","toPromise","awaitRxStorageReplicationFirstInSync","awaitRxStorageReplicationInSync","cancelRxStorageReplication","getRxReplicationMetaInstanceSchema","replicateRxStorageInstance","newRxError","awaitRetry","DEFAULT_MODIFIER","swapDefaultDeletedTodeletedField","handlePulledDocuments","addConnectedStorageToCollection","removeConnectedStorageFromCollection","addRxPlugin","hasEncryption","overwritable","runAsyncPluginHooks","REPLICATION_STATE_BY_COLLECTION","WeakMap","RxReplicationState","replicationIdentifier","collection","deletedField","pull","push","live","retryTime","autoStart","subs","subjects","received","sent","error","canceled","active","received$","asObservable","sent$","error$","canceled$","active$","onCancel","callOnStart","undefined","remoteEvents$","metaInfoPromise","metaInstanceCollectionName","database","hashFunction","name","join","metaInstanceSchema","schema","jsonSchema","collectionName","replicationStates","onDestroy","cancel","Object","keys","forEach","key","defineProperty","get","startPromise","Promise","res","_proto","prototype","start","isStopped","pullModifier","modifier","pushModifier","metaInfo","metaInstance","all","storage","createStorageInstance","databaseName","databaseInstanceToken","token","multiInstance","options","password","devMode","isDevMode","internalReplicationState","pushBatchSize","batchSize","pullBatchSize","initialCheckpoint","upstream","downstream","forkInstance","storageInstance","identifier","conflictHandler","replicationHandler","masterChangeStream$","pipe","_v","ev","useEv","documents","map","d","masterChangesSince","checkpoint","done","result","handler","err","emitError","errors","er","direction","next","useResult","masterWrite","rows","useRowsOr
Null","row","newDocumentState","assumedMasterState","useRows","length","Array","isArray","pushRows","args","rxdb","conflicts","events","subscribe","processed","down","document","up","writeToMasterRow","isActive","stream$","getValue","awaitInitialReplication","awaitInSync","t","requestIdlePromise","reSync","emitEvent","promises","fn","checkpointQueue","then","close","sub","unsubscribe","complete","remove","replicateRxCollection","waitForLeadership","replicationState","startReplicationOnLeaderShip","mustWaitForLeadership","waitTillRun"],"sources":["../../../../src/plugins/replication/index.ts"],"sourcesContent":["/**\n * This plugin contains the primitives to create\n * a RxDB client-server replication.\n * It is used in the other replication plugins\n * but also can be used as standalone with a custom replication handler.\n */\n\nimport {\n BehaviorSubject,\n combineLatest,\n filter,\n mergeMap,\n Observable,\n Subject,\n Subscription\n} from 'rxjs';\nimport type {\n ReplicationOptions,\n ReplicationPullHandlerResult,\n ReplicationPullOptions,\n ReplicationPushOptions,\n RxCollection,\n RxDocumentData,\n RxError,\n RxJsonSchema,\n RxReplicationPullStreamItem,\n RxReplicationWriteToMasterRow,\n RxStorageInstance,\n RxStorageInstanceReplicationState,\n RxStorageReplicationMeta,\n RxTypeError,\n WithDeleted\n} from '../../types/index.d.ts';\nimport { RxDBLeaderElectionPlugin } from '../leader-election/index.ts';\nimport {\n arrayFilterNotEmpty,\n ensureNotFalsy,\n errorToPlainJson,\n flatClone,\n getFromMapOrCreate,\n PROMISE_RESOLVE_FALSE,\n PROMISE_RESOLVE_TRUE,\n toArray,\n toPromise\n} from '../../plugins/utils/index.ts';\nimport {\n awaitRxStorageReplicationFirstInSync,\n awaitRxStorageReplicationInSync,\n cancelRxStorageReplication,\n getRxReplicationMetaInstanceSchema,\n replicateRxStorageInstance\n} from '../../replication-protocol/index.ts';\nimport { newRxError } from '../../rx-error.ts';\nimport {\n awaitRetry,\n DEFAULT_MODIFIER,\n 
swapDefaultDeletedTodeletedField,\n handlePulledDocuments\n} from './replication-helper.ts';\nimport {\n addConnectedStorageToCollection, removeConnectedStorageFromCollection\n} from '../../rx-database-internal-store.ts';\nimport { addRxPlugin } from '../../plugin.ts';\nimport { hasEncryption } from '../../rx-storage-helper.ts';\nimport { overwritable } from '../../overwritable.ts';\nimport {\n runAsyncPluginHooks\n} from '../../hooks.ts';\n\n\nexport const REPLICATION_STATE_BY_COLLECTION: WeakMap[]> = new WeakMap();\n\nexport class RxReplicationState {\n public readonly subs: Subscription[] = [];\n public readonly subjects = {\n received: new Subject>(), // all documents that are received from the endpoint\n sent: new Subject>(), // all documents that are send to the endpoint\n error: new Subject(), // all errors that are received from the endpoint, emits new Error() objects\n canceled: new BehaviorSubject(false), // true when the replication was canceled\n active: new BehaviorSubject(false) // true when something is running, false when not\n };\n\n readonly received$: Observable> = this.subjects.received.asObservable();\n readonly sent$: Observable> = this.subjects.sent.asObservable();\n readonly error$: Observable = this.subjects.error.asObservable();\n readonly canceled$: Observable = this.subjects.canceled.asObservable();\n readonly active$: Observable = this.subjects.active.asObservable();\n\n readonly metaInfoPromise: Promise<{ collectionName: string, schema: RxJsonSchema>> }>;\n\n public startPromise: Promise;\n\n public onCancel: (() => void)[] = [];\n\n constructor(\n /**\n * The identifier, used to flag revisions\n * and to identify which documents state came from the remote.\n */\n public readonly replicationIdentifier: string,\n public readonly collection: RxCollection,\n public readonly deletedField: string,\n public readonly pull?: ReplicationPullOptions,\n public readonly push?: ReplicationPushOptions,\n public readonly live?: boolean,\n public 
retryTime?: number,\n public autoStart?: boolean,\n ) {\n this.metaInfoPromise = (async () => {\n const metaInstanceCollectionName = 'rx-replication-meta-' + await collection.database.hashFunction([\n this.collection.name,\n this.replicationIdentifier\n ].join('-'));\n const metaInstanceSchema = getRxReplicationMetaInstanceSchema(\n this.collection.schema.jsonSchema,\n hasEncryption(this.collection.schema.jsonSchema)\n );\n return {\n collectionName: metaInstanceCollectionName,\n schema: metaInstanceSchema\n };\n })();\n const replicationStates = getFromMapOrCreate(\n REPLICATION_STATE_BY_COLLECTION,\n collection,\n () => []\n );\n replicationStates.push(this);\n\n // stop the replication when the collection gets destroyed\n this.collection.onDestroy.push(() => this.cancel());\n\n // create getters for the observables\n Object.keys(this.subjects).forEach(key => {\n Object.defineProperty(this, key + '$', {\n get: function () {\n return this.subjects[key].asObservable();\n }\n });\n });\n const startPromise = new Promise(res => {\n this.callOnStart = res;\n });\n this.startPromise = startPromise;\n }\n\n private callOnStart: () => void = undefined as any;\n\n public internalReplicationState?: RxStorageInstanceReplicationState;\n public metaInstance?: RxStorageInstance, any, {}, any>;\n public remoteEvents$: Subject> = new Subject();\n\n public async start(): Promise {\n if (this.isStopped()) {\n return;\n }\n\n // fill in defaults for pull & push\n const pullModifier = this.pull && this.pull.modifier ? this.pull.modifier : DEFAULT_MODIFIER;\n const pushModifier = this.push && this.push.modifier ? 
this.push.modifier : DEFAULT_MODIFIER;\n\n const database = this.collection.database;\n\n const metaInfo = await this.metaInfoPromise;\n\n const [metaInstance] = await Promise.all([\n this.collection.database.storage.createStorageInstance>({\n databaseName: database.name,\n collectionName: metaInfo.collectionName,\n databaseInstanceToken: database.token,\n multiInstance: database.multiInstance, // TODO is this always false?\n options: {},\n schema: metaInfo.schema,\n password: database.password,\n devMode: overwritable.isDevMode()\n }),\n addConnectedStorageToCollection(\n this.collection,\n metaInfo.collectionName,\n metaInfo.schema\n )\n ]);\n this.metaInstance = metaInstance;\n\n this.internalReplicationState = replicateRxStorageInstance({\n pushBatchSize: this.push && this.push.batchSize ? this.push.batchSize : 100,\n pullBatchSize: this.pull && this.pull.batchSize ? this.pull.batchSize : 100,\n initialCheckpoint: {\n upstream: this.push ? this.push.initialCheckpoint : undefined,\n downstream: this.pull ? 
this.pull.initialCheckpoint : undefined\n },\n forkInstance: this.collection.storageInstance,\n metaInstance: this.metaInstance,\n hashFunction: database.hashFunction,\n identifier: 'rxdbreplication' + this.replicationIdentifier,\n conflictHandler: this.collection.conflictHandler,\n replicationHandler: {\n masterChangeStream$: this.remoteEvents$.asObservable().pipe(\n filter(_v => !!this.pull),\n mergeMap(async (ev) => {\n if (ev === 'RESYNC') {\n return ev;\n }\n const useEv = flatClone(ev);\n useEv.documents = handlePulledDocuments(this.collection, this.deletedField, useEv.documents);\n useEv.documents = await Promise.all(\n useEv.documents.map(d => pullModifier(d))\n );\n return useEv;\n })\n ),\n masterChangesSince: async (\n checkpoint: CheckpointType | undefined,\n batchSize: number\n ) => {\n if (!this.pull) {\n return {\n checkpoint: null,\n documents: []\n };\n }\n /**\n * Retries must be done here in the replication primitives plugin,\n * because the replication protocol itself has no\n * error handling.\n */\n let done = false;\n let result: ReplicationPullHandlerResult = {} as any;\n while (!done && !this.isStopped()) {\n try {\n result = await this.pull.handler(\n checkpoint,\n batchSize\n );\n done = true;\n } catch (err: any | Error | Error[]) {\n const emitError = newRxError('RC_PULL', {\n checkpoint,\n errors: toArray(err).map(er => errorToPlainJson(er)),\n direction: 'pull'\n });\n this.subjects.error.next(emitError);\n await awaitRetry(this.collection, ensureNotFalsy(this.retryTime));\n }\n }\n\n if (this.isStopped()) {\n return {\n checkpoint: null,\n documents: []\n };\n }\n\n const useResult = flatClone(result);\n useResult.documents = handlePulledDocuments(this.collection, this.deletedField, useResult.documents);\n useResult.documents = await Promise.all(\n useResult.documents.map(d => pullModifier(d))\n );\n return useResult;\n },\n masterWrite: async (\n rows: RxReplicationWriteToMasterRow[]\n ) => {\n if (!this.push) {\n return [];\n }\n 
let done = false;\n\n await runAsyncPluginHooks('preReplicationMasterWrite', {\n rows,\n collection: this.collection\n });\n\n const useRowsOrNull = await Promise.all(\n rows.map(async (row) => {\n row.newDocumentState = await pushModifier(row.newDocumentState);\n if (row.newDocumentState === null) {\n return null;\n }\n if (row.assumedMasterState) {\n row.assumedMasterState = await pushModifier(row.assumedMasterState);\n }\n if (this.deletedField !== '_deleted') {\n row.newDocumentState = swapDefaultDeletedTodeletedField(this.deletedField, row.newDocumentState) as any;\n if (row.assumedMasterState) {\n row.assumedMasterState = swapDefaultDeletedTodeletedField(this.deletedField, row.assumedMasterState) as any;\n }\n }\n return row;\n })\n );\n const useRows: RxReplicationWriteToMasterRow[] = useRowsOrNull.filter(arrayFilterNotEmpty);\n\n let result: WithDeleted[] = null as any;\n\n // In case all the rows have been filtered and nothing has to be sent\n if (useRows.length === 0) {\n done = true;\n result = [];\n }\n\n while (!done && !this.isStopped()) {\n try {\n result = await this.push.handler(useRows);\n /**\n * It is a common problem that people have wrongly behaving backend\n * that do not return an array with the conflicts on push requests.\n * So we run this check here to make it easier to debug.\n * @link https://github.com/pubkey/rxdb/issues/4103\n */\n if (!Array.isArray(result)) {\n throw newRxError(\n 'RC_PUSH_NO_AR',\n {\n pushRows: rows,\n direction: 'push',\n args: { result }\n }\n );\n }\n done = true;\n } catch (err: any | Error | Error[] | RxError) {\n const emitError = (err as RxError).rxdb ? 
err : newRxError('RC_PUSH', {\n pushRows: rows,\n errors: toArray(err).map(er => errorToPlainJson(er)),\n direction: 'push'\n });\n this.subjects.error.next(emitError);\n await awaitRetry(this.collection, ensureNotFalsy(this.retryTime));\n }\n }\n if (this.isStopped()) {\n return [];\n }\n\n await runAsyncPluginHooks('preReplicationMasterWriteDocumentsHandle', {\n result,\n collection: this.collection\n });\n\n const conflicts = handlePulledDocuments(this.collection, this.deletedField, ensureNotFalsy(result));\n return conflicts;\n }\n }\n });\n this.subs.push(\n this.internalReplicationState.events.error.subscribe(err => {\n this.subjects.error.next(err);\n }),\n this.internalReplicationState.events.processed.down\n .subscribe(row => this.subjects.received.next(row.document as any)),\n this.internalReplicationState.events.processed.up\n .subscribe(writeToMasterRow => {\n this.subjects.sent.next(writeToMasterRow.newDocumentState);\n }),\n combineLatest([\n this.internalReplicationState.events.active.down,\n this.internalReplicationState.events.active.up\n ]).subscribe(([down, up]) => {\n const isActive = down || up;\n this.subjects.active.next(isActive);\n })\n );\n\n if (\n this.pull &&\n this.pull.stream$ &&\n this.live\n ) {\n this.subs.push(\n this.pull.stream$.subscribe({\n next: ev => {\n this.remoteEvents$.next(ev);\n },\n error: err => {\n this.subjects.error.next(err);\n }\n })\n );\n }\n\n /**\n * Non-live replications run once\n * and then automatically get canceled.\n */\n if (!this.live) {\n await awaitRxStorageReplicationFirstInSync(this.internalReplicationState);\n await awaitRxStorageReplicationInSync(this.internalReplicationState);\n await this.cancel();\n }\n this.callOnStart();\n }\n\n isStopped(): boolean {\n if (this.subjects.canceled.getValue()) {\n return true;\n }\n return false;\n }\n\n async awaitInitialReplication(): Promise {\n await this.startPromise;\n return awaitRxStorageReplicationFirstInSync(\n 
ensureNotFalsy(this.internalReplicationState)\n );\n }\n\n /**\n * Returns a promise that resolves when:\n * - All local data is replicated with the remote\n * - No replication cycle is running or in retry-state\n *\n * WARNING: USing this function directly in a multi-tab browser application\n * is dangerous because only the leading instance will ever be replicated,\n * so this promise will not resolve in the other tabs.\n * For multi-tab support you should set and observe a flag in a local document.\n */\n async awaitInSync(): Promise {\n await this.startPromise;\n await awaitRxStorageReplicationFirstInSync(ensureNotFalsy(this.internalReplicationState));\n\n /**\n * To reduce the amount of re-renders and make testing\n * and to make the whole behavior more predictable,\n * we await these things multiple times.\n * For example the state might be in sync already and at the\n * exact same time a pull.stream$ event comes in and we want to catch\n * that in the same call to awaitInSync() instead of resolving\n * while actually the state is not in sync.\n */\n let t = 2;\n while (t > 0) {\n t--;\n\n /**\n * Often awaitInSync() is called directly after a document write,\n * like in the unit tests.\n * So we first have to await the idleness to ensure that all RxChangeEvents\n * are processed already.\n */\n await this.collection.database.requestIdlePromise();\n await awaitRxStorageReplicationInSync(ensureNotFalsy(this.internalReplicationState));\n }\n\n return true;\n }\n\n reSync() {\n this.remoteEvents$.next('RESYNC');\n }\n emitEvent(ev: RxReplicationPullStreamItem) {\n this.remoteEvents$.next(ev);\n }\n\n async cancel(): Promise {\n if (this.isStopped()) {\n return PROMISE_RESOLVE_FALSE;\n }\n\n const promises: Promise[] = this.onCancel.map(fn => toPromise(fn()));\n\n if (this.internalReplicationState) {\n await cancelRxStorageReplication(this.internalReplicationState);\n }\n if (this.metaInstance) {\n promises.push(\n 
ensureNotFalsy(this.internalReplicationState).checkpointQueue\n .then(() => ensureNotFalsy(this.metaInstance).close())\n );\n }\n\n this.subs.forEach(sub => sub.unsubscribe());\n this.subjects.canceled.next(true);\n\n this.subjects.active.complete();\n this.subjects.canceled.complete();\n this.subjects.error.complete();\n this.subjects.received.complete();\n this.subjects.sent.complete();\n\n return Promise.all(promises);\n }\n\n async remove() {\n await ensureNotFalsy(this.metaInstance).remove();\n const metaInfo = await this.metaInfoPromise;\n await this.cancel();\n await removeConnectedStorageFromCollection(\n this.collection,\n metaInfo.collectionName,\n metaInfo.schema\n );\n }\n}\n\n\nexport function replicateRxCollection(\n {\n replicationIdentifier,\n collection,\n deletedField = '_deleted',\n pull,\n push,\n live = true,\n retryTime = 1000 * 5,\n waitForLeadership = true,\n autoStart = true,\n }: ReplicationOptions\n): RxReplicationState {\n addRxPlugin(RxDBLeaderElectionPlugin);\n\n /**\n * It is a common error to forget to add these config\n * objects. So we check here because it makes no sense\n * to start a replication with neither push nor pull.\n */\n if (!pull && !push) {\n throw newRxError('UT3', {\n collection: collection.name,\n args: {\n replicationIdentifier\n }\n });\n }\n\n const replicationState = new RxReplicationState(\n replicationIdentifier,\n collection,\n deletedField,\n pull,\n push,\n live,\n retryTime,\n autoStart\n );\n\n\n startReplicationOnLeaderShip(waitForLeadership, replicationState);\n return replicationState as any;\n}\n\n\nexport function startReplicationOnLeaderShip(\n waitForLeadership: boolean,\n replicationState: RxReplicationState\n) {\n /**\n * Always await this Promise to ensure that the current instance\n * is leader when waitForLeadership=true\n */\n const mustWaitForLeadership = waitForLeadership && replicationState.collection.database.multiInstance;\n const waitTillRun: Promise = mustWaitForLeadership ? 
replicationState.collection.database.waitForLeadership() : PROMISE_RESOLVE_TRUE;\n return waitTillRun.then(() => {\n if (replicationState.isStopped()) {\n return;\n }\n if (replicationState.autoStart) {\n replicationState.start();\n }\n });\n}\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;;AAEA,SACIA,eAAe,EACfC,aAAa,EACbC,MAAM,EACNC,QAAQ,EAERC,OAAO,QAEJ,MAAM;AAkBb,SAASC,wBAAwB,QAAQ,6BAA6B;AACtE,SACIC,mBAAmB,EACnBC,cAAc,EACdC,gBAAgB,EAChBC,SAAS,EACTC,kBAAkB,EAClBC,qBAAqB,EACrBC,oBAAoB,EACpBC,OAAO,EACPC,SAAS,QACN,8BAA8B;AACrC,SACIC,oCAAoC,EACpCC,+BAA+B,EAC/BC,0BAA0B,EAC1BC,kCAAkC,EAClCC,0BAA0B,QACvB,qCAAqC;AAC5C,SAASC,UAAU,QAAQ,mBAAmB;AAC9C,SACIC,UAAU,EACVC,gBAAgB,EAChBC,gCAAgC,EAChCC,qBAAqB,QAClB,yBAAyB;AAChC,SACIC,+BAA+B,EAAEC,oCAAoC,QAClE,qCAAqC;AAC5C,SAASC,WAAW,QAAQ,iBAAiB;AAC7C,SAASC,aAAa,QAAQ,4BAA4B;AAC1D,SAASC,YAAY,QAAQ,uBAAuB;AACpD,SACIC,mBAAmB,QAChB,gBAAgB;AAGvB,OAAO,IAAMC,+BAAsF,GAAG,IAAIC,OAAO,CAAC,CAAC;AAEnH,WAAaC,kBAAkB;EAsB3B,SAAAA;EACI;AACR;AACA;AACA;EACwBC,qBAA6B,EAC7BC,UAAmC,EACnCC,YAAoB,EACpBC,IAAwD,EACxDC,IAAwC,EACxCC,IAAc,EACvBC,SAAkB,EAClBC,SAAmB,EAC5B;IAAA,KAlCcC,IAAI,GAAmB,EAAE;IAAA,KACzBC,QAAQ,GAAG;MACvBC,QAAQ,EAAE,IAAIxC,OAAO,CAA4B,CAAC;MAAE;MACpDyC,IAAI,EAAE,IAAIzC,OAAO,CAAyB,CAAC;MAAE;MAC7C0C,KAAK,EAAE,IAAI1C,OAAO,CAAwB,CAAC;MAAE;MAC7C2C,QAAQ,EAAE,IAAI/C,eAAe,CAAU,KAAK,CAAC;MAAE;MAC/CgD,MAAM,EAAE,IAAIhD,eAAe,CAAU,KAAK,CAAC,CAAC;IAChD,CAAC;IAAA,KAEQiD,SAAS,GAA0C,IAAI,CAACN,QAAQ,CAACC,QAAQ,CAACM,YAAY,CAAC,CAAC;IAAA,KACxFC,KAAK,GAAuC,IAAI,CAACR,QAAQ,CAACE,IAAI,CAACK,YAAY,CAAC,CAAC;IAAA,KAC7EE,MAAM,GAAsC,IAAI,CAACT,QAAQ,CAACG,KAAK,CAACI,YAAY,CAAC,CAAC;IAAA,KAC9EG,SAAS,GAAoB,IAAI,CAACV,QAAQ,CAACI,QAAQ,CAACG,YAAY,CAAC,CAAC;IAAA,KAClEI,OAAO,GAAwB,IAAI,CAACX,QAAQ,CAACK,MAAM,CAACE,YAAY,CAAC,CAAC;IAAA,KAMpEK,QAAQ,GAAmB,EAAE;IAAA,KAsD5BC,WAAW,GAAeC,SAAS;IAAA,KAIpCC,aAAa,GAAoE,IAAItD,OAAO,CAAC,CAAC;IAAA,KAnDjF8B,qBAA6B,GAA7BA,qBAA6B;IAAA,KAC7BC,UAAmC,GAAnCA,UAAmC;IAAA,KACnCC,YAAoB,GAApBA,YAAoB;IAAA,KACpBC,IAAwD,GAAxDA,IAAwD;IAAA,KACxDC,IAAwC,GAAxCA,IAAw
C;IAAA,KACxCC,IAAc,GAAdA,IAAc;IAAA,KACvBC,SAAkB,GAAlBA,SAAkB;IAAA,KAClBC,SAAmB,GAAnBA,SAAmB;IAE1B,IAAI,CAACkB,eAAe,GAAG,CAAC,YAAY;MAChC,IAAMC,0BAA0B,GAAG,sBAAsB,IAAG,MAAMzB,UAAU,CAAC0B,QAAQ,CAACC,YAAY,CAAC,CAC/F,IAAI,CAAC3B,UAAU,CAAC4B,IAAI,EACpB,IAAI,CAAC7B,qBAAqB,CAC7B,CAAC8B,IAAI,CAAC,GAAG,CAAC,CAAC;MACZ,IAAMC,kBAAkB,GAAG/C,kCAAkC,CACzD,IAAI,CAACiB,UAAU,CAAC+B,MAAM,CAACC,UAAU,EACjCvC,aAAa,CAAC,IAAI,CAACO,UAAU,CAAC+B,MAAM,CAACC,UAAU,CACnD,CAAC;MACD,OAAO;QACHC,cAAc,EAAER,0BAA0B;QAC1CM,MAAM,EAAED;MACZ,CAAC;IACL,CAAC,EAAE,CAAC;IACJ,IAAMI,iBAAiB,GAAG3D,kBAAkB,CACxCqB,+BAA+B,EAC/BI,UAAU,EACV,MAAM,EACV,CAAC;IACDkC,iBAAiB,CAAC/B,IAAI,CAAC,IAAI,CAAC;;IAE5B;IACA,IAAI,CAACH,UAAU,CAACmC,SAAS,CAAChC,IAAI,CAAC,MAAM,IAAI,CAACiC,MAAM,CAAC,CAAC,CAAC;;IAEnD;IACAC,MAAM,CAACC,IAAI,CAAC,IAAI,CAAC9B,QAAQ,CAAC,CAAC+B,OAAO,CAACC,GAAG,IAAI;MACtCH,MAAM,CAACI,cAAc,CAAC,IAAI,EAAED,GAAG,GAAG,GAAG,EAAE;QACnCE,GAAG,EAAE,SAAAA,CAAA,EAAY;UACb,OAAO,IAAI,CAAClC,QAAQ,CAACgC,GAAG,CAAC,CAACzB,YAAY,CAAC,CAAC;QAC5C;MACJ,CAAC,CAAC;IACN,CAAC,CAAC;IACF,IAAM4B,YAAY,GAAG,IAAIC,OAAO,CAAOC,GAAG,IAAI;MAC1C,IAAI,CAACxB,WAAW,GAAGwB,GAAG;IAC1B,CAAC,CAAC;IACF,IAAI,CAACF,YAAY,GAAGA,YAAY;EACpC;EAAC,IAAAG,MAAA,GAAAhD,kBAAA,CAAAiD,SAAA;EAAAD,MAAA,CAQYE,KAAK,GAAlB,eAAAA,MAAA,EAAoC;IAChC,IAAI,IAAI,CAACC,SAAS,CAAC,CAAC,EAAE;MAClB;IACJ;;IAEA;IACA,IAAMC,YAAY,GAAG,IAAI,CAAChD,IAAI,IAAI,IAAI,CAACA,IAAI,CAACiD,QAAQ,GAAG,IAAI,CAACjD,IAAI,CAACiD,QAAQ,GAAGhE,gBAAgB;IAC5F,IAAMiE,YAAY,GAAG,IAAI,CAACjD,IAAI,IAAI,IAAI,CAACA,IAAI,CAACgD,QAAQ,GAAG,IAAI,CAAChD,IAAI,CAACgD,QAAQ,GAAGhE,gBAAgB;IAE5F,IAAMuC,QAAQ,GAAG,IAAI,CAAC1B,UAAU,CAAC0B,QAAQ;IAEzC,IAAM2B,QAAQ,GAAG,MAAM,IAAI,CAAC7B,eAAe;IAE3C,IAAM,CAAC8B,YAAY,CAAC,GAAG,MAAMV,OAAO,CAACW,GAAG,CAAC,CACrC,IAAI,CAACvD,UAAU,CAAC0B,QAAQ,CAAC8B,OAAO,CAACC,qBAAqB,CAAsD;MACxGC,YAAY,EAAEhC,QAAQ,CAACE,IAAI;MAC3BK,cAAc,EAAEoB,QAAQ,CAACpB,cAAc;MACvC0B,qBAAqB,EAAEjC,QAAQ,CAACkC,KAAK;MACrCC,aAAa,EAAEnC,QAAQ,CAACmC,aAAa;MAAE;MACvCC,OAAO,EAAE,CAAC,CAAC;MACX/B,MAAM,EAAEsB,QAAQ,CAACtB,MAAM;MACvBgC,QAAQ,EAA
ErC,QAAQ,CAACqC,QAAQ;MAC3BC,OAAO,EAAEtE,YAAY,CAACuE,SAAS,CAAC;IACpC,CAAC,CAAC,EACF3E,+BAA+B,CAC3B,IAAI,CAACU,UAAU,EACfqD,QAAQ,CAACpB,cAAc,EACvBoB,QAAQ,CAACtB,MACb,CAAC,CACJ,CAAC;IACF,IAAI,CAACuB,YAAY,GAAGA,YAAY;IAEhC,IAAI,CAACY,wBAAwB,GAAGlF,0BAA0B,CAAC;MACvDmF,aAAa,EAAE,IAAI,CAAChE,IAAI,IAAI,IAAI,CAACA,IAAI,CAACiE,SAAS,GAAG,IAAI,CAACjE,IAAI,CAACiE,SAAS,GAAG,GAAG;MAC3EC,aAAa,EAAE,IAAI,CAACnE,IAAI,IAAI,IAAI,CAACA,IAAI,CAACkE,SAAS,GAAG,IAAI,CAAClE,IAAI,CAACkE,SAAS,GAAG,GAAG;MAC3EE,iBAAiB,EAAE;QACfC,QAAQ,EAAE,IAAI,CAACpE,IAAI,GAAG,IAAI,CAACA,IAAI,CAACmE,iBAAiB,GAAGhD,SAAS;QAC7DkD,UAAU,EAAE,IAAI,CAACtE,IAAI,GAAG,IAAI,CAACA,IAAI,CAACoE,iBAAiB,GAAGhD;MAC1D,CAAC;MACDmD,YAAY,EAAE,IAAI,CAACzE,UAAU,CAAC0E,eAAe;MAC7CpB,YAAY,EAAE,IAAI,CAACA,YAAY;MAC/B3B,YAAY,EAAED,QAAQ,CAACC,YAAY;MACnCgD,UAAU,EAAE,iBAAiB,GAAG,IAAI,CAAC5E,qBAAqB;MAC1D6E,eAAe,EAAE,IAAI,CAAC5E,UAAU,CAAC4E,eAAe;MAChDC,kBAAkB,EAAE;QAChBC,mBAAmB,EAAE,IAAI,CAACvD,aAAa,CAACR,YAAY,CAAC,CAAC,CAACgE,IAAI,CACvDhH,MAAM,CAACiH,EAAE,IAAI,CAAC,CAAC,IAAI,CAAC9E,IAAI,CAAC,EACzBlC,QAAQ,CAAC,MAAOiH,EAAE,IAAK;UACnB,IAAIA,EAAE,KAAK,QAAQ,EAAE;YACjB,OAAOA,EAAE;UACb;UACA,IAAMC,KAAK,GAAG5G,SAAS,CAAC2G,EAAE,CAAC;UAC3BC,KAAK,CAACC,SAAS,GAAG9F,qBAAqB,CAAC,IAAI,CAACW,UAAU,EAAE,IAAI,CAACC,YAAY,EAAEiF,KAAK,CAACC,SAAS,CAAC;UAC5FD,KAAK,CAACC,SAAS,GAAG,MAAMvC,OAAO,CAACW,GAAG,CAC/B2B,KAAK,CAACC,SAAS,CAACC,GAAG,CAACC,CAAC,IAAInC,YAAY,CAACmC,CAAC,CAAC,CAC5C,CAAC;UACD,OAAOH,KAAK;QAChB,CAAC,CACL,CAAC;QACDI,kBAAkB,EAAE,MAAAA,CAChBC,UAAsC,EACtCnB,SAAiB,KAChB;UACD,IAAI,CAAC,IAAI,CAAClE,IAAI,EAAE;YACZ,OAAO;cACHqF,UAAU,EAAE,IAAI;cAChBJ,SAAS,EAAE;YACf,CAAC;UACL;UACA;AACpB;AACA;AACA;AACA;UACoB,IAAIK,IAAI,GAAG,KAAK;UAChB,IAAIC,MAA+D,GAAG,CAAC,CAAQ;UAC/E,OAAO,CAACD,IAAI,IAAI,CAAC,IAAI,CAACvC,SAAS,CAAC,CAAC,EAAE;YAC/B,IAAI;cACAwC,MAAM,GAAG,MAAM,IAAI,CAACvF,IAAI,CAACwF,OAAO,CAC5BH,UAAU,EACVnB,SACJ,CAAC;cACDoB,IAAI,GAAG,IAAI;YACf,CAAC,CAAC,OAAOG,GAA0B,EAAE;cACjC,IAAMC,SAAS,GAAG3G,UAAU,CAAC,SAAS,EAAE;gBACpCsG,UAAU;gBACVM,MAAM,EAAEnH,OAAO,CAACiH,GAAG,CAAC,CAACP,GA
AG,CAACU,EAAE,IAAIzH,gBAAgB,CAACyH,EAAE,CAAC,CAAC;gBACpDC,SAAS,EAAE;cACf,CAAC,CAAC;cACF,IAAI,CAACvF,QAAQ,CAACG,KAAK,CAACqF,IAAI,CAACJ,SAAS,CAAC;cACnC,MAAM1G,UAAU,CAAC,IAAI,CAACc,UAAU,EAAE5B,cAAc,CAAC,IAAI,CAACiC,SAAS,CAAC,CAAC;YACrE;UACJ;UAEA,IAAI,IAAI,CAAC4C,SAAS,CAAC,CAAC,EAAE;YAClB,OAAO;cACHsC,UAAU,EAAE,IAAI;cAChBJ,SAAS,EAAE;YACf,CAAC;UACL;UAEA,IAAMc,SAAS,GAAG3H,SAAS,CAACmH,MAAM,CAAC;UACnCQ,SAAS,CAACd,SAAS,GAAG9F,qBAAqB,CAAC,IAAI,CAACW,UAAU,EAAE,IAAI,CAACC,YAAY,EAAEgG,SAAS,CAACd,SAAS,CAAC;UACpGc,SAAS,CAACd,SAAS,GAAG,MAAMvC,OAAO,CAACW,GAAG,CACnC0C,SAAS,CAACd,SAAS,CAACC,GAAG,CAACC,CAAC,IAAInC,YAAY,CAACmC,CAAC,CAAC,CAChD,CAAC;UACD,OAAOY,SAAS;QACpB,CAAC;QACDC,WAAW,EAAE,MACTC,IAAgD,IAC/C;UACD,IAAI,CAAC,IAAI,CAAChG,IAAI,EAAE;YACZ,OAAO,EAAE;UACb;UACA,IAAIqF,IAAI,GAAG,KAAK;UAEhB,MAAM7F,mBAAmB,CAAC,2BAA2B,EAAE;YACnDwG,IAAI;YACJnG,UAAU,EAAE,IAAI,CAACA;UACrB,CAAC,CAAC;UAEF,IAAMoG,aAAa,GAAG,MAAMxD,OAAO,CAACW,GAAG,CACnC4C,IAAI,CAACf,GAAG,CAAC,MAAOiB,GAAG,IAAK;YACpBA,GAAG,CAACC,gBAAgB,GAAG,MAAMlD,YAAY,CAACiD,GAAG,CAACC,gBAAgB,CAAC;YAC/D,IAAID,GAAG,CAACC,gBAAgB,KAAK,IAAI,EAAE;cAC/B,OAAO,IAAI;YACf;YACA,IAAID,GAAG,CAACE,kBAAkB,EAAE;cACxBF,GAAG,CAACE,kBAAkB,GAAG,MAAMnD,YAAY,CAACiD,GAAG,CAACE,kBAAkB,CAAC;YACvE;YACA,IAAI,IAAI,CAACtG,YAAY,KAAK,UAAU,EAAE;cAClCoG,GAAG,CAACC,gBAAgB,GAAGlH,gCAAgC,CAAC,IAAI,CAACa,YAAY,EAAEoG,GAAG,CAACC,gBAAgB,CAAQ;cACvG,IAAID,GAAG,CAACE,kBAAkB,EAAE;gBACxBF,GAAG,CAACE,kBAAkB,GAAGnH,gCAAgC,CAAC,IAAI,CAACa,YAAY,EAAEoG,GAAG,CAACE,kBAAkB,CAAQ;cAC/G;YACJ;YACA,OAAOF,GAAG;UACd,CAAC,CACL,CAAC;UACD,IAAMG,OAAmD,GAAGJ,aAAa,CAACrI,MAAM,CAACI,mBAAmB,CAAC;UAErG,IAAIsH,MAAgC,GAAG,IAAW;;UAElD;UACA,IAAIe,OAAO,CAACC,MAAM,KAAK,CAAC,EAAE;YACtBjB,IAAI,GAAG,IAAI;YACXC,MAAM,GAAG,EAAE;UACf;UAEA,OAAO,CAACD,IAAI,IAAI,CAAC,IAAI,CAACvC,SAAS,CAAC,CAAC,EAAE;YAC/B,IAAI;cACAwC,MAAM,GAAG,MAAM,IAAI,CAACtF,IAAI,CAACuF,OAAO,CAACc,OAAO,CAAC;cACzC;AAC5B;AACA;AACA;AACA;AACA;cAC4B,IAAI,CAACE,KAAK,CAACC,OAAO,CAAClB,MAAM,CAAC,EAAE;gBACxB,MAAMxG,UAAU,CACZ,eAAe,EACf;kBACI2H,QAAQ,EAAET,IAAI;kBACdJ,SA
AS,EAAE,MAAM;kBACjBc,IAAI,EAAE;oBAAEpB;kBAAO;gBACnB,CACJ,CAAC;cACL;cACAD,IAAI,GAAG,IAAI;YACf,CAAC,CAAC,OAAOG,GAAoC,EAAE;cAC3C,IAAMC,SAAS,GAAID,GAAG,CAAamB,IAAI,GAAGnB,GAAG,GAAG1G,UAAU,CAAC,SAAS,EAAE;gBAClE2H,QAAQ,EAAET,IAAI;gBACdN,MAAM,EAAEnH,OAAO,CAACiH,GAAG,CAAC,CAACP,GAAG,CAACU,EAAE,IAAIzH,gBAAgB,CAACyH,EAAE,CAAC,CAAC;gBACpDC,SAAS,EAAE;cACf,CAAC,CAAC;cACF,IAAI,CAACvF,QAAQ,CAACG,KAAK,CAACqF,IAAI,CAACJ,SAAS,CAAC;cACnC,MAAM1G,UAAU,CAAC,IAAI,CAACc,UAAU,EAAE5B,cAAc,CAAC,IAAI,CAACiC,SAAS,CAAC,CAAC;YACrE;UACJ;UACA,IAAI,IAAI,CAAC4C,SAAS,CAAC,CAAC,EAAE;YAClB,OAAO,EAAE;UACb;UAEA,MAAMtD,mBAAmB,CAAC,0CAA0C,EAAE;YAClE8F,MAAM;YACNzF,UAAU,EAAE,IAAI,CAACA;UACrB,CAAC,CAAC;UAEF,IAAM+G,SAAS,GAAG1H,qBAAqB,CAAC,IAAI,CAACW,UAAU,EAAE,IAAI,CAACC,YAAY,EAAE7B,cAAc,CAACqH,MAAM,CAAC,CAAC;UACnG,OAAOsB,SAAS;QACpB;MACJ;IACJ,CAAC,CAAC;IACF,IAAI,CAACxG,IAAI,CAACJ,IAAI,CACV,IAAI,CAAC+D,wBAAwB,CAAC8C,MAAM,CAACrG,KAAK,CAACsG,SAAS,CAACtB,GAAG,IAAI;MACxD,IAAI,CAACnF,QAAQ,CAACG,KAAK,CAACqF,IAAI,CAACL,GAAG,CAAC;IACjC,CAAC,CAAC,EACF,IAAI,CAACzB,wBAAwB,CAAC8C,MAAM,CAACE,SAAS,CAACC,IAAI,CAC9CF,SAAS,CAACZ,GAAG,IAAI,IAAI,CAAC7F,QAAQ,CAACC,QAAQ,CAACuF,IAAI,CAACK,GAAG,CAACe,QAAe,CAAC,CAAC,EACvE,IAAI,CAAClD,wBAAwB,CAAC8C,MAAM,CAACE,SAAS,CAACG,EAAE,CAC5CJ,SAAS,CAACK,gBAAgB,IAAI;MAC3B,IAAI,CAAC9G,QAAQ,CAACE,IAAI,CAACsF,IAAI,CAACsB,gBAAgB,CAAChB,gBAAgB,CAAC;IAC9D,CAAC,CAAC,EACNxI,aAAa,CAAC,CACV,IAAI,CAACoG,wBAAwB,CAAC8C,MAAM,CAACnG,MAAM,CAACsG,IAAI,EAChD,IAAI,CAACjD,wBAAwB,CAAC8C,MAAM,CAACnG,MAAM,CAACwG,EAAE,CACjD,CAAC,CAACJ,SAAS,CAAC,CAAC,CAACE,IAAI,EAAEE,EAAE,CAAC,KAAK;MACzB,IAAME,QAAQ,GAAGJ,IAAI,IAAIE,EAAE;MAC3B,IAAI,CAAC7G,QAAQ,CAACK,MAAM,CAACmF,IAAI,CAACuB,QAAQ,CAAC;IACvC,CAAC,CACL,CAAC;IAED,IACI,IAAI,CAACrH,IAAI,IACT,IAAI,CAACA,IAAI,CAACsH,OAAO,IACjB,IAAI,CAACpH,IAAI,EACX;MACE,IAAI,CAACG,IAAI,CAACJ,IAAI,CACV,IAAI,CAACD,IAAI,CAACsH,OAAO,CAACP,SAAS,CAAC;QACxBjB,IAAI,EAAEf,EAAE,IAAI;UACR,IAAI,CAAC1D,aAAa,CAACyE,IAAI,CAACf,EAAE,CAAC;QAC/B,CAAC;QACDtE,KAAK,EAAEgF,GAAG,IAAI;UACV,IAAI,CAACnF,QAAQ,CAACG,KAAK,CAACqF
,IAAI,CAACL,GAAG,CAAC;QACjC;MACJ,CAAC,CACL,CAAC;IACL;;IAEA;AACR;AACA;AACA;IACQ,IAAI,CAAC,IAAI,CAACvF,IAAI,EAAE;MACZ,MAAMxB,oCAAoC,CAAC,IAAI,CAACsF,wBAAwB,CAAC;MACzE,MAAMrF,+BAA+B,CAAC,IAAI,CAACqF,wBAAwB,CAAC;MACpE,MAAM,IAAI,CAAC9B,MAAM,CAAC,CAAC;IACvB;IACA,IAAI,CAACf,WAAW,CAAC,CAAC;EACtB,CAAC;EAAAyB,MAAA,CAEDG,SAAS,GAAT,SAAAA,UAAA,EAAqB;IACjB,IAAI,IAAI,CAACzC,QAAQ,CAACI,QAAQ,CAAC6G,QAAQ,CAAC,CAAC,EAAE;MACnC,OAAO,IAAI;IACf;IACA,OAAO,KAAK;EAChB,CAAC;EAAA3E,MAAA,CAEK4E,uBAAuB,GAA7B,eAAAA,wBAAA,EAA+C;IAC3C,MAAM,IAAI,CAAC/E,YAAY;IACvB,OAAO/D,oCAAoC,CACvCR,cAAc,CAAC,IAAI,CAAC8F,wBAAwB,CAChD,CAAC;EACL;;EAEA;AACJ;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,KATI;EAAApB,MAAA,CAUM6E,WAAW,GAAjB,eAAAA,YAAA,EAAmC;IAC/B,MAAM,IAAI,CAAChF,YAAY;IACvB,MAAM/D,oCAAoC,CAACR,cAAc,CAAC,IAAI,CAAC8F,wBAAwB,CAAC,CAAC;;IAEzF;AACR;AACA;AACA;AACA;AACA;AACA;AACA;AACA;IACQ,IAAI0D,CAAC,GAAG,CAAC;IACT,OAAOA,CAAC,GAAG,CAAC,EAAE;MACVA,CAAC,EAAE;;MAEH;AACZ;AACA;AACA;AACA;AACA;MACY,MAAM,IAAI,CAAC5H,UAAU,CAAC0B,QAAQ,CAACmG,kBAAkB,CAAC,CAAC;MACnD,MAAMhJ,+BAA+B,CAACT,cAAc,CAAC,IAAI,CAAC8F,wBAAwB,CAAC,CAAC;IACxF;IAEA,OAAO,IAAI;EACf,CAAC;EAAApB,MAAA,CAEDgF,MAAM,GAAN,SAAAA,OAAA,EAAS;IACL,IAAI,CAACvG,aAAa,CAACyE,IAAI,CAAC,QAAQ,CAAC;EACrC,CAAC;EAAAlD,MAAA,CACDiF,SAAS,GAAT,SAAAA,UAAU9C,EAA0D,EAAE;IAClE,IAAI,CAAC1D,aAAa,CAACyE,IAAI,CAACf,EAAE,CAAC;EAC/B,CAAC;EAAAnC,MAAA,CAEKV,MAAM,GAAZ,eAAAA,OAAA,EAA6B;IACzB,IAAI,IAAI,CAACa,SAAS,CAAC,CAAC,EAAE;MAClB,OAAOzE,qBAAqB;IAChC;IAEA,IAAMwJ,QAAwB,GAAG,IAAI,CAAC5G,QAAQ,CAACgE,GAAG,CAAC6C,EAAE,IAAItJ,SAAS,CAACsJ,EAAE,CAAC,CAAC,CAAC,CAAC;IAEzE,IAAI,IAAI,CAAC/D,wBAAwB,EAAE;MAC/B,MAAMpF,0BAA0B,CAAC,IAAI,CAACoF,wBAAwB,CAAC;IACnE;IACA,IAAI,IAAI,CAACZ,YAAY,EAAE;MACnB0E,QAAQ,CAAC7H,IAAI,CACT/B,cAAc,CAAC,IAAI,CAAC8F,wBAAwB,CAAC,CAACgE,eAAe,CACxDC,IAAI,CAAC,MAAM/J,cAAc,CAAC,IAAI,CAACkF,YAAY,CAAC,CAAC8E,KAAK,CAAC,CAAC,CAC7D,CAAC;IACL;IAEA,IAAI,CAAC7H,IAAI,CAACgC,OAAO,CAAC8F,GAAG,IAAIA,GAAG,CAACC,WAAW,CAAC,CAAC,CAAC;IAC3C,IAAI,CAAC9H,QAAQ,CAACI,QAAQ,CAACoF,IAAI,CAAC,IAAI,CAAC;IAEjC,IAAI,CA
ACxF,QAAQ,CAACK,MAAM,CAAC0H,QAAQ,CAAC,CAAC;IAC/B,IAAI,CAAC/H,QAAQ,CAACI,QAAQ,CAAC2H,QAAQ,CAAC,CAAC;IACjC,IAAI,CAAC/H,QAAQ,CAACG,KAAK,CAAC4H,QAAQ,CAAC,CAAC;IAC9B,IAAI,CAAC/H,QAAQ,CAACC,QAAQ,CAAC8H,QAAQ,CAAC,CAAC;IACjC,IAAI,CAAC/H,QAAQ,CAACE,IAAI,CAAC6H,QAAQ,CAAC,CAAC;IAE7B,OAAO3F,OAAO,CAACW,GAAG,CAACyE,QAAQ,CAAC;EAChC,CAAC;EAAAlF,MAAA,CAEK0F,MAAM,GAAZ,eAAAA,OAAA,EAAe;IACX,MAAMpK,cAAc,CAAC,IAAI,CAACkF,YAAY,CAAC,CAACkF,MAAM,CAAC,CAAC;IAChD,IAAMnF,QAAQ,GAAG,MAAM,IAAI,CAAC7B,eAAe;IAC3C,MAAM,IAAI,CAACY,MAAM,CAAC,CAAC;IACnB,MAAM7C,oCAAoC,CACtC,IAAI,CAACS,UAAU,EACfqD,QAAQ,CAACpB,cAAc,EACvBoB,QAAQ,CAACtB,MACb,CAAC;EACL,CAAC;EAAA,OAAAjC,kBAAA;AAAA;AAIL,OAAO,SAAS2I,qBAAqBA,CACjC;EACI1I,qBAAqB;EACrBC,UAAU;EACVC,YAAY,GAAG,UAAU;EACzBC,IAAI;EACJC,IAAI;EACJC,IAAI,GAAG,IAAI;EACXC,SAAS,GAAG,IAAI,GAAG,CAAC;EACpBqI,iBAAiB,GAAG,IAAI;EACxBpI,SAAS,GAAG;AAC+B,CAAC,EACH;EAC7Cd,WAAW,CAACtB,wBAAwB,CAAC;;EAErC;AACJ;AACA;AACA;AACA;EACI,IAAI,CAACgC,IAAI,IAAI,CAACC,IAAI,EAAE;IAChB,MAAMlB,UAAU,CAAC,KAAK,EAAE;MACpBe,UAAU,EAAEA,UAAU,CAAC4B,IAAI;MAC3BiF,IAAI,EAAE;QACF9G;MACJ;IACJ,CAAC,CAAC;EACN;EAEA,IAAM4I,gBAAgB,GAAG,IAAI7I,kBAAkB,CAC3CC,qBAAqB,EACrBC,UAAU,EACVC,YAAY,EACZC,IAAI,EACJC,IAAI,EACJC,IAAI,EACJC,SAAS,EACTC,SACJ,CAAC;EAGDsI,4BAA4B,CAACF,iBAAiB,EAAEC,gBAAgB,CAAC;EACjE,OAAOA,gBAAgB;AAC3B;AAGA,OAAO,SAASC,4BAA4BA,CACxCF,iBAA0B,EAC1BC,gBAA8C,EAChD;EACE;AACJ;AACA;AACA;EACI,IAAME,qBAAqB,GAAGH,iBAAiB,IAAIC,gBAAgB,CAAC3I,UAAU,CAAC0B,QAAQ,CAACmC,aAAa;EACrG,IAAMiF,WAAyB,GAAGD,qBAAqB,GAAGF,gBAAgB,CAAC3I,UAAU,CAAC0B,QAAQ,CAACgH,iBAAiB,CAAC,CAAC,GAAGjK,oBAAoB;EACzI,OAAOqK,WAAW,CAACX,IAAI,CAAC,MAAM;IAC1B,IAAIQ,gBAAgB,CAAC1F,SAAS,CAAC,CAAC,EAAE;MAC9B;IACJ;IACA,IAAI0F,gBAAgB,CAACrI,SAAS,EAAE;MAC5BqI,gBAAgB,CAAC3F,KAAK,CAAC,CAAC;IAC5B;EACJ,CAAC,CAAC;AACN","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/replication/replication-helper.js b/dist/esm/plugins/replication/replication-helper.js deleted file mode 100644 index 6130c1c8b91..00000000000 --- 
a/dist/esm/plugins/replication/replication-helper.js +++ /dev/null @@ -1,68 +0,0 @@ -import { flatClone } from "../../plugins/utils/index.js"; -import { getComposedPrimaryKeyOfDocumentData } from "../../rx-schema-helper.js"; - -// does nothing -export var DEFAULT_MODIFIER = d => Promise.resolve(d); -export function swapDefaultDeletedTodeletedField(deletedField, doc) { - if (deletedField === '_deleted') { - return doc; - } else { - doc = flatClone(doc); - var isDeleted = !!doc._deleted; - doc[deletedField] = isDeleted; - delete doc._deleted; - return doc; - } -} - -/** - * Must be run over all plain document data - * that was pulled from the remote. - * Used to fill up fields or modify the deleted field etc. - */ -export function handlePulledDocuments(collection, deletedField, docs) { - return docs.map(doc => { - var useDoc = flatClone(doc); - - /** - * Swap out the deleted field - */ - if (deletedField !== '_deleted') { - var isDeleted = !!useDoc[deletedField]; - useDoc._deleted = isDeleted; - delete useDoc[deletedField]; - } else { - // ensure we have a boolean. - useDoc._deleted = !!useDoc._deleted; - } - - /** - * Fill up composed primary - */ - var primaryPath = collection.schema.primaryPath; - useDoc[primaryPath] = getComposedPrimaryKeyOfDocumentData(collection.schema.jsonSchema, useDoc); - return useDoc; - }); -} - -/** - * Like normal promiseWait() - * but will skip the wait time if the online-state changes. 
- */ -export function awaitRetry(collection, retryTime) { - if (typeof window === 'undefined' || typeof window !== 'object' || typeof window.addEventListener === 'undefined' || navigator.onLine) { - return collection.promiseWait(retryTime); - } - var listener; - var onlineAgain = new Promise(res => { - listener = () => { - window.removeEventListener('online', listener); - res(); - }; - window.addEventListener('online', listener); - }); - return Promise.race([onlineAgain, collection.promiseWait(retryTime)]).then(() => { - window.removeEventListener('online', listener); - }); -} -//# sourceMappingURL=replication-helper.js.map \ No newline at end of file diff --git a/dist/esm/plugins/replication/replication-helper.js.map b/dist/esm/plugins/replication/replication-helper.js.map deleted file mode 100644 index e93415c6f0d..00000000000 --- a/dist/esm/plugins/replication/replication-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"replication-helper.js","names":["flatClone","getComposedPrimaryKeyOfDocumentData","DEFAULT_MODIFIER","d","Promise","resolve","swapDefaultDeletedTodeletedField","deletedField","doc","isDeleted","_deleted","handlePulledDocuments","collection","docs","map","useDoc","primaryPath","schema","jsonSchema","awaitRetry","retryTime","window","addEventListener","navigator","onLine","promiseWait","listener","onlineAgain","res","removeEventListener","race","then"],"sources":["../../../../src/plugins/replication/replication-helper.ts"],"sourcesContent":["import type {\n RxCollection,\n WithDeleted\n} from '../../types/index.d.ts';\nimport { flatClone } from '../../plugins/utils/index.ts';\nimport { getComposedPrimaryKeyOfDocumentData } from '../../rx-schema-helper.ts';\n\n// does nothing\nexport const DEFAULT_MODIFIER = (d: any) => Promise.resolve(d);\n\n\nexport function swapDefaultDeletedTodeletedField(\n deletedField: string,\n doc: WithDeleted\n): RxDocType {\n if (deletedField === '_deleted') {\n return doc;\n } else {\n doc = 
flatClone(doc);\n const isDeleted = !!doc._deleted;\n (doc as any)[deletedField] = isDeleted;\n delete (doc as any)._deleted;\n return doc;\n }\n}\n\n/**\n * Must be run over all plain document data\n * that was pulled from the remote.\n * Used to fill up fields or modify the deleted field etc.\n */\nexport function handlePulledDocuments(\n collection: RxCollection,\n deletedField: string,\n docs: RxDocType[]\n): WithDeleted[] {\n return docs.map(doc => {\n const useDoc: WithDeleted = flatClone(doc) as any;\n\n /**\n * Swap out the deleted field\n */\n if (deletedField !== '_deleted') {\n const isDeleted = !!(useDoc as any)[deletedField];\n (useDoc as any)._deleted = isDeleted;\n delete (useDoc as any)[deletedField];\n } else {\n // ensure we have a boolean.\n useDoc._deleted = !!useDoc._deleted;\n }\n\n /**\n * Fill up composed primary\n */\n const primaryPath = collection.schema.primaryPath;\n (useDoc as any)[primaryPath] = getComposedPrimaryKeyOfDocumentData(\n collection.schema.jsonSchema,\n useDoc\n );\n return useDoc as any;\n });\n}\n\n\n/**\n * Like normal promiseWait()\n * but will skip the wait time if the online-state changes.\n */\nexport function awaitRetry(\n collection: RxCollection,\n retryTime: number\n) {\n if (\n typeof window === 'undefined' ||\n typeof window !== 'object' ||\n typeof window.addEventListener === 'undefined' ||\n navigator.onLine\n ) {\n return collection.promiseWait(retryTime);\n }\n\n let listener: any;\n const onlineAgain = new Promise(res => {\n listener = () => {\n window.removeEventListener('online', listener);\n res();\n };\n window.addEventListener('online', listener);\n });\n\n return Promise.race([\n onlineAgain,\n collection.promiseWait(retryTime)\n ]).then(() => {\n window.removeEventListener('online', listener);\n 
});\n}\n"],"mappings":"AAIA,SAASA,SAAS,QAAQ,8BAA8B;AACxD,SAASC,mCAAmC,QAAQ,2BAA2B;;AAE/E;AACA,OAAO,IAAMC,gBAAgB,GAAIC,CAAM,IAAKC,OAAO,CAACC,OAAO,CAACF,CAAC,CAAC;AAG9D,OAAO,SAASG,gCAAgCA,CAC5CC,YAAoB,EACpBC,GAA2B,EAClB;EACT,IAAID,YAAY,KAAK,UAAU,EAAE;IAC7B,OAAOC,GAAG;EACd,CAAC,MAAM;IACHA,GAAG,GAAGR,SAAS,CAACQ,GAAG,CAAC;IACpB,IAAMC,SAAS,GAAG,CAAC,CAACD,GAAG,CAACE,QAAQ;IAC/BF,GAAG,CAASD,YAAY,CAAC,GAAGE,SAAS;IACtC,OAAQD,GAAG,CAASE,QAAQ;IAC5B,OAAOF,GAAG;EACd;AACJ;;AAEA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASG,qBAAqBA,CACjCC,UAAmC,EACnCL,YAAoB,EACpBM,IAAiB,EACO;EACxB,OAAOA,IAAI,CAACC,GAAG,CAACN,GAAG,IAAI;IACnB,IAAMO,MAA8B,GAAGf,SAAS,CAACQ,GAAG,CAAQ;;IAE5D;AACR;AACA;IACQ,IAAID,YAAY,KAAK,UAAU,EAAE;MAC7B,IAAME,SAAS,GAAG,CAAC,CAAEM,MAAM,CAASR,YAAY,CAAC;MAChDQ,MAAM,CAASL,QAAQ,GAAGD,SAAS;MACpC,OAAQM,MAAM,CAASR,YAAY,CAAC;IACxC,CAAC,MAAM;MACH;MACAQ,MAAM,CAACL,QAAQ,GAAG,CAAC,CAACK,MAAM,CAACL,QAAQ;IACvC;;IAEA;AACR;AACA;IACQ,IAAMM,WAAW,GAAGJ,UAAU,CAACK,MAAM,CAACD,WAAW;IAChDD,MAAM,CAASC,WAAW,CAAC,GAAGf,mCAAmC,CAC9DW,UAAU,CAACK,MAAM,CAACC,UAAU,EAC5BH,MACJ,CAAC;IACD,OAAOA,MAAM;EACjB,CAAC,CAAC;AACN;;AAGA;AACA;AACA;AACA;AACA,OAAO,SAASI,UAAUA,CACtBP,UAAwB,EACxBQ,SAAiB,EACnB;EACE,IACI,OAAOC,MAAM,KAAK,WAAW,IAC7B,OAAOA,MAAM,KAAK,QAAQ,IAC1B,OAAOA,MAAM,CAACC,gBAAgB,KAAK,WAAW,IAC9CC,SAAS,CAACC,MAAM,EAClB;IACE,OAAOZ,UAAU,CAACa,WAAW,CAACL,SAAS,CAAC;EAC5C;EAEA,IAAIM,QAAa;EACjB,IAAMC,WAAW,GAAG,IAAIvB,OAAO,CAAOwB,GAAG,IAAI;IACzCF,QAAQ,GAAGA,CAAA,KAAM;MACbL,MAAM,CAACQ,mBAAmB,CAAC,QAAQ,EAAEH,QAAQ,CAAC;MAC9CE,GAAG,CAAC,CAAC;IACT,CAAC;IACDP,MAAM,CAACC,gBAAgB,CAAC,QAAQ,EAAEI,QAAQ,CAAC;EAC/C,CAAC,CAAC;EAEF,OAAOtB,OAAO,CAAC0B,IAAI,CAAC,CAChBH,WAAW,EACXf,UAAU,CAACa,WAAW,CAACL,SAAS,CAAC,CACpC,CAAC,CAACW,IAAI,CAAC,MAAM;IACVV,MAAM,CAACQ,mBAAmB,CAAC,QAAQ,EAAEH,QAAQ,CAAC;EAClD,CAAC,CAAC;AACN","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/state/helpers.js b/dist/esm/plugins/state/helpers.js deleted file mode 100644 index 19622f86e9d..00000000000 --- a/dist/esm/plugins/state/helpers.js +++ 
/dev/null @@ -1,54 +0,0 @@ -export var RX_STATE_SCHEMA_TITLE = 'RxStateCollection'; -export var RX_STATE_ID_LENGTH = 14; -export var RX_STATE_COLLECTION_SCHEMA = { - title: RX_STATE_SCHEMA_TITLE, - primaryKey: 'id', - version: 0, - type: 'object', - properties: { - id: { - type: 'string', - /** - * We store numbers in string format like '0001' - * with a left-pad. - * TODO instead we should transform the number to a string - * with the same sort-position to improve performance. - */ - maxLength: RX_STATE_ID_LENGTH, - minLength: RX_STATE_ID_LENGTH, - pattern: '[0-9]+' - }, - sId: { - type: 'string', - maxLength: 10, - minLength: 10 - }, - ops: { - type: 'array', - minItems: 1, - items: { - type: 'object', - properties: { - k: { - type: 'string' - }, - v: { - type: 'object' - } - }, - required: ['key', 'value'] - } - } - }, - required: ['id', 'sId', 'ops'] -}; -export function nextRxStateId(lastId) { - if (!lastId) { - return ''.padStart(RX_STATE_ID_LENGTH, '0'); - } - var parsed = parseInt(lastId, 10); - var next = parsed + 1; - var nextString = next.toString(); - return nextString.padStart(RX_STATE_ID_LENGTH, '0'); -} -//# sourceMappingURL=helpers.js.map \ No newline at end of file diff --git a/dist/esm/plugins/state/helpers.js.map b/dist/esm/plugins/state/helpers.js.map deleted file mode 100644 index 2f048499275..00000000000 --- a/dist/esm/plugins/state/helpers.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"helpers.js","names":["RX_STATE_SCHEMA_TITLE","RX_STATE_ID_LENGTH","RX_STATE_COLLECTION_SCHEMA","title","primaryKey","version","type","properties","id","maxLength","minLength","pattern","sId","ops","minItems","items","k","v","required","nextRxStateId","lastId","padStart","parsed","parseInt","next","nextString","toString"],"sources":["../../../../src/plugins/state/helpers.ts"],"sourcesContent":["import type { DeepReadonly, RxJsonSchema } from '../../types';\nimport type { RxStateDocument } from './types';\n\nexport const RX_STATE_SCHEMA_TITLE = 
'RxStateCollection';\nexport const RX_STATE_ID_LENGTH = 14;\nexport const RX_STATE_COLLECTION_SCHEMA: DeepReadonly> = {\n title: RX_STATE_SCHEMA_TITLE,\n primaryKey: 'id',\n version: 0,\n type: 'object',\n properties: {\n id: {\n type: 'string',\n /**\n * We store numbers in string format like '0001'\n * with a left-pad.\n * TODO instead we should transform the number to a string\n * with the same sort-position to improve performance.\n */\n maxLength: RX_STATE_ID_LENGTH,\n minLength: RX_STATE_ID_LENGTH,\n pattern: '[0-9]+'\n },\n sId: {\n type: 'string',\n maxLength: 10,\n minLength: 10\n },\n ops: {\n type: 'array',\n minItems: 1,\n items: {\n type: 'object',\n properties: {\n k: {\n type: 'string'\n },\n v: {\n type: 'object'\n }\n },\n required: [\n 'key',\n 'value'\n ]\n }\n }\n },\n required: [\n 'id',\n 'sId',\n 'ops'\n ]\n} as const;\n\n\nexport function nextRxStateId(lastId?: string): string {\n if (!lastId) {\n return ''.padStart(RX_STATE_ID_LENGTH, '0');\n }\n const parsed = parseInt(lastId, 10);\n const next = parsed + 1;\n const nextString = next.toString();\n return nextString.padStart(RX_STATE_ID_LENGTH, 
'0');\n}\n"],"mappings":"AAGA,OAAO,IAAMA,qBAAqB,GAAG,mBAAmB;AACxD,OAAO,IAAMC,kBAAkB,GAAG,EAAE;AACpC,OAAO,IAAMC,0BAAuE,GAAG;EACnFC,KAAK,EAAEH,qBAAqB;EAC5BI,UAAU,EAAE,IAAI;EAChBC,OAAO,EAAE,CAAC;EACVC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,EAAE,EAAE;MACAF,IAAI,EAAE,QAAQ;MACd;AACZ;AACA;AACA;AACA;AACA;MACYG,SAAS,EAAER,kBAAkB;MAC7BS,SAAS,EAAET,kBAAkB;MAC7BU,OAAO,EAAE;IACb,CAAC;IACDC,GAAG,EAAE;MACDN,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE,EAAE;MACbC,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDP,IAAI,EAAE,OAAO;MACbQ,QAAQ,EAAE,CAAC;MACXC,KAAK,EAAE;QACHT,IAAI,EAAE,QAAQ;QACdC,UAAU,EAAE;UACRS,CAAC,EAAE;YACCV,IAAI,EAAE;UACV,CAAC;UACDW,CAAC,EAAE;YACCX,IAAI,EAAE;UACV;QACJ,CAAC;QACDY,QAAQ,EAAE,CACN,KAAK,EACL,OAAO;MAEf;IACJ;EACJ,CAAC;EACDA,QAAQ,EAAE,CACN,IAAI,EACJ,KAAK,EACL,KAAK;AAEb,CAAU;AAGV,OAAO,SAASC,aAAaA,CAACC,MAAe,EAAU;EACnD,IAAI,CAACA,MAAM,EAAE;IACT,OAAO,EAAE,CAACC,QAAQ,CAACpB,kBAAkB,EAAE,GAAG,CAAC;EAC/C;EACA,IAAMqB,MAAM,GAAGC,QAAQ,CAACH,MAAM,EAAE,EAAE,CAAC;EACnC,IAAMI,IAAI,GAAGF,MAAM,GAAG,CAAC;EACvB,IAAMG,UAAU,GAAGD,IAAI,CAACE,QAAQ,CAAC,CAAC;EAClC,OAAOD,UAAU,CAACJ,QAAQ,CAACpB,kBAAkB,EAAE,GAAG,CAAC;AACvD","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/state/index.js b/dist/esm/plugins/state/index.js deleted file mode 100644 index d4cf6d360c6..00000000000 --- a/dist/esm/plugins/state/index.js +++ /dev/null @@ -1,20 +0,0 @@ -import { getFromMapOrCreate } from "../utils/utils-map.js"; -import { createRxState } from "./rx-state.js"; -export * from "./helpers.js"; -var STATE_BY_DATABASE = new WeakMap(); -export async function addState(namespace = '') { - var stateCache = getFromMapOrCreate(STATE_BY_DATABASE, this, () => new Map()); - var state = await getFromMapOrCreate(stateCache, namespace, () => createRxState(this, namespace)); - this.states[namespace] = state; - return state; -} -export var RxDBStatePlugin = { - name: 'state', - rxdb: true, - prototypes: { - RxDatabase(proto) { - proto.addState = addState; - } - } -}; -//# sourceMappingURL=index.js.map \ No 
newline at end of file diff --git a/dist/esm/plugins/state/index.js.map b/dist/esm/plugins/state/index.js.map deleted file mode 100644 index caae2ee0788..00000000000 --- a/dist/esm/plugins/state/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["getFromMapOrCreate","createRxState","STATE_BY_DATABASE","WeakMap","addState","namespace","stateCache","Map","state","states","RxDBStatePlugin","name","rxdb","prototypes","RxDatabase","proto"],"sources":["../../../../src/plugins/state/index.ts"],"sourcesContent":["import type {\n RxDatabase,\n RxPlugin,\n RxState\n} from '../../types/index.d.ts';\nimport { getFromMapOrCreate } from '../utils/utils-map.ts';\nimport { RxStateBase, createRxState } from './rx-state.ts';\n\nexport * from './helpers.ts';\n\ntype StateByPrefix = Map>>;\nconst STATE_BY_DATABASE = new WeakMap();\n\nexport async function addState(\n this: RxDatabase,\n namespace: string = ''\n): Promise> {\n const stateCache = getFromMapOrCreate(\n STATE_BY_DATABASE,\n this,\n () => new Map()\n );\n const state = await getFromMapOrCreate(\n stateCache,\n namespace,\n () => createRxState(this, namespace)\n );\n this.states[namespace] = state;\n return state as any;\n}\n\nexport const RxDBStatePlugin: RxPlugin = {\n name: 'state',\n rxdb: true,\n prototypes: {\n RxDatabase(proto: any) {\n proto.addState = addState;\n }\n 
}\n};\n"],"mappings":"AAKA,SAASA,kBAAkB,QAAQ,uBAAuB;AAC1D,SAAsBC,aAAa,QAAQ,eAAe;AAE1D,cAAc,cAAc;AAG5B,IAAMC,iBAAiB,GAAG,IAAIC,OAAO,CAA4B,CAAC;AAElE,OAAO,eAAeC,QAAQA,CAE1BC,SAAiB,GAAG,EAAE,EACH;EACnB,IAAMC,UAAU,GAAGN,kBAAkB,CACjCE,iBAAiB,EACjB,IAAI,EACJ,MAAM,IAAIK,GAAG,CAAC,CAClB,CAAC;EACD,IAAMC,KAAK,GAAG,MAAMR,kBAAkB,CAClCM,UAAU,EACVD,SAAS,EACT,MAAMJ,aAAa,CAAI,IAAI,EAAEI,SAAS,CAC1C,CAAC;EACD,IAAI,CAACI,MAAM,CAACJ,SAAS,CAAC,GAAGG,KAAK;EAC9B,OAAOA,KAAK;AAChB;AAEA,OAAO,IAAME,eAAyB,GAAG;EACrCC,IAAI,EAAE,OAAO;EACbC,IAAI,EAAE,IAAI;EACVC,UAAU,EAAE;IACRC,UAAUA,CAACC,KAAU,EAAE;MACnBA,KAAK,CAACX,QAAQ,GAAGA,QAAQ;IAC7B;EACJ;AACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/state/rx-state.js b/dist/esm/plugins/state/rx-state.js deleted file mode 100644 index e86c086dd26..00000000000 --- a/dist/esm/plugins/state/rx-state.js +++ /dev/null @@ -1,240 +0,0 @@ -import { Subject, distinctUntilChanged, map, merge, shareReplay, startWith, tap } from 'rxjs'; -import { overwritable } from "../../overwritable.js"; -import { getChangedDocumentsSince } from "../../rx-storage-helper.js"; -import { RXJS_SHARE_REPLAY_DEFAULTS, getProperty, setProperty, PROMISE_RESOLVE_VOID, appendToArray, clone, randomCouchString, deepEqual } from "../utils/index.js"; -import { RX_STATE_COLLECTION_SCHEMA, nextRxStateId } from "./helpers.js"; -import { newRxError } from "../../rx-error.js"; -import { runPluginHooks } from "../../hooks.js"; -var debugId = 0; - -/** - * RxDB internally used properties are - * prefixed with lodash _ to make them less - * likely to clash with actual state properties - * from the user. 
- */ -export var RxStateBase = /*#__PURE__*/function () { - // used for debugging - - function RxStateBase(prefix, collection) { - this._id = debugId++; - this._state = {}; - this._nonPersisted = []; - this._writeQueue = PROMISE_RESOLVE_VOID; - this._initDone = false; - this._instanceId = randomCouchString(RX_STATE_COLLECTION_SCHEMA.properties.sId.maxLength); - this._ownEmits$ = new Subject(); - this.prefix = prefix; - this.collection = collection; - this.collection.onDestroy.push(() => this._writeQueue); - this._lastIdQuery = this.collection.findOne({ - sort: [{ - id: 'desc' - }] - }); - // make it "hot" for better write performance - this._lastIdQuery.$.subscribe(); - this.$ = merge(this._ownEmits$, this.collection.$.pipe(tap(event => { - if (this._initDone && event.operation === 'INSERT' && event.documentData.sId !== this._instanceId) { - mergeOperationsIntoState(this._state, event.documentData.ops); - } - }))).pipe(shareReplay(RXJS_SHARE_REPLAY_DEFAULTS), map(() => this._state)); - // directly subscribe because of the tap() side effect - this.$.subscribe(); - } - var _proto = RxStateBase.prototype; - _proto.set = async function set(path, modifier) { - this._nonPersisted.push({ - path, - modifier - }); - return this._triggerWrite(); - } - - /** - * To have deterministic writes, - * and to ensure that multiple js realms do not overwrite - * each other, the write happens with incremental ids - * that would throw conflict errors and trigger a retry. - */; - _proto._triggerWrite = function _triggerWrite() { - this._writeQueue = this._writeQueue.then(async () => { - if (this._nonPersisted.length === 0) { - return; - } - var useWrites = []; - var done = false; - while (!done) { - var lastIdDoc = await this._lastIdQuery.exec(); - appendToArray(useWrites, this._nonPersisted); - this._nonPersisted = []; - var nextId = nextRxStateId(lastIdDoc ? 
lastIdDoc.id : undefined); - try { - /** - * TODO instead of a deep-clone we should - * only clone the parts where we know that they - * will be changed. This would improve performance. - */ - var newState = clone(this._state); - var ops = []; - for (var index = 0; index < useWrites.length; index++) { - var writeRow = useWrites[index]; - var value = getProperty(newState, writeRow.path); - var newValue = writeRow.modifier(value); - setProperty(newState, writeRow.path, newValue); - ops.push({ - k: writeRow.path, - /** - * Here we have to clone the value because - * some storages like the memory storage - * make input data deep-frozen in dev-mode. - */ - v: clone(newValue) - }); - } - await this.collection.insert({ - id: nextId, - sId: this._instanceId, - ops - }); - this._state = newState; - this._ownEmits$.next(this._state); - done = true; - } catch (err) { - if (err.code !== 'CONFLICT') { - throw err; - } - } - } - }).catch(error => { - throw newRxError('SNH', { - name: 'RxState WRITE QUEUE ERROR', - error - }); - }); - return this._writeQueue; - }; - _proto.get = function get(path) { - if (!path) { - return overwritable.deepFreezeWhenDevMode(this._state); - } - return overwritable.deepFreezeWhenDevMode(getProperty(this._state, path)); - }; - _proto.get$ = function get$(path) { - return this.$.pipe(map(() => this.get(path)), startWith(this.get(path)), distinctUntilChanged(deepEqual), shareReplay(RXJS_SHARE_REPLAY_DEFAULTS)); - }; - _proto.get$$ = function get$$(path) { - var obs = this.get$(path); - var reactivity = this.collection.database.getReactivityFactory(); - return reactivity.fromObservable(obs, this.get(path), this.collection.database); - } - - /** - * Merges the state operations into a single write row - * to store space and make recreating the state from - * disc faster. 
- */; - _proto._cleanup = async function _cleanup() { - var firstWrite = await this.collection.findOne({ - sort: [{ - id: 'asc' - }] - }).exec(); - var lastWrite = await this._lastIdQuery.exec(); - if (!firstWrite || !lastWrite) { - return; - } - var firstNr = parseInt(firstWrite.id, 10); - var lastNr = parseInt(lastWrite.id, 10); - if (lastNr - 5 < firstNr) { - // only run if more then 5 write rows - return; - } - - // update whole state object - await this._writeQueue; - await this.set('', () => this._state); - - // delete old ones - await this.collection.find({ - selector: { - id: { - $lte: lastWrite.id - } - } - }).remove(); - }; - return RxStateBase; -}(); -export async function createRxState(database, prefix) { - var collectionName = 'rx-state-' + prefix; - await database.addCollections({ - [collectionName]: { - schema: RX_STATE_COLLECTION_SCHEMA - } - }); - var collection = database.collections[collectionName]; - var rxState = new RxStateBase(prefix, collection); - - /** - * Directly get the state and put it into memory. - * This ensures we can do non-async accesses to the - * correct state. 
- */ - var done = false; - var checkpoint = undefined; - while (!done) { - var result = await getChangedDocumentsSince(collection.storageInstance, 1000, checkpoint); - checkpoint = result.checkpoint; - var documents = result.documents; - if (documents.length === 0) { - done = true; - } else { - for (var index = 0; index < documents.length; index++) { - var document = documents[index]; - mergeOperationsIntoState(rxState._state, document.ops); - } - } - } - rxState._initDone = true; - var proxy = new Proxy(rxState, { - get(target, property) { - if (typeof property !== 'string') { - return target[property]; - } - if (rxState[property]) { - var ret = rxState[property]; - if (typeof ret === 'function') { - return ret.bind(rxState); - } else { - return ret; - } - } - var lastChar = property.charAt(property.length - 1); - if (property.endsWith('$$')) { - var key = property.slice(0, -2); - return rxState.get$$(key); - } else if (lastChar === '$') { - var _key = property.slice(0, -1); - return rxState.get$(_key); - } else { - return rxState.get(property); - } - }, - set(target, newValue, receiver) { - throw new Error('Do not write to RxState'); - } - }); - runPluginHooks('createRxState', { - collection, - state: proxy - }); - return proxy; -} -export function mergeOperationsIntoState(state, operations) { - for (var index = 0; index < operations.length; index++) { - var operation = operations[index]; - setProperty(state, operation.k, clone(operation.v)); - } -} -//# sourceMappingURL=rx-state.js.map \ No newline at end of file diff --git a/dist/esm/plugins/state/rx-state.js.map b/dist/esm/plugins/state/rx-state.js.map deleted file mode 100644 index 0b83d46f520..00000000000 --- a/dist/esm/plugins/state/rx-state.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"rx-state.js","names":["Subject","distinctUntilChanged","map","merge","shareReplay","startWith","tap","overwritable","getChangedDocumentsSince","RXJS_SHARE_REPLAY_DEFAULTS","getProperty","setProperty","PROMISE_RESOLVE_VOID","appendToArray","clone","randomCouchString","deepEqual","RX_STATE_COLLECTION_SCHEMA","nextRxStateId","newRxError","runPluginHooks","debugId","RxStateBase","prefix","collection","_id","_state","_nonPersisted","_writeQueue","_initDone","_instanceId","properties","sId","maxLength","_ownEmits$","onDestroy","push","_lastIdQuery","findOne","sort","id","$","subscribe","pipe","event","operation","documentData","mergeOperationsIntoState","ops","_proto","prototype","set","path","modifier","_triggerWrite","then","length","useWrites","done","lastIdDoc","exec","nextId","undefined","newState","index","writeRow","value","newValue","k","v","insert","next","err","code","catch","error","name","get","deepFreezeWhenDevMode","get$","get$$","obs","reactivity","database","getReactivityFactory","fromObservable","_cleanup","firstWrite","lastWrite","firstNr","parseInt","lastNr","find","selector","$lte","remove","createRxState","collectionName","addCollections","schema","collections","rxState","checkpoint","result","storageInstance","documents","document","proxy","Proxy","target","property","ret","bind","lastChar","charAt","endsWith","key","slice","receiver","Error","state","operations"],"sources":["../../../../src/plugins/state/rx-state.ts"],"sourcesContent":["import {\n Observable,\n Subject,\n distinctUntilChanged,\n map,\n merge,\n shareReplay,\n startWith,\n tap\n} from 'rxjs';\nimport { overwritable } from '../../overwritable.ts';\nimport { getChangedDocumentsSince } from '../../rx-storage-helper.ts';\nimport type {\n RxCollection,\n RxDatabase,\n RxQuery,\n RxDocument,\n RxError,\n Paths\n} from '../../types';\nimport {\n RXJS_SHARE_REPLAY_DEFAULTS,\n getProperty,\n setProperty,\n PROMISE_RESOLVE_VOID,\n appendToArray,\n clone,\n 
randomCouchString,\n deepEqual\n} from '../utils/index.ts';\nimport {\n RX_STATE_COLLECTION_SCHEMA,\n nextRxStateId\n} from './helpers.ts';\nimport {\n RxStateDocument,\n RxStateOperation,\n RxStateModifier\n} from './types.ts';\nimport { newRxError } from '../../rx-error.ts';\nimport { runPluginHooks } from '../../hooks.ts';\n\n\nlet debugId = 0;\n\n\n/**\n * RxDB internally used properties are\n * prefixed with lodash _ to make them less\n * likely to clash with actual state properties\n * from the user.\n */\nexport class RxStateBase {\n // used for debugging\n public _id: number = debugId++;\n public _state: T | any = {};\n public $: Observable;\n public _lastIdQuery: RxQuery | null>;\n public _nonPersisted: {\n path: string;\n modifier: RxStateModifier;\n }[] = [];\n public _writeQueue = PROMISE_RESOLVE_VOID;\n public _initDone = false;\n public _instanceId = randomCouchString(RX_STATE_COLLECTION_SCHEMA.properties.sId.maxLength);\n public _ownEmits$ = new Subject();\n\n constructor(\n public readonly prefix: string,\n public readonly collection: RxCollection\n ) {\n this.collection.onDestroy.push(() => this._writeQueue);\n this._lastIdQuery = this.collection.findOne({\n sort: [\n { id: 'desc' }\n ]\n });\n // make it \"hot\" for better write performance\n this._lastIdQuery.$.subscribe();\n\n this.$ = merge(\n this._ownEmits$,\n this.collection.$.pipe(\n tap(event => {\n if (\n this._initDone &&\n event.operation === 'INSERT' &&\n event.documentData.sId !== this._instanceId\n ) {\n mergeOperationsIntoState(this._state, event.documentData.ops);\n }\n })\n )\n ).pipe(\n shareReplay(RXJS_SHARE_REPLAY_DEFAULTS),\n map(() => this._state)\n );\n // directly subscribe because of the tap() side effect\n this.$.subscribe();\n }\n\n async set(\n path: Paths | '',\n modifier: RxStateModifier\n ) {\n this._nonPersisted.push({\n path,\n modifier\n });\n return this._triggerWrite();\n }\n\n /**\n * To have deterministic writes,\n * and to ensure that multiple js realms do 
not overwrite\n * each other, the write happens with incremental ids\n * that would throw conflict errors and trigger a retry.\n */\n _triggerWrite() {\n this._writeQueue = this._writeQueue.then(async () => {\n if (this._nonPersisted.length === 0) {\n return;\n }\n let useWrites: typeof this._nonPersisted = [];\n let done = false;\n while (!done) {\n const lastIdDoc = await this._lastIdQuery.exec();\n appendToArray(useWrites, this._nonPersisted);\n this._nonPersisted = [];\n const nextId = nextRxStateId(lastIdDoc ? lastIdDoc.id : undefined);\n try {\n /**\n * TODO instead of a deep-clone we should\n * only clone the parts where we know that they\n * will be changed. This would improve performance.\n */\n const newState = clone(this._state);\n const ops: RxStateOperation[] = [];\n for (let index = 0; index < useWrites.length; index++) {\n const writeRow = useWrites[index];\n const value = getProperty(newState, writeRow.path);\n const newValue = writeRow.modifier(value);\n setProperty(newState, writeRow.path, newValue);\n ops.push({\n k: writeRow.path,\n /**\n * Here we have to clone the value because\n * some storages like the memory storage\n * make input data deep-frozen in dev-mode.\n */\n v: clone(newValue)\n });\n }\n await this.collection.insert({\n id: nextId,\n sId: this._instanceId,\n ops\n });\n this._state = newState;\n this._ownEmits$.next(this._state);\n done = true;\n } catch (err) {\n if ((err as RxError).code !== 'CONFLICT') {\n throw err;\n }\n }\n }\n }).catch(error => {\n throw newRxError('SNH', {\n name: 'RxState WRITE QUEUE ERROR',\n error\n });\n });\n return this._writeQueue;\n }\n\n get(path?: Paths) {\n if (!path) {\n return overwritable.deepFreezeWhenDevMode(this._state);\n }\n return overwritable.deepFreezeWhenDevMode(\n getProperty(this._state, path)\n );\n }\n get$(path?: Paths): Observable {\n return this.$.pipe(\n map(() => this.get(path)),\n startWith(this.get(path)),\n distinctUntilChanged(deepEqual),\n 
shareReplay(RXJS_SHARE_REPLAY_DEFAULTS),\n );\n }\n get$$(path?: Paths): Reactivity {\n const obs = this.get$(path);\n const reactivity = this.collection.database.getReactivityFactory();\n return reactivity.fromObservable(\n obs,\n this.get(path),\n this.collection.database\n );\n }\n\n /**\n * Merges the state operations into a single write row\n * to store space and make recreating the state from\n * disc faster.\n */\n async _cleanup() {\n const firstWrite = await this.collection.findOne({\n sort: [{ id: 'asc' }]\n }).exec();\n const lastWrite = await this._lastIdQuery.exec();\n\n if (!firstWrite || !lastWrite) {\n return;\n }\n\n const firstNr = parseInt(firstWrite.id, 10);\n const lastNr = parseInt(lastWrite.id, 10);\n if ((lastNr - 5) < firstNr) {\n // only run if more then 5 write rows\n return;\n }\n\n // update whole state object\n await this._writeQueue;\n await this.set('', () => this._state);\n\n // delete old ones\n await this.collection.find({\n selector: {\n id: {\n $lte: lastWrite.id\n }\n }\n }).remove();\n }\n}\n\n\nexport async function createRxState(\n database: RxDatabase,\n prefix: string\n): Promise> {\n const collectionName = 'rx-state-' + prefix;\n await database.addCollections({\n [collectionName]: {\n schema: RX_STATE_COLLECTION_SCHEMA as any\n }\n });\n const collection: RxCollection = database.collections[collectionName];\n\n const rxState = new RxStateBase(\n prefix,\n collection\n );\n\n\n /**\n * Directly get the state and put it into memory.\n * This ensures we can do non-async accesses to the\n * correct state.\n */\n let done = false;\n let checkpoint: any = undefined;\n while (!done) {\n const result = await getChangedDocumentsSince(\n collection.storageInstance,\n 1000,\n checkpoint\n );\n checkpoint = result.checkpoint;\n const documents = result.documents;\n if (documents.length === 0) {\n done = true;\n } else {\n for (let index = 0; index < documents.length; index++) {\n const document = documents[index];\n 
mergeOperationsIntoState(rxState._state, document.ops);\n }\n }\n }\n rxState._initDone = true;\n\n const proxy = new Proxy(\n rxState as any,\n {\n get(target, property: any) {\n if (typeof property !== 'string') {\n return target[property];\n }\n if ((rxState as any)[property]) {\n const ret = (rxState as any)[property];\n if (typeof ret === 'function') {\n return ret.bind(rxState);\n } else {\n return ret;\n }\n }\n const lastChar = property.charAt(property.length - 1);\n if (property.endsWith('$$')) {\n const key = property.slice(0, -2);\n return rxState.get$$(key as any);\n } else if (lastChar === '$') {\n const key = property.slice(0, -1);\n return rxState.get$(key as any);\n } else {\n return rxState.get(property as any);\n }\n },\n set(target, newValue, receiver) {\n throw new Error('Do not write to RxState');\n }\n }\n );\n\n runPluginHooks('createRxState', {\n collection,\n state: proxy\n });\n\n return proxy;\n}\n\n\nexport function mergeOperationsIntoState(\n state: T,\n operations: RxStateOperation[]\n) {\n for (let index = 0; index < operations.length; index++) {\n const operation = operations[index];\n setProperty(state, operation.k, clone(operation.v));\n 
}\n}\n"],"mappings":"AAAA,SAEIA,OAAO,EACPC,oBAAoB,EACpBC,GAAG,EACHC,KAAK,EACLC,WAAW,EACXC,SAAS,EACTC,GAAG,QACA,MAAM;AACb,SAASC,YAAY,QAAQ,uBAAuB;AACpD,SAASC,wBAAwB,QAAQ,4BAA4B;AASrE,SACIC,0BAA0B,EAC1BC,WAAW,EACXC,WAAW,EACXC,oBAAoB,EACpBC,aAAa,EACbC,KAAK,EACLC,iBAAiB,EACjBC,SAAS,QACN,mBAAmB;AAC1B,SACIC,0BAA0B,EAC1BC,aAAa,QACV,cAAc;AAMrB,SAASC,UAAU,QAAQ,mBAAmB;AAC9C,SAASC,cAAc,QAAQ,gBAAgB;AAG/C,IAAIC,OAAO,GAAG,CAAC;;AAGf;AACA;AACA;AACA;AACA;AACA;AACA,WAAaC,WAAW;EACpB;;EAcA,SAAAA,YACoBC,MAAc,EACdC,UAAyC,EAC3D;IAAA,KAhBKC,GAAG,GAAWJ,OAAO,EAAE;IAAA,KACvBK,MAAM,GAAY,CAAC,CAAC;IAAA,KAGpBC,aAAa,GAGd,EAAE;IAAA,KACDC,WAAW,GAAGhB,oBAAoB;IAAA,KAClCiB,SAAS,GAAG,KAAK;IAAA,KACjBC,WAAW,GAAGf,iBAAiB,CAACE,0BAA0B,CAACc,UAAU,CAACC,GAAG,CAACC,SAAS,CAAC;IAAA,KACpFC,UAAU,GAAG,IAAIlC,OAAO,CAAI,CAAC;IAAA,KAGhBuB,MAAc,GAAdA,MAAc;IAAA,KACdC,UAAyC,GAAzCA,UAAyC;IAEzD,IAAI,CAACA,UAAU,CAACW,SAAS,CAACC,IAAI,CAAC,MAAM,IAAI,CAACR,WAAW,CAAC;IACtD,IAAI,CAACS,YAAY,GAAG,IAAI,CAACb,UAAU,CAACc,OAAO,CAAC;MACxCC,IAAI,EAAE,CACF;QAAEC,EAAE,EAAE;MAAO,CAAC;IAEtB,CAAC,CAAC;IACF;IACA,IAAI,CAACH,YAAY,CAACI,CAAC,CAACC,SAAS,CAAC,CAAC;IAE/B,IAAI,CAACD,CAAC,GAAGtC,KAAK,CACV,IAAI,CAAC+B,UAAU,EACf,IAAI,CAACV,UAAU,CAACiB,CAAC,CAACE,IAAI,CAClBrC,GAAG,CAACsC,KAAK,IAAI;MACT,IACI,IAAI,CAACf,SAAS,IACde,KAAK,CAACC,SAAS,KAAK,QAAQ,IAC5BD,KAAK,CAACE,YAAY,CAACd,GAAG,KAAK,IAAI,CAACF,WAAW,EAC7C;QACEiB,wBAAwB,CAAC,IAAI,CAACrB,MAAM,EAAEkB,KAAK,CAACE,YAAY,CAACE,GAAG,CAAC;MACjE;IACJ,CAAC,CACL,CACJ,CAAC,CAACL,IAAI,CACFvC,WAAW,CAACK,0BAA0B,CAAC,EACvCP,GAAG,CAAC,MAAM,IAAI,CAACwB,MAAM,CACzB,CAAC;IACD;IACA,IAAI,CAACe,CAAC,CAACC,SAAS,CAAC,CAAC;EACtB;EAAC,IAAAO,MAAA,GAAA3B,WAAA,CAAA4B,SAAA;EAAAD,MAAA,CAEKE,GAAG,GAAT,eAAAA,IACIC,IAAmB,EACnBC,QAAyB,EAC3B;IACE,IAAI,CAAC1B,aAAa,CAACS,IAAI,CAAC;MACpBgB,IAAI;MACJC;IACJ,CAAC,CAAC;IACF,OAAO,IAAI,CAACC,aAAa,CAAC,CAAC;EAC/B;;EAEA;AACJ;AACA;AACA;AACA;AACA,KALI;EAAAL,MAAA,CAMAK,aAAa,GAAb,SAAAA,cAAA,EAAgB;IACZ,IAAI,CAAC1B,WAAW,GAAG,IAAI,CAACA,WAAW,CAAC2B,IAAI,CAAC,YAAY;MACjD,IAAI,IAAI,CAAC5B,aAAa,CAAC6B,MA
AM,KAAK,CAAC,EAAE;QACjC;MACJ;MACA,IAAIC,SAAoC,GAAG,EAAE;MAC7C,IAAIC,IAAI,GAAG,KAAK;MAChB,OAAO,CAACA,IAAI,EAAE;QACV,IAAMC,SAAS,GAAG,MAAM,IAAI,CAACtB,YAAY,CAACuB,IAAI,CAAC,CAAC;QAChD/C,aAAa,CAAC4C,SAAS,EAAE,IAAI,CAAC9B,aAAa,CAAC;QAC5C,IAAI,CAACA,aAAa,GAAG,EAAE;QACvB,IAAMkC,MAAM,GAAG3C,aAAa,CAACyC,SAAS,GAAGA,SAAS,CAACnB,EAAE,GAAGsB,SAAS,CAAC;QAClE,IAAI;UACA;AACpB;AACA;AACA;AACA;UACoB,IAAMC,QAAQ,GAAGjD,KAAK,CAAC,IAAI,CAACY,MAAM,CAAC;UACnC,IAAMsB,GAAuB,GAAG,EAAE;UAClC,KAAK,IAAIgB,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGP,SAAS,CAACD,MAAM,EAAEQ,KAAK,EAAE,EAAE;YACnD,IAAMC,QAAQ,GAAGR,SAAS,CAACO,KAAK,CAAC;YACjC,IAAME,KAAK,GAAGxD,WAAW,CAACqD,QAAQ,EAAEE,QAAQ,CAACb,IAAI,CAAC;YAClD,IAAMe,QAAQ,GAAGF,QAAQ,CAACZ,QAAQ,CAACa,KAAK,CAAC;YACzCvD,WAAW,CAACoD,QAAQ,EAAEE,QAAQ,CAACb,IAAI,EAAEe,QAAQ,CAAC;YAC9CnB,GAAG,CAACZ,IAAI,CAAC;cACLgC,CAAC,EAAEH,QAAQ,CAACb,IAAI;cAChB;AAC5B;AACA;AACA;AACA;cAC4BiB,CAAC,EAAEvD,KAAK,CAACqD,QAAQ;YACrB,CAAC,CAAC;UACN;UACA,MAAM,IAAI,CAAC3C,UAAU,CAAC8C,MAAM,CAAC;YACzB9B,EAAE,EAAEqB,MAAM;YACV7B,GAAG,EAAE,IAAI,CAACF,WAAW;YACrBkB;UACJ,CAAC,CAAC;UACF,IAAI,CAACtB,MAAM,GAAGqC,QAAQ;UACtB,IAAI,CAAC7B,UAAU,CAACqC,IAAI,CAAC,IAAI,CAAC7C,MAAM,CAAC;UACjCgC,IAAI,GAAG,IAAI;QACf,CAAC,CAAC,OAAOc,GAAG,EAAE;UACV,IAAKA,GAAG,CAAaC,IAAI,KAAK,UAAU,EAAE;YACtC,MAAMD,GAAG;UACb;QACJ;MACJ;IACJ,CAAC,CAAC,CAACE,KAAK,CAACC,KAAK,IAAI;MACd,MAAMxD,UAAU,CAAC,KAAK,EAAE;QACpByD,IAAI,EAAE,2BAA2B;QACjCD;MACJ,CAAC,CAAC;IACN,CAAC,CAAC;IACF,OAAO,IAAI,CAAC/C,WAAW;EAC3B,CAAC;EAAAqB,MAAA,CAED4B,GAAG,GAAH,SAAAA,IAAIzB,IAAe,EAAE;IACjB,IAAI,CAACA,IAAI,EAAE;MACP,OAAO7C,YAAY,CAACuE,qBAAqB,CAAC,IAAI,CAACpD,MAAM,CAAC;IAC1D;IACA,OAAOnB,YAAY,CAACuE,qBAAqB,CACrCpE,WAAW,CAAC,IAAI,CAACgB,MAAM,EAAE0B,IAAI,CACjC,CAAC;EACL,CAAC;EAAAH,MAAA,CACD8B,IAAI,GAAJ,SAAAA,KAAK3B,IAAe,EAAmB;IACnC,OAAO,IAAI,CAACX,CAAC,CAACE,IAAI,CACdzC,GAAG,CAAC,MAAM,IAAI,CAAC2E,GAAG,CAACzB,IAAI,CAAC,CAAC,EACzB/C,SAAS,CAAC,IAAI,CAACwE,GAAG,CAACzB,IAAI,CAAC,CAAC,EACzBnD,oBAAoB,CAACe,SAAS,CAAC,EAC/BZ,WAAW,CAACK,0BAA0B,CAC1C,CAAC;EACL,CAAC;EAAAwC,MAAA,CACD+B,KAAK
,GAAL,SAAAA,MAAM5B,IAAe,EAAc;IAC/B,IAAM6B,GAAG,GAAG,IAAI,CAACF,IAAI,CAAC3B,IAAI,CAAC;IAC3B,IAAM8B,UAAU,GAAG,IAAI,CAAC1D,UAAU,CAAC2D,QAAQ,CAACC,oBAAoB,CAAC,CAAC;IAClE,OAAOF,UAAU,CAACG,cAAc,CAC5BJ,GAAG,EACH,IAAI,CAACJ,GAAG,CAACzB,IAAI,CAAC,EACd,IAAI,CAAC5B,UAAU,CAAC2D,QACpB,CAAC;EACL;;EAEA;AACJ;AACA;AACA;AACA,KAJI;EAAAlC,MAAA,CAKMqC,QAAQ,GAAd,eAAAA,SAAA,EAAiB;IACb,IAAMC,UAAU,GAAG,MAAM,IAAI,CAAC/D,UAAU,CAACc,OAAO,CAAC;MAC7CC,IAAI,EAAE,CAAC;QAAEC,EAAE,EAAE;MAAM,CAAC;IACxB,CAAC,CAAC,CAACoB,IAAI,CAAC,CAAC;IACT,IAAM4B,SAAS,GAAG,MAAM,IAAI,CAACnD,YAAY,CAACuB,IAAI,CAAC,CAAC;IAEhD,IAAI,CAAC2B,UAAU,IAAI,CAACC,SAAS,EAAE;MAC3B;IACJ;IAEA,IAAMC,OAAO,GAAGC,QAAQ,CAACH,UAAU,CAAC/C,EAAE,EAAE,EAAE,CAAC;IAC3C,IAAMmD,MAAM,GAAGD,QAAQ,CAACF,SAAS,CAAChD,EAAE,EAAE,EAAE,CAAC;IACzC,IAAKmD,MAAM,GAAG,CAAC,GAAIF,OAAO,EAAE;MACxB;MACA;IACJ;;IAEA;IACA,MAAM,IAAI,CAAC7D,WAAW;IACtB,MAAM,IAAI,CAACuB,GAAG,CAAC,EAAE,EAAE,MAAM,IAAI,CAACzB,MAAM,CAAC;;IAErC;IACA,MAAM,IAAI,CAACF,UAAU,CAACoE,IAAI,CAAC;MACvBC,QAAQ,EAAE;QACNrD,EAAE,EAAE;UACAsD,IAAI,EAAEN,SAAS,CAAChD;QACpB;MACJ;IACJ,CAAC,CAAC,CAACuD,MAAM,CAAC,CAAC;EACf,CAAC;EAAA,OAAAzE,WAAA;AAAA;AAIL,OAAO,eAAe0E,aAAaA,CAC/Bb,QAAoB,EACpB5D,MAAc,EACS;EACvB,IAAM0E,cAAc,GAAG,WAAW,GAAG1E,MAAM;EAC3C,MAAM4D,QAAQ,CAACe,cAAc,CAAC;IAC1B,CAACD,cAAc,GAAG;MACdE,MAAM,EAAElF;IACZ;EACJ,CAAC,CAAC;EACF,IAAMO,UAAyC,GAAG2D,QAAQ,CAACiB,WAAW,CAACH,cAAc,CAAC;EAEtF,IAAMI,OAAO,GAAG,IAAI/E,WAAW,CAC3BC,MAAM,EACNC,UACJ,CAAC;;EAGD;AACJ;AACA;AACA;AACA;EACI,IAAIkC,IAAI,GAAG,KAAK;EAChB,IAAI4C,UAAe,GAAGxC,SAAS;EAC/B,OAAO,CAACJ,IAAI,EAAE;IACV,IAAM6C,MAAM,GAAG,MAAM/F,wBAAwB,CACzCgB,UAAU,CAACgF,eAAe,EAC1B,IAAI,EACJF,UACJ,CAAC;IACDA,UAAU,GAAGC,MAAM,CAACD,UAAU;IAC9B,IAAMG,SAAS,GAAGF,MAAM,CAACE,SAAS;IAClC,IAAIA,SAAS,CAACjD,MAAM,KAAK,CAAC,EAAE;MACxBE,IAAI,GAAG,IAAI;IACf,CAAC,MAAM;MACH,KAAK,IAAIM,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGyC,SAAS,CAACjD,MAAM,EAAEQ,KAAK,EAAE,EAAE;QACnD,IAAM0C,QAAQ,GAAGD,SAAS,CAACzC,KAAK,CAAC;QACjCjB,wBAAwB,CAACsD,OAAO,CAAC3E,MAAM,EAAEgF,QAAQ,CAAC1D,GAAG,CAAC;MAC1D;IACJ;EACJ;EACAqD,OAA
O,CAACxE,SAAS,GAAG,IAAI;EAExB,IAAM8E,KAAK,GAAG,IAAIC,KAAK,CACnBP,OAAO,EACP;IACIxB,GAAGA,CAACgC,MAAM,EAAEC,QAAa,EAAE;MACvB,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;QAC9B,OAAOD,MAAM,CAACC,QAAQ,CAAC;MAC3B;MACA,IAAKT,OAAO,CAASS,QAAQ,CAAC,EAAE;QAC5B,IAAMC,GAAG,GAAIV,OAAO,CAASS,QAAQ,CAAC;QACtC,IAAI,OAAOC,GAAG,KAAK,UAAU,EAAE;UAC3B,OAAOA,GAAG,CAACC,IAAI,CAACX,OAAO,CAAC;QAC5B,CAAC,MAAM;UACH,OAAOU,GAAG;QACd;MACJ;MACA,IAAME,QAAQ,GAAGH,QAAQ,CAACI,MAAM,CAACJ,QAAQ,CAACtD,MAAM,GAAG,CAAC,CAAC;MACrD,IAAIsD,QAAQ,CAACK,QAAQ,CAAC,IAAI,CAAC,EAAE;QACzB,IAAMC,GAAG,GAAGN,QAAQ,CAACO,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QACjC,OAAOhB,OAAO,CAACrB,KAAK,CAACoC,GAAU,CAAC;MACpC,CAAC,MAAM,IAAIH,QAAQ,KAAK,GAAG,EAAE;QACzB,IAAMG,IAAG,GAAGN,QAAQ,CAACO,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;QACjC,OAAOhB,OAAO,CAACtB,IAAI,CAACqC,IAAU,CAAC;MACnC,CAAC,MAAM;QACH,OAAOf,OAAO,CAACxB,GAAG,CAACiC,QAAe,CAAC;MACvC;IACJ,CAAC;IACD3D,GAAGA,CAAC0D,MAAM,EAAE1C,QAAQ,EAAEmD,QAAQ,EAAE;MAC5B,MAAM,IAAIC,KAAK,CAAC,yBAAyB,CAAC;IAC9C;EACJ,CACJ,CAAC;EAEDnG,cAAc,CAAC,eAAe,EAAE;IAC5BI,UAAU;IACVgG,KAAK,EAAEb;EACX,CAAC,CAAC;EAEF,OAAOA,KAAK;AAChB;AAGA,OAAO,SAAS5D,wBAAwBA,CACpCyE,KAAQ,EACRC,UAA8B,EAChC;EACE,KAAK,IAAIzD,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGyD,UAAU,CAACjE,MAAM,EAAEQ,KAAK,EAAE,EAAE;IACpD,IAAMnB,SAAS,GAAG4E,UAAU,CAACzD,KAAK,CAAC;IACnCrD,WAAW,CAAC6G,KAAK,EAAE3E,SAAS,CAACuB,CAAC,EAAEtD,KAAK,CAAC+B,SAAS,CAACwB,CAAC,CAAC,CAAC;EACvD;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/state/types.js b/dist/esm/plugins/state/types.js deleted file mode 100644 index f4623599160..00000000000 --- a/dist/esm/plugins/state/types.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/dist/esm/plugins/state/types.js.map b/dist/esm/plugins/state/types.js.map deleted file mode 100644 index b7684b38e0f..00000000000 --- a/dist/esm/plugins/state/types.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"types.js","names":[],"sources":["../../../../src/plugins/state/types.ts"],"sourcesContent":["import { DeepReadonly } from '../../types';\n\n/**\n * \n */\nexport type RxStateDocument = {\n /**\n * Ensures that when multiple\n * javascript realms write at the same time,\n * we do not overwrite each other but instead\n * one write must conflict-error and retry.\n * The clock value is also the primary key.\n * The clock value contains incremental numbers\n * in a string format like '0001', '0123'...\n */\n id: string;\n /**\n * Id of the JavaScript Instance of RxState\n * that did the write. Used to optimise performance\n * by not running these modifiers twice.\n */\n sId: string;\n ops: RxStateOperation[]\n};\n\n\nexport type RxStateOperation = {\n k: string;\n v: any;\n};\n\nexport type RxStateModifier = (preValue: DeepReadonly) => any;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-denokv/denokv-helper.js b/dist/esm/plugins/storage-denokv/denokv-helper.js deleted file mode 100644 index e59c08ab0aa..00000000000 --- a/dist/esm/plugins/storage-denokv/denokv-helper.js +++ /dev/null @@ -1,22 +0,0 @@ -export var RX_STORAGE_NAME_DENOKV = 'denokv'; -export function getDenoKVIndexName(index) { - return index.join('|'); -} - -/** - * Used for non-index rows that contain the document data, - * not just a documentId - */ -export var DENOKV_DOCUMENT_ROOT_PATH = '||'; -export var CLEANUP_INDEX = ['_deleted', '_meta.lwt']; - -/** - * Get the global Deno variable from globalThis.Deno - * so that compiling with plain typescript does not fail. - * TODO download the deno typings from somewhere - * and use them. 
- */ -export function getDenoGlobal() { - return globalThis.Deno; -} -//# sourceMappingURL=denokv-helper.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-denokv/denokv-helper.js.map b/dist/esm/plugins/storage-denokv/denokv-helper.js.map deleted file mode 100644 index 771a9fbd7bf..00000000000 --- a/dist/esm/plugins/storage-denokv/denokv-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"denokv-helper.js","names":["RX_STORAGE_NAME_DENOKV","getDenoKVIndexName","index","join","DENOKV_DOCUMENT_ROOT_PATH","CLEANUP_INDEX","getDenoGlobal","globalThis","Deno"],"sources":["../../../../src/plugins/storage-denokv/denokv-helper.ts"],"sourcesContent":["export const RX_STORAGE_NAME_DENOKV = 'denokv';\n\nexport function getDenoKVIndexName(index: string[]): string {\n return index.join('|');\n}\n\n/**\n * Used for non-index rows that contain the document data,\n * not just a documentId\n */\nexport const DENOKV_DOCUMENT_ROOT_PATH = '||';\n\nexport const CLEANUP_INDEX: string[] = ['_deleted', '_meta.lwt'];\n\n\n/**\n * Get the global Deno variable from globalThis.Deno\n * so that compiling with plain typescript does not fail.\n * TODO download the deno typings from somewhere\n * and use them.\n */\nexport function getDenoGlobal(): any {\n return (globalThis as any).Deno;\n}\n"],"mappings":"AAAA,OAAO,IAAMA,sBAAsB,GAAG,QAAQ;AAE9C,OAAO,SAASC,kBAAkBA,CAACC,KAAe,EAAU;EACxD,OAAOA,KAAK,CAACC,IAAI,CAAC,GAAG,CAAC;AAC1B;;AAEA;AACA;AACA;AACA;AACA,OAAO,IAAMC,yBAAyB,GAAG,IAAI;AAE7C,OAAO,IAAMC,aAAuB,GAAG,CAAC,UAAU,EAAE,WAAW,CAAC;;AAGhE;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASC,aAAaA,CAAA,EAAQ;EACjC,OAAQC,UAAU,CAASC,IAAI;AACnC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-denokv/denokv-query.js b/dist/esm/plugins/storage-denokv/denokv-query.js deleted file mode 100644 index 31ea4d19f2e..00000000000 --- a/dist/esm/plugins/storage-denokv/denokv-query.js +++ /dev/null @@ -1,85 +0,0 @@ -import { 
changeIndexableStringByOneQuantum, getStartIndexStringFromLowerBound, getStartIndexStringFromUpperBound } from "../../custom-index.js"; -import { ensureNotFalsy } from "../../plugins/utils/index.js"; -import { getQueryMatcher, getSortComparator } from "../../rx-query-helper.js"; -import { DENOKV_DOCUMENT_ROOT_PATH, getDenoKVIndexName } from "./denokv-helper.js"; -export async function queryDenoKV(instance, preparedQuery) { - var queryPlan = preparedQuery.queryPlan; - var query = preparedQuery.query; - var skip = query.skip ? query.skip : 0; - var limit = query.limit ? query.limit : Infinity; - var skipPlusLimit = skip + limit; - var queryPlanFields = queryPlan.index; - var mustManuallyResort = !queryPlan.sortSatisfiedByIndex; - var queryMatcher = false; - if (!queryPlan.selectorSatisfiedByIndex) { - queryMatcher = getQueryMatcher(instance.schema, preparedQuery.query); - } - var kv = await instance.kvPromise; - var indexForName = queryPlanFields.slice(0); - var indexName = getDenoKVIndexName(indexForName); - var indexMeta = ensureNotFalsy(instance.internals.indexes[indexName]); - var lowerBound = queryPlan.startKeys; - var lowerBoundString = getStartIndexStringFromLowerBound(instance.schema, indexForName, lowerBound); - if (!queryPlan.inclusiveStart) { - lowerBoundString = changeIndexableStringByOneQuantum(lowerBoundString, 1); - } - var upperBound = queryPlan.endKeys; - var upperBoundString = getStartIndexStringFromUpperBound(instance.schema, indexForName, upperBound); - if (queryPlan.inclusiveEnd) { - upperBoundString = changeIndexableStringByOneQuantum(upperBoundString, +1); - } - var result = []; - - /** - * TODO for whatever reason the keySelectors like firstGreaterThan etc. - * do not work properly. So we have to hack here to find the correct - * document in case lowerBoundString===upperBoundString. - * This likely must be fixed in the foundationdb library. 
- * When it is fixed, we do not need this if-case and instead - * can rely on .getRangeBatch() in all cases. - */ - if (lowerBoundString === upperBoundString) { - var singleDocResult = await kv.get([instance.keySpace, indexMeta.indexId, lowerBoundString], instance.kvOptions); - if (singleDocResult.value) { - var docId = singleDocResult.value; - var docDataResult = await kv.get([instance.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId], instance.kvOptions); - var docData = ensureNotFalsy(docDataResult.value); - if (!queryMatcher || queryMatcher(docData)) { - result.push(docData); - } - } - return { - documents: result - }; - } - var range = kv.list({ - start: [instance.keySpace, indexMeta.indexId, lowerBoundString], - end: [instance.keySpace, indexMeta.indexId, upperBoundString] - }, { - consistency: instance.settings.consistencyLevel, - limit: !mustManuallyResort && queryPlan.selectorSatisfiedByIndex ? skipPlusLimit : undefined, - batchSize: instance.settings.batchSize - }); - for await (var indexDocEntry of range) { - var _docId = indexDocEntry.value; - var _docDataResult = await kv.get([instance.keySpace, DENOKV_DOCUMENT_ROOT_PATH, _docId], instance.kvOptions); - var _docData = ensureNotFalsy(_docDataResult.value); - if (!queryMatcher || queryMatcher(_docData)) { - result.push(_docData); - } - if (!mustManuallyResort && result.length === skipPlusLimit) { - break; - } - } - if (mustManuallyResort) { - var sortComparator = getSortComparator(instance.schema, preparedQuery.query); - result = result.sort(sortComparator); - } - - // apply skip and limit boundaries. 
- result = result.slice(skip, skipPlusLimit); - return { - documents: result - }; -} -//# sourceMappingURL=denokv-query.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-denokv/denokv-query.js.map b/dist/esm/plugins/storage-denokv/denokv-query.js.map deleted file mode 100644 index 41d4b07231c..00000000000 --- a/dist/esm/plugins/storage-denokv/denokv-query.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"denokv-query.js","names":["changeIndexableStringByOneQuantum","getStartIndexStringFromLowerBound","getStartIndexStringFromUpperBound","ensureNotFalsy","getQueryMatcher","getSortComparator","DENOKV_DOCUMENT_ROOT_PATH","getDenoKVIndexName","queryDenoKV","instance","preparedQuery","queryPlan","query","skip","limit","Infinity","skipPlusLimit","queryPlanFields","index","mustManuallyResort","sortSatisfiedByIndex","queryMatcher","selectorSatisfiedByIndex","schema","kv","kvPromise","indexForName","slice","indexName","indexMeta","internals","indexes","lowerBound","startKeys","lowerBoundString","inclusiveStart","upperBound","endKeys","upperBoundString","inclusiveEnd","result","singleDocResult","get","keySpace","indexId","kvOptions","value","docId","docDataResult","docData","push","documents","range","list","start","end","consistency","settings","consistencyLevel","undefined","batchSize","indexDocEntry","length","sortComparator","sort"],"sources":["../../../../src/plugins/storage-denokv/denokv-query.ts"],"sourcesContent":["import {\n changeIndexableStringByOneQuantum,\n getStartIndexStringFromLowerBound,\n getStartIndexStringFromUpperBound\n} from '../../custom-index.ts';\nimport type {\n PreparedQuery,\n QueryMatcher,\n RxDocumentData,\n RxStorageQueryResult\n} from '../../types/index.d.ts';\nimport { ensureNotFalsy } from '../../plugins/utils/index.ts';\nimport { getQueryMatcher, getSortComparator } from '../../rx-query-helper.ts';\nimport { RxStorageInstanceDenoKV } from \"./rx-storage-instance-denokv.ts\";\nimport { DENOKV_DOCUMENT_ROOT_PATH, 
getDenoKVIndexName } from \"./denokv-helper.ts\";\n\nexport async function queryDenoKV(\n instance: RxStorageInstanceDenoKV,\n preparedQuery: PreparedQuery\n): Promise> {\n const queryPlan = preparedQuery.queryPlan;\n const query = preparedQuery.query;\n const skip = query.skip ? query.skip : 0;\n const limit = query.limit ? query.limit : Infinity;\n const skipPlusLimit = skip + limit;\n const queryPlanFields: string[] = queryPlan.index;\n const mustManuallyResort = !queryPlan.sortSatisfiedByIndex;\n\n\n let queryMatcher: QueryMatcher> | false = false;\n if (!queryPlan.selectorSatisfiedByIndex) {\n queryMatcher = getQueryMatcher(\n instance.schema,\n preparedQuery.query\n );\n }\n\n const kv = await instance.kvPromise;\n\n const indexForName = queryPlanFields.slice(0);\n const indexName = getDenoKVIndexName(indexForName);\n const indexMeta = ensureNotFalsy(instance.internals.indexes[indexName]);\n\n let lowerBound: any[] = queryPlan.startKeys;\n let lowerBoundString = getStartIndexStringFromLowerBound(\n instance.schema,\n indexForName,\n lowerBound\n );\n if (!queryPlan.inclusiveStart) {\n lowerBoundString = changeIndexableStringByOneQuantum(lowerBoundString, 1);\n }\n\n let upperBound: any[] = queryPlan.endKeys;\n let upperBoundString = getStartIndexStringFromUpperBound(\n instance.schema,\n indexForName,\n upperBound\n );\n\n if (queryPlan.inclusiveEnd) {\n upperBoundString = changeIndexableStringByOneQuantum(upperBoundString, +1);\n }\n\n\n let result: RxDocumentData[] = [];\n\n\n /**\n * TODO for whatever reason the keySelectors like firstGreaterThan etc.\n * do not work properly. 
So we have to hack here to find the correct\n * document in case lowerBoundString===upperBoundString.\n * This likely must be fixed in the foundationdb library.\n * When it is fixed, we do not need this if-case and instead\n * can rely on .getRangeBatch() in all cases.\n */\n if (lowerBoundString === upperBoundString) {\n const singleDocResult = await kv.get([instance.keySpace, indexMeta.indexId, lowerBoundString], instance.kvOptions);\n if (singleDocResult.value) {\n const docId: string = singleDocResult.value;\n const docDataResult = await kv.get([instance.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId], instance.kvOptions);\n const docData = ensureNotFalsy(docDataResult.value);\n if (!queryMatcher || queryMatcher(docData)) {\n result.push(docData);\n }\n }\n return {\n documents: result\n };\n }\n\n const range = kv.list({\n start: [instance.keySpace, indexMeta.indexId, lowerBoundString],\n end: [instance.keySpace, indexMeta.indexId, upperBoundString]\n }, {\n consistency: instance.settings.consistencyLevel,\n limit: (!mustManuallyResort && queryPlan.selectorSatisfiedByIndex) ? 
skipPlusLimit : undefined,\n batchSize: instance.settings.batchSize\n });\n\n for await (const indexDocEntry of range) {\n const docId = indexDocEntry.value;\n const docDataResult = await kv.get([instance.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId], instance.kvOptions);\n const docData = ensureNotFalsy(docDataResult.value);\n if (!queryMatcher || queryMatcher(docData)) {\n result.push(docData);\n }\n if (\n !mustManuallyResort &&\n result.length === skipPlusLimit\n ) {\n break;\n }\n }\n\n if (mustManuallyResort) {\n const sortComparator = getSortComparator(instance.schema, preparedQuery.query);\n result = result.sort(sortComparator);\n }\n\n // apply skip and limit boundaries.\n result = result.slice(skip, skipPlusLimit);\n\n return {\n documents: result\n };\n}\n"],"mappings":"AAAA,SACIA,iCAAiC,EACjCC,iCAAiC,EACjCC,iCAAiC,QAC9B,uBAAuB;AAO9B,SAASC,cAAc,QAAQ,8BAA8B;AAC7D,SAASC,eAAe,EAAEC,iBAAiB,QAAQ,0BAA0B;AAE7E,SAASC,yBAAyB,EAAEC,kBAAkB,QAAQ,oBAAoB;AAElF,OAAO,eAAeC,WAAWA,CAC7BC,QAA4C,EAC5CC,aAAuC,EACC;EACxC,IAAMC,SAAS,GAAGD,aAAa,CAACC,SAAS;EACzC,IAAMC,KAAK,GAAGF,aAAa,CAACE,KAAK;EACjC,IAAMC,IAAI,GAAGD,KAAK,CAACC,IAAI,GAAGD,KAAK,CAACC,IAAI,GAAG,CAAC;EACxC,IAAMC,KAAK,GAAGF,KAAK,CAACE,KAAK,GAAGF,KAAK,CAACE,KAAK,GAAGC,QAAQ;EAClD,IAAMC,aAAa,GAAGH,IAAI,GAAGC,KAAK;EAClC,IAAMG,eAAyB,GAAGN,SAAS,CAACO,KAAK;EACjD,IAAMC,kBAAkB,GAAG,CAACR,SAAS,CAACS,oBAAoB;EAG1D,IAAIC,YAA6D,GAAG,KAAK;EACzE,IAAI,CAACV,SAAS,CAACW,wBAAwB,EAAE;IACrCD,YAAY,GAAGjB,eAAe,CAC1BK,QAAQ,CAACc,MAAM,EACfb,aAAa,CAACE,KAClB,CAAC;EACL;EAEA,IAAMY,EAAE,GAAG,MAAMf,QAAQ,CAACgB,SAAS;EAEnC,IAAMC,YAAY,GAAGT,eAAe,CAACU,KAAK,CAAC,CAAC,CAAC;EAC7C,IAAMC,SAAS,GAAGrB,kBAAkB,CAACmB,YAAY,CAAC;EAClD,IAAMG,SAAS,GAAG1B,cAAc,CAACM,QAAQ,CAACqB,SAAS,CAACC,OAAO,CAACH,SAAS,CAAC,CAAC;EAEvE,IAAII,UAAiB,GAAGrB,SAAS,CAACsB,SAAS;EAC3C,IAAIC,gBAAgB,GAAGjC,iCAAiC,CACpDQ,QAAQ,CAACc,MAAM,EACfG,YAAY,EACZM,UACJ,CAAC;EACD,IAAI,CAACrB,SAAS,CAACwB,cAAc,EAAE;IAC3BD,gBAAgB,GAAGlC,iCAAiC,CAACkC,gBAAgB,EAAE,CAAC,CAAC;EAC7E;EAEA,IAAIE,UAAiB,GAAGzB,SAAS,
CAAC0B,OAAO;EACzC,IAAIC,gBAAgB,GAAGpC,iCAAiC,CACpDO,QAAQ,CAACc,MAAM,EACfG,YAAY,EACZU,UACJ,CAAC;EAED,IAAIzB,SAAS,CAAC4B,YAAY,EAAE;IACxBD,gBAAgB,GAAGtC,iCAAiC,CAACsC,gBAAgB,EAAE,CAAC,CAAC,CAAC;EAC9E;EAGA,IAAIE,MAAmC,GAAG,EAAE;;EAG5C;AACJ;AACA;AACA;AACA;AACA;AACA;AACA;EACI,IAAIN,gBAAgB,KAAKI,gBAAgB,EAAE;IACvC,IAAMG,eAAe,GAAG,MAAMjB,EAAE,CAACkB,GAAG,CAAC,CAACjC,QAAQ,CAACkC,QAAQ,EAAEd,SAAS,CAACe,OAAO,EAAEV,gBAAgB,CAAC,EAAEzB,QAAQ,CAACoC,SAAS,CAAC;IAClH,IAAIJ,eAAe,CAACK,KAAK,EAAE;MACvB,IAAMC,KAAa,GAAGN,eAAe,CAACK,KAAK;MAC3C,IAAME,aAAa,GAAG,MAAMxB,EAAE,CAACkB,GAAG,CAAC,CAACjC,QAAQ,CAACkC,QAAQ,EAAErC,yBAAyB,EAAEyC,KAAK,CAAC,EAAEtC,QAAQ,CAACoC,SAAS,CAAC;MAC7G,IAAMI,OAAO,GAAG9C,cAAc,CAAC6C,aAAa,CAACF,KAAK,CAAC;MACnD,IAAI,CAACzB,YAAY,IAAIA,YAAY,CAAC4B,OAAO,CAAC,EAAE;QACxCT,MAAM,CAACU,IAAI,CAACD,OAAO,CAAC;MACxB;IACJ;IACA,OAAO;MACHE,SAAS,EAAEX;IACf,CAAC;EACL;EAEA,IAAMY,KAAK,GAAG5B,EAAE,CAAC6B,IAAI,CAAC;IAClBC,KAAK,EAAE,CAAC7C,QAAQ,CAACkC,QAAQ,EAAEd,SAAS,CAACe,OAAO,EAAEV,gBAAgB,CAAC;IAC/DqB,GAAG,EAAE,CAAC9C,QAAQ,CAACkC,QAAQ,EAAEd,SAAS,CAACe,OAAO,EAAEN,gBAAgB;EAChE,CAAC,EAAE;IACCkB,WAAW,EAAE/C,QAAQ,CAACgD,QAAQ,CAACC,gBAAgB;IAC/C5C,KAAK,EAAG,CAACK,kBAAkB,IAAIR,SAAS,CAACW,wBAAwB,GAAIN,aAAa,GAAG2C,SAAS;IAC9FC,SAAS,EAAEnD,QAAQ,CAACgD,QAAQ,CAACG;EACjC,CAAC,CAAC;EAEF,WAAW,IAAMC,aAAa,IAAIT,KAAK,EAAE;IACrC,IAAML,MAAK,GAAGc,aAAa,CAACf,KAAK;IACjC,IAAME,cAAa,GAAG,MAAMxB,EAAE,CAACkB,GAAG,CAAC,CAACjC,QAAQ,CAACkC,QAAQ,EAAErC,yBAAyB,EAAEyC,MAAK,CAAC,EAAEtC,QAAQ,CAACoC,SAAS,CAAC;IAC7G,IAAMI,QAAO,GAAG9C,cAAc,CAAC6C,cAAa,CAACF,KAAK,CAAC;IACnD,IAAI,CAACzB,YAAY,IAAIA,YAAY,CAAC4B,QAAO,CAAC,EAAE;MACxCT,MAAM,CAACU,IAAI,CAACD,QAAO,CAAC;IACxB;IACA,IACI,CAAC9B,kBAAkB,IACnBqB,MAAM,CAACsB,MAAM,KAAK9C,aAAa,EACjC;MACE;IACJ;EACJ;EAEA,IAAIG,kBAAkB,EAAE;IACpB,IAAM4C,cAAc,GAAG1D,iBAAiB,CAACI,QAAQ,CAACc,MAAM,EAAEb,aAAa,CAACE,KAAK,CAAC;IAC9E4B,MAAM,GAAGA,MAAM,CAACwB,IAAI,CAACD,cAAc,CAAC;EACxC;;EAEA;EACAvB,MAAM,GAAGA,MAAM,CAACb,KAAK,CAACd,IAAI,EAAEG,aAAa,CAAC;EAE1C,OAAO;IACHmC,SAAS,EAAEX;EACf,CAAC;AACL","ignoreList":
[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-denokv/denokv-types.js b/dist/esm/plugins/storage-denokv/denokv-types.js deleted file mode 100644 index e59754b918a..00000000000 --- a/dist/esm/plugins/storage-denokv/denokv-types.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=denokv-types.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-denokv/denokv-types.js.map b/dist/esm/plugins/storage-denokv/denokv-types.js.map deleted file mode 100644 index 4f247a383aa..00000000000 --- a/dist/esm/plugins/storage-denokv/denokv-types.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"denokv-types.js","names":[],"sources":["../../../../src/plugins/storage-denokv/denokv-types.ts"],"sourcesContent":["import type { RxDocumentData } from \"../../types/index.d.ts\";\n\nexport type DenoKVSettings = {\n consistencyLevel: \"strong\" | \"eventual\";\n openKvPath?: string;\n batchSize?: number;\n};\nexport type DenoKVStorageInternals = {\n indexes: {\n [indexName: string]: DenoKVIndexMeta;\n };\n};\n\nexport type DenoKVIndexMeta = {\n indexId: string;\n indexName: string;\n index: string[];\n getIndexableString: (doc: RxDocumentData) => string;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-denokv/index.js b/dist/esm/plugins/storage-denokv/index.js deleted file mode 100644 index c2a327859fe..00000000000 --- a/dist/esm/plugins/storage-denokv/index.js +++ /dev/null @@ -1,24 +0,0 @@ -import { ensureRxStorageInstanceParamsAreCorrect } from "../../rx-storage-helper.js"; -import { RX_STORAGE_NAME_DENOKV } from "./denokv-helper.js"; -import { createDenoKVStorageInstance } from "./rx-storage-instance-denokv.js"; -import { RXDB_VERSION } from "../utils/utils-rxdb-version.js"; -export var RxStorageDenoKV = /*#__PURE__*/function () { - function RxStorageDenoKV(settings) { - this.name = RX_STORAGE_NAME_DENOKV; - this.rxdbVersion = RXDB_VERSION; - this.settings = 
settings; - } - var _proto = RxStorageDenoKV.prototype; - _proto.createStorageInstance = function createStorageInstance(params) { - ensureRxStorageInstanceParamsAreCorrect(params); - return createDenoKVStorageInstance(this, params, this.settings); - }; - return RxStorageDenoKV; -}(); -export function getRxStorageDenoKV(settings = { - consistencyLevel: 'strong' -}) { - var storage = new RxStorageDenoKV(settings); - return storage; -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-denokv/index.js.map b/dist/esm/plugins/storage-denokv/index.js.map deleted file mode 100644 index 84248d56a10..00000000000 --- a/dist/esm/plugins/storage-denokv/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["ensureRxStorageInstanceParamsAreCorrect","RX_STORAGE_NAME_DENOKV","createDenoKVStorageInstance","RXDB_VERSION","RxStorageDenoKV","settings","name","rxdbVersion","_proto","prototype","createStorageInstance","params","getRxStorageDenoKV","consistencyLevel","storage"],"sources":["../../../../src/plugins/storage-denokv/index.ts"],"sourcesContent":["import type {\n RxStorage,\n RxStorageInstanceCreationParams\n} from '../../types/index.d.ts';\nimport { ensureRxStorageInstanceParamsAreCorrect } from '../../rx-storage-helper.ts';\nimport type { DenoKVSettings, DenoKVStorageInternals } from './denokv-types.ts';\nimport { RX_STORAGE_NAME_DENOKV } from \"./denokv-helper.ts\";\nimport { RxStorageInstanceDenoKV, createDenoKVStorageInstance } from \"./rx-storage-instance-denokv.ts\";\nimport { RXDB_VERSION } from '../utils/utils-rxdb-version.ts';\n\n\n\nexport class RxStorageDenoKV implements RxStorage, DenoKVSettings> {\n public name = RX_STORAGE_NAME_DENOKV;\n public readonly rxdbVersion = RXDB_VERSION;\n\n constructor(\n public settings: DenoKVSettings\n ) { }\n\n public createStorageInstance(\n params: RxStorageInstanceCreationParams\n ): Promise> {\n ensureRxStorageInstanceParamsAreCorrect(params);\n 
return createDenoKVStorageInstance(this, params, this.settings);\n }\n}\n\n\nexport function getRxStorageDenoKV(\n settings: DenoKVSettings = {\n consistencyLevel: 'strong'\n }\n): RxStorageDenoKV {\n const storage = new RxStorageDenoKV(settings);\n return storage;\n}\n"],"mappings":"AAIA,SAASA,uCAAuC,QAAQ,4BAA4B;AAEpF,SAASC,sBAAsB,QAAQ,oBAAoB;AAC3D,SAAkCC,2BAA2B,QAAQ,iCAAiC;AACtG,SAASC,YAAY,QAAQ,gCAAgC;AAI7D,WAAaC,eAAe;EAIxB,SAAAA,gBACWC,QAAwB,EACjC;IAAA,KALKC,IAAI,GAAGL,sBAAsB;IAAA,KACpBM,WAAW,GAAGJ,YAAY;IAAA,KAG/BE,QAAwB,GAAxBA,QAAwB;EAC/B;EAAC,IAAAG,MAAA,GAAAJ,eAAA,CAAAK,SAAA;EAAAD,MAAA,CAEEE,qBAAqB,GAA5B,SAAAA,sBACIC,MAAkE,EACvB;IAC3CX,uCAAuC,CAACW,MAAM,CAAC;IAC/C,OAAOT,2BAA2B,CAAC,IAAI,EAAES,MAAM,EAAE,IAAI,CAACN,QAAQ,CAAC;EACnE,CAAC;EAAA,OAAAD,eAAA;AAAA;AAIL,OAAO,SAASQ,kBAAkBA,CAC9BP,QAAwB,GAAG;EACvBQ,gBAAgB,EAAE;AACtB,CAAC,EACc;EACf,IAAMC,OAAO,GAAG,IAAIV,eAAe,CAACC,QAAQ,CAAC;EAC7C,OAAOS,OAAO;AAClB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-denokv/rx-storage-instance-denokv.js b/dist/esm/plugins/storage-denokv/rx-storage-instance-denokv.js deleted file mode 100644 index cb5ec6ea0a8..00000000000 --- a/dist/esm/plugins/storage-denokv/rx-storage-instance-denokv.js +++ /dev/null @@ -1,308 +0,0 @@ -import { Subject } from 'rxjs'; -import { getPrimaryFieldOfPrimaryKey } from "../../rx-schema-helper.js"; -import { addRxStorageMultiInstanceSupport } from "../../rx-storage-multiinstance.js"; -import { CLEANUP_INDEX, DENOKV_DOCUMENT_ROOT_PATH, RX_STORAGE_NAME_DENOKV, getDenoGlobal, getDenoKVIndexName } from "./denokv-helper.js"; -import { getIndexableStringMonad, getStartIndexStringFromLowerBound } from "../../custom-index.js"; -import { appendToArray, batchArray, toArray } from "../utils/utils-array.js"; -import { ensureNotFalsy } from "../utils/utils-other.js"; -import { categorizeBulkWriteRows } from "../../rx-storage-helper.js"; -import { now } from "../utils/utils-time.js"; -import { queryDenoKV } from "./denokv-query.js"; 
-import { INDEX_MAX } from "../../query-planner.js"; -import { PROMISE_RESOLVE_VOID } from "../utils/utils-promise.js"; -import { flatClone } from "../utils/utils-object.js"; -export var RxStorageInstanceDenoKV = /*#__PURE__*/function () { - function RxStorageInstanceDenoKV(storage, databaseName, collectionName, schema, internals, options, settings, keySpace = ['rxdb', databaseName, collectionName, schema.version].join('|'), kvOptions = { - consistency: settings.consistencyLevel - }) { - this.changes$ = new Subject(); - this.storage = storage; - this.databaseName = databaseName; - this.collectionName = collectionName; - this.schema = schema; - this.internals = internals; - this.options = options; - this.settings = settings; - this.keySpace = keySpace; - this.kvOptions = kvOptions; - this.primaryPath = getPrimaryFieldOfPrimaryKey(this.schema.primaryKey); - this.kvPromise = getDenoGlobal().openKv(settings.openKvPath).then(async kv => { - // insert writeBlockKey - await kv.set([this.keySpace], 1); - return kv; - }); - } - - /** - * DenoKV has no transactions - * so we have to ensure that there is no write in between our queries - * which would confuse RxDB and return wrong query results. - */ - var _proto = RxStorageInstanceDenoKV.prototype; - _proto.retryUntilNoWriteInBetween = async function retryUntilNoWriteInBetween(fn) { - var kv = await this.kvPromise; - while (true) { - var writeBlockKeyBefore = await kv.get([this.keySpace], this.kvOptions); - var writeBlockValueBefore = writeBlockKeyBefore ? writeBlockKeyBefore.value : -1; - var result = await fn(); - var writeBlockKeyAfter = await kv.get([this.keySpace], this.kvOptions); - var writeBlockValueAfter = writeBlockKeyAfter ? 
writeBlockKeyAfter.value : -1; - if (writeBlockValueBefore === writeBlockValueAfter) { - return result; - } - } - }; - _proto.bulkWrite = async function bulkWrite(documentWrites, context) { - var _this = this; - var kv = await this.kvPromise; - var primaryPath = this.primaryPath; - var ret = { - success: [], - error: [] - }; - var batches = batchArray(documentWrites, ensureNotFalsy(this.settings.batchSize)); - - /** - * DenoKV does not have transactions - * so we use a special writeBlock row to ensure - * atomic writes (per document) - * and so that we can do bulkWrites - */ - for (var writeBatch of batches) { - var _loop = async function () { - var writeBlockKey = await kv.get([_this.keySpace], _this.kvOptions); - var docsInDB = new Map(); - - /** - * TODO the max amount for .getMany() is 10 which is defined by deno itself. - * How can this be increased? - */ - var readManyBatches = batchArray(writeBatch, 10); - await Promise.all(readManyBatches.map(async readManyBatch => { - var docsResult = await kv.getMany(readManyBatch.map(writeRow => { - var docId = writeRow.document[primaryPath]; - return [_this.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId]; - })); - docsResult.map(row => { - var docData = row.value; - if (!docData) { - return; - } - var docId = docData[primaryPath]; - docsInDB.set(docId, docData); - }); - })); - var categorized = categorizeBulkWriteRows(_this, _this.primaryPath, docsInDB, writeBatch, context); - var tx = kv.atomic(); - tx = tx.set([_this.keySpace], ensureNotFalsy(writeBlockKey.value) + 1); - tx = tx.check(writeBlockKey); - - // INSERTS - categorized.bulkInsertDocs.forEach(writeRow => { - var docId = writeRow.document[_this.primaryPath]; - ret.success.push(writeRow.document); - - // insert document data - tx = tx.set([_this.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId], writeRow.document); - - // insert secondary indexes - Object.values(_this.internals.indexes).forEach(indexMeta => { - var indexString = 
indexMeta.getIndexableString(writeRow.document); - tx = tx.set([_this.keySpace, indexMeta.indexId, indexString], docId); - }); - }); - // UPDATES - categorized.bulkUpdateDocs.forEach(writeRow => { - var docId = writeRow.document[_this.primaryPath]; - - // insert document data - tx = tx.set([_this.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId], writeRow.document); - - // insert secondary indexes - Object.values(_this.internals.indexes).forEach(indexMeta => { - var oldIndexString = indexMeta.getIndexableString(ensureNotFalsy(writeRow.previous)); - var newIndexString = indexMeta.getIndexableString(writeRow.document); - if (oldIndexString !== newIndexString) { - tx = tx.delete([_this.keySpace, indexMeta.indexId, oldIndexString]); - tx = tx.set([_this.keySpace, indexMeta.indexId, newIndexString], docId); - } - }); - ret.success.push(writeRow.document); - }); - var txResult = await tx.commit(); - if (txResult.ok) { - appendToArray(ret.error, categorized.errors); - if (categorized.eventBulk.events.length > 0) { - var lastState = ensureNotFalsy(categorized.newestRow).document; - categorized.eventBulk.checkpoint = { - id: lastState[primaryPath], - lwt: lastState._meta.lwt - }; - categorized.eventBulk.endTime = now(); - _this.changes$.next(categorized.eventBulk); - } - return 1; // break - } - }; - while (true) { - if (await _loop()) break; - } - } - return ret; - }; - _proto.findDocumentsById = async function findDocumentsById(ids, withDeleted) { - var kv = await this.kvPromise; - var ret = []; - await Promise.all(ids.map(async docId => { - var kvKey = [this.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId]; - var findSingleResult = await kv.get(kvKey, this.kvOptions); - var docInDb = findSingleResult.value; - if (docInDb && (!docInDb._deleted || withDeleted)) { - ret.push(docInDb); - } - })); - return ret; - }; - _proto.query = function query(preparedQuery) { - return this.retryUntilNoWriteInBetween(() => queryDenoKV(this, preparedQuery)); - }; - _proto.count = async function 
count(preparedQuery) { - /** - * At this point in time (end 2023), DenoKV does not support - * range counts. So we have to run a normal query and use the result set length. - * @link https://github.com/denoland/deno/issues/18965 - */ - var result = await this.retryUntilNoWriteInBetween(() => this.query(preparedQuery)); - return { - count: result.documents.length, - mode: 'fast' - }; - }; - _proto.getAttachmentData = function getAttachmentData(documentId, attachmentId, digest) { - throw new Error("Method not implemented."); - }; - _proto.changeStream = function changeStream() { - return this.changes$.asObservable(); - }; - _proto.cleanup = async function cleanup(minimumDeletedTime) { - var _this2 = this; - var maxDeletionTime = now() - minimumDeletedTime; - var kv = await this.kvPromise; - var index = CLEANUP_INDEX; - var indexName = getDenoKVIndexName(index); - var indexMeta = this.internals.indexes[indexName]; - var lowerBoundString = getStartIndexStringFromLowerBound(this.schema, index, [true, - /** - * Do not use 0 here, - * because 1 is the minimum value for _meta.lwt - */ - 1]); - var upperBoundString = getStartIndexStringFromLowerBound(this.schema, index, [true, maxDeletionTime]); - var noMoreUndeleted = true; - var range = kv.list({ - start: [this.keySpace, indexMeta.indexId, lowerBoundString], - end: [this.keySpace, indexMeta.indexId, upperBoundString] - }, { - consistency: this.settings.consistencyLevel, - batchSize: this.settings.batchSize, - limit: this.settings.batchSize - }); - var rangeCount = 0; - var _loop2 = async function () { - rangeCount = rangeCount + 1; - var docId = row.value; - var docDataResult = await kv.get([_this2.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId], _this2.kvOptions); - if (!docDataResult.value) { - return 0; // continue - } - var docData = ensureNotFalsy(docDataResult.value); - if (!docData._deleted || docData._meta.lwt > maxDeletionTime) { - return 0; // continue - } - var tx = kv.atomic(); - tx = tx.check(docDataResult); - tx 
= tx.delete([_this2.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId]); - Object.values(_this2.internals.indexes).forEach(indexMetaInner => { - tx = tx.delete([_this2.keySpace, indexMetaInner.indexId, docId]); - }); - await tx.commit(); - }, - _ret; - for await (var row of range) { - _ret = await _loop2(); - if (_ret === 0) continue; - } - return noMoreUndeleted; - }; - _proto.close = async function close() { - if (this.closed) { - return this.closed; - } - this.closed = (async () => { - this.changes$.complete(); - var kv = await this.kvPromise; - await kv.close(); - })(); - return this.closed; - }; - _proto.remove = async function remove() { - ensureNotClosed(this); - var kv = await this.kvPromise; - var range = kv.list({ - start: [this.keySpace], - end: [this.keySpace, INDEX_MAX] - }, { - consistency: this.settings.consistencyLevel, - batchSize: this.settings.batchSize - }); - var promises = []; - for await (var row of range) { - promises.push(kv.delete(row.key)); - } - await Promise.all(promises); - return this.close(); - }; - _proto.conflictResultionTasks = function conflictResultionTasks() { - return new Subject().asObservable(); - }; - _proto.resolveConflictResultionTask = function resolveConflictResultionTask(_taskSolution) { - return PROMISE_RESOLVE_VOID; - }; - return RxStorageInstanceDenoKV; -}(); -export async function createDenoKVStorageInstance(storage, params, settings) { - settings = flatClone(settings); - if (!settings.batchSize) { - settings.batchSize = 100; - } - var primaryPath = getPrimaryFieldOfPrimaryKey(params.schema.primaryKey); - var indexDBs = {}; - var useIndexes = params.schema.indexes ? 
params.schema.indexes.slice(0) : []; - useIndexes.push([primaryPath]); - var useIndexesFinal = useIndexes.map(index => { - var indexAr = toArray(index); - return indexAr; - }); - useIndexesFinal.push(CLEANUP_INDEX); - useIndexesFinal.forEach((indexAr, indexId) => { - var indexName = getDenoKVIndexName(indexAr); - indexDBs[indexName] = { - indexId: '|' + indexId + '|', - indexName, - getIndexableString: getIndexableStringMonad(params.schema, indexAr), - index: indexAr - }; - }); - var internals = { - indexes: indexDBs - }; - var instance = new RxStorageInstanceDenoKV(storage, params.databaseName, params.collectionName, params.schema, internals, params.options, settings); - await addRxStorageMultiInstanceSupport(RX_STORAGE_NAME_DENOKV, params, instance); - return Promise.resolve(instance); -} -function ensureNotClosed(instance) { - if (instance.closed) { - throw new Error('RxStorageInstanceDenoKV is closed ' + instance.databaseName + '-' + instance.collectionName); - } -} -//# sourceMappingURL=rx-storage-instance-denokv.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-denokv/rx-storage-instance-denokv.js.map b/dist/esm/plugins/storage-denokv/rx-storage-instance-denokv.js.map deleted file mode 100644 index 62a210cd8cb..00000000000 --- a/dist/esm/plugins/storage-denokv/rx-storage-instance-denokv.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"rx-storage-instance-denokv.js","names":["Subject","getPrimaryFieldOfPrimaryKey","addRxStorageMultiInstanceSupport","CLEANUP_INDEX","DENOKV_DOCUMENT_ROOT_PATH","RX_STORAGE_NAME_DENOKV","getDenoGlobal","getDenoKVIndexName","getIndexableStringMonad","getStartIndexStringFromLowerBound","appendToArray","batchArray","toArray","ensureNotFalsy","categorizeBulkWriteRows","now","queryDenoKV","INDEX_MAX","PROMISE_RESOLVE_VOID","flatClone","RxStorageInstanceDenoKV","storage","databaseName","collectionName","schema","internals","options","settings","keySpace","version","join","kvOptions","consistency","consistencyLevel","changes$","primaryPath","primaryKey","kvPromise","openKv","openKvPath","then","kv","set","_proto","prototype","retryUntilNoWriteInBetween","fn","writeBlockKeyBefore","get","writeBlockValueBefore","value","result","writeBlockKeyAfter","writeBlockValueAfter","bulkWrite","documentWrites","context","_this","ret","success","error","batches","batchSize","writeBatch","_loop","writeBlockKey","docsInDB","Map","readManyBatches","Promise","all","map","readManyBatch","docsResult","getMany","writeRow","docId","document","row","docData","categorized","tx","atomic","check","bulkInsertDocs","forEach","push","Object","values","indexes","indexMeta","indexString","getIndexableString","indexId","bulkUpdateDocs","oldIndexString","previous","newIndexString","delete","txResult","commit","ok","errors","eventBulk","events","length","lastState","newestRow","checkpoint","id","lwt","_meta","endTime","next","findDocumentsById","ids","withDeleted","kvKey","findSingleResult","docInDb","_deleted","query","preparedQuery","count","documents","mode","getAttachmentData","documentId","attachmentId","digest","Error","changeStream","asObservable","cleanup","minimumDeletedTime","_this2","maxDeletionTime","index","indexName","lowerBoundString","upperBoundString","noMoreUndeleted","range","list","start","end","limit","rangeCount","_loop2","docDataResult","indexMetaInner","_ret","cl
ose","closed","complete","remove","ensureNotClosed","promises","key","conflictResultionTasks","resolveConflictResultionTask","_taskSolution","createDenoKVStorageInstance","params","indexDBs","useIndexes","slice","useIndexesFinal","indexAr","instance","resolve"],"sources":["../../../../src/plugins/storage-denokv/rx-storage-instance-denokv.ts"],"sourcesContent":["\nimport {\n Subject,\n Observable\n} from 'rxjs';\nimport type {\n RxStorageInstance,\n RxStorageChangeEvent,\n RxDocumentData,\n BulkWriteRow,\n RxStorageBulkWriteResponse,\n RxStorageQueryResult,\n RxJsonSchema,\n RxStorageInstanceCreationParams,\n EventBulk,\n StringKeys,\n RxConflictResultionTaskSolution,\n RxStorageDefaultCheckpoint,\n RxStorageCountResult,\n RxConflictResultionTask,\n PreparedQuery\n} from '../../types/index.d.ts';\nimport { getPrimaryFieldOfPrimaryKey } from '../../rx-schema-helper.ts';\nimport { addRxStorageMultiInstanceSupport } from '../../rx-storage-multiinstance.ts';\nimport type { DenoKVIndexMeta, DenoKVSettings, DenoKVStorageInternals } from './denokv-types.ts';\nimport { RxStorageDenoKV } from './index.ts';\nimport { CLEANUP_INDEX, DENOKV_DOCUMENT_ROOT_PATH, RX_STORAGE_NAME_DENOKV, getDenoGlobal, getDenoKVIndexName } from \"./denokv-helper.ts\";\nimport { getIndexableStringMonad, getStartIndexStringFromLowerBound } from \"../../custom-index.ts\";\nimport { appendToArray, batchArray, lastOfArray, toArray } from \"../utils/utils-array.ts\";\nimport { ensureNotFalsy } from \"../utils/utils-other.ts\";\nimport { categorizeBulkWriteRows } from \"../../rx-storage-helper.ts\";\nimport { now } from \"../utils/utils-time.ts\";\nimport { queryDenoKV } from \"./denokv-query.ts\";\nimport { INDEX_MAX } from \"../../query-planner.ts\";\nimport { PROMISE_RESOLVE_VOID } from \"../utils/utils-promise.ts\";\nimport { flatClone } from \"../utils/utils-object.ts\";\n\n\n\nexport class RxStorageInstanceDenoKV implements RxStorageInstance<\n RxDocType,\n DenoKVStorageInternals,\n 
DenoKVSettings,\n RxStorageDefaultCheckpoint\n> {\n public readonly primaryPath: StringKeys>;\n private changes$: Subject>, RxStorageDefaultCheckpoint>> = new Subject();\n public closed?: Promise;\n public readonly kvPromise: Promise;\n\n constructor(\n public readonly storage: RxStorageDenoKV,\n public readonly databaseName: string,\n public readonly collectionName: string,\n public readonly schema: Readonly>>,\n public readonly internals: DenoKVStorageInternals,\n public readonly options: Readonly,\n public readonly settings: DenoKVSettings,\n public readonly keySpace = ['rxdb', databaseName, collectionName, schema.version].join('|'),\n public readonly kvOptions = { consistency: settings.consistencyLevel }\n ) {\n this.primaryPath = getPrimaryFieldOfPrimaryKey(this.schema.primaryKey);\n this.kvPromise = getDenoGlobal().openKv(settings.openKvPath).then(async (kv: any) => {\n // insert writeBlockKey\n await kv.set([this.keySpace], 1);\n return kv;\n });\n }\n\n /**\n * DenoKV has no transactions\n * so we have to ensure that there is no write in between our queries\n * which would confuse RxDB and return wrong query results.\n */\n async retryUntilNoWriteInBetween(\n fn: () => Promise\n ): Promise {\n const kv = await this.kvPromise;\n while (true) {\n const writeBlockKeyBefore = await kv.get([this.keySpace], this.kvOptions);\n const writeBlockValueBefore = writeBlockKeyBefore ? writeBlockKeyBefore.value : -1;\n const result = await fn();\n const writeBlockKeyAfter = await kv.get([this.keySpace], this.kvOptions);\n const writeBlockValueAfter = writeBlockKeyAfter ? 
writeBlockKeyAfter.value : -1;\n\n if (writeBlockValueBefore === writeBlockValueAfter) {\n return result;\n }\n }\n }\n\n async bulkWrite(documentWrites: BulkWriteRow[], context: string): Promise> {\n const kv = await this.kvPromise;\n const primaryPath = this.primaryPath;\n const ret: RxStorageBulkWriteResponse = {\n success: [],\n error: []\n };\n\n const batches = batchArray(documentWrites, ensureNotFalsy(this.settings.batchSize));\n\n /**\n * DenoKV does not have transactions\n * so we use a special writeBlock row to ensure\n * atomic writes (per document)\n * and so that we can do bulkWrites\n */\n for (const writeBatch of batches) {\n while (true) {\n const writeBlockKey = await kv.get([this.keySpace], this.kvOptions);\n const docsInDB = new Map>();\n\n /**\n * TODO the max amount for .getMany() is 10 which is defined by deno itself.\n * How can this be increased?\n */\n const readManyBatches = batchArray(writeBatch, 10);\n await Promise.all(\n readManyBatches.map(async (readManyBatch) => {\n const docsResult = await kv.getMany(\n readManyBatch.map(writeRow => {\n const docId: string = writeRow.document[primaryPath] as any;\n return [this.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId];\n })\n );\n docsResult.map((row: any) => {\n const docData = row.value;\n if (!docData) {\n return;\n }\n const docId: string = docData[primaryPath] as any;\n docsInDB.set(docId, docData);\n });\n })\n );\n const categorized = categorizeBulkWriteRows(\n this,\n this.primaryPath as any,\n docsInDB,\n writeBatch,\n context\n );\n\n let tx = kv.atomic();\n tx = tx.set([this.keySpace], ensureNotFalsy(writeBlockKey.value) + 1);\n tx = tx.check(writeBlockKey);\n\n // INSERTS\n categorized.bulkInsertDocs.forEach(writeRow => {\n const docId: string = writeRow.document[this.primaryPath] as any;\n ret.success.push(writeRow.document);\n\n // insert document data\n tx = tx.set([this.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId], writeRow.document);\n\n // insert secondary indexes\n 
Object.values(this.internals.indexes).forEach(indexMeta => {\n const indexString = indexMeta.getIndexableString(writeRow.document as any);\n tx = tx.set([this.keySpace, indexMeta.indexId, indexString], docId);\n });\n });\n // UPDATES\n categorized.bulkUpdateDocs.forEach((writeRow: BulkWriteRow) => {\n const docId: string = writeRow.document[this.primaryPath] as any;\n\n // insert document data\n tx = tx.set([this.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId], writeRow.document);\n\n // insert secondary indexes\n Object.values(this.internals.indexes).forEach(indexMeta => {\n const oldIndexString = indexMeta.getIndexableString(ensureNotFalsy(writeRow.previous));\n const newIndexString = indexMeta.getIndexableString(writeRow.document as any);\n if (oldIndexString !== newIndexString) {\n tx = tx.delete([this.keySpace, indexMeta.indexId, oldIndexString]);\n tx = tx.set([this.keySpace, indexMeta.indexId, newIndexString], docId);\n }\n });\n ret.success.push(writeRow.document as any);\n });\n\n const txResult = await tx.commit();\n if (txResult.ok) {\n appendToArray(ret.error, categorized.errors);\n if (categorized.eventBulk.events.length > 0) {\n const lastState = ensureNotFalsy(categorized.newestRow).document;\n categorized.eventBulk.checkpoint = {\n id: lastState[primaryPath],\n lwt: lastState._meta.lwt\n };\n categorized.eventBulk.endTime = now();\n this.changes$.next(categorized.eventBulk);\n }\n break;\n }\n }\n }\n return ret;\n }\n async findDocumentsById(ids: string[], withDeleted: boolean): Promise[]> {\n const kv = await this.kvPromise;\n const ret: RxDocumentData[] = [];\n await Promise.all(\n ids.map(async (docId) => {\n const kvKey = [this.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId];\n const findSingleResult = await kv.get(kvKey, this.kvOptions);\n const docInDb = findSingleResult.value;\n if (\n docInDb &&\n (\n !docInDb._deleted ||\n withDeleted\n )\n ) {\n ret.push(docInDb);\n }\n })\n );\n return ret;\n }\n query(preparedQuery: PreparedQuery): Promise> 
{\n return this.retryUntilNoWriteInBetween(\n () => queryDenoKV(this, preparedQuery)\n );\n }\n async count(preparedQuery: PreparedQuery): Promise {\n /**\n * At this point in time (end 2023), DenoKV does not support\n * range counts. So we have to run a normal query and use the result set length.\n * @link https://github.com/denoland/deno/issues/18965\n */\n const result = await this.retryUntilNoWriteInBetween(\n () => this.query(preparedQuery)\n );\n return {\n count: result.documents.length,\n mode: 'fast'\n };\n }\n getAttachmentData(documentId: string, attachmentId: string, digest: string): Promise {\n throw new Error(\"Method not implemented.\");\n }\n changeStream() {\n return this.changes$.asObservable();\n }\n async cleanup(minimumDeletedTime: number): Promise {\n const maxDeletionTime = now() - minimumDeletedTime;\n const kv = await this.kvPromise;\n const index = CLEANUP_INDEX;\n const indexName = getDenoKVIndexName(index);\n const indexMeta = this.internals.indexes[indexName];\n const lowerBoundString = getStartIndexStringFromLowerBound(\n this.schema,\n index,\n [\n true,\n /**\n * Do not use 0 here,\n * because 1 is the minimum value for _meta.lwt\n */\n 1\n ]\n );\n const upperBoundString = getStartIndexStringFromLowerBound(\n this.schema,\n index,\n [\n true,\n maxDeletionTime\n ]\n );\n let noMoreUndeleted: boolean = true;\n\n const range = kv.list({\n start: [this.keySpace, indexMeta.indexId, lowerBoundString],\n end: [this.keySpace, indexMeta.indexId, upperBoundString]\n }, {\n consistency: this.settings.consistencyLevel,\n batchSize: this.settings.batchSize,\n limit: this.settings.batchSize\n });\n\n let rangeCount = 0;\n for await (const row of range) {\n rangeCount = rangeCount + 1;\n const docId = row.value;\n const docDataResult = await kv.get([this.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId], this.kvOptions);\n if (!docDataResult.value) {\n continue;\n }\n const docData = ensureNotFalsy(docDataResult.value);\n if (\n !docData._deleted ||\n 
docData._meta.lwt > maxDeletionTime\n ) {\n continue;\n }\n\n\n let tx = kv.atomic();\n tx = tx.check(docDataResult);\n tx = tx.delete([this.keySpace, DENOKV_DOCUMENT_ROOT_PATH, docId]);\n Object\n .values(this.internals.indexes)\n .forEach(indexMetaInner => {\n tx = tx.delete([this.keySpace, indexMetaInner.indexId, docId]);\n });\n await tx.commit();\n }\n return noMoreUndeleted;\n }\n async close(): Promise {\n if (this.closed) {\n return this.closed;\n }\n this.closed = (async () => {\n this.changes$.complete();\n const kv = await this.kvPromise;\n await kv.close();\n })();\n return this.closed;\n }\n async remove(): Promise {\n ensureNotClosed(this);\n const kv = await this.kvPromise;\n const range = kv.list({\n start: [this.keySpace],\n end: [this.keySpace, INDEX_MAX]\n }, {\n consistency: this.settings.consistencyLevel,\n batchSize: this.settings.batchSize\n });\n let promises: Promise[] = [];\n for await (const row of range) {\n promises.push(kv.delete(row.key));\n }\n\n await Promise.all(promises);\n return this.close();\n }\n conflictResultionTasks(): Observable> {\n return new Subject().asObservable();\n }\n resolveConflictResultionTask(_taskSolution: RxConflictResultionTaskSolution): Promise {\n return PROMISE_RESOLVE_VOID;\n }\n}\n\n\n\nexport async function createDenoKVStorageInstance(\n storage: RxStorageDenoKV,\n params: RxStorageInstanceCreationParams,\n settings: DenoKVSettings\n): Promise> {\n settings = flatClone(settings);\n if (!settings.batchSize) {\n settings.batchSize = 100;\n }\n\n const primaryPath = getPrimaryFieldOfPrimaryKey(params.schema.primaryKey);\n\n const indexDBs: { [indexName: string]: DenoKVIndexMeta; } = {};\n const useIndexes = params.schema.indexes ? 
params.schema.indexes.slice(0) : [];\n useIndexes.push([primaryPath]);\n const useIndexesFinal = useIndexes.map(index => {\n const indexAr = toArray(index);\n return indexAr;\n });\n useIndexesFinal.push(CLEANUP_INDEX);\n useIndexesFinal.forEach((indexAr, indexId) => {\n const indexName = getDenoKVIndexName(indexAr);\n indexDBs[indexName] = {\n indexId: '|' + indexId + '|',\n indexName,\n getIndexableString: getIndexableStringMonad(params.schema, indexAr),\n index: indexAr\n };\n });\n\n const internals = {\n indexes: indexDBs\n };\n const instance = new RxStorageInstanceDenoKV(\n storage,\n params.databaseName,\n params.collectionName,\n params.schema,\n internals,\n params.options,\n settings\n );\n\n await addRxStorageMultiInstanceSupport(\n RX_STORAGE_NAME_DENOKV,\n params,\n instance\n );\n\n return Promise.resolve(instance);\n}\n\n\n\nfunction ensureNotClosed(\n instance: RxStorageInstanceDenoKV\n) {\n if (instance.closed) {\n throw new Error('RxStorageInstanceDenoKV is closed ' + instance.databaseName + '-' + instance.collectionName);\n 
}\n}\n"],"mappings":"AACA,SACIA,OAAO,QAEJ,MAAM;AAkBb,SAASC,2BAA2B,QAAQ,2BAA2B;AACvE,SAASC,gCAAgC,QAAQ,mCAAmC;AAGpF,SAASC,aAAa,EAAEC,yBAAyB,EAAEC,sBAAsB,EAAEC,aAAa,EAAEC,kBAAkB,QAAQ,oBAAoB;AACxI,SAASC,uBAAuB,EAAEC,iCAAiC,QAAQ,uBAAuB;AAClG,SAASC,aAAa,EAAEC,UAAU,EAAeC,OAAO,QAAQ,yBAAyB;AACzF,SAASC,cAAc,QAAQ,yBAAyB;AACxD,SAASC,uBAAuB,QAAQ,4BAA4B;AACpE,SAASC,GAAG,QAAQ,wBAAwB;AAC5C,SAASC,WAAW,QAAQ,mBAAmB;AAC/C,SAASC,SAAS,QAAQ,wBAAwB;AAClD,SAASC,oBAAoB,QAAQ,2BAA2B;AAChE,SAASC,SAAS,QAAQ,0BAA0B;AAIpD,WAAaC,uBAAuB;EAWhC,SAAAA,wBACoBC,OAAwB,EACxBC,YAAoB,EACpBC,cAAsB,EACtBC,MAAyD,EACzDC,SAA4C,EAC5CC,OAAiC,EACjCC,QAAwB,EACxBC,QAAQ,GAAG,CAAC,MAAM,EAAEN,YAAY,EAAEC,cAAc,EAAEC,MAAM,CAACK,OAAO,CAAC,CAACC,IAAI,CAAC,GAAG,CAAC,EAC3EC,SAAS,GAAG;IAAEC,WAAW,EAAEL,QAAQ,CAACM;EAAiB,CAAC,EACxE;IAAA,KAdMC,QAAQ,GAAoG,IAAIlC,OAAO,CAAC,CAAC;IAAA,KAK7GqB,OAAwB,GAAxBA,OAAwB;IAAA,KACxBC,YAAoB,GAApBA,YAAoB;IAAA,KACpBC,cAAsB,GAAtBA,cAAsB;IAAA,KACtBC,MAAyD,GAAzDA,MAAyD;IAAA,KACzDC,SAA4C,GAA5CA,SAA4C;IAAA,KAC5CC,OAAiC,GAAjCA,OAAiC;IAAA,KACjCC,QAAwB,GAAxBA,QAAwB;IAAA,KACxBC,QAAQ,GAARA,QAAQ;IAAA,KACRG,SAAS,GAATA,SAAS;IAEzB,IAAI,CAACI,WAAW,GAAGlC,2BAA2B,CAAC,IAAI,CAACuB,MAAM,CAACY,UAAU,CAAC;IACtE,IAAI,CAACC,SAAS,GAAG/B,aAAa,CAAC,CAAC,CAACgC,MAAM,CAACX,QAAQ,CAACY,UAAU,CAAC,CAACC,IAAI,CAAC,MAAOC,EAAO,IAAK;MACjF;MACA,MAAMA,EAAE,CAACC,GAAG,CAAC,CAAC,IAAI,CAACd,QAAQ,CAAC,EAAE,CAAC,CAAC;MAChC,OAAOa,EAAE;IACb,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA;AACA;EAJI,IAAAE,MAAA,GAAAvB,uBAAA,CAAAwB,SAAA;EAAAD,MAAA,CAKME,0BAA0B,GAAhC,eAAAA,2BACIC,EAAoB,EACV;IACV,IAAML,EAAE,GAAG,MAAM,IAAI,CAACJ,SAAS;IAC/B,OAAO,IAAI,EAAE;MACT,IAAMU,mBAAmB,GAAG,MAAMN,EAAE,CAACO,GAAG,CAAC,CAAC,IAAI,CAACpB,QAAQ,CAAC,EAAE,IAAI,CAACG,SAAS,CAAC;MACzE,IAAMkB,qBAAqB,GAAGF,mBAAmB,GAAGA,mBAAmB,CAACG,KAAK,GAAG,CAAC,CAAC;MAClF,IAAMC,MAAM,GAAG,MAAML,EAAE,CAAC,CAAC;MACzB,IAAMM,kBAAkB,GAAG,MAAMX,EAAE,CAACO,GAAG,CAAC,CAAC,IAAI,CAACpB,QAAQ,CAAC,EAAE,IAAI,CAACG,SAAS,CAAC;MACxE,IAAMsB,oBAAoB,GAAGD,kBAAkB,GAAGA,kBAAkB,CAACF,KAAK,GAAG,CAAC,CAAC;MAE/E,IAAID,qBAAqB,KAAK
I,oBAAoB,EAAE;QAChD,OAAOF,MAAM;MACjB;IACJ;EACJ,CAAC;EAAAR,MAAA,CAEKW,SAAS,GAAf,eAAAA,UAAgBC,cAAyC,EAAEC,OAAe,EAAkD;IAAA,IAAAC,KAAA;IACxH,IAAMhB,EAAE,GAAG,MAAM,IAAI,CAACJ,SAAS;IAC/B,IAAMF,WAAW,GAAG,IAAI,CAACA,WAAW;IACpC,IAAMuB,GAA0C,GAAG;MAC/CC,OAAO,EAAE,EAAE;MACXC,KAAK,EAAE;IACX,CAAC;IAED,IAAMC,OAAO,GAAGlD,UAAU,CAAC4C,cAAc,EAAE1C,cAAc,CAAC,IAAI,CAACc,QAAQ,CAACmC,SAAS,CAAC,CAAC;;IAEnF;AACR;AACA;AACA;AACA;AACA;IACQ,KAAK,IAAMC,UAAU,IAAIF,OAAO,EAAE;MAAA,IAAAG,KAAA,kBAAAA,CAAA,EACjB;QACT,IAAMC,aAAa,GAAG,MAAMxB,EAAE,CAACO,GAAG,CAAC,CAACS,KAAI,CAAC7B,QAAQ,CAAC,EAAE6B,KAAI,CAAC1B,SAAS,CAAC;QACnE,IAAMmC,QAAQ,GAAG,IAAIC,GAAG,CAAoC,CAAC;;QAE7D;AAChB;AACA;AACA;QACgB,IAAMC,eAAe,GAAGzD,UAAU,CAACoD,UAAU,EAAE,EAAE,CAAC;QAClD,MAAMM,OAAO,CAACC,GAAG,CACbF,eAAe,CAACG,GAAG,CAAC,MAAOC,aAAa,IAAK;UACzC,IAAMC,UAAU,GAAG,MAAMhC,EAAE,CAACiC,OAAO,CAC/BF,aAAa,CAACD,GAAG,CAACI,QAAQ,IAAI;YAC1B,IAAMC,KAAa,GAAGD,QAAQ,CAACE,QAAQ,CAAC1C,WAAW,CAAQ;YAC3D,OAAO,CAACsB,KAAI,CAAC7B,QAAQ,EAAExB,yBAAyB,EAAEwE,KAAK,CAAC;UAC5D,CAAC,CACL,CAAC;UACDH,UAAU,CAACF,GAAG,CAAEO,GAAQ,IAAK;YACzB,IAAMC,OAAO,GAAGD,GAAG,CAAC5B,KAAK;YACzB,IAAI,CAAC6B,OAAO,EAAE;cACV;YACJ;YACA,IAAMH,KAAa,GAAGG,OAAO,CAAC5C,WAAW,CAAQ;YACjD+B,QAAQ,CAACxB,GAAG,CAACkC,KAAK,EAAEG,OAAO,CAAC;UAChC,CAAC,CAAC;QACN,CAAC,CACL,CAAC;QACD,IAAMC,WAAW,GAAGlE,uBAAuB,CACvC2C,KAAI,EACJA,KAAI,CAACtB,WAAW,EAChB+B,QAAQ,EACRH,UAAU,EACVP,OACJ,CAAC;QAED,IAAIyB,EAAE,GAAGxC,EAAE,CAACyC,MAAM,CAAC,CAAC;QACpBD,EAAE,GAAGA,EAAE,CAACvC,GAAG,CAAC,CAACe,KAAI,CAAC7B,QAAQ,CAAC,EAAEf,cAAc,CAACoD,aAAa,CAACf,KAAK,CAAC,GAAG,CAAC,CAAC;QACrE+B,EAAE,GAAGA,EAAE,CAACE,KAAK,CAAClB,aAAa,CAAC;;QAE5B;QACAe,WAAW,CAACI,cAAc,CAACC,OAAO,CAACV,QAAQ,IAAI;UAC3C,IAAMC,KAAa,GAAGD,QAAQ,CAACE,QAAQ,CAACpB,KAAI,CAACtB,WAAW,CAAQ;UAChEuB,GAAG,CAACC,OAAO,CAAC2B,IAAI,CAACX,QAAQ,CAACE,QAAQ,CAAC;;UAEnC;UACAI,EAAE,GAAGA,EAAE,CAACvC,GAAG,CAAC,CAACe,KAAI,CAAC7B,QAAQ,EAAExB,yBAAyB,EAAEwE,KAAK,CAAC,EAAED,QAAQ,CAACE,QAAQ,CAAC;;UAEjF;UACAU,MAAM,CAACC,MAAM,CAAC/B,KAAI,CAAChC,SAAS,CAACgE,OAAO,CAAC,CAACJ,OAAO,CAACK,SAAS,IA
AI;YACvD,IAAMC,WAAW,GAAGD,SAAS,CAACE,kBAAkB,CAACjB,QAAQ,CAACE,QAAe,CAAC;YAC1EI,EAAE,GAAGA,EAAE,CAACvC,GAAG,CAAC,CAACe,KAAI,CAAC7B,QAAQ,EAAE8D,SAAS,CAACG,OAAO,EAAEF,WAAW,CAAC,EAAEf,KAAK,CAAC;UACvE,CAAC,CAAC;QACN,CAAC,CAAC;QACF;QACAI,WAAW,CAACc,cAAc,CAACT,OAAO,CAAEV,QAAiC,IAAK;UACtE,IAAMC,KAAa,GAAGD,QAAQ,CAACE,QAAQ,CAACpB,KAAI,CAACtB,WAAW,CAAQ;;UAEhE;UACA8C,EAAE,GAAGA,EAAE,CAACvC,GAAG,CAAC,CAACe,KAAI,CAAC7B,QAAQ,EAAExB,yBAAyB,EAAEwE,KAAK,CAAC,EAAED,QAAQ,CAACE,QAAQ,CAAC;;UAEjF;UACAU,MAAM,CAACC,MAAM,CAAC/B,KAAI,CAAChC,SAAS,CAACgE,OAAO,CAAC,CAACJ,OAAO,CAACK,SAAS,IAAI;YACvD,IAAMK,cAAc,GAAGL,SAAS,CAACE,kBAAkB,CAAC/E,cAAc,CAAC8D,QAAQ,CAACqB,QAAQ,CAAC,CAAC;YACtF,IAAMC,cAAc,GAAGP,SAAS,CAACE,kBAAkB,CAACjB,QAAQ,CAACE,QAAe,CAAC;YAC7E,IAAIkB,cAAc,KAAKE,cAAc,EAAE;cACnChB,EAAE,GAAGA,EAAE,CAACiB,MAAM,CAAC,CAACzC,KAAI,CAAC7B,QAAQ,EAAE8D,SAAS,CAACG,OAAO,EAAEE,cAAc,CAAC,CAAC;cAClEd,EAAE,GAAGA,EAAE,CAACvC,GAAG,CAAC,CAACe,KAAI,CAAC7B,QAAQ,EAAE8D,SAAS,CAACG,OAAO,EAAEI,cAAc,CAAC,EAAErB,KAAK,CAAC;YAC1E;UACJ,CAAC,CAAC;UACFlB,GAAG,CAACC,OAAO,CAAC2B,IAAI,CAACX,QAAQ,CAACE,QAAe,CAAC;QAC9C,CAAC,CAAC;QAEF,IAAMsB,QAAQ,GAAG,MAAMlB,EAAE,CAACmB,MAAM,CAAC,CAAC;QAClC,IAAID,QAAQ,CAACE,EAAE,EAAE;UACb3F,aAAa,CAACgD,GAAG,CAACE,KAAK,EAAEoB,WAAW,CAACsB,MAAM,CAAC;UAC5C,IAAItB,WAAW,CAACuB,SAAS,CAACC,MAAM,CAACC,MAAM,GAAG,CAAC,EAAE;YACzC,IAAMC,SAAS,GAAG7F,cAAc,CAACmE,WAAW,CAAC2B,SAAS,CAAC,CAAC9B,QAAQ;YAChEG,WAAW,CAACuB,SAAS,CAACK,UAAU,GAAG;cAC/BC,EAAE,EAAEH,SAAS,CAACvE,WAAW,CAAC;cAC1B2E,GAAG,EAAEJ,SAAS,CAACK,KAAK,CAACD;YACzB,CAAC;YACD9B,WAAW,CAACuB,SAAS,CAACS,OAAO,GAAGjG,GAAG,CAAC,CAAC;YACrC0C,KAAI,CAACvB,QAAQ,CAAC+E,IAAI,CAACjC,WAAW,CAACuB,SAAS,CAAC;UAC7C;UAAC;QAEL;MACJ,CAAC;MAtFD,OAAO,IAAI;QAAA,UAAAvC,KAAA,IAoFH;MAAM;IAGlB;IACA,OAAON,GAAG;EACd,CAAC;EAAAf,MAAA,CACKuE,iBAAiB,GAAvB,eAAAA,kBAAwBC,GAAa,EAAEC,WAAoB,EAAwC;IAC/F,IAAM3E,EAAE,GAAG,MAAM,IAAI,CAACJ,SAAS;IAC/B,IAAMqB,GAAgC,GAAG,EAAE;IAC3C,MAAMW,OAAO,CAACC,GAAG,CACb6C,GAAG,CAAC5C,GAAG,CAAC,MAAOK,KAAK,IAAK;MACrB,IAAMyC,KAAK,GAAG,CAAC,IAAI,CAACzF,QAAQ,EAAExB,y
BAAyB,EAAEwE,KAAK,CAAC;MAC/D,IAAM0C,gBAAgB,GAAG,MAAM7E,EAAE,CAACO,GAAG,CAACqE,KAAK,EAAE,IAAI,CAACtF,SAAS,CAAC;MAC5D,IAAMwF,OAAO,GAAGD,gBAAgB,CAACpE,KAAK;MACtC,IACIqE,OAAO,KAEH,CAACA,OAAO,CAACC,QAAQ,IACjBJ,WAAW,CACd,EACH;QACE1D,GAAG,CAAC4B,IAAI,CAACiC,OAAO,CAAC;MACrB;IACJ,CAAC,CACL,CAAC;IACD,OAAO7D,GAAG;EACd,CAAC;EAAAf,MAAA,CACD8E,KAAK,GAAL,SAAAA,MAAMC,aAAuC,EAA4C;IACrF,OAAO,IAAI,CAAC7E,0BAA0B,CAClC,MAAM7B,WAAW,CAAC,IAAI,EAAE0G,aAAa,CACzC,CAAC;EACL,CAAC;EAAA/E,MAAA,CACKgF,KAAK,GAAX,eAAAA,MAAYD,aAAuC,EAAiC;IAChF;AACR;AACA;AACA;AACA;IACQ,IAAMvE,MAAM,GAAG,MAAM,IAAI,CAACN,0BAA0B,CAChD,MAAM,IAAI,CAAC4E,KAAK,CAACC,aAAa,CAClC,CAAC;IACD,OAAO;MACHC,KAAK,EAAExE,MAAM,CAACyE,SAAS,CAACnB,MAAM;MAC9BoB,IAAI,EAAE;IACV,CAAC;EACL,CAAC;EAAAlF,MAAA,CACDmF,iBAAiB,GAAjB,SAAAA,kBAAkBC,UAAkB,EAAEC,YAAoB,EAAEC,MAAc,EAAmB;IACzF,MAAM,IAAIC,KAAK,CAAC,yBAAyB,CAAC;EAC9C,CAAC;EAAAvF,MAAA,CACDwF,YAAY,GAAZ,SAAAA,aAAA,EAAe;IACX,OAAO,IAAI,CAACjG,QAAQ,CAACkG,YAAY,CAAC,CAAC;EACvC,CAAC;EAAAzF,MAAA,CACK0F,OAAO,GAAb,eAAAA,QAAcC,kBAA0B,EAAoB;IAAA,IAAAC,MAAA;IACxD,IAAMC,eAAe,GAAGzH,GAAG,CAAC,CAAC,GAAGuH,kBAAkB;IAClD,IAAM7F,EAAE,GAAG,MAAM,IAAI,CAACJ,SAAS;IAC/B,IAAMoG,KAAK,GAAGtI,aAAa;IAC3B,IAAMuI,SAAS,GAAGnI,kBAAkB,CAACkI,KAAK,CAAC;IAC3C,IAAM/C,SAAS,GAAG,IAAI,CAACjE,SAAS,CAACgE,OAAO,CAACiD,SAAS,CAAC;IACnD,IAAMC,gBAAgB,GAAGlI,iCAAiC,CACtD,IAAI,CAACe,MAAM,EACXiH,KAAK,EACL,CACI,IAAI;IACJ;AAChB;AACA;AACA;IACgB,CAAC,CAET,CAAC;IACD,IAAMG,gBAAgB,GAAGnI,iCAAiC,CACtD,IAAI,CAACe,MAAM,EACXiH,KAAK,EACL,CACI,IAAI,EACJD,eAAe,CAEvB,CAAC;IACD,IAAIK,eAAwB,GAAG,IAAI;IAEnC,IAAMC,KAAK,GAAGrG,EAAE,CAACsG,IAAI,CAAC;MAClBC,KAAK,EAAE,CAAC,IAAI,CAACpH,QAAQ,EAAE8D,SAAS,CAACG,OAAO,EAAE8C,gBAAgB,CAAC;MAC3DM,GAAG,EAAE,CAAC,IAAI,CAACrH,QAAQ,EAAE8D,SAAS,CAACG,OAAO,EAAE+C,gBAAgB;IAC5D,CAAC,EAAE;MACC5G,WAAW,EAAE,IAAI,CAACL,QAAQ,CAACM,gBAAgB;MAC3C6B,SAAS,EAAE,IAAI,CAACnC,QAAQ,CAACmC,SAAS;MAClCoF,KAAK,EAAE,IAAI,CAACvH,QAAQ,CAACmC;IACzB,CAAC,CAAC;IAEF,IAAIqF,UAAU,GAAG,CAAC;IAAC,IAAAC,MAAA,kBAAAA,CAAA,EACY;QAC3BD,UAAU,GAAGA,UAAU,GAAG,CAAC;QAC3B,
IAAMvE,KAAK,GAAGE,GAAG,CAAC5B,KAAK;QACvB,IAAMmG,aAAa,GAAG,MAAM5G,EAAE,CAACO,GAAG,CAAC,CAACuF,MAAI,CAAC3G,QAAQ,EAAExB,yBAAyB,EAAEwE,KAAK,CAAC,EAAE2D,MAAI,CAACxG,SAAS,CAAC;QACrG,IAAI,CAACsH,aAAa,CAACnG,KAAK,EAAE;UAAA;QAE1B;QACA,IAAM6B,OAAO,GAAGlE,cAAc,CAACwI,aAAa,CAACnG,KAAK,CAAC;QACnD,IACI,CAAC6B,OAAO,CAACyC,QAAQ,IACjBzC,OAAO,CAACgC,KAAK,CAACD,GAAG,GAAG0B,eAAe,EACrC;UAAA;QAEF;QAGA,IAAIvD,EAAE,GAAGxC,EAAE,CAACyC,MAAM,CAAC,CAAC;QACpBD,EAAE,GAAGA,EAAE,CAACE,KAAK,CAACkE,aAAa,CAAC;QAC5BpE,EAAE,GAAGA,EAAE,CAACiB,MAAM,CAAC,CAACqC,MAAI,CAAC3G,QAAQ,EAAExB,yBAAyB,EAAEwE,KAAK,CAAC,CAAC;QACjEW,MAAM,CACDC,MAAM,CAAC+C,MAAI,CAAC9G,SAAS,CAACgE,OAAO,CAAC,CAC9BJ,OAAO,CAACiE,cAAc,IAAI;UACvBrE,EAAE,GAAGA,EAAE,CAACiB,MAAM,CAAC,CAACqC,MAAI,CAAC3G,QAAQ,EAAE0H,cAAc,CAACzD,OAAO,EAAEjB,KAAK,CAAC,CAAC;QAClE,CAAC,CAAC;QACN,MAAMK,EAAE,CAACmB,MAAM,CAAC,CAAC;MACrB,CAAC;MAAAmD,IAAA;IAzBD,WAAW,IAAMzE,GAAG,IAAIgE,KAAK;MAAAS,IAAA,SAAAH,MAAA;MAAA,IAAAG,IAAA,QAKrB;IAAS;IAqBjB,OAAOV,eAAe;EAC1B,CAAC;EAAAlG,MAAA,CACK6G,KAAK,GAAX,eAAAA,MAAA,EAA6B;IACzB,IAAI,IAAI,CAACC,MAAM,EAAE;MACb,OAAO,IAAI,CAACA,MAAM;IACtB;IACA,IAAI,CAACA,MAAM,GAAG,CAAC,YAAY;MACvB,IAAI,CAACvH,QAAQ,CAACwH,QAAQ,CAAC,CAAC;MACxB,IAAMjH,EAAE,GAAG,MAAM,IAAI,CAACJ,SAAS;MAC/B,MAAMI,EAAE,CAAC+G,KAAK,CAAC,CAAC;IACpB,CAAC,EAAE,CAAC;IACJ,OAAO,IAAI,CAACC,MAAM;EACtB,CAAC;EAAA9G,MAAA,CACKgH,MAAM,GAAZ,eAAAA,OAAA,EAA8B;IAC1BC,eAAe,CAAC,IAAI,CAAC;IACrB,IAAMnH,EAAE,GAAG,MAAM,IAAI,CAACJ,SAAS;IAC/B,IAAMyG,KAAK,GAAGrG,EAAE,CAACsG,IAAI,CAAC;MAClBC,KAAK,EAAE,CAAC,IAAI,CAACpH,QAAQ,CAAC;MACtBqH,GAAG,EAAE,CAAC,IAAI,CAACrH,QAAQ,EAAEX,SAAS;IAClC,CAAC,EAAE;MACCe,WAAW,EAAE,IAAI,CAACL,QAAQ,CAACM,gBAAgB;MAC3C6B,SAAS,EAAE,IAAI,CAACnC,QAAQ,CAACmC;IAC7B,CAAC,CAAC;IACF,IAAI+F,QAAwB,GAAG,EAAE;IACjC,WAAW,IAAM/E,GAAG,IAAIgE,KAAK,EAAE;MAC3Be,QAAQ,CAACvE,IAAI,CAAC7C,EAAE,CAACyD,MAAM,CAACpB,GAAG,CAACgF,GAAG,CAAC,CAAC;IACrC;IAEA,MAAMzF,OAAO,CAACC,GAAG,CAACuF,QAAQ,CAAC;IAC3B,OAAO,IAAI,CAACL,KAAK,CAAC,CAAC;EACvB,CAAC;EAAA7G,MAAA,CACDoH,sBAAsB,GAAtB,SAAAA,uBAAA,EAAyE;IACrE,OAAO,IAAI/
J,OAAO,CAAM,CAAC,CAACoI,YAAY,CAAC,CAAC;EAC5C,CAAC;EAAAzF,MAAA,CACDqH,4BAA4B,GAA5B,SAAAA,6BAA6BC,aAAyD,EAAiB;IACnG,OAAO/I,oBAAoB;EAC/B,CAAC;EAAA,OAAAE,uBAAA;AAAA;AAKL,OAAO,eAAe8I,2BAA2BA,CAC7C7I,OAAwB,EACxB8I,MAAkE,EAClExI,QAAwB,EACmB;EAC3CA,QAAQ,GAAGR,SAAS,CAACQ,QAAQ,CAAC;EAC9B,IAAI,CAACA,QAAQ,CAACmC,SAAS,EAAE;IACrBnC,QAAQ,CAACmC,SAAS,GAAG,GAAG;EAC5B;EAEA,IAAM3B,WAAW,GAAGlC,2BAA2B,CAACkK,MAAM,CAAC3I,MAAM,CAACY,UAAU,CAAC;EAEzE,IAAMgI,QAA8D,GAAG,CAAC,CAAC;EACzE,IAAMC,UAAU,GAAGF,MAAM,CAAC3I,MAAM,CAACiE,OAAO,GAAG0E,MAAM,CAAC3I,MAAM,CAACiE,OAAO,CAAC6E,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE;EAC9ED,UAAU,CAAC/E,IAAI,CAAC,CAACnD,WAAW,CAAC,CAAC;EAC9B,IAAMoI,eAAe,GAAGF,UAAU,CAAC9F,GAAG,CAACkE,KAAK,IAAI;IAC5C,IAAM+B,OAAO,GAAG5J,OAAO,CAAC6H,KAAK,CAAC;IAC9B,OAAO+B,OAAO;EAClB,CAAC,CAAC;EACFD,eAAe,CAACjF,IAAI,CAACnF,aAAa,CAAC;EACnCoK,eAAe,CAAClF,OAAO,CAAC,CAACmF,OAAO,EAAE3E,OAAO,KAAK;IAC1C,IAAM6C,SAAS,GAAGnI,kBAAkB,CAACiK,OAAO,CAAC;IAC7CJ,QAAQ,CAAC1B,SAAS,CAAC,GAAG;MAClB7C,OAAO,EAAE,GAAG,GAAGA,OAAO,GAAG,GAAG;MAC5B6C,SAAS;MACT9C,kBAAkB,EAAEpF,uBAAuB,CAAC2J,MAAM,CAAC3I,MAAM,EAAEgJ,OAAO,CAAC;MACnE/B,KAAK,EAAE+B;IACX,CAAC;EACL,CAAC,CAAC;EAEF,IAAM/I,SAAS,GAAG;IACdgE,OAAO,EAAE2E;EACb,CAAC;EACD,IAAMK,QAAQ,GAAG,IAAIrJ,uBAAuB,CACxCC,OAAO,EACP8I,MAAM,CAAC7I,YAAY,EACnB6I,MAAM,CAAC5I,cAAc,EACrB4I,MAAM,CAAC3I,MAAM,EACbC,SAAS,EACT0I,MAAM,CAACzI,OAAO,EACdC,QACJ,CAAC;EAED,MAAMzB,gCAAgC,CAClCG,sBAAsB,EACtB8J,MAAM,EACNM,QACJ,CAAC;EAED,OAAOpG,OAAO,CAACqG,OAAO,CAACD,QAAQ,CAAC;AACpC;AAIA,SAASb,eAAeA,CACpBa,QAAsC,EACxC;EACE,IAAIA,QAAQ,CAAChB,MAAM,EAAE;IACjB,MAAM,IAAIvB,KAAK,CAAC,oCAAoC,GAAGuC,QAAQ,CAACnJ,YAAY,GAAG,GAAG,GAAGmJ,QAAQ,CAAClJ,cAAc,CAAC;EACjH;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-dexie/dexie-helper.js b/dist/esm/plugins/storage-dexie/dexie-helper.js deleted file mode 100644 index 14d9e1286dd..00000000000 --- a/dist/esm/plugins/storage-dexie/dexie-helper.js +++ /dev/null @@ -1,236 +0,0 @@ -import { Dexie } from 'dexie'; -import { flatClone, 
getFromMapOrCreate, getProperty, setProperty, toArray, uniqueArray } from "../utils/index.js"; -import { getPrimaryFieldOfPrimaryKey, getSchemaByObjectPath } from "../../rx-schema-helper.js"; -export var DEXIE_DOCS_TABLE_NAME = 'docs'; -export var DEXIE_CHANGES_TABLE_NAME = 'changes'; -export var DEXIE_ATTACHMENTS_TABLE_NAME = 'attachments'; -export var RX_STORAGE_NAME_DEXIE = 'dexie'; -var DEXIE_STATE_DB_BY_NAME = new Map(); -var REF_COUNT_PER_DEXIE_DB = new Map(); -export function getDexieDbWithTables(databaseName, collectionName, settings, schema) { - var dexieDbName = 'rxdb-dexie-' + databaseName + '--' + schema.version + '--' + collectionName; - var state = getFromMapOrCreate(DEXIE_STATE_DB_BY_NAME, dexieDbName, () => { - var value = (async () => { - /** - * IndexedDB was not designed for dynamically adding tables on the fly, - * so we create one dexie database per RxDB storage instance. - * @link https://github.com/dexie/Dexie.js/issues/684#issuecomment-373224696 - */ - var useSettings = flatClone(settings); - useSettings.autoOpen = false; - var dexieDb = new Dexie(dexieDbName, useSettings); - var dexieStoresSettings = { - [DEXIE_DOCS_TABLE_NAME]: getDexieStoreSchema(schema), - [DEXIE_CHANGES_TABLE_NAME]: '++sequence, id', - [DEXIE_ATTACHMENTS_TABLE_NAME]: 'id' - }; - dexieDb.version(1).stores(dexieStoresSettings); - await dexieDb.open(); - return { - dexieDb, - dexieTable: dexieDb[DEXIE_DOCS_TABLE_NAME], - dexieAttachmentsTable: dexieDb[DEXIE_ATTACHMENTS_TABLE_NAME], - booleanIndexes: getBooleanIndexes(schema) - }; - })(); - DEXIE_STATE_DB_BY_NAME.set(dexieDbName, state); - REF_COUNT_PER_DEXIE_DB.set(state, 0); - return value; - }); - return state; -} -export async function closeDexieDb(statePromise) { - var state = await statePromise; - var prevCount = REF_COUNT_PER_DEXIE_DB.get(statePromise); - var newCount = prevCount - 1; - if (newCount === 0) { - state.dexieDb.close(); - REF_COUNT_PER_DEXIE_DB.delete(statePromise); - } else { - 
REF_COUNT_PER_DEXIE_DB.set(statePromise, newCount); - } -} - -/** - * It is not possible to set non-javascript-variable-syntax - * keys as IndexedDB indexes. So we have to substitute the pipe-char - * which comes from the key-compression plugin. - */ -export var DEXIE_PIPE_SUBSTITUTE = '__'; -export function dexieReplaceIfStartsWithPipe(str) { - var split = str.split('.'); - if (split.length > 1) { - return split.map(part => dexieReplaceIfStartsWithPipe(part)).join('.'); - } - if (str.startsWith('|')) { - var withoutFirst = str.substring(1); - return DEXIE_PIPE_SUBSTITUTE + withoutFirst; - } else { - return str; - } -} -export function dexieReplaceIfStartsWithPipeRevert(str) { - var split = str.split('.'); - if (split.length > 1) { - return split.map(part => dexieReplaceIfStartsWithPipeRevert(part)).join('.'); - } - if (str.startsWith(DEXIE_PIPE_SUBSTITUTE)) { - var withoutFirst = str.substring(DEXIE_PIPE_SUBSTITUTE.length); - return '|' + withoutFirst; - } else { - return str; - } -} - -/** - * IndexedDB does not support boolean indexing. - * So we have to replace true/false with '1'/'0' - * @param d - */ -export function fromStorageToDexie(booleanIndexes, d) { - if (!d) { - return d; - } - d = flatClone(d); - d = fromStorageToDexieField(d); - booleanIndexes.forEach(idx => { - var val = getProperty(d, idx); - var newVal = val ? '1' : '0'; - setProperty(d, idx, newVal); - }); - return d; -} -export function fromDexieToStorage(booleanIndexes, d) { - if (!d) { - return d; - } - d = flatClone(d); - d = fromDexieToStorageField(d); - booleanIndexes.forEach(idx => { - var val = getProperty(d, idx); - var newVal = val === '1' ? 
true : false; - setProperty(d, idx, newVal); - }); - return d; -} - -/** - * @recursive - */ -export function fromStorageToDexieField(documentData) { - if (!documentData || typeof documentData === 'string' || typeof documentData === 'number' || typeof documentData === 'boolean') { - return documentData; - } else if (Array.isArray(documentData)) { - return documentData.map(row => fromStorageToDexieField(row)); - } else if (typeof documentData === 'object') { - var ret = {}; - Object.entries(documentData).forEach(([key, value]) => { - if (typeof value === 'object') { - value = fromStorageToDexieField(value); - } - ret[dexieReplaceIfStartsWithPipe(key)] = value; - }); - return ret; - } -} -export function fromDexieToStorageField(documentData) { - if (!documentData || typeof documentData === 'string' || typeof documentData === 'number' || typeof documentData === 'boolean') { - return documentData; - } else if (Array.isArray(documentData)) { - return documentData.map(row => fromDexieToStorageField(row)); - } else if (typeof documentData === 'object') { - var ret = {}; - Object.entries(documentData).forEach(([key, value]) => { - if (typeof value === 'object' || Array.isArray(documentData)) { - value = fromDexieToStorageField(value); - } - ret[dexieReplaceIfStartsWithPipeRevert(key)] = value; - }); - return ret; - } -} - -/** - * Creates a string that can be used to create the dexie store. 
- * @link https://dexie.org/docs/API-Reference#quick-reference - */ -export function getDexieStoreSchema(rxJsonSchema) { - var parts = []; - - /** - * First part must be the primary key - * @link https://github.com/dexie/Dexie.js/issues/1307#issuecomment-846590912 - */ - var primaryKey = getPrimaryFieldOfPrimaryKey(rxJsonSchema.primaryKey); - parts.push([primaryKey]); - parts.push(['_deleted', primaryKey]); - - // add other indexes - if (rxJsonSchema.indexes) { - rxJsonSchema.indexes.forEach(index => { - var arIndex = toArray(index); - parts.push(arIndex); - }); - } - - // we also need the _meta.lwt+primaryKey index for the getChangedDocumentsSince() method. - parts.push(['_meta.lwt', primaryKey]); - - // and this one for the cleanup() - parts.push(['_meta.lwt']); - - /** - * It is not possible to set non-javascript-variable-syntax - * keys as IndexedDB indexes. So we have to substitute the pipe-char - * which comes from the key-compression plugin. - */ - parts = parts.map(part => { - return part.map(str => dexieReplaceIfStartsWithPipe(str)); - }); - var dexieSchemaRows = parts.map(part => { - if (part.length === 1) { - return part[0]; - } else { - return '[' + part.join('+') + ']'; - } - }); - dexieSchemaRows = dexieSchemaRows.filter((elem, pos, arr) => arr.indexOf(elem) === pos); // unique; - var dexieSchema = dexieSchemaRows.join(', '); - return dexieSchema; -} - -/** - * Returns all documents in the database. - * Non-deleted plus deleted ones. 
- */ -export async function getDocsInDb(internals, docIds) { - var state = await internals; - var docsInDb = await state.dexieTable.bulkGet(docIds); - return docsInDb.map(d => fromDexieToStorage(state.booleanIndexes, d)); -} -export function attachmentObjectId(documentId, attachmentId) { - return documentId + '||' + attachmentId; -} -export function getBooleanIndexes(schema) { - var checkedFields = new Set(); - var ret = []; - if (!schema.indexes) { - return ret; - } - schema.indexes.forEach(index => { - var fields = toArray(index); - fields.forEach(field => { - if (checkedFields.has(field)) { - return; - } - checkedFields.add(field); - var schemaObj = getSchemaByObjectPath(schema, field); - if (schemaObj.type === 'boolean') { - ret.push(field); - } - }); - }); - ret.push('_deleted'); - return uniqueArray(ret); -} -//# sourceMappingURL=dexie-helper.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-dexie/dexie-helper.js.map b/dist/esm/plugins/storage-dexie/dexie-helper.js.map deleted file mode 100644 index d3646a72db2..00000000000 --- a/dist/esm/plugins/storage-dexie/dexie-helper.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"dexie-helper.js","names":["Dexie","flatClone","getFromMapOrCreate","getProperty","setProperty","toArray","uniqueArray","getPrimaryFieldOfPrimaryKey","getSchemaByObjectPath","DEXIE_DOCS_TABLE_NAME","DEXIE_CHANGES_TABLE_NAME","DEXIE_ATTACHMENTS_TABLE_NAME","RX_STORAGE_NAME_DEXIE","DEXIE_STATE_DB_BY_NAME","Map","REF_COUNT_PER_DEXIE_DB","getDexieDbWithTables","databaseName","collectionName","settings","schema","dexieDbName","version","state","value","useSettings","autoOpen","dexieDb","dexieStoresSettings","getDexieStoreSchema","stores","open","dexieTable","dexieAttachmentsTable","booleanIndexes","getBooleanIndexes","set","closeDexieDb","statePromise","prevCount","get","newCount","close","delete","DEXIE_PIPE_SUBSTITUTE","dexieReplaceIfStartsWithPipe","str","split","length","map","part","join","startsWith","withoutFirst","substring","dexieReplaceIfStartsWithPipeRevert","fromStorageToDexie","d","fromStorageToDexieField","forEach","idx","val","newVal","fromDexieToStorage","fromDexieToStorageField","documentData","Array","isArray","row","ret","Object","entries","key","rxJsonSchema","parts","primaryKey","push","indexes","index","arIndex","dexieSchemaRows","filter","elem","pos","arr","indexOf","dexieSchema","getDocsInDb","internals","docIds","docsInDb","bulkGet","attachmentObjectId","documentId","attachmentId","checkedFields","Set","fields","field","has","add","schemaObj","type"],"sources":["../../../../src/plugins/storage-dexie/dexie-helper.ts"],"sourcesContent":["import type {\n DexieStorageInternals,\n RxDocumentData,\n RxJsonSchema\n} from '../../types/index.d.ts';\nimport { Dexie } from 'dexie';\nimport type { DexieSettings } from '../../types/index.d.ts';\nimport { flatClone, getFromMapOrCreate, getProperty, setProperty, toArray, uniqueArray } from '../utils/index.ts';\nimport {\n getPrimaryFieldOfPrimaryKey,\n getSchemaByObjectPath\n} from '../../rx-schema-helper.ts';\n\nexport const DEXIE_DOCS_TABLE_NAME = 'docs';\nexport const 
DEXIE_CHANGES_TABLE_NAME = 'changes';\nexport const DEXIE_ATTACHMENTS_TABLE_NAME = 'attachments';\n\nexport const RX_STORAGE_NAME_DEXIE = 'dexie';\n\nconst DEXIE_STATE_DB_BY_NAME: Map = new Map();\nconst REF_COUNT_PER_DEXIE_DB: Map = new Map();\nexport function getDexieDbWithTables(\n databaseName: string,\n collectionName: string,\n settings: DexieSettings,\n schema: RxJsonSchema\n): DexieStorageInternals {\n const dexieDbName = 'rxdb-dexie-' + databaseName + '--' + schema.version + '--' + collectionName;\n\n const state = getFromMapOrCreate(\n DEXIE_STATE_DB_BY_NAME,\n dexieDbName,\n () => {\n const value = (async () => {\n /**\n * IndexedDB was not designed for dynamically adding tables on the fly,\n * so we create one dexie database per RxDB storage instance.\n * @link https://github.com/dexie/Dexie.js/issues/684#issuecomment-373224696\n */\n const useSettings = flatClone(settings);\n useSettings.autoOpen = false;\n const dexieDb = new Dexie(dexieDbName, useSettings);\n const dexieStoresSettings = {\n [DEXIE_DOCS_TABLE_NAME]: getDexieStoreSchema(schema),\n [DEXIE_CHANGES_TABLE_NAME]: '++sequence, id',\n [DEXIE_ATTACHMENTS_TABLE_NAME]: 'id'\n };\n\n dexieDb.version(1).stores(dexieStoresSettings);\n await dexieDb.open();\n\n return {\n dexieDb,\n dexieTable: (dexieDb as any)[DEXIE_DOCS_TABLE_NAME],\n dexieAttachmentsTable: (dexieDb as any)[DEXIE_ATTACHMENTS_TABLE_NAME],\n booleanIndexes: getBooleanIndexes(schema)\n };\n })();\n DEXIE_STATE_DB_BY_NAME.set(dexieDbName, state);\n REF_COUNT_PER_DEXIE_DB.set(state, 0);\n return value;\n }\n );\n return state;\n}\n\nexport async function closeDexieDb(statePromise: DexieStorageInternals) {\n const state = await statePromise;\n const prevCount = REF_COUNT_PER_DEXIE_DB.get(statePromise);\n const newCount = (prevCount as any) - 1;\n if (newCount === 0) {\n state.dexieDb.close();\n REF_COUNT_PER_DEXIE_DB.delete(statePromise);\n } else {\n REF_COUNT_PER_DEXIE_DB.set(statePromise, newCount);\n }\n}\n\n\n\n/**\n * It is not 
possible to set non-javascript-variable-syntax\n * keys as IndexedDB indexes. So we have to substitute the pipe-char\n * which comes from the key-compression plugin.\n */\nexport const DEXIE_PIPE_SUBSTITUTE = '__';\nexport function dexieReplaceIfStartsWithPipe(str: string): string {\n const split = str.split('.');\n if (split.length > 1) {\n return split.map(part => dexieReplaceIfStartsWithPipe(part)).join('.');\n }\n\n if (str.startsWith('|')) {\n const withoutFirst = str.substring(1);\n return DEXIE_PIPE_SUBSTITUTE + withoutFirst;\n } else {\n return str;\n }\n}\n\nexport function dexieReplaceIfStartsWithPipeRevert(str: string): string {\n const split = str.split('.');\n if (split.length > 1) {\n return split.map(part => dexieReplaceIfStartsWithPipeRevert(part)).join('.');\n }\n\n if (str.startsWith(DEXIE_PIPE_SUBSTITUTE)) {\n const withoutFirst = str.substring(DEXIE_PIPE_SUBSTITUTE.length);\n return '|' + withoutFirst;\n } else {\n return str;\n }\n}\n\n\n/**\n * IndexedDB does not support boolean indexing.\n * So we have to replace true/false with '1'/'0'\n * @param d \n */\nexport function fromStorageToDexie(\n booleanIndexes: string[],\n d: RxDocumentData\n): any {\n if (!d) {\n return d;\n }\n d = flatClone(d);\n d = fromStorageToDexieField(d);\n\n booleanIndexes.forEach(idx => {\n const val = getProperty(d, idx);\n const newVal = val ? '1' : '0';\n setProperty(d, idx, newVal);\n });\n\n return d;\n}\nexport function fromDexieToStorage(\n booleanIndexes: string[],\n d: any\n): RxDocumentData {\n if (!d) {\n return d;\n }\n\n d = flatClone(d);\n d = fromDexieToStorageField(d);\n\n booleanIndexes.forEach(idx => {\n const val = getProperty(d, idx);\n const newVal = val === '1' ? 
true : false;\n setProperty(d, idx, newVal);\n });\n\n return d;\n}\n\n/**\n * @recursive\n */\nexport function fromStorageToDexieField(documentData: RxDocumentData): any {\n if (\n !documentData ||\n typeof documentData === 'string' ||\n typeof documentData === 'number' ||\n typeof documentData === 'boolean'\n ) {\n return documentData;\n } else if (Array.isArray(documentData)) {\n return documentData.map(row => fromStorageToDexieField(row));\n } else if (typeof documentData === 'object') {\n const ret: any = {};\n Object.entries(documentData).forEach(([key, value]) => {\n if (typeof value === 'object') {\n value = fromStorageToDexieField(value);\n }\n ret[dexieReplaceIfStartsWithPipe(key)] = value;\n });\n return ret;\n }\n}\n\nexport function fromDexieToStorageField(documentData: any): RxDocumentData {\n if (!documentData || typeof documentData === 'string' || typeof documentData === 'number' || typeof documentData === 'boolean') {\n return documentData;\n } else if (Array.isArray(documentData)) {\n return documentData.map(row => fromDexieToStorageField(row));\n } else if (typeof documentData === 'object') {\n const ret: any = {};\n Object.entries(documentData).forEach(([key, value]) => {\n if (typeof value === 'object' || Array.isArray(documentData)) {\n value = fromDexieToStorageField(value);\n }\n ret[dexieReplaceIfStartsWithPipeRevert(key)] = value;\n });\n return ret;\n }\n}\n\n\n/**\n * Creates a string that can be used to create the dexie store.\n * @link https://dexie.org/docs/API-Reference#quick-reference\n */\nexport function getDexieStoreSchema(\n rxJsonSchema: RxJsonSchema\n): string {\n let parts: string[][] = [];\n\n /**\n * First part must be the primary key\n * @link https://github.com/dexie/Dexie.js/issues/1307#issuecomment-846590912\n */\n const primaryKey = getPrimaryFieldOfPrimaryKey(rxJsonSchema.primaryKey);\n parts.push([primaryKey]);\n parts.push(['_deleted', primaryKey]);\n\n // add other indexes\n if (rxJsonSchema.indexes) {\n 
rxJsonSchema.indexes.forEach(index => {\n const arIndex = toArray(index);\n parts.push(arIndex);\n });\n }\n\n // we also need the _meta.lwt+primaryKey index for the getChangedDocumentsSince() method.\n parts.push(['_meta.lwt', primaryKey]);\n\n // and this one for the cleanup()\n parts.push(['_meta.lwt']);\n\n /**\n * It is not possible to set non-javascript-variable-syntax\n * keys as IndexedDB indexes. So we have to substitute the pipe-char\n * which comes from the key-compression plugin.\n */\n parts = parts.map(part => {\n return part.map(str => dexieReplaceIfStartsWithPipe(str));\n });\n\n let dexieSchemaRows = parts.map(part => {\n if (part.length === 1) {\n return part[0];\n } else {\n return '[' + part.join('+') + ']';\n }\n });\n dexieSchemaRows = dexieSchemaRows.filter((elem: any, pos: any, arr: any) => arr.indexOf(elem) === pos); // unique;\n const dexieSchema = dexieSchemaRows.join(', ');\n\n return dexieSchema;\n}\n\n/**\n * Returns all documents in the database.\n * Non-deleted plus deleted ones.\n */\nexport async function getDocsInDb(\n internals: DexieStorageInternals,\n docIds: string[]\n): Promise[]> {\n const state = await internals;\n const docsInDb = await state.dexieTable.bulkGet(docIds);\n return docsInDb.map(d => fromDexieToStorage(state.booleanIndexes, d));\n}\n\n\nexport function attachmentObjectId(documentId: string, attachmentId: string): string {\n return documentId + '||' + attachmentId;\n}\n\n\nexport function getBooleanIndexes(schema: RxJsonSchema): string[] {\n const checkedFields = new Set();\n const ret: string[] = [];\n if (!schema.indexes) {\n return ret;\n }\n schema.indexes.forEach(index => {\n const fields = toArray(index);\n fields.forEach(field => {\n if (checkedFields.has(field)) {\n return;\n }\n checkedFields.add(field);\n const schemaObj = getSchemaByObjectPath(schema, field);\n if (schemaObj.type === 'boolean') {\n ret.push(field);\n }\n });\n });\n ret.push('_deleted');\n\n return 
uniqueArray(ret);\n}\n\n"],"mappings":"AAKA,SAASA,KAAK,QAAQ,OAAO;AAE7B,SAASC,SAAS,EAAEC,kBAAkB,EAAEC,WAAW,EAAEC,WAAW,EAAEC,OAAO,EAAEC,WAAW,QAAQ,mBAAmB;AACjH,SACIC,2BAA2B,EAC3BC,qBAAqB,QAClB,2BAA2B;AAElC,OAAO,IAAMC,qBAAqB,GAAG,MAAM;AAC3C,OAAO,IAAMC,wBAAwB,GAAG,SAAS;AACjD,OAAO,IAAMC,4BAA4B,GAAG,aAAa;AAEzD,OAAO,IAAMC,qBAAqB,GAAG,OAAO;AAE5C,IAAMC,sBAA0D,GAAG,IAAIC,GAAG,CAAC,CAAC;AAC5E,IAAMC,sBAA0D,GAAG,IAAID,GAAG,CAAC,CAAC;AAC5E,OAAO,SAASE,oBAAoBA,CAChCC,YAAoB,EACpBC,cAAsB,EACtBC,QAAuB,EACvBC,MAAyB,EACJ;EACrB,IAAMC,WAAW,GAAG,aAAa,GAAGJ,YAAY,GAAG,IAAI,GAAGG,MAAM,CAACE,OAAO,GAAG,IAAI,GAAGJ,cAAc;EAEhG,IAAMK,KAAK,GAAGrB,kBAAkB,CAC5BW,sBAAsB,EACtBQ,WAAW,EACX,MAAM;IACF,IAAMG,KAAK,GAAG,CAAC,YAAY;MACvB;AAChB;AACA;AACA;AACA;MACgB,IAAMC,WAAW,GAAGxB,SAAS,CAACkB,QAAQ,CAAC;MACvCM,WAAW,CAACC,QAAQ,GAAG,KAAK;MAC5B,IAAMC,OAAO,GAAG,IAAI3B,KAAK,CAACqB,WAAW,EAAEI,WAAW,CAAC;MACnD,IAAMG,mBAAmB,GAAG;QACxB,CAACnB,qBAAqB,GAAGoB,mBAAmB,CAACT,MAAM,CAAC;QACpD,CAACV,wBAAwB,GAAG,gBAAgB;QAC5C,CAACC,4BAA4B,GAAG;MACpC,CAAC;MAEDgB,OAAO,CAACL,OAAO,CAAC,CAAC,CAAC,CAACQ,MAAM,CAACF,mBAAmB,CAAC;MAC9C,MAAMD,OAAO,CAACI,IAAI,CAAC,CAAC;MAEpB,OAAO;QACHJ,OAAO;QACPK,UAAU,EAAGL,OAAO,CAASlB,qBAAqB,CAAC;QACnDwB,qBAAqB,EAAGN,OAAO,CAAShB,4BAA4B,CAAC;QACrEuB,cAAc,EAAEC,iBAAiB,CAACf,MAAM;MAC5C,CAAC;IACL,CAAC,EAAE,CAAC;IACJP,sBAAsB,CAACuB,GAAG,CAACf,WAAW,EAAEE,KAAK,CAAC;IAC9CR,sBAAsB,CAACqB,GAAG,CAACb,KAAK,EAAE,CAAC,CAAC;IACpC,OAAOC,KAAK;EAChB,CACJ,CAAC;EACD,OAAOD,KAAK;AAChB;AAEA,OAAO,eAAec,YAAYA,CAACC,YAAmC,EAAE;EACpE,IAAMf,KAAK,GAAG,MAAMe,YAAY;EAChC,IAAMC,SAAS,GAAGxB,sBAAsB,CAACyB,GAAG,CAACF,YAAY,CAAC;EAC1D,IAAMG,QAAQ,GAAIF,SAAS,GAAW,CAAC;EACvC,IAAIE,QAAQ,KAAK,CAAC,EAAE;IAChBlB,KAAK,CAACI,OAAO,CAACe,KAAK,CAAC,CAAC;IACrB3B,sBAAsB,CAAC4B,MAAM,CAACL,YAAY,CAAC;EAC/C,CAAC,MAAM;IACHvB,sBAAsB,CAACqB,GAAG,CAACE,YAAY,EAAEG,QAAQ,CAAC;EACtD;AACJ;;AAIA;AACA;AACA;AACA;AACA;AACA,OAAO,IAAMG,qBAAqB,GAAG,IAAI;AACzC,OAAO,SAASC,4BAA4BA,CAACC,GAAW,EAAU;EAC9D,IAAMC,KAAK,GAAGD,GAAG,CAACC,KAAK,CAAC,GAAG,CAAC;EAC5B,IAAIA,KAAK,CAACC,MAAM,GAAG,CA
AC,EAAE;IAClB,OAAOD,KAAK,CAACE,GAAG,CAACC,IAAI,IAAIL,4BAA4B,CAACK,IAAI,CAAC,CAAC,CAACC,IAAI,CAAC,GAAG,CAAC;EAC1E;EAEA,IAAIL,GAAG,CAACM,UAAU,CAAC,GAAG,CAAC,EAAE;IACrB,IAAMC,YAAY,GAAGP,GAAG,CAACQ,SAAS,CAAC,CAAC,CAAC;IACrC,OAAOV,qBAAqB,GAAGS,YAAY;EAC/C,CAAC,MAAM;IACH,OAAOP,GAAG;EACd;AACJ;AAEA,OAAO,SAASS,kCAAkCA,CAACT,GAAW,EAAU;EACpE,IAAMC,KAAK,GAAGD,GAAG,CAACC,KAAK,CAAC,GAAG,CAAC;EAC5B,IAAIA,KAAK,CAACC,MAAM,GAAG,CAAC,EAAE;IAClB,OAAOD,KAAK,CAACE,GAAG,CAACC,IAAI,IAAIK,kCAAkC,CAACL,IAAI,CAAC,CAAC,CAACC,IAAI,CAAC,GAAG,CAAC;EAChF;EAEA,IAAIL,GAAG,CAACM,UAAU,CAACR,qBAAqB,CAAC,EAAE;IACvC,IAAMS,YAAY,GAAGP,GAAG,CAACQ,SAAS,CAACV,qBAAqB,CAACI,MAAM,CAAC;IAChE,OAAO,GAAG,GAAGK,YAAY;EAC7B,CAAC,MAAM;IACH,OAAOP,GAAG;EACd;AACJ;;AAGA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASU,kBAAkBA,CAC9BtB,cAAwB,EACxBuB,CAA4B,EACzB;EACH,IAAI,CAACA,CAAC,EAAE;IACJ,OAAOA,CAAC;EACZ;EACAA,CAAC,GAAGxD,SAAS,CAACwD,CAAC,CAAC;EAChBA,CAAC,GAAGC,uBAAuB,CAACD,CAAC,CAAC;EAE9BvB,cAAc,CAACyB,OAAO,CAACC,GAAG,IAAI;IAC1B,IAAMC,GAAG,GAAG1D,WAAW,CAACsD,CAAC,EAAEG,GAAG,CAAC;IAC/B,IAAME,MAAM,GAAGD,GAAG,GAAG,GAAG,GAAG,GAAG;IAC9BzD,WAAW,CAACqD,CAAC,EAAEG,GAAG,EAAEE,MAAM,CAAC;EAC/B,CAAC,CAAC;EAEF,OAAOL,CAAC;AACZ;AACA,OAAO,SAASM,kBAAkBA,CAC9B7B,cAAwB,EACxBuB,CAAM,EACmB;EACzB,IAAI,CAACA,CAAC,EAAE;IACJ,OAAOA,CAAC;EACZ;EAEAA,CAAC,GAAGxD,SAAS,CAACwD,CAAC,CAAC;EAChBA,CAAC,GAAGO,uBAAuB,CAACP,CAAC,CAAC;EAE9BvB,cAAc,CAACyB,OAAO,CAACC,GAAG,IAAI;IAC1B,IAAMC,GAAG,GAAG1D,WAAW,CAACsD,CAAC,EAAEG,GAAG,CAAC;IAC/B,IAAME,MAAM,GAAGD,GAAG,KAAK,GAAG,GAAG,IAAI,GAAG,KAAK;IACzCzD,WAAW,CAACqD,CAAC,EAAEG,GAAG,EAAEE,MAAM,CAAC;EAC/B,CAAC,CAAC;EAEF,OAAOL,CAAC;AACZ;;AAEA;AACA;AACA;AACA,OAAO,SAASC,uBAAuBA,CAACO,YAAiC,EAAO;EAC5E,IACI,CAACA,YAAY,IACb,OAAOA,YAAY,KAAK,QAAQ,IAChC,OAAOA,YAAY,KAAK,QAAQ,IAChC,OAAOA,YAAY,KAAK,SAAS,EACnC;IACE,OAAOA,YAAY;EACvB,CAAC,MAAM,IAAIC,KAAK,CAACC,OAAO,CAACF,YAAY,CAAC,EAAE;IACpC,OAAOA,YAAY,CAAChB,GAAG,CAACmB,GAAG,IAAIV,uBAAuB,CAACU,GAAG,CAAC,CAAC;EAChE,CAAC,MAAM,IAAI,OAAOH,YAAY,KAAK,QAAQ,EAAE;IACzC,IAAMI,GAAQ,GAAG,CAAC,CAAC;IACnBC,MAAM,C
AACC,OAAO,CAACN,YAAY,CAAC,CAACN,OAAO,CAAC,CAAC,CAACa,GAAG,EAAEhD,KAAK,CAAC,KAAK;MACnD,IAAI,OAAOA,KAAK,KAAK,QAAQ,EAAE;QAC3BA,KAAK,GAAGkC,uBAAuB,CAAClC,KAAK,CAAC;MAC1C;MACA6C,GAAG,CAACxB,4BAA4B,CAAC2B,GAAG,CAAC,CAAC,GAAGhD,KAAK;IAClD,CAAC,CAAC;IACF,OAAO6C,GAAG;EACd;AACJ;AAEA,OAAO,SAASL,uBAAuBA,CAACC,YAAiB,EAAuB;EAC5E,IAAI,CAACA,YAAY,IAAI,OAAOA,YAAY,KAAK,QAAQ,IAAI,OAAOA,YAAY,KAAK,QAAQ,IAAI,OAAOA,YAAY,KAAK,SAAS,EAAE;IAC5H,OAAOA,YAAY;EACvB,CAAC,MAAM,IAAIC,KAAK,CAACC,OAAO,CAACF,YAAY,CAAC,EAAE;IACpC,OAAOA,YAAY,CAAChB,GAAG,CAACmB,GAAG,IAAIJ,uBAAuB,CAACI,GAAG,CAAC,CAAC;EAChE,CAAC,MAAM,IAAI,OAAOH,YAAY,KAAK,QAAQ,EAAE;IACzC,IAAMI,GAAQ,GAAG,CAAC,CAAC;IACnBC,MAAM,CAACC,OAAO,CAACN,YAAY,CAAC,CAACN,OAAO,CAAC,CAAC,CAACa,GAAG,EAAEhD,KAAK,CAAC,KAAK;MACnD,IAAI,OAAOA,KAAK,KAAK,QAAQ,IAAI0C,KAAK,CAACC,OAAO,CAACF,YAAY,CAAC,EAAE;QAC1DzC,KAAK,GAAGwC,uBAAuB,CAACxC,KAAK,CAAC;MAC1C;MACA6C,GAAG,CAACd,kCAAkC,CAACiB,GAAG,CAAC,CAAC,GAAGhD,KAAK;IACxD,CAAC,CAAC;IACF,OAAO6C,GAAG;EACd;AACJ;;AAGA;AACA;AACA;AACA;AACA,OAAO,SAASxC,mBAAmBA,CAC/B4C,YAA+B,EACzB;EACN,IAAIC,KAAiB,GAAG,EAAE;;EAE1B;AACJ;AACA;AACA;EACI,IAAMC,UAAU,GAAGpE,2BAA2B,CAACkE,YAAY,CAACE,UAAU,CAAC;EACvED,KAAK,CAACE,IAAI,CAAC,CAACD,UAAU,CAAC,CAAC;EACxBD,KAAK,CAACE,IAAI,CAAC,CAAC,UAAU,EAAED,UAAU,CAAC,CAAC;;EAEpC;EACA,IAAIF,YAAY,CAACI,OAAO,EAAE;IACtBJ,YAAY,CAACI,OAAO,CAAClB,OAAO,CAACmB,KAAK,IAAI;MAClC,IAAMC,OAAO,GAAG1E,OAAO,CAACyE,KAAK,CAAC;MAC9BJ,KAAK,CAACE,IAAI,CAACG,OAAO,CAAC;IACvB,CAAC,CAAC;EACN;;EAEA;EACAL,KAAK,CAACE,IAAI,CAAC,CAAC,WAAW,EAAED,UAAU,CAAC,CAAC;;EAErC;EACAD,KAAK,CAACE,IAAI,CAAC,CAAC,WAAW,CAAC,CAAC;;EAEzB;AACJ;AACA;AACA;AACA;EACIF,KAAK,GAAGA,KAAK,CAACzB,GAAG,CAACC,IAAI,IAAI;IACtB,OAAOA,IAAI,CAACD,GAAG,CAACH,GAAG,IAAID,4BAA4B,CAACC,GAAG,CAAC,CAAC;EAC7D,CAAC,CAAC;EAEF,IAAIkC,eAAe,GAAGN,KAAK,CAACzB,GAAG,CAACC,IAAI,IAAI;IACpC,IAAIA,IAAI,CAACF,MAAM,KAAK,CAAC,EAAE;MACnB,OAAOE,IAAI,CAAC,CAAC,CAAC;IAClB,CAAC,MAAM;MACH,OAAO,GAAG,GAAGA,IAAI,CAACC,IAAI,CAAC,GAAG,CAAC,GAAG,GAAG;IACrC;EACJ,CAAC,CAAC;EACF6B,eAAe,GAAGA,eAAe,CAACC,MAAM,CAAC,CA
ACC,IAAS,EAAEC,GAAQ,EAAEC,GAAQ,KAAKA,GAAG,CAACC,OAAO,CAACH,IAAI,CAAC,KAAKC,GAAG,CAAC,CAAC,CAAC;EACxG,IAAMG,WAAW,GAAGN,eAAe,CAAC7B,IAAI,CAAC,IAAI,CAAC;EAE9C,OAAOmC,WAAW;AACtB;;AAEA;AACA;AACA;AACA;AACA,OAAO,eAAeC,WAAWA,CAC7BC,SAAgC,EAChCC,MAAgB,EACoB;EACpC,IAAMlE,KAAK,GAAG,MAAMiE,SAAS;EAC7B,IAAME,QAAQ,GAAG,MAAMnE,KAAK,CAACS,UAAU,CAAC2D,OAAO,CAACF,MAAM,CAAC;EACvD,OAAOC,QAAQ,CAACzC,GAAG,CAACQ,CAAC,IAAIM,kBAAkB,CAACxC,KAAK,CAACW,cAAc,EAAEuB,CAAC,CAAC,CAAC;AACzE;AAGA,OAAO,SAASmC,kBAAkBA,CAACC,UAAkB,EAAEC,YAAoB,EAAU;EACjF,OAAOD,UAAU,GAAG,IAAI,GAAGC,YAAY;AAC3C;AAGA,OAAO,SAAS3D,iBAAiBA,CAACf,MAAyB,EAAY;EACnE,IAAM2E,aAAa,GAAG,IAAIC,GAAG,CAAS,CAAC;EACvC,IAAM3B,GAAa,GAAG,EAAE;EACxB,IAAI,CAACjD,MAAM,CAACyD,OAAO,EAAE;IACjB,OAAOR,GAAG;EACd;EACAjD,MAAM,CAACyD,OAAO,CAAClB,OAAO,CAACmB,KAAK,IAAI;IAC5B,IAAMmB,MAAM,GAAG5F,OAAO,CAACyE,KAAK,CAAC;IAC7BmB,MAAM,CAACtC,OAAO,CAACuC,KAAK,IAAI;MACpB,IAAIH,aAAa,CAACI,GAAG,CAACD,KAAK,CAAC,EAAE;QAC1B;MACJ;MACAH,aAAa,CAACK,GAAG,CAACF,KAAK,CAAC;MACxB,IAAMG,SAAS,GAAG7F,qBAAqB,CAACY,MAAM,EAAE8E,KAAK,CAAC;MACtD,IAAIG,SAAS,CAACC,IAAI,KAAK,SAAS,EAAE;QAC9BjC,GAAG,CAACO,IAAI,CAACsB,KAAK,CAAC;MACnB;IACJ,CAAC,CAAC;EACN,CAAC,CAAC;EACF7B,GAAG,CAACO,IAAI,CAAC,UAAU,CAAC;EAEpB,OAAOtE,WAAW,CAAC+D,GAAG,CAAC;AAC3B","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-dexie/dexie-query.js b/dist/esm/plugins/storage-dexie/dexie-query.js deleted file mode 100644 index 5e53934783b..00000000000 --- a/dist/esm/plugins/storage-dexie/dexie-query.js +++ /dev/null @@ -1,151 +0,0 @@ -import { INDEX_MAX, INDEX_MIN } from "../../query-planner.js"; -import { getQueryMatcher, getSortComparator } from "../../rx-query-helper.js"; -import { dexieReplaceIfStartsWithPipe, DEXIE_DOCS_TABLE_NAME, fromDexieToStorage } from "./dexie-helper.js"; -export function mapKeyForKeyRange(k) { - if (k === INDEX_MIN) { - return -Infinity; - } else { - return k; - } -} -function rangeFieldToBooleanSubstitute(booleanIndexes, fieldName, value) { - if 
(booleanIndexes.includes(fieldName)) { - var newValue = value === INDEX_MAX || value === true ? '1' : '0'; - return newValue; - } else { - return value; - } -} -export function getKeyRangeByQueryPlan(booleanIndexes, queryPlan, IDBKeyRange) { - if (!IDBKeyRange) { - if (typeof window === 'undefined') { - throw new Error('IDBKeyRange missing'); - } else { - IDBKeyRange = window.IDBKeyRange; - } - } - var startKeys = queryPlan.startKeys.map((v, i) => { - var fieldName = queryPlan.index[i]; - return rangeFieldToBooleanSubstitute(booleanIndexes, fieldName, v); - }).map(mapKeyForKeyRange); - var endKeys = queryPlan.endKeys.map((v, i) => { - var fieldName = queryPlan.index[i]; - return rangeFieldToBooleanSubstitute(booleanIndexes, fieldName, v); - }).map(mapKeyForKeyRange); - var keyRange = IDBKeyRange.bound(startKeys, endKeys, !queryPlan.inclusiveStart, !queryPlan.inclusiveEnd); - return keyRange; -} - -/** - * Runs mango queries over the Dexie.js database. - */ -export async function dexieQuery(instance, preparedQuery) { - var state = await instance.internals; - var query = preparedQuery.query; - var skip = query.skip ? query.skip : 0; - var limit = query.limit ? query.limit : Infinity; - var skipPlusLimit = skip + limit; - var queryPlan = preparedQuery.queryPlan; - var queryMatcher = false; - if (!queryPlan.selectorSatisfiedByIndex) { - queryMatcher = getQueryMatcher(instance.schema, preparedQuery.query); - } - var keyRange = getKeyRangeByQueryPlan(state.booleanIndexes, queryPlan, state.dexieDb._options.IDBKeyRange); - var queryPlanFields = queryPlan.index; - var rows = []; - await state.dexieDb.transaction('r', state.dexieTable, async dexieTx => { - /** - * TODO here we use the native IndexedDB transaction - * to get the cursor. - * Instead we should not leave Dexie.js API and find - * a way to create the cursor with Dexie.js. 
- */ - var tx = dexieTx.idbtrans; - - // const nativeIndexedDB = state.dexieDb.backendDB(); - // const trans = nativeIndexedDB.transaction([DEXIE_DOCS_TABLE_NAME], 'readonly'); - - var store = tx.objectStore(DEXIE_DOCS_TABLE_NAME); - var index; - var indexName; - indexName = '[' + queryPlanFields.map(field => dexieReplaceIfStartsWithPipe(field)).join('+') + ']'; - index = store.index(indexName); - var cursorReq = index.openCursor(keyRange); - await new Promise(res => { - cursorReq.onsuccess = function (e) { - var cursor = e.target.result; - if (cursor) { - // We have a record in cursor.value - var docData = fromDexieToStorage(state.booleanIndexes, cursor.value); - if (!queryMatcher || queryMatcher(docData)) { - rows.push(docData); - } - - /** - * If we do not have to manually sort - * and have enough documents, - * we can abort iterating over the cursor - * because we already have every relevant document. - */ - if (queryPlan.sortSatisfiedByIndex && rows.length === skipPlusLimit) { - res(); - } else { - cursor.continue(); - } - } else { - // Iteration complete - res(); - } - }; - }); - }); - if (!queryPlan.sortSatisfiedByIndex) { - var sortComparator = getSortComparator(instance.schema, preparedQuery.query); - rows = rows.sort(sortComparator); - } - - // apply skip and limit boundaries. - rows = rows.slice(skip, skipPlusLimit); - - /** - * Comment this in for debugging to check all fields in the database. 
- */ - // const docsInDb = await state.dexieTable.filter(queryMatcher).toArray(); - // let documents = docsInDb - // .map(docData => stripDexieKey(docData)) - // .sort(sortComparator); - // if (preparedQuery.skip) { - // documents = documents.slice(preparedQuery.skip); - // } - // if (preparedQuery.limit && documents.length > preparedQuery.limit) { - // documents = documents.slice(0, preparedQuery.limit); - // } - - return { - documents: rows - }; -} -export async function dexieCount(instance, preparedQuery) { - var state = await instance.internals; - var queryPlan = preparedQuery.queryPlan; - var queryPlanFields = queryPlan.index; - var keyRange = getKeyRangeByQueryPlan(state.booleanIndexes, queryPlan, state.dexieDb._options.IDBKeyRange); - var count = -1; - await state.dexieDb.transaction('r', state.dexieTable, async dexieTx => { - var tx = dexieTx.idbtrans; - var store = tx.objectStore(DEXIE_DOCS_TABLE_NAME); - var index; - var indexName; - indexName = '[' + queryPlanFields.map(field => dexieReplaceIfStartsWithPipe(field)).join('+') + ']'; - index = store.index(indexName); - var request = index.count(keyRange); - count = await new Promise((res, rej) => { - request.onsuccess = function () { - res(request.result); - }; - request.onerror = err => rej(err); - }); - }); - return count; -} -//# sourceMappingURL=dexie-query.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-dexie/dexie-query.js.map b/dist/esm/plugins/storage-dexie/dexie-query.js.map deleted file mode 100644 index 853cdc9510f..00000000000 --- a/dist/esm/plugins/storage-dexie/dexie-query.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"dexie-query.js","names":["INDEX_MAX","INDEX_MIN","getQueryMatcher","getSortComparator","dexieReplaceIfStartsWithPipe","DEXIE_DOCS_TABLE_NAME","fromDexieToStorage","mapKeyForKeyRange","k","Infinity","rangeFieldToBooleanSubstitute","booleanIndexes","fieldName","value","includes","newValue","getKeyRangeByQueryPlan","queryPlan","IDBKeyRange","window","Error","startKeys","map","v","i","index","endKeys","keyRange","bound","inclusiveStart","inclusiveEnd","dexieQuery","instance","preparedQuery","state","internals","query","skip","limit","skipPlusLimit","queryMatcher","selectorSatisfiedByIndex","schema","dexieDb","_options","queryPlanFields","rows","transaction","dexieTable","dexieTx","tx","idbtrans","store","objectStore","indexName","field","join","cursorReq","openCursor","Promise","res","onsuccess","e","cursor","target","result","docData","push","sortSatisfiedByIndex","length","continue","sortComparator","sort","slice","documents","dexieCount","count","request","rej","onerror","err"],"sources":["../../../../src/plugins/storage-dexie/dexie-query.ts"],"sourcesContent":["import { INDEX_MAX, INDEX_MIN } from '../../query-planner.ts';\nimport { getQueryMatcher, getSortComparator } from '../../rx-query-helper.ts';\nimport type {\n PreparedQuery,\n QueryMatcher,\n RxDocumentData,\n RxQueryPlan,\n RxStorageQueryResult\n} from '../../types/index.d.ts';\nimport {\n dexieReplaceIfStartsWithPipe,\n DEXIE_DOCS_TABLE_NAME,\n fromDexieToStorage\n} from './dexie-helper.ts';\nimport type { RxStorageInstanceDexie } from './rx-storage-instance-dexie.ts';\n\nexport function mapKeyForKeyRange(k: any) {\n if (k === INDEX_MIN) {\n return -Infinity;\n } else {\n return k;\n }\n}\n\nfunction rangeFieldToBooleanSubstitute(\n booleanIndexes: string[],\n fieldName: string,\n value: any\n) {\n if (booleanIndexes.includes(fieldName)) {\n const newValue = value === INDEX_MAX || value === true ? 
'1' : '0';\n return newValue;\n } else {\n return value;\n }\n}\n\nexport function getKeyRangeByQueryPlan(\n booleanIndexes: string[],\n queryPlan: RxQueryPlan,\n IDBKeyRange?: any\n) {\n if (!IDBKeyRange) {\n if (typeof window === 'undefined') {\n throw new Error('IDBKeyRange missing');\n } else {\n IDBKeyRange = window.IDBKeyRange;\n }\n }\n\n\n const startKeys = queryPlan.startKeys\n .map((v, i) => {\n const fieldName = queryPlan.index[i];\n return rangeFieldToBooleanSubstitute(booleanIndexes, fieldName, v);\n })\n .map(mapKeyForKeyRange);\n const endKeys = queryPlan.endKeys\n .map((v, i) => {\n const fieldName = queryPlan.index[i];\n return rangeFieldToBooleanSubstitute(booleanIndexes, fieldName, v);\n })\n .map(mapKeyForKeyRange);\n\n const keyRange = IDBKeyRange.bound(\n startKeys,\n endKeys,\n !queryPlan.inclusiveStart,\n !queryPlan.inclusiveEnd\n );\n return keyRange;\n}\n\n\n/**\n * Runs mango queries over the Dexie.js database.\n */\nexport async function dexieQuery(\n instance: RxStorageInstanceDexie,\n preparedQuery: PreparedQuery\n): Promise> {\n const state = await instance.internals;\n const query = preparedQuery.query;\n\n const skip = query.skip ? query.skip : 0;\n const limit = query.limit ? 
query.limit : Infinity;\n const skipPlusLimit = skip + limit;\n const queryPlan = preparedQuery.queryPlan;\n\n let queryMatcher: QueryMatcher> | false = false;\n if (!queryPlan.selectorSatisfiedByIndex) {\n queryMatcher = getQueryMatcher(\n instance.schema,\n preparedQuery.query\n );\n }\n const keyRange = getKeyRangeByQueryPlan(\n state.booleanIndexes,\n queryPlan,\n (state.dexieDb as any)._options.IDBKeyRange\n );\n\n const queryPlanFields: string[] = queryPlan.index;\n\n let rows: any[] = [];\n await state.dexieDb.transaction(\n 'r',\n state.dexieTable,\n async (dexieTx) => {\n /**\n * TODO here we use the native IndexedDB transaction\n * to get the cursor.\n * Instead we should not leave Dexie.js API and find\n * a way to create the cursor with Dexie.js.\n */\n const tx = (dexieTx as any).idbtrans;\n\n // const nativeIndexedDB = state.dexieDb.backendDB();\n // const trans = nativeIndexedDB.transaction([DEXIE_DOCS_TABLE_NAME], 'readonly');\n\n const store = tx.objectStore(DEXIE_DOCS_TABLE_NAME);\n let index: any;\n let indexName: string;\n indexName = '[' +\n queryPlanFields\n .map(field => dexieReplaceIfStartsWithPipe(field))\n .join('+')\n + ']';\n index = store.index(indexName);\n\n\n const cursorReq = index.openCursor(keyRange);\n await new Promise(res => {\n cursorReq.onsuccess = function (e: any) {\n const cursor = e.target.result;\n if (cursor) {\n // We have a record in cursor.value\n const docData = fromDexieToStorage(state.booleanIndexes, cursor.value);\n if (!queryMatcher || queryMatcher(docData)) {\n rows.push(docData);\n }\n\n /**\n * If we do not have to manually sort\n * and have enough documents,\n * we can abort iterating over the cursor\n * because we already have every relevant document.\n */\n if (\n queryPlan.sortSatisfiedByIndex &&\n rows.length === skipPlusLimit\n ) {\n res();\n } else {\n cursor.continue();\n }\n } else {\n // Iteration complete\n res();\n }\n };\n });\n\n\n }\n );\n\n\n if (!queryPlan.sortSatisfiedByIndex) {\n const 
sortComparator = getSortComparator(instance.schema, preparedQuery.query);\n rows = rows.sort(sortComparator);\n }\n\n // apply skip and limit boundaries.\n rows = rows.slice(skip, skipPlusLimit);\n\n /**\n * Comment this in for debugging to check all fields in the database.\n */\n // const docsInDb = await state.dexieTable.filter(queryMatcher).toArray();\n // let documents = docsInDb\n // .map(docData => stripDexieKey(docData))\n // .sort(sortComparator);\n // if (preparedQuery.skip) {\n // documents = documents.slice(preparedQuery.skip);\n // }\n // if (preparedQuery.limit && documents.length > preparedQuery.limit) {\n // documents = documents.slice(0, preparedQuery.limit);\n // }\n\n\n\n return {\n documents: rows\n };\n}\n\n\nexport async function dexieCount(\n instance: RxStorageInstanceDexie,\n preparedQuery: PreparedQuery\n): Promise {\n const state = await instance.internals;\n const queryPlan = preparedQuery.queryPlan;\n const queryPlanFields: string[] = queryPlan.index;\n\n const keyRange = getKeyRangeByQueryPlan(\n state.booleanIndexes,\n queryPlan,\n (state.dexieDb as any)._options.IDBKeyRange\n );\n let count: number = -1;\n await state.dexieDb.transaction(\n 'r',\n state.dexieTable,\n async (dexieTx) => {\n const tx = (dexieTx as any).idbtrans;\n const store = tx.objectStore(DEXIE_DOCS_TABLE_NAME);\n let index: any;\n let indexName: string;\n indexName = '[' +\n queryPlanFields\n .map(field => dexieReplaceIfStartsWithPipe(field))\n .join('+')\n + ']';\n index = store.index(indexName);\n const request = index.count(keyRange);\n count = await new Promise((res, rej) => {\n request.onsuccess = function () {\n res(request.result);\n };\n request.onerror = (err: any) => rej(err);\n });\n }\n );\n return 
count;\n}\n"],"mappings":"AAAA,SAASA,SAAS,EAAEC,SAAS,QAAQ,wBAAwB;AAC7D,SAASC,eAAe,EAAEC,iBAAiB,QAAQ,0BAA0B;AAQ7E,SACIC,4BAA4B,EAC5BC,qBAAqB,EACrBC,kBAAkB,QACf,mBAAmB;AAG1B,OAAO,SAASC,iBAAiBA,CAACC,CAAM,EAAE;EACtC,IAAIA,CAAC,KAAKP,SAAS,EAAE;IACjB,OAAO,CAACQ,QAAQ;EACpB,CAAC,MAAM;IACH,OAAOD,CAAC;EACZ;AACJ;AAEA,SAASE,6BAA6BA,CAClCC,cAAwB,EACxBC,SAAiB,EACjBC,KAAU,EACZ;EACE,IAAIF,cAAc,CAACG,QAAQ,CAACF,SAAS,CAAC,EAAE;IACpC,IAAMG,QAAQ,GAAGF,KAAK,KAAKb,SAAS,IAAIa,KAAK,KAAK,IAAI,GAAG,GAAG,GAAG,GAAG;IAClE,OAAOE,QAAQ;EACnB,CAAC,MAAM;IACH,OAAOF,KAAK;EAChB;AACJ;AAEA,OAAO,SAASG,sBAAsBA,CAClCL,cAAwB,EACxBM,SAAsB,EACtBC,WAAiB,EACnB;EACE,IAAI,CAACA,WAAW,EAAE;IACd,IAAI,OAAOC,MAAM,KAAK,WAAW,EAAE;MAC/B,MAAM,IAAIC,KAAK,CAAC,qBAAqB,CAAC;IAC1C,CAAC,MAAM;MACHF,WAAW,GAAGC,MAAM,CAACD,WAAW;IACpC;EACJ;EAGA,IAAMG,SAAS,GAAGJ,SAAS,CAACI,SAAS,CAChCC,GAAG,CAAC,CAACC,CAAC,EAAEC,CAAC,KAAK;IACX,IAAMZ,SAAS,GAAGK,SAAS,CAACQ,KAAK,CAACD,CAAC,CAAC;IACpC,OAAOd,6BAA6B,CAACC,cAAc,EAAEC,SAAS,EAAEW,CAAC,CAAC;EACtE,CAAC,CAAC,CACDD,GAAG,CAACf,iBAAiB,CAAC;EAC3B,IAAMmB,OAAO,GAAGT,SAAS,CAACS,OAAO,CAC5BJ,GAAG,CAAC,CAACC,CAAC,EAAEC,CAAC,KAAK;IACX,IAAMZ,SAAS,GAAGK,SAAS,CAACQ,KAAK,CAACD,CAAC,CAAC;IACpC,OAAOd,6BAA6B,CAACC,cAAc,EAAEC,SAAS,EAAEW,CAAC,CAAC;EACtE,CAAC,CAAC,CACDD,GAAG,CAACf,iBAAiB,CAAC;EAE3B,IAAMoB,QAAQ,GAAGT,WAAW,CAACU,KAAK,CAC9BP,SAAS,EACTK,OAAO,EACP,CAACT,SAAS,CAACY,cAAc,EACzB,CAACZ,SAAS,CAACa,YACf,CAAC;EACD,OAAOH,QAAQ;AACnB;;AAGA;AACA;AACA;AACA,OAAO,eAAeI,UAAUA,CAC5BC,QAA2C,EAC3CC,aAAuC,EACC;EACxC,IAAMC,KAAK,GAAG,MAAMF,QAAQ,CAACG,SAAS;EACtC,IAAMC,KAAK,GAAGH,aAAa,CAACG,KAAK;EAEjC,IAAMC,IAAI,GAAGD,KAAK,CAACC,IAAI,GAAGD,KAAK,CAACC,IAAI,GAAG,CAAC;EACxC,IAAMC,KAAK,GAAGF,KAAK,CAACE,KAAK,GAAGF,KAAK,CAACE,KAAK,GAAG7B,QAAQ;EAClD,IAAM8B,aAAa,GAAGF,IAAI,GAAGC,KAAK;EAClC,IAAMrB,SAAS,GAAGgB,aAAa,CAAChB,SAAS;EAEzC,IAAIuB,YAA6D,GAAG,KAAK;EACzE,IAAI,CAACvB,SAAS,CAACwB,wBAAwB,EAAE;IACrCD,YAAY,GAAGtC,eAAe,CAC1B8B,QAAQ,CAACU,MAAM,EACfT,aAAa,CAACG,KAClB,CAAC;EACL;EACA,IAAMT,QAAQ,GAAGX,sBAAsB,CACnCkB,KAAK,CAACvB,cAAc,EACpBM,SA
AS,EACRiB,KAAK,CAACS,OAAO,CAASC,QAAQ,CAAC1B,WACpC,CAAC;EAED,IAAM2B,eAAyB,GAAG5B,SAAS,CAACQ,KAAK;EAEjD,IAAIqB,IAAW,GAAG,EAAE;EACpB,MAAMZ,KAAK,CAACS,OAAO,CAACI,WAAW,CAC3B,GAAG,EACHb,KAAK,CAACc,UAAU,EAChB,MAAOC,OAAO,IAAK;IACf;AACZ;AACA;AACA;AACA;AACA;IACY,IAAMC,EAAE,GAAID,OAAO,CAASE,QAAQ;;IAEpC;IACA;;IAEA,IAAMC,KAAK,GAAGF,EAAE,CAACG,WAAW,CAAChD,qBAAqB,CAAC;IACnD,IAAIoB,KAAU;IACd,IAAI6B,SAAiB;IACrBA,SAAS,GAAG,GAAG,GACXT,eAAe,CACVvB,GAAG,CAACiC,KAAK,IAAInD,4BAA4B,CAACmD,KAAK,CAAC,CAAC,CACjDC,IAAI,CAAC,GAAG,CAAC,GACZ,GAAG;IACT/B,KAAK,GAAG2B,KAAK,CAAC3B,KAAK,CAAC6B,SAAS,CAAC;IAG9B,IAAMG,SAAS,GAAGhC,KAAK,CAACiC,UAAU,CAAC/B,QAAQ,CAAC;IAC5C,MAAM,IAAIgC,OAAO,CAAOC,GAAG,IAAI;MAC3BH,SAAS,CAACI,SAAS,GAAG,UAAUC,CAAM,EAAE;QACpC,IAAMC,MAAM,GAAGD,CAAC,CAACE,MAAM,CAACC,MAAM;QAC9B,IAAIF,MAAM,EAAE;UACR;UACA,IAAMG,OAAO,GAAG5D,kBAAkB,CAAY4B,KAAK,CAACvB,cAAc,EAAEoD,MAAM,CAAClD,KAAK,CAAC;UACjF,IAAI,CAAC2B,YAAY,IAAIA,YAAY,CAAC0B,OAAO,CAAC,EAAE;YACxCpB,IAAI,CAACqB,IAAI,CAACD,OAAO,CAAC;UACtB;;UAEA;AACxB;AACA;AACA;AACA;AACA;UACwB,IACIjD,SAAS,CAACmD,oBAAoB,IAC9BtB,IAAI,CAACuB,MAAM,KAAK9B,aAAa,EAC/B;YACEqB,GAAG,CAAC,CAAC;UACT,CAAC,MAAM;YACHG,MAAM,CAACO,QAAQ,CAAC,CAAC;UACrB;QACJ,CAAC,MAAM;UACH;UACAV,GAAG,CAAC,CAAC;QACT;MACJ,CAAC;IACL,CAAC,CAAC;EAGN,CACJ,CAAC;EAGD,IAAI,CAAC3C,SAAS,CAACmD,oBAAoB,EAAE;IACjC,IAAMG,cAAc,GAAGpE,iBAAiB,CAAC6B,QAAQ,CAACU,MAAM,EAAET,aAAa,CAACG,KAAK,CAAC;IAC9EU,IAAI,GAAGA,IAAI,CAAC0B,IAAI,CAACD,cAAc,CAAC;EACpC;;EAEA;EACAzB,IAAI,GAAGA,IAAI,CAAC2B,KAAK,CAACpC,IAAI,EAAEE,aAAa,CAAC;;EAEtC;AACJ;AACA;EACI;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;;EAIA,OAAO;IACHmC,SAAS,EAAE5B;EACf,CAAC;AACL;AAGA,OAAO,eAAe6B,UAAUA,CAC5B3C,QAA2C,EAC3CC,aAAuC,EACxB;EACf,IAAMC,KAAK,GAAG,MAAMF,QAAQ,CAACG,SAAS;EACtC,IAAMlB,SAAS,GAAGgB,aAAa,CAAChB,SAAS;EACzC,IAAM4B,eAAyB,GAAG5B,SAAS,CAACQ,KAAK;EAEjD,IAAME,QAAQ,GAAGX,sBAAsB,CACnCkB,KAAK,CAACvB,cAAc,EACpBM,SAAS,EACRiB,KAAK,CAACS,OAAO,CAASC,QAAQ,CAAC1B,WACpC,CAAC;EACD,IAAI0D,KAAa,GAAG,CAAC,CAAC;EACtB,MAAM1C,KAAK,CAACS,OAAO,CAACI,WAAW,CAC3B,GAAG,EACHb,KA
AK,CAACc,UAAU,EAChB,MAAOC,OAAO,IAAK;IACf,IAAMC,EAAE,GAAID,OAAO,CAASE,QAAQ;IACpC,IAAMC,KAAK,GAAGF,EAAE,CAACG,WAAW,CAAChD,qBAAqB,CAAC;IACnD,IAAIoB,KAAU;IACd,IAAI6B,SAAiB;IACrBA,SAAS,GAAG,GAAG,GACXT,eAAe,CACVvB,GAAG,CAACiC,KAAK,IAAInD,4BAA4B,CAACmD,KAAK,CAAC,CAAC,CACjDC,IAAI,CAAC,GAAG,CAAC,GACZ,GAAG;IACT/B,KAAK,GAAG2B,KAAK,CAAC3B,KAAK,CAAC6B,SAAS,CAAC;IAC9B,IAAMuB,OAAO,GAAGpD,KAAK,CAACmD,KAAK,CAACjD,QAAQ,CAAC;IACrCiD,KAAK,GAAG,MAAM,IAAIjB,OAAO,CAAS,CAACC,GAAG,EAAEkB,GAAG,KAAK;MAC5CD,OAAO,CAAChB,SAAS,GAAG,YAAY;QAC5BD,GAAG,CAACiB,OAAO,CAACZ,MAAM,CAAC;MACvB,CAAC;MACDY,OAAO,CAACE,OAAO,GAAIC,GAAQ,IAAKF,GAAG,CAACE,GAAG,CAAC;IAC5C,CAAC,CAAC;EACN,CACJ,CAAC;EACD,OAAOJ,KAAK;AAChB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-dexie/index.js b/dist/esm/plugins/storage-dexie/index.js deleted file mode 100644 index e8962638ad2..00000000000 --- a/dist/esm/plugins/storage-dexie/index.js +++ /dev/null @@ -1,5 +0,0 @@ -export * from "./rx-storage-dexie.js"; -export * from "./rx-storage-instance-dexie.js"; -export * from "./dexie-helper.js"; -export * from "./dexie-query.js"; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-dexie/index.js.map b/dist/esm/plugins/storage-dexie/index.js.map deleted file mode 100644 index 3d455f0c088..00000000000 --- a/dist/esm/plugins/storage-dexie/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":[],"sources":["../../../../src/plugins/storage-dexie/index.ts"],"sourcesContent":["export * from './rx-storage-dexie.ts';\nexport * from './rx-storage-instance-dexie.ts';\nexport * from './dexie-helper.ts';\nexport * from './dexie-query.ts';\n"],"mappings":"AAAA,cAAc,uBAAuB;AACrC,cAAc,gCAAgC;AAC9C,cAAc,mBAAmB;AACjC,cAAc,kBAAkB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-dexie/rx-storage-dexie.js b/dist/esm/plugins/storage-dexie/rx-storage-dexie.js deleted file mode 100644 index 57b8e1d1018..00000000000 
--- a/dist/esm/plugins/storage-dexie/rx-storage-dexie.js +++ /dev/null @@ -1,22 +0,0 @@ -import { RX_STORAGE_NAME_DEXIE } from "./dexie-helper.js"; -import { createDexieStorageInstance } from "./rx-storage-instance-dexie.js"; -import { ensureRxStorageInstanceParamsAreCorrect } from "../../rx-storage-helper.js"; -import { RXDB_VERSION } from "../utils/utils-rxdb-version.js"; -export var RxStorageDexie = /*#__PURE__*/function () { - function RxStorageDexie(settings) { - this.name = RX_STORAGE_NAME_DEXIE; - this.rxdbVersion = RXDB_VERSION; - this.settings = settings; - } - var _proto = RxStorageDexie.prototype; - _proto.createStorageInstance = function createStorageInstance(params) { - ensureRxStorageInstanceParamsAreCorrect(params); - return createDexieStorageInstance(this, params, this.settings); - }; - return RxStorageDexie; -}(); -export function getRxStorageDexie(settings = {}) { - var storage = new RxStorageDexie(settings); - return storage; -} -//# sourceMappingURL=rx-storage-dexie.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-dexie/rx-storage-dexie.js.map b/dist/esm/plugins/storage-dexie/rx-storage-dexie.js.map deleted file mode 100644 index ae01887b68c..00000000000 --- a/dist/esm/plugins/storage-dexie/rx-storage-dexie.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-storage-dexie.js","names":["RX_STORAGE_NAME_DEXIE","createDexieStorageInstance","ensureRxStorageInstanceParamsAreCorrect","RXDB_VERSION","RxStorageDexie","settings","name","rxdbVersion","_proto","prototype","createStorageInstance","params","getRxStorageDexie","storage"],"sources":["../../../../src/plugins/storage-dexie/rx-storage-dexie.ts"],"sourcesContent":["import type {\n RxStorage,\n RxStorageInstanceCreationParams\n} from '../../types/index.d.ts';\nimport {\n RX_STORAGE_NAME_DEXIE\n} from './dexie-helper.ts';\nimport type {\n DexieSettings,\n DexieStorageInternals\n} from '../../types/plugins/dexie.d.ts';\nimport {\n createDexieStorageInstance,\n 
RxStorageInstanceDexie\n} from './rx-storage-instance-dexie.ts';\nimport { ensureRxStorageInstanceParamsAreCorrect } from '../../rx-storage-helper.ts';\nimport { RXDB_VERSION } from '../utils/utils-rxdb-version.ts';\n\n\n\nexport class RxStorageDexie implements RxStorage {\n public name = RX_STORAGE_NAME_DEXIE;\n public readonly rxdbVersion = RXDB_VERSION;\n constructor(\n public settings: DexieSettings\n ) { }\n\n public createStorageInstance(\n params: RxStorageInstanceCreationParams\n ): Promise> {\n ensureRxStorageInstanceParamsAreCorrect(params);\n return createDexieStorageInstance(this, params, this.settings);\n }\n}\n\n\nexport function getRxStorageDexie(\n settings: DexieSettings = {}\n): RxStorageDexie {\n const storage = new RxStorageDexie(settings);\n return storage;\n}\n"],"mappings":"AAIA,SACIA,qBAAqB,QAClB,mBAAmB;AAK1B,SACIC,0BAA0B,QAEvB,gCAAgC;AACvC,SAASC,uCAAuC,QAAQ,4BAA4B;AACpF,SAASC,YAAY,QAAQ,gCAAgC;AAI7D,WAAaC,cAAc;EAGvB,SAAAA,eACWC,QAAuB,EAChC;IAAA,KAJKC,IAAI,GAAGN,qBAAqB;IAAA,KACnBO,WAAW,GAAGJ,YAAY;IAAA,KAE/BE,QAAuB,GAAvBA,QAAuB;EAC9B;EAAC,IAAAG,MAAA,GAAAJ,cAAA,CAAAK,SAAA;EAAAD,MAAA,CAEEE,qBAAqB,GAA5B,SAAAA,sBACIC,MAAiE,EACvB;IAC1CT,uCAAuC,CAACS,MAAM,CAAC;IAC/C,OAAOV,0BAA0B,CAAC,IAAI,EAAEU,MAAM,EAAE,IAAI,CAACN,QAAQ,CAAC;EAClE,CAAC;EAAA,OAAAD,cAAA;AAAA;AAIL,OAAO,SAASQ,iBAAiBA,CAC7BP,QAAuB,GAAG,CAAC,CAAC,EACd;EACd,IAAMQ,OAAO,GAAG,IAAIT,cAAc,CAACC,QAAQ,CAAC;EAC5C,OAAOQ,OAAO;AAClB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-dexie/rx-storage-instance-dexie.js b/dist/esm/plugins/storage-dexie/rx-storage-instance-dexie.js deleted file mode 100644 index ec8e768d8d6..00000000000 --- a/dist/esm/plugins/storage-dexie/rx-storage-instance-dexie.js +++ /dev/null @@ -1,251 +0,0 @@ -import { Subject } from 'rxjs'; -import { now, ensureNotFalsy, defaultHashSha256, RXDB_UTILS_GLOBAL, PREMIUM_FLAG_HASH } from "../utils/index.js"; -import { attachmentObjectId, closeDexieDb, fromStorageToDexie, getDexieDbWithTables, 
getDocsInDb, RX_STORAGE_NAME_DEXIE } from "./dexie-helper.js"; -import { dexieCount, dexieQuery } from "./dexie-query.js"; -import { getPrimaryFieldOfPrimaryKey } from "../../rx-schema-helper.js"; -import { categorizeBulkWriteRows, flatCloneDocWithMeta } from "../../rx-storage-helper.js"; -import { addRxStorageMultiInstanceSupport } from "../../rx-storage-multiinstance.js"; -import { newRxError } from "../../rx-error.js"; -var instanceId = now(); -export var DEXIE_TEST_META_FIELD = 'dexieTestMetaField'; -var shownNonPremiumLog = false; -export var RxStorageInstanceDexie = /*#__PURE__*/function () { - function RxStorageInstanceDexie(storage, databaseName, collectionName, schema, internals, options, settings, devMode) { - this.changes$ = new Subject(); - this.instanceId = instanceId++; - this.storage = storage; - this.databaseName = databaseName; - this.collectionName = collectionName; - this.schema = schema; - this.internals = internals; - this.options = options; - this.settings = settings; - this.devMode = devMode; - this.primaryPath = getPrimaryFieldOfPrimaryKey(this.schema.primaryKey); - } - var _proto = RxStorageInstanceDexie.prototype; - _proto.bulkWrite = async function bulkWrite(documentWrites, context) { - ensureNotClosed(this); - if (!shownNonPremiumLog && (!RXDB_UTILS_GLOBAL.premium || typeof RXDB_UTILS_GLOBAL.premium !== 'string' || (await defaultHashSha256(RXDB_UTILS_GLOBAL.premium)) !== PREMIUM_FLAG_HASH)) { - console.warn(['-------------- RxDB Open Core RxStorage -------------------------------', 'You are using the free Dexie.js based RxStorage implementation from RxDB https://rxdb.info/rx-storage-dexie.html?console=dexie ', 'While this is a great option, we want to let you know that there are faster storage solutions available in our premium plugins.', 'For professional users and production environments, we highly recommend considering these premium options to enhance performance and reliability.', ' https://rxdb.info/premium?console=dexie ', 'If you 
already purchased premium access you can disable this log by calling the setPremiumFlag() function from rxdb-premium/plugins/shared.', '---------------------------------------------------------------------'].join('\n')); - shownNonPremiumLog = true; - } else { - shownNonPremiumLog = true; - } - - /** - * Check some assumptions to ensure RxDB - * does not call the storage with an invalid write. - */ - documentWrites.forEach(row => { - // ensure revision is set - if (!row.document._rev || row.previous && !row.previous._rev) { - throw newRxError('SNH', { - args: { - row - } - }); - } - - // ensure prev-data is set - if (this.devMode) { - if (row.previous && (!row.previous._meta[DEXIE_TEST_META_FIELD] || row.previous._meta[DEXIE_TEST_META_FIELD] !== row.previous._rev)) { - console.dir(row); - throw new Error('missing or wrong _meta.' + DEXIE_TEST_META_FIELD); - } - } - }); - var state = await this.internals; - var ret = { - success: [], - error: [] - }; - - /** - * Some storages might add any _meta fields - * internally. To ensure RxDB can work with that in the - * test suite, we add a random field here. 
- * To ensure - */ - if (this.devMode) { - documentWrites = documentWrites.map(row => { - var doc = flatCloneDocWithMeta(row.document); - doc._meta[DEXIE_TEST_META_FIELD] = doc._rev; - return { - previous: row.previous, - document: doc - }; - }); - } - var documentKeys = documentWrites.map(writeRow => writeRow.document[this.primaryPath]); - var categorized; - await state.dexieDb.transaction('rw', state.dexieTable, state.dexieAttachmentsTable, async () => { - var docsInDbMap = new Map(); - var docsInDbWithInternals = await getDocsInDb(this.internals, documentKeys); - docsInDbWithInternals.forEach(docWithDexieInternals => { - var doc = docWithDexieInternals; - if (doc) { - docsInDbMap.set(doc[this.primaryPath], doc); - } - return doc; - }); - categorized = categorizeBulkWriteRows(this, this.primaryPath, docsInDbMap, documentWrites, context); - ret.error = categorized.errors; - - /** - * Batch up the database operations - * so we can later run them in bulk. - */ - var bulkPutDocs = []; - categorized.bulkInsertDocs.forEach(row => { - ret.success.push(row.document); - bulkPutDocs.push(row.document); - }); - categorized.bulkUpdateDocs.forEach(row => { - ret.success.push(row.document); - bulkPutDocs.push(row.document); - }); - bulkPutDocs = bulkPutDocs.map(d => fromStorageToDexie(state.booleanIndexes, d)); - if (bulkPutDocs.length > 0) { - await state.dexieTable.bulkPut(bulkPutDocs); - } - - // handle attachments - var putAttachments = []; - categorized.attachmentsAdd.forEach(attachment => { - putAttachments.push({ - id: attachmentObjectId(attachment.documentId, attachment.attachmentId), - data: attachment.attachmentData.data - }); - }); - categorized.attachmentsUpdate.forEach(attachment => { - putAttachments.push({ - id: attachmentObjectId(attachment.documentId, attachment.attachmentId), - data: attachment.attachmentData.data - }); - }); - await state.dexieAttachmentsTable.bulkPut(putAttachments); - await 
state.dexieAttachmentsTable.bulkDelete(categorized.attachmentsRemove.map(attachment => attachmentObjectId(attachment.documentId, attachment.attachmentId))); - }); - categorized = ensureNotFalsy(categorized); - if (categorized.eventBulk.events.length > 0) { - var lastState = ensureNotFalsy(categorized.newestRow).document; - categorized.eventBulk.checkpoint = { - id: lastState[this.primaryPath], - lwt: lastState._meta.lwt - }; - categorized.eventBulk.endTime = now(); - this.changes$.next(categorized.eventBulk); - } - return ret; - }; - _proto.findDocumentsById = async function findDocumentsById(ids, deleted) { - ensureNotClosed(this); - var state = await this.internals; - var ret = []; - await state.dexieDb.transaction('r', state.dexieTable, async () => { - var docsInDb = await getDocsInDb(this.internals, ids); - docsInDb.forEach(documentInDb => { - if (documentInDb && (!documentInDb._deleted || deleted)) { - ret.push(documentInDb); - } - }); - }); - return ret; - }; - _proto.query = function query(preparedQuery) { - ensureNotClosed(this); - return dexieQuery(this, preparedQuery); - }; - _proto.count = async function count(preparedQuery) { - if (preparedQuery.queryPlan.selectorSatisfiedByIndex) { - var result = await dexieCount(this, preparedQuery); - return { - count: result, - mode: 'fast' - }; - } else { - var _result = await dexieQuery(this, preparedQuery); - return { - count: _result.documents.length, - mode: 'slow' - }; - } - }; - _proto.changeStream = function changeStream() { - ensureNotClosed(this); - return this.changes$.asObservable(); - }; - _proto.cleanup = async function cleanup(minimumDeletedTime) { - ensureNotClosed(this); - var state = await this.internals; - await state.dexieDb.transaction('rw', state.dexieTable, async () => { - var maxDeletionTime = now() - minimumDeletedTime; - /** - * TODO only fetch _deleted=true - */ - var toRemove = await state.dexieTable.where('_meta.lwt').below(maxDeletionTime).toArray(); - var removeIds = []; - 
toRemove.forEach(doc => { - if (doc._deleted === '1') { - removeIds.push(doc[this.primaryPath]); - } - }); - await state.dexieTable.bulkDelete(removeIds); - }); - - /** - * TODO instead of deleting all deleted docs at once, - * only clean up some of them and return false if there are more documents to clean up. - * This ensures that when many documents have to be purged, - * we do not block the more important tasks too long. - */ - return true; - }; - _proto.getAttachmentData = async function getAttachmentData(documentId, attachmentId, _digest) { - ensureNotClosed(this); - var state = await this.internals; - var id = attachmentObjectId(documentId, attachmentId); - return await state.dexieDb.transaction('r', state.dexieAttachmentsTable, async () => { - var attachment = await state.dexieAttachmentsTable.get(id); - if (attachment) { - return attachment.data; - } else { - throw new Error('attachment missing documentId: ' + documentId + ' attachmentId: ' + attachmentId); - } - }); - }; - _proto.remove = async function remove() { - ensureNotClosed(this); - var state = await this.internals; - await state.dexieTable.clear(); - return this.close(); - }; - _proto.close = function close() { - if (this.closed) { - return this.closed; - } - this.closed = (async () => { - this.changes$.complete(); - await closeDexieDb(this.internals); - })(); - return this.closed; - }; - _proto.conflictResultionTasks = function conflictResultionTasks() { - return new Subject(); - }; - _proto.resolveConflictResultionTask = async function resolveConflictResultionTask(_taskSolution) {}; - return RxStorageInstanceDexie; -}(); -export async function createDexieStorageInstance(storage, params, settings) { - var internals = getDexieDbWithTables(params.databaseName, params.collectionName, settings, params.schema); - var instance = new RxStorageInstanceDexie(storage, params.databaseName, params.collectionName, params.schema, internals, params.options, settings, params.devMode); - await 
addRxStorageMultiInstanceSupport(RX_STORAGE_NAME_DEXIE, params, instance); - return Promise.resolve(instance); -} -function ensureNotClosed(instance) { - if (instance.closed) { - throw new Error('RxStorageInstanceDexie is closed ' + instance.databaseName + '-' + instance.collectionName); - } -} -//# sourceMappingURL=rx-storage-instance-dexie.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-dexie/rx-storage-instance-dexie.js.map b/dist/esm/plugins/storage-dexie/rx-storage-instance-dexie.js.map deleted file mode 100644 index 27524a0eab7..00000000000 --- a/dist/esm/plugins/storage-dexie/rx-storage-instance-dexie.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-storage-instance-dexie.js","names":["Subject","now","ensureNotFalsy","defaultHashSha256","RXDB_UTILS_GLOBAL","PREMIUM_FLAG_HASH","attachmentObjectId","closeDexieDb","fromStorageToDexie","getDexieDbWithTables","getDocsInDb","RX_STORAGE_NAME_DEXIE","dexieCount","dexieQuery","getPrimaryFieldOfPrimaryKey","categorizeBulkWriteRows","flatCloneDocWithMeta","addRxStorageMultiInstanceSupport","newRxError","instanceId","DEXIE_TEST_META_FIELD","shownNonPremiumLog","RxStorageInstanceDexie","storage","databaseName","collectionName","schema","internals","options","settings","devMode","changes$","primaryPath","primaryKey","_proto","prototype","bulkWrite","documentWrites","context","ensureNotClosed","premium","console","warn","join","forEach","row","document","_rev","previous","args","_meta","dir","Error","state","ret","success","error","map","doc","documentKeys","writeRow","categorized","dexieDb","transaction","dexieTable","dexieAttachmentsTable","docsInDbMap","Map","docsInDbWithInternals","docWithDexieInternals","set","errors","bulkPutDocs","bulkInsertDocs","push","bulkUpdateDocs","d","booleanIndexes","length","bulkPut","putAttachments","attachmentsAdd","attachment","id","documentId","attachmentId","data","attachmentData","attachmentsUpdate","bulkDelete","attachmentsRemove","eventBulk","event
s","lastState","newestRow","checkpoint","lwt","endTime","next","findDocumentsById","ids","deleted","docsInDb","documentInDb","_deleted","query","preparedQuery","count","queryPlan","selectorSatisfiedByIndex","result","mode","documents","changeStream","asObservable","cleanup","minimumDeletedTime","maxDeletionTime","toRemove","where","below","toArray","removeIds","getAttachmentData","_digest","get","remove","clear","close","closed","complete","conflictResultionTasks","resolveConflictResultionTask","_taskSolution","createDexieStorageInstance","params","instance","Promise","resolve"],"sources":["../../../../src/plugins/storage-dexie/rx-storage-instance-dexie.ts"],"sourcesContent":["import {\n Subject,\n Observable\n} from 'rxjs';\nimport {\n now,\n ensureNotFalsy,\n defaultHashSha256,\n RXDB_UTILS_GLOBAL,\n PREMIUM_FLAG_HASH\n} from '../utils/index.ts';\nimport type {\n RxStorageInstance,\n RxStorageChangeEvent,\n RxDocumentData,\n BulkWriteRow,\n RxStorageBulkWriteResponse,\n RxStorageQueryResult,\n RxJsonSchema,\n RxStorageInstanceCreationParams,\n EventBulk,\n StringKeys,\n RxConflictResultionTask,\n RxConflictResultionTaskSolution,\n RxStorageDefaultCheckpoint,\n CategorizeBulkWriteRowsOutput,\n RxStorageCountResult,\n PreparedQuery\n} from '../../types/index.d.ts';\nimport type {\n DexieSettings,\n DexieStorageInternals\n} from '../../types/plugins/dexie.d.ts';\nimport { RxStorageDexie } from './rx-storage-dexie.ts';\nimport {\n attachmentObjectId,\n closeDexieDb,\n fromStorageToDexie,\n getDexieDbWithTables,\n getDocsInDb,\n RX_STORAGE_NAME_DEXIE\n} from './dexie-helper.ts';\nimport { dexieCount, dexieQuery } from './dexie-query.ts';\nimport { getPrimaryFieldOfPrimaryKey } from '../../rx-schema-helper.ts';\nimport { categorizeBulkWriteRows, flatCloneDocWithMeta } from '../../rx-storage-helper.ts';\nimport { addRxStorageMultiInstanceSupport } from '../../rx-storage-multiinstance.ts';\nimport { newRxError } from '../../rx-error.ts';\n\nlet instanceId = 
now();\nexport const DEXIE_TEST_META_FIELD = 'dexieTestMetaField';\n\nlet shownNonPremiumLog = false;\n\n\nexport class RxStorageInstanceDexie implements RxStorageInstance<\n RxDocType,\n DexieStorageInternals,\n DexieSettings,\n RxStorageDefaultCheckpoint\n> {\n public readonly primaryPath: StringKeys>;\n private changes$: Subject>, RxStorageDefaultCheckpoint>> = new Subject();\n public readonly instanceId = instanceId++;\n public closed?: Promise;\n\n constructor(\n public readonly storage: RxStorageDexie,\n public readonly databaseName: string,\n public readonly collectionName: string,\n public readonly schema: Readonly>>,\n public readonly internals: DexieStorageInternals,\n public readonly options: Readonly,\n public readonly settings: DexieSettings,\n public readonly devMode: boolean\n ) {\n this.primaryPath = getPrimaryFieldOfPrimaryKey(this.schema.primaryKey);\n }\n\n async bulkWrite(\n documentWrites: BulkWriteRow[],\n context: string\n ): Promise> {\n ensureNotClosed(this);\n\n\n if (\n !shownNonPremiumLog &&\n (\n !RXDB_UTILS_GLOBAL.premium ||\n typeof RXDB_UTILS_GLOBAL.premium !== 'string' ||\n (await defaultHashSha256(RXDB_UTILS_GLOBAL.premium) !== PREMIUM_FLAG_HASH)\n )\n ) {\n console.warn(\n [\n '-------------- RxDB Open Core RxStorage -------------------------------',\n 'You are using the free Dexie.js based RxStorage implementation from RxDB https://rxdb.info/rx-storage-dexie.html?console=dexie ',\n 'While this is a great option, we want to let you know that there are faster storage solutions available in our premium plugins.',\n 'For professional users and production environments, we highly recommend considering these premium options to enhance performance and reliability.',\n ' https://rxdb.info/premium?console=dexie ',\n 'If you already purchased premium access you can disable this log by calling the setPremiumFlag() function from rxdb-premium/plugins/shared.',\n '---------------------------------------------------------------------'\n 
].join('\\n')\n );\n shownNonPremiumLog = true;\n } else {\n shownNonPremiumLog = true;\n }\n\n\n /**\n * Check some assumptions to ensure RxDB\n * does not call the storage with an invalid write.\n */\n documentWrites.forEach(row => {\n // ensure revision is set\n if (\n !row.document._rev ||\n (\n row.previous &&\n !row.previous._rev\n )\n ) {\n throw newRxError('SNH', { args: { row } });\n }\n\n // ensure prev-data is set\n if (this.devMode) {\n if (\n row.previous &&\n (\n !row.previous._meta[DEXIE_TEST_META_FIELD] ||\n row.previous._meta[DEXIE_TEST_META_FIELD] !== row.previous._rev\n )\n ) {\n console.dir(row);\n throw new Error('missing or wrong _meta.' + DEXIE_TEST_META_FIELD);\n }\n }\n });\n\n const state = await this.internals;\n const ret: RxStorageBulkWriteResponse = {\n success: [],\n error: []\n };\n\n /**\n * Some storages might add any _meta fields\n * internally. To ensure RxDB can work with that in the\n * test suite, we add a random field here.\n * To ensure \n */\n if (this.devMode) {\n documentWrites = documentWrites.map(row => {\n const doc = flatCloneDocWithMeta(row.document);\n doc._meta[DEXIE_TEST_META_FIELD] = doc._rev;\n return {\n previous: row.previous,\n document: doc\n }\n })\n }\n\n\n const documentKeys: string[] = documentWrites.map(writeRow => writeRow.document[this.primaryPath] as any);\n let categorized: CategorizeBulkWriteRowsOutput | undefined;\n await state.dexieDb.transaction(\n 'rw',\n state.dexieTable,\n state.dexieAttachmentsTable,\n async () => {\n const docsInDbMap = new Map>();\n const docsInDbWithInternals = await getDocsInDb(this.internals, documentKeys);\n docsInDbWithInternals.forEach(docWithDexieInternals => {\n const doc = docWithDexieInternals;\n if (doc) {\n docsInDbMap.set((doc as any)[this.primaryPath], doc as any);\n }\n return doc;\n });\n\n categorized = categorizeBulkWriteRows(\n this,\n this.primaryPath as any,\n docsInDbMap,\n documentWrites,\n context\n );\n ret.error = categorized.errors;\n\n /**\n * 
Batch up the database operations\n * so we can later run them in bulk.\n */\n let bulkPutDocs: any[] = [];\n categorized.bulkInsertDocs.forEach(row => {\n ret.success.push(row.document);\n bulkPutDocs.push(row.document);\n });\n categorized.bulkUpdateDocs.forEach(row => {\n ret.success.push(row.document);\n bulkPutDocs.push(row.document);\n });\n bulkPutDocs = bulkPutDocs.map(d => fromStorageToDexie(state.booleanIndexes, d));\n if (bulkPutDocs.length > 0) {\n await state.dexieTable.bulkPut(bulkPutDocs);\n }\n\n // handle attachments\n const putAttachments: { id: string, data: string }[] = [];\n categorized.attachmentsAdd.forEach(attachment => {\n putAttachments.push({\n id: attachmentObjectId(attachment.documentId, attachment.attachmentId),\n data: attachment.attachmentData.data\n });\n });\n categorized.attachmentsUpdate.forEach(attachment => {\n putAttachments.push({\n id: attachmentObjectId(attachment.documentId, attachment.attachmentId),\n data: attachment.attachmentData.data\n });\n });\n await state.dexieAttachmentsTable.bulkPut(putAttachments);\n await state.dexieAttachmentsTable.bulkDelete(\n categorized.attachmentsRemove.map(attachment => attachmentObjectId(attachment.documentId, attachment.attachmentId))\n );\n\n });\n\n categorized = ensureNotFalsy(categorized);\n if (categorized.eventBulk.events.length > 0) {\n const lastState = ensureNotFalsy(categorized.newestRow).document;\n categorized.eventBulk.checkpoint = {\n id: lastState[this.primaryPath],\n lwt: lastState._meta.lwt\n };\n categorized.eventBulk.endTime = now();\n this.changes$.next(categorized.eventBulk);\n }\n\n return ret;\n }\n\n async findDocumentsById(\n ids: string[],\n deleted: boolean\n ): Promise[]> {\n ensureNotClosed(this);\n const state = await this.internals;\n const ret: RxDocumentData[] = [];\n\n await state.dexieDb.transaction(\n 'r',\n state.dexieTable,\n async () => {\n const docsInDb = await getDocsInDb(this.internals, ids);\n docsInDb.forEach(documentInDb => {\n if (\n 
documentInDb &&\n (!documentInDb._deleted || deleted)\n ) {\n ret.push(documentInDb);\n }\n });\n });\n return ret;\n }\n\n query(preparedQuery: PreparedQuery): Promise> {\n ensureNotClosed(this);\n return dexieQuery(\n this,\n preparedQuery\n );\n }\n async count(\n preparedQuery: PreparedQuery\n ): Promise {\n if (preparedQuery.queryPlan.selectorSatisfiedByIndex) {\n const result = await dexieCount(this, preparedQuery);\n return {\n count: result,\n mode: 'fast'\n };\n } else {\n const result = await dexieQuery(this, preparedQuery);\n return {\n count: result.documents.length,\n mode: 'slow'\n };\n }\n }\n\n changeStream(): Observable>, RxStorageDefaultCheckpoint>> {\n ensureNotClosed(this);\n return this.changes$.asObservable();\n }\n\n async cleanup(minimumDeletedTime: number): Promise {\n ensureNotClosed(this);\n const state = await this.internals;\n await state.dexieDb.transaction(\n 'rw',\n state.dexieTable,\n async () => {\n const maxDeletionTime = now() - minimumDeletedTime;\n /**\n * TODO only fetch _deleted=true\n */\n const toRemove = await state.dexieTable\n .where('_meta.lwt')\n .below(maxDeletionTime)\n .toArray();\n const removeIds: string[] = [];\n toRemove.forEach(doc => {\n if (doc._deleted === '1') {\n removeIds.push(doc[this.primaryPath]);\n }\n });\n await state.dexieTable.bulkDelete(removeIds);\n }\n );\n\n /**\n * TODO instead of deleting all deleted docs at once,\n * only clean up some of them and return false if there are more documents to clean up.\n * This ensures that when many documents have to be purged,\n * we do not block the more important tasks too long.\n */\n return true;\n }\n\n async getAttachmentData(documentId: string, attachmentId: string, _digest: string): Promise {\n ensureNotClosed(this);\n const state = await this.internals;\n const id = attachmentObjectId(documentId, attachmentId);\n return await state.dexieDb.transaction(\n 'r',\n state.dexieAttachmentsTable,\n async () => {\n\n const attachment = await 
state.dexieAttachmentsTable.get(id);\n if (attachment) {\n return attachment.data;\n } else {\n throw new Error('attachment missing documentId: ' + documentId + ' attachmentId: ' + attachmentId);\n }\n });\n }\n\n async remove(): Promise {\n ensureNotClosed(this);\n const state = await this.internals;\n await state.dexieTable.clear()\n return this.close();\n }\n\n\n close(): Promise {\n if (this.closed) {\n return this.closed;\n }\n this.closed = (async () => {\n this.changes$.complete();\n await closeDexieDb(this.internals);\n })();\n return this.closed;\n }\n\n conflictResultionTasks(): Observable> {\n return new Subject();\n }\n async resolveConflictResultionTask(_taskSolution: RxConflictResultionTaskSolution): Promise { }\n\n}\n\n\nexport async function createDexieStorageInstance(\n storage: RxStorageDexie,\n params: RxStorageInstanceCreationParams,\n settings: DexieSettings\n): Promise> {\n const internals = getDexieDbWithTables(\n params.databaseName,\n params.collectionName,\n settings,\n params.schema\n );\n\n const instance = new RxStorageInstanceDexie(\n storage,\n params.databaseName,\n params.collectionName,\n params.schema,\n internals,\n params.options,\n settings,\n params.devMode\n );\n\n await addRxStorageMultiInstanceSupport(\n RX_STORAGE_NAME_DEXIE,\n params,\n instance\n );\n\n return Promise.resolve(instance);\n}\n\n\n\nfunction ensureNotClosed(\n instance: RxStorageInstanceDexie\n) {\n if (instance.closed) {\n throw new Error('RxStorageInstanceDexie is closed ' + instance.databaseName + '-' + instance.collectionName);\n 
}\n}\n"],"mappings":"AAAA,SACIA,OAAO,QAEJ,MAAM;AACb,SACIC,GAAG,EACHC,cAAc,EACdC,iBAAiB,EACjBC,iBAAiB,EACjBC,iBAAiB,QACd,mBAAmB;AAwB1B,SACIC,kBAAkB,EAClBC,YAAY,EACZC,kBAAkB,EAClBC,oBAAoB,EACpBC,WAAW,EACXC,qBAAqB,QAClB,mBAAmB;AAC1B,SAASC,UAAU,EAAEC,UAAU,QAAQ,kBAAkB;AACzD,SAASC,2BAA2B,QAAQ,2BAA2B;AACvE,SAASC,uBAAuB,EAAEC,oBAAoB,QAAQ,4BAA4B;AAC1F,SAASC,gCAAgC,QAAQ,mCAAmC;AACpF,SAASC,UAAU,QAAQ,mBAAmB;AAE9C,IAAIC,UAAU,GAAGlB,GAAG,CAAC,CAAC;AACtB,OAAO,IAAMmB,qBAAqB,GAAG,oBAAoB;AAEzD,IAAIC,kBAAkB,GAAG,KAAK;AAG9B,WAAaC,sBAAsB;EAW/B,SAAAA,uBACoBC,OAAuB,EACvBC,YAAoB,EACpBC,cAAsB,EACtBC,MAAyD,EACzDC,SAAgC,EAChCC,OAAgC,EAChCC,QAAuB,EACvBC,OAAgB,EAClC;IAAA,KAbMC,QAAQ,GAAoG,IAAI/B,OAAO,CAAC,CAAC;IAAA,KACjHmB,UAAU,GAAGA,UAAU,EAAE;IAAA,KAIrBI,OAAuB,GAAvBA,OAAuB;IAAA,KACvBC,YAAoB,GAApBA,YAAoB;IAAA,KACpBC,cAAsB,GAAtBA,cAAsB;IAAA,KACtBC,MAAyD,GAAzDA,MAAyD;IAAA,KACzDC,SAAgC,GAAhCA,SAAgC;IAAA,KAChCC,OAAgC,GAAhCA,OAAgC;IAAA,KAChCC,QAAuB,GAAvBA,QAAuB;IAAA,KACvBC,OAAgB,GAAhBA,OAAgB;IAEhC,IAAI,CAACE,WAAW,GAAGlB,2BAA2B,CAAC,IAAI,CAACY,MAAM,CAACO,UAAU,CAAC;EAC1E;EAAC,IAAAC,MAAA,GAAAZ,sBAAA,CAAAa,SAAA;EAAAD,MAAA,CAEKE,SAAS,GAAf,eAAAA,UACIC,cAAyC,EACzCC,OAAe,EAC+B;IAC9CC,eAAe,CAAC,IAAI,CAAC;IAGrB,IACI,CAAClB,kBAAkB,KAEf,CAACjB,iBAAiB,CAACoC,OAAO,IAC1B,OAAOpC,iBAAiB,CAACoC,OAAO,KAAK,QAAQ,IAC5C,OAAMrC,iBAAiB,CAACC,iBAAiB,CAACoC,OAAO,CAAC,MAAKnC,iBAAkB,CAC7E,EACH;MACEoC,OAAO,CAACC,IAAI,CACR,CACI,yEAAyE,EACzE,iIAAiI,EACjI,iIAAiI,EACjI,mJAAmJ,EACnJ,2CAA2C,EAC3C,6IAA6I,EAC7I,uEAAuE,CAC1E,CAACC,IAAI,CAAC,IAAI,CACf,CAAC;MACDtB,kBAAkB,GAAG,IAAI;IAC7B,CAAC,MAAM;MACHA,kBAAkB,GAAG,IAAI;IAC7B;;IAGA;AACR;AACA;AACA;IACQgB,cAAc,CAACO,OAAO,CAACC,GAAG,IAAI;MAC1B;MACA,IACI,CAACA,GAAG,CAACC,QAAQ,CAACC,IAAI,IAEdF,GAAG,CAACG,QAAQ,IACZ,CAACH,GAAG,CAACG,QAAQ,CAACD,IACjB,EACH;QACE,MAAM7B,UAAU,CAAC,KAAK,EAAE;UAAE+B,IAAI,EAAE;YAAEJ;UAAI;QAAE,CAAC,CAAC;MAC9C;;MAEA;MACA,IAAI,IAAI,CAACf,OAAO,EAAE;QACd,IACIe,GAAG,CAACG,QAAQ,KAER,CAACH,GAAG,CAACG,QAAQ,CAACE,KAAK,CAAC9B,qBAAqB,CAAC,IAC1CyB,GAAG,CAACG,QAAQ,CAACE,KAAK,CAAC9B,qBAA
qB,CAAC,KAAKyB,GAAG,CAACG,QAAQ,CAACD,IAAI,CAClE,EACH;UACEN,OAAO,CAACU,GAAG,CAACN,GAAG,CAAC;UAChB,MAAM,IAAIO,KAAK,CAAC,yBAAyB,GAAGhC,qBAAqB,CAAC;QACtE;MACJ;IACJ,CAAC,CAAC;IAEF,IAAMiC,KAAK,GAAG,MAAM,IAAI,CAAC1B,SAAS;IAClC,IAAM2B,GAA0C,GAAG;MAC/CC,OAAO,EAAE,EAAE;MACXC,KAAK,EAAE;IACX,CAAC;;IAED;AACR;AACA;AACA;AACA;AACA;IACQ,IAAI,IAAI,CAAC1B,OAAO,EAAE;MACdO,cAAc,GAAGA,cAAc,CAACoB,GAAG,CAACZ,GAAG,IAAI;QACvC,IAAMa,GAAG,GAAG1C,oBAAoB,CAAC6B,GAAG,CAACC,QAAQ,CAAC;QAC9CY,GAAG,CAACR,KAAK,CAAC9B,qBAAqB,CAAC,GAAGsC,GAAG,CAACX,IAAI;QAC3C,OAAO;UACHC,QAAQ,EAAEH,GAAG,CAACG,QAAQ;UACtBF,QAAQ,EAAEY;QACd,CAAC;MACL,CAAC,CAAC;IACN;IAGA,IAAMC,YAAsB,GAAGtB,cAAc,CAACoB,GAAG,CAACG,QAAQ,IAAIA,QAAQ,CAACd,QAAQ,CAAC,IAAI,CAACd,WAAW,CAAQ,CAAC;IACzG,IAAI6B,WAAiE;IACrE,MAAMR,KAAK,CAACS,OAAO,CAACC,WAAW,CAC3B,IAAI,EACJV,KAAK,CAACW,UAAU,EAChBX,KAAK,CAACY,qBAAqB,EAC3B,YAAY;MACR,IAAMC,WAAW,GAAG,IAAIC,GAAG,CAAoC,CAAC;MAChE,IAAMC,qBAAqB,GAAG,MAAM1D,WAAW,CAAY,IAAI,CAACiB,SAAS,EAAEgC,YAAY,CAAC;MACxFS,qBAAqB,CAACxB,OAAO,CAACyB,qBAAqB,IAAI;QACnD,IAAMX,GAAG,GAAGW,qBAAqB;QACjC,IAAIX,GAAG,EAAE;UACLQ,WAAW,CAACI,GAAG,CAAEZ,GAAG,CAAS,IAAI,CAAC1B,WAAW,CAAC,EAAE0B,GAAU,CAAC;QAC/D;QACA,OAAOA,GAAG;MACd,CAAC,CAAC;MAEFG,WAAW,GAAG9C,uBAAuB,CACjC,IAAI,EACJ,IAAI,CAACiB,WAAW,EAChBkC,WAAW,EACX7B,cAAc,EACdC,OACJ,CAAC;MACDgB,GAAG,CAACE,KAAK,GAAGK,WAAW,CAACU,MAAM;;MAE9B;AAChB;AACA;AACA;MACgB,IAAIC,WAAkB,GAAG,EAAE;MAC3BX,WAAW,CAACY,cAAc,CAAC7B,OAAO,CAACC,GAAG,IAAI;QACtCS,GAAG,CAACC,OAAO,CAACmB,IAAI,CAAC7B,GAAG,CAACC,QAAQ,CAAC;QAC9B0B,WAAW,CAACE,IAAI,CAAC7B,GAAG,CAACC,QAAQ,CAAC;MAClC,CAAC,CAAC;MACFe,WAAW,CAACc,cAAc,CAAC/B,OAAO,CAACC,GAAG,IAAI;QACtCS,GAAG,CAACC,OAAO,CAACmB,IAAI,CAAC7B,GAAG,CAACC,QAAQ,CAAC;QAC9B0B,WAAW,CAACE,IAAI,CAAC7B,GAAG,CAACC,QAAQ,CAAC;MAClC,CAAC,CAAC;MACF0B,WAAW,GAAGA,WAAW,CAACf,GAAG,CAACmB,CAAC,IAAIpE,kBAAkB,CAAC6C,KAAK,CAACwB,cAAc,EAAED,CAAC,CAAC,CAAC;MAC/E,IAAIJ,WAAW,CAACM,MAAM,GAAG,CAAC,EAAE;QACxB,MAAMzB,KAAK,CAACW,UAAU,CAACe,OAAO,CAACP,WAAW,CAAC;MAC/C;;MAEA;MACA,IAAMQ,cAA8C,GAAG,EAAE;MACzDnB,WAAW,CAACoB,cAA
c,CAACrC,OAAO,CAACsC,UAAU,IAAI;QAC7CF,cAAc,CAACN,IAAI,CAAC;UAChBS,EAAE,EAAE7E,kBAAkB,CAAC4E,UAAU,CAACE,UAAU,EAAEF,UAAU,CAACG,YAAY,CAAC;UACtEC,IAAI,EAAEJ,UAAU,CAACK,cAAc,CAACD;QACpC,CAAC,CAAC;MACN,CAAC,CAAC;MACFzB,WAAW,CAAC2B,iBAAiB,CAAC5C,OAAO,CAACsC,UAAU,IAAI;QAChDF,cAAc,CAACN,IAAI,CAAC;UAChBS,EAAE,EAAE7E,kBAAkB,CAAC4E,UAAU,CAACE,UAAU,EAAEF,UAAU,CAACG,YAAY,CAAC;UACtEC,IAAI,EAAEJ,UAAU,CAACK,cAAc,CAACD;QACpC,CAAC,CAAC;MACN,CAAC,CAAC;MACF,MAAMjC,KAAK,CAACY,qBAAqB,CAACc,OAAO,CAACC,cAAc,CAAC;MACzD,MAAM3B,KAAK,CAACY,qBAAqB,CAACwB,UAAU,CACxC5B,WAAW,CAAC6B,iBAAiB,CAACjC,GAAG,CAACyB,UAAU,IAAI5E,kBAAkB,CAAC4E,UAAU,CAACE,UAAU,EAAEF,UAAU,CAACG,YAAY,CAAC,CACtH,CAAC;IAEL,CAAC,CAAC;IAENxB,WAAW,GAAG3D,cAAc,CAAC2D,WAAW,CAAC;IACzC,IAAIA,WAAW,CAAC8B,SAAS,CAACC,MAAM,CAACd,MAAM,GAAG,CAAC,EAAE;MACzC,IAAMe,SAAS,GAAG3F,cAAc,CAAC2D,WAAW,CAACiC,SAAS,CAAC,CAAChD,QAAQ;MAChEe,WAAW,CAAC8B,SAAS,CAACI,UAAU,GAAG;QAC/BZ,EAAE,EAAEU,SAAS,CAAC,IAAI,CAAC7D,WAAW,CAAC;QAC/BgE,GAAG,EAAEH,SAAS,CAAC3C,KAAK,CAAC8C;MACzB,CAAC;MACDnC,WAAW,CAAC8B,SAAS,CAACM,OAAO,GAAGhG,GAAG,CAAC,CAAC;MACrC,IAAI,CAAC8B,QAAQ,CAACmE,IAAI,CAACrC,WAAW,CAAC8B,SAAS,CAAC;IAC7C;IAEA,OAAOrC,GAAG;EACd,CAAC;EAAApB,MAAA,CAEKiE,iBAAiB,GAAvB,eAAAA,kBACIC,GAAa,EACbC,OAAgB,EACoB;IACpC9D,eAAe,CAAC,IAAI,CAAC;IACrB,IAAMc,KAAK,GAAG,MAAM,IAAI,CAAC1B,SAAS;IAClC,IAAM2B,GAAgC,GAAG,EAAE;IAE3C,MAAMD,KAAK,CAACS,OAAO,CAACC,WAAW,CAC3B,GAAG,EACHV,KAAK,CAACW,UAAU,EAChB,YAAY;MACR,IAAMsC,QAAQ,GAAG,MAAM5F,WAAW,CAAY,IAAI,CAACiB,SAAS,EAAEyE,GAAG,CAAC;MAClEE,QAAQ,CAAC1D,OAAO,CAAC2D,YAAY,IAAI;QAC7B,IACIA,YAAY,KACX,CAACA,YAAY,CAACC,QAAQ,IAAIH,OAAO,CAAC,EACrC;UACE/C,GAAG,CAACoB,IAAI,CAAC6B,YAAY,CAAC;QAC1B;MACJ,CAAC,CAAC;IACN,CAAC,CAAC;IACN,OAAOjD,GAAG;EACd,CAAC;EAAApB,MAAA,CAEDuE,KAAK,GAAL,SAAAA,MAAMC,aAAuC,EAA4C;IACrFnE,eAAe,CAAC,IAAI,CAAC;IACrB,OAAO1B,UAAU,CACb,IAAI,EACJ6F,aACJ,CAAC;EACL,CAAC;EAAAxE,MAAA,CACKyE,KAAK,GAAX,eAAAA,MACID,aAAuC,EACV;IAC7B,IAAIA,aAAa,CAACE,SAAS,CAACC,wBAAwB,EAAE;MAClD,IAAMC,MAAM,GAAG,MAAMlG,UAAU,CAAC,IAAI,EAAE8F,aAAa,CAAC;MACpD,OAAO;QACHC,
KAAK,EAAEG,MAAM;QACbC,IAAI,EAAE;MACV,CAAC;IACL,CAAC,MAAM;MACH,IAAMD,OAAM,GAAG,MAAMjG,UAAU,CAAC,IAAI,EAAE6F,aAAa,CAAC;MACpD,OAAO;QACHC,KAAK,EAAEG,OAAM,CAACE,SAAS,CAAClC,MAAM;QAC9BiC,IAAI,EAAE;MACV,CAAC;IACL;EACJ,CAAC;EAAA7E,MAAA,CAED+E,YAAY,GAAZ,SAAAA,aAAA,EAAmH;IAC/G1E,eAAe,CAAC,IAAI,CAAC;IACrB,OAAO,IAAI,CAACR,QAAQ,CAACmF,YAAY,CAAC,CAAC;EACvC,CAAC;EAAAhF,MAAA,CAEKiF,OAAO,GAAb,eAAAA,QAAcC,kBAA0B,EAAoB;IACxD7E,eAAe,CAAC,IAAI,CAAC;IACrB,IAAMc,KAAK,GAAG,MAAM,IAAI,CAAC1B,SAAS;IAClC,MAAM0B,KAAK,CAACS,OAAO,CAACC,WAAW,CAC3B,IAAI,EACJV,KAAK,CAACW,UAAU,EAChB,YAAY;MACR,IAAMqD,eAAe,GAAGpH,GAAG,CAAC,CAAC,GAAGmH,kBAAkB;MAClD;AAChB;AACA;MACgB,IAAME,QAAQ,GAAG,MAAMjE,KAAK,CAACW,UAAU,CAClCuD,KAAK,CAAC,WAAW,CAAC,CAClBC,KAAK,CAACH,eAAe,CAAC,CACtBI,OAAO,CAAC,CAAC;MACd,IAAMC,SAAmB,GAAG,EAAE;MAC9BJ,QAAQ,CAAC1E,OAAO,CAACc,GAAG,IAAI;QACpB,IAAIA,GAAG,CAAC8C,QAAQ,KAAK,GAAG,EAAE;UACtBkB,SAAS,CAAChD,IAAI,CAAChB,GAAG,CAAC,IAAI,CAAC1B,WAAW,CAAC,CAAC;QACzC;MACJ,CAAC,CAAC;MACF,MAAMqB,KAAK,CAACW,UAAU,CAACyB,UAAU,CAACiC,SAAS,CAAC;IAChD,CACJ,CAAC;;IAED;AACR;AACA;AACA;AACA;AACA;IACQ,OAAO,IAAI;EACf,CAAC;EAAAxF,MAAA,CAEKyF,iBAAiB,GAAvB,eAAAA,kBAAwBvC,UAAkB,EAAEC,YAAoB,EAAEuC,OAAe,EAAmB;IAChGrF,eAAe,CAAC,IAAI,CAAC;IACrB,IAAMc,KAAK,GAAG,MAAM,IAAI,CAAC1B,SAAS;IAClC,IAAMwD,EAAE,GAAG7E,kBAAkB,CAAC8E,UAAU,EAAEC,YAAY,CAAC;IACvD,OAAO,MAAMhC,KAAK,CAACS,OAAO,CAACC,WAAW,CAClC,GAAG,EACHV,KAAK,CAACY,qBAAqB,EAC3B,YAAY;MAER,IAAMiB,UAAU,GAAG,MAAM7B,KAAK,CAACY,qBAAqB,CAAC4D,GAAG,CAAC1C,EAAE,CAAC;MAC5D,IAAID,UAAU,EAAE;QACZ,OAAOA,UAAU,CAACI,IAAI;MAC1B,CAAC,MAAM;QACH,MAAM,IAAIlC,KAAK,CAAC,iCAAiC,GAAGgC,UAAU,GAAG,iBAAiB,GAAGC,YAAY,CAAC;MACtG;IACJ,CAAC,CAAC;EACV,CAAC;EAAAnD,MAAA,CAEK4F,MAAM,GAAZ,eAAAA,OAAA,EAA8B;IAC1BvF,eAAe,CAAC,IAAI,CAAC;IACrB,IAAMc,KAAK,GAAG,MAAM,IAAI,CAAC1B,SAAS;IAClC,MAAM0B,KAAK,CAACW,UAAU,CAAC+D,KAAK,CAAC,CAAC;IAC9B,OAAO,IAAI,CAACC,KAAK,CAAC,CAAC;EACvB,CAAC;EAAA9F,MAAA,CAGD8F,KAAK,GAAL,SAAAA,MAAA,EAAuB;IACnB,IAAI,IAAI,CAACC,MAAM,EAAE;MACb,OAAO,IAAI,CAACA,MAAM;IACtB;IACA,IAAI,CAACA,MAAM,GAAG,CAAC,YAAY;MA
CvB,IAAI,CAAClG,QAAQ,CAACmG,QAAQ,CAAC,CAAC;MACxB,MAAM3H,YAAY,CAAC,IAAI,CAACoB,SAAS,CAAC;IACtC,CAAC,EAAE,CAAC;IACJ,OAAO,IAAI,CAACsG,MAAM;EACtB,CAAC;EAAA/F,MAAA,CAEDiG,sBAAsB,GAAtB,SAAAA,uBAAA,EAAyE;IACrE,OAAO,IAAInI,OAAO,CAAC,CAAC;EACxB,CAAC;EAAAkC,MAAA,CACKkG,4BAA4B,GAAlC,eAAAA,6BAAmCC,aAAyD,EAAiB,CAAE,CAAC;EAAA,OAAA/G,sBAAA;AAAA;AAKpH,OAAO,eAAegH,0BAA0BA,CAC5C/G,OAAuB,EACvBgH,MAAiE,EACjE1G,QAAuB,EACmB;EAC1C,IAAMF,SAAS,GAAGlB,oBAAoB,CAClC8H,MAAM,CAAC/G,YAAY,EACnB+G,MAAM,CAAC9G,cAAc,EACrBI,QAAQ,EACR0G,MAAM,CAAC7G,MACX,CAAC;EAED,IAAM8G,QAAQ,GAAG,IAAIlH,sBAAsB,CACvCC,OAAO,EACPgH,MAAM,CAAC/G,YAAY,EACnB+G,MAAM,CAAC9G,cAAc,EACrB8G,MAAM,CAAC7G,MAAM,EACbC,SAAS,EACT4G,MAAM,CAAC3G,OAAO,EACdC,QAAQ,EACR0G,MAAM,CAACzG,OACX,CAAC;EAED,MAAMb,gCAAgC,CAClCN,qBAAqB,EACrB4H,MAAM,EACNC,QACJ,CAAC;EAED,OAAOC,OAAO,CAACC,OAAO,CAACF,QAAQ,CAAC;AACpC;AAIA,SAASjG,eAAeA,CACpBiG,QAAqC,EACvC;EACE,IAAIA,QAAQ,CAACP,MAAM,EAAE;IACjB,MAAM,IAAI7E,KAAK,CAAC,mCAAmC,GAAGoF,QAAQ,CAAChH,YAAY,GAAG,GAAG,GAAGgH,QAAQ,CAAC/G,cAAc,CAAC;EAChH;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-foundationdb/foundationdb-helpers.js b/dist/esm/plugins/storage-foundationdb/foundationdb-helpers.js deleted file mode 100644 index 31b412525da..00000000000 --- a/dist/esm/plugins/storage-foundationdb/foundationdb-helpers.js +++ /dev/null @@ -1,6 +0,0 @@ -export function getFoundationDBIndexName(index) { - return index.join('|'); -} -export var CLEANUP_INDEX = ['_deleted', '_meta.lwt']; -export var FOUNDATION_DB_WRITE_BATCH_SIZE = 2000; -//# sourceMappingURL=foundationdb-helpers.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-foundationdb/foundationdb-helpers.js.map b/dist/esm/plugins/storage-foundationdb/foundationdb-helpers.js.map deleted file mode 100644 index 12b61ae1791..00000000000 --- a/dist/esm/plugins/storage-foundationdb/foundationdb-helpers.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"foundationdb-helpers.js","names":["getFoundationDBIndexName","index","join","CLEANUP_INDEX","FOUNDATION_DB_WRITE_BATCH_SIZE"],"sources":["../../../../src/plugins/storage-foundationdb/foundationdb-helpers.ts"],"sourcesContent":["export function getFoundationDBIndexName(index: string[]): string {\n return index.join('|');\n}\nexport const CLEANUP_INDEX: string[] = ['_deleted', '_meta.lwt'];\n\nexport const FOUNDATION_DB_WRITE_BATCH_SIZE = 2000;\n"],"mappings":"AAAA,OAAO,SAASA,wBAAwBA,CAACC,KAAe,EAAU;EAC9D,OAAOA,KAAK,CAACC,IAAI,CAAC,GAAG,CAAC;AAC1B;AACA,OAAO,IAAMC,aAAuB,GAAG,CAAC,UAAU,EAAE,WAAW,CAAC;AAEhE,OAAO,IAAMC,8BAA8B,GAAG,IAAI","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-foundationdb/foundationdb-query.js b/dist/esm/plugins/storage-foundationdb/foundationdb-query.js deleted file mode 100644 index 16676d136fc..00000000000 --- a/dist/esm/plugins/storage-foundationdb/foundationdb-query.js +++ /dev/null @@ -1,109 +0,0 @@ -import { changeIndexableStringByOneQuantum, getStartIndexStringFromLowerBound, getStartIndexStringFromUpperBound } from "../../custom-index.js"; -import { ensureNotFalsy, lastOfArray } from "../../plugins/utils/index.js"; -import { getFoundationDBIndexName } from "./foundationdb-helpers.js"; -import { getQueryMatcher, getSortComparator } from "../../rx-query-helper.js"; -export async function queryFoundationDB(instance, preparedQuery) { - var queryPlan = preparedQuery.queryPlan; - var query = preparedQuery.query; - var skip = query.skip ? query.skip : 0; - var limit = query.limit ? 
query.limit : Infinity; - var skipPlusLimit = skip + limit; - var queryPlanFields = queryPlan.index; - var mustManuallyResort = !queryPlan.sortSatisfiedByIndex; - var queryMatcher = false; - if (!queryPlan.selectorSatisfiedByIndex) { - queryMatcher = getQueryMatcher(instance.schema, preparedQuery.query); - } - var dbs = await instance.internals.dbsPromise; - var indexForName = queryPlanFields.slice(0); - var indexName = getFoundationDBIndexName(indexForName); - var indexDB = ensureNotFalsy(dbs.indexes[indexName]).db; - var lowerBound = queryPlan.startKeys; - var lowerBoundString = getStartIndexStringFromLowerBound(instance.schema, indexForName, lowerBound); - var upperBound = queryPlan.endKeys; - var upperBoundString = getStartIndexStringFromUpperBound(instance.schema, indexForName, upperBound); - var result = await dbs.root.doTransaction(async tx => { - var innerResult = []; - var indexTx = tx.at(indexDB.subspace); - var mainTx = tx.at(dbs.main.subspace); - - /** - * TODO for whatever reason the keySelectors like firstGreaterThan etc. - * do not work properly. So we have to hack here to find the correct - * document in case lowerBoundString===upperBoundString. - * This likely must be fixed in the foundationdb library. - * When it is fixed, we do not need this if-case and instead - * can rely on .getRangeBatch() in all cases. - */ - if (lowerBoundString === upperBoundString) { - var docId = await indexTx.get(lowerBoundString); - if (docId) { - var docData = await mainTx.get(docId); - if (!queryMatcher || queryMatcher(docData)) { - innerResult.push(docData); - } - } - return innerResult; - } - if (!queryPlan.inclusiveStart) { - lowerBoundString = changeIndexableStringByOneQuantum(lowerBoundString, 1); - } - if (queryPlan.inclusiveEnd) { - upperBoundString = changeIndexableStringByOneQuantum(upperBoundString, +1); - } - var range = indexTx.getRangeBatch(lowerBoundString, upperBoundString, - // queryPlan.inclusiveStart ? 
keySelector.firstGreaterThan(lowerBoundString) : keySelector.firstGreaterOrEqual(lowerBoundString), - // queryPlan.inclusiveEnd ? keySelector.lastLessOrEqual(upperBoundString) : keySelector.lastLessThan(upperBoundString), - { - // TODO these options seem to be broken in the foundationdb node bindings - // limit: instance.settings.batchSize, - // streamingMode: StreamingMode.Exact - }); - var done = false; - while (!done) { - var next = await range.next(); - if (next.done) { - done = true; - break; - } - var rows = next.value; - if (!queryPlan.inclusiveStart) { - var firstRow = rows[0]; - if (firstRow && firstRow[0] === lowerBoundString) { - rows.shift(); - } - } - if (!queryPlan.inclusiveEnd) { - var lastRow = lastOfArray(rows); - if (lastRow && lastRow[0] === upperBoundString) { - rows.pop(); - } - } - var docIds = rows.map(row => row[1]); - var docsData = await Promise.all(docIds.map(docId => mainTx.get(docId))); - docsData.forEach(docData => { - if (!done) { - if (!queryMatcher || queryMatcher(docData)) { - innerResult.push(docData); - } - } - if (!mustManuallyResort && innerResult.length === skipPlusLimit) { - done = true; - range.return(); - } - }); - } - return innerResult; - }); - if (mustManuallyResort) { - var sortComparator = getSortComparator(instance.schema, preparedQuery.query); - result = result.sort(sortComparator); - } - - // apply skip and limit boundaries. 
- result = result.slice(skip, skipPlusLimit); - return { - documents: result - }; -} -//# sourceMappingURL=foundationdb-query.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-foundationdb/foundationdb-query.js.map b/dist/esm/plugins/storage-foundationdb/foundationdb-query.js.map deleted file mode 100644 index acb12d17d8e..00000000000 --- a/dist/esm/plugins/storage-foundationdb/foundationdb-query.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"foundationdb-query.js","names":["changeIndexableStringByOneQuantum","getStartIndexStringFromLowerBound","getStartIndexStringFromUpperBound","ensureNotFalsy","lastOfArray","getFoundationDBIndexName","getQueryMatcher","getSortComparator","queryFoundationDB","instance","preparedQuery","queryPlan","query","skip","limit","Infinity","skipPlusLimit","queryPlanFields","index","mustManuallyResort","sortSatisfiedByIndex","queryMatcher","selectorSatisfiedByIndex","schema","dbs","internals","dbsPromise","indexForName","slice","indexName","indexDB","indexes","db","lowerBound","startKeys","lowerBoundString","upperBound","endKeys","upperBoundString","result","root","doTransaction","tx","innerResult","indexTx","at","subspace","mainTx","main","docId","get","docData","push","inclusiveStart","inclusiveEnd","range","getRangeBatch","done","next","rows","value","firstRow","shift","lastRow","pop","docIds","map","row","docsData","Promise","all","forEach","length","return","sortComparator","sort","documents"],"sources":["../../../../src/plugins/storage-foundationdb/foundationdb-query.ts"],"sourcesContent":["import {\n changeIndexableStringByOneQuantum,\n getStartIndexStringFromLowerBound,\n getStartIndexStringFromUpperBound\n} from '../../custom-index.ts';\nimport type {\n PreparedQuery,\n QueryMatcher,\n RxDocumentData,\n RxStorageQueryResult\n} from '../../types/index.d.ts';\nimport { ensureNotFalsy, lastOfArray } from '../../plugins/utils/index.ts';\nimport { getFoundationDBIndexName } from 
'./foundationdb-helpers.ts';\nimport { RxStorageInstanceFoundationDB } from './rx-storage-instance-foundationdb.ts';\nimport { getQueryMatcher, getSortComparator } from '../../rx-query-helper.ts';\n\nexport async function queryFoundationDB(\n instance: RxStorageInstanceFoundationDB,\n preparedQuery: PreparedQuery\n): Promise> {\n const queryPlan = preparedQuery.queryPlan;\n const query = preparedQuery.query;\n const skip = query.skip ? query.skip : 0;\n const limit = query.limit ? query.limit : Infinity;\n const skipPlusLimit = skip + limit;\n const queryPlanFields: string[] = queryPlan.index;\n const mustManuallyResort = !queryPlan.sortSatisfiedByIndex;\n\n\n let queryMatcher: QueryMatcher> | false = false;\n if (!queryPlan.selectorSatisfiedByIndex) {\n queryMatcher = getQueryMatcher(\n instance.schema,\n preparedQuery.query\n );\n }\n\n const dbs = await instance.internals.dbsPromise;\n\n\n const indexForName = queryPlanFields.slice(0);\n const indexName = getFoundationDBIndexName(indexForName);\n const indexDB = ensureNotFalsy(dbs.indexes[indexName]).db;\n\n let lowerBound: any[] = queryPlan.startKeys;\n let lowerBoundString = getStartIndexStringFromLowerBound(\n instance.schema,\n indexForName,\n lowerBound\n );\n\n let upperBound: any[] = queryPlan.endKeys;\n let upperBoundString = getStartIndexStringFromUpperBound(\n instance.schema,\n indexForName,\n upperBound\n );\n let result: RxDocumentData[] = await dbs.root.doTransaction(async (tx: any) => {\n const innerResult: RxDocumentData[] = [];\n const indexTx = tx.at(indexDB.subspace);\n const mainTx = tx.at(dbs.main.subspace);\n\n\n /**\n * TODO for whatever reason the keySelectors like firstGreaterThan etc.\n * do not work properly. 
So we have to hack here to find the correct\n * document in case lowerBoundString===upperBoundString.\n * This likely must be fixed in the foundationdb library.\n * When it is fixed, we do not need this if-case and instead\n * can rely on .getRangeBatch() in all cases.\n */\n if (lowerBoundString === upperBoundString) {\n const docId: string = await indexTx.get(lowerBoundString);\n if (docId) {\n const docData = await mainTx.get(docId);\n if (!queryMatcher || queryMatcher(docData)) {\n innerResult.push(docData);\n }\n }\n return innerResult;\n }\n\n if (!queryPlan.inclusiveStart) {\n lowerBoundString = changeIndexableStringByOneQuantum(lowerBoundString, 1);\n }\n if (queryPlan.inclusiveEnd) {\n upperBoundString = changeIndexableStringByOneQuantum(upperBoundString, +1);\n }\n\n const range = indexTx.getRangeBatch(\n lowerBoundString,\n upperBoundString,\n // queryPlan.inclusiveStart ? keySelector.firstGreaterThan(lowerBoundString) : keySelector.firstGreaterOrEqual(lowerBoundString),\n // queryPlan.inclusiveEnd ? 
keySelector.lastLessOrEqual(upperBoundString) : keySelector.lastLessThan(upperBoundString),\n {\n // TODO these options seem to be broken in the foundationdb node bindings\n // limit: instance.settings.batchSize,\n // streamingMode: StreamingMode.Exact\n }\n );\n let done = false;\n while (!done) {\n const next = await range.next();\n if (next.done) {\n done = true;\n break;\n }\n const rows: [string, string] = next.value;\n\n if (!queryPlan.inclusiveStart) {\n const firstRow = rows[0];\n if (\n firstRow &&\n firstRow[0] === lowerBoundString\n ) {\n rows.shift();\n }\n }\n if (!queryPlan.inclusiveEnd) {\n const lastRow = lastOfArray(rows);\n if (\n lastRow &&\n lastRow[0] === upperBoundString\n ) {\n rows.pop();\n }\n }\n\n const docIds = rows.map(row => row[1]);\n const docsData: RxDocumentData[] = await Promise.all(docIds.map((docId: string) => mainTx.get(docId)));\n\n docsData.forEach((docData) => {\n if (!done) {\n if (!queryMatcher || queryMatcher(docData)) {\n innerResult.push(docData);\n }\n }\n if (\n !mustManuallyResort &&\n innerResult.length === skipPlusLimit\n ) {\n done = true;\n range.return();\n }\n });\n }\n return innerResult;\n });\n if (mustManuallyResort) {\n const sortComparator = getSortComparator(instance.schema, preparedQuery.query);\n result = result.sort(sortComparator);\n }\n\n // apply skip and limit boundaries.\n result = result.slice(skip, skipPlusLimit);\n\n return {\n documents: result\n 
};\n}\n"],"mappings":"AAAA,SACIA,iCAAiC,EACjCC,iCAAiC,EACjCC,iCAAiC,QAC9B,uBAAuB;AAO9B,SAASC,cAAc,EAAEC,WAAW,QAAQ,8BAA8B;AAC1E,SAASC,wBAAwB,QAAQ,2BAA2B;AAEpE,SAASC,eAAe,EAAEC,iBAAiB,QAAQ,0BAA0B;AAE7E,OAAO,eAAeC,iBAAiBA,CACnCC,QAAkD,EAClDC,aAAuC,EACC;EACxC,IAAMC,SAAS,GAAGD,aAAa,CAACC,SAAS;EACzC,IAAMC,KAAK,GAAGF,aAAa,CAACE,KAAK;EACjC,IAAMC,IAAI,GAAGD,KAAK,CAACC,IAAI,GAAGD,KAAK,CAACC,IAAI,GAAG,CAAC;EACxC,IAAMC,KAAK,GAAGF,KAAK,CAACE,KAAK,GAAGF,KAAK,CAACE,KAAK,GAAGC,QAAQ;EAClD,IAAMC,aAAa,GAAGH,IAAI,GAAGC,KAAK;EAClC,IAAMG,eAAyB,GAAGN,SAAS,CAACO,KAAK;EACjD,IAAMC,kBAAkB,GAAG,CAACR,SAAS,CAACS,oBAAoB;EAG1D,IAAIC,YAA6D,GAAG,KAAK;EACzE,IAAI,CAACV,SAAS,CAACW,wBAAwB,EAAE;IACrCD,YAAY,GAAGf,eAAe,CAC1BG,QAAQ,CAACc,MAAM,EACfb,aAAa,CAACE,KAClB,CAAC;EACL;EAEA,IAAMY,GAAG,GAAG,MAAMf,QAAQ,CAACgB,SAAS,CAACC,UAAU;EAG/C,IAAMC,YAAY,GAAGV,eAAe,CAACW,KAAK,CAAC,CAAC,CAAC;EAC7C,IAAMC,SAAS,GAAGxB,wBAAwB,CAACsB,YAAY,CAAC;EACxD,IAAMG,OAAO,GAAG3B,cAAc,CAACqB,GAAG,CAACO,OAAO,CAACF,SAAS,CAAC,CAAC,CAACG,EAAE;EAEzD,IAAIC,UAAiB,GAAGtB,SAAS,CAACuB,SAAS;EAC3C,IAAIC,gBAAgB,GAAGlC,iCAAiC,CACpDQ,QAAQ,CAACc,MAAM,EACfI,YAAY,EACZM,UACJ,CAAC;EAED,IAAIG,UAAiB,GAAGzB,SAAS,CAAC0B,OAAO;EACzC,IAAIC,gBAAgB,GAAGpC,iCAAiC,CACpDO,QAAQ,CAACc,MAAM,EACfI,YAAY,EACZS,UACJ,CAAC;EACD,IAAIG,MAAmC,GAAG,MAAMf,GAAG,CAACgB,IAAI,CAACC,aAAa,CAAC,MAAOC,EAAO,IAAK;IACtF,IAAMC,WAAwC,GAAG,EAAE;IACnD,IAAMC,OAAO,GAAGF,EAAE,CAACG,EAAE,CAACf,OAAO,CAACgB,QAAQ,CAAC;IACvC,IAAMC,MAAM,GAAGL,EAAE,CAACG,EAAE,CAACrB,GAAG,CAACwB,IAAI,CAACF,QAAQ,CAAC;;IAGvC;AACR;AACA;AACA;AACA;AACA;AACA;AACA;IACQ,IAAIX,gBAAgB,KAAKG,gBAAgB,EAAE;MACvC,IAAMW,KAAa,GAAG,MAAML,OAAO,CAACM,GAAG,CAACf,gBAAgB,CAAC;MACzD,IAAIc,KAAK,EAAE;QACP,IAAME,OAAO,GAAG,MAAMJ,MAAM,CAACG,GAAG,CAACD,KAAK,CAAC;QACvC,IAAI,CAAC5B,YAAY,IAAIA,YAAY,CAAC8B,OAAO,CAAC,EAAE;UACxCR,WAAW,CAACS,IAAI,CAACD,OAAO,CAAC;QAC7B;MACJ;MACA,OAAOR,WAAW;IACtB;IAEA,IAAI,CAAChC,SAAS,CAAC0C,cAAc,EAAE;MAC3BlB,gBAAgB,GAAGnC,iCAAiC,CAACmC,gBAAgB,EAAE,CAAC,CAAC;IAC7E;IACA,IAAIxB,SAAS,CAAC2C,YAAY,EAAE;MACxBhB,gBAAgB,GAAGtC,iCAAiC
,CAACsC,gBAAgB,EAAE,CAAC,CAAC,CAAC;IAC9E;IAEA,IAAMiB,KAAK,GAAGX,OAAO,CAACY,aAAa,CAC/BrB,gBAAgB,EAChBG,gBAAgB;IAChB;IACA;IACA;MACI;MACA;MACA;IAAA,CAER,CAAC;IACD,IAAImB,IAAI,GAAG,KAAK;IAChB,OAAO,CAACA,IAAI,EAAE;MACV,IAAMC,IAAI,GAAG,MAAMH,KAAK,CAACG,IAAI,CAAC,CAAC;MAC/B,IAAIA,IAAI,CAACD,IAAI,EAAE;QACXA,IAAI,GAAG,IAAI;QACX;MACJ;MACA,IAAME,IAAsB,GAAGD,IAAI,CAACE,KAAK;MAEzC,IAAI,CAACjD,SAAS,CAAC0C,cAAc,EAAE;QAC3B,IAAMQ,QAAQ,GAAGF,IAAI,CAAC,CAAC,CAAC;QACxB,IACIE,QAAQ,IACRA,QAAQ,CAAC,CAAC,CAAC,KAAK1B,gBAAgB,EAClC;UACEwB,IAAI,CAACG,KAAK,CAAC,CAAC;QAChB;MACJ;MACA,IAAI,CAACnD,SAAS,CAAC2C,YAAY,EAAE;QACzB,IAAMS,OAAO,GAAG3D,WAAW,CAACuD,IAAI,CAAC;QACjC,IACII,OAAO,IACPA,OAAO,CAAC,CAAC,CAAC,KAAKzB,gBAAgB,EACjC;UACEqB,IAAI,CAACK,GAAG,CAAC,CAAC;QACd;MACJ;MAEA,IAAMC,MAAM,GAAGN,IAAI,CAACO,GAAG,CAACC,GAAG,IAAIA,GAAG,CAAC,CAAC,CAAC,CAAC;MACtC,IAAMC,QAAqC,GAAG,MAAMC,OAAO,CAACC,GAAG,CAACL,MAAM,CAACC,GAAG,CAAEjB,KAAa,IAAKF,MAAM,CAACG,GAAG,CAACD,KAAK,CAAC,CAAC,CAAC;MAEjHmB,QAAQ,CAACG,OAAO,CAAEpB,OAAO,IAAK;QAC1B,IAAI,CAACM,IAAI,EAAE;UACP,IAAI,CAACpC,YAAY,IAAIA,YAAY,CAAC8B,OAAO,CAAC,EAAE;YACxCR,WAAW,CAACS,IAAI,CAACD,OAAO,CAAC;UAC7B;QACJ;QACA,IACI,CAAChC,kBAAkB,IACnBwB,WAAW,CAAC6B,MAAM,KAAKxD,aAAa,EACtC;UACEyC,IAAI,GAAG,IAAI;UACXF,KAAK,CAACkB,MAAM,CAAC,CAAC;QAClB;MACJ,CAAC,CAAC;IACN;IACA,OAAO9B,WAAW;EACtB,CAAC,CAAC;EACF,IAAIxB,kBAAkB,EAAE;IACpB,IAAMuD,cAAc,GAAGnE,iBAAiB,CAACE,QAAQ,CAACc,MAAM,EAAEb,aAAa,CAACE,KAAK,CAAC;IAC9E2B,MAAM,GAAGA,MAAM,CAACoC,IAAI,CAACD,cAAc,CAAC;EACxC;;EAEA;EACAnC,MAAM,GAAGA,MAAM,CAACX,KAAK,CAACf,IAAI,EAAEG,aAAa,CAAC;EAE1C,OAAO;IACH4D,SAAS,EAAErC;EACf,CAAC;AACL","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-foundationdb/foundationdb-types.js b/dist/esm/plugins/storage-foundationdb/foundationdb-types.js deleted file mode 100644 index 79b86a85a1b..00000000000 --- a/dist/esm/plugins/storage-foundationdb/foundationdb-types.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=foundationdb-types.js.map \ No newline 
at end of file diff --git a/dist/esm/plugins/storage-foundationdb/foundationdb-types.js.map b/dist/esm/plugins/storage-foundationdb/foundationdb-types.js.map deleted file mode 100644 index 9f48cdef0bb..00000000000 --- a/dist/esm/plugins/storage-foundationdb/foundationdb-types.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"foundationdb-types.js","names":[],"sources":["../../../../src/plugins/storage-foundationdb/foundationdb-types.ts"],"sourcesContent":["/* eslint-disable no-unused-vars */\n/* eslint-disable @typescript-eslint/no-unused-vars */\n\nimport type {\n EventBulk,\n RxAttachmentWriteData,\n RxDocumentData,\n RxStorage,\n RxStorageChangeEvent,\n RxStorageDefaultCheckpoint\n} from '../../types/index.d.ts';\nexport type RxStorageFoundationDBSettings = {\n /**\n * Version of the API of the foundationDB server.\n */\n apiVersion: number;\n /**\n * Path to the foundationDB cluster file\n * like '/path/to/fdb.cluster'\n * (optional)\n */\n clusterFile?: string;\n batchSize?: number;\n};\nexport type RxStorageFoundationDBInstanceCreationOptions = {\n // can be overwritten per instance\n batchSize?: number;\n};\n\n/**\n * TODO atm we cannot import types from 'foundationdb'\n * because 'foundationdb' is an optional peer dependency\n * this is NOT also in the devDependencies.\n * This is because it requires to install the foundationdb client cli\n * which would mean everyone that wants to develop RxDB must have this installed manually.\n */\n// import {\n// open as foundationDBOpen,\n// Database,\n// Transaction\n// } from 'foundationdb';\n\nexport type FoundationDBIndexMeta = {\n indexName: string;\n index: string[];\n getIndexableString: (doc: RxDocumentData) => string;\n db: FoundationDBDatabase;\n};\n\nexport type FoundationDBConnection = any; // ReturnType;\nexport type FoundationDBDatabase = any; // Database;\nexport type FoundationDBTransaction = any; // Transaction, any>;\nexport type FoundationDBStorageInternals = {\n connection: 
FoundationDBConnection;\n dbsPromise: Promise<{\n root: FoundationDBDatabase;\n main: FoundationDBDatabase;\n attachments: FoundationDBDatabase;\n events: FoundationDBDatabase>, RxStorageDefaultCheckpoint>>;\n indexes: {\n [indexName: string]: FoundationDBIndexMeta;\n };\n }>;\n};\nexport type RxStorageFoundationDB = RxStorage, RxStorageFoundationDBInstanceCreationOptions> & {};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-foundationdb/index.js b/dist/esm/plugins/storage-foundationdb/index.js deleted file mode 100644 index f8af77fe2c3..00000000000 --- a/dist/esm/plugins/storage-foundationdb/index.js +++ /dev/null @@ -1,31 +0,0 @@ -import { ensureRxStorageInstanceParamsAreCorrect } from "../../rx-storage-helper.js"; -import { RXDB_VERSION } from "../utils/utils-rxdb-version.js"; -import { createFoundationDBStorageInstance } from "./rx-storage-instance-foundationdb.js"; -var versionSet; -export function getRxStorageFoundationDB(settings) { - if (versionSet && versionSet !== settings.apiVersion) { - throw new Error('foundationdb already initialized with api version ' + versionSet); - } else if (!versionSet) { - versionSet = settings.apiVersion; - var { - setAPIVersion - } = require('foundationdb'); - setAPIVersion(settings.apiVersion); - } - var storage = { - name: 'foundationdb', - rxdbVersion: RXDB_VERSION, - createStorageInstance(params) { - ensureRxStorageInstanceParamsAreCorrect(params); - var useSettings = Object.assign({}, settings, params.options); - if (!useSettings.batchSize) { - useSettings.batchSize = 50; - } - return createFoundationDBStorageInstance(this, params, useSettings); - } - }; - return storage; -} -export * from "./foundationdb-types.js"; -export * from "./foundationdb-helpers.js"; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-foundationdb/index.js.map b/dist/esm/plugins/storage-foundationdb/index.js.map deleted file mode 100644 index 
e44514849e2..00000000000 --- a/dist/esm/plugins/storage-foundationdb/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["ensureRxStorageInstanceParamsAreCorrect","RXDB_VERSION","createFoundationDBStorageInstance","versionSet","getRxStorageFoundationDB","settings","apiVersion","Error","setAPIVersion","require","storage","name","rxdbVersion","createStorageInstance","params","useSettings","Object","assign","options","batchSize"],"sources":["../../../../src/plugins/storage-foundationdb/index.ts"],"sourcesContent":["import { ensureRxStorageInstanceParamsAreCorrect } from '../../rx-storage-helper.ts';\nimport type {\n RxStorageInstanceCreationParams\n} from '../../types/index.d.ts';\nimport { RXDB_VERSION } from '../utils/utils-rxdb-version.ts';\nimport type {\n RxStorageFoundationDB,\n RxStorageFoundationDBInstanceCreationOptions,\n RxStorageFoundationDBSettings\n} from './foundationdb-types.ts';\n\nimport {\n createFoundationDBStorageInstance,\n RxStorageInstanceFoundationDB\n} from './rx-storage-instance-foundationdb.ts';\n\n\nlet versionSet: undefined | number;\n\nexport function getRxStorageFoundationDB(\n settings: RxStorageFoundationDBSettings\n): RxStorageFoundationDB {\n if (versionSet && versionSet !== settings.apiVersion) {\n throw new Error('foundationdb already initialized with api version ' + versionSet);\n } else if (!versionSet) {\n versionSet = settings.apiVersion;\n const { setAPIVersion } = require('foundationdb');\n setAPIVersion(settings.apiVersion);\n }\n\n\n const storage: RxStorageFoundationDB = {\n name: 'foundationdb',\n rxdbVersion: RXDB_VERSION,\n\n createStorageInstance(\n params: RxStorageInstanceCreationParams\n ): Promise> {\n ensureRxStorageInstanceParamsAreCorrect(params);\n const useSettings = Object.assign(\n {},\n settings,\n params.options\n );\n if (!useSettings.batchSize) {\n useSettings.batchSize = 50;\n }\n return createFoundationDBStorageInstance(this, params, useSettings);\n }\n };\n\n return 
storage;\n}\n\n\nexport * from './foundationdb-types.ts';\nexport * from './foundationdb-helpers.ts';\n"],"mappings":"AAAA,SAASA,uCAAuC,QAAQ,4BAA4B;AAIpF,SAASC,YAAY,QAAQ,gCAAgC;AAO7D,SACIC,iCAAiC,QAE9B,uCAAuC;AAG9C,IAAIC,UAA8B;AAElC,OAAO,SAASC,wBAAwBA,CACpCC,QAAuC,EAClB;EACrB,IAAIF,UAAU,IAAIA,UAAU,KAAKE,QAAQ,CAACC,UAAU,EAAE;IAClD,MAAM,IAAIC,KAAK,CAAC,oDAAoD,GAAGJ,UAAU,CAAC;EACtF,CAAC,MAAM,IAAI,CAACA,UAAU,EAAE;IACpBA,UAAU,GAAGE,QAAQ,CAACC,UAAU;IAChC,IAAM;MAAEE;IAAc,CAAC,GAAGC,OAAO,CAAC,cAAc,CAAC;IACjDD,aAAa,CAACH,QAAQ,CAACC,UAAU,CAAC;EACtC;EAGA,IAAMI,OAA8B,GAAG;IACnCC,IAAI,EAAE,cAAc;IACpBC,WAAW,EAAEX,YAAY;IAEzBY,qBAAqBA,CACjBC,MAAgG,EAC/C;MACjDd,uCAAuC,CAACc,MAAM,CAAC;MAC/C,IAAMC,WAAW,GAAGC,MAAM,CAACC,MAAM,CAC7B,CAAC,CAAC,EACFZ,QAAQ,EACRS,MAAM,CAACI,OACX,CAAC;MACD,IAAI,CAACH,WAAW,CAACI,SAAS,EAAE;QACxBJ,WAAW,CAACI,SAAS,GAAG,EAAE;MAC9B;MACA,OAAOjB,iCAAiC,CAAC,IAAI,EAAEY,MAAM,EAAEC,WAAW,CAAC;IACvE;EACJ,CAAC;EAED,OAAOL,OAAO;AAClB;AAGA,cAAc,yBAAyB;AACvC,cAAc,2BAA2B","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-foundationdb/rx-storage-instance-foundationdb.js b/dist/esm/plugins/storage-foundationdb/rx-storage-instance-foundationdb.js deleted file mode 100644 index 74eb9e4fdf2..00000000000 --- a/dist/esm/plugins/storage-foundationdb/rx-storage-instance-foundationdb.js +++ /dev/null @@ -1,287 +0,0 @@ -import { Subject } from 'rxjs'; -import { getPrimaryFieldOfPrimaryKey } from "../../rx-schema-helper.js"; -// import { -// open as foundationDBOpen, -// directory as foundationDBDirectory, -// encoders as foundationDBEncoders, -// keySelector as foundationDBKeySelector, -// StreamingMode as foundationDBStreamingMode -// } from 'foundationdb'; -import { categorizeBulkWriteRows } from "../../rx-storage-helper.js"; -import { CLEANUP_INDEX, FOUNDATION_DB_WRITE_BATCH_SIZE, getFoundationDBIndexName } from "./foundationdb-helpers.js"; -import { getIndexableStringMonad, getStartIndexStringFromLowerBound, getStartIndexStringFromUpperBound } from 
"../../custom-index.js"; -import { appendToArray, batchArray, ensureNotFalsy, now, PROMISE_RESOLVE_VOID, toArray } from "../../plugins/utils/index.js"; -import { queryFoundationDB } from "./foundationdb-query.js"; -import { INDEX_MAX } from "../../query-planner.js"; -import { attachmentMapKey } from "../storage-memory/index.js"; -export var RxStorageInstanceFoundationDB = /*#__PURE__*/function () { - function RxStorageInstanceFoundationDB(storage, databaseName, collectionName, schema, internals, options, settings) { - this.changes$ = new Subject(); - this.storage = storage; - this.databaseName = databaseName; - this.collectionName = collectionName; - this.schema = schema; - this.internals = internals; - this.options = options; - this.settings = settings; - this.primaryPath = getPrimaryFieldOfPrimaryKey(this.schema.primaryKey); - } - var _proto = RxStorageInstanceFoundationDB.prototype; - _proto.bulkWrite = async function bulkWrite(documentWrites, context) { - var dbs = await this.internals.dbsPromise; - var ret = { - success: [], - error: [] - }; - - /** - * Doing too many write in a single transaction - * will throw with a 'Transaction exceeds byte limit' - * so we have to batch up the writes. - */ - var writeBatches = batchArray(documentWrites, FOUNDATION_DB_WRITE_BATCH_SIZE); - await Promise.all(writeBatches.map(async writeBatch => { - var categorized = null; - await dbs.root.doTransaction(async tx => { - var ids = writeBatch.map(row => row.document[this.primaryPath]); - var mainTx = tx.at(dbs.main.subspace); - var attachmentTx = tx.at(dbs.attachments.subspace); - var docsInDB = new Map(); - /** - * TODO this might be faster if fdb - * any time adds a bulk-fetch-by-key method. 
- */ - await Promise.all(ids.map(async id => { - var doc = await mainTx.get(id); - docsInDB.set(id, doc); - })); - categorized = categorizeBulkWriteRows(this, this.primaryPath, docsInDB, writeBatch, context); - appendToArray(ret.error, categorized.errors); - - // INSERTS - categorized.bulkInsertDocs.forEach(writeRow => { - var docId = writeRow.document[this.primaryPath]; - ret.success.push(writeRow.document); - - // insert document data - mainTx.set(docId, writeRow.document); - - // insert secondary indexes - Object.values(dbs.indexes).forEach(indexMeta => { - var indexString = indexMeta.getIndexableString(writeRow.document); - var indexTx = tx.at(indexMeta.db.subspace); - indexTx.set(indexString, docId); - }); - }); - // UPDATES - categorized.bulkUpdateDocs.forEach(writeRow => { - var docId = writeRow.document[this.primaryPath]; - - // overwrite document data - mainTx.set(docId, writeRow.document); - - // update secondary indexes - Object.values(dbs.indexes).forEach(indexMeta => { - var oldIndexString = indexMeta.getIndexableString(ensureNotFalsy(writeRow.previous)); - var newIndexString = indexMeta.getIndexableString(writeRow.document); - if (oldIndexString !== newIndexString) { - var indexTx = tx.at(indexMeta.db.subspace); - indexTx.delete(oldIndexString); - indexTx.set(newIndexString, docId); - } - }); - ret.success.push(writeRow.document); - }); - - // attachments - categorized.attachmentsAdd.forEach(attachment => { - attachmentTx.set(attachmentMapKey(attachment.documentId, attachment.attachmentId), attachment.attachmentData); - }); - categorized.attachmentsUpdate.forEach(attachment => { - attachmentTx.set(attachmentMapKey(attachment.documentId, attachment.attachmentId), attachment.attachmentData); - }); - categorized.attachmentsRemove.forEach(attachment => { - attachmentTx.delete(attachmentMapKey(attachment.documentId, attachment.attachmentId)); - }); - }); - categorized = ensureNotFalsy(categorized); - /** - * The events must be emitted AFTER the transaction 
- * has finished. - * Otherwise an observable changestream might cause a read - * to a document that does not already exist outside of the transaction. - */ - if (categorized.eventBulk.events.length > 0) { - var lastState = ensureNotFalsy(categorized.newestRow).document; - categorized.eventBulk.checkpoint = { - id: lastState[this.primaryPath], - lwt: lastState._meta.lwt - }; - categorized.eventBulk.endTime = now(); - this.changes$.next(categorized.eventBulk); - } - })); - return ret; - }; - _proto.findDocumentsById = async function findDocumentsById(ids, withDeleted) { - var dbs = await this.internals.dbsPromise; - return dbs.main.doTransaction(async tx => { - var ret = []; - await Promise.all(ids.map(async docId => { - var docInDb = await tx.get(docId); - if (docInDb && (!docInDb._deleted || withDeleted)) { - ret.push(docInDb); - } - })); - return ret; - }); - }; - _proto.query = function query(preparedQuery) { - return queryFoundationDB(this, preparedQuery); - }; - _proto.count = async function count(preparedQuery) { - /** - * At this point in time (end 2022), FoundationDB does not support - * range counts. So we have to run a normal query and use the result set length. 
- * @link https://github.com/apple/foundationdb/issues/5981 - */ - var result = await this.query(preparedQuery); - return { - count: result.documents.length, - mode: 'fast' - }; - }; - _proto.getAttachmentData = async function getAttachmentData(documentId, attachmentId, _digest) { - var dbs = await this.internals.dbsPromise; - var attachment = await dbs.attachments.get(attachmentMapKey(documentId, attachmentId)); - return attachment.data; - }; - _proto.changeStream = function changeStream() { - return this.changes$.asObservable(); - }; - _proto.remove = async function remove() { - var dbs = await this.internals.dbsPromise; - await dbs.root.doTransaction(tx => { - tx.clearRange('', INDEX_MAX); - return PROMISE_RESOLVE_VOID; - }); - return this.close(); - }; - _proto.cleanup = async function cleanup(minimumDeletedTime) { - var { - keySelector, - StreamingMode - } = require('foundationdb'); - var maxDeletionTime = now() - minimumDeletedTime; - var dbs = await this.internals.dbsPromise; - var index = CLEANUP_INDEX; - var indexName = getFoundationDBIndexName(index); - var indexMeta = dbs.indexes[indexName]; - var lowerBoundString = getStartIndexStringFromLowerBound(this.schema, index, [true, - /** - * Do not use 0 here, - * because 1 is the minimum value for _meta.lwt - */ - 1]); - var upperBoundString = getStartIndexStringFromUpperBound(this.schema, index, [true, maxDeletionTime]); - var noMoreUndeleted = true; - await dbs.root.doTransaction(async tx => { - var batchSize = ensureNotFalsy(this.settings.batchSize); - var indexTx = tx.at(indexMeta.db.subspace); - var mainTx = tx.at(dbs.main.subspace); - var range = await indexTx.getRangeAll(keySelector.firstGreaterThan(lowerBoundString), upperBoundString, { - limit: batchSize + 1, - // get one more extra to detect what to return from cleanup() - streamingMode: StreamingMode.Exact - }); - if (range.length > batchSize) { - noMoreUndeleted = false; - range.pop(); - } - var docIds = range.map(row => row[1]); - var docsData = 
await Promise.all(docIds.map(docId => mainTx.get(docId))); - Object.values(dbs.indexes).forEach(indexMetaInner => { - var subIndexDB = tx.at(indexMetaInner.db.subspace); - docsData.forEach(docData => { - var indexString = indexMetaInner.getIndexableString(docData); - subIndexDB.delete(indexString); - }); - }); - docIds.forEach(id => mainTx.delete(id)); - }); - return noMoreUndeleted; - }; - _proto.conflictResultionTasks = function conflictResultionTasks() { - return new Subject().asObservable(); - }; - _proto.resolveConflictResultionTask = function resolveConflictResultionTask(_taskSolution) { - return PROMISE_RESOLVE_VOID; - }; - _proto.close = async function close() { - if (this.closed) { - return this.closed; - } - this.closed = (async () => { - this.changes$.complete(); - var dbs = await this.internals.dbsPromise; - await dbs.root.close(); - - // TODO shouldn't we close the index databases? - // Object.values(dbs.indexes).forEach(db => db.close()); - })(); - return this.closed; - }; - return RxStorageInstanceFoundationDB; -}(); -export function createFoundationDBStorageInstance(storage, params, settings) { - var primaryPath = getPrimaryFieldOfPrimaryKey(params.schema.primaryKey); - var { - open, - directory, - encoders - } = require('foundationdb'); - var connection = open(settings.clusterFile); - var dbsPromise = (async () => { - var dir = await directory.createOrOpen(connection, 'rxdb'); - var root = connection.at(dir).at(params.databaseName + '.').at(params.collectionName + '.').at(params.schema.version + '.'); - var main = root.at('main.').withKeyEncoding(encoders.string) // automatically encode & decode keys using tuples - .withValueEncoding(encoders.json); // and values using JSON - - var events = root.at('events.').withKeyEncoding(encoders.string).withValueEncoding(encoders.json); - var attachments = root.at('attachments.').withKeyEncoding(encoders.string).withValueEncoding(encoders.json); - var indexDBs = {}; - var useIndexes = params.schema.indexes ? 
params.schema.indexes.slice(0) : []; - useIndexes.push([primaryPath]); - var useIndexesFinal = useIndexes.map(index => { - var indexAr = toArray(index); - return indexAr; - }); - // used for `getChangedDocumentsSince()` - useIndexesFinal.push(['_meta.lwt', primaryPath]); - useIndexesFinal.push(CLEANUP_INDEX); - useIndexesFinal.forEach(indexAr => { - var indexName = getFoundationDBIndexName(indexAr); - var indexDB = root.at(indexName + '.').withKeyEncoding(encoders.string).withValueEncoding(encoders.string); - indexDBs[indexName] = { - indexName, - db: indexDB, - getIndexableString: getIndexableStringMonad(params.schema, indexAr), - index: indexAr - }; - }); - return { - root, - main, - events, - attachments, - indexes: indexDBs - }; - })(); - var internals = { - connection, - dbsPromise: dbsPromise - }; - var instance = new RxStorageInstanceFoundationDB(storage, params.databaseName, params.collectionName, params.schema, internals, params.options, settings); - return Promise.resolve(instance); -} -//# sourceMappingURL=rx-storage-instance-foundationdb.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-foundationdb/rx-storage-instance-foundationdb.js.map b/dist/esm/plugins/storage-foundationdb/rx-storage-instance-foundationdb.js.map deleted file mode 100644 index 7d823defaf8..00000000000 --- a/dist/esm/plugins/storage-foundationdb/rx-storage-instance-foundationdb.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"rx-storage-instance-foundationdb.js","names":["Subject","getPrimaryFieldOfPrimaryKey","categorizeBulkWriteRows","CLEANUP_INDEX","FOUNDATION_DB_WRITE_BATCH_SIZE","getFoundationDBIndexName","getIndexableStringMonad","getStartIndexStringFromLowerBound","getStartIndexStringFromUpperBound","appendToArray","batchArray","ensureNotFalsy","now","PROMISE_RESOLVE_VOID","toArray","queryFoundationDB","INDEX_MAX","attachmentMapKey","RxStorageInstanceFoundationDB","storage","databaseName","collectionName","schema","internals","options","settings","changes$","primaryPath","primaryKey","_proto","prototype","bulkWrite","documentWrites","context","dbs","dbsPromise","ret","success","error","writeBatches","Promise","all","map","writeBatch","categorized","root","doTransaction","tx","ids","row","document","mainTx","at","main","subspace","attachmentTx","attachments","docsInDB","Map","id","doc","get","set","errors","bulkInsertDocs","forEach","writeRow","docId","push","Object","values","indexes","indexMeta","indexString","getIndexableString","indexTx","db","bulkUpdateDocs","oldIndexString","previous","newIndexString","delete","attachmentsAdd","attachment","documentId","attachmentId","attachmentData","attachmentsUpdate","attachmentsRemove","eventBulk","events","length","lastState","newestRow","checkpoint","lwt","_meta","endTime","next","findDocumentsById","withDeleted","docInDb","_deleted","query","preparedQuery","count","result","documents","mode","getAttachmentData","_digest","data","changeStream","asObservable","remove","clearRange","close","cleanup","minimumDeletedTime","keySelector","StreamingMode","require","maxDeletionTime","index","indexName","lowerBoundString","upperBoundString","noMoreUndeleted","batchSize","range","getRangeAll","firstGreaterThan","limit","streamingMode","Exact","pop","docIds","docsData","indexMetaInner","subIndexDB","docData","conflictResultionTasks","resolveConflictResultionTask","_taskSolution","closed","complete","createFoundationDBStorageI
nstance","params","open","directory","encoders","connection","clusterFile","dir","createOrOpen","version","withKeyEncoding","string","withValueEncoding","json","indexDBs","useIndexes","slice","useIndexesFinal","indexAr","indexDB","instance","resolve"],"sources":["../../../../src/plugins/storage-foundationdb/rx-storage-instance-foundationdb.ts"],"sourcesContent":["import { Observable, Subject } from 'rxjs';\nimport { getPrimaryFieldOfPrimaryKey } from '../../rx-schema-helper.ts';\nimport type {\n BulkWriteRow,\n CategorizeBulkWriteRowsOutput,\n EventBulk,\n PreparedQuery,\n RxAttachmentWriteData,\n RxConflictResultionTask,\n RxConflictResultionTaskSolution,\n RxDocumentData,\n RxJsonSchema,\n RxStorageBulkWriteResponse,\n RxStorageChangeEvent,\n RxStorageCountResult,\n RxStorageDefaultCheckpoint,\n RxStorageInstance,\n RxStorageInstanceCreationParams,\n RxStorageQueryResult,\n StringKeys\n} from '../../types/index.d.ts';\nimport type {\n FoundationDBDatabase,\n FoundationDBIndexMeta,\n FoundationDBStorageInternals,\n RxStorageFoundationDB,\n RxStorageFoundationDBInstanceCreationOptions,\n RxStorageFoundationDBSettings\n} from './foundationdb-types.ts';\n// import {\n// open as foundationDBOpen,\n// directory as foundationDBDirectory,\n// encoders as foundationDBEncoders,\n// keySelector as foundationDBKeySelector,\n// StreamingMode as foundationDBStreamingMode\n// } from 'foundationdb';\nimport {\n categorizeBulkWriteRows\n} from '../../rx-storage-helper.ts';\nimport {\n\n CLEANUP_INDEX,\n FOUNDATION_DB_WRITE_BATCH_SIZE,\n getFoundationDBIndexName\n} from './foundationdb-helpers.ts';\nimport {\n getIndexableStringMonad,\n getStartIndexStringFromLowerBound,\n getStartIndexStringFromUpperBound\n} from '../../custom-index.ts';\nimport {\n appendToArray,\n batchArray,\n ensureNotFalsy,\n lastOfArray,\n now,\n PROMISE_RESOLVE_VOID,\n toArray\n} from '../../plugins/utils/index.ts';\nimport { queryFoundationDB } from './foundationdb-query.ts';\nimport { INDEX_MAX } from 
'../../query-planner.ts';\nimport { attachmentMapKey } from '../storage-memory/index.ts';\n\nexport class RxStorageInstanceFoundationDB implements RxStorageInstance<\n RxDocType,\n FoundationDBStorageInternals,\n RxStorageFoundationDBInstanceCreationOptions,\n RxStorageDefaultCheckpoint\n> {\n public readonly primaryPath: StringKeys>;\n\n public closed?: Promise;\n private changes$: Subject>, RxStorageDefaultCheckpoint>> = new Subject();\n\n constructor(\n public readonly storage: RxStorageFoundationDB,\n public readonly databaseName: string,\n public readonly collectionName: string,\n public readonly schema: Readonly>>,\n public readonly internals: FoundationDBStorageInternals,\n public readonly options: Readonly,\n public readonly settings: RxStorageFoundationDBSettings\n ) {\n this.primaryPath = getPrimaryFieldOfPrimaryKey(this.schema.primaryKey);\n }\n\n async bulkWrite(\n documentWrites: BulkWriteRow[],\n context: string\n ): Promise> {\n const dbs = await this.internals.dbsPromise;\n const ret: RxStorageBulkWriteResponse = {\n success: [],\n error: []\n };\n\n /**\n * Doing too many write in a single transaction\n * will throw with a 'Transaction exceeds byte limit'\n * so we have to batch up the writes.\n */\n const writeBatches = batchArray(documentWrites, FOUNDATION_DB_WRITE_BATCH_SIZE);\n await Promise.all(\n writeBatches.map(async (writeBatch) => {\n let categorized: CategorizeBulkWriteRowsOutput | undefined = null as any;\n await dbs.root.doTransaction(async (tx: any) => {\n const ids = writeBatch.map(row => (row.document as any)[this.primaryPath]);\n const mainTx = tx.at(dbs.main.subspace);\n const attachmentTx = tx.at(dbs.attachments.subspace);\n const docsInDB = new Map>();\n /**\n * TODO this might be faster if fdb\n * any time adds a bulk-fetch-by-key method.\n */\n await Promise.all(\n ids.map(async (id) => {\n const doc = await mainTx.get(id);\n docsInDB.set(id, doc);\n })\n );\n categorized = categorizeBulkWriteRows(\n this,\n this.primaryPath 
as any,\n docsInDB,\n writeBatch,\n context\n );\n appendToArray(ret.error, categorized.errors);\n\n // INSERTS\n categorized.bulkInsertDocs.forEach(writeRow => {\n const docId: string = writeRow.document[this.primaryPath] as any;\n ret.success.push(writeRow.document);\n\n // insert document data\n mainTx.set(docId, writeRow.document);\n\n // insert secondary indexes\n Object.values(dbs.indexes).forEach(indexMeta => {\n const indexString = indexMeta.getIndexableString(writeRow.document as any);\n const indexTx = tx.at(indexMeta.db.subspace);\n indexTx.set(indexString, docId);\n });\n });\n // UPDATES\n categorized.bulkUpdateDocs.forEach((writeRow: BulkWriteRow) => {\n const docId: string = writeRow.document[this.primaryPath] as any;\n\n // overwrite document data\n mainTx.set(docId, writeRow.document);\n\n // update secondary indexes\n Object.values(dbs.indexes).forEach(indexMeta => {\n const oldIndexString = indexMeta.getIndexableString(ensureNotFalsy(writeRow.previous));\n const newIndexString = indexMeta.getIndexableString(writeRow.document as any);\n if (oldIndexString !== newIndexString) {\n const indexTx = tx.at(indexMeta.db.subspace);\n indexTx.delete(oldIndexString);\n indexTx.set(newIndexString, docId);\n }\n });\n ret.success.push(writeRow.document as any);\n });\n\n // attachments\n categorized.attachmentsAdd.forEach(attachment => {\n attachmentTx.set(\n attachmentMapKey(attachment.documentId, attachment.attachmentId),\n attachment.attachmentData\n );\n });\n categorized.attachmentsUpdate.forEach(attachment => {\n attachmentTx.set(\n attachmentMapKey(attachment.documentId, attachment.attachmentId),\n attachment.attachmentData\n );\n });\n categorized.attachmentsRemove.forEach(attachment => {\n attachmentTx.delete(\n attachmentMapKey(attachment.documentId, attachment.attachmentId)\n );\n });\n });\n categorized = ensureNotFalsy(categorized);\n /**\n * The events must be emitted AFTER the transaction\n * has finished.\n * Otherwise an observable 
changestream might cause a read\n * to a document that does not already exist outside of the transaction.\n */\n if (categorized.eventBulk.events.length > 0) {\n const lastState = ensureNotFalsy(categorized.newestRow).document;\n categorized.eventBulk.checkpoint = {\n id: lastState[this.primaryPath],\n lwt: lastState._meta.lwt\n };\n categorized.eventBulk.endTime = now();\n this.changes$.next(categorized.eventBulk);\n }\n })\n );\n\n\n return ret;\n }\n\n async findDocumentsById(ids: string[], withDeleted: boolean): Promise[]> {\n const dbs = await this.internals.dbsPromise;\n return dbs.main.doTransaction(async (tx: any) => {\n const ret: RxDocumentData[] = [];\n await Promise.all(\n ids.map(async (docId) => {\n const docInDb = await tx.get(docId);\n if (\n docInDb &&\n (\n !docInDb._deleted ||\n withDeleted\n )\n ) {\n ret.push(docInDb);\n }\n })\n );\n return ret;\n });\n }\n query(preparedQuery: PreparedQuery): Promise> {\n return queryFoundationDB(this, preparedQuery);\n }\n async count(\n preparedQuery: PreparedQuery\n ): Promise {\n /**\n * At this point in time (end 2022), FoundationDB does not support\n * range counts. 
So we have to run a normal query and use the result set length.\n * @link https://github.com/apple/foundationdb/issues/5981\n */\n const result = await this.query(preparedQuery);\n return {\n count: result.documents.length,\n mode: 'fast'\n };\n }\n\n async getAttachmentData(documentId: string, attachmentId: string, _digest: string): Promise {\n const dbs = await this.internals.dbsPromise;\n const attachment = await dbs.attachments.get(attachmentMapKey(documentId, attachmentId));\n return attachment.data;\n }\n changeStream(): Observable, RxStorageDefaultCheckpoint>> {\n return this.changes$.asObservable();\n }\n\n async remove(): Promise {\n const dbs = await this.internals.dbsPromise;\n await dbs.root.doTransaction((tx: any) => {\n tx.clearRange('', INDEX_MAX);\n return PROMISE_RESOLVE_VOID;\n });\n return this.close();\n }\n async cleanup(minimumDeletedTime: number): Promise {\n const {\n keySelector,\n StreamingMode\n } = require('foundationdb');\n const maxDeletionTime = now() - minimumDeletedTime;\n const dbs = await this.internals.dbsPromise;\n const index = CLEANUP_INDEX;\n const indexName = getFoundationDBIndexName(index);\n const indexMeta = dbs.indexes[indexName];\n const lowerBoundString = getStartIndexStringFromLowerBound(\n this.schema,\n index,\n [\n true,\n /**\n * Do not use 0 here,\n * because 1 is the minimum value for _meta.lwt\n */\n 1\n ]\n );\n const upperBoundString = getStartIndexStringFromUpperBound(\n this.schema,\n index,\n [\n true,\n maxDeletionTime\n ]\n );\n let noMoreUndeleted: boolean = true;\n await dbs.root.doTransaction(async (tx: any) => {\n const batchSize = ensureNotFalsy(this.settings.batchSize);\n const indexTx = tx.at(indexMeta.db.subspace);\n const mainTx = tx.at(dbs.main.subspace);\n const range = await indexTx.getRangeAll(\n keySelector.firstGreaterThan(lowerBoundString),\n upperBoundString,\n {\n limit: batchSize + 1, // get one more extra to detect what to return from cleanup()\n streamingMode: StreamingMode.Exact\n 
}\n );\n if (range.length > batchSize) {\n noMoreUndeleted = false;\n range.pop();\n }\n const docIds = range.map((row: string[]) => row[1]);\n const docsData: RxDocumentData[] = await Promise.all(docIds.map((docId: string) => mainTx.get(docId)));\n\n Object\n .values(dbs.indexes)\n .forEach(indexMetaInner => {\n const subIndexDB = tx.at(indexMetaInner.db.subspace);\n docsData.forEach(docData => {\n const indexString = indexMetaInner.getIndexableString(docData);\n subIndexDB.delete(indexString);\n });\n });\n docIds.forEach((id: string) => mainTx.delete(id));\n });\n\n return noMoreUndeleted;\n }\n\n conflictResultionTasks(): Observable> {\n return new Subject().asObservable();\n }\n resolveConflictResultionTask(_taskSolution: RxConflictResultionTaskSolution): Promise {\n return PROMISE_RESOLVE_VOID;\n }\n\n async close() {\n if (this.closed) {\n return this.closed;\n }\n this.closed = (async () => {\n this.changes$.complete();\n const dbs = await this.internals.dbsPromise;\n await dbs.root.close();\n\n // TODO shouldn't we close the index databases?\n // Object.values(dbs.indexes).forEach(db => db.close());\n })();\n return this.closed;\n }\n}\n\n\nexport function createFoundationDBStorageInstance(\n storage: RxStorageFoundationDB,\n params: RxStorageInstanceCreationParams,\n settings: RxStorageFoundationDBSettings\n): Promise> {\n const primaryPath = getPrimaryFieldOfPrimaryKey(params.schema.primaryKey);\n\n const {\n open,\n directory,\n encoders\n } = require('foundationdb');\n\n const connection = open(settings.clusterFile);\n const dbsPromise = (async () => {\n const dir = await directory.createOrOpen(connection, 'rxdb');\n\n const root = connection\n .at(dir)\n .at(params.databaseName + '.')\n .at(params.collectionName + '.')\n .at(params.schema.version + '.');\n const main: FoundationDBDatabase = root\n .at('main.')\n .withKeyEncoding(encoders.string) // automatically encode & decode keys using tuples\n .withValueEncoding(encoders.json) as any; // and 
values using JSON\n\n\n const events: FoundationDBDatabase>, RxStorageDefaultCheckpoint>> = root\n .at('events.')\n .withKeyEncoding(encoders.string)\n .withValueEncoding(encoders.json) as any;\n\n const attachments: FoundationDBDatabase = root\n .at('attachments.')\n .withKeyEncoding(encoders.string)\n .withValueEncoding(encoders.json) as any;\n\n\n const indexDBs: { [indexName: string]: FoundationDBIndexMeta; } = {};\n const useIndexes = params.schema.indexes ? params.schema.indexes.slice(0) : [];\n useIndexes.push([primaryPath]);\n const useIndexesFinal = useIndexes.map(index => {\n const indexAr = toArray(index);\n return indexAr;\n });\n // used for `getChangedDocumentsSince()`\n useIndexesFinal.push([\n '_meta.lwt',\n primaryPath\n ]);\n useIndexesFinal.push(CLEANUP_INDEX);\n useIndexesFinal.forEach(indexAr => {\n const indexName = getFoundationDBIndexName(indexAr);\n const indexDB = root.at(indexName + '.')\n .withKeyEncoding(encoders.string)\n .withValueEncoding(encoders.string);\n indexDBs[indexName] = {\n indexName,\n db: indexDB,\n getIndexableString: getIndexableStringMonad(params.schema, indexAr),\n index: indexAr\n };\n });\n\n return {\n root,\n main,\n events,\n attachments,\n indexes: indexDBs\n };\n })();\n\n\n const internals: FoundationDBStorageInternals = {\n connection,\n dbsPromise: dbsPromise\n };\n\n const instance = new RxStorageInstanceFoundationDB(\n storage,\n params.databaseName,\n params.collectionName,\n params.schema,\n internals,\n params.options,\n settings\n );\n return 
Promise.resolve(instance);\n}\n"],"mappings":"AAAA,SAAqBA,OAAO,QAAQ,MAAM;AAC1C,SAASC,2BAA2B,QAAQ,2BAA2B;AA4BvE;AACA;AACA;AACA;AACA;AACA;AACA;AACA,SACIC,uBAAuB,QACpB,4BAA4B;AACnC,SAEIC,aAAa,EACbC,8BAA8B,EAC9BC,wBAAwB,QACrB,2BAA2B;AAClC,SACIC,uBAAuB,EACvBC,iCAAiC,EACjCC,iCAAiC,QAC9B,uBAAuB;AAC9B,SACIC,aAAa,EACbC,UAAU,EACVC,cAAc,EAEdC,GAAG,EACHC,oBAAoB,EACpBC,OAAO,QACJ,8BAA8B;AACrC,SAASC,iBAAiB,QAAQ,yBAAyB;AAC3D,SAASC,SAAS,QAAQ,wBAAwB;AAClD,SAASC,gBAAgB,QAAQ,4BAA4B;AAE7D,WAAaC,6BAA6B;EAWtC,SAAAA,8BACoBC,OAA8B,EAC9BC,YAAoB,EACpBC,cAAsB,EACtBC,MAAyD,EACzDC,SAAkD,EAClDC,OAA+D,EAC/DC,QAAuC,EACzD;IAAA,KAVMC,QAAQ,GAAoG,IAAI1B,OAAO,CAAC,CAAC;IAAA,KAG7GmB,OAA8B,GAA9BA,OAA8B;IAAA,KAC9BC,YAAoB,GAApBA,YAAoB;IAAA,KACpBC,cAAsB,GAAtBA,cAAsB;IAAA,KACtBC,MAAyD,GAAzDA,MAAyD;IAAA,KACzDC,SAAkD,GAAlDA,SAAkD;IAAA,KAClDC,OAA+D,GAA/DA,OAA+D;IAAA,KAC/DC,QAAuC,GAAvCA,QAAuC;IAEvD,IAAI,CAACE,WAAW,GAAG1B,2BAA2B,CAAC,IAAI,CAACqB,MAAM,CAACM,UAAU,CAAC;EAC1E;EAAC,IAAAC,MAAA,GAAAX,6BAAA,CAAAY,SAAA;EAAAD,MAAA,CAEKE,SAAS,GAAf,eAAAA,UACIC,cAAyC,EACzCC,OAAe,EAC+B;IAC9C,IAAMC,GAAG,GAAG,MAAM,IAAI,CAACX,SAAS,CAACY,UAAU;IAC3C,IAAMC,GAA0C,GAAG;MAC/CC,OAAO,EAAE,EAAE;MACXC,KAAK,EAAE;IACX,CAAC;;IAED;AACR;AACA;AACA;AACA;IACQ,IAAMC,YAAY,GAAG7B,UAAU,CAACsB,cAAc,EAAE5B,8BAA8B,CAAC;IAC/E,MAAMoC,OAAO,CAACC,GAAG,CACbF,YAAY,CAACG,GAAG,CAAC,MAAOC,UAAU,IAAK;MACnC,IAAIC,WAAiE,GAAG,IAAW;MACnF,MAAMV,GAAG,CAACW,IAAI,CAACC,aAAa,CAAC,MAAOC,EAAO,IAAK;QAC5C,IAAMC,GAAG,GAAGL,UAAU,CAACD,GAAG,CAACO,GAAG,IAAKA,GAAG,CAACC,QAAQ,CAAS,IAAI,CAACvB,WAAW,CAAC,CAAC;QAC1E,IAAMwB,MAAM,GAAGJ,EAAE,CAACK,EAAE,CAAClB,GAAG,CAACmB,IAAI,CAACC,QAAQ,CAAC;QACvC,IAAMC,YAAY,GAAGR,EAAE,CAACK,EAAE,CAAClB,GAAG,CAACsB,WAAW,CAACF,QAAQ,CAAC;QACpD,IAAMG,QAAQ,GAAG,IAAIC,GAAG,CAAoC,CAAC;QAC7D;AACpB;AACA;AACA;QACoB,MAAMlB,OAAO,CAACC,GAAG,CACbO,GAAG,CAACN,GAAG,CAAC,MAAOiB,EAAE,IAAK;UAClB,IAAMC,GAAG,GAAG,MAAMT,MAAM,CAACU,GAAG,CAACF,EAAE,CAAC;UAChCF,QAAQ,CAACK,GAAG,CAACH,EAAE,EAAEC,GAAG,CAAC;QACzB,CAAC,CACL,CAAC;QACDhB,WAAW,GAAG1C,uBAAuB,CACjC,IAAI,EACJ,IAAI,CAACyB,WAA
W,EAChB8B,QAAQ,EACRd,UAAU,EACVV,OACJ,CAAC;QACDxB,aAAa,CAAC2B,GAAG,CAACE,KAAK,EAAEM,WAAW,CAACmB,MAAM,CAAC;;QAE5C;QACAnB,WAAW,CAACoB,cAAc,CAACC,OAAO,CAACC,QAAQ,IAAI;UAC3C,IAAMC,KAAa,GAAGD,QAAQ,CAAChB,QAAQ,CAAC,IAAI,CAACvB,WAAW,CAAQ;UAChES,GAAG,CAACC,OAAO,CAAC+B,IAAI,CAACF,QAAQ,CAAChB,QAAQ,CAAC;;UAEnC;UACAC,MAAM,CAACW,GAAG,CAACK,KAAK,EAAED,QAAQ,CAAChB,QAAQ,CAAC;;UAEpC;UACAmB,MAAM,CAACC,MAAM,CAACpC,GAAG,CAACqC,OAAO,CAAC,CAACN,OAAO,CAACO,SAAS,IAAI;YAC5C,IAAMC,WAAW,GAAGD,SAAS,CAACE,kBAAkB,CAACR,QAAQ,CAAChB,QAAe,CAAC;YAC1E,IAAMyB,OAAO,GAAG5B,EAAE,CAACK,EAAE,CAACoB,SAAS,CAACI,EAAE,CAACtB,QAAQ,CAAC;YAC5CqB,OAAO,CAACb,GAAG,CAACW,WAAW,EAAEN,KAAK,CAAC;UACnC,CAAC,CAAC;QACN,CAAC,CAAC;QACF;QACAvB,WAAW,CAACiC,cAAc,CAACZ,OAAO,CAAEC,QAAiC,IAAK;UACtE,IAAMC,KAAa,GAAGD,QAAQ,CAAChB,QAAQ,CAAC,IAAI,CAACvB,WAAW,CAAQ;;UAEhE;UACAwB,MAAM,CAACW,GAAG,CAACK,KAAK,EAAED,QAAQ,CAAChB,QAAQ,CAAC;;UAEpC;UACAmB,MAAM,CAACC,MAAM,CAACpC,GAAG,CAACqC,OAAO,CAAC,CAACN,OAAO,CAACO,SAAS,IAAI;YAC5C,IAAMM,cAAc,GAAGN,SAAS,CAACE,kBAAkB,CAAC/D,cAAc,CAACuD,QAAQ,CAACa,QAAQ,CAAC,CAAC;YACtF,IAAMC,cAAc,GAAGR,SAAS,CAACE,kBAAkB,CAACR,QAAQ,CAAChB,QAAe,CAAC;YAC7E,IAAI4B,cAAc,KAAKE,cAAc,EAAE;cACnC,IAAML,OAAO,GAAG5B,EAAE,CAACK,EAAE,CAACoB,SAAS,CAACI,EAAE,CAACtB,QAAQ,CAAC;cAC5CqB,OAAO,CAACM,MAAM,CAACH,cAAc,CAAC;cAC9BH,OAAO,CAACb,GAAG,CAACkB,cAAc,EAAEb,KAAK,CAAC;YACtC;UACJ,CAAC,CAAC;UACF/B,GAAG,CAACC,OAAO,CAAC+B,IAAI,CAACF,QAAQ,CAAChB,QAAe,CAAC;QAC9C,CAAC,CAAC;;QAEF;QACAN,WAAW,CAACsC,cAAc,CAACjB,OAAO,CAACkB,UAAU,IAAI;UAC7C5B,YAAY,CAACO,GAAG,CACZ7C,gBAAgB,CAACkE,UAAU,CAACC,UAAU,EAAED,UAAU,CAACE,YAAY,CAAC,EAChEF,UAAU,CAACG,cACf,CAAC;QACL,CAAC,CAAC;QACF1C,WAAW,CAAC2C,iBAAiB,CAACtB,OAAO,CAACkB,UAAU,IAAI;UAChD5B,YAAY,CAACO,GAAG,CACZ7C,gBAAgB,CAACkE,UAAU,CAACC,UAAU,EAAED,UAAU,CAACE,YAAY,CAAC,EAChEF,UAAU,CAACG,cACf,CAAC;QACL,CAAC,CAAC;QACF1C,WAAW,CAAC4C,iBAAiB,CAACvB,OAAO,CAACkB,UAAU,IAAI;UAChD5B,YAAY,CAAC0B,MAAM,CACfhE,gBAAgB,CAACkE,UAAU,CAACC,UAAU,EAAED,UAAU,CAACE,YAAY,CACnE,CAAC;QACL,CAAC,CAAC;MACN,CAAC,CAAC;MACFzC,WAAW,GAAGjC,cAAc,CAA
CiC,WAAW,CAAC;MACzC;AAChB;AACA;AACA;AACA;AACA;MACgB,IAAIA,WAAW,CAAC6C,SAAS,CAACC,MAAM,CAACC,MAAM,GAAG,CAAC,EAAE;QACzC,IAAMC,SAAS,GAAGjF,cAAc,CAACiC,WAAW,CAACiD,SAAS,CAAC,CAAC3C,QAAQ;QAChEN,WAAW,CAAC6C,SAAS,CAACK,UAAU,GAAG;UAC/BnC,EAAE,EAAEiC,SAAS,CAAC,IAAI,CAACjE,WAAW,CAAC;UAC/BoE,GAAG,EAAEH,SAAS,CAACI,KAAK,CAACD;QACzB,CAAC;QACDnD,WAAW,CAAC6C,SAAS,CAACQ,OAAO,GAAGrF,GAAG,CAAC,CAAC;QACrC,IAAI,CAACc,QAAQ,CAACwE,IAAI,CAACtD,WAAW,CAAC6C,SAAS,CAAC;MAC7C;IACJ,CAAC,CACL,CAAC;IAGD,OAAOrD,GAAG;EACd,CAAC;EAAAP,MAAA,CAEKsE,iBAAiB,GAAvB,eAAAA,kBAAwBnD,GAAa,EAAEoD,WAAoB,EAAwC;IAC/F,IAAMlE,GAAG,GAAG,MAAM,IAAI,CAACX,SAAS,CAACY,UAAU;IAC3C,OAAOD,GAAG,CAACmB,IAAI,CAACP,aAAa,CAAC,MAAOC,EAAO,IAAK;MAC7C,IAAMX,GAAgC,GAAG,EAAE;MAC3C,MAAMI,OAAO,CAACC,GAAG,CACbO,GAAG,CAACN,GAAG,CAAC,MAAOyB,KAAK,IAAK;QACrB,IAAMkC,OAAO,GAAG,MAAMtD,EAAE,CAACc,GAAG,CAACM,KAAK,CAAC;QACnC,IACIkC,OAAO,KAEH,CAACA,OAAO,CAACC,QAAQ,IACjBF,WAAW,CACd,EACH;UACEhE,GAAG,CAACgC,IAAI,CAACiC,OAAO,CAAC;QACrB;MACJ,CAAC,CACL,CAAC;MACD,OAAOjE,GAAG;IACd,CAAC,CAAC;EACN,CAAC;EAAAP,MAAA,CACD0E,KAAK,GAAL,SAAAA,MAAMC,aAAuC,EAA4C;IACrF,OAAOzF,iBAAiB,CAAC,IAAI,EAAEyF,aAAa,CAAC;EACjD,CAAC;EAAA3E,MAAA,CACK4E,KAAK,GAAX,eAAAA,MACID,aAAuC,EACV;IAC7B;AACR;AACA;AACA;AACA;IACQ,IAAME,MAAM,GAAG,MAAM,IAAI,CAACH,KAAK,CAACC,aAAa,CAAC;IAC9C,OAAO;MACHC,KAAK,EAAEC,MAAM,CAACC,SAAS,CAAChB,MAAM;MAC9BiB,IAAI,EAAE;IACV,CAAC;EACL,CAAC;EAAA/E,MAAA,CAEKgF,iBAAiB,GAAvB,eAAAA,kBAAwBzB,UAAkB,EAAEC,YAAoB,EAAEyB,OAAe,EAAmB;IAChG,IAAM5E,GAAG,GAAG,MAAM,IAAI,CAACX,SAAS,CAACY,UAAU;IAC3C,IAAMgD,UAAU,GAAG,MAAMjD,GAAG,CAACsB,WAAW,CAACK,GAAG,CAAC5C,gBAAgB,CAACmE,UAAU,EAAEC,YAAY,CAAC,CAAC;IACxF,OAAOF,UAAU,CAAC4B,IAAI;EAC1B,CAAC;EAAAlF,MAAA,CACDmF,YAAY,GAAZ,SAAAA,aAAA,EAAmG;IAC/F,OAAO,IAAI,CAACtF,QAAQ,CAACuF,YAAY,CAAC,CAAC;EACvC,CAAC;EAAApF,MAAA,CAEKqF,MAAM,GAAZ,eAAAA,OAAA,EAA8B;IAC1B,IAAMhF,GAAG,GAAG,MAAM,IAAI,CAACX,SAAS,CAACY,UAAU;IAC3C,MAAMD,GAAG,CAACW,IAAI,CAACC,aAAa,CAAEC,EAAO,IAAK;MACtCA,EAAE,CAACoE,UAAU,CAAC,EAAE,EAAEnG,SAAS,CAAC;MAC5B,OAAOH,oBAAoB;IAC/B,CAAC,CAAC;IACF,
OAAO,IAAI,CAACuG,KAAK,CAAC,CAAC;EACvB,CAAC;EAAAvF,MAAA,CACKwF,OAAO,GAAb,eAAAA,QAAcC,kBAA0B,EAAoB;IACxD,IAAM;MACFC,WAAW;MACXC;IACJ,CAAC,GAAGC,OAAO,CAAC,cAAc,CAAC;IAC3B,IAAMC,eAAe,GAAG9G,GAAG,CAAC,CAAC,GAAG0G,kBAAkB;IAClD,IAAMpF,GAAG,GAAG,MAAM,IAAI,CAACX,SAAS,CAACY,UAAU;IAC3C,IAAMwF,KAAK,GAAGxH,aAAa;IAC3B,IAAMyH,SAAS,GAAGvH,wBAAwB,CAACsH,KAAK,CAAC;IACjD,IAAMnD,SAAS,GAAGtC,GAAG,CAACqC,OAAO,CAACqD,SAAS,CAAC;IACxC,IAAMC,gBAAgB,GAAGtH,iCAAiC,CACtD,IAAI,CAACe,MAAM,EACXqG,KAAK,EACL,CACI,IAAI;IACJ;AAChB;AACA;AACA;IACgB,CAAC,CAET,CAAC;IACD,IAAMG,gBAAgB,GAAGtH,iCAAiC,CACtD,IAAI,CAACc,MAAM,EACXqG,KAAK,EACL,CACI,IAAI,EACJD,eAAe,CAEvB,CAAC;IACD,IAAIK,eAAwB,GAAG,IAAI;IACnC,MAAM7F,GAAG,CAACW,IAAI,CAACC,aAAa,CAAC,MAAOC,EAAO,IAAK;MAC5C,IAAMiF,SAAS,GAAGrH,cAAc,CAAC,IAAI,CAACc,QAAQ,CAACuG,SAAS,CAAC;MACzD,IAAMrD,OAAO,GAAG5B,EAAE,CAACK,EAAE,CAACoB,SAAS,CAACI,EAAE,CAACtB,QAAQ,CAAC;MAC5C,IAAMH,MAAM,GAAGJ,EAAE,CAACK,EAAE,CAAClB,GAAG,CAACmB,IAAI,CAACC,QAAQ,CAAC;MACvC,IAAM2E,KAAK,GAAG,MAAMtD,OAAO,CAACuD,WAAW,CACnCX,WAAW,CAACY,gBAAgB,CAACN,gBAAgB,CAAC,EAC9CC,gBAAgB,EAChB;QACIM,KAAK,EAAEJ,SAAS,GAAG,CAAC;QAAE;QACtBK,aAAa,EAAEb,aAAa,CAACc;MACjC,CACJ,CAAC;MACD,IAAIL,KAAK,CAACtC,MAAM,GAAGqC,SAAS,EAAE;QAC1BD,eAAe,GAAG,KAAK;QACvBE,KAAK,CAACM,GAAG,CAAC,CAAC;MACf;MACA,IAAMC,MAAM,GAAGP,KAAK,CAACvF,GAAG,CAAEO,GAAa,IAAKA,GAAG,CAAC,CAAC,CAAC,CAAC;MACnD,IAAMwF,QAAqC,GAAG,MAAMjG,OAAO,CAACC,GAAG,CAAC+F,MAAM,CAAC9F,GAAG,CAAEyB,KAAa,IAAKhB,MAAM,CAACU,GAAG,CAACM,KAAK,CAAC,CAAC,CAAC;MAEjHE,MAAM,CACDC,MAAM,CAACpC,GAAG,CAACqC,OAAO,CAAC,CACnBN,OAAO,CAACyE,cAAc,IAAI;QACvB,IAAMC,UAAU,GAAG5F,EAAE,CAACK,EAAE,CAACsF,cAAc,CAAC9D,EAAE,CAACtB,QAAQ,CAAC;QACpDmF,QAAQ,CAACxE,OAAO,CAAC2E,OAAO,IAAI;UACxB,IAAMnE,WAAW,GAAGiE,cAAc,CAAChE,kBAAkB,CAACkE,OAAO,CAAC;UAC9DD,UAAU,CAAC1D,MAAM,CAACR,WAAW,CAAC;QAClC,CAAC,CAAC;MACN,CAAC,CAAC;MACN+D,MAAM,CAACvE,OAAO,CAAEN,EAAU,IAAKR,MAAM,CAAC8B,MAAM,CAACtB,EAAE,CAAC,CAAC;IACrD,CAAC,CAAC;IAEF,OAAOoE,eAAe;EAC1B,CAAC;EAAAlG,MAAA,CAEDgH,sBAAsB,GAAtB,SAAAA,uBAAA,EAAyE;IACrE,OAAO,IAAI7I,OAAO,CAAM,CA
AC,CAACiH,YAAY,CAAC,CAAC;EAC5C,CAAC;EAAApF,MAAA,CACDiH,4BAA4B,GAA5B,SAAAA,6BAA6BC,aAAyD,EAAiB;IACnG,OAAOlI,oBAAoB;EAC/B,CAAC;EAAAgB,MAAA,CAEKuF,KAAK,GAAX,eAAAA,MAAA,EAAc;IACV,IAAI,IAAI,CAAC4B,MAAM,EAAE;MACb,OAAO,IAAI,CAACA,MAAM;IACtB;IACA,IAAI,CAACA,MAAM,GAAG,CAAC,YAAY;MACvB,IAAI,CAACtH,QAAQ,CAACuH,QAAQ,CAAC,CAAC;MACxB,IAAM/G,GAAG,GAAG,MAAM,IAAI,CAACX,SAAS,CAACY,UAAU;MAC3C,MAAMD,GAAG,CAACW,IAAI,CAACuE,KAAK,CAAC,CAAC;;MAEtB;MACA;IACJ,CAAC,EAAE,CAAC;IACJ,OAAO,IAAI,CAAC4B,MAAM;EACtB,CAAC;EAAA,OAAA9H,6BAAA;AAAA;AAIL,OAAO,SAASgI,iCAAiCA,CAC7C/H,OAA8B,EAC9BgI,MAAgG,EAChG1H,QAAuC,EACU;EACjD,IAAME,WAAW,GAAG1B,2BAA2B,CAACkJ,MAAM,CAAC7H,MAAM,CAACM,UAAU,CAAC;EAEzE,IAAM;IACFwH,IAAI;IACJC,SAAS;IACTC;EACJ,CAAC,GAAG7B,OAAO,CAAC,cAAc,CAAC;EAE3B,IAAM8B,UAAU,GAAGH,IAAI,CAAC3H,QAAQ,CAAC+H,WAAW,CAAC;EAC7C,IAAMrH,UAAU,GAAG,CAAC,YAAY;IAC5B,IAAMsH,GAAG,GAAG,MAAMJ,SAAS,CAACK,YAAY,CAACH,UAAU,EAAE,MAAM,CAAC;IAE5D,IAAM1G,IAAI,GAAG0G,UAAU,CAClBnG,EAAE,CAACqG,GAAG,CAAC,CACPrG,EAAE,CAAC+F,MAAM,CAAC/H,YAAY,GAAG,GAAG,CAAC,CAC7BgC,EAAE,CAAC+F,MAAM,CAAC9H,cAAc,GAAG,GAAG,CAAC,CAC/B+B,EAAE,CAAC+F,MAAM,CAAC7H,MAAM,CAACqI,OAAO,GAAG,GAAG,CAAC;IACpC,IAAMtG,IAAqC,GAAGR,IAAI,CAC7CO,EAAE,CAAC,OAAO,CAAC,CACXwG,eAAe,CAACN,QAAQ,CAACO,MAAM,CAAC,CAAC;IAAA,CACjCC,iBAAiB,CAACR,QAAQ,CAACS,IAAI,CAAQ,CAAC,CAAC;;IAG9C,IAAMrE,MAAoH,GAAG7C,IAAI,CAC5HO,EAAE,CAAC,SAAS,CAAC,CACbwG,eAAe,CAACN,QAAQ,CAACO,MAAM,CAAC,CAChCC,iBAAiB,CAACR,QAAQ,CAACS,IAAI,CAAQ;IAE5C,IAAMvG,WAAwD,GAAGX,IAAI,CAChEO,EAAE,CAAC,cAAc,CAAC,CAClBwG,eAAe,CAACN,QAAQ,CAACO,MAAM,CAAC,CAChCC,iBAAiB,CAACR,QAAQ,CAACS,IAAI,CAAQ;IAG5C,IAAMC,QAAoE,GAAG,CAAC,CAAC;IAC/E,IAAMC,UAAU,GAAGd,MAAM,CAAC7H,MAAM,CAACiD,OAAO,GAAG4E,MAAM,CAAC7H,MAAM,CAACiD,OAAO,CAAC2F,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE;IAC9ED,UAAU,CAAC7F,IAAI,CAAC,CAACzC,WAAW,CAAC,CAAC;IAC9B,IAAMwI,eAAe,GAAGF,UAAU,CAACvH,GAAG,CAACiF,KAAK,IAAI;MAC5C,IAAMyC,OAAO,GAAGtJ,OAAO,CAAC6G,KAAK,CAAC;MAC9B,OAAOyC,OAAO;IAClB,CAAC,CAAC;IACF;IACAD,eAAe,CAAC/F,IAAI,CAAC,CACjB,WAAW,EACXzC,WAAW,CACd,CAAC;IACFwI,eAAe,CAAC/F,IAAI,CAACjE
,aAAa,CAAC;IACnCgK,eAAe,CAAClG,OAAO,CAACmG,OAAO,IAAI;MAC/B,IAAMxC,SAAS,GAAGvH,wBAAwB,CAAC+J,OAAO,CAAC;MACnD,IAAMC,OAAO,GAAGxH,IAAI,CAACO,EAAE,CAACwE,SAAS,GAAG,GAAG,CAAC,CACnCgC,eAAe,CAACN,QAAQ,CAACO,MAAM,CAAC,CAChCC,iBAAiB,CAACR,QAAQ,CAACO,MAAM,CAAC;MACvCG,QAAQ,CAACpC,SAAS,CAAC,GAAG;QAClBA,SAAS;QACThD,EAAE,EAAEyF,OAAO;QACX3F,kBAAkB,EAAEpE,uBAAuB,CAAC6I,MAAM,CAAC7H,MAAM,EAAE8I,OAAO,CAAC;QACnEzC,KAAK,EAAEyC;MACX,CAAC;IACL,CAAC,CAAC;IAEF,OAAO;MACHvH,IAAI;MACJQ,IAAI;MACJqC,MAAM;MACNlC,WAAW;MACXe,OAAO,EAAEyF;IACb,CAAC;EACL,CAAC,EAAE,CAAC;EAGJ,IAAMzI,SAAkD,GAAG;IACvDgI,UAAU;IACVpH,UAAU,EAAEA;EAChB,CAAC;EAED,IAAMmI,QAAQ,GAAG,IAAIpJ,6BAA6B,CAC9CC,OAAO,EACPgI,MAAM,CAAC/H,YAAY,EACnB+H,MAAM,CAAC9H,cAAc,EACrB8H,MAAM,CAAC7H,MAAM,EACbC,SAAS,EACT4H,MAAM,CAAC3H,OAAO,EACdC,QACJ,CAAC;EACD,OAAOe,OAAO,CAAC+H,OAAO,CAACD,QAAQ,CAAC;AACpC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-lokijs/index.js b/dist/esm/plugins/storage-lokijs/index.js deleted file mode 100644 index afd70ef3a2a..00000000000 --- a/dist/esm/plugins/storage-lokijs/index.js +++ /dev/null @@ -1,4 +0,0 @@ -export * from "./rx-storage-lokijs.js"; -export * from "./lokijs-helper.js"; -export * from "./rx-storage-instance-loki.js"; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-lokijs/index.js.map b/dist/esm/plugins/storage-lokijs/index.js.map deleted file mode 100644 index 002949f133f..00000000000 --- a/dist/esm/plugins/storage-lokijs/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":[],"sources":["../../../../src/plugins/storage-lokijs/index.ts"],"sourcesContent":["export * from './rx-storage-lokijs.ts';\nexport * from './lokijs-helper.ts';\nexport * from './rx-storage-instance-loki.ts';\n"],"mappings":"AAAA,cAAc,wBAAwB;AACtC,cAAc,oBAAoB;AAClC,cAAc,+BAA+B","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-lokijs/loki-save-queue.js 
b/dist/esm/plugins/storage-lokijs/loki-save-queue.js deleted file mode 100644 index 4e96462f452..00000000000 --- a/dist/esm/plugins/storage-lokijs/loki-save-queue.js +++ /dev/null @@ -1,84 +0,0 @@ -import { PROMISE_RESOLVE_VOID, requestIdlePromise } from "../utils/index.js"; - -/** - * The autosave feature of lokijs has strange behaviors - * and often runs a save in critical moments when other - * more important tasks are running. - * So instead we use a custom save queue that ensures we - * only run loki.saveDatabase() when nothing else is running. - */ -export var LokiSaveQueue = /*#__PURE__*/function () { - /** - * Ensures that we do not run multiple saves - * in parallel - */ - - // track amount of non-finished save calls in the queue. - - function LokiSaveQueue(lokiDatabase, databaseSettings) { - this.writesSinceLastRun = 0; - this.saveQueue = PROMISE_RESOLVE_VOID; - this.saveQueueC = 0; - this.lokiDatabase = lokiDatabase; - this.databaseSettings = databaseSettings; - } - var _proto = LokiSaveQueue.prototype; - _proto.addWrite = function addWrite() { - this.writesSinceLastRun = this.writesSinceLastRun + 1; - this.run(); - }; - _proto.run = function run() { - if ( - // no persistence adapter given, so we do not need to save - !this.databaseSettings.adapter || - // do not add more then two pending calls to the queue. - this.saveQueueC > 2) { - return this.saveQueue; - } - this.saveQueueC = this.saveQueueC + 1; - this.saveQueue = this.saveQueue.then(async () => { - /** - * Always wait until the JavaScript process is idle. - * This ensures that CPU blocking writes are finished - * before we proceed. - */ - await requestIdlePromise(); - - // no write happened since the last save call - if (this.writesSinceLastRun === 0) { - return; - } - - /** - * Because LokiJS is a in-memory database, - * we can just wait until the JavaScript process is idle - * via requestIdlePromise(). Then we know that nothing important - * is running at the moment. 
- */ - await requestIdlePromise().then(() => requestIdlePromise()); - if (this.writesSinceLastRun === 0) { - return; - } - var writeAmount = this.writesSinceLastRun; - this.writesSinceLastRun = 0; - return new Promise((res, rej) => { - this.lokiDatabase.saveDatabase(err => { - if (err) { - this.writesSinceLastRun = this.writesSinceLastRun + writeAmount; - rej(err); - } else { - if (this.databaseSettings.autosaveCallback) { - this.databaseSettings.autosaveCallback(); - } - res(); - } - }); - }); - }).catch(() => {}).then(() => { - this.saveQueueC = this.saveQueueC - 1; - }); - return this.saveQueue; - }; - return LokiSaveQueue; -}(); -//# sourceMappingURL=loki-save-queue.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-lokijs/loki-save-queue.js.map b/dist/esm/plugins/storage-lokijs/loki-save-queue.js.map deleted file mode 100644 index 48706acc2af..00000000000 --- a/dist/esm/plugins/storage-lokijs/loki-save-queue.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"loki-save-queue.js","names":["PROMISE_RESOLVE_VOID","requestIdlePromise","LokiSaveQueue","lokiDatabase","databaseSettings","writesSinceLastRun","saveQueue","saveQueueC","_proto","prototype","addWrite","run","adapter","then","writeAmount","Promise","res","rej","saveDatabase","err","autosaveCallback","catch"],"sources":["../../../../src/plugins/storage-lokijs/loki-save-queue.ts"],"sourcesContent":["import type { LokiDatabaseSettings } from '../../types/index.d.ts';\nimport {\n PROMISE_RESOLVE_VOID,\n requestIdlePromise\n} from '../utils/index.ts';\n\n/**\n * The autosave feature of lokijs has strange behaviors\n * and often runs a save in critical moments when other\n * more important tasks are running.\n * So instead we use a custom save queue that ensures we\n * only run loki.saveDatabase() when nothing else is running.\n */\nexport class LokiSaveQueue {\n public writesSinceLastRun: number = 0;\n\n /**\n * Ensures that we do not run multiple saves\n * in parallel\n */\n public 
saveQueue: Promise = PROMISE_RESOLVE_VOID;\n // track amount of non-finished save calls in the queue.\n public saveQueueC = 0;\n\n constructor(\n public readonly lokiDatabase: any,\n public readonly databaseSettings: LokiDatabaseSettings\n ) {\n\n }\n\n public addWrite() {\n this.writesSinceLastRun = this.writesSinceLastRun + 1;\n this.run();\n }\n\n public run() {\n if (\n // no persistence adapter given, so we do not need to save\n !this.databaseSettings.adapter ||\n // do not add more then two pending calls to the queue.\n this.saveQueueC > 2\n\n ) {\n return this.saveQueue;\n }\n\n this.saveQueueC = this.saveQueueC + 1;\n this.saveQueue = this.saveQueue\n .then(async () => {\n /**\n * Always wait until the JavaScript process is idle.\n * This ensures that CPU blocking writes are finished\n * before we proceed.\n */\n await requestIdlePromise();\n\n // no write happened since the last save call\n if (this.writesSinceLastRun === 0) {\n return;\n }\n\n /**\n * Because LokiJS is a in-memory database,\n * we can just wait until the JavaScript process is idle\n * via requestIdlePromise(). 
Then we know that nothing important\n * is running at the moment.\n */\n await requestIdlePromise().then(() => requestIdlePromise());\n\n if (this.writesSinceLastRun === 0) {\n return;\n }\n\n const writeAmount = this.writesSinceLastRun;\n this.writesSinceLastRun = 0;\n return new Promise((res, rej) => {\n this.lokiDatabase.saveDatabase((err: any) => {\n if (err) {\n this.writesSinceLastRun = this.writesSinceLastRun + writeAmount;\n rej(err);\n } else {\n if (this.databaseSettings.autosaveCallback) {\n this.databaseSettings.autosaveCallback();\n }\n res();\n }\n });\n });\n })\n .catch(() => { })\n .then(() => {\n this.saveQueueC = this.saveQueueC - 1;\n });\n return this.saveQueue;\n }\n}\n"],"mappings":"AACA,SACIA,oBAAoB,EACpBC,kBAAkB,QACf,mBAAmB;;AAE1B;AACA;AACA;AACA;AACA;AACA;AACA;AACA,WAAaC,aAAa;EAGtB;AACJ;AACA;AACA;;EAEI;;EAGA,SAAAA,cACoBC,YAAiB,EACjBC,gBAAsC,EACxD;IAAA,KAbKC,kBAAkB,GAAW,CAAC;IAAA,KAM9BC,SAAS,GAAkBN,oBAAoB;IAAA,KAE/CO,UAAU,GAAG,CAAC;IAAA,KAGDJ,YAAiB,GAAjBA,YAAiB;IAAA,KACjBC,gBAAsC,GAAtCA,gBAAsC;EAG1D;EAAC,IAAAI,MAAA,GAAAN,aAAA,CAAAO,SAAA;EAAAD,MAAA,CAEME,QAAQ,GAAf,SAAAA,SAAA,EAAkB;IACd,IAAI,CAACL,kBAAkB,GAAG,IAAI,CAACA,kBAAkB,GAAG,CAAC;IACrD,IAAI,CAACM,GAAG,CAAC,CAAC;EACd,CAAC;EAAAH,MAAA,CAEMG,GAAG,GAAV,SAAAA,IAAA,EAAa;IACT;IACI;IACA,CAAC,IAAI,CAACP,gBAAgB,CAACQ,OAAO;IAC9B;IACA,IAAI,CAACL,UAAU,GAAG,CAAC,EAErB;MACE,OAAO,IAAI,CAACD,SAAS;IACzB;IAEA,IAAI,CAACC,UAAU,GAAG,IAAI,CAACA,UAAU,GAAG,CAAC;IACrC,IAAI,CAACD,SAAS,GAAG,IAAI,CAACA,SAAS,CAC1BO,IAAI,CAAC,YAAY;MACd;AAChB;AACA;AACA;AACA;MACgB,MAAMZ,kBAAkB,CAAC,CAAC;;MAE1B;MACA,IAAI,IAAI,CAACI,kBAAkB,KAAK,CAAC,EAAE;QAC/B;MACJ;;MAEA;AAChB;AACA;AACA;AACA;AACA;MACgB,MAAMJ,kBAAkB,CAAC,CAAC,CAACY,IAAI,CAAC,MAAMZ,kBAAkB,CAAC,CAAC,CAAC;MAE3D,IAAI,IAAI,CAACI,kBAAkB,KAAK,CAAC,EAAE;QAC/B;MACJ;MAEA,IAAMS,WAAW,GAAG,IAAI,CAACT,kBAAkB;MAC3C,IAAI,CAACA,kBAAkB,GAAG,CAAC;MAC3B,OAAO,IAAIU,OAAO,CAAO,CAACC,GAAG,EAAEC,GAAG,KAAK;QACnC,IAAI,CAACd,YAAY,CAACe,YAAY,CAAEC,GAAQ,IAAK;UACzC,IAAIA,GAAG,EAAE;YACL,IAAI,CAACd,kBAAkB,
GAAG,IAAI,CAACA,kBAAkB,GAAGS,WAAW;YAC/DG,GAAG,CAACE,GAAG,CAAC;UACZ,CAAC,MAAM;YACH,IAAI,IAAI,CAACf,gBAAgB,CAACgB,gBAAgB,EAAE;cACxC,IAAI,CAAChB,gBAAgB,CAACgB,gBAAgB,CAAC,CAAC;YAC5C;YACAJ,GAAG,CAAC,CAAC;UACT;QACJ,CAAC,CAAC;MACN,CAAC,CAAC;IACN,CAAC,CAAC,CACDK,KAAK,CAAC,MAAM,CAAE,CAAC,CAAC,CAChBR,IAAI,CAAC,MAAM;MACR,IAAI,CAACN,UAAU,GAAG,IAAI,CAACA,UAAU,GAAG,CAAC;IACzC,CAAC,CAAC;IACN,OAAO,IAAI,CAACD,SAAS;EACzB,CAAC;EAAA,OAAAJ,aAAA;AAAA","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-lokijs/lokijs-helper.js b/dist/esm/plugins/storage-lokijs/lokijs-helper.js deleted file mode 100644 index 72340bb3a93..00000000000 --- a/dist/esm/plugins/storage-lokijs/lokijs-helper.js +++ /dev/null @@ -1,433 +0,0 @@ -import { createLokiLocalState } from "./rx-storage-instance-loki.js"; -import Loki from 'lokijs'; -import { add as unloadAdd } from 'unload'; -import { ensureNotFalsy, flatClone, getFromMapOrCreate, getProperty, promiseWait, randomCouchString } from "../utils/index.js"; -import { LokiSaveQueue } from "./loki-save-queue.js"; -import { newRxError } from "../../rx-error.js"; -import { getBroadcastChannelReference } from "../../rx-storage-multiinstance.js"; -import { getLeaderElectorByBroadcastChannel } from "../leader-election/index.js"; -import { overwritable } from "../../overwritable.js"; -export var CHANGES_COLLECTION_SUFFIX = '-rxdb-changes'; -export var LOKI_BROADCAST_CHANNEL_MESSAGE_TYPE = 'rxdb-lokijs-remote-request'; -export var LOKI_KEY_OBJECT_BROADCAST_CHANNEL_MESSAGE_TYPE = 'rxdb-lokijs-remote-request-key-object'; -export var RX_STORAGE_NAME_LOKIJS = 'lokijs'; - -/** - * Loki attaches a $loki property to all data - * which must be removed before returning the data back to RxDB. - */ -export function stripLokiKey(docData) { - if (!docData.$loki) { - return docData; - } - var cloned = flatClone(docData); - - /** - * In RxDB version 12.0.0, - * we introduced the _meta field that already contains the last write time. 
- * To be backwards compatible, we have to move the $lastWriteAt to the _meta field. - * TODO remove this in the next major version. - */ - if (cloned.$lastWriteAt) { - cloned._meta = { - lwt: cloned.$lastWriteAt - }; - delete cloned.$lastWriteAt; - } - delete cloned.$loki; - return cloned; -} - -/** - * Used to check in tests if all instances have been cleaned up. - */ -export var OPEN_LOKIJS_STORAGE_INSTANCES = new Set(); -export var LOKIJS_COLLECTION_DEFAULT_OPTIONS = { - disableChangesApi: true, - disableMeta: true, - disableDeltaChangesApi: true, - disableFreeze: true, - // TODO use 'immutable' like WatermelonDB does it - cloneMethod: 'shallow-assign', - clone: false, - transactional: false, - autoupdate: false -}; -var LOKI_DATABASE_STATE_BY_NAME = new Map(); -export function getLokiDatabase(databaseName, databaseSettings) { - return getFromMapOrCreate(LOKI_DATABASE_STATE_BY_NAME, databaseName, () => { - /** - * We assume that as soon as an adapter is passed, - * the database has to be persistent. - */ - var hasPersistence = !!databaseSettings.adapter; - var databaseState = (async () => { - var persistenceMethod = hasPersistence ? 'adapter' : 'memory'; - if (databaseSettings.persistenceMethod) { - persistenceMethod = databaseSettings.persistenceMethod; - } - var useSettings = Object.assign( - // defaults - { - autoload: hasPersistence, - persistenceMethod, - verbose: true - }, databaseSettings, - // overwrites - { - /** - * RxDB uses its custom load and save handling - * so we disable the LokiJS save/load handlers. - */ - autoload: false, - autosave: false, - throttledSaves: false - }); - var database = new Loki(databaseName + '.db', flatClone(useSettings)); - var lokiSaveQueue = new LokiSaveQueue(database, useSettings); - - /** - * Wait until all data is loaded from persistence adapter. 
- * Wrap the loading into the saveQueue to ensure that when many - * collections are created at the same time, the load-calls do not interfere - * with each other and cause error logs. - */ - if (hasPersistence) { - var loadDatabasePromise = new Promise((res, rej) => { - try { - database.loadDatabase({ - recursiveWait: false - }, err => { - if (useSettings.autoloadCallback) { - useSettings.autoloadCallback(err); - } - if (err) { - rej(err); - } else { - res(); - } - }); - } catch (err) { - rej(err); - } - }); - lokiSaveQueue.saveQueue = lokiSaveQueue.saveQueue.then(() => loadDatabasePromise); - await loadDatabasePromise; - } - - /** - * Autosave database on process end - */ - var unloads = []; - if (hasPersistence) { - unloads.push(unloadAdd(() => lokiSaveQueue.run())); - } - var state = { - database, - databaseSettings: useSettings, - saveQueue: lokiSaveQueue, - collections: {}, - unloads - }; - return state; - })(); - return databaseState; - }); -} -export async function closeLokiCollections(databaseName, collections) { - var databaseState = await LOKI_DATABASE_STATE_BY_NAME.get(databaseName); - if (!databaseState) { - // already closed - return; - } - await databaseState.saveQueue.run(); - collections.forEach(collection => { - var collectionName = collection.name; - delete databaseState.collections[collectionName]; - }); - if (Object.keys(databaseState.collections).length === 0) { - // all collections closed -> also close database - LOKI_DATABASE_STATE_BY_NAME.delete(databaseName); - databaseState.unloads.forEach(u => u.remove()); - await new Promise((res, rej) => { - databaseState.database.close(err => { - if (err) { - rej(err); - } else { - res(); - } - }); - }); - } -} - -/** - * This function is at lokijs-helper - * because we need it in multiple places. 
- */ -export function getLokiSortComparator(_schema, query) { - if (!query.sort) { - throw newRxError('SNH', { - query - }); - } - var sortOptions = query.sort; - var fun = (a, b) => { - var compareResult = 0; // 1 | -1 - sortOptions.find(sortPart => { - var fieldName = Object.keys(sortPart)[0]; - var direction = Object.values(sortPart)[0]; - var directionMultiplier = direction === 'asc' ? 1 : -1; - var valueA = getProperty(a, fieldName); - var valueB = getProperty(b, fieldName); - if (valueA === valueB) { - return false; - } else { - if (valueA > valueB) { - compareResult = 1 * directionMultiplier; - return true; - } else { - compareResult = -1 * directionMultiplier; - return true; - } - } - }); - - /** - * Two different objects should never have the same sort position. - * We ensure this by having the unique primaryKey in the sort params - * which is added by RxDB if not existing yet. - */ - if (!compareResult) { - throw newRxError('SNH', { - args: { - query, - a, - b - } - }); - } - return compareResult; - }; - return fun; -} -export function getLokiLeaderElector(databaseInstanceToken, broadcastChannelRefObject, databaseName) { - var broadcastChannel = getBroadcastChannelReference(RX_STORAGE_NAME_LOKIJS, databaseInstanceToken, databaseName, broadcastChannelRefObject); - var elector = getLeaderElectorByBroadcastChannel(broadcastChannel); - return elector; -} - -/** - * For multi-instance usage, we send requests to the RxStorage - * to the current leading instance over the BroadcastChannel. - */ -export async function requestRemoteInstance(instance, operation, params) { - var isRxStorageInstanceLoki = typeof instance.query === 'function'; - var messageType = isRxStorageInstanceLoki ? 
LOKI_BROADCAST_CHANNEL_MESSAGE_TYPE : LOKI_KEY_OBJECT_BROADCAST_CHANNEL_MESSAGE_TYPE; - var leaderElector = ensureNotFalsy(instance.internals.leaderElector); - await waitUntilHasLeader(leaderElector); - var broadcastChannel = leaderElector.broadcastChannel; - var whenDeathListener; - var leaderDeadPromise = new Promise(res => { - whenDeathListener = msg => { - if (msg.context === 'leader' && msg.action === 'death') { - res({ - retry: true - }); - } - }; - broadcastChannel.addEventListener('internal', whenDeathListener); - }); - var requestId = randomCouchString(12); - var responseListener; - var responsePromise = new Promise((res, _rej) => { - responseListener = msg => { - if (msg.type === messageType && msg.response === true && msg.requestId === requestId) { - if (msg.isError) { - res({ - retry: false, - error: msg.result - }); - } else { - res({ - retry: false, - result: msg.result - }); - } - } - }; - broadcastChannel.addEventListener('message', responseListener); - }); - - // send out the request to the other instance - broadcastChannel.postMessage({ - response: false, - type: messageType, - operation, - params, - requestId, - databaseName: instance.databaseName, - collectionName: instance.collectionName - }); - var timeout; - return Promise.race([leaderDeadPromise, responsePromise - // // comment in timeout to debug - // new Promise(res => { - // timeout = setTimeout(() => { - // res({ error: new Error('requestRemoteInstance() timeout errorored'), retry: false }); - // }, 500); - // }) - ]).then(firstResolved => { - if (timeout) { - clearTimeout(timeout); - } - - // clean up listeners - broadcastChannel.removeEventListener('message', responseListener); - broadcastChannel.removeEventListener('internal', whenDeathListener); - if (firstResolved.retry) { - /** - * The leader died while a remote request was running - * we re-run the whole operation. 
- * We cannot just re-run requestRemoteInstance() - * because the current instance might be the new leader now - * and then we have to use the local state instead of requesting the remote. - */ - return instance[operation](...params); - } else { - if (firstResolved.error) { - throw firstResolved.error; - } else { - return firstResolved.result; - } - } - }); -} - -/** - * Handles a request that came from a remote instance via requestRemoteInstance() - * Runs the requested operation over the local db instance and sends back the result. - */ -export async function handleRemoteRequest(instance, msg) { - if (msg.type === LOKI_BROADCAST_CHANNEL_MESSAGE_TYPE && msg.requestId && msg.databaseName === instance.databaseName && msg.collectionName === instance.collectionName && !msg.response) { - var operation = msg.operation; - var params = msg.params; - var result; - var isError = false; - try { - result = await instance[operation](...params); - } catch (err) { - console.dir(err); - isError = true; - result = err; - } - var response = { - response: true, - requestId: msg.requestId, - databaseName: instance.databaseName, - collectionName: instance.collectionName, - result, - isError, - type: msg.type - }; - ensureNotFalsy(instance.internals.leaderElector).broadcastChannel.postMessage(response); - } -} -export async function waitUntilHasLeader(leaderElector) { - leaderElector.awaitLeadership().catch(() => {}); - await promiseWait(0); - while (true) { - var has = await leaderElector.hasLeader(); - if (has || leaderElector.broadcastChannel.isClosed || leaderElector.isDead) { - return; - } - if (leaderElector.applyOnce) { - await leaderElector.applyOnce(); - } else { - /** - * Trigger applying for leadership - * but do not await it in case another - * instance becomes leader first. - */ - leaderElector.awaitLeadership().catch(() => {}); - } - await promiseWait(20); - } -} - -/** - * If the local state must be used, that one is returned. 
- * Returns false if a remote instance must be used. - */ -export async function mustUseLocalState(instance) { - if (instance.closed) { - /** - * If this happens, it means that RxDB made a call to an already closed storage instance. - * This must never happen because when RxDB closes a collection or database, - * all tasks must be cleared so that no more calls are made the the storage. - */ - throw new Error('already closed ' + JSON.stringify({ - instanceClosed: instance.closed, - databaseName: instance.databaseName, - collectionName: instance.collectionName - })); - } - if (instance.internals.localState) { - return instance.internals.localState; - } - var leaderElector = ensureNotFalsy(instance.internals.leaderElector); - await waitUntilHasLeader(leaderElector); - - /** - * It might already have a localState after the applying - * because another subtask also called mustUSeLocalState() - */ - if (instance.internals.localState) { - return instance.internals.localState; - } - if (leaderElector.isLeader && !instance.internals.localState) { - // own is leader, use local instance - instance.internals.localState = createLokiLocalState({ - databaseInstanceToken: instance.databaseInstanceToken, - databaseName: instance.databaseName, - collectionName: instance.collectionName, - options: instance.options, - schema: instance.schema, - multiInstance: instance.internals.leaderElector ? true : false, - devMode: overwritable.isDevMode() - }, instance.databaseSettings); - return ensureNotFalsy(instance.internals.localState); - } else { - // other is leader, send message to remote leading instance - return false; - } -} - -/** - * LokiJS does not understand the 'official' $regex operator, - * so we have to transform these back into RegExp objects. 
- * @recursive - */ -export function transformRegexToRegExp(selector) { - if (typeof selector !== 'object' || selector === null) { - return selector; - } - var keys = Object.keys(selector); - var ret = {}; - keys.forEach(key => { - var value = selector[key]; - if (key === '$options') { - return; - } - if (key === '$regex' && !(value instanceof RegExp)) { - var opts = selector['$options']; - ret[key] = new RegExp(value, opts); - } else if (Array.isArray(value)) { - ret[key] = value.map(item => transformRegexToRegExp(item)); - } else { - ret[key] = transformRegexToRegExp(value); - } - }); - return ret; -} -//# sourceMappingURL=lokijs-helper.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-lokijs/lokijs-helper.js.map b/dist/esm/plugins/storage-lokijs/lokijs-helper.js.map deleted file mode 100644 index 1c391a63479..00000000000 --- a/dist/esm/plugins/storage-lokijs/lokijs-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"lokijs-helper.js","names":["createLokiLocalState","Loki","add","unloadAdd","ensureNotFalsy","flatClone","getFromMapOrCreate","getProperty","promiseWait","randomCouchString","LokiSaveQueue","newRxError","getBroadcastChannelReference","getLeaderElectorByBroadcastChannel","overwritable","CHANGES_COLLECTION_SUFFIX","LOKI_BROADCAST_CHANNEL_MESSAGE_TYPE","LOKI_KEY_OBJECT_BROADCAST_CHANNEL_MESSAGE_TYPE","RX_STORAGE_NAME_LOKIJS","stripLokiKey","docData","$loki","cloned","$lastWriteAt","_meta","lwt","OPEN_LOKIJS_STORAGE_INSTANCES","Set","LOKIJS_COLLECTION_DEFAULT_OPTIONS","disableChangesApi","disableMeta","disableDeltaChangesApi","disableFreeze","cloneMethod","clone","transactional","autoupdate","LOKI_DATABASE_STATE_BY_NAME","Map","getLokiDatabase","databaseName","databaseSettings","hasPersistence","adapter","databaseState","persistenceMethod","useSettings","Object","assign","autoload","verbose","autosave","throttledSaves","database","lokiSaveQueue","loadDatabasePromise","Promise","res","rej","loadDatabase","recursiveWait","e
rr","autoloadCallback","saveQueue","then","unloads","push","run","state","collections","closeLokiCollections","get","forEach","collection","collectionName","name","keys","length","delete","u","remove","close","getLokiSortComparator","_schema","query","sort","sortOptions","fun","a","b","compareResult","find","sortPart","fieldName","direction","values","directionMultiplier","valueA","valueB","args","getLokiLeaderElector","databaseInstanceToken","broadcastChannelRefObject","broadcastChannel","elector","requestRemoteInstance","instance","operation","params","isRxStorageInstanceLoki","messageType","leaderElector","internals","waitUntilHasLeader","whenDeathListener","leaderDeadPromise","msg","context","action","retry","addEventListener","requestId","responseListener","responsePromise","_rej","type","response","isError","error","result","postMessage","timeout","race","firstResolved","clearTimeout","removeEventListener","handleRemoteRequest","console","dir","awaitLeadership","catch","has","hasLeader","isClosed","isDead","applyOnce","mustUseLocalState","closed","Error","JSON","stringify","instanceClosed","localState","isLeader","options","schema","multiInstance","devMode","isDevMode","transformRegexToRegExp","selector","ret","key","value","RegExp","opts","Array","isArray","map","item"],"sources":["../../../../src/plugins/storage-lokijs/lokijs-helper.ts"],"sourcesContent":["import {\n createLokiLocalState,\n RxStorageInstanceLoki\n} from './rx-storage-instance-loki.ts';\nimport Loki from 'lokijs';\nimport type {\n DeterministicSortComparator,\n FilledMangoQuery,\n LokiDatabaseSettings,\n LokiDatabaseState,\n LokiLocalDatabaseState,\n LokiRemoteResponseBroadcastMessage,\n MangoQuerySortDirection,\n MangoQuerySortPart,\n RxDocumentData,\n RxJsonSchema\n} from '../../types/index.d.ts';\nimport {\n add as unloadAdd,\n AddReturn\n} from 'unload';\nimport {\n ensureNotFalsy,\n flatClone,\n getFromMapOrCreate,\n getProperty,\n promiseWait,\n randomCouchString\n} from 
'../utils/index.ts';\nimport { LokiSaveQueue } from './loki-save-queue.ts';\nimport { newRxError } from '../../rx-error.ts';\nimport {\n LeaderElector,\n OnMessageHandler\n} from 'broadcast-channel';\nimport { getBroadcastChannelReference } from '../../rx-storage-multiinstance.ts';\nimport { getLeaderElectorByBroadcastChannel } from '../leader-election/index.ts';\nimport { overwritable } from '../../overwritable.ts';\n\nexport const CHANGES_COLLECTION_SUFFIX = '-rxdb-changes';\nexport const LOKI_BROADCAST_CHANNEL_MESSAGE_TYPE = 'rxdb-lokijs-remote-request';\nexport const LOKI_KEY_OBJECT_BROADCAST_CHANNEL_MESSAGE_TYPE = 'rxdb-lokijs-remote-request-key-object';\nexport const RX_STORAGE_NAME_LOKIJS = 'lokijs';\n\n/**\n * Loki attaches a $loki property to all data\n * which must be removed before returning the data back to RxDB.\n */\nexport function stripLokiKey(docData: RxDocumentData & { $loki?: number; }): T {\n if (!docData.$loki) {\n return docData;\n }\n const cloned = flatClone(docData);\n\n /**\n * In RxDB version 12.0.0,\n * we introduced the _meta field that already contains the last write time.\n * To be backwards compatible, we have to move the $lastWriteAt to the _meta field.\n * TODO remove this in the next major version.\n */\n if ((cloned as any).$lastWriteAt) {\n cloned._meta = {\n lwt: (cloned as any).$lastWriteAt\n };\n delete (cloned as any).$lastWriteAt;\n }\n\n delete cloned.$loki;\n return cloned;\n}\n\n/**\n * Used to check in tests if all instances have been cleaned up.\n */\nexport const OPEN_LOKIJS_STORAGE_INSTANCES: Set> = new Set();\n\n\nexport const LOKIJS_COLLECTION_DEFAULT_OPTIONS: Partial = {\n disableChangesApi: true,\n disableMeta: true,\n disableDeltaChangesApi: true,\n disableFreeze: true,\n // TODO use 'immutable' like WatermelonDB does it\n cloneMethod: 'shallow-assign',\n clone: false,\n transactional: false,\n autoupdate: false\n};\n\nconst LOKI_DATABASE_STATE_BY_NAME: Map> = new Map();\nexport function getLokiDatabase(\n 
databaseName: string,\n databaseSettings: LokiDatabaseSettings\n): Promise {\n\n\n return getFromMapOrCreate(\n LOKI_DATABASE_STATE_BY_NAME,\n databaseName,\n () => {\n /**\n * We assume that as soon as an adapter is passed,\n * the database has to be persistent.\n */\n const hasPersistence: boolean = !!databaseSettings.adapter;\n const databaseState = (async () => {\n let persistenceMethod = hasPersistence ? 'adapter' : 'memory';\n if (databaseSettings.persistenceMethod) {\n persistenceMethod = databaseSettings.persistenceMethod;\n }\n const useSettings = Object.assign(\n // defaults\n {\n autoload: hasPersistence,\n persistenceMethod,\n verbose: true\n },\n databaseSettings,\n // overwrites\n {\n /**\n * RxDB uses its custom load and save handling\n * so we disable the LokiJS save/load handlers.\n */\n autoload: false,\n autosave: false,\n throttledSaves: false\n }\n );\n const database = new Loki(\n databaseName + '.db',\n flatClone(useSettings)\n );\n const lokiSaveQueue = new LokiSaveQueue(\n database,\n useSettings\n );\n\n /**\n * Wait until all data is loaded from persistence adapter.\n * Wrap the loading into the saveQueue to ensure that when many\n * collections are created at the same time, the load-calls do not interfere\n * with each other and cause error logs.\n */\n if (hasPersistence) {\n const loadDatabasePromise = new Promise((res, rej) => {\n try {\n database.loadDatabase({\n recursiveWait: false\n }, (err) => {\n if (useSettings.autoloadCallback) {\n useSettings.autoloadCallback(err);\n }\n if (err) {\n rej(err);\n } else {\n res();\n }\n });\n } catch (err) {\n rej(err);\n }\n });\n lokiSaveQueue.saveQueue = lokiSaveQueue.saveQueue.then(() => loadDatabasePromise);\n await loadDatabasePromise;\n }\n\n /**\n * Autosave database on process end\n */\n const unloads: AddReturn[] = [];\n if (hasPersistence) {\n unloads.push(\n unloadAdd(() => lokiSaveQueue.run())\n );\n }\n\n const state: LokiDatabaseState = {\n database,\n databaseSettings: 
useSettings,\n saveQueue: lokiSaveQueue,\n collections: {},\n unloads\n };\n\n return state;\n })();\n return databaseState;\n }\n );\n}\n\nexport async function closeLokiCollections(\n databaseName: string,\n collections: any[]\n) {\n const databaseState = await LOKI_DATABASE_STATE_BY_NAME.get(databaseName);\n if (!databaseState) {\n // already closed\n return;\n }\n await databaseState.saveQueue.run();\n collections.forEach(collection => {\n const collectionName = collection.name;\n delete databaseState.collections[collectionName];\n });\n if (Object.keys(databaseState.collections).length === 0) {\n // all collections closed -> also close database\n LOKI_DATABASE_STATE_BY_NAME.delete(databaseName);\n databaseState.unloads.forEach(u => u.remove());\n await new Promise((res, rej) => {\n databaseState.database.close((err: any) => {\n if (err) {\n rej(err);\n } else {\n res();\n }\n });\n });\n }\n}\n\n/**\n * This function is at lokijs-helper\n * because we need it in multiple places.\n */\nexport function getLokiSortComparator(\n _schema: RxJsonSchema>,\n query: FilledMangoQuery\n): DeterministicSortComparator {\n if (!query.sort) {\n throw newRxError('SNH', { query });\n }\n const sortOptions: MangoQuerySortPart[] = query.sort;\n\n const fun: DeterministicSortComparator = (a: RxDocType, b: RxDocType) => {\n let compareResult: number = 0; // 1 | -1\n sortOptions.find(sortPart => {\n const fieldName: string = Object.keys(sortPart)[0];\n const direction: MangoQuerySortDirection = Object.values(sortPart)[0];\n const directionMultiplier = direction === 'asc' ? 
1 : -1;\n const valueA: any = getProperty(a as any, fieldName);\n const valueB: any = getProperty(b as any, fieldName);\n if (valueA === valueB) {\n return false;\n } else {\n if (valueA > valueB) {\n compareResult = 1 * directionMultiplier;\n return true;\n } else {\n compareResult = -1 * directionMultiplier;\n return true;\n }\n }\n });\n\n /**\n * Two different objects should never have the same sort position.\n * We ensure this by having the unique primaryKey in the sort params\n * which is added by RxDB if not existing yet.\n */\n if (!compareResult) {\n throw newRxError('SNH', { args: { query, a, b } });\n }\n\n return compareResult as any;\n };\n return fun;\n}\n\nexport function getLokiLeaderElector(\n databaseInstanceToken: string,\n broadcastChannelRefObject: any,\n databaseName: string\n): LeaderElector {\n const broadcastChannel = getBroadcastChannelReference(\n RX_STORAGE_NAME_LOKIJS,\n databaseInstanceToken,\n databaseName,\n broadcastChannelRefObject\n );\n const elector = getLeaderElectorByBroadcastChannel(broadcastChannel);\n return elector;\n}\n\n/**\n * For multi-instance usage, we send requests to the RxStorage\n * to the current leading instance over the BroadcastChannel.\n */\nexport async function requestRemoteInstance(\n instance: RxStorageInstanceLoki,\n operation: string,\n params: any[]\n): Promise {\n const isRxStorageInstanceLoki = typeof (instance as any).query === 'function';\n const messageType = isRxStorageInstanceLoki ? 
LOKI_BROADCAST_CHANNEL_MESSAGE_TYPE : LOKI_KEY_OBJECT_BROADCAST_CHANNEL_MESSAGE_TYPE;\n\n const leaderElector = ensureNotFalsy(instance.internals.leaderElector);\n await waitUntilHasLeader(leaderElector);\n const broadcastChannel = leaderElector.broadcastChannel;\n\n type WinningPromise = {\n retry: boolean;\n result?: any;\n error?: any;\n };\n\n let whenDeathListener: OnMessageHandler;\n const leaderDeadPromise = new Promise(res => {\n whenDeathListener = (msg: any) => {\n if (msg.context === 'leader' && msg.action === 'death') {\n res({\n retry: true\n });\n }\n };\n broadcastChannel.addEventListener('internal', whenDeathListener);\n });\n const requestId = randomCouchString(12);\n let responseListener: OnMessageHandler;\n const responsePromise = new Promise((res, _rej) => {\n responseListener = (msg: any) => {\n if (\n msg.type === messageType &&\n msg.response === true &&\n msg.requestId === requestId\n ) {\n if (msg.isError) {\n res({\n retry: false,\n error: msg.result\n });\n } else {\n res({\n retry: false,\n result: msg.result\n });\n }\n }\n };\n broadcastChannel.addEventListener('message', responseListener);\n });\n\n // send out the request to the other instance\n broadcastChannel.postMessage({\n response: false,\n type: messageType,\n operation,\n params,\n requestId,\n databaseName: instance.databaseName,\n collectionName: instance.collectionName\n });\n let timeout: ReturnType;\n return Promise.race([\n leaderDeadPromise,\n responsePromise,\n // // comment in timeout to debug\n // new Promise(res => {\n // timeout = setTimeout(() => {\n // res({ error: new Error('requestRemoteInstance() timeout errorored'), retry: false });\n // }, 500);\n // })\n\n ]).then(firstResolved => {\n if (timeout) {\n clearTimeout(timeout);\n }\n\n // clean up listeners\n broadcastChannel.removeEventListener('message', responseListener);\n broadcastChannel.removeEventListener('internal', whenDeathListener);\n\n if (firstResolved.retry) {\n /**\n * The leader died while a 
remote request was running\n * we re-run the whole operation.\n * We cannot just re-run requestRemoteInstance()\n * because the current instance might be the new leader now\n * and then we have to use the local state instead of requesting the remote.\n */\n return (instance as any)[operation](...params);\n } else {\n if (firstResolved.error) {\n throw firstResolved.error;\n } else {\n return firstResolved.result;\n }\n }\n });\n}\n\n/**\n * Handles a request that came from a remote instance via requestRemoteInstance()\n * Runs the requested operation over the local db instance and sends back the result.\n */\nexport async function handleRemoteRequest(\n instance: RxStorageInstanceLoki,\n msg: any\n) {\n if (\n msg.type === LOKI_BROADCAST_CHANNEL_MESSAGE_TYPE &&\n msg.requestId &&\n msg.databaseName === instance.databaseName &&\n msg.collectionName === instance.collectionName &&\n !msg.response\n ) {\n const operation = (msg as any).operation;\n const params = (msg as any).params;\n let result: any;\n let isError = false;\n try {\n result = await (instance as any)[operation](...params);\n } catch (err) {\n console.dir(err);\n isError = true;\n result = err;\n }\n const response: LokiRemoteResponseBroadcastMessage = {\n response: true,\n requestId: msg.requestId,\n databaseName: instance.databaseName,\n collectionName: instance.collectionName,\n result,\n isError,\n type: msg.type\n };\n ensureNotFalsy(instance.internals.leaderElector).broadcastChannel.postMessage(response);\n }\n}\n\nexport async function waitUntilHasLeader(leaderElector: LeaderElector) {\n leaderElector.awaitLeadership().catch(() => { });\n await promiseWait(0);\n while (true) {\n const has = await leaderElector.hasLeader();\n if (\n has ||\n leaderElector.broadcastChannel.isClosed ||\n leaderElector.isDead\n ) {\n return;\n }\n\n if (leaderElector.applyOnce) {\n await leaderElector.applyOnce();\n } else {\n /**\n * Trigger applying for leadership\n * but do not await it in case another\n * 
instance becomes leader first.\n */\n leaderElector.awaitLeadership().catch(() => { });\n }\n await promiseWait(20);\n }\n}\n\n/**\n * If the local state must be used, that one is returned.\n * Returns false if a remote instance must be used.\n */\nexport async function mustUseLocalState(\n instance: RxStorageInstanceLoki\n): Promise {\n if (instance.closed) {\n /**\n * If this happens, it means that RxDB made a call to an already closed storage instance.\n * This must never happen because when RxDB closes a collection or database,\n * all tasks must be cleared so that no more calls are made the the storage.\n */\n throw new Error('already closed ' + JSON.stringify(\n {\n instanceClosed: instance.closed,\n databaseName: instance.databaseName,\n collectionName: instance.collectionName\n }\n ));\n }\n\n\n if (instance.internals.localState) {\n return instance.internals.localState;\n }\n const leaderElector = ensureNotFalsy(instance.internals.leaderElector);\n await waitUntilHasLeader(leaderElector);\n\n /**\n * It might already have a localState after the applying\n * because another subtask also called mustUSeLocalState()\n */\n if (instance.internals.localState) {\n return instance.internals.localState;\n }\n\n if (\n leaderElector.isLeader &&\n !instance.internals.localState\n ) {\n // own is leader, use local instance\n instance.internals.localState = createLokiLocalState({\n databaseInstanceToken: instance.databaseInstanceToken,\n databaseName: instance.databaseName,\n collectionName: instance.collectionName,\n options: instance.options,\n schema: (instance as RxStorageInstanceLoki).schema,\n multiInstance: instance.internals.leaderElector ? 
true : false,\n devMode: overwritable.isDevMode()\n }, instance.databaseSettings);\n return ensureNotFalsy(instance.internals.localState);\n } else {\n // other is leader, send message to remote leading instance\n return false;\n }\n}\n\n\n/**\n * LokiJS does not understand the 'official' $regex operator,\n * so we have to transform these back into RegExp objects.\n * @recursive\n */\nexport function transformRegexToRegExp(selector: any) {\n if (typeof selector !== 'object' || selector === null) {\n return selector;\n }\n\n const keys = Object.keys(selector);\n const ret: any = {};\n keys.forEach(key => {\n const value: any = selector[key];\n if (key === '$options') {\n return;\n }\n if (\n key === '$regex' &&\n !(value instanceof RegExp)\n ) {\n const opts = selector['$options'];\n ret[key] = new RegExp(value, opts);\n } else if (Array.isArray(value)) {\n ret[key] = value.map(item => transformRegexToRegExp(item));\n } else {\n ret[key] = transformRegexToRegExp(value);\n }\n });\n return 
ret;\n}\n"],"mappings":"AAAA,SACIA,oBAAoB,QAEjB,+BAA+B;AACtC,OAAOC,IAAI,MAAM,QAAQ;AAazB,SACIC,GAAG,IAAIC,SAAS,QAEb,QAAQ;AACf,SACIC,cAAc,EACdC,SAAS,EACTC,kBAAkB,EAClBC,WAAW,EACXC,WAAW,EACXC,iBAAiB,QACd,mBAAmB;AAC1B,SAASC,aAAa,QAAQ,sBAAsB;AACpD,SAASC,UAAU,QAAQ,mBAAmB;AAK9C,SAASC,4BAA4B,QAAQ,mCAAmC;AAChF,SAASC,kCAAkC,QAAQ,6BAA6B;AAChF,SAASC,YAAY,QAAQ,uBAAuB;AAEpD,OAAO,IAAMC,yBAAyB,GAAG,eAAe;AACxD,OAAO,IAAMC,mCAAmC,GAAG,4BAA4B;AAC/E,OAAO,IAAMC,8CAA8C,GAAG,uCAAuC;AACrG,OAAO,IAAMC,sBAAsB,GAAG,QAAQ;;AAE9C;AACA;AACA;AACA;AACA,OAAO,SAASC,YAAYA,CAAIC,OAAgD,EAAK;EACjF,IAAI,CAACA,OAAO,CAACC,KAAK,EAAE;IAChB,OAAOD,OAAO;EAClB;EACA,IAAME,MAAM,GAAGjB,SAAS,CAACe,OAAO,CAAC;;EAEjC;AACJ;AACA;AACA;AACA;AACA;EACI,IAAKE,MAAM,CAASC,YAAY,EAAE;IAC9BD,MAAM,CAACE,KAAK,GAAG;MACXC,GAAG,EAAGH,MAAM,CAASC;IACzB,CAAC;IACD,OAAQD,MAAM,CAASC,YAAY;EACvC;EAEA,OAAOD,MAAM,CAACD,KAAK;EACnB,OAAOC,MAAM;AACjB;;AAEA;AACA;AACA;AACA,OAAO,IAAMI,6BAA8D,GAAG,IAAIC,GAAG,CAAC,CAAC;AAGvF,OAAO,IAAMC,iCAA+C,GAAG;EAC3DC,iBAAiB,EAAE,IAAI;EACvBC,WAAW,EAAE,IAAI;EACjBC,sBAAsB,EAAE,IAAI;EAC5BC,aAAa,EAAE,IAAI;EACnB;EACAC,WAAW,EAAE,gBAAgB;EAC7BC,KAAK,EAAE,KAAK;EACZC,aAAa,EAAE,KAAK;EACpBC,UAAU,EAAE;AAChB,CAAC;AAED,IAAMC,2BAAoE,GAAG,IAAIC,GAAG,CAAC,CAAC;AACtF,OAAO,SAASC,eAAeA,CAC3BC,YAAoB,EACpBC,gBAAsC,EACZ;EAG1B,OAAOnC,kBAAkB,CACrB+B,2BAA2B,EAC3BG,YAAY,EACZ,MAAM;IACF;AACZ;AACA;AACA;IACY,IAAME,cAAuB,GAAG,CAAC,CAACD,gBAAgB,CAACE,OAAO;IAC1D,IAAMC,aAAa,GAAG,CAAC,YAAY;MAC/B,IAAIC,iBAAiB,GAAGH,cAAc,GAAG,SAAS,GAAG,QAAQ;MAC7D,IAAID,gBAAgB,CAACI,iBAAiB,EAAE;QACpCA,iBAAiB,GAAGJ,gBAAgB,CAACI,iBAAiB;MAC1D;MACA,IAAMC,WAAW,GAAGC,MAAM,CAACC,MAAM;MAC7B;MACA;QACIC,QAAQ,EAAEP,cAAc;QACxBG,iBAAiB;QACjBK,OAAO,EAAE;MACb,CAAC,EACDT,gBAAgB;MAChB;MACA;QACI;AACxB;AACA;AACA;QACwBQ,QAAQ,EAAE,KAAK;QACfE,QAAQ,EAAE,KAAK;QACfC,cAAc,EAAE;MACpB,CACJ,CAAC;MACD,IAAMC,QAAQ,GAAG,IAAIpD,IAAI,CACrBuC,YAAY,GAAG,KAAK,EACpBnC,SAAS,CAACyC,WAAW,CACzB,CAAC;MACD,IAAMQ,aAAa,GAAG,IAAI5C,aAAa,CACnC2C,QAAQ,EACRP,WACJ,CAAC;;MAED;AAChB;AACA;AACA;AACA;AACA;MACgB,IAAIJ,cAAc,EAAE;QAChB
,IAAMa,mBAAmB,GAAG,IAAIC,OAAO,CAAO,CAACC,GAAG,EAAEC,GAAG,KAAK;UACxD,IAAI;YACAL,QAAQ,CAACM,YAAY,CAAC;cAClBC,aAAa,EAAE;YACnB,CAAC,EAAGC,GAAG,IAAK;cACR,IAAIf,WAAW,CAACgB,gBAAgB,EAAE;gBAC9BhB,WAAW,CAACgB,gBAAgB,CAACD,GAAG,CAAC;cACrC;cACA,IAAIA,GAAG,EAAE;gBACLH,GAAG,CAACG,GAAG,CAAC;cACZ,CAAC,MAAM;gBACHJ,GAAG,CAAC,CAAC;cACT;YACJ,CAAC,CAAC;UACN,CAAC,CAAC,OAAOI,GAAG,EAAE;YACVH,GAAG,CAACG,GAAG,CAAC;UACZ;QACJ,CAAC,CAAC;QACFP,aAAa,CAACS,SAAS,GAAGT,aAAa,CAACS,SAAS,CAACC,IAAI,CAAC,MAAMT,mBAAmB,CAAC;QACjF,MAAMA,mBAAmB;MAC7B;;MAEA;AAChB;AACA;MACgB,IAAMU,OAAoB,GAAG,EAAE;MAC/B,IAAIvB,cAAc,EAAE;QAChBuB,OAAO,CAACC,IAAI,CACR/D,SAAS,CAAC,MAAMmD,aAAa,CAACa,GAAG,CAAC,CAAC,CACvC,CAAC;MACL;MAEA,IAAMC,KAAwB,GAAG;QAC7Bf,QAAQ;QACRZ,gBAAgB,EAAEK,WAAW;QAC7BiB,SAAS,EAAET,aAAa;QACxBe,WAAW,EAAE,CAAC,CAAC;QACfJ;MACJ,CAAC;MAED,OAAOG,KAAK;IAChB,CAAC,EAAE,CAAC;IACJ,OAAOxB,aAAa;EACxB,CACJ,CAAC;AACL;AAEA,OAAO,eAAe0B,oBAAoBA,CACtC9B,YAAoB,EACpB6B,WAAkB,EACpB;EACE,IAAMzB,aAAa,GAAG,MAAMP,2BAA2B,CAACkC,GAAG,CAAC/B,YAAY,CAAC;EACzE,IAAI,CAACI,aAAa,EAAE;IAChB;IACA;EACJ;EACA,MAAMA,aAAa,CAACmB,SAAS,CAACI,GAAG,CAAC,CAAC;EACnCE,WAAW,CAACG,OAAO,CAACC,UAAU,IAAI;IAC9B,IAAMC,cAAc,GAAGD,UAAU,CAACE,IAAI;IACtC,OAAO/B,aAAa,CAACyB,WAAW,CAACK,cAAc,CAAC;EACpD,CAAC,CAAC;EACF,IAAI3B,MAAM,CAAC6B,IAAI,CAAChC,aAAa,CAACyB,WAAW,CAAC,CAACQ,MAAM,KAAK,CAAC,EAAE;IACrD;IACAxC,2BAA2B,CAACyC,MAAM,CAACtC,YAAY,CAAC;IAChDI,aAAa,CAACqB,OAAO,CAACO,OAAO,CAACO,CAAC,IAAIA,CAAC,CAACC,MAAM,CAAC,CAAC,CAAC;IAC9C,MAAM,IAAIxB,OAAO,CAAO,CAACC,GAAG,EAAEC,GAAG,KAAK;MAClCd,aAAa,CAACS,QAAQ,CAAC4B,KAAK,CAAEpB,GAAQ,IAAK;QACvC,IAAIA,GAAG,EAAE;UACLH,GAAG,CAACG,GAAG,CAAC;QACZ,CAAC,MAAM;UACHJ,GAAG,CAAC,CAAC;QACT;MACJ,CAAC,CAAC;IACN,CAAC,CAAC;EACN;AACJ;;AAEA;AACA;AACA;AACA;AACA,OAAO,SAASyB,qBAAqBA,CACjCC,OAAgD,EAChDC,KAAkC,EACI;EACtC,IAAI,CAACA,KAAK,CAACC,IAAI,EAAE;IACb,MAAM1E,UAAU,CAAC,KAAK,EAAE;MAAEyE;IAAM,CAAC,CAAC;EACtC;EACA,IAAME,WAA4C,GAAGF,KAAK,CAACC,IAAI;EAE/D,IAAME,GAA2C,GAAGA,CAACC,CAAY,EAAEC,CAAY,KAAK;IAChF,IAAIC,aAAqB,GAAG,CAAC,CAAC,CAAC;IAC/BJ,WAAW,CAA
CK,IAAI,CAACC,QAAQ,IAAI;MACzB,IAAMC,SAAiB,GAAG9C,MAAM,CAAC6B,IAAI,CAACgB,QAAQ,CAAC,CAAC,CAAC,CAAC;MAClD,IAAME,SAAkC,GAAG/C,MAAM,CAACgD,MAAM,CAACH,QAAQ,CAAC,CAAC,CAAC,CAAC;MACrE,IAAMI,mBAAmB,GAAGF,SAAS,KAAK,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC;MACxD,IAAMG,MAAW,GAAG1F,WAAW,CAACiF,CAAC,EAASK,SAAS,CAAC;MACpD,IAAMK,MAAW,GAAG3F,WAAW,CAACkF,CAAC,EAASI,SAAS,CAAC;MACpD,IAAII,MAAM,KAAKC,MAAM,EAAE;QACnB,OAAO,KAAK;MAChB,CAAC,MAAM;QACH,IAAID,MAAM,GAAGC,MAAM,EAAE;UACjBR,aAAa,GAAG,CAAC,GAAGM,mBAAmB;UACvC,OAAO,IAAI;QACf,CAAC,MAAM;UACHN,aAAa,GAAG,CAAC,CAAC,GAAGM,mBAAmB;UACxC,OAAO,IAAI;QACf;MACJ;IACJ,CAAC,CAAC;;IAEF;AACR;AACA;AACA;AACA;IACQ,IAAI,CAACN,aAAa,EAAE;MAChB,MAAM/E,UAAU,CAAC,KAAK,EAAE;QAAEwF,IAAI,EAAE;UAAEf,KAAK;UAAEI,CAAC;UAAEC;QAAE;MAAE,CAAC,CAAC;IACtD;IAEA,OAAOC,aAAa;EACxB,CAAC;EACD,OAAOH,GAAG;AACd;AAEA,OAAO,SAASa,oBAAoBA,CAChCC,qBAA6B,EAC7BC,yBAA8B,EAC9B9D,YAAoB,EACP;EACb,IAAM+D,gBAAgB,GAAG3F,4BAA4B,CACjDM,sBAAsB,EACtBmF,qBAAqB,EACrB7D,YAAY,EACZ8D,yBACJ,CAAC;EACD,IAAME,OAAO,GAAG3F,kCAAkC,CAAC0F,gBAAgB,CAAC;EACpE,OAAOC,OAAO;AAClB;;AAEA;AACA;AACA;AACA;AACA,OAAO,eAAeC,qBAAqBA,CACvCC,QAAoC,EACpCC,SAAiB,EACjBC,MAAa,EACO;EACpB,IAAMC,uBAAuB,GAAG,OAAQH,QAAQ,CAAStB,KAAK,KAAK,UAAU;EAC7E,IAAM0B,WAAW,GAAGD,uBAAuB,GAAG7F,mCAAmC,GAAGC,8CAA8C;EAElI,IAAM8F,aAAa,GAAG3G,cAAc,CAACsG,QAAQ,CAACM,SAAS,CAACD,aAAa,CAAC;EACtE,MAAME,kBAAkB,CAACF,aAAa,CAAC;EACvC,IAAMR,gBAAgB,GAAGQ,aAAa,CAACR,gBAAgB;EAQvD,IAAIW,iBAAwC;EAC5C,IAAMC,iBAAiB,GAAG,IAAI3D,OAAO,CAAiBC,GAAG,IAAI;IACzDyD,iBAAiB,GAAIE,GAAQ,IAAK;MAC9B,IAAIA,GAAG,CAACC,OAAO,KAAK,QAAQ,IAAID,GAAG,CAACE,MAAM,KAAK,OAAO,EAAE;QACpD7D,GAAG,CAAC;UACA8D,KAAK,EAAE;QACX,CAAC,CAAC;MACN;IACJ,CAAC;IACDhB,gBAAgB,CAACiB,gBAAgB,CAAC,UAAU,EAAEN,iBAAiB,CAAC;EACpE,CAAC,CAAC;EACF,IAAMO,SAAS,GAAGhH,iBAAiB,CAAC,EAAE,CAAC;EACvC,IAAIiH,gBAAuC;EAC3C,IAAMC,eAAe,GAAG,IAAInE,OAAO,CAAiB,CAACC,GAAG,EAAEmE,IAAI,KAAK;IAC/DF,gBAAgB,GAAIN,GAAQ,IAAK;MAC7B,IACIA,GAAG,CAACS,IAAI,KAAKf,WAAW,IACxBM,GAAG,CAACU,QAAQ,KAAK,IAAI,IACrBV,GAAG,CAACK,SAAS,KAAKA,SAAS,EAC7B;QACE,IAAIL,GAAG,CAACW,OAAO,
EAAE;UACbtE,GAAG,CAAC;YACA8D,KAAK,EAAE,KAAK;YACZS,KAAK,EAAEZ,GAAG,CAACa;UACf,CAAC,CAAC;QACN,CAAC,MAAM;UACHxE,GAAG,CAAC;YACA8D,KAAK,EAAE,KAAK;YACZU,MAAM,EAAEb,GAAG,CAACa;UAChB,CAAC,CAAC;QACN;MACJ;IACJ,CAAC;IACD1B,gBAAgB,CAACiB,gBAAgB,CAAC,SAAS,EAAEE,gBAAgB,CAAC;EAClE,CAAC,CAAC;;EAEF;EACAnB,gBAAgB,CAAC2B,WAAW,CAAC;IACzBJ,QAAQ,EAAE,KAAK;IACfD,IAAI,EAAEf,WAAW;IACjBH,SAAS;IACTC,MAAM;IACNa,SAAS;IACTjF,YAAY,EAAEkE,QAAQ,CAAClE,YAAY;IACnCkC,cAAc,EAAEgC,QAAQ,CAAChC;EAC7B,CAAC,CAAC;EACF,IAAIyD,OAAsC;EAC1C,OAAO3E,OAAO,CAAC4E,IAAI,CAAC,CAChBjB,iBAAiB,EACjBQ;EACA;EACA;EACA;EACA;EACA;EACA;EAAA,CAEH,CAAC,CAAC3D,IAAI,CAACqE,aAAa,IAAI;IACrB,IAAIF,OAAO,EAAE;MACTG,YAAY,CAACH,OAAO,CAAC;IACzB;;IAEA;IACA5B,gBAAgB,CAACgC,mBAAmB,CAAC,SAAS,EAAEb,gBAAgB,CAAC;IACjEnB,gBAAgB,CAACgC,mBAAmB,CAAC,UAAU,EAAErB,iBAAiB,CAAC;IAEnE,IAAImB,aAAa,CAACd,KAAK,EAAE;MACrB;AACZ;AACA;AACA;AACA;AACA;AACA;MACY,OAAQb,QAAQ,CAASC,SAAS,CAAC,CAAC,GAAGC,MAAM,CAAC;IAClD,CAAC,MAAM;MACH,IAAIyB,aAAa,CAACL,KAAK,EAAE;QACrB,MAAMK,aAAa,CAACL,KAAK;MAC7B,CAAC,MAAM;QACH,OAAOK,aAAa,CAACJ,MAAM;MAC/B;IACJ;EACJ,CAAC,CAAC;AACN;;AAEA;AACA;AACA;AACA;AACA,OAAO,eAAeO,mBAAmBA,CACrC9B,QAAoC,EACpCU,GAAQ,EACV;EACE,IACIA,GAAG,CAACS,IAAI,KAAK7G,mCAAmC,IAChDoG,GAAG,CAACK,SAAS,IACbL,GAAG,CAAC5E,YAAY,KAAKkE,QAAQ,CAAClE,YAAY,IAC1C4E,GAAG,CAAC1C,cAAc,KAAKgC,QAAQ,CAAChC,cAAc,IAC9C,CAAC0C,GAAG,CAACU,QAAQ,EACf;IACE,IAAMnB,SAAS,GAAIS,GAAG,CAAST,SAAS;IACxC,IAAMC,MAAM,GAAIQ,GAAG,CAASR,MAAM;IAClC,IAAIqB,MAAW;IACf,IAAIF,OAAO,GAAG,KAAK;IACnB,IAAI;MACAE,MAAM,GAAG,MAAOvB,QAAQ,CAASC,SAAS,CAAC,CAAC,GAAGC,MAAM,CAAC;IAC1D,CAAC,CAAC,OAAO/C,GAAG,EAAE;MACV4E,OAAO,CAACC,GAAG,CAAC7E,GAAG,CAAC;MAChBkE,OAAO,GAAG,IAAI;MACdE,MAAM,GAAGpE,GAAG;IAChB;IACA,IAAMiE,QAA4C,GAAG;MACjDA,QAAQ,EAAE,IAAI;MACdL,SAAS,EAAEL,GAAG,CAACK,SAAS;MACxBjF,YAAY,EAAEkE,QAAQ,CAAClE,YAAY;MACnCkC,cAAc,EAAEgC,QAAQ,CAAChC,cAAc;MACvCuD,MAAM;MACNF,OAAO;MACPF,IAAI,EAAET,GAAG,CAACS;IACd,CAAC;IACDzH,cAAc,CAACsG,QAAQ,CAACM,SAAS,CAACD,aAAa,CAAC,CAACR,gBAAgB,CAAC2B,WAAW,CAACJ,QAAQ,CAAC;EAC3F;AACJ;AAEA,OAAO,eAAeb
,kBAAkBA,CAACF,aAA4B,EAAE;EACnEA,aAAa,CAAC4B,eAAe,CAAC,CAAC,CAACC,KAAK,CAAC,MAAM,CAAE,CAAC,CAAC;EAChD,MAAMpI,WAAW,CAAC,CAAC,CAAC;EACpB,OAAO,IAAI,EAAE;IACT,IAAMqI,GAAG,GAAG,MAAM9B,aAAa,CAAC+B,SAAS,CAAC,CAAC;IAC3C,IACID,GAAG,IACH9B,aAAa,CAACR,gBAAgB,CAACwC,QAAQ,IACvChC,aAAa,CAACiC,MAAM,EACtB;MACE;IACJ;IAEA,IAAIjC,aAAa,CAACkC,SAAS,EAAE;MACzB,MAAMlC,aAAa,CAACkC,SAAS,CAAC,CAAC;IACnC,CAAC,MAAM;MACH;AACZ;AACA;AACA;AACA;MACYlC,aAAa,CAAC4B,eAAe,CAAC,CAAC,CAACC,KAAK,CAAC,MAAM,CAAE,CAAC,CAAC;IACpD;IACA,MAAMpI,WAAW,CAAC,EAAE,CAAC;EACzB;AACJ;;AAEA;AACA;AACA;AACA;AACA,OAAO,eAAe0I,iBAAiBA,CACnCxC,QAAoC,EACG;EACvC,IAAIA,QAAQ,CAACyC,MAAM,EAAE;IACjB;AACR;AACA;AACA;AACA;IACQ,MAAM,IAAIC,KAAK,CAAC,iBAAiB,GAAGC,IAAI,CAACC,SAAS,CAC9C;MACIC,cAAc,EAAE7C,QAAQ,CAACyC,MAAM;MAC/B3G,YAAY,EAAEkE,QAAQ,CAAClE,YAAY;MACnCkC,cAAc,EAAEgC,QAAQ,CAAChC;IAC7B,CACJ,CAAC,CAAC;EACN;EAGA,IAAIgC,QAAQ,CAACM,SAAS,CAACwC,UAAU,EAAE;IAC/B,OAAO9C,QAAQ,CAACM,SAAS,CAACwC,UAAU;EACxC;EACA,IAAMzC,aAAa,GAAG3G,cAAc,CAACsG,QAAQ,CAACM,SAAS,CAACD,aAAa,CAAC;EACtE,MAAME,kBAAkB,CAACF,aAAa,CAAC;;EAEvC;AACJ;AACA;AACA;EACI,IAAIL,QAAQ,CAACM,SAAS,CAACwC,UAAU,EAAE;IAC/B,OAAO9C,QAAQ,CAACM,SAAS,CAACwC,UAAU;EACxC;EAEA,IACIzC,aAAa,CAAC0C,QAAQ,IACtB,CAAC/C,QAAQ,CAACM,SAAS,CAACwC,UAAU,EAChC;IACE;IACA9C,QAAQ,CAACM,SAAS,CAACwC,UAAU,GAAGxJ,oBAAoB,CAAM;MACtDqG,qBAAqB,EAAEK,QAAQ,CAACL,qBAAqB;MACrD7D,YAAY,EAAEkE,QAAQ,CAAClE,YAAY;MACnCkC,cAAc,EAAEgC,QAAQ,CAAChC,cAAc;MACvCgF,OAAO,EAAEhD,QAAQ,CAACgD,OAAO;MACzBC,MAAM,EAAGjD,QAAQ,CAAgCiD,MAAM;MACvDC,aAAa,EAAElD,QAAQ,CAACM,SAAS,CAACD,aAAa,GAAG,IAAI,GAAG,KAAK;MAC9D8C,OAAO,EAAE/I,YAAY,CAACgJ,SAAS,CAAC;IACpC,CAAC,EAAEpD,QAAQ,CAACjE,gBAAgB,CAAC;IAC7B,OAAOrC,cAAc,CAACsG,QAAQ,CAACM,SAAS,CAACwC,UAAU,CAAC;EACxD,CAAC,MAAM;IACH;IACA,OAAO,KAAK;EAChB;AACJ;;AAGA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASO,sBAAsBA,CAACC,QAAa,EAAE;EAClD,IAAI,OAAOA,QAAQ,KAAK,QAAQ,IAAIA,QAAQ,KAAK,IAAI,EAAE;IACnD,OAAOA,QAAQ;EACnB;EAEA,IAAMpF,IAAI,GAAG7B,MAAM,CAAC6B,IAAI,CAACoF,QAAQ,CAAC;EAClC,IAAMC,GAAQ,GAAG,CAAC,CAAC;EACnBrF,IAAI,CAACJ,OAAO,CAA
C0F,GAAG,IAAI;IAChB,IAAMC,KAAU,GAAGH,QAAQ,CAACE,GAAG,CAAC;IAChC,IAAIA,GAAG,KAAK,UAAU,EAAE;MACpB;IACJ;IACA,IACIA,GAAG,KAAK,QAAQ,IAChB,EAAEC,KAAK,YAAYC,MAAM,CAAC,EAC5B;MACE,IAAMC,IAAI,GAAGL,QAAQ,CAAC,UAAU,CAAC;MACjCC,GAAG,CAACC,GAAG,CAAC,GAAG,IAAIE,MAAM,CAACD,KAAK,EAAEE,IAAI,CAAC;IACtC,CAAC,MAAM,IAAIC,KAAK,CAACC,OAAO,CAACJ,KAAK,CAAC,EAAE;MAC7BF,GAAG,CAACC,GAAG,CAAC,GAAGC,KAAK,CAACK,GAAG,CAACC,IAAI,IAAIV,sBAAsB,CAACU,IAAI,CAAC,CAAC;IAC9D,CAAC,MAAM;MACHR,GAAG,CAACC,GAAG,CAAC,GAAGH,sBAAsB,CAACI,KAAK,CAAC;IAC5C;EACJ,CAAC,CAAC;EACF,OAAOF,GAAG;AACd","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-lokijs/rx-storage-instance-loki.js b/dist/esm/plugins/storage-lokijs/rx-storage-instance-loki.js deleted file mode 100644 index efab21cece9..00000000000 --- a/dist/esm/plugins/storage-lokijs/rx-storage-instance-loki.js +++ /dev/null @@ -1,317 +0,0 @@ -import { Subject } from 'rxjs'; -import { flatClone, now, ensureNotFalsy, isMaybeReadonlyArray, getFromMapOrThrow, hasDeepProperty, RXDB_UTILS_GLOBAL, defaultHashSha256, PREMIUM_FLAG_HASH } from "../utils/index.js"; -import { newRxError } from "../../rx-error.js"; -import { closeLokiCollections, getLokiDatabase, OPEN_LOKIJS_STORAGE_INSTANCES, LOKIJS_COLLECTION_DEFAULT_OPTIONS, stripLokiKey, getLokiSortComparator, getLokiLeaderElector, requestRemoteInstance, mustUseLocalState, handleRemoteRequest, RX_STORAGE_NAME_LOKIJS, transformRegexToRegExp } from "./lokijs-helper.js"; -import { getPrimaryFieldOfPrimaryKey } from "../../rx-schema-helper.js"; -import { categorizeBulkWriteRows } from "../../rx-storage-helper.js"; -import { addRxStorageMultiInstanceSupport, removeBroadcastChannelReference } from "../../rx-storage-multiinstance.js"; -import { getQueryMatcher } from "../../rx-query-helper.js"; -var instanceId = now(); -var shownNonPremiumLog = false; -export var RxStorageInstanceLoki = /*#__PURE__*/function () { - function RxStorageInstanceLoki(databaseInstanceToken, storage, databaseName, 
collectionName, schema, internals, options, databaseSettings) { - this.changes$ = new Subject(); - this.instanceId = instanceId++; - this.databaseInstanceToken = databaseInstanceToken; - this.storage = storage; - this.databaseName = databaseName; - this.collectionName = collectionName; - this.schema = schema; - this.internals = internals; - this.options = options; - this.databaseSettings = databaseSettings; - this.primaryPath = getPrimaryFieldOfPrimaryKey(this.schema.primaryKey); - OPEN_LOKIJS_STORAGE_INSTANCES.add(this); - if (this.internals.leaderElector) { - /** - * To run handleRemoteRequest(), - * the instance will call its own methods. - * But these methods could have already been swapped out by a RxStorageWrapper - * so we must store the original methods here and use them instead. - */ - var copiedSelf = { - bulkWrite: this.bulkWrite.bind(this), - changeStream: this.changeStream.bind(this), - cleanup: this.cleanup.bind(this), - close: this.close.bind(this), - query: this.query.bind(this), - count: this.count.bind(this), - findDocumentsById: this.findDocumentsById.bind(this), - collectionName: this.collectionName, - databaseName: this.databaseName, - conflictResultionTasks: this.conflictResultionTasks.bind(this), - getAttachmentData: this.getAttachmentData.bind(this), - internals: this.internals, - options: this.options, - remove: this.remove.bind(this), - resolveConflictResultionTask: this.resolveConflictResultionTask.bind(this), - schema: this.schema - }; - this.internals.leaderElector.awaitLeadership().then(() => { - // this instance is leader now, so it has to reply to queries from other instances - ensureNotFalsy(this.internals.leaderElector).broadcastChannel.addEventListener('message', msg => handleRemoteRequest(copiedSelf, msg)); - }).catch(() => {}); - } - } - var _proto = RxStorageInstanceLoki.prototype; - _proto.bulkWrite = async function bulkWrite(documentWrites, context) { - if (!shownNonPremiumLog && (!RXDB_UTILS_GLOBAL.premium || typeof 
RXDB_UTILS_GLOBAL.premium !== 'string' || (await defaultHashSha256(RXDB_UTILS_GLOBAL.premium)) !== PREMIUM_FLAG_HASH)) { - console.warn(['-------------- RxDB Open Core RxStorage -------------------------------', 'You are using the free LokiJS based RxStorage implementation from RxDB https://rxdb.info/rx-storage-lokijs.html?console=loki ', 'While this is a great option, we want to let you know that there are faster storage solutions available in our premium plugins.', 'For professional users and production environments, we highly recommend considering these premium options to enhance performance and reliability.', ' https://rxdb.info/premium?console=loki ', 'If you already purchased premium access you can disable this log by calling the setPremiumFlag() function from rxdb-premium/plugins/shared.', '---------------------------------------------------------------------'].join('\n')); - shownNonPremiumLog = true; - } else { - shownNonPremiumLog = true; - } - if (documentWrites.length === 0) { - throw newRxError('P2', { - args: { - documentWrites - } - }); - } - var localState = await mustUseLocalState(this); - if (!localState) { - return requestRemoteInstance(this, 'bulkWrite', [documentWrites]); - } - var ret = { - success: [], - error: [] - }; - var docsInDb = new Map(); - var docsInDbWithLokiKey = new Map(); - documentWrites.forEach(writeRow => { - var id = writeRow.document[this.primaryPath]; - var documentInDb = localState.collection.by(this.primaryPath, id); - if (documentInDb) { - docsInDbWithLokiKey.set(id, documentInDb); - docsInDb.set(id, stripLokiKey(documentInDb)); - } - }); - var categorized = categorizeBulkWriteRows(this, this.primaryPath, docsInDb, documentWrites, context); - ret.error = categorized.errors; - categorized.bulkInsertDocs.forEach(writeRow => { - localState.collection.insert(flatClone(writeRow.document)); - ret.success.push(writeRow.document); - }); - categorized.bulkUpdateDocs.forEach(writeRow => { - var docId = 
writeRow.document[this.primaryPath]; - var documentInDbWithLokiKey = getFromMapOrThrow(docsInDbWithLokiKey, docId); - var writeDoc = Object.assign({}, writeRow.document, { - $loki: documentInDbWithLokiKey.$loki - }); - localState.collection.update(writeDoc); - ret.success.push(writeRow.document); - }); - localState.databaseState.saveQueue.addWrite(); - if (categorized.eventBulk.events.length > 0) { - var lastState = ensureNotFalsy(categorized.newestRow).document; - categorized.eventBulk.checkpoint = { - id: lastState[this.primaryPath], - lwt: lastState._meta.lwt - }; - categorized.eventBulk.endTime = now(); - this.changes$.next(categorized.eventBulk); - } - return ret; - }; - _proto.findDocumentsById = async function findDocumentsById(ids, deleted) { - var localState = await mustUseLocalState(this); - if (!localState) { - return requestRemoteInstance(this, 'findDocumentsById', [ids, deleted]); - } - var ret = []; - ids.forEach(id => { - var documentInDb = localState.collection.by(this.primaryPath, id); - if (documentInDb && (!documentInDb._deleted || deleted)) { - ret.push(stripLokiKey(documentInDb)); - } - }); - return ret; - }; - _proto.query = async function query(preparedQueryOriginal) { - var localState = await mustUseLocalState(this); - if (!localState) { - return requestRemoteInstance(this, 'query', [preparedQueryOriginal]); - } - var preparedQuery = ensureNotFalsy(preparedQueryOriginal.query); - if (preparedQuery.selector) { - preparedQuery = flatClone(preparedQuery); - preparedQuery.selector = transformRegexToRegExp(preparedQuery.selector); - } - var query = preparedQueryOriginal.query; - var skip = query.skip ? query.skip : 0; - var limit = query.limit ? query.limit : Infinity; - var skipPlusLimit = skip + limit; - - /** - * LokiJS is not able to give correct results for some - * operators, so we have to check all documents in that case - * and laster apply skip and limit manually. 
- * @link https://github.com/pubkey/rxdb/issues/5320 - */ - var mustRunMatcher = false; - if (hasDeepProperty(preparedQuery.selector, '$in')) { - mustRunMatcher = true; - } - var lokiQuery = localState.collection.chain().find(mustRunMatcher ? {} : preparedQuery.selector); - if (preparedQuery.sort) { - lokiQuery = lokiQuery.sort(getLokiSortComparator(this.schema, preparedQuery)); - } - var foundDocuments = lokiQuery.data().map(lokiDoc => stripLokiKey(lokiDoc)); - - /** - * LokiJS returned wrong results on some queries - * with complex indexes. Therefore we run the query-match - * over all result docs to patch this bug. - * TODO create an issue at the LokiJS repository. - */ - var queryMatcher = getQueryMatcher(this.schema, preparedQuery); - foundDocuments = foundDocuments.filter(d => queryMatcher(d)); - - // always apply offset and limit like this, because - // sylvieQuery.offset() and sylvieQuery.limit() results were inconsistent - foundDocuments = foundDocuments.slice(skip, skipPlusLimit); - return { - documents: foundDocuments - }; - }; - _proto.count = async function count(preparedQuery) { - var result = await this.query(preparedQuery); - return { - count: result.documents.length, - mode: 'fast' - }; - }; - _proto.getAttachmentData = function getAttachmentData(_documentId, _attachmentId, _digest) { - throw new Error('Attachments are not implemented in the lokijs RxStorage. 
Make a pull request.'); - }; - _proto.changeStream = function changeStream() { - return this.changes$.asObservable(); - }; - _proto.cleanup = async function cleanup(minimumDeletedTime) { - var localState = await mustUseLocalState(this); - if (!localState) { - return requestRemoteInstance(this, 'cleanup', [minimumDeletedTime]); - } - var deleteAmountPerRun = 10; - var maxDeletionTime = now() - minimumDeletedTime; - var query = localState.collection.chain().find({ - _deleted: true, - '_meta.lwt': { - $lt: maxDeletionTime - } - }).limit(deleteAmountPerRun); - var foundDocuments = query.data(); - if (foundDocuments.length > 0) { - localState.collection.remove(foundDocuments); - localState.databaseState.saveQueue.addWrite(); - } - return foundDocuments.length !== deleteAmountPerRun; - }; - _proto.close = async function close() { - if (this.closed) { - return this.closed; - } - this.closed = (async () => { - this.changes$.complete(); - OPEN_LOKIJS_STORAGE_INSTANCES.delete(this); - if (this.internals.localState) { - var localState = await this.internals.localState; - var dbState = await getLokiDatabase(this.databaseName, this.databaseSettings); - await dbState.saveQueue.run(); - await closeLokiCollections(this.databaseName, [localState.collection]); - } - })(); - return this.closed; - }; - _proto.remove = async function remove() { - var localState = await mustUseLocalState(this); - if (!localState) { - return requestRemoteInstance(this, 'remove', []); - } - localState.databaseState.database.removeCollection(localState.collection.name); - await localState.databaseState.saveQueue.run(); - return this.close(); - }; - _proto.conflictResultionTasks = function conflictResultionTasks() { - return new Subject(); - }; - _proto.resolveConflictResultionTask = async function resolveConflictResultionTask(_taskSolution) {}; - return RxStorageInstanceLoki; -}(); -export async function createLokiLocalState(params, databaseSettings) { - if (!params.options) { - params.options = {}; - } - 
var databaseState = await getLokiDatabase(params.databaseName, databaseSettings); - - /** - * Construct loki indexes from RxJsonSchema indexes. - * TODO what about compound indexes? Are they possible in lokijs? - */ - var indices = []; - if (params.schema.indexes) { - params.schema.indexes.forEach(idx => { - if (!isMaybeReadonlyArray(idx)) { - indices.push(idx); - } - }); - } - /** - * LokiJS has no concept of custom primary key, they use a number-id that is generated. - * To be able to query fast by primary key, we always add an index to the primary. - */ - var primaryKey = getPrimaryFieldOfPrimaryKey(params.schema.primaryKey); - indices.push(primaryKey); - var lokiCollectionName = params.collectionName + '-' + params.schema.version; - var collectionOptions = Object.assign({}, lokiCollectionName, { - indices: indices, - unique: [primaryKey] - }, LOKIJS_COLLECTION_DEFAULT_OPTIONS); - var collection = databaseState.database.addCollection(lokiCollectionName, collectionOptions); - databaseState.collections[params.collectionName] = collection; - var ret = { - databaseState, - collection - }; - return ret; -} -export async function createLokiStorageInstance(storage, params, databaseSettings) { - var internals = {}; - var broadcastChannelRefObject = {}; - if (params.multiInstance) { - var leaderElector = getLokiLeaderElector(params.databaseInstanceToken, broadcastChannelRefObject, params.databaseName); - internals.leaderElector = leaderElector; - } else { - // optimisation shortcut, directly create db is non multi instance. - internals.localState = createLokiLocalState(params, databaseSettings); - await internals.localState; - } - var instance = new RxStorageInstanceLoki(params.databaseInstanceToken, storage, params.databaseName, params.collectionName, params.schema, internals, params.options, databaseSettings); - await addRxStorageMultiInstanceSupport(RX_STORAGE_NAME_LOKIJS, params, instance, internals.leaderElector ? 
internals.leaderElector.broadcastChannel : undefined); - if (params.multiInstance) { - /** - * Clean up the broadcast-channel reference on close() - */ - var closeBefore = instance.close.bind(instance); - instance.close = function () { - removeBroadcastChannelReference(params.databaseInstanceToken, broadcastChannelRefObject); - return closeBefore(); - }; - var removeBefore = instance.remove.bind(instance); - instance.remove = function () { - removeBroadcastChannelReference(params.databaseInstanceToken, broadcastChannelRefObject); - return removeBefore(); - }; - - /** - * Directly create the localState when/if the db becomes leader. - */ - ensureNotFalsy(internals.leaderElector).awaitLeadership().then(() => { - if (!instance.closed) { - mustUseLocalState(instance); - } - }); - } - return instance; -} -//# sourceMappingURL=rx-storage-instance-loki.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-lokijs/rx-storage-instance-loki.js.map b/dist/esm/plugins/storage-lokijs/rx-storage-instance-loki.js.map deleted file mode 100644 index db8e861fa51..00000000000 --- a/dist/esm/plugins/storage-lokijs/rx-storage-instance-loki.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"rx-storage-instance-loki.js","names":["Subject","flatClone","now","ensureNotFalsy","isMaybeReadonlyArray","getFromMapOrThrow","hasDeepProperty","RXDB_UTILS_GLOBAL","defaultHashSha256","PREMIUM_FLAG_HASH","newRxError","closeLokiCollections","getLokiDatabase","OPEN_LOKIJS_STORAGE_INSTANCES","LOKIJS_COLLECTION_DEFAULT_OPTIONS","stripLokiKey","getLokiSortComparator","getLokiLeaderElector","requestRemoteInstance","mustUseLocalState","handleRemoteRequest","RX_STORAGE_NAME_LOKIJS","transformRegexToRegExp","getPrimaryFieldOfPrimaryKey","categorizeBulkWriteRows","addRxStorageMultiInstanceSupport","removeBroadcastChannelReference","getQueryMatcher","instanceId","shownNonPremiumLog","RxStorageInstanceLoki","databaseInstanceToken","storage","databaseName","collectionName","schema","internals","options","databaseSettings","changes$","primaryPath","primaryKey","add","leaderElector","copiedSelf","bulkWrite","bind","changeStream","cleanup","close","query","count","findDocumentsById","conflictResultionTasks","getAttachmentData","remove","resolveConflictResultionTask","awaitLeadership","then","broadcastChannel","addEventListener","msg","catch","_proto","prototype","documentWrites","context","premium","console","warn","join","length","args","localState","ret","success","error","docsInDb","Map","docsInDbWithLokiKey","forEach","writeRow","id","document","documentInDb","collection","by","set","categorized","errors","bulkInsertDocs","insert","push","bulkUpdateDocs","docId","documentInDbWithLokiKey","writeDoc","Object","assign","$loki","update","databaseState","saveQueue","addWrite","eventBulk","events","lastState","newestRow","checkpoint","lwt","_meta","endTime","next","ids","deleted","_deleted","preparedQueryOriginal","preparedQuery","selector","skip","limit","Infinity","skipPlusLimit","mustRunMatcher","lokiQuery","chain","find","sort","foundDocuments","data","map","lokiDoc","queryMatcher","filter","d","slice","documents","result","mode","_documentId","_attachmentId
","_digest","Error","asObservable","minimumDeletedTime","deleteAmountPerRun","maxDeletionTime","$lt","closed","complete","delete","dbState","run","database","removeCollection","name","_taskSolution","createLokiLocalState","params","indices","indexes","idx","lokiCollectionName","version","collectionOptions","unique","addCollection","collections","createLokiStorageInstance","broadcastChannelRefObject","multiInstance","instance","undefined","closeBefore","removeBefore"],"sources":["../../../../src/plugins/storage-lokijs/rx-storage-instance-loki.ts"],"sourcesContent":["import {\n Subject,\n Observable\n} from 'rxjs';\nimport {\n flatClone,\n now,\n ensureNotFalsy,\n isMaybeReadonlyArray,\n getFromMapOrThrow,\n hasDeepProperty,\n RXDB_UTILS_GLOBAL,\n defaultHashSha256,\n PREMIUM_FLAG_HASH\n} from '../utils/index.ts';\nimport { newRxError } from '../../rx-error.ts';\nimport type {\n RxStorageInstance,\n LokiSettings,\n RxStorageChangeEvent,\n RxDocumentData,\n BulkWriteRow,\n RxStorageBulkWriteResponse,\n RxStorageQueryResult,\n RxJsonSchema,\n MangoQuery,\n LokiStorageInternals,\n RxStorageInstanceCreationParams,\n LokiDatabaseSettings,\n LokiLocalDatabaseState,\n EventBulk,\n StringKeys,\n DeepReadonly,\n RxConflictResultionTask,\n RxConflictResultionTaskSolution,\n RxStorageDefaultCheckpoint,\n RxStorageCountResult,\n PreparedQuery\n} from '../../types/index.d.ts';\nimport {\n closeLokiCollections,\n getLokiDatabase,\n OPEN_LOKIJS_STORAGE_INSTANCES,\n LOKIJS_COLLECTION_DEFAULT_OPTIONS,\n stripLokiKey,\n getLokiSortComparator,\n getLokiLeaderElector,\n requestRemoteInstance,\n mustUseLocalState,\n handleRemoteRequest,\n RX_STORAGE_NAME_LOKIJS,\n transformRegexToRegExp\n} from './lokijs-helper.ts';\nimport type { RxStorageLoki } from './rx-storage-lokijs.ts';\nimport { getPrimaryFieldOfPrimaryKey } from '../../rx-schema-helper.ts';\nimport { categorizeBulkWriteRows } from '../../rx-storage-helper.ts';\nimport {\n addRxStorageMultiInstanceSupport,\n 
removeBroadcastChannelReference\n} from '../../rx-storage-multiinstance.ts';\nimport { getQueryMatcher } from '../../rx-query-helper.ts';\n\nlet instanceId = now();\nlet shownNonPremiumLog = false;\n\nexport class RxStorageInstanceLoki implements RxStorageInstance<\n RxDocType,\n LokiStorageInternals,\n LokiSettings,\n RxStorageDefaultCheckpoint\n> {\n\n public readonly primaryPath: StringKeys>;\n private changes$: Subject>, RxStorageDefaultCheckpoint>> = new Subject();\n public readonly instanceId = instanceId++;\n\n public closed?: Promise;\n\n constructor(\n public readonly databaseInstanceToken: string,\n public readonly storage: RxStorageLoki,\n public readonly databaseName: string,\n public readonly collectionName: string,\n public readonly schema: Readonly>>,\n public readonly internals: LokiStorageInternals,\n public readonly options: Readonly,\n public readonly databaseSettings: LokiDatabaseSettings\n ) {\n this.primaryPath = getPrimaryFieldOfPrimaryKey(this.schema.primaryKey);\n OPEN_LOKIJS_STORAGE_INSTANCES.add(this);\n if (this.internals.leaderElector) {\n\n\n /**\n * To run handleRemoteRequest(),\n * the instance will call its own methods.\n * But these methods could have already been swapped out by a RxStorageWrapper\n * so we must store the original methods here and use them instead.\n */\n const copiedSelf: RxStorageInstance = {\n bulkWrite: this.bulkWrite.bind(this),\n changeStream: this.changeStream.bind(this),\n cleanup: this.cleanup.bind(this),\n close: this.close.bind(this),\n query: this.query.bind(this),\n count: this.count.bind(this),\n findDocumentsById: this.findDocumentsById.bind(this),\n collectionName: this.collectionName,\n databaseName: this.databaseName,\n conflictResultionTasks: this.conflictResultionTasks.bind(this),\n getAttachmentData: this.getAttachmentData.bind(this),\n internals: this.internals,\n options: this.options,\n remove: this.remove.bind(this),\n resolveConflictResultionTask: 
this.resolveConflictResultionTask.bind(this),\n schema: this.schema\n };\n\n this.internals.leaderElector.awaitLeadership().then(() => {\n // this instance is leader now, so it has to reply to queries from other instances\n ensureNotFalsy(this.internals.leaderElector).broadcastChannel\n .addEventListener('message', (msg) => handleRemoteRequest(copiedSelf as any, msg));\n }).catch(() => { });\n }\n }\n\n async bulkWrite(\n documentWrites: BulkWriteRow[],\n context: string\n ): Promise> {\n\n\n if (\n !shownNonPremiumLog &&\n (\n !RXDB_UTILS_GLOBAL.premium ||\n typeof RXDB_UTILS_GLOBAL.premium !== 'string' ||\n (await defaultHashSha256(RXDB_UTILS_GLOBAL.premium) !== PREMIUM_FLAG_HASH)\n )\n ) {\n console.warn(\n [\n '-------------- RxDB Open Core RxStorage -------------------------------',\n 'You are using the free LokiJS based RxStorage implementation from RxDB https://rxdb.info/rx-storage-lokijs.html?console=loki ',\n 'While this is a great option, we want to let you know that there are faster storage solutions available in our premium plugins.',\n 'For professional users and production environments, we highly recommend considering these premium options to enhance performance and reliability.',\n ' https://rxdb.info/premium?console=loki ',\n 'If you already purchased premium access you can disable this log by calling the setPremiumFlag() function from rxdb-premium/plugins/shared.',\n '---------------------------------------------------------------------'\n ].join('\\n')\n );\n shownNonPremiumLog = true;\n } else {\n shownNonPremiumLog = true;\n }\n\n if (documentWrites.length === 0) {\n throw newRxError('P2', {\n args: {\n documentWrites\n }\n });\n }\n const localState = await mustUseLocalState(this);\n if (!localState) {\n return requestRemoteInstance(this, 'bulkWrite', [documentWrites]);\n }\n\n const ret: RxStorageBulkWriteResponse = {\n success: [],\n error: []\n };\n\n const docsInDb: Map[StringKeys], RxDocumentData> = new Map();\n const docsInDbWithLokiKey: 
Map<\n RxDocumentData[StringKeys],\n RxDocumentData & { $loki: number; }\n > = new Map();\n documentWrites.forEach(writeRow => {\n const id = writeRow.document[this.primaryPath];\n const documentInDb = localState.collection.by(this.primaryPath, id);\n if (documentInDb) {\n docsInDbWithLokiKey.set(id as any, documentInDb);\n docsInDb.set(id as any, stripLokiKey(documentInDb));\n }\n });\n\n const categorized = categorizeBulkWriteRows(\n this,\n this.primaryPath as any,\n docsInDb,\n documentWrites,\n context\n );\n ret.error = categorized.errors;\n\n categorized.bulkInsertDocs.forEach(writeRow => {\n localState.collection.insert(flatClone(writeRow.document));\n ret.success.push(writeRow.document);\n });\n categorized.bulkUpdateDocs.forEach(writeRow => {\n const docId = writeRow.document[this.primaryPath];\n const documentInDbWithLokiKey = getFromMapOrThrow(docsInDbWithLokiKey, docId as any);\n const writeDoc: any = Object.assign(\n {},\n writeRow.document,\n {\n $loki: documentInDbWithLokiKey.$loki\n }\n );\n localState.collection.update(writeDoc);\n ret.success.push(writeRow.document);\n });\n localState.databaseState.saveQueue.addWrite();\n\n if (categorized.eventBulk.events.length > 0) {\n const lastState = ensureNotFalsy(categorized.newestRow).document;\n categorized.eventBulk.checkpoint = {\n id: lastState[this.primaryPath],\n lwt: lastState._meta.lwt\n };\n categorized.eventBulk.endTime = now();\n this.changes$.next(categorized.eventBulk);\n }\n\n return ret;\n }\n async findDocumentsById(ids: string[], deleted: boolean): Promise[]> {\n const localState = await mustUseLocalState(this);\n if (!localState) {\n return requestRemoteInstance(this, 'findDocumentsById', [ids, deleted]);\n }\n\n const ret: RxDocumentData[] = [];\n ids.forEach(id => {\n const documentInDb = localState.collection.by(this.primaryPath, id);\n if (\n documentInDb &&\n (!documentInDb._deleted || deleted)\n ) {\n ret.push(stripLokiKey(documentInDb));\n }\n });\n return ret;\n }\n async 
query(preparedQueryOriginal: PreparedQuery): Promise> {\n const localState = await mustUseLocalState(this);\n if (!localState) {\n return requestRemoteInstance(this, 'query', [preparedQueryOriginal]);\n }\n\n let preparedQuery = ensureNotFalsy(preparedQueryOriginal.query);\n if (preparedQuery.selector) {\n preparedQuery = flatClone(preparedQuery);\n preparedQuery.selector = transformRegexToRegExp(preparedQuery.selector);\n }\n\n const query = preparedQueryOriginal.query;\n const skip = query.skip ? query.skip : 0;\n const limit = query.limit ? query.limit : Infinity;\n const skipPlusLimit = skip + limit;\n\n /**\n * LokiJS is not able to give correct results for some\n * operators, so we have to check all documents in that case\n * and laster apply skip and limit manually.\n * @link https://github.com/pubkey/rxdb/issues/5320\n */\n let mustRunMatcher = false;\n if (hasDeepProperty(preparedQuery.selector, '$in')) {\n mustRunMatcher = true;\n }\n\n\n let lokiQuery = localState.collection\n .chain()\n .find(mustRunMatcher ? {} : preparedQuery.selector);\n\n if (preparedQuery.sort) {\n lokiQuery = lokiQuery.sort(getLokiSortComparator(this.schema, preparedQuery));\n }\n\n\n let foundDocuments = lokiQuery.data().map((lokiDoc: any) => stripLokiKey(lokiDoc));\n\n\n /**\n * LokiJS returned wrong results on some queries\n * with complex indexes. 
Therefore we run the query-match\n * over all result docs to patch this bug.\n * TODO create an issue at the LokiJS repository.\n */\n const queryMatcher = getQueryMatcher(\n this.schema,\n preparedQuery as any\n );\n foundDocuments = foundDocuments.filter((d: any) => queryMatcher(d));\n\n // always apply offset and limit like this, because\n // sylvieQuery.offset() and sylvieQuery.limit() results were inconsistent\n foundDocuments = foundDocuments.slice(skip, skipPlusLimit);\n\n return {\n documents: foundDocuments\n };\n }\n async count(\n preparedQuery: PreparedQuery\n ): Promise {\n const result = await this.query(preparedQuery);\n return {\n count: result.documents.length,\n mode: 'fast'\n };\n }\n getAttachmentData(_documentId: string, _attachmentId: string, _digest: string): Promise {\n throw new Error('Attachments are not implemented in the lokijs RxStorage. Make a pull request.');\n }\n\n changeStream(): Observable>, RxStorageDefaultCheckpoint>> {\n return this.changes$.asObservable();\n }\n\n async cleanup(minimumDeletedTime: number): Promise {\n const localState = await mustUseLocalState(this);\n if (!localState) {\n return requestRemoteInstance(this, 'cleanup', [minimumDeletedTime]);\n }\n\n const deleteAmountPerRun = 10;\n const maxDeletionTime = now() - minimumDeletedTime;\n const query = localState.collection\n .chain()\n .find({\n _deleted: true,\n '_meta.lwt': {\n $lt: maxDeletionTime\n }\n }).limit(deleteAmountPerRun);\n const foundDocuments = query.data();\n if (foundDocuments.length > 0) {\n localState.collection.remove(foundDocuments);\n localState.databaseState.saveQueue.addWrite();\n }\n\n return foundDocuments.length !== deleteAmountPerRun;\n }\n\n async close(): Promise {\n if (this.closed) {\n return this.closed;\n }\n this.closed = (async () => {\n this.changes$.complete();\n OPEN_LOKIJS_STORAGE_INSTANCES.delete(this);\n if (this.internals.localState) {\n const localState = await this.internals.localState;\n const dbState = await 
getLokiDatabase(\n this.databaseName,\n this.databaseSettings\n );\n await dbState.saveQueue.run();\n await closeLokiCollections(\n this.databaseName,\n [\n localState.collection\n ]\n );\n }\n })();\n return this.closed;\n }\n async remove(): Promise {\n const localState = await mustUseLocalState(this);\n if (!localState) {\n return requestRemoteInstance(this, 'remove', []);\n }\n localState.databaseState.database.removeCollection(localState.collection.name);\n await localState.databaseState.saveQueue.run();\n return this.close();\n }\n\n conflictResultionTasks(): Observable> {\n return new Subject();\n }\n async resolveConflictResultionTask(_taskSolution: RxConflictResultionTaskSolution): Promise { }\n\n}\n\nexport async function createLokiLocalState(\n params: RxStorageInstanceCreationParams,\n databaseSettings: LokiDatabaseSettings\n): Promise {\n if (!params.options) {\n params.options = {};\n }\n\n const databaseState = await getLokiDatabase(\n params.databaseName,\n databaseSettings\n );\n\n /**\n * Construct loki indexes from RxJsonSchema indexes.\n * TODO what about compound indexes? 
Are they possible in lokijs?\n */\n const indices: string[] = [];\n if (params.schema.indexes) {\n params.schema.indexes.forEach(idx => {\n if (!isMaybeReadonlyArray(idx)) {\n indices.push(idx);\n }\n });\n }\n /**\n * LokiJS has no concept of custom primary key, they use a number-id that is generated.\n * To be able to query fast by primary key, we always add an index to the primary.\n */\n const primaryKey = getPrimaryFieldOfPrimaryKey(params.schema.primaryKey);\n indices.push(primaryKey as string);\n\n const lokiCollectionName = params.collectionName + '-' + params.schema.version;\n const collectionOptions: Partial = Object.assign(\n {},\n lokiCollectionName,\n {\n indices: indices as string[],\n unique: [primaryKey]\n } as any,\n LOKIJS_COLLECTION_DEFAULT_OPTIONS\n );\n\n const collection: any = databaseState.database.addCollection(\n lokiCollectionName,\n collectionOptions as any\n );\n databaseState.collections[params.collectionName] = collection;\n const ret: LokiLocalDatabaseState = {\n databaseState,\n collection\n };\n\n return ret;\n}\n\n\nexport async function createLokiStorageInstance(\n storage: RxStorageLoki,\n params: RxStorageInstanceCreationParams,\n databaseSettings: LokiDatabaseSettings\n): Promise> {\n const internals: LokiStorageInternals = {};\n\n const broadcastChannelRefObject: DeepReadonly = {};\n\n\n if (params.multiInstance) {\n const leaderElector = getLokiLeaderElector(\n params.databaseInstanceToken,\n broadcastChannelRefObject,\n params.databaseName\n );\n internals.leaderElector = leaderElector;\n } else {\n // optimisation shortcut, directly create db is non multi instance.\n internals.localState = createLokiLocalState(params, databaseSettings);\n await internals.localState;\n }\n\n const instance = new RxStorageInstanceLoki(\n params.databaseInstanceToken,\n storage,\n params.databaseName,\n params.collectionName,\n params.schema,\n internals,\n params.options,\n databaseSettings\n );\n\n await addRxStorageMultiInstanceSupport(\n 
RX_STORAGE_NAME_LOKIJS,\n params,\n instance,\n internals.leaderElector ? internals.leaderElector.broadcastChannel : undefined\n );\n\n if (params.multiInstance) {\n /**\n * Clean up the broadcast-channel reference on close()\n */\n const closeBefore = instance.close.bind(instance);\n instance.close = function () {\n removeBroadcastChannelReference(\n params.databaseInstanceToken,\n broadcastChannelRefObject\n );\n return closeBefore();\n };\n const removeBefore = instance.remove.bind(instance);\n instance.remove = function () {\n removeBroadcastChannelReference(\n params.databaseInstanceToken,\n broadcastChannelRefObject\n );\n return removeBefore();\n };\n\n /**\n * Directly create the localState when/if the db becomes leader.\n */\n ensureNotFalsy(internals.leaderElector)\n .awaitLeadership()\n .then(() => {\n if (!instance.closed) {\n mustUseLocalState(instance);\n }\n });\n }\n\n\n return instance;\n}\n"],"mappings":"AAAA,SACIA,OAAO,QAEJ,MAAM;AACb,SACIC,SAAS,EACTC,GAAG,EACHC,cAAc,EACdC,oBAAoB,EACpBC,iBAAiB,EACjBC,eAAe,EACfC,iBAAiB,EACjBC,iBAAiB,EACjBC,iBAAiB,QACd,mBAAmB;AAC1B,SAASC,UAAU,QAAQ,mBAAmB;AAwB9C,SACIC,oBAAoB,EACpBC,eAAe,EACfC,6BAA6B,EAC7BC,iCAAiC,EACjCC,YAAY,EACZC,qBAAqB,EACrBC,oBAAoB,EACpBC,qBAAqB,EACrBC,iBAAiB,EACjBC,mBAAmB,EACnBC,sBAAsB,EACtBC,sBAAsB,QACnB,oBAAoB;AAE3B,SAASC,2BAA2B,QAAQ,2BAA2B;AACvE,SAASC,uBAAuB,QAAQ,4BAA4B;AACpE,SACIC,gCAAgC,EAChCC,+BAA+B,QAC5B,mCAAmC;AAC1C,SAASC,eAAe,QAAQ,0BAA0B;AAE1D,IAAIC,UAAU,GAAG1B,GAAG,CAAC,CAAC;AACtB,IAAI2B,kBAAkB,GAAG,KAAK;AAE9B,WAAaC,qBAAqB;EAa9B,SAAAA,sBACoBC,qBAA6B,EAC7BC,OAAsB,EACtBC,YAAoB,EACpBC,cAAsB,EACtBC,MAAyD,EACzDC,SAA+B,EAC/BC,OAA+B,EAC/BC,gBAAsC,EACxD;IAAA,KAdMC,QAAQ,GAAoG,IAAIvC,OAAO,CAAC,CAAC;IAAA,KACjH4B,UAAU,GAAGA,UAAU,EAAE;IAAA,KAKrBG,qBAA6B,GAA7BA,qBAA6B;IAAA,KAC7BC,OAAsB,GAAtBA,OAAsB;IAAA,KACtBC,YAAoB,GAApBA,YAAoB;IAAA,KACpBC,cAAsB,GAAtBA,cAAsB;IAAA,KACtBC,MAAyD,GAAzDA,MAAyD;IAAA,KACzDC,SAA+B,GAA/BA,SAA+B;IAAA,KAC/BC,OAA+B,GAA/BA,OAA+B;IAAA,KAC/BC,gBAAsC,GAAtCA,gBAAsC;IAEtD,IAAI,CAACE,W
AAW,GAAGjB,2BAA2B,CAAC,IAAI,CAACY,MAAM,CAACM,UAAU,CAAC;IACtE5B,6BAA6B,CAAC6B,GAAG,CAAC,IAAI,CAAC;IACvC,IAAI,IAAI,CAACN,SAAS,CAACO,aAAa,EAAE;MAG9B;AACZ;AACA;AACA;AACA;AACA;MACY,IAAMC,UAAkD,GAAG;QACvDC,SAAS,EAAE,IAAI,CAACA,SAAS,CAACC,IAAI,CAAC,IAAI,CAAC;QACpCC,YAAY,EAAE,IAAI,CAACA,YAAY,CAACD,IAAI,CAAC,IAAI,CAAC;QAC1CE,OAAO,EAAE,IAAI,CAACA,OAAO,CAACF,IAAI,CAAC,IAAI,CAAC;QAChCG,KAAK,EAAE,IAAI,CAACA,KAAK,CAACH,IAAI,CAAC,IAAI,CAAC;QAC5BI,KAAK,EAAE,IAAI,CAACA,KAAK,CAACJ,IAAI,CAAC,IAAI,CAAC;QAC5BK,KAAK,EAAE,IAAI,CAACA,KAAK,CAACL,IAAI,CAAC,IAAI,CAAC;QAC5BM,iBAAiB,EAAE,IAAI,CAACA,iBAAiB,CAACN,IAAI,CAAC,IAAI,CAAC;QACpDZ,cAAc,EAAE,IAAI,CAACA,cAAc;QACnCD,YAAY,EAAE,IAAI,CAACA,YAAY;QAC/BoB,sBAAsB,EAAE,IAAI,CAACA,sBAAsB,CAACP,IAAI,CAAC,IAAI,CAAC;QAC9DQ,iBAAiB,EAAE,IAAI,CAACA,iBAAiB,CAACR,IAAI,CAAC,IAAI,CAAC;QACpDV,SAAS,EAAE,IAAI,CAACA,SAAS;QACzBC,OAAO,EAAE,IAAI,CAACA,OAAO;QACrBkB,MAAM,EAAE,IAAI,CAACA,MAAM,CAACT,IAAI,CAAC,IAAI,CAAC;QAC9BU,4BAA4B,EAAE,IAAI,CAACA,4BAA4B,CAACV,IAAI,CAAC,IAAI,CAAC;QAC1EX,MAAM,EAAE,IAAI,CAACA;MACjB,CAAC;MAED,IAAI,CAACC,SAAS,CAACO,aAAa,CAACc,eAAe,CAAC,CAAC,CAACC,IAAI,CAAC,MAAM;QACtD;QACAvD,cAAc,CAAC,IAAI,CAACiC,SAAS,CAACO,aAAa,CAAC,CAACgB,gBAAgB,CACxDC,gBAAgB,CAAC,SAAS,EAAGC,GAAG,IAAKzC,mBAAmB,CAACwB,UAAU,EAASiB,GAAG,CAAC,CAAC;MAC1F,CAAC,CAAC,CAACC,KAAK,CAAC,MAAM,CAAE,CAAC,CAAC;IACvB;EACJ;EAAC,IAAAC,MAAA,GAAAjC,qBAAA,CAAAkC,SAAA;EAAAD,MAAA,CAEKlB,SAAS,GAAf,eAAAA,UACIoB,cAAyC,EACzCC,OAAe,EAC+B;IAG9C,IACI,CAACrC,kBAAkB,KAEf,CAACtB,iBAAiB,CAAC4D,OAAO,IAC1B,OAAO5D,iBAAiB,CAAC4D,OAAO,KAAK,QAAQ,IAC5C,OAAM3D,iBAAiB,CAACD,iBAAiB,CAAC4D,OAAO,CAAC,MAAK1D,iBAAkB,CAC7E,EACH;MACE2D,OAAO,CAACC,IAAI,CACR,CACI,yEAAyE,EACzE,+HAA+H,EAC/H,iIAAiI,EACjI,mJAAmJ,EACnJ,0CAA0C,EAC1C,6IAA6I,EAC7I,uEAAuE,CAC1E,CAACC,IAAI,CAAC,IAAI,CACf,CAAC;MACDzC,kBAAkB,GAAG,IAAI;IAC7B,CAAC,MAAM;MACHA,kBAAkB,GAAG,IAAI;IAC7B;IAEA,IAAIoC,cAAc,CAACM,MAAM,KAAK,CAAC,EAAE;MAC7B,MAAM7D,UAAU,CAAC,IAAI,EAAE;QACnB8D,IAAI,EAAE;UACFP;QACJ;MACJ,CAAC,CAAC;IACN;IACA,IAAMQ,UAAU,GAAG,MAAMtD,iBAAiB,CAAC,IAAI,CAAC;I
AChD,IAAI,CAACsD,UAAU,EAAE;MACb,OAAOvD,qBAAqB,CAAC,IAAI,EAAE,WAAW,EAAE,CAAC+C,cAAc,CAAC,CAAC;IACrE;IAEA,IAAMS,GAA0C,GAAG;MAC/CC,OAAO,EAAE,EAAE;MACXC,KAAK,EAAE;IACX,CAAC;IAED,IAAMC,QAA0F,GAAG,IAAIC,GAAG,CAAC,CAAC;IAC5G,IAAMC,mBAGL,GAAG,IAAID,GAAG,CAAC,CAAC;IACbb,cAAc,CAACe,OAAO,CAACC,QAAQ,IAAI;MAC/B,IAAMC,EAAE,GAAGD,QAAQ,CAACE,QAAQ,CAAC,IAAI,CAAC3C,WAAW,CAAC;MAC9C,IAAM4C,YAAY,GAAGX,UAAU,CAACY,UAAU,CAACC,EAAE,CAAC,IAAI,CAAC9C,WAAW,EAAE0C,EAAE,CAAC;MACnE,IAAIE,YAAY,EAAE;QACdL,mBAAmB,CAACQ,GAAG,CAACL,EAAE,EAASE,YAAY,CAAC;QAChDP,QAAQ,CAACU,GAAG,CAACL,EAAE,EAASnE,YAAY,CAACqE,YAAY,CAAC,CAAC;MACvD;IACJ,CAAC,CAAC;IAEF,IAAMI,WAAW,GAAGhE,uBAAuB,CACvC,IAAI,EACJ,IAAI,CAACgB,WAAW,EAChBqC,QAAQ,EACRZ,cAAc,EACdC,OACJ,CAAC;IACDQ,GAAG,CAACE,KAAK,GAAGY,WAAW,CAACC,MAAM;IAE9BD,WAAW,CAACE,cAAc,CAACV,OAAO,CAACC,QAAQ,IAAI;MAC3CR,UAAU,CAACY,UAAU,CAACM,MAAM,CAAC1F,SAAS,CAACgF,QAAQ,CAACE,QAAQ,CAAC,CAAC;MAC1DT,GAAG,CAACC,OAAO,CAACiB,IAAI,CAACX,QAAQ,CAACE,QAAQ,CAAC;IACvC,CAAC,CAAC;IACFK,WAAW,CAACK,cAAc,CAACb,OAAO,CAACC,QAAQ,IAAI;MAC3C,IAAMa,KAAK,GAAGb,QAAQ,CAACE,QAAQ,CAAC,IAAI,CAAC3C,WAAW,CAAC;MACjD,IAAMuD,uBAAuB,GAAG1F,iBAAiB,CAAC0E,mBAAmB,EAAEe,KAAY,CAAC;MACpF,IAAME,QAAa,GAAGC,MAAM,CAACC,MAAM,CAC/B,CAAC,CAAC,EACFjB,QAAQ,CAACE,QAAQ,EACjB;QACIgB,KAAK,EAAEJ,uBAAuB,CAACI;MACnC,CACJ,CAAC;MACD1B,UAAU,CAACY,UAAU,CAACe,MAAM,CAACJ,QAAQ,CAAC;MACtCtB,GAAG,CAACC,OAAO,CAACiB,IAAI,CAACX,QAAQ,CAACE,QAAQ,CAAC;IACvC,CAAC,CAAC;IACFV,UAAU,CAAC4B,aAAa,CAACC,SAAS,CAACC,QAAQ,CAAC,CAAC;IAE7C,IAAIf,WAAW,CAACgB,SAAS,CAACC,MAAM,CAAClC,MAAM,GAAG,CAAC,EAAE;MACzC,IAAMmC,SAAS,GAAGvG,cAAc,CAACqF,WAAW,CAACmB,SAAS,CAAC,CAACxB,QAAQ;MAChEK,WAAW,CAACgB,SAAS,CAACI,UAAU,GAAG;QAC/B1B,EAAE,EAAEwB,SAAS,CAAC,IAAI,CAAClE,WAAW,CAAC;QAC/BqE,GAAG,EAAEH,SAAS,CAACI,KAAK,CAACD;MACzB,CAAC;MACDrB,WAAW,CAACgB,SAAS,CAACO,OAAO,GAAG7G,GAAG,CAAC,CAAC;MACrC,IAAI,CAACqC,QAAQ,CAACyE,IAAI,CAACxB,WAAW,CAACgB,SAAS,CAAC;IAC7C;IAEA,OAAO9B,GAAG;EACd,CAAC;EAAAX,MAAA,CACKX,iBAAiB,GAAvB,eAAAA,kBAAwB6D,GAAa,EAAEC,OAAgB,EAAwC;IAC3F,IAAMzC,UAAU,GAAG,MAAMtD,iBAAiB
,CAAC,IAAI,CAAC;IAChD,IAAI,CAACsD,UAAU,EAAE;MACb,OAAOvD,qBAAqB,CAAC,IAAI,EAAE,mBAAmB,EAAE,CAAC+F,GAAG,EAAEC,OAAO,CAAC,CAAC;IAC3E;IAEA,IAAMxC,GAAgC,GAAG,EAAE;IAC3CuC,GAAG,CAACjC,OAAO,CAACE,EAAE,IAAI;MACd,IAAME,YAAY,GAAGX,UAAU,CAACY,UAAU,CAACC,EAAE,CAAC,IAAI,CAAC9C,WAAW,EAAE0C,EAAE,CAAC;MACnE,IACIE,YAAY,KACX,CAACA,YAAY,CAAC+B,QAAQ,IAAID,OAAO,CAAC,EACrC;QACExC,GAAG,CAACkB,IAAI,CAAC7E,YAAY,CAACqE,YAAY,CAAC,CAAC;MACxC;IACJ,CAAC,CAAC;IACF,OAAOV,GAAG;EACd,CAAC;EAAAX,MAAA,CACKb,KAAK,GAAX,eAAAA,MAAYkE,qBAA+C,EAA4C;IACnG,IAAM3C,UAAU,GAAG,MAAMtD,iBAAiB,CAAC,IAAI,CAAC;IAChD,IAAI,CAACsD,UAAU,EAAE;MACb,OAAOvD,qBAAqB,CAAC,IAAI,EAAE,OAAO,EAAE,CAACkG,qBAAqB,CAAC,CAAC;IACxE;IAEA,IAAIC,aAAa,GAAGlH,cAAc,CAACiH,qBAAqB,CAAClE,KAAK,CAAC;IAC/D,IAAImE,aAAa,CAACC,QAAQ,EAAE;MACxBD,aAAa,GAAGpH,SAAS,CAACoH,aAAa,CAAC;MACxCA,aAAa,CAACC,QAAQ,GAAGhG,sBAAsB,CAAC+F,aAAa,CAACC,QAAQ,CAAC;IAC3E;IAEA,IAAMpE,KAAK,GAAGkE,qBAAqB,CAAClE,KAAK;IACzC,IAAMqE,IAAI,GAAGrE,KAAK,CAACqE,IAAI,GAAGrE,KAAK,CAACqE,IAAI,GAAG,CAAC;IACxC,IAAMC,KAAK,GAAGtE,KAAK,CAACsE,KAAK,GAAGtE,KAAK,CAACsE,KAAK,GAAGC,QAAQ;IAClD,IAAMC,aAAa,GAAGH,IAAI,GAAGC,KAAK;;IAElC;AACR;AACA;AACA;AACA;AACA;IACQ,IAAIG,cAAc,GAAG,KAAK;IAC1B,IAAIrH,eAAe,CAAC+G,aAAa,CAACC,QAAQ,EAAE,KAAK,CAAC,EAAE;MAChDK,cAAc,GAAG,IAAI;IACzB;IAGA,IAAIC,SAAS,GAAGnD,UAAU,CAACY,UAAU,CAChCwC,KAAK,CAAC,CAAC,CACPC,IAAI,CAACH,cAAc,GAAG,CAAC,CAAC,GAAGN,aAAa,CAACC,QAAQ,CAAC;IAEvD,IAAID,aAAa,CAACU,IAAI,EAAE;MACpBH,SAAS,GAAGA,SAAS,CAACG,IAAI,CAAC/G,qBAAqB,CAAC,IAAI,CAACmB,MAAM,EAAEkF,aAAa,CAAC,CAAC;IACjF;IAGA,IAAIW,cAAc,GAAGJ,SAAS,CAACK,IAAI,CAAC,CAAC,CAACC,GAAG,CAAEC,OAAY,IAAKpH,YAAY,CAACoH,OAAO,CAAC,CAAC;;IAGlF;AACR;AACA;AACA;AACA;AACA;IACQ,IAAMC,YAAY,GAAGzG,eAAe,CAChC,IAAI,CAACQ,MAAM,EACXkF,aACJ,CAAC;IACDW,cAAc,GAAGA,cAAc,CAACK,MAAM,CAAEC,CAAM,IAAKF,YAAY,CAACE,CAAC,CAAC,CAAC;;IAEnE;IACA;IACAN,cAAc,GAAGA,cAAc,CAACO,KAAK,CAAChB,IAAI,EAAEG,aAAa,CAAC;IAE1D,OAAO;MACHc,SAAS,EAAER;IACf,CAAC;EACL,CAAC;EAAAjE,MAAA,CACKZ,KAAK,GAAX,eAAAA,MACIkE,aAAuC,EACV;IAC7B,IAAMoB,MAAM,GAAG,MAAM,IAAI,CAACvF,
KAAK,CAACmE,aAAa,CAAC;IAC9C,OAAO;MACHlE,KAAK,EAAEsF,MAAM,CAACD,SAAS,CAACjE,MAAM;MAC9BmE,IAAI,EAAE;IACV,CAAC;EACL,CAAC;EAAA3E,MAAA,CACDT,iBAAiB,GAAjB,SAAAA,kBAAkBqF,WAAmB,EAAEC,aAAqB,EAAEC,OAAe,EAAmB;IAC5F,MAAM,IAAIC,KAAK,CAAC,+EAA+E,CAAC;EACpG,CAAC;EAAA/E,MAAA,CAEDhB,YAAY,GAAZ,SAAAA,aAAA,EAAmH;IAC/G,OAAO,IAAI,CAACR,QAAQ,CAACwG,YAAY,CAAC,CAAC;EACvC,CAAC;EAAAhF,MAAA,CAEKf,OAAO,GAAb,eAAAA,QAAcgG,kBAA0B,EAAoB;IACxD,IAAMvE,UAAU,GAAG,MAAMtD,iBAAiB,CAAC,IAAI,CAAC;IAChD,IAAI,CAACsD,UAAU,EAAE;MACb,OAAOvD,qBAAqB,CAAC,IAAI,EAAE,SAAS,EAAE,CAAC8H,kBAAkB,CAAC,CAAC;IACvE;IAEA,IAAMC,kBAAkB,GAAG,EAAE;IAC7B,IAAMC,eAAe,GAAGhJ,GAAG,CAAC,CAAC,GAAG8I,kBAAkB;IAClD,IAAM9F,KAAK,GAAGuB,UAAU,CAACY,UAAU,CAC9BwC,KAAK,CAAC,CAAC,CACPC,IAAI,CAAC;MACFX,QAAQ,EAAE,IAAI;MACd,WAAW,EAAE;QACTgC,GAAG,EAAED;MACT;IACJ,CAAC,CAAC,CAAC1B,KAAK,CAACyB,kBAAkB,CAAC;IAChC,IAAMjB,cAAc,GAAG9E,KAAK,CAAC+E,IAAI,CAAC,CAAC;IACnC,IAAID,cAAc,CAACzD,MAAM,GAAG,CAAC,EAAE;MAC3BE,UAAU,CAACY,UAAU,CAAC9B,MAAM,CAACyE,cAAc,CAAC;MAC5CvD,UAAU,CAAC4B,aAAa,CAACC,SAAS,CAACC,QAAQ,CAAC,CAAC;IACjD;IAEA,OAAOyB,cAAc,CAACzD,MAAM,KAAK0E,kBAAkB;EACvD,CAAC;EAAAlF,MAAA,CAEKd,KAAK,GAAX,eAAAA,MAAA,EAA6B;IACzB,IAAI,IAAI,CAACmG,MAAM,EAAE;MACb,OAAO,IAAI,CAACA,MAAM;IACtB;IACA,IAAI,CAACA,MAAM,GAAG,CAAC,YAAY;MACvB,IAAI,CAAC7G,QAAQ,CAAC8G,QAAQ,CAAC,CAAC;MACxBxI,6BAA6B,CAACyI,MAAM,CAAC,IAAI,CAAC;MAC1C,IAAI,IAAI,CAAClH,SAAS,CAACqC,UAAU,EAAE;QAC3B,IAAMA,UAAU,GAAG,MAAM,IAAI,CAACrC,SAAS,CAACqC,UAAU;QAClD,IAAM8E,OAAO,GAAG,MAAM3I,eAAe,CACjC,IAAI,CAACqB,YAAY,EACjB,IAAI,CAACK,gBACT,CAAC;QACD,MAAMiH,OAAO,CAACjD,SAAS,CAACkD,GAAG,CAAC,CAAC;QAC7B,MAAM7I,oBAAoB,CACtB,IAAI,CAACsB,YAAY,EACjB,CACIwC,UAAU,CAACY,UAAU,CAE7B,CAAC;MACL;IACJ,CAAC,EAAE,CAAC;IACJ,OAAO,IAAI,CAAC+D,MAAM;EACtB,CAAC;EAAArF,MAAA,CACKR,MAAM,GAAZ,eAAAA,OAAA,EAA8B;IAC1B,IAAMkB,UAAU,GAAG,MAAMtD,iBAAiB,CAAC,IAAI,CAAC;IAChD,IAAI,CAACsD,UAAU,EAAE;MACb,OAAOvD,qBAAqB,CAAC,IAAI,EAAE,QAAQ,EAAE,EAAE,CAAC;IACpD;IACAuD,UAAU,CAAC4B,aAAa,CAACoD,QAAQ,CAACC,gBAAgB,CAACjF,UAAU,CAACY,UAAU,CAACsE,IAAI,CAAC;IAC9E,MAAMlF,UA
AU,CAAC4B,aAAa,CAACC,SAAS,CAACkD,GAAG,CAAC,CAAC;IAC9C,OAAO,IAAI,CAACvG,KAAK,CAAC,CAAC;EACvB,CAAC;EAAAc,MAAA,CAEDV,sBAAsB,GAAtB,SAAAA,uBAAA,EAAyE;IACrE,OAAO,IAAIrD,OAAO,CAAC,CAAC;EACxB,CAAC;EAAA+D,MAAA,CACKP,4BAA4B,GAAlC,eAAAA,6BAAmCoG,aAAyD,EAAiB,CAAE,CAAC;EAAA,OAAA9H,qBAAA;AAAA;AAIpH,OAAO,eAAe+H,oBAAoBA,CACtCC,MAAgE,EAChExH,gBAAsC,EACP;EAC/B,IAAI,CAACwH,MAAM,CAACzH,OAAO,EAAE;IACjByH,MAAM,CAACzH,OAAO,GAAG,CAAC,CAAC;EACvB;EAEA,IAAMgE,aAAa,GAAG,MAAMzF,eAAe,CACvCkJ,MAAM,CAAC7H,YAAY,EACnBK,gBACJ,CAAC;;EAED;AACJ;AACA;AACA;EACI,IAAMyH,OAAiB,GAAG,EAAE;EAC5B,IAAID,MAAM,CAAC3H,MAAM,CAAC6H,OAAO,EAAE;IACvBF,MAAM,CAAC3H,MAAM,CAAC6H,OAAO,CAAChF,OAAO,CAACiF,GAAG,IAAI;MACjC,IAAI,CAAC7J,oBAAoB,CAAC6J,GAAG,CAAC,EAAE;QAC5BF,OAAO,CAACnE,IAAI,CAACqE,GAAG,CAAC;MACrB;IACJ,CAAC,CAAC;EACN;EACA;AACJ;AACA;AACA;EACI,IAAMxH,UAAU,GAAGlB,2BAA2B,CAACuI,MAAM,CAAC3H,MAAM,CAACM,UAAU,CAAC;EACxEsH,OAAO,CAACnE,IAAI,CAACnD,UAAoB,CAAC;EAElC,IAAMyH,kBAAkB,GAAGJ,MAAM,CAAC5H,cAAc,GAAG,GAAG,GAAG4H,MAAM,CAAC3H,MAAM,CAACgI,OAAO;EAC9E,IAAMC,iBAA+B,GAAGnE,MAAM,CAACC,MAAM,CACjD,CAAC,CAAC,EACFgE,kBAAkB,EAClB;IACIH,OAAO,EAAEA,OAAmB;IAC5BM,MAAM,EAAE,CAAC5H,UAAU;EACvB,CAAC,EACD3B,iCACJ,CAAC;EAED,IAAMuE,UAAe,GAAGgB,aAAa,CAACoD,QAAQ,CAACa,aAAa,CACxDJ,kBAAkB,EAClBE,iBACJ,CAAC;EACD/D,aAAa,CAACkE,WAAW,CAACT,MAAM,CAAC5H,cAAc,CAAC,GAAGmD,UAAU;EAC7D,IAAMX,GAA2B,GAAG;IAChC2B,aAAa;IACbhB;EACJ,CAAC;EAED,OAAOX,GAAG;AACd;AAGA,OAAO,eAAe8F,yBAAyBA,CAC3CxI,OAAsB,EACtB8H,MAAgE,EAChExH,gBAAsC,EACG;EACzC,IAAMF,SAA+B,GAAG,CAAC,CAAC;EAE1C,IAAMqI,yBAA4C,GAAG,CAAC,CAAC;EAGvD,IAAIX,MAAM,CAACY,aAAa,EAAE;IACtB,IAAM/H,aAAa,GAAG1B,oBAAoB,CACtC6I,MAAM,CAAC/H,qBAAqB,EAC5B0I,yBAAyB,EACzBX,MAAM,CAAC7H,YACX,CAAC;IACDG,SAAS,CAACO,aAAa,GAAGA,aAAa;EAC3C,CAAC,MAAM;IACH;IACAP,SAAS,CAACqC,UAAU,GAAGoF,oBAAoB,CAACC,MAAM,EAAExH,gBAAgB,CAAC;IACrE,MAAMF,SAAS,CAACqC,UAAU;EAC9B;EAEA,IAAMkG,QAAQ,GAAG,IAAI7I,qBAAqB,CACtCgI,MAAM,CAAC/H,qBAAqB,EAC5BC,OAAO,EACP8H,MAAM,CAAC7H,YAAY,EACnB6H,MAAM,CAAC5H,cAAc,EACrB4H,MAAM,CAAC3H,MAAM,EACbC,SAAS,EACT0H,MAAM,CAACzH,OAAO,EAC
dC,gBACJ,CAAC;EAED,MAAMb,gCAAgC,CAClCJ,sBAAsB,EACtByI,MAAM,EACNa,QAAQ,EACRvI,SAAS,CAACO,aAAa,GAAGP,SAAS,CAACO,aAAa,CAACgB,gBAAgB,GAAGiH,SACzE,CAAC;EAED,IAAId,MAAM,CAACY,aAAa,EAAE;IACtB;AACR;AACA;IACQ,IAAMG,WAAW,GAAGF,QAAQ,CAAC1H,KAAK,CAACH,IAAI,CAAC6H,QAAQ,CAAC;IACjDA,QAAQ,CAAC1H,KAAK,GAAG,YAAY;MACzBvB,+BAA+B,CAC3BoI,MAAM,CAAC/H,qBAAqB,EAC5B0I,yBACJ,CAAC;MACD,OAAOI,WAAW,CAAC,CAAC;IACxB,CAAC;IACD,IAAMC,YAAY,GAAGH,QAAQ,CAACpH,MAAM,CAACT,IAAI,CAAC6H,QAAQ,CAAC;IACnDA,QAAQ,CAACpH,MAAM,GAAG,YAAY;MAC1B7B,+BAA+B,CAC3BoI,MAAM,CAAC/H,qBAAqB,EAC5B0I,yBACJ,CAAC;MACD,OAAOK,YAAY,CAAC,CAAC;IACzB,CAAC;;IAED;AACR;AACA;IACQ3K,cAAc,CAACiC,SAAS,CAACO,aAAa,CAAC,CAClCc,eAAe,CAAC,CAAC,CACjBC,IAAI,CAAC,MAAM;MACR,IAAI,CAACiH,QAAQ,CAACvB,MAAM,EAAE;QAClBjI,iBAAiB,CAACwJ,QAAQ,CAAC;MAC/B;IACJ,CAAC,CAAC;EACV;EAGA,OAAOA,QAAQ;AACnB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-lokijs/rx-storage-lokijs.js b/dist/esm/plugins/storage-lokijs/rx-storage-lokijs.js deleted file mode 100644 index 1f4ae6210a5..00000000000 --- a/dist/esm/plugins/storage-lokijs/rx-storage-lokijs.js +++ /dev/null @@ -1,34 +0,0 @@ -import { createLokiStorageInstance } from "./rx-storage-instance-loki.js"; -import { RX_STORAGE_NAME_LOKIJS } from "./lokijs-helper.js"; -import { ensureRxStorageInstanceParamsAreCorrect } from "../../rx-storage-helper.js"; -import { RXDB_VERSION } from "../utils/utils-rxdb-version.js"; -export var RxStorageLoki = /*#__PURE__*/function () { - /** - * Create one leader elector by db name. - * This is done inside of the storage, not globally - * to make it easier to test multi-tab behavior. 
- */ - - function RxStorageLoki(databaseSettings) { - this.name = RX_STORAGE_NAME_LOKIJS; - this.rxdbVersion = RXDB_VERSION; - this.leaderElectorByLokiDbName = new Map(); - this.databaseSettings = databaseSettings; - } - var _proto = RxStorageLoki.prototype; - _proto.createStorageInstance = function createStorageInstance(params) { - ensureRxStorageInstanceParamsAreCorrect(params); - return createLokiStorageInstance(this, params, this.databaseSettings); - }; - return RxStorageLoki; -}(); - -/** - * @deprecated The lokijs RxStorage is deprecated, more info at: - * @link https://rxdb.info/rx-storage-lokijs.html - */ -export function getRxStorageLoki(databaseSettings = {}) { - var storage = new RxStorageLoki(databaseSettings); - return storage; -} -//# sourceMappingURL=rx-storage-lokijs.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-lokijs/rx-storage-lokijs.js.map b/dist/esm/plugins/storage-lokijs/rx-storage-lokijs.js.map deleted file mode 100644 index 917eec19787..00000000000 --- a/dist/esm/plugins/storage-lokijs/rx-storage-lokijs.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-storage-lokijs.js","names":["createLokiStorageInstance","RX_STORAGE_NAME_LOKIJS","ensureRxStorageInstanceParamsAreCorrect","RXDB_VERSION","RxStorageLoki","databaseSettings","name","rxdbVersion","leaderElectorByLokiDbName","Map","_proto","prototype","createStorageInstance","params","getRxStorageLoki","storage"],"sources":["../../../../src/plugins/storage-lokijs/rx-storage-lokijs.ts"],"sourcesContent":["import type {\n LokiDatabaseSettings,\n LokiSettings,\n LokiStorageInternals,\n RxStorage,\n RxStorageInstanceCreationParams\n} from '../../types/index.d.ts';\nimport {\n createLokiStorageInstance,\n RxStorageInstanceLoki\n} from './rx-storage-instance-loki.ts';\nimport { RX_STORAGE_NAME_LOKIJS } from './lokijs-helper.ts';\nimport type { LeaderElector } from 'broadcast-channel';\n\nimport { ensureRxStorageInstanceParamsAreCorrect } from 
'../../rx-storage-helper.ts';\nimport { RXDB_VERSION } from '../utils/utils-rxdb-version.ts';\n\nexport class RxStorageLoki implements RxStorage {\n public name = RX_STORAGE_NAME_LOKIJS;\n public readonly rxdbVersion = RXDB_VERSION;\n\n /**\n * Create one leader elector by db name.\n * This is done inside of the storage, not globally\n * to make it easier to test multi-tab behavior.\n */\n public leaderElectorByLokiDbName: Map = new Map();\n\n constructor(\n public databaseSettings: LokiDatabaseSettings\n ) { }\n\n public createStorageInstance(\n params: RxStorageInstanceCreationParams\n ): Promise> {\n ensureRxStorageInstanceParamsAreCorrect(params);\n return createLokiStorageInstance(this, params, this.databaseSettings);\n }\n}\n\n/**\n * @deprecated The lokijs RxStorage is deprecated, more info at:\n * @link https://rxdb.info/rx-storage-lokijs.html\n */\nexport function getRxStorageLoki(\n databaseSettings: LokiDatabaseSettings = {}\n): RxStorageLoki {\n const storage = new RxStorageLoki(databaseSettings);\n return storage;\n}\n"],"mappings":"AAOA,SACIA,yBAAyB,QAEtB,+BAA+B;AACtC,SAASC,sBAAsB,QAAQ,oBAAoB;AAG3D,SAASC,uCAAuC,QAAQ,4BAA4B;AACpF,SAASC,YAAY,QAAQ,gCAAgC;AAE7D,WAAaC,aAAa;EAItB;AACJ;AACA;AACA;AACA;;EAUI,SAAAA,cACWC,gBAAsC,EAC/C;IAAA,KAnBKC,IAAI,GAAGL,sBAAsB;IAAA,KACpBM,WAAW,GAAGJ,YAAY;IAAA,KAOnCK,yBAAyB,GAO3B,IAAIC,GAAG,CAAC,CAAC;IAAA,KAGHJ,gBAAsC,GAAtCA,gBAAsC;EAC7C;EAAC,IAAAK,MAAA,GAAAN,aAAA,CAAAO,SAAA;EAAAD,MAAA,CAEEE,qBAAqB,GAA5B,SAAAA,sBACIC,MAAgE,EACvB;IACzCX,uCAAuC,CAACW,MAAM,CAAC;IAC/C,OAAOb,yBAAyB,CAAC,IAAI,EAAEa,MAAM,EAAE,IAAI,CAACR,gBAAgB,CAAC;EACzE,CAAC;EAAA,OAAAD,aAAA;AAAA;;AAGL;AACA;AACA;AACA;AACA,OAAO,SAASU,gBAAgBA,CAC5BT,gBAAsC,GAAG,CAAC,CAAC,EAC9B;EACb,IAAMU,OAAO,GAAG,IAAIX,aAAa,CAACC,gBAAgB,CAAC;EACnD,OAAOU,OAAO;AAClB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-memory/binary-search-bounds.js b/dist/esm/plugins/storage-memory/binary-search-bounds.js deleted file mode 100644 index 
c3f547e1772..00000000000 --- a/dist/esm/plugins/storage-memory/binary-search-bounds.js +++ /dev/null @@ -1,106 +0,0 @@ -/** - * Everything in this file was copied and adapted from - * @link https://github.com/mikolalysenko/binary-search-bounds - * - * TODO We should use the original npm module instead when this bug is fixed: - * @link https://github.com/mikolalysenko/binary-search-bounds/pull/14 - */ - -function ge(a, y, c, l, h) { - var i = h + 1; - while (l <= h) { - var m = l + h >>> 1; - var x = a[m]; - var p = c !== undefined ? c(x, y) : x - y; - if (p >= 0) { - i = m; - h = m - 1; - } else { - l = m + 1; - } - } - return i; -} -function gt(a, y, c, l, h) { - var i = h + 1; - while (l <= h) { - var m = l + h >>> 1; - var x = a[m]; - var p = c !== undefined ? c(x, y) : x - y; - if (p > 0) { - i = m; - h = m - 1; - } else { - l = m + 1; - } - } - return i; -} -function lt(a, y, c, l, h) { - var i = l - 1; - while (l <= h) { - var m = l + h >>> 1, - x = a[m]; - var p = c !== undefined ? c(x, y) : x - y; - if (p < 0) { - i = m; - l = m + 1; - } else { - h = m - 1; - } - } - return i; -} -function le(a, y, c, l, h) { - var i = l - 1; - while (l <= h) { - var m = l + h >>> 1, - x = a[m]; - var p = c !== undefined ? c(x, y) : x - y; - if (p <= 0) { - i = m; - l = m + 1; - } else { - h = m - 1; - } - } - return i; -} -function eq(a, y, c, l, h) { - while (l <= h) { - var m = l + h >>> 1, - x = a[m]; - var p = c !== undefined ? c(x, y) : x - y; - if (p === 0) { - return m; - } - if (p <= 0) { - l = m + 1; - } else { - h = m - 1; - } - } - return -1; -} -function norm(a, y, c, l, h, f) { - if (typeof c === 'function') { - return f(a, y, c, l === undefined ? 0 : l | 0, h === undefined ? a.length - 1 : h | 0); - } - return f(a, y, undefined, c === undefined ? 0 : c | 0, l === undefined ? 
a.length - 1 : l | 0); -} -export function boundGE(a, y, c, l, h) { - return norm(a, y, c, l, h, ge); -} -export function boundGT(a, y, c, l, h) { - return norm(a, y, c, l, h, gt); -} -export function boundLT(a, y, c, l, h) { - return norm(a, y, c, l, h, lt); -} -export function boundLE(a, y, c, l, h) { - return norm(a, y, c, l, h, le); -} -export function boundEQ(a, y, c, l, h) { - return norm(a, y, c, l, h, eq); -} -//# sourceMappingURL=binary-search-bounds.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-memory/binary-search-bounds.js.map b/dist/esm/plugins/storage-memory/binary-search-bounds.js.map deleted file mode 100644 index 4c7378c0a8a..00000000000 --- a/dist/esm/plugins/storage-memory/binary-search-bounds.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"binary-search-bounds.js","names":["ge","a","y","c","l","h","i","m","x","p","undefined","gt","lt","le","eq","norm","f","length","boundGE","boundGT","boundLT","boundLE","boundEQ"],"sources":["../../../../src/plugins/storage-memory/binary-search-bounds.ts"],"sourcesContent":["/**\n * Everything in this file was copied and adapted from\n * @link https://github.com/mikolalysenko/binary-search-bounds\n *\n * TODO We should use the original npm module instead when this bug is fixed:\n * @link https://github.com/mikolalysenko/binary-search-bounds/pull/14\n */\n\n\n\ntype Compare = ((a: T, b: T) => number | null | undefined);\n\nfunction ge(a: T[], y: T, c: Compare, l?: any, h?: any): number {\n let i: number = h + 1;\n while (l <= h) {\n const m = (l + h) >>> 1;\n const x: any = a[m];\n const p: any = (c !== undefined) ? c(x, y) : (x - (y as any));\n if (p >= 0) {\n i = m; h = m - 1;\n } else {\n l = m + 1;\n }\n }\n return i;\n}\n\nfunction gt(a: T[], y: T, c: Compare, l?: any, h?: any): number {\n let i = h + 1;\n while (l <= h) {\n const m = (l + h) >>> 1;\n const x = a[m];\n const p: any = (c !== undefined) ? 
c(x, y) : ((x as any) - (y as any));\n if (p > 0) {\n i = m; h = m - 1;\n } else {\n l = m + 1;\n }\n }\n return i;\n}\n\nfunction lt(a: T[], y: T, c: Compare, l?: any, h?: any): number {\n let i = l - 1;\n while (l <= h) {\n const m = (l + h) >>> 1, x = a[m];\n const p: any = (c !== undefined) ? c(x, y) : ((x as any) - (y as any));\n if (p < 0) {\n i = m; l = m + 1;\n } else {\n h = m - 1;\n }\n }\n return i;\n}\n\nfunction le(a: T[], y: T, c: Compare, l?: any, h?: any): number {\n let i = l - 1;\n while (l <= h) {\n const m = (l + h) >>> 1, x = a[m];\n const p: any = (c !== undefined) ? c(x, y) : ((x as any) - (y as any));\n if (p <= 0) {\n i = m; l = m + 1;\n } else {\n h = m - 1;\n }\n }\n return i;\n}\n\nfunction eq(a: T[], y: T, c: Compare, l?: any, h?: any): number {\n while (l <= h) {\n const m = (l + h) >>> 1, x = a[m];\n const p: any = (c !== undefined) ? c(x, y) : ((x as any) - (y as any));\n if (p === 0) {\n return m;\n }\n if (p <= 0) {\n l = m + 1;\n } else {\n h = m - 1;\n }\n }\n return -1;\n}\n\nfunction norm(a: T[], y: T, c: Compare, l: any, h: any, f: any) {\n if (typeof c === 'function') {\n return f(a, y, c, (l === undefined) ? 0 : l | 0, (h === undefined) ? a.length - 1 : h | 0);\n }\n return f(a, y, undefined, (c === undefined) ? 0 : c | 0, (l === undefined) ? 
a.length - 1 : l | 0);\n}\n\n\nexport function boundGE(a: T[], y: T, c: Compare, l?: any, h?: any) {\n return norm(a, y, c, l, h, ge);\n}\nexport function boundGT(a: T[], y: T, c: Compare, l?: any, h?: any) {\n return norm(a, y, c, l, h, gt);\n}\nexport function boundLT(a: T[], y: T, c: Compare, l?: any, h?: any) {\n return norm(a, y, c, l, h, lt);\n}\nexport function boundLE(a: T[], y: T, c: Compare, l?: any, h?: any) {\n return norm(a, y, c, l, h, le);\n}\nexport function boundEQ(a: T[], y: T, c: Compare, l?: any, h?: any) {\n return norm(a, y, c, l, h, eq);\n}\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;;AAMA,SAASA,EAAEA,CAAIC,CAAM,EAAEC,CAAI,EAAEC,CAAa,EAAEC,CAAO,EAAEC,CAAO,EAAU;EAClE,IAAIC,CAAS,GAAGD,CAAC,GAAG,CAAC;EACrB,OAAOD,CAAC,IAAIC,CAAC,EAAE;IACX,IAAME,CAAC,GAAIH,CAAC,GAAGC,CAAC,KAAM,CAAC;IACvB,IAAMG,CAAM,GAAGP,CAAC,CAACM,CAAC,CAAC;IACnB,IAAME,CAAM,GAAIN,CAAC,KAAKO,SAAS,GAAIP,CAAC,CAACK,CAAC,EAAEN,CAAC,CAAC,GAAIM,CAAC,GAAIN,CAAU;IAC7D,IAAIO,CAAC,IAAI,CAAC,EAAE;MACRH,CAAC,GAAGC,CAAC;MAAEF,CAAC,GAAGE,CAAC,GAAG,CAAC;IACpB,CAAC,MAAM;MACHH,CAAC,GAAGG,CAAC,GAAG,CAAC;IACb;EACJ;EACA,OAAOD,CAAC;AACZ;AAEA,SAASK,EAAEA,CAAIV,CAAM,EAAEC,CAAI,EAAEC,CAAa,EAAEC,CAAO,EAAEC,CAAO,EAAU;EAClE,IAAIC,CAAC,GAAGD,CAAC,GAAG,CAAC;EACb,OAAOD,CAAC,IAAIC,CAAC,EAAE;IACX,IAAME,CAAC,GAAIH,CAAC,GAAGC,CAAC,KAAM,CAAC;IACvB,IAAMG,CAAC,GAAGP,CAAC,CAACM,CAAC,CAAC;IACd,IAAME,CAAM,GAAIN,CAAC,KAAKO,SAAS,GAAIP,CAAC,CAACK,CAAC,EAAEN,CAAC,CAAC,GAAKM,CAAC,GAAYN,CAAU;IACtE,IAAIO,CAAC,GAAG,CAAC,EAAE;MACPH,CAAC,GAAGC,CAAC;MAAEF,CAAC,GAAGE,CAAC,GAAG,CAAC;IACpB,CAAC,MAAM;MACHH,CAAC,GAAGG,CAAC,GAAG,CAAC;IACb;EACJ;EACA,OAAOD,CAAC;AACZ;AAEA,SAASM,EAAEA,CAAIX,CAAM,EAAEC,CAAI,EAAEC,CAAa,EAAEC,CAAO,EAAEC,CAAO,EAAU;EAClE,IAAIC,CAAC,GAAGF,CAAC,GAAG,CAAC;EACb,OAAOA,CAAC,IAAIC,CAAC,EAAE;IACX,IAAME,CAAC,GAAIH,CAAC,GAAGC,CAAC,KAAM,CAAC;MAAEG,CAAC,GAAGP,CAAC,CAACM,CAAC,CAAC;IACjC,IAAME,CAAM,GAAIN,CAAC,KAAKO,SAAS,GAAIP,CAAC,CAACK,CAAC,EAAEN,CAAC,CAAC,GAAKM,CAAC,GAAYN,CAAU;IACtE,IAAIO,CAAC,GAAG,CAAC,EAAE;MACPH,CA
AC,GAAGC,CAAC;MAAEH,CAAC,GAAGG,CAAC,GAAG,CAAC;IACpB,CAAC,MAAM;MACHF,CAAC,GAAGE,CAAC,GAAG,CAAC;IACb;EACJ;EACA,OAAOD,CAAC;AACZ;AAEA,SAASO,EAAEA,CAAIZ,CAAM,EAAEC,CAAI,EAAEC,CAAa,EAAEC,CAAO,EAAEC,CAAO,EAAU;EAClE,IAAIC,CAAC,GAAGF,CAAC,GAAG,CAAC;EACb,OAAOA,CAAC,IAAIC,CAAC,EAAE;IACX,IAAME,CAAC,GAAIH,CAAC,GAAGC,CAAC,KAAM,CAAC;MAAEG,CAAC,GAAGP,CAAC,CAACM,CAAC,CAAC;IACjC,IAAME,CAAM,GAAIN,CAAC,KAAKO,SAAS,GAAIP,CAAC,CAACK,CAAC,EAAEN,CAAC,CAAC,GAAKM,CAAC,GAAYN,CAAU;IACtE,IAAIO,CAAC,IAAI,CAAC,EAAE;MACRH,CAAC,GAAGC,CAAC;MAAEH,CAAC,GAAGG,CAAC,GAAG,CAAC;IACpB,CAAC,MAAM;MACHF,CAAC,GAAGE,CAAC,GAAG,CAAC;IACb;EACJ;EACA,OAAOD,CAAC;AACZ;AAEA,SAASQ,EAAEA,CAAIb,CAAM,EAAEC,CAAI,EAAEC,CAAa,EAAEC,CAAO,EAAEC,CAAO,EAAU;EAClE,OAAOD,CAAC,IAAIC,CAAC,EAAE;IACX,IAAME,CAAC,GAAIH,CAAC,GAAGC,CAAC,KAAM,CAAC;MAAEG,CAAC,GAAGP,CAAC,CAACM,CAAC,CAAC;IACjC,IAAME,CAAM,GAAIN,CAAC,KAAKO,SAAS,GAAIP,CAAC,CAACK,CAAC,EAAEN,CAAC,CAAC,GAAKM,CAAC,GAAYN,CAAU;IACtE,IAAIO,CAAC,KAAK,CAAC,EAAE;MACT,OAAOF,CAAC;IACZ;IACA,IAAIE,CAAC,IAAI,CAAC,EAAE;MACRL,CAAC,GAAGG,CAAC,GAAG,CAAC;IACb,CAAC,MAAM;MACHF,CAAC,GAAGE,CAAC,GAAG,CAAC;IACb;EACJ;EACA,OAAO,CAAC,CAAC;AACb;AAEA,SAASQ,IAAIA,CAAId,CAAM,EAAEC,CAAI,EAAEC,CAAa,EAAEC,CAAM,EAAEC,CAAM,EAAEW,CAAM,EAAE;EAClE,IAAI,OAAOb,CAAC,KAAK,UAAU,EAAE;IACzB,OAAOa,CAAC,CAACf,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAGC,CAAC,KAAKM,SAAS,GAAI,CAAC,GAAGN,CAAC,GAAG,CAAC,EAAGC,CAAC,KAAKK,SAAS,GAAIT,CAAC,CAACgB,MAAM,GAAG,CAAC,GAAGZ,CAAC,GAAG,CAAC,CAAC;EAC9F;EACA,OAAOW,CAAC,CAACf,CAAC,EAAEC,CAAC,EAAEQ,SAAS,EAAGP,CAAC,KAAKO,SAAS,GAAI,CAAC,GAAGP,CAAC,GAAG,CAAC,EAAGC,CAAC,KAAKM,SAAS,GAAIT,CAAC,CAACgB,MAAM,GAAG,CAAC,GAAGb,CAAC,GAAG,CAAC,CAAC;AACtG;AAGA,OAAO,SAASc,OAAOA,CAAIjB,CAAM,EAAEC,CAAI,EAAEC,CAAa,EAAEC,CAAO,EAAEC,CAAO,EAAE;EACtE,OAAOU,IAAI,CAACd,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEL,EAAE,CAAC;AAClC;AACA,OAAO,SAASmB,OAAOA,CAAIlB,CAAM,EAAEC,CAAI,EAAEC,CAAa,EAAEC,CAAO,EAAEC,CAAO,EAAE;EACtE,OAAOU,IAAI,CAACd,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEM,EAAE,CAAC;AAClC;AACA,OAAO,SAASS,OAAOA,CAAI
nB,CAAM,EAAEC,CAAI,EAAEC,CAAa,EAAEC,CAAO,EAAEC,CAAO,EAAE;EACtE,OAAOU,IAAI,CAACd,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEO,EAAE,CAAC;AAClC;AACA,OAAO,SAASS,OAAOA,CAAIpB,CAAM,EAAEC,CAAI,EAAEC,CAAa,EAAEC,CAAO,EAAEC,CAAO,EAAE;EACtE,OAAOU,IAAI,CAACd,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEQ,EAAE,CAAC;AAClC;AACA,OAAO,SAASS,OAAOA,CAAIrB,CAAM,EAAEC,CAAI,EAAEC,CAAa,EAAEC,CAAO,EAAEC,CAAO,EAAE;EACtE,OAAOU,IAAI,CAACd,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAEC,CAAC,EAAES,EAAE,CAAC;AAClC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-memory/index.js b/dist/esm/plugins/storage-memory/index.js deleted file mode 100644 index ca21a88d331..00000000000 --- a/dist/esm/plugins/storage-memory/index.js +++ /dev/null @@ -1,29 +0,0 @@ -import { ensureRxStorageInstanceParamsAreCorrect } from "../../rx-storage-helper.js"; -import { RXDB_VERSION } from "../utils/utils-rxdb-version.js"; -import { createMemoryStorageInstance } from "./rx-storage-instance-memory.js"; - -/** - * Keep the state even when the storage instance is closed. - * This makes it easier to use the memory storage - * to test filesystem-like and multiInstance behaviors. 
- */ -var COLLECTION_STATES = new Map(); -export function getRxStorageMemory(settings = {}) { - var storage = { - name: 'memory', - rxdbVersion: RXDB_VERSION, - collectionStates: COLLECTION_STATES, - createStorageInstance(params) { - ensureRxStorageInstanceParamsAreCorrect(params); - var useSettings = Object.assign({}, settings, params.options); - return createMemoryStorageInstance(this, params, useSettings); - } - }; - return storage; -} -export * from "./memory-helper.js"; -export * from "./binary-search-bounds.js"; -export * from "./memory-types.js"; -export * from "./memory-indexes.js"; -export * from "./rx-storage-instance-memory.js"; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-memory/index.js.map b/dist/esm/plugins/storage-memory/index.js.map deleted file mode 100644 index 67132e5ebba..00000000000 --- a/dist/esm/plugins/storage-memory/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["ensureRxStorageInstanceParamsAreCorrect","RXDB_VERSION","createMemoryStorageInstance","COLLECTION_STATES","Map","getRxStorageMemory","settings","storage","name","rxdbVersion","collectionStates","createStorageInstance","params","useSettings","Object","assign","options"],"sources":["../../../../src/plugins/storage-memory/index.ts"],"sourcesContent":["import { ensureRxStorageInstanceParamsAreCorrect } from '../../rx-storage-helper.ts';\nimport type { RxStorageInstanceCreationParams } from '../../types/index.d.ts';\nimport { RXDB_VERSION } from '../utils/utils-rxdb-version.ts';\nimport type {\n RxStorageMemory,\n RxStorageMemoryInstanceCreationOptions,\n RxStorageMemorySettings\n} from './memory-types.ts';\nimport {\n createMemoryStorageInstance,\n RxStorageInstanceMemory\n} from './rx-storage-instance-memory.ts';\n\n/**\n * Keep the state even when the storage instance is closed.\n * This makes it easier to use the memory storage\n * to test filesystem-like and multiInstance behaviors.\n 
*/\nconst COLLECTION_STATES = new Map();\n\nexport function getRxStorageMemory(\n settings: RxStorageMemorySettings = {}\n): RxStorageMemory {\n\n const storage: RxStorageMemory = {\n name: 'memory',\n rxdbVersion: RXDB_VERSION,\n collectionStates: COLLECTION_STATES,\n createStorageInstance(\n params: RxStorageInstanceCreationParams\n ): Promise> {\n ensureRxStorageInstanceParamsAreCorrect(params);\n const useSettings = Object.assign(\n {},\n settings,\n params.options\n );\n\n\n return createMemoryStorageInstance(this, params, useSettings);\n }\n };\n\n return storage;\n}\n\n\nexport * from './memory-helper.ts';\nexport * from './binary-search-bounds.ts';\nexport * from './memory-types.ts';\nexport * from './memory-indexes.ts';\nexport * from './rx-storage-instance-memory.ts';\n"],"mappings":"AAAA,SAASA,uCAAuC,QAAQ,4BAA4B;AAEpF,SAASC,YAAY,QAAQ,gCAAgC;AAM7D,SACIC,2BAA2B,QAExB,iCAAiC;;AAExC;AACA;AACA;AACA;AACA;AACA,IAAMC,iBAAiB,GAAG,IAAIC,GAAG,CAAC,CAAC;AAEnC,OAAO,SAASC,kBAAkBA,CAC9BC,QAAiC,GAAG,CAAC,CAAC,EACvB;EAEf,IAAMC,OAAwB,GAAG;IAC7BC,IAAI,EAAE,QAAQ;IACdC,WAAW,EAAER,YAAY;IACzBS,gBAAgB,EAAEP,iBAAiB;IACnCQ,qBAAqBA,CACjBC,MAA0F,EAC/C;MAC3CZ,uCAAuC,CAACY,MAAM,CAAC;MAC/C,IAAMC,WAAW,GAAGC,MAAM,CAACC,MAAM,CAC7B,CAAC,CAAC,EACFT,QAAQ,EACRM,MAAM,CAACI,OACX,CAAC;MAGD,OAAOd,2BAA2B,CAAC,IAAI,EAAEU,MAAM,EAAEC,WAAW,CAAC;IACjE;EACJ,CAAC;EAED,OAAON,OAAO;AAClB;AAGA,cAAc,oBAAoB;AAClC,cAAc,2BAA2B;AACzC,cAAc,mBAAmB;AACjC,cAAc,qBAAqB;AACnC,cAAc,iCAAiC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-memory/memory-helper.js b/dist/esm/plugins/storage-memory/memory-helper.js deleted file mode 100644 index 344b7df8af4..00000000000 --- a/dist/esm/plugins/storage-memory/memory-helper.js +++ /dev/null @@ -1,101 +0,0 @@ -import { pushAtSortPosition } from 'array-push-at-sort-position'; -import { newRxError } from "../../rx-error.js"; -import { boundEQ } from "./binary-search-bounds.js"; -export function getMemoryCollectionKey(databaseName, collectionName, 
schemaVersion) { - return [databaseName, collectionName, schemaVersion].join('--memory--'); -} -export function ensureNotRemoved(instance) { - if (instance.internals.removed) { - throw new Error('removed'); - } -} -export function attachmentMapKey(documentId, attachmentId) { - return documentId + '||' + attachmentId; -} -function sortByIndexStringComparator(a, b) { - if (a.indexString < b.indexString) { - return -1; - } else { - return 1; - } -} - -/** - * @hotPath - */ -export function putWriteRowToState(docId, state, stateByIndex, row, docInState) { - var document = row.document; - state.documents.set(docId, document); - for (var i = 0; i < stateByIndex.length; ++i) { - var byIndex = stateByIndex[i]; - var docsWithIndex = byIndex.docsWithIndex; - var getIndexableString = byIndex.getIndexableString; - var newIndexString = getIndexableString(document); - var insertPosition = pushAtSortPosition(docsWithIndex, { - id: docId, - doc: document, - indexString: newIndexString - }, sortByIndexStringComparator, 0); - - /** - * Remove previous if it was in the state - */ - if (docInState) { - var previousIndexString = getIndexableString(docInState); - if (previousIndexString === newIndexString) { - /** - * Performance shortcut. - * If index was not changed -> The old doc must be before or after the new one. - */ - var prev = docsWithIndex[insertPosition - 1]; - if (prev && prev.id === docId) { - docsWithIndex.splice(insertPosition - 1, 1); - } else { - var next = docsWithIndex[insertPosition + 1]; - if (next.id === docId) { - docsWithIndex.splice(insertPosition + 1, 1); - } else { - throw newRxError('SNH', { - args: { - row, - byIndex - } - }); - } - } - } else { - /** - * Index changed, we must search for the old one and remove it. 
- */ - var indexBefore = boundEQ(docsWithIndex, { - indexString: previousIndexString - }, compareDocsWithIndex); - docsWithIndex.splice(indexBefore, 1); - } - } - } -} -export function removeDocFromState(primaryPath, schema, state, doc) { - var docId = doc[primaryPath]; - state.documents.delete(docId); - Object.values(state.byIndex).forEach(byIndex => { - var docsWithIndex = byIndex.docsWithIndex; - var indexString = byIndex.getIndexableString(doc); - var positionInIndex = boundEQ(docsWithIndex, { - indexString - }, compareDocsWithIndex); - docsWithIndex.splice(positionInIndex, 1); - }); -} -export function compareDocsWithIndex(a, b) { - var indexStringA = a.indexString; - var indexStringB = b.indexString; - if (indexStringA < indexStringB) { - return -1; - } else if (indexStringA === indexStringB) { - return 0; - } else { - return 1; - } -} -//# sourceMappingURL=memory-helper.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-memory/memory-helper.js.map b/dist/esm/plugins/storage-memory/memory-helper.js.map deleted file mode 100644 index 4c937c4eb33..00000000000 --- a/dist/esm/plugins/storage-memory/memory-helper.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"memory-helper.js","names":["pushAtSortPosition","newRxError","boundEQ","getMemoryCollectionKey","databaseName","collectionName","schemaVersion","join","ensureNotRemoved","instance","internals","removed","Error","attachmentMapKey","documentId","attachmentId","sortByIndexStringComparator","a","b","indexString","putWriteRowToState","docId","state","stateByIndex","row","docInState","document","documents","set","i","length","byIndex","docsWithIndex","getIndexableString","newIndexString","insertPosition","id","doc","previousIndexString","prev","splice","next","args","indexBefore","compareDocsWithIndex","removeDocFromState","primaryPath","schema","delete","Object","values","forEach","positionInIndex","indexStringA","indexStringB"],"sources":["../../../../src/plugins/storage-memory/memory-helper.ts"],"sourcesContent":["import type {\n BulkWriteRow,\n RxDocumentData,\n RxJsonSchema\n} from '../../types/index.d.ts';\nimport type {\n DocWithIndexString,\n MemoryStorageInternals,\n MemoryStorageInternalsByIndex\n} from './memory-types.ts';\nimport type { RxStorageInstanceMemory } from './rx-storage-instance-memory.ts';\nimport {\n pushAtSortPosition\n} from 'array-push-at-sort-position';\nimport { newRxError } from '../../rx-error.ts';\nimport { boundEQ } from './binary-search-bounds.ts';\n\n\nexport function getMemoryCollectionKey(\n databaseName: string,\n collectionName: string,\n schemaVersion: number\n): string {\n return [\n databaseName,\n collectionName,\n schemaVersion\n ].join('--memory--');\n}\n\n\nexport function ensureNotRemoved(\n instance: RxStorageInstanceMemory\n) {\n if (instance.internals.removed) {\n throw new Error('removed');\n }\n}\n\nexport function attachmentMapKey(documentId: string, attachmentId: string): string {\n return documentId + '||' + attachmentId;\n}\n\nfunction sortByIndexStringComparator(a: DocWithIndexString, b: DocWithIndexString) {\n if (a.indexString < b.indexString) {\n return -1;\n } else {\n return 1;\n 
}\n}\n\n\n\n/**\n * @hotPath\n */\nexport function putWriteRowToState(\n docId: string,\n state: MemoryStorageInternals,\n stateByIndex: MemoryStorageInternalsByIndex[],\n row: BulkWriteRow,\n docInState?: RxDocumentData\n) {\n const document = row.document;\n state.documents.set(docId, document as any);\n for (let i = 0; i < stateByIndex.length; ++i) {\n const byIndex = stateByIndex[i];\n const docsWithIndex = byIndex.docsWithIndex;\n const getIndexableString = byIndex.getIndexableString;\n const newIndexString = getIndexableString(document as any);\n const insertPosition = pushAtSortPosition(\n docsWithIndex,\n {\n id: docId,\n doc: document,\n indexString: newIndexString\n },\n sortByIndexStringComparator,\n 0\n );\n\n /**\n * Remove previous if it was in the state\n */\n if (docInState) {\n const previousIndexString = getIndexableString(docInState);\n if (previousIndexString === newIndexString) {\n /**\n * Performance shortcut.\n * If index was not changed -> The old doc must be before or after the new one.\n */\n const prev = docsWithIndex[insertPosition - 1];\n if (prev && prev.id === docId) {\n docsWithIndex.splice(insertPosition - 1, 1);\n } else {\n const next = docsWithIndex[insertPosition + 1];\n if (next.id === docId) {\n docsWithIndex.splice(insertPosition + 1, 1);\n } else {\n throw newRxError('SNH', {\n args: {\n row,\n byIndex\n }\n });\n }\n }\n } else {\n /**\n * Index changed, we must search for the old one and remove it.\n */\n const indexBefore = boundEQ(\n docsWithIndex,\n {\n indexString: previousIndexString\n } as any,\n compareDocsWithIndex\n );\n docsWithIndex.splice(indexBefore, 1);\n }\n }\n }\n}\n\n\nexport function removeDocFromState(\n primaryPath: string,\n schema: RxJsonSchema>,\n state: MemoryStorageInternals,\n doc: RxDocumentData\n) {\n const docId: string = (doc as any)[primaryPath];\n state.documents.delete(docId);\n\n Object.values(state.byIndex).forEach(byIndex => {\n const docsWithIndex = byIndex.docsWithIndex;\n const 
indexString = byIndex.getIndexableString(doc);\n\n const positionInIndex = boundEQ(\n docsWithIndex,\n {\n indexString\n } as any,\n compareDocsWithIndex\n );\n docsWithIndex.splice(positionInIndex, 1);\n });\n}\n\n\nexport function compareDocsWithIndex(\n a: DocWithIndexString,\n b: DocWithIndexString\n): 1 | 0 | -1 {\n const indexStringA = a.indexString;\n const indexStringB = b.indexString;\n if (indexStringA < indexStringB) {\n return -1;\n } else if (indexStringA === indexStringB) {\n return 0;\n } else {\n return 1;\n }\n}\n"],"mappings":"AAWA,SACIA,kBAAkB,QACf,6BAA6B;AACpC,SAASC,UAAU,QAAQ,mBAAmB;AAC9C,SAASC,OAAO,QAAQ,2BAA2B;AAGnD,OAAO,SAASC,sBAAsBA,CAClCC,YAAoB,EACpBC,cAAsB,EACtBC,aAAqB,EACf;EACN,OAAO,CACHF,YAAY,EACZC,cAAc,EACdC,aAAa,CAChB,CAACC,IAAI,CAAC,YAAY,CAAC;AACxB;AAGA,OAAO,SAASC,gBAAgBA,CAC5BC,QAAsC,EACxC;EACE,IAAIA,QAAQ,CAACC,SAAS,CAACC,OAAO,EAAE;IAC5B,MAAM,IAAIC,KAAK,CAAC,SAAS,CAAC;EAC9B;AACJ;AAEA,OAAO,SAASC,gBAAgBA,CAACC,UAAkB,EAAEC,YAAoB,EAAU;EAC/E,OAAOD,UAAU,GAAG,IAAI,GAAGC,YAAY;AAC3C;AAEA,SAASC,2BAA2BA,CAAYC,CAAgC,EAAEC,CAAgC,EAAE;EAChH,IAAID,CAAC,CAACE,WAAW,GAAGD,CAAC,CAACC,WAAW,EAAE;IAC/B,OAAO,CAAC,CAAC;EACb,CAAC,MAAM;IACH,OAAO,CAAC;EACZ;AACJ;;AAIA;AACA;AACA;AACA,OAAO,SAASC,kBAAkBA,CAC9BC,KAAa,EACbC,KAAwC,EACxCC,YAAwD,EACxDC,GAA4B,EAC5BC,UAAsC,EACxC;EACE,IAAMC,QAAQ,GAAGF,GAAG,CAACE,QAAQ;EAC7BJ,KAAK,CAACK,SAAS,CAACC,GAAG,CAACP,KAAK,EAAEK,QAAe,CAAC;EAC3C,KAAK,IAAIG,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGN,YAAY,CAACO,MAAM,EAAE,EAAED,CAAC,EAAE;IAC1C,IAAME,OAAO,GAAGR,YAAY,CAACM,CAAC,CAAC;IAC/B,IAAMG,aAAa,GAAGD,OAAO,CAACC,aAAa;IAC3C,IAAMC,kBAAkB,GAAGF,OAAO,CAACE,kBAAkB;IACrD,IAAMC,cAAc,GAAGD,kBAAkB,CAACP,QAAe,CAAC;IAC1D,IAAMS,cAAc,GAAGnC,kBAAkB,CACrCgC,aAAa,EACb;MACII,EAAE,EAAEf,KAAK;MACTgB,GAAG,EAAEX,QAAQ;MACbP,WAAW,EAAEe;IACjB,CAAC,EACDlB,2BAA2B,EAC3B,CACJ,CAAC;;IAED;AACR;AACA;IACQ,IAAIS,UAAU,EAAE;MACZ,IAAMa,mBAAmB,GAAGL,kBAAkB,CAACR,UAAU,CAAC;MAC1D,IAAIa,mBAAmB,KAAKJ,cAAc,EAAE;QACxC;AAChB;AACA;AACA;QACgB,IAAMK,IAAI,GAAGP,aAAa,CAACG,cAAc,GAAG,CAAC,CAAC;QAC
9C,IAAII,IAAI,IAAIA,IAAI,CAACH,EAAE,KAAKf,KAAK,EAAE;UAC3BW,aAAa,CAACQ,MAAM,CAACL,cAAc,GAAG,CAAC,EAAE,CAAC,CAAC;QAC/C,CAAC,MAAM;UACH,IAAMM,IAAI,GAAGT,aAAa,CAACG,cAAc,GAAG,CAAC,CAAC;UAC9C,IAAIM,IAAI,CAACL,EAAE,KAAKf,KAAK,EAAE;YACnBW,aAAa,CAACQ,MAAM,CAACL,cAAc,GAAG,CAAC,EAAE,CAAC,CAAC;UAC/C,CAAC,MAAM;YACH,MAAMlC,UAAU,CAAC,KAAK,EAAE;cACpByC,IAAI,EAAE;gBACFlB,GAAG;gBACHO;cACJ;YACJ,CAAC,CAAC;UACN;QACJ;MACJ,CAAC,MAAM;QACH;AAChB;AACA;QACgB,IAAMY,WAAW,GAAGzC,OAAO,CACvB8B,aAAa,EACb;UACIb,WAAW,EAAEmB;QACjB,CAAC,EACDM,oBACJ,CAAC;QACDZ,aAAa,CAACQ,MAAM,CAACG,WAAW,EAAE,CAAC,CAAC;MACxC;IACJ;EACJ;AACJ;AAGA,OAAO,SAASE,kBAAkBA,CAC9BC,WAAmB,EACnBC,MAA+C,EAC/CzB,KAAwC,EACxCe,GAA8B,EAChC;EACE,IAAMhB,KAAa,GAAIgB,GAAG,CAASS,WAAW,CAAC;EAC/CxB,KAAK,CAACK,SAAS,CAACqB,MAAM,CAAC3B,KAAK,CAAC;EAE7B4B,MAAM,CAACC,MAAM,CAAC5B,KAAK,CAACS,OAAO,CAAC,CAACoB,OAAO,CAACpB,OAAO,IAAI;IAC5C,IAAMC,aAAa,GAAGD,OAAO,CAACC,aAAa;IAC3C,IAAMb,WAAW,GAAGY,OAAO,CAACE,kBAAkB,CAACI,GAAG,CAAC;IAEnD,IAAMe,eAAe,GAAGlD,OAAO,CAC3B8B,aAAa,EACb;MACIb;IACJ,CAAC,EACDyB,oBACJ,CAAC;IACDZ,aAAa,CAACQ,MAAM,CAACY,eAAe,EAAE,CAAC,CAAC;EAC5C,CAAC,CAAC;AACN;AAGA,OAAO,SAASR,oBAAoBA,CAChC3B,CAAgC,EAChCC,CAAgC,EACtB;EACV,IAAMmC,YAAY,GAAGpC,CAAC,CAACE,WAAW;EAClC,IAAMmC,YAAY,GAAGpC,CAAC,CAACC,WAAW;EAClC,IAAIkC,YAAY,GAAGC,YAAY,EAAE;IAC7B,OAAO,CAAC,CAAC;EACb,CAAC,MAAM,IAAID,YAAY,KAAKC,YAAY,EAAE;IACtC,OAAO,CAAC;EACZ,CAAC,MAAM;IACH,OAAO,CAAC;EACZ;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-memory/memory-indexes.js b/dist/esm/plugins/storage-memory/memory-indexes.js deleted file mode 100644 index a7a4f1d358b..00000000000 --- a/dist/esm/plugins/storage-memory/memory-indexes.js +++ /dev/null @@ -1,21 +0,0 @@ -import { getIndexableStringMonad } from "../../custom-index.js"; -import { getPrimaryFieldOfPrimaryKey } from "../../rx-schema-helper.js"; -import { toArray } from "../../plugins/utils/index.js"; -export function addIndexesToInternalsState(state, schema) { - var primaryPath = 
getPrimaryFieldOfPrimaryKey(schema.primaryKey); - var useIndexes = !schema.indexes ? [] : schema.indexes.map(row => toArray(row)); - - // we need this index for running cleanup() - useIndexes.push(['_deleted', '_meta.lwt', primaryPath]); - useIndexes.forEach(indexAr => { - state.byIndex[getMemoryIndexName(indexAr)] = { - index: indexAr, - docsWithIndex: [], - getIndexableString: getIndexableStringMonad(schema, indexAr) - }; - }); -} -export function getMemoryIndexName(index) { - return index.join(','); -} -//# sourceMappingURL=memory-indexes.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-memory/memory-indexes.js.map b/dist/esm/plugins/storage-memory/memory-indexes.js.map deleted file mode 100644 index 315b0708709..00000000000 --- a/dist/esm/plugins/storage-memory/memory-indexes.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"memory-indexes.js","names":["getIndexableStringMonad","getPrimaryFieldOfPrimaryKey","toArray","addIndexesToInternalsState","state","schema","primaryPath","primaryKey","useIndexes","indexes","map","row","push","forEach","indexAr","byIndex","getMemoryIndexName","index","docsWithIndex","getIndexableString","join"],"sources":["../../../../src/plugins/storage-memory/memory-indexes.ts"],"sourcesContent":["import { getIndexableStringMonad } from '../../custom-index.ts';\nimport { getPrimaryFieldOfPrimaryKey } from '../../rx-schema-helper.ts';\nimport type { RxDocumentData, RxJsonSchema } from '../../types/index.d.ts';\nimport { toArray } from '../../plugins/utils/index.ts';\nimport type { MemoryStorageInternals } from './memory-types.ts';\n\nexport function addIndexesToInternalsState(\n state: MemoryStorageInternals,\n schema: RxJsonSchema>\n) {\n const primaryPath = getPrimaryFieldOfPrimaryKey(schema.primaryKey);\n const useIndexes: string[][] = !schema.indexes ? 
[] : schema.indexes.map(row => toArray(row)) as any;\n\n // we need this index for running cleanup()\n useIndexes.push([\n '_deleted',\n '_meta.lwt',\n primaryPath\n ]);\n\n\n useIndexes.forEach(indexAr => {\n state.byIndex[getMemoryIndexName(indexAr)] = {\n index: indexAr,\n docsWithIndex: [],\n getIndexableString: getIndexableStringMonad(schema, indexAr)\n };\n });\n}\n\n\nexport function getMemoryIndexName(index: string[]): string {\n return index.join(',');\n}\n"],"mappings":"AAAA,SAASA,uBAAuB,QAAQ,uBAAuB;AAC/D,SAASC,2BAA2B,QAAQ,2BAA2B;AAEvE,SAASC,OAAO,QAAQ,8BAA8B;AAGtD,OAAO,SAASC,0BAA0BA,CACtCC,KAAwC,EACxCC,MAA+C,EACjD;EACE,IAAMC,WAAW,GAAGL,2BAA2B,CAACI,MAAM,CAACE,UAAU,CAAC;EAClE,IAAMC,UAAsB,GAAG,CAACH,MAAM,CAACI,OAAO,GAAG,EAAE,GAAGJ,MAAM,CAACI,OAAO,CAACC,GAAG,CAACC,GAAG,IAAIT,OAAO,CAACS,GAAG,CAAC,CAAQ;;EAEpG;EACAH,UAAU,CAACI,IAAI,CAAC,CACZ,UAAU,EACV,WAAW,EACXN,WAAW,CACd,CAAC;EAGFE,UAAU,CAACK,OAAO,CAACC,OAAO,IAAI;IAC1BV,KAAK,CAACW,OAAO,CAACC,kBAAkB,CAACF,OAAO,CAAC,CAAC,GAAG;MACzCG,KAAK,EAAEH,OAAO;MACdI,aAAa,EAAE,EAAE;MACjBC,kBAAkB,EAAEnB,uBAAuB,CAACK,MAAM,EAAES,OAAO;IAC/D,CAAC;EACL,CAAC,CAAC;AACN;AAGA,OAAO,SAASE,kBAAkBA,CAACC,KAAe,EAAU;EACxD,OAAOA,KAAK,CAACG,IAAI,CAAC,GAAG,CAAC;AAC1B","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-memory/memory-types.js b/dist/esm/plugins/storage-memory/memory-types.js deleted file mode 100644 index b185e73d64b..00000000000 --- a/dist/esm/plugins/storage-memory/memory-types.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=memory-types.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-memory/memory-types.js.map b/dist/esm/plugins/storage-memory/memory-types.js.map deleted file mode 100644 index 4520f15daa1..00000000000 --- a/dist/esm/plugins/storage-memory/memory-types.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"memory-types.js","names":[],"sources":["../../../../src/plugins/storage-memory/memory-types.ts"],"sourcesContent":["import { Subject } from 'rxjs';\nimport type {\n CategorizeBulkWriteRowsOutput,\n EventBulk,\n RxAttachmentWriteData,\n RxConflictResultionTask,\n RxDocumentData,\n RxJsonSchema,\n RxStorage,\n RxStorageChangeEvent,\n RxStorageDefaultCheckpoint\n} from '../../types/index.d.ts';\n\nexport type RxStorageMemorySettings = {};\nexport type RxStorageMemoryInstanceCreationOptions = {};\nexport type RxStorageMemory = RxStorage, RxStorageMemoryInstanceCreationOptions> & {\n /**\n * State by collectionKey\n */\n collectionStates: Map>;\n};\n\nexport type MemoryStorageInternalsByIndex = {\n index: string[];\n docsWithIndex: DocWithIndexString[];\n getIndexableString: (docData: RxDocumentData) => string;\n};\n\n/**\n * The internals are shared between multiple storage instances\n * that have been created with the same [databaseName+collectionName] combination.\n */\nexport type MemoryStorageInternals = {\n // used to debug stuff and identify instances\n id: string;\n\n /**\n * Schema of the first instance created with the given settings.\n * Used to ensure that the same storage is not re-created with\n * a different schema.\n */\n schema: RxJsonSchema>;\n\n /**\n * We reuse the memory state when multiple instances\n * are created with the same params.\n * If refCount becomes 0, we can delete the state.\n */\n refCount: number;\n /**\n * If this becomes true,\n * it means that an instance has called remove()\n * so all other instances should also not work anymore.\n */\n removed: boolean;\n documents: Map>;\n /**\n * Attachments data, indexed by a combined string\n * consisting of [documentId + '||' + attachmentId]\n */\n attachments: Map;\n byIndex: {\n /**\n * Because RxDB requires a deterministic sorting\n * on all indexes, we can be sure that the composed index key\n * of each document is unique, because it contains the primaryKey\n * as 
last index part.\n * So we do not have to store the index-position when we want to do fast\n * writes. Instead we can do a binary search over the existing array\n * because RxDB also knows the previous state of the document when we do a bulkWrite().\n */\n [indexName: string]: MemoryStorageInternalsByIndex;\n };\n\n /**\n * We need these to do lazy writes.\n */\n ensurePersistenceTask?: CategorizeBulkWriteRowsOutput;\n ensurePersistenceIdlePromise?: Promise;\n\n /**\n * To easier test the conflict resolution,\n * the memory storage exposes the conflict resolution task subject\n * so that we can inject own tasks during tests.\n */\n conflictResultionTasks$: Subject>;\n changes$: Subject>, RxStorageDefaultCheckpoint>>;\n};\n\nexport type DocWithIndexString = {\n id: string;\n doc: RxDocumentData;\n indexString: string;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-memory/rx-storage-instance-memory.js b/dist/esm/plugins/storage-memory/rx-storage-instance-memory.js deleted file mode 100644 index 8c63c5d74f6..00000000000 --- a/dist/esm/plugins/storage-memory/rx-storage-instance-memory.js +++ /dev/null @@ -1,334 +0,0 @@ -import { Subject } from 'rxjs'; -import { getStartIndexStringFromLowerBound, getStartIndexStringFromUpperBound } from "../../custom-index.js"; -import { getPrimaryFieldOfPrimaryKey } from "../../rx-schema-helper.js"; -import { categorizeBulkWriteRows } from "../../rx-storage-helper.js"; -import { deepEqual, ensureNotFalsy, now, PROMISE_RESOLVE_TRUE, PROMISE_RESOLVE_VOID, randomCouchString, requestIdlePromiseNoQueue } from "../../plugins/utils/index.js"; -import { boundGE, boundGT, boundLE, boundLT } from "./binary-search-bounds.js"; -import { attachmentMapKey, compareDocsWithIndex, ensureNotRemoved, getMemoryCollectionKey, putWriteRowToState, removeDocFromState } from "./memory-helper.js"; -import { addIndexesToInternalsState, getMemoryIndexName } from "./memory-indexes.js"; -import { 
getQueryMatcher, getSortComparator } from "../../rx-query-helper.js"; - -/** - * Used in tests to ensure everything - * is closed correctly - */ -export var OPEN_MEMORY_INSTANCES = new Set(); -export var RxStorageInstanceMemory = /*#__PURE__*/function () { - /** - * Used by some plugins and storage wrappers - * to find out details about the internals of a write operation. - * For example if you want to know which documents really have been replaced - * or newly inserted. - */ - - function RxStorageInstanceMemory(storage, databaseName, collectionName, schema, internals, options, settings, devMode) { - this.closed = false; - this.categorizedByWriteInput = new WeakMap(); - this.storage = storage; - this.databaseName = databaseName; - this.collectionName = collectionName; - this.schema = schema; - this.internals = internals; - this.options = options; - this.settings = settings; - this.devMode = devMode; - OPEN_MEMORY_INSTANCES.add(this); - this.primaryPath = getPrimaryFieldOfPrimaryKey(this.schema.primaryKey); - } - var _proto = RxStorageInstanceMemory.prototype; - _proto.bulkWrite = function bulkWrite(documentWrites, context) { - this.ensurePersistence(); - ensureNotRemoved(this); - var internals = this.internals; - var documentsById = this.internals.documents; - var primaryPath = this.primaryPath; - var categorized = categorizeBulkWriteRows(this, primaryPath, documentsById, documentWrites, context); - var error = categorized.errors; - var success = new Array(categorized.bulkInsertDocs.length); - /** - * @performance - * We have to return a Promise but we do not want to wait - * one tick, so we directly create the promise - * which makes it likely to be already resolved later. 
- */ - var awaitMe = Promise.resolve({ - success, - error - }); - var bulkInsertDocs = categorized.bulkInsertDocs; - for (var i = 0; i < bulkInsertDocs.length; ++i) { - var writeRow = bulkInsertDocs[i]; - var doc = writeRow.document; - success[i] = doc; - } - var bulkUpdateDocs = categorized.bulkUpdateDocs; - for (var _i = 0; _i < bulkUpdateDocs.length; ++_i) { - var _writeRow = bulkUpdateDocs[_i]; - var _doc = _writeRow.document; - success.push(_doc); - } - this.categorizedByWriteInput.set(documentWrites, categorized); - this.internals.ensurePersistenceTask = categorized; - if (!this.internals.ensurePersistenceIdlePromise) { - this.internals.ensurePersistenceIdlePromise = requestIdlePromiseNoQueue().then(() => { - this.internals.ensurePersistenceIdlePromise = undefined; - this.ensurePersistence(); - }); - } - - /** - * Important: The events must be emitted AFTER the persistence - * task has been added. - */ - if (categorized.eventBulk.events.length > 0) { - var lastState = ensureNotFalsy(categorized.newestRow).document; - categorized.eventBulk.checkpoint = { - id: lastState[primaryPath], - lwt: lastState._meta.lwt - }; - categorized.eventBulk.endTime = now(); - internals.changes$.next(categorized.eventBulk); - } - return awaitMe; - } - - /** - * Instead of directly inserting the documents into all indexes, - * we do it lazy in the background. This gives the application time - * to directly work with the write-result and to do stuff like rendering DOM - * notes and processing RxDB queries. - * Then in some later time, or just before the next read/write, - * it is ensured that the indexes have been written. 
- */; - _proto.ensurePersistence = function ensurePersistence() { - if (!this.internals.ensurePersistenceTask) { - return; - } - var internals = this.internals; - var documentsById = this.internals.documents; - var primaryPath = this.primaryPath; - var categorized = this.internals.ensurePersistenceTask; - this.internals.ensurePersistenceTask = undefined; - - /** - * Do inserts/updates - */ - var stateByIndex = Object.values(this.internals.byIndex); - var bulkInsertDocs = categorized.bulkInsertDocs; - for (var i = 0; i < bulkInsertDocs.length; ++i) { - var writeRow = bulkInsertDocs[i]; - var doc = writeRow.document; - var docId = doc[primaryPath]; - putWriteRowToState(docId, internals, stateByIndex, writeRow, undefined); - } - var bulkUpdateDocs = categorized.bulkUpdateDocs; - for (var _i2 = 0; _i2 < bulkUpdateDocs.length; ++_i2) { - var _writeRow2 = bulkUpdateDocs[_i2]; - var _doc2 = _writeRow2.document; - var _docId = _doc2[primaryPath]; - putWriteRowToState(_docId, internals, stateByIndex, _writeRow2, documentsById.get(_docId)); - } - - /** - * Handle attachments - */ - if (this.schema.attachments) { - var attachmentsMap = internals.attachments; - categorized.attachmentsAdd.forEach(attachment => { - attachmentsMap.set(attachmentMapKey(attachment.documentId, attachment.attachmentId), { - writeData: attachment.attachmentData, - digest: attachment.digest - }); - }); - if (this.schema.attachments) { - categorized.attachmentsUpdate.forEach(attachment => { - attachmentsMap.set(attachmentMapKey(attachment.documentId, attachment.attachmentId), { - writeData: attachment.attachmentData, - digest: attachment.digest - }); - }); - categorized.attachmentsRemove.forEach(attachment => { - attachmentsMap.delete(attachmentMapKey(attachment.documentId, attachment.attachmentId)); - }); - } - } - }; - _proto.findDocumentsById = function findDocumentsById(docIds, withDeleted) { - this.ensurePersistence(); - var documentsById = this.internals.documents; - var ret = []; - if 
(documentsById.size === 0) { - return Promise.resolve(ret); - } - for (var i = 0; i < docIds.length; ++i) { - var docId = docIds[i]; - var docInDb = documentsById.get(docId); - if (docInDb && (!docInDb._deleted || withDeleted)) { - ret.push(docInDb); - } - } - return Promise.resolve(ret); - }; - _proto.query = function query(preparedQuery) { - this.ensurePersistence(); - var queryPlan = preparedQuery.queryPlan; - var query = preparedQuery.query; - var skip = query.skip ? query.skip : 0; - var limit = query.limit ? query.limit : Infinity; - var skipPlusLimit = skip + limit; - var queryMatcher = false; - if (!queryPlan.selectorSatisfiedByIndex) { - queryMatcher = getQueryMatcher(this.schema, preparedQuery.query); - } - var queryPlanFields = queryPlan.index; - var mustManuallyResort = !queryPlan.sortSatisfiedByIndex; - var index = queryPlanFields; - var lowerBound = queryPlan.startKeys; - var lowerBoundString = getStartIndexStringFromLowerBound(this.schema, index, lowerBound); - var upperBound = queryPlan.endKeys; - upperBound = upperBound; - var upperBoundString = getStartIndexStringFromUpperBound(this.schema, index, upperBound); - var indexName = getMemoryIndexName(index); - if (!this.internals.byIndex[indexName]) { - throw new Error('index does not exist ' + indexName); - } - var docsWithIndex = this.internals.byIndex[indexName].docsWithIndex; - var indexOfLower = (queryPlan.inclusiveStart ? boundGE : boundGT)(docsWithIndex, { - indexString: lowerBoundString - }, compareDocsWithIndex); - var indexOfUpper = (queryPlan.inclusiveEnd ? 
boundLE : boundLT)(docsWithIndex, { - indexString: upperBoundString - }, compareDocsWithIndex); - var rows = []; - var done = false; - while (!done) { - var currentRow = docsWithIndex[indexOfLower]; - if (!currentRow || indexOfLower > indexOfUpper) { - break; - } - var currentDoc = currentRow.doc; - if (!queryMatcher || queryMatcher(currentDoc)) { - rows.push(currentDoc); - } - if (rows.length >= skipPlusLimit && !mustManuallyResort) { - done = true; - } - indexOfLower++; - } - if (mustManuallyResort) { - var sortComparator = getSortComparator(this.schema, preparedQuery.query); - rows = rows.sort(sortComparator); - } - - // apply skip and limit boundaries. - rows = rows.slice(skip, skipPlusLimit); - return Promise.resolve({ - documents: rows - }); - }; - _proto.count = async function count(preparedQuery) { - this.ensurePersistence(); - var result = await this.query(preparedQuery); - return { - count: result.documents.length, - mode: 'fast' - }; - }; - _proto.cleanup = function cleanup(minimumDeletedTime) { - this.ensurePersistence(); - var maxDeletionTime = now() - minimumDeletedTime; - var index = ['_deleted', '_meta.lwt', this.primaryPath]; - var indexName = getMemoryIndexName(index); - var docsWithIndex = this.internals.byIndex[indexName].docsWithIndex; - var lowerBoundString = getStartIndexStringFromLowerBound(this.schema, index, [true, 0, '']); - var indexOfLower = boundGT(docsWithIndex, { - indexString: lowerBoundString - }, compareDocsWithIndex); - var done = false; - while (!done) { - var currentDoc = docsWithIndex[indexOfLower]; - if (!currentDoc || currentDoc.doc._meta.lwt > maxDeletionTime) { - done = true; - } else { - removeDocFromState(this.primaryPath, this.schema, this.internals, currentDoc.doc); - indexOfLower++; - } - } - return PROMISE_RESOLVE_TRUE; - }; - _proto.getAttachmentData = function getAttachmentData(documentId, attachmentId, digest) { - this.ensurePersistence(); - ensureNotRemoved(this); - var key = attachmentMapKey(documentId, 
attachmentId); - var data = this.internals.attachments.get(key); - if (!digest || !data || data.digest !== digest) { - throw new Error('attachment does not exist: ' + key); - } - return Promise.resolve(data.writeData.data); - }; - _proto.changeStream = function changeStream() { - ensureNotRemoved(this); - return this.internals.changes$.asObservable(); - }; - _proto.remove = async function remove() { - if (this.closed) { - throw new Error('closed'); - } - this.ensurePersistence(); - ensureNotRemoved(this); - this.internals.removed = true; - this.storage.collectionStates.delete(getMemoryCollectionKey(this.databaseName, this.collectionName, this.schema.version)); - await this.close(); - }; - _proto.close = function close() { - OPEN_MEMORY_INSTANCES.delete(this); - this.ensurePersistence(); - if (this.closed) { - return PROMISE_RESOLVE_VOID; - } - this.closed = true; - this.internals.refCount = this.internals.refCount - 1; - return PROMISE_RESOLVE_VOID; - }; - _proto.conflictResultionTasks = function conflictResultionTasks() { - return this.internals.conflictResultionTasks$.asObservable(); - }; - _proto.resolveConflictResultionTask = function resolveConflictResultionTask(_taskSolution) { - return PROMISE_RESOLVE_VOID; - }; - return RxStorageInstanceMemory; -}(); -export function createMemoryStorageInstance(storage, params, settings) { - var collectionKey = getMemoryCollectionKey(params.databaseName, params.collectionName, params.schema.version); - var internals = storage.collectionStates.get(collectionKey); - if (!internals) { - internals = { - id: randomCouchString(5), - schema: params.schema, - removed: false, - refCount: 1, - documents: new Map(), - attachments: params.schema.attachments ? 
new Map() : undefined, - byIndex: {}, - conflictResultionTasks$: new Subject(), - changes$: new Subject() - }; - addIndexesToInternalsState(internals, params.schema); - storage.collectionStates.set(collectionKey, internals); - } else { - /** - * Ensure that the storage was not already - * created with a different schema. - * This is very important because if this check - * does not exist here, we have hard-to-debug problems - * downstream. - */ - if (params.devMode && !deepEqual(internals.schema, params.schema)) { - throw new Error('storage was already created with a different schema'); - } - internals.refCount = internals.refCount + 1; - } - var instance = new RxStorageInstanceMemory(storage, params.databaseName, params.collectionName, params.schema, internals, params.options, settings, params.devMode); - return Promise.resolve(instance); -} -//# sourceMappingURL=rx-storage-instance-memory.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-memory/rx-storage-instance-memory.js.map b/dist/esm/plugins/storage-memory/rx-storage-instance-memory.js.map deleted file mode 100644 index 76d6fa4c26b..00000000000 --- a/dist/esm/plugins/storage-memory/rx-storage-instance-memory.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"rx-storage-instance-memory.js","names":["Subject","getStartIndexStringFromLowerBound","getStartIndexStringFromUpperBound","getPrimaryFieldOfPrimaryKey","categorizeBulkWriteRows","deepEqual","ensureNotFalsy","now","PROMISE_RESOLVE_TRUE","PROMISE_RESOLVE_VOID","randomCouchString","requestIdlePromiseNoQueue","boundGE","boundGT","boundLE","boundLT","attachmentMapKey","compareDocsWithIndex","ensureNotRemoved","getMemoryCollectionKey","putWriteRowToState","removeDocFromState","addIndexesToInternalsState","getMemoryIndexName","getQueryMatcher","getSortComparator","OPEN_MEMORY_INSTANCES","Set","RxStorageInstanceMemory","storage","databaseName","collectionName","schema","internals","options","settings","devMode","closed","categorizedByWriteInput","WeakMap","add","primaryPath","primaryKey","_proto","prototype","bulkWrite","documentWrites","context","ensurePersistence","documentsById","documents","categorized","error","errors","success","Array","bulkInsertDocs","length","awaitMe","Promise","resolve","i","writeRow","doc","document","bulkUpdateDocs","push","set","ensurePersistenceTask","ensurePersistenceIdlePromise","then","undefined","eventBulk","events","lastState","newestRow","checkpoint","id","lwt","_meta","endTime","changes$","next","stateByIndex","Object","values","byIndex","docId","get","attachments","attachmentsMap","attachmentsAdd","forEach","attachment","documentId","attachmentId","writeData","attachmentData","digest","attachmentsUpdate","attachmentsRemove","delete","findDocumentsById","docIds","withDeleted","ret","size","docInDb","_deleted","query","preparedQuery","queryPlan","skip","limit","Infinity","skipPlusLimit","queryMatcher","selectorSatisfiedByIndex","queryPlanFields","index","mustManuallyResort","sortSatisfiedByIndex","lowerBound","startKeys","lowerBoundString","upperBound","endKeys","upperBoundString","indexName","Error","docsWithIndex","indexOfLower","inclusiveStart","indexString","indexOfUpper","inclusiveEnd","rows","done","currentRow
","currentDoc","sortComparator","sort","slice","count","result","mode","cleanup","minimumDeletedTime","maxDeletionTime","getAttachmentData","key","data","changeStream","asObservable","remove","removed","collectionStates","version","close","refCount","conflictResultionTasks","conflictResultionTasks$","resolveConflictResultionTask","_taskSolution","createMemoryStorageInstance","params","collectionKey","Map","instance"],"sources":["../../../../src/plugins/storage-memory/rx-storage-instance-memory.ts"],"sourcesContent":["import {\n Observable,\n Subject\n} from 'rxjs';\nimport {\n getStartIndexStringFromLowerBound,\n getStartIndexStringFromUpperBound\n} from '../../custom-index.ts';\nimport { getPrimaryFieldOfPrimaryKey } from '../../rx-schema-helper.ts';\nimport {\n categorizeBulkWriteRows\n} from '../../rx-storage-helper.ts';\nimport type {\n BulkWriteRow,\n CategorizeBulkWriteRowsOutput,\n EventBulk,\n PreparedQuery,\n QueryMatcher,\n RxConflictResultionTask,\n RxConflictResultionTaskSolution,\n RxDocumentData,\n RxJsonSchema,\n RxStorageBulkWriteResponse,\n RxStorageChangeEvent,\n RxStorageCountResult,\n RxStorageDefaultCheckpoint,\n RxStorageInstance,\n RxStorageInstanceCreationParams,\n RxStorageQueryResult,\n StringKeys\n} from '../../types/index.d.ts';\nimport {\n deepEqual,\n ensureNotFalsy,\n now,\n PROMISE_RESOLVE_TRUE,\n PROMISE_RESOLVE_VOID,\n promiseWait,\n randomCouchString,\n requestIdlePromiseNoQueue\n} from '../../plugins/utils/index.ts';\nimport {\n boundGE,\n boundGT,\n boundLE,\n boundLT\n} from './binary-search-bounds.ts';\nimport {\n attachmentMapKey,\n compareDocsWithIndex,\n ensureNotRemoved,\n getMemoryCollectionKey,\n putWriteRowToState,\n removeDocFromState\n} from './memory-helper.ts';\nimport {\n addIndexesToInternalsState,\n getMemoryIndexName\n} from './memory-indexes.ts';\nimport type {\n MemoryStorageInternals,\n RxStorageMemory,\n RxStorageMemoryInstanceCreationOptions,\n RxStorageMemorySettings\n} from './memory-types.ts';\nimport { 
getQueryMatcher, getSortComparator } from '../../rx-query-helper.ts';\n\n/**\n * Used in tests to ensure everything\n * is closed correctly\n */\nexport const OPEN_MEMORY_INSTANCES = new Set>();\n\nexport class RxStorageInstanceMemory implements RxStorageInstance<\n RxDocType,\n MemoryStorageInternals,\n RxStorageMemoryInstanceCreationOptions,\n RxStorageDefaultCheckpoint\n> {\n\n public readonly primaryPath: StringKeys>;\n public closed = false;\n\n /**\n * Used by some plugins and storage wrappers\n * to find out details about the internals of a write operation.\n * For example if you want to know which documents really have been replaced\n * or newly inserted.\n */\n public categorizedByWriteInput = new WeakMap[], CategorizeBulkWriteRowsOutput>();\n\n constructor(\n public readonly storage: RxStorageMemory,\n public readonly databaseName: string,\n public readonly collectionName: string,\n public readonly schema: Readonly>>,\n public readonly internals: MemoryStorageInternals,\n public readonly options: Readonly,\n public readonly settings: RxStorageMemorySettings,\n public readonly devMode: boolean\n ) {\n OPEN_MEMORY_INSTANCES.add(this);\n this.primaryPath = getPrimaryFieldOfPrimaryKey(this.schema.primaryKey);\n }\n\n bulkWrite(\n documentWrites: BulkWriteRow[],\n context: string\n ): Promise> {\n this.ensurePersistence();\n ensureNotRemoved(this);\n const internals = this.internals;\n const documentsById = this.internals.documents;\n const primaryPath = this.primaryPath;\n\n\n const categorized = categorizeBulkWriteRows(\n this,\n primaryPath as any,\n documentsById,\n documentWrites,\n context\n );\n const error = categorized.errors;\n let success: RxDocumentData[] = new Array(categorized.bulkInsertDocs.length);\n /**\n * @performance\n * We have to return a Promise but we do not want to wait\n * one tick, so we directly create the promise\n * which makes it likely to be already resolved later.\n */\n const awaitMe = Promise.resolve({ success, error });\n\n 
const bulkInsertDocs = categorized.bulkInsertDocs;\n for (let i = 0; i < bulkInsertDocs.length; ++i) {\n const writeRow = bulkInsertDocs[i];\n const doc = writeRow.document;\n success[i] = doc;\n }\n const bulkUpdateDocs = categorized.bulkUpdateDocs;\n for (let i = 0; i < bulkUpdateDocs.length; ++i) {\n const writeRow = bulkUpdateDocs[i];\n const doc = writeRow.document;\n success.push(doc);\n }\n\n this.categorizedByWriteInput.set(documentWrites, categorized);\n this.internals.ensurePersistenceTask = categorized;\n\n if (!this.internals.ensurePersistenceIdlePromise) {\n this.internals.ensurePersistenceIdlePromise = requestIdlePromiseNoQueue().then(() => {\n this.internals.ensurePersistenceIdlePromise = undefined;\n this.ensurePersistence();\n });\n }\n\n /**\n * Important: The events must be emitted AFTER the persistence\n * task has been added.\n */\n if (categorized.eventBulk.events.length > 0) {\n const lastState = ensureNotFalsy(categorized.newestRow).document;\n categorized.eventBulk.checkpoint = {\n id: lastState[primaryPath],\n lwt: lastState._meta.lwt\n };\n categorized.eventBulk.endTime = now();\n internals.changes$.next(categorized.eventBulk);\n }\n return awaitMe;\n }\n\n /**\n * Instead of directly inserting the documents into all indexes,\n * we do it lazy in the background. 
This gives the application time\n * to directly work with the write-result and to do stuff like rendering DOM\n * notes and processing RxDB queries.\n * Then in some later time, or just before the next read/write,\n * it is ensured that the indexes have been written.\n */\n public ensurePersistence() {\n if (\n !this.internals.ensurePersistenceTask\n ) {\n return;\n }\n const internals = this.internals;\n const documentsById = this.internals.documents;\n const primaryPath = this.primaryPath;\n\n const categorized = this.internals.ensurePersistenceTask;\n this.internals.ensurePersistenceTask = undefined;\n\n /**\n * Do inserts/updates\n */\n const stateByIndex = Object.values(this.internals.byIndex);\n\n const bulkInsertDocs = categorized.bulkInsertDocs;\n for (let i = 0; i < bulkInsertDocs.length; ++i) {\n const writeRow = bulkInsertDocs[i];\n const doc = writeRow.document;\n const docId = doc[primaryPath];\n putWriteRowToState(\n docId as any,\n internals,\n stateByIndex,\n writeRow,\n undefined\n );\n }\n\n const bulkUpdateDocs = categorized.bulkUpdateDocs;\n for (let i = 0; i < bulkUpdateDocs.length; ++i) {\n const writeRow = bulkUpdateDocs[i];\n const doc = writeRow.document;\n const docId = doc[primaryPath];\n putWriteRowToState(\n docId as any,\n internals,\n stateByIndex,\n writeRow,\n documentsById.get(docId as any)\n );\n }\n\n /**\n * Handle attachments\n */\n if (this.schema.attachments) {\n const attachmentsMap = internals.attachments;\n categorized.attachmentsAdd.forEach(attachment => {\n attachmentsMap.set(\n attachmentMapKey(attachment.documentId, attachment.attachmentId),\n {\n writeData: attachment.attachmentData,\n digest: attachment.digest\n }\n );\n });\n if (this.schema.attachments) {\n categorized.attachmentsUpdate.forEach(attachment => {\n attachmentsMap.set(\n attachmentMapKey(attachment.documentId, attachment.attachmentId),\n {\n writeData: attachment.attachmentData,\n digest: attachment.digest\n }\n );\n });\n 
categorized.attachmentsRemove.forEach(attachment => {\n attachmentsMap.delete(\n attachmentMapKey(attachment.documentId, attachment.attachmentId)\n );\n });\n }\n }\n }\n\n findDocumentsById(\n docIds: string[],\n withDeleted: boolean\n ): Promise[]> {\n this.ensurePersistence();\n const documentsById = this.internals.documents;\n const ret: RxDocumentData[] = [];\n if (documentsById.size === 0) {\n return Promise.resolve(ret);\n }\n for (let i = 0; i < docIds.length; ++i) {\n const docId = docIds[i];\n const docInDb = documentsById.get(docId);\n if (\n docInDb &&\n (\n !docInDb._deleted ||\n withDeleted\n )\n ) {\n ret.push(docInDb);\n }\n }\n return Promise.resolve(ret);\n }\n\n query(\n preparedQuery: PreparedQuery\n ): Promise> {\n this.ensurePersistence();\n\n const queryPlan = preparedQuery.queryPlan;\n const query = preparedQuery.query;\n\n const skip = query.skip ? query.skip : 0;\n const limit = query.limit ? query.limit : Infinity;\n const skipPlusLimit = skip + limit;\n\n let queryMatcher: QueryMatcher> | false = false;\n if (!queryPlan.selectorSatisfiedByIndex) {\n queryMatcher = getQueryMatcher(\n this.schema,\n preparedQuery.query\n );\n }\n\n const queryPlanFields: string[] = queryPlan.index;\n const mustManuallyResort = !queryPlan.sortSatisfiedByIndex;\n const index: string[] | undefined = queryPlanFields;\n const lowerBound: any[] = queryPlan.startKeys;\n const lowerBoundString = getStartIndexStringFromLowerBound(\n this.schema,\n index,\n lowerBound\n );\n\n let upperBound: any[] = queryPlan.endKeys;\n upperBound = upperBound;\n const upperBoundString = getStartIndexStringFromUpperBound(\n this.schema,\n index,\n upperBound\n );\n const indexName = getMemoryIndexName(index);\n\n if (!this.internals.byIndex[indexName]) {\n throw new Error('index does not exist ' + indexName);\n }\n const docsWithIndex = this.internals.byIndex[indexName].docsWithIndex;\n\n\n\n let indexOfLower = (queryPlan.inclusiveStart ? 
boundGE : boundGT)(\n docsWithIndex,\n {\n indexString: lowerBoundString\n } as any,\n compareDocsWithIndex\n );\n\n const indexOfUpper = (queryPlan.inclusiveEnd ? boundLE : boundLT)(\n docsWithIndex,\n {\n indexString: upperBoundString\n } as any,\n compareDocsWithIndex\n );\n\n let rows: RxDocumentData[] = [];\n let done = false;\n while (!done) {\n const currentRow = docsWithIndex[indexOfLower];\n if (\n !currentRow ||\n indexOfLower > indexOfUpper\n ) {\n break;\n }\n const currentDoc = currentRow.doc;\n\n if (!queryMatcher || queryMatcher(currentDoc)) {\n rows.push(currentDoc);\n }\n\n if (\n (rows.length >= skipPlusLimit && !mustManuallyResort)\n ) {\n done = true;\n }\n\n indexOfLower++;\n }\n\n if (mustManuallyResort) {\n const sortComparator = getSortComparator(this.schema, preparedQuery.query);\n rows = rows.sort(sortComparator);\n }\n\n // apply skip and limit boundaries.\n rows = rows.slice(skip, skipPlusLimit);\n return Promise.resolve({\n documents: rows\n });\n }\n\n async count(\n preparedQuery: PreparedQuery\n ): Promise {\n this.ensurePersistence();\n const result = await this.query(preparedQuery);\n return {\n count: result.documents.length,\n mode: 'fast'\n };\n }\n\n cleanup(minimumDeletedTime: number): Promise {\n this.ensurePersistence();\n const maxDeletionTime = now() - minimumDeletedTime;\n const index = ['_deleted', '_meta.lwt', this.primaryPath as any];\n const indexName = getMemoryIndexName(index);\n const docsWithIndex = this.internals.byIndex[indexName].docsWithIndex;\n\n const lowerBoundString = getStartIndexStringFromLowerBound(\n this.schema,\n index,\n [\n true,\n 0,\n ''\n ]\n );\n\n let indexOfLower = boundGT(\n docsWithIndex,\n {\n indexString: lowerBoundString\n } as any,\n compareDocsWithIndex\n );\n\n let done = false;\n while (!done) {\n const currentDoc = docsWithIndex[indexOfLower];\n if (!currentDoc || currentDoc.doc._meta.lwt > maxDeletionTime) {\n done = true;\n } else {\n removeDocFromState(\n this.primaryPath as 
any,\n this.schema,\n this.internals,\n currentDoc.doc\n );\n indexOfLower++;\n }\n }\n return PROMISE_RESOLVE_TRUE;\n }\n\n getAttachmentData(\n documentId: string,\n attachmentId: string,\n digest: string\n ): Promise {\n this.ensurePersistence();\n ensureNotRemoved(this);\n const key = attachmentMapKey(documentId, attachmentId);\n const data = this.internals.attachments.get(key);\n\n if (\n !digest ||\n !data ||\n data.digest !== digest\n ) {\n throw new Error('attachment does not exist: ' + key);\n }\n return Promise.resolve(data.writeData.data);\n }\n\n changeStream(): Observable>, RxStorageDefaultCheckpoint>> {\n ensureNotRemoved(this);\n return this.internals.changes$.asObservable();\n }\n\n async remove(): Promise {\n if (this.closed) {\n throw new Error('closed');\n }\n this.ensurePersistence();\n ensureNotRemoved(this);\n\n this.internals.removed = true;\n this.storage.collectionStates.delete(\n getMemoryCollectionKey(\n this.databaseName,\n this.collectionName,\n this.schema.version\n )\n );\n await this.close();\n }\n\n close(): Promise {\n OPEN_MEMORY_INSTANCES.delete(this);\n\n this.ensurePersistence();\n if (this.closed) {\n return PROMISE_RESOLVE_VOID;\n }\n this.closed = true;\n\n this.internals.refCount = this.internals.refCount - 1;\n return PROMISE_RESOLVE_VOID;\n }\n\n conflictResultionTasks(): Observable> {\n return this.internals.conflictResultionTasks$.asObservable();\n }\n resolveConflictResultionTask(_taskSolution: RxConflictResultionTaskSolution): Promise {\n return PROMISE_RESOLVE_VOID;\n }\n}\n\nexport function createMemoryStorageInstance(\n storage: RxStorageMemory,\n params: RxStorageInstanceCreationParams,\n settings: RxStorageMemorySettings\n): Promise> {\n const collectionKey = getMemoryCollectionKey(\n params.databaseName,\n params.collectionName,\n params.schema.version\n );\n\n let internals = storage.collectionStates.get(collectionKey);\n if (!internals) {\n internals = {\n id: randomCouchString(5),\n schema: params.schema,\n 
removed: false,\n refCount: 1,\n documents: new Map(),\n attachments: params.schema.attachments ? new Map() : undefined as any,\n byIndex: {},\n conflictResultionTasks$: new Subject(),\n changes$: new Subject()\n };\n addIndexesToInternalsState(internals, params.schema);\n storage.collectionStates.set(collectionKey, internals);\n } else {\n /**\n * Ensure that the storage was not already\n * created with a different schema.\n * This is very important because if this check\n * does not exist here, we have hard-to-debug problems\n * downstream.\n */\n if (\n params.devMode &&\n !deepEqual(internals.schema, params.schema)\n ) {\n throw new Error('storage was already created with a different schema');\n }\n internals.refCount = internals.refCount + 1;\n }\n\n const instance = new RxStorageInstanceMemory(\n storage,\n params.databaseName,\n params.collectionName,\n params.schema,\n internals,\n params.options,\n settings,\n params.devMode\n );\n return Promise.resolve(instance);\n}\n"],"mappings":"AAAA,SAEIA,OAAO,QACJ,MAAM;AACb,SACIC,iCAAiC,EACjCC,iCAAiC,QAC9B,uBAAuB;AAC9B,SAASC,2BAA2B,QAAQ,2BAA2B;AACvE,SACIC,uBAAuB,QACpB,4BAA4B;AAoBnC,SACIC,SAAS,EACTC,cAAc,EACdC,GAAG,EACHC,oBAAoB,EACpBC,oBAAoB,EAEpBC,iBAAiB,EACjBC,yBAAyB,QACtB,8BAA8B;AACrC,SACIC,OAAO,EACPC,OAAO,EACPC,OAAO,EACPC,OAAO,QACJ,2BAA2B;AAClC,SACIC,gBAAgB,EAChBC,oBAAoB,EACpBC,gBAAgB,EAChBC,sBAAsB,EACtBC,kBAAkB,EAClBC,kBAAkB,QACf,oBAAoB;AAC3B,SACIC,0BAA0B,EAC1BC,kBAAkB,QACf,qBAAqB;AAO5B,SAASC,eAAe,EAAEC,iBAAiB,QAAQ,0BAA0B;;AAE7E;AACA;AACA;AACA;AACA,OAAO,IAAMC,qBAAqB,GAAG,IAAIC,GAAG,CAA+B,CAAC;AAE5E,WAAaC,uBAAuB;EAUhC;AACJ;AACA;AACA;AACA;AACA;;EAGI,SAAAA,wBACoBC,OAAwB,EACxBC,YAAoB,EACpBC,cAAsB,EACtBC,MAAyD,EACzDC,SAA4C,EAC5CC,OAAyD,EACzDC,QAAiC,EACjCC,OAAgB,EAClC;IAAA,KAnBKC,MAAM,GAAG,KAAK;IAAA,KAQdC,uBAAuB,GAAG,IAAIC,OAAO,CAAsE,CAAC;IAAA,KAG/FV,OAAwB,GAAxBA,OAAwB;IAAA,KACxBC,YAAoB,GAApBA,YAAoB;IAAA,KACpBC,cAAsB,GAAtBA,cAAsB;IAAA,KACtBC,MAAyD,GAAzDA,MAAyD;IAAA,KACzDC,SAA4C,GAA5CA,SAA4C;IAAA,KAC5CC,OAAyD,GAAzDA,OAA
yD;IAAA,KACzDC,QAAiC,GAAjCA,QAAiC;IAAA,KACjCC,OAAgB,GAAhBA,OAAgB;IAEhCV,qBAAqB,CAACc,GAAG,CAAC,IAAI,CAAC;IAC/B,IAAI,CAACC,WAAW,GAAGtC,2BAA2B,CAAC,IAAI,CAAC6B,MAAM,CAACU,UAAU,CAAC;EAC1E;EAAC,IAAAC,MAAA,GAAAf,uBAAA,CAAAgB,SAAA;EAAAD,MAAA,CAEDE,SAAS,GAAT,SAAAA,UACIC,cAAyC,EACzCC,OAAe,EAC+B;IAC9C,IAAI,CAACC,iBAAiB,CAAC,CAAC;IACxB9B,gBAAgB,CAAC,IAAI,CAAC;IACtB,IAAMe,SAAS,GAAG,IAAI,CAACA,SAAS;IAChC,IAAMgB,aAAa,GAAG,IAAI,CAAChB,SAAS,CAACiB,SAAS;IAC9C,IAAMT,WAAW,GAAG,IAAI,CAACA,WAAW;IAGpC,IAAMU,WAAW,GAAG/C,uBAAuB,CACvC,IAAI,EACJqC,WAAW,EACXQ,aAAa,EACbH,cAAc,EACdC,OACJ,CAAC;IACD,IAAMK,KAAK,GAAGD,WAAW,CAACE,MAAM;IAChC,IAAIC,OAAoC,GAAG,IAAIC,KAAK,CAACJ,WAAW,CAACK,cAAc,CAACC,MAAM,CAAC;IACvF;AACR;AACA;AACA;AACA;AACA;IACQ,IAAMC,OAAO,GAAGC,OAAO,CAACC,OAAO,CAAC;MAAEN,OAAO;MAAEF;IAAM,CAAC,CAAC;IAEnD,IAAMI,cAAc,GAAGL,WAAW,CAACK,cAAc;IACjD,KAAK,IAAIK,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGL,cAAc,CAACC,MAAM,EAAE,EAAEI,CAAC,EAAE;MAC5C,IAAMC,QAAQ,GAAGN,cAAc,CAACK,CAAC,CAAC;MAClC,IAAME,GAAG,GAAGD,QAAQ,CAACE,QAAQ;MAC7BV,OAAO,CAACO,CAAC,CAAC,GAAGE,GAAG;IACpB;IACA,IAAME,cAAc,GAAGd,WAAW,CAACc,cAAc;IACjD,KAAK,IAAIJ,EAAC,GAAG,CAAC,EAAEA,EAAC,GAAGI,cAAc,CAACR,MAAM,EAAE,EAAEI,EAAC,EAAE;MAC5C,IAAMC,SAAQ,GAAGG,cAAc,CAACJ,EAAC,CAAC;MAClC,IAAME,IAAG,GAAGD,SAAQ,CAACE,QAAQ;MAC7BV,OAAO,CAACY,IAAI,CAACH,IAAG,CAAC;IACrB;IAEA,IAAI,CAACzB,uBAAuB,CAAC6B,GAAG,CAACrB,cAAc,EAAEK,WAAW,CAAC;IAC7D,IAAI,CAAClB,SAAS,CAACmC,qBAAqB,GAAGjB,WAAW;IAElD,IAAI,CAAC,IAAI,CAAClB,SAAS,CAACoC,4BAA4B,EAAE;MAC9C,IAAI,CAACpC,SAAS,CAACoC,4BAA4B,GAAG1D,yBAAyB,CAAC,CAAC,CAAC2D,IAAI,CAAC,MAAM;QACjF,IAAI,CAACrC,SAAS,CAACoC,4BAA4B,GAAGE,SAAS;QACvD,IAAI,CAACvB,iBAAiB,CAAC,CAAC;MAC5B,CAAC,CAAC;IACN;;IAEA;AACR;AACA;AACA;IACQ,IAAIG,WAAW,CAACqB,SAAS,CAACC,MAAM,CAAChB,MAAM,GAAG,CAAC,EAAE;MACzC,IAAMiB,SAAS,GAAGpE,cAAc,CAAC6C,WAAW,CAACwB,SAAS,CAAC,CAACX,QAAQ;MAChEb,WAAW,CAACqB,SAAS,CAACI,UAAU,GAAG;QAC/BC,EAAE,EAAEH,SAAS,CAACjC,WAAW,CAAC;QAC1BqC,GAAG,EAAEJ,SAAS,CAACK,KAAK,CAACD;MACzB,CAAC;MACD3B,WAAW,CAACqB,SAAS,CAACQ,OAAO,GAAGzE,GAAG,CAAC,CAAC;MACrC0B,S
AAS,CAACgD,QAAQ,CAACC,IAAI,CAAC/B,WAAW,CAACqB,SAAS,CAAC;IAClD;IACA,OAAOd,OAAO;EAClB;;EAEA;AACJ;AACA;AACA;AACA;AACA;AACA;AACA,KAPI;EAAAf,MAAA,CAQOK,iBAAiB,GAAxB,SAAAA,kBAAA,EAA2B;IACvB,IACI,CAAC,IAAI,CAACf,SAAS,CAACmC,qBAAqB,EACvC;MACE;IACJ;IACA,IAAMnC,SAAS,GAAG,IAAI,CAACA,SAAS;IAChC,IAAMgB,aAAa,GAAG,IAAI,CAAChB,SAAS,CAACiB,SAAS;IAC9C,IAAMT,WAAW,GAAG,IAAI,CAACA,WAAW;IAEpC,IAAMU,WAAW,GAAG,IAAI,CAAClB,SAAS,CAACmC,qBAAqB;IACxD,IAAI,CAACnC,SAAS,CAACmC,qBAAqB,GAAGG,SAAS;;IAEhD;AACR;AACA;IACQ,IAAMY,YAAY,GAAGC,MAAM,CAACC,MAAM,CAAC,IAAI,CAACpD,SAAS,CAACqD,OAAO,CAAC;IAE1D,IAAM9B,cAAc,GAAGL,WAAW,CAACK,cAAc;IACjD,KAAK,IAAIK,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGL,cAAc,CAACC,MAAM,EAAE,EAAEI,CAAC,EAAE;MAC5C,IAAMC,QAAQ,GAAGN,cAAc,CAACK,CAAC,CAAC;MAClC,IAAME,GAAG,GAAGD,QAAQ,CAACE,QAAQ;MAC7B,IAAMuB,KAAK,GAAGxB,GAAG,CAACtB,WAAW,CAAC;MAC9BrB,kBAAkB,CACdmE,KAAK,EACLtD,SAAS,EACTkD,YAAY,EACZrB,QAAQ,EACRS,SACJ,CAAC;IACL;IAEA,IAAMN,cAAc,GAAGd,WAAW,CAACc,cAAc;IACjD,KAAK,IAAIJ,GAAC,GAAG,CAAC,EAAEA,GAAC,GAAGI,cAAc,CAACR,MAAM,EAAE,EAAEI,GAAC,EAAE;MAC5C,IAAMC,UAAQ,GAAGG,cAAc,CAACJ,GAAC,CAAC;MAClC,IAAME,KAAG,GAAGD,UAAQ,CAACE,QAAQ;MAC7B,IAAMuB,MAAK,GAAGxB,KAAG,CAACtB,WAAW,CAAC;MAC9BrB,kBAAkB,CACdmE,MAAK,EACLtD,SAAS,EACTkD,YAAY,EACZrB,UAAQ,EACRb,aAAa,CAACuC,GAAG,CAACD,MAAY,CAClC,CAAC;IACL;;IAEA;AACR;AACA;IACQ,IAAI,IAAI,CAACvD,MAAM,CAACyD,WAAW,EAAE;MACzB,IAAMC,cAAc,GAAGzD,SAAS,CAACwD,WAAW;MAC5CtC,WAAW,CAACwC,cAAc,CAACC,OAAO,CAACC,UAAU,IAAI;QAC7CH,cAAc,CAACvB,GAAG,CACdnD,gBAAgB,CAAC6E,UAAU,CAACC,UAAU,EAAED,UAAU,CAACE,YAAY,CAAC,EAChE;UACIC,SAAS,EAAEH,UAAU,CAACI,cAAc;UACpCC,MAAM,EAAEL,UAAU,CAACK;QACvB,CACJ,CAAC;MACL,CAAC,CAAC;MACF,IAAI,IAAI,CAAClE,MAAM,CAACyD,WAAW,EAAE;QACzBtC,WAAW,CAACgD,iBAAiB,CAACP,OAAO,CAACC,UAAU,IAAI;UAChDH,cAAc,CAACvB,GAAG,CACdnD,gBAAgB,CAAC6E,UAAU,CAACC,UAAU,EAAED,UAAU,CAACE,YAAY,CAAC,EAChE;YACIC,SAAS,EAAEH,UAAU,CAACI,cAAc;YACpCC,MAAM,EAAEL,UAAU,CAACK;UACvB,CACJ,CAAC;QACL,CAAC,CAAC;QACF/C,WAAW,CAACiD,iBAAiB,CAACR,OAAO,CAACC,UAAU,IAAI;UAChDH,cAAc,CAACW,MAAM,CACjBrF,gBAAgB,CAAC6E,UAAU,CA
ACC,UAAU,EAAED,UAAU,CAACE,YAAY,CACnE,CAAC;QACL,CAAC,CAAC;MACN;IACJ;EACJ,CAAC;EAAApD,MAAA,CAED2D,iBAAiB,GAAjB,SAAAA,kBACIC,MAAgB,EAChBC,WAAoB,EACgB;IACpC,IAAI,CAACxD,iBAAiB,CAAC,CAAC;IACxB,IAAMC,aAAa,GAAG,IAAI,CAAChB,SAAS,CAACiB,SAAS;IAC9C,IAAMuD,GAAgC,GAAG,EAAE;IAC3C,IAAIxD,aAAa,CAACyD,IAAI,KAAK,CAAC,EAAE;MAC1B,OAAO/C,OAAO,CAACC,OAAO,CAAC6C,GAAG,CAAC;IAC/B;IACA,KAAK,IAAI5C,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAG0C,MAAM,CAAC9C,MAAM,EAAE,EAAEI,CAAC,EAAE;MACpC,IAAM0B,KAAK,GAAGgB,MAAM,CAAC1C,CAAC,CAAC;MACvB,IAAM8C,OAAO,GAAG1D,aAAa,CAACuC,GAAG,CAACD,KAAK,CAAC;MACxC,IACIoB,OAAO,KAEH,CAACA,OAAO,CAACC,QAAQ,IACjBJ,WAAW,CACd,EACH;QACEC,GAAG,CAACvC,IAAI,CAACyC,OAAO,CAAC;MACrB;IACJ;IACA,OAAOhD,OAAO,CAACC,OAAO,CAAC6C,GAAG,CAAC;EAC/B,CAAC;EAAA9D,MAAA,CAEDkE,KAAK,GAAL,SAAAA,MACIC,aAAuC,EACC;IACxC,IAAI,CAAC9D,iBAAiB,CAAC,CAAC;IAExB,IAAM+D,SAAS,GAAGD,aAAa,CAACC,SAAS;IACzC,IAAMF,KAAK,GAAGC,aAAa,CAACD,KAAK;IAEjC,IAAMG,IAAI,GAAGH,KAAK,CAACG,IAAI,GAAGH,KAAK,CAACG,IAAI,GAAG,CAAC;IACxC,IAAMC,KAAK,GAAGJ,KAAK,CAACI,KAAK,GAAGJ,KAAK,CAACI,KAAK,GAAGC,QAAQ;IAClD,IAAMC,aAAa,GAAGH,IAAI,GAAGC,KAAK;IAElC,IAAIG,YAA6D,GAAG,KAAK;IACzE,IAAI,CAACL,SAAS,CAACM,wBAAwB,EAAE;MACrCD,YAAY,GAAG5F,eAAe,CAC1B,IAAI,CAACQ,MAAM,EACX8E,aAAa,CAACD,KAClB,CAAC;IACL;IAEA,IAAMS,eAAyB,GAAGP,SAAS,CAACQ,KAAK;IACjD,IAAMC,kBAAkB,GAAG,CAACT,SAAS,CAACU,oBAAoB;IAC1D,IAAMF,KAA2B,GAAGD,eAAe;IACnD,IAAMI,UAAiB,GAAGX,SAAS,CAACY,SAAS;IAC7C,IAAMC,gBAAgB,GAAG3H,iCAAiC,CACtD,IAAI,CAAC+B,MAAM,EACXuF,KAAK,EACLG,UACJ,CAAC;IAED,IAAIG,UAAiB,GAAGd,SAAS,CAACe,OAAO;IACzCD,UAAU,GAAGA,UAAU;IACvB,IAAME,gBAAgB,GAAG7H,iCAAiC,CACtD,IAAI,CAAC8B,MAAM,EACXuF,KAAK,EACLM,UACJ,CAAC;IACD,IAAMG,SAAS,GAAGzG,kBAAkB,CAACgG,KAAK,CAAC;IAE3C,IAAI,CAAC,IAAI,CAACtF,SAAS,CAACqD,OAAO,CAAC0C,SAAS,CAAC,EAAE;MACpC,MAAM,IAAIC,KAAK,CAAC,uBAAuB,GAAGD,SAAS,CAAC;IACxD;IACA,IAAME,aAAa,GAAG,IAAI,CAACjG,SAAS,CAACqD,OAAO,CAAC0C,SAAS,CAAC,CAACE,aAAa;IAIrE,IAAIC,YAAY,GAAG,CAACpB,SAAS,CAACqB,cAAc,GAAGxH,OAAO,GAAGC,OAAO,EAC5DqH,aAAa,EACb;MACIG,WAAW,EAAET;IACjB,CAAC,EACD3G,oBACJ,CAAC;IAED,IAAMqH
,YAAY,GAAG,CAACvB,SAAS,CAACwB,YAAY,GAAGzH,OAAO,GAAGC,OAAO,EAC5DmH,aAAa,EACb;MACIG,WAAW,EAAEN;IACjB,CAAC,EACD9G,oBACJ,CAAC;IAED,IAAIuH,IAAiC,GAAG,EAAE;IAC1C,IAAIC,IAAI,GAAG,KAAK;IAChB,OAAO,CAACA,IAAI,EAAE;MACV,IAAMC,UAAU,GAAGR,aAAa,CAACC,YAAY,CAAC;MAC9C,IACI,CAACO,UAAU,IACXP,YAAY,GAAGG,YAAY,EAC7B;QACE;MACJ;MACA,IAAMK,UAAU,GAAGD,UAAU,CAAC3E,GAAG;MAEjC,IAAI,CAACqD,YAAY,IAAIA,YAAY,CAACuB,UAAU,CAAC,EAAE;QAC3CH,IAAI,CAACtE,IAAI,CAACyE,UAAU,CAAC;MACzB;MAEA,IACKH,IAAI,CAAC/E,MAAM,IAAI0D,aAAa,IAAI,CAACK,kBAAkB,EACtD;QACEiB,IAAI,GAAG,IAAI;MACf;MAEAN,YAAY,EAAE;IAClB;IAEA,IAAIX,kBAAkB,EAAE;MACpB,IAAMoB,cAAc,GAAGnH,iBAAiB,CAAC,IAAI,CAACO,MAAM,EAAE8E,aAAa,CAACD,KAAK,CAAC;MAC1E2B,IAAI,GAAGA,IAAI,CAACK,IAAI,CAACD,cAAc,CAAC;IACpC;;IAEA;IACAJ,IAAI,GAAGA,IAAI,CAACM,KAAK,CAAC9B,IAAI,EAAEG,aAAa,CAAC;IACtC,OAAOxD,OAAO,CAACC,OAAO,CAAC;MACnBV,SAAS,EAAEsF;IACf,CAAC,CAAC;EACN,CAAC;EAAA7F,MAAA,CAEKoG,KAAK,GAAX,eAAAA,MACIjC,aAAuC,EACV;IAC7B,IAAI,CAAC9D,iBAAiB,CAAC,CAAC;IACxB,IAAMgG,MAAM,GAAG,MAAM,IAAI,CAACnC,KAAK,CAACC,aAAa,CAAC;IAC9C,OAAO;MACHiC,KAAK,EAAEC,MAAM,CAAC9F,SAAS,CAACO,MAAM;MAC9BwF,IAAI,EAAE;IACV,CAAC;EACL,CAAC;EAAAtG,MAAA,CAEDuG,OAAO,GAAP,SAAAA,QAAQC,kBAA0B,EAAoB;IAClD,IAAI,CAACnG,iBAAiB,CAAC,CAAC;IACxB,IAAMoG,eAAe,GAAG7I,GAAG,CAAC,CAAC,GAAG4I,kBAAkB;IAClD,IAAM5B,KAAK,GAAG,CAAC,UAAU,EAAE,WAAW,EAAE,IAAI,CAAC9E,WAAW,CAAQ;IAChE,IAAMuF,SAAS,GAAGzG,kBAAkB,CAACgG,KAAK,CAAC;IAC3C,IAAMW,aAAa,GAAG,IAAI,CAACjG,SAAS,CAACqD,OAAO,CAAC0C,SAAS,CAAC,CAACE,aAAa;IAErE,IAAMN,gBAAgB,GAAG3H,iCAAiC,CACtD,IAAI,CAAC+B,MAAM,EACXuF,KAAK,EACL,CACI,IAAI,EACJ,CAAC,EACD,EAAE,CAEV,CAAC;IAED,IAAIY,YAAY,GAAGtH,OAAO,CACtBqH,aAAa,EACb;MACIG,WAAW,EAAET;IACjB,CAAC,EACD3G,oBACJ,CAAC;IAED,IAAIwH,IAAI,GAAG,KAAK;IAChB,OAAO,CAACA,IAAI,EAAE;MACV,IAAME,UAAU,GAAGT,aAAa,CAACC,YAAY,CAAC;MAC9C,IAAI,CAACQ,UAAU,IAAIA,UAAU,CAAC5E,GAAG,CAACgB,KAAK,CAACD,GAAG,GAAGsE,eAAe,EAAE;QAC3DX,IAAI,GAAG,IAAI;MACf,CAAC,MAAM;QACHpH,kBAAkB,CACd,IAAI,CAACoB,WAAW,EAChB,IAAI,CAACT,MAAM,EACX,IAAI,CAACC,SAAS,EACd0G,UAAU,CAAC5E,GACf,CAAC;QACDoE,YAAY,EAA
E;MAClB;IACJ;IACA,OAAO3H,oBAAoB;EAC/B,CAAC;EAAAmC,MAAA,CAED0G,iBAAiB,GAAjB,SAAAA,kBACIvD,UAAkB,EAClBC,YAAoB,EACpBG,MAAc,EACC;IACf,IAAI,CAAClD,iBAAiB,CAAC,CAAC;IACxB9B,gBAAgB,CAAC,IAAI,CAAC;IACtB,IAAMoI,GAAG,GAAGtI,gBAAgB,CAAC8E,UAAU,EAAEC,YAAY,CAAC;IACtD,IAAMwD,IAAI,GAAG,IAAI,CAACtH,SAAS,CAACwD,WAAW,CAACD,GAAG,CAAC8D,GAAG,CAAC;IAEhD,IACI,CAACpD,MAAM,IACP,CAACqD,IAAI,IACLA,IAAI,CAACrD,MAAM,KAAKA,MAAM,EACxB;MACE,MAAM,IAAI+B,KAAK,CAAC,6BAA6B,GAAGqB,GAAG,CAAC;IACxD;IACA,OAAO3F,OAAO,CAACC,OAAO,CAAC2F,IAAI,CAACvD,SAAS,CAACuD,IAAI,CAAC;EAC/C,CAAC;EAAA5G,MAAA,CAED6G,YAAY,GAAZ,SAAAA,aAAA,EAAmH;IAC/GtI,gBAAgB,CAAC,IAAI,CAAC;IACtB,OAAO,IAAI,CAACe,SAAS,CAACgD,QAAQ,CAACwE,YAAY,CAAC,CAAC;EACjD,CAAC;EAAA9G,MAAA,CAEK+G,MAAM,GAAZ,eAAAA,OAAA,EAA8B;IAC1B,IAAI,IAAI,CAACrH,MAAM,EAAE;MACb,MAAM,IAAI4F,KAAK,CAAC,QAAQ,CAAC;IAC7B;IACA,IAAI,CAACjF,iBAAiB,CAAC,CAAC;IACxB9B,gBAAgB,CAAC,IAAI,CAAC;IAEtB,IAAI,CAACe,SAAS,CAAC0H,OAAO,GAAG,IAAI;IAC7B,IAAI,CAAC9H,OAAO,CAAC+H,gBAAgB,CAACvD,MAAM,CAChClF,sBAAsB,CAClB,IAAI,CAACW,YAAY,EACjB,IAAI,CAACC,cAAc,EACnB,IAAI,CAACC,MAAM,CAAC6H,OAChB,CACJ,CAAC;IACD,MAAM,IAAI,CAACC,KAAK,CAAC,CAAC;EACtB,CAAC;EAAAnH,MAAA,CAEDmH,KAAK,GAAL,SAAAA,MAAA,EAAuB;IACnBpI,qBAAqB,CAAC2E,MAAM,CAAC,IAAI,CAAC;IAElC,IAAI,CAACrD,iBAAiB,CAAC,CAAC;IACxB,IAAI,IAAI,CAACX,MAAM,EAAE;MACb,OAAO5B,oBAAoB;IAC/B;IACA,IAAI,CAAC4B,MAAM,GAAG,IAAI;IAElB,IAAI,CAACJ,SAAS,CAAC8H,QAAQ,GAAG,IAAI,CAAC9H,SAAS,CAAC8H,QAAQ,GAAG,CAAC;IACrD,OAAOtJ,oBAAoB;EAC/B,CAAC;EAAAkC,MAAA,CAEDqH,sBAAsB,GAAtB,SAAAA,uBAAA,EAAyE;IACrE,OAAO,IAAI,CAAC/H,SAAS,CAACgI,uBAAuB,CAACR,YAAY,CAAC,CAAC;EAChE,CAAC;EAAA9G,MAAA,CACDuH,4BAA4B,GAA5B,SAAAA,6BAA6BC,aAAyD,EAAiB;IACnG,OAAO1J,oBAAoB;EAC/B,CAAC;EAAA,OAAAmB,uBAAA;AAAA;AAGL,OAAO,SAASwI,2BAA2BA,CACvCvI,OAAwB,EACxBwI,MAA0F,EAC1FlI,QAAiC,EACU;EAC3C,IAAMmI,aAAa,GAAGnJ,sBAAsB,CACxCkJ,MAAM,CAACvI,YAAY,EACnBuI,MAAM,CAACtI,cAAc,EACrBsI,MAAM,CAACrI,MAAM,CAAC6H,OAClB,CAAC;EAED,IAAI5H,SAAS,GAAGJ,OAAO,CAAC+H,gBAAgB,CAACpE,GAAG,CAAC8E,aAAa,CAAC;EAC3D,IAAI,CAACrI,SAAS,EAAE;IACZA,SAAS,GAAG;MACR4C,
EAAE,EAAEnE,iBAAiB,CAAC,CAAC,CAAC;MACxBsB,MAAM,EAAEqI,MAAM,CAACrI,MAAM;MACrB2H,OAAO,EAAE,KAAK;MACdI,QAAQ,EAAE,CAAC;MACX7G,SAAS,EAAE,IAAIqH,GAAG,CAAC,CAAC;MACpB9E,WAAW,EAAE4E,MAAM,CAACrI,MAAM,CAACyD,WAAW,GAAG,IAAI8E,GAAG,CAAC,CAAC,GAAGhG,SAAgB;MACrEe,OAAO,EAAE,CAAC,CAAC;MACX2E,uBAAuB,EAAE,IAAIjK,OAAO,CAAC,CAAC;MACtCiF,QAAQ,EAAE,IAAIjF,OAAO,CAAC;IAC1B,CAAC;IACDsB,0BAA0B,CAACW,SAAS,EAAEoI,MAAM,CAACrI,MAAM,CAAC;IACpDH,OAAO,CAAC+H,gBAAgB,CAACzF,GAAG,CAACmG,aAAa,EAAErI,SAAS,CAAC;EAC1D,CAAC,MAAM;IACH;AACR;AACA;AACA;AACA;AACA;AACA;IACQ,IACIoI,MAAM,CAACjI,OAAO,IACd,CAAC/B,SAAS,CAAC4B,SAAS,CAACD,MAAM,EAAEqI,MAAM,CAACrI,MAAM,CAAC,EAC7C;MACE,MAAM,IAAIiG,KAAK,CAAC,qDAAqD,CAAC;IAC1E;IACAhG,SAAS,CAAC8H,QAAQ,GAAG9H,SAAS,CAAC8H,QAAQ,GAAG,CAAC;EAC/C;EAEA,IAAMS,QAAQ,GAAG,IAAI5I,uBAAuB,CACxCC,OAAO,EACPwI,MAAM,CAACvI,YAAY,EACnBuI,MAAM,CAACtI,cAAc,EACrBsI,MAAM,CAACrI,MAAM,EACbC,SAAS,EACToI,MAAM,CAACnI,OAAO,EACdC,QAAQ,EACRkI,MAAM,CAACjI,OACX,CAAC;EACD,OAAOuB,OAAO,CAACC,OAAO,CAAC4G,QAAQ,CAAC;AACpC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-mongodb/index.js b/dist/esm/plugins/storage-mongodb/index.js deleted file mode 100644 index b74381c93c3..00000000000 --- a/dist/esm/plugins/storage-mongodb/index.js +++ /dev/null @@ -1,5 +0,0 @@ -export * from "./rx-storage-mongodb.js"; -export * from "./rx-storage-instance-mongodb.js"; -export * from "./mongodb-helper.js"; -export * from "./mongodb-types.js"; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-mongodb/index.js.map b/dist/esm/plugins/storage-mongodb/index.js.map deleted file mode 100644 index d199235a226..00000000000 --- a/dist/esm/plugins/storage-mongodb/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":[],"sources":["../../../../src/plugins/storage-mongodb/index.ts"],"sourcesContent":["export * from './rx-storage-mongodb.ts';\nexport * from './rx-storage-instance-mongodb.ts';\nexport * from 
'./mongodb-helper.ts';\nexport * from './mongodb-types.ts';\n"],"mappings":"AAAA,cAAc,yBAAyB;AACvC,cAAc,kCAAkC;AAChD,cAAc,qBAAqB;AACnC,cAAc,oBAAoB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-mongodb/mongodb-helper.js b/dist/esm/plugins/storage-mongodb/mongodb-helper.js deleted file mode 100644 index a3b9af54528..00000000000 --- a/dist/esm/plugins/storage-mongodb/mongodb-helper.js +++ /dev/null @@ -1,79 +0,0 @@ -import { flatClone } from "../utils/index.js"; -import { getPrimaryFieldOfPrimaryKey } from "../../rx-schema-helper.js"; -export var RX_STORAGE_NAME_MONGODB = 'mongodb'; - -/** - * MongoDB uses the _id field by itself (max 12 bytes) - * so we have to substitute the _id field if - * it is used in the RxDocType. - */ -export var MONGO_ID_SUBSTITUTE_FIELDNAME = '__id'; -export function primarySwapMongoDBQuerySelector(primaryKey, selector) { - selector = flatClone(selector); - if (primaryKey !== '_id') { - return selector; - } - if (Array.isArray(selector)) { - return selector.map(item => primarySwapMongoDBQuerySelector(primaryKey, item)); - } else if (typeof selector === 'object') { - var ret = {}; - Object.entries(selector).forEach(([k, v]) => { - if (k === primaryKey) { - ret._id = v; - } else { - if (k.startsWith('$')) { - ret[k] = primarySwapMongoDBQuerySelector(primaryKey, v); - } else { - ret[k] = v; - } - } - }); - return ret; - } else { - return selector; - } -} -export function prepareMongoDBQuery(schema, mutateableQuery) { - var primaryKey = getPrimaryFieldOfPrimaryKey(schema.primaryKey); - var preparedQuery = { - query: mutateableQuery, - mongoSelector: primarySwapMongoDBQuerySelector(primaryKey, mutateableQuery.selector), - mongoSort: swapToMongoSort(mutateableQuery.sort) - }; - return preparedQuery; -} -; -export function swapMongoToRxDoc(docData) { - docData = flatClone(docData); - if (docData[MONGO_ID_SUBSTITUTE_FIELDNAME]) { - var value = docData[MONGO_ID_SUBSTITUTE_FIELDNAME]; - delete 
docData[MONGO_ID_SUBSTITUTE_FIELDNAME]; - docData._id = value; - } else { - delete docData._id; - } - return docData; -} -export function swapRxDocToMongo(docData) { - docData = flatClone(docData); - if (docData._id) { - var value = docData._id; - delete docData._id; - docData[MONGO_ID_SUBSTITUTE_FIELDNAME] = value; - } - return docData; -} -export function swapToMongoSort(sort) { - var ret = {}; - sort.forEach(sortPart => { - var [key, direction] = Object.entries(sortPart)[0]; - var mongoKey = key === '_id' ? MONGO_ID_SUBSTITUTE_FIELDNAME : key; - var mongoDirection = direction === 'asc' ? 1 : -1; - ret[mongoKey] = mongoDirection; - }); - return ret; -} -export function getMongoDBIndexName(index) { - return index.join('|'); -} -//# sourceMappingURL=mongodb-helper.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-mongodb/mongodb-helper.js.map b/dist/esm/plugins/storage-mongodb/mongodb-helper.js.map deleted file mode 100644 index 500c4b3b858..00000000000 --- a/dist/esm/plugins/storage-mongodb/mongodb-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"mongodb-helper.js","names":["flatClone","getPrimaryFieldOfPrimaryKey","RX_STORAGE_NAME_MONGODB","MONGO_ID_SUBSTITUTE_FIELDNAME","primarySwapMongoDBQuerySelector","primaryKey","selector","Array","isArray","map","item","ret","Object","entries","forEach","k","v","_id","startsWith","prepareMongoDBQuery","schema","mutateableQuery","preparedQuery","query","mongoSelector","mongoSort","swapToMongoSort","sort","swapMongoToRxDoc","docData","value","swapRxDocToMongo","sortPart","key","direction","mongoKey","mongoDirection","getMongoDBIndexName","index","join"],"sources":["../../../../src/plugins/storage-mongodb/mongodb-helper.ts"],"sourcesContent":["import type {\n FilledMangoQuery,\n MangoQuerySelector,\n MangoQuerySortPart,\n RxDocumentData,\n RxJsonSchema\n} from '../../types/index.d.ts';\nimport {\n Sort as MongoSort\n} from 'mongodb';\nimport { flatClone } from '../utils/index.ts';\nimport 
{ MongoDBPreparedQuery, MongoQuerySelector } from './mongodb-types.ts';\nimport { getPrimaryFieldOfPrimaryKey } from '../../rx-schema-helper.ts';\nexport const RX_STORAGE_NAME_MONGODB = 'mongodb';\n\n/**\n * MongoDB uses the _id field by itself (max 12 bytes)\n * so we have to substitute the _id field if\n * it is used in the RxDocType.\n */\nexport const MONGO_ID_SUBSTITUTE_FIELDNAME = '__id';\n\nexport function primarySwapMongoDBQuerySelector(\n primaryKey: keyof RxDocType,\n selector: MangoQuerySelector\n): MongoQuerySelector {\n selector = flatClone(selector);\n\n if (primaryKey !== '_id') {\n return selector as any;\n }\n if (Array.isArray(selector)) {\n return selector.map(item => primarySwapMongoDBQuerySelector(primaryKey, item)) as any;\n } else if (typeof selector === 'object') {\n const ret: any = {};\n Object.entries(selector).forEach(([k, v]) => {\n if (k === primaryKey) {\n ret._id = v;\n } else {\n if (k.startsWith('$')) {\n ret[k] = primarySwapMongoDBQuerySelector(primaryKey, v as any);\n } else {\n ret[k] = v;\n }\n }\n });\n return ret;\n } else {\n return selector;\n }\n}\n\n\nexport function prepareMongoDBQuery(\n schema: RxJsonSchema>,\n mutateableQuery: FilledMangoQuery\n) {\n const primaryKey = getPrimaryFieldOfPrimaryKey(schema.primaryKey) as any;\n const preparedQuery: MongoDBPreparedQuery = {\n query: mutateableQuery,\n mongoSelector: primarySwapMongoDBQuerySelector(\n primaryKey,\n mutateableQuery.selector\n ),\n mongoSort: swapToMongoSort(mutateableQuery.sort)\n };\n return preparedQuery;\n};\n\n\nexport function swapMongoToRxDoc(\n docData: any\n): RxDocumentData {\n docData = flatClone(docData);\n if ((docData as any)[MONGO_ID_SUBSTITUTE_FIELDNAME]) {\n const value = (docData as any)[MONGO_ID_SUBSTITUTE_FIELDNAME];\n delete (docData as any)[MONGO_ID_SUBSTITUTE_FIELDNAME];\n (docData as any)._id = value;\n } else {\n delete (docData as any)._id;\n }\n return docData;\n}\n\nexport function swapRxDocToMongo(\n docData: RxDocumentData\n): 
any {\n docData = flatClone(docData);\n if ((docData as any)._id) {\n const value = (docData as any)._id;\n delete (docData as any)._id;\n (docData as any)[MONGO_ID_SUBSTITUTE_FIELDNAME] = value;\n }\n return docData;\n}\n\nexport function swapToMongoSort(\n sort: MangoQuerySortPart[]\n): MongoSort {\n const ret: MongoSort = {};\n sort.forEach(sortPart => {\n const [key, direction] = Object.entries(sortPart)[0];\n const mongoKey = key === '_id' ? MONGO_ID_SUBSTITUTE_FIELDNAME : key;\n const mongoDirection = direction === 'asc' ? 1 : -1;\n ret[mongoKey] = mongoDirection;\n });\n return ret;\n}\n\nexport function getMongoDBIndexName(index: string[]): string {\n return index.join('|');\n}\n"],"mappings":"AAUA,SAASA,SAAS,QAAQ,mBAAmB;AAE7C,SAASC,2BAA2B,QAAQ,2BAA2B;AACvE,OAAO,IAAMC,uBAAuB,GAAG,SAAS;;AAEhD;AACA;AACA;AACA;AACA;AACA,OAAO,IAAMC,6BAA6B,GAAG,MAAM;AAEnD,OAAO,SAASC,+BAA+BA,CAC3CC,UAA2B,EAC3BC,QAAuC,EACV;EAC7BA,QAAQ,GAAGN,SAAS,CAACM,QAAQ,CAAC;EAE9B,IAAID,UAAU,KAAK,KAAK,EAAE;IACtB,OAAOC,QAAQ;EACnB;EACA,IAAIC,KAAK,CAACC,OAAO,CAACF,QAAQ,CAAC,EAAE;IACzB,OAAOA,QAAQ,CAACG,GAAG,CAACC,IAAI,IAAIN,+BAA+B,CAACC,UAAU,EAAEK,IAAI,CAAC,CAAC;EAClF,CAAC,MAAM,IAAI,OAAOJ,QAAQ,KAAK,QAAQ,EAAE;IACrC,IAAMK,GAAQ,GAAG,CAAC,CAAC;IACnBC,MAAM,CAACC,OAAO,CAACP,QAAQ,CAAC,CAACQ,OAAO,CAAC,CAAC,CAACC,CAAC,EAAEC,CAAC,CAAC,KAAK;MACzC,IAAID,CAAC,KAAKV,UAAU,EAAE;QAClBM,GAAG,CAACM,GAAG,GAAGD,CAAC;MACf,CAAC,MAAM;QACH,IAAID,CAAC,CAACG,UAAU,CAAC,GAAG,CAAC,EAAE;UACnBP,GAAG,CAACI,CAAC,CAAC,GAAGX,+BAA+B,CAACC,UAAU,EAAEW,CAAQ,CAAC;QAClE,CAAC,MAAM;UACHL,GAAG,CAACI,CAAC,CAAC,GAAGC,CAAC;QACd;MACJ;IACJ,CAAC,CAAC;IACF,OAAOL,GAAG;EACd,CAAC,MAAM;IACH,OAAOL,QAAQ;EACnB;AACJ;AAGA,OAAO,SAASa,mBAAmBA,CAC/BC,MAA+C,EAC/CC,eAA4C,EAC9C;EACE,IAAMhB,UAAU,GAAGJ,2BAA2B,CAACmB,MAAM,CAACf,UAAU,CAAQ;EACxE,IAAMiB,aAA8C,GAAG;IACnDC,KAAK,EAAEF,eAAe;IACtBG,aAAa,EAAEpB,+BAA+B,CAC1CC,UAAU,EACVgB,eAAe,CAACf,QACpB,CAAC;IACDmB,SAAS,EAAEC,eAAe,CAACL,eAAe,CAACM,IAAI;EACnD,CAAC;EACD,OAAOL,aAAa;AACxB;AAAC;AAGD,OAAO,SAASM,gBAAgBA,CAC5BC,OAAY,EA
Ca;EACzBA,OAAO,GAAG7B,SAAS,CAAC6B,OAAO,CAAC;EAC5B,IAAKA,OAAO,CAAS1B,6BAA6B,CAAC,EAAE;IACjD,IAAM2B,KAAK,GAAID,OAAO,CAAS1B,6BAA6B,CAAC;IAC7D,OAAQ0B,OAAO,CAAS1B,6BAA6B,CAAC;IACrD0B,OAAO,CAASZ,GAAG,GAAGa,KAAK;EAChC,CAAC,MAAM;IACH,OAAQD,OAAO,CAASZ,GAAG;EAC/B;EACA,OAAOY,OAAO;AAClB;AAEA,OAAO,SAASE,gBAAgBA,CAC5BF,OAAkC,EAC/B;EACHA,OAAO,GAAG7B,SAAS,CAAC6B,OAAO,CAAC;EAC5B,IAAKA,OAAO,CAASZ,GAAG,EAAE;IACtB,IAAMa,KAAK,GAAID,OAAO,CAASZ,GAAG;IAClC,OAAQY,OAAO,CAASZ,GAAG;IAC1BY,OAAO,CAAS1B,6BAA6B,CAAC,GAAG2B,KAAK;EAC3D;EACA,OAAOD,OAAO;AAClB;AAEA,OAAO,SAASH,eAAeA,CAC3BC,IAAqC,EAC5B;EACT,IAAMhB,GAAc,GAAG,CAAC,CAAC;EACzBgB,IAAI,CAACb,OAAO,CAACkB,QAAQ,IAAI;IACrB,IAAM,CAACC,GAAG,EAAEC,SAAS,CAAC,GAAGtB,MAAM,CAACC,OAAO,CAACmB,QAAQ,CAAC,CAAC,CAAC,CAAC;IACpD,IAAMG,QAAQ,GAAGF,GAAG,KAAK,KAAK,GAAG9B,6BAA6B,GAAG8B,GAAG;IACpE,IAAMG,cAAc,GAAGF,SAAS,KAAK,KAAK,GAAG,CAAC,GAAG,CAAC,CAAC;IACnDvB,GAAG,CAACwB,QAAQ,CAAC,GAAGC,cAAc;EAClC,CAAC,CAAC;EACF,OAAOzB,GAAG;AACd;AAEA,OAAO,SAAS0B,mBAAmBA,CAACC,KAAe,EAAU;EACzD,OAAOA,KAAK,CAACC,IAAI,CAAC,GAAG,CAAC;AAC1B","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-mongodb/mongodb-types.js b/dist/esm/plugins/storage-mongodb/mongodb-types.js deleted file mode 100644 index b7687038169..00000000000 --- a/dist/esm/plugins/storage-mongodb/mongodb-types.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=mongodb-types.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-mongodb/mongodb-types.js.map b/dist/esm/plugins/storage-mongodb/mongodb-types.js.map deleted file mode 100644 index d3ed6f50624..00000000000 --- a/dist/esm/plugins/storage-mongodb/mongodb-types.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"mongodb-types.js","names":[],"sources":["../../../../src/plugins/storage-mongodb/mongodb-types.ts"],"sourcesContent":["import type {\n Filter as MongoQueryFilter,\n Sort as MongoSort,\n TransactionOptions\n} from 'mongodb';\nimport type {\n FilledMangoQuery, RxDocumentData\n} 
from '../../types/index.d.ts';\nexport type MongoQuerySelector = MongoQueryFilter;\nexport type MongoDBDatabaseSettings = {\n /**\n * MongoDB ConnectionString\n * Example: mongodb://localhost:\n */\n connection: string | 'mongodb://localhost:27017';\n transactionOptions?: TransactionOptions;\n};\n\nexport type MongoDBPreparedQuery = {\n query: FilledMangoQuery;\n mongoSelector: MongoQuerySelector>;\n mongoSort: MongoSort;\n};\n\nexport type MongoDBSettings = {};\nexport type MongoDBStorageInternals = {};\nexport type RxStorageMongoDBInstanceCreationOptions = {};\nexport type RxStorageMongoDBSettings = {};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-mongodb/rx-storage-instance-mongodb.js b/dist/esm/plugins/storage-mongodb/rx-storage-instance-mongodb.js deleted file mode 100644 index f10245933f9..00000000000 --- a/dist/esm/plugins/storage-mongodb/rx-storage-instance-mongodb.js +++ /dev/null @@ -1,323 +0,0 @@ -import { BehaviorSubject, Subject, filter, firstValueFrom } from 'rxjs'; -import { getPrimaryFieldOfPrimaryKey } from "../../rx-schema-helper.js"; -import { ensureNotFalsy, getFromMapOrThrow, isMaybeReadonlyArray, now, PROMISE_RESOLVE_VOID, requestIdlePromise } from "../../plugins/utils/index.js"; -import { MongoClient } from 'mongodb'; -import { categorizeBulkWriteRows } from "../../rx-storage-helper.js"; -import { MONGO_ID_SUBSTITUTE_FIELDNAME, getMongoDBIndexName, prepareMongoDBQuery, swapMongoToRxDoc, swapRxDocToMongo } from "./mongodb-helper.js"; -export var RxStorageInstanceMongoDB = /*#__PURE__*/function () { - // public mongoChangeStream?: MongoChangeStream>; - - /** - * Closing the connection must not happen when - * an operation is running, otherwise we get an error. - * So we store all running operations here so that - * they can be awaited. 
- */ - - /** - * We use this to be able to still fetch - * the objectId after transforming the document from mongo-style (with _id) - * to RxDB - */ - - function RxStorageInstanceMongoDB(storage, databaseName, collectionName, schema, internals, options, settings) { - this.changes$ = new Subject(); - this.runningOperations = new BehaviorSubject(0); - this.writeQueue = PROMISE_RESOLVE_VOID; - this.mongoObjectIdCache = new WeakMap(); - this.storage = storage; - this.databaseName = databaseName; - this.collectionName = collectionName; - this.schema = schema; - this.internals = internals; - this.options = options; - this.settings = settings; - if (this.schema.attachments) { - throw new Error('attachments not supported in mongodb storage, make a PR if you need that'); - } - this.primaryPath = getPrimaryFieldOfPrimaryKey(this.schema.primaryKey); - this.inMongoPrimaryPath = this.primaryPath === '_id' ? MONGO_ID_SUBSTITUTE_FIELDNAME : this.primaryPath; - this.mongoClient = new MongoClient(storage.databaseSettings.connection); - this.mongoDatabase = this.mongoClient.db(databaseName + '-v' + this.schema.version); - var indexes = (this.schema.indexes ? this.schema.indexes.slice() : []).map(index => { - var arIndex = isMaybeReadonlyArray(index) ? index.slice(0) : [index]; - return arIndex; - }); - indexes.push([this.inMongoPrimaryPath]); - this.mongoCollectionPromise = this.mongoDatabase.createCollection(collectionName).then(async mongoCollection => { - await mongoCollection.createIndexes(indexes.map(index => { - var mongoIndex = {}; - index.forEach(field => mongoIndex[field] = 1); - return { - name: getMongoDBIndexName(index), - key: mongoIndex - }; - })); - - /** - * TODO in a setup where multiple servers run node.js - * processes that use the mongodb storage, we should propagate - * events by listening to the mongodb changestream. - * This maybe should be a premium feature. 
- */ - // this.mongoChangeStream = mongoCollection.watch( - // undefined, { - // batchSize: 100 - // } - // ).on('change', change => { - - // const eventBulkId = randomCouchString(10); - // const newDocData: RxDocumentData = (change as any).fullDocument; - // const documentId = newDocData[this.primaryPath] as any; - - // const eventBulk: EventBulk>, RxStorageDefaultCheckpoint> = { - // checkpoint: { - // id: newDocData[this.primaryPath] as any, - // lwt: newDocData._meta.lwt - // }, - // context: 'mongodb-write', - // id: eventBulkId, - // events: [{ - // documentData: newDocData, - // documentId, - // operation: 'INSERT', - // previousDocumentData: undefined, - // }], - // startTime: now(), - // endTime: now() - // }; - - // this.changes$.next(eventBulk); - // }); - - return mongoCollection; - }); - } - - /** - * Bulk writes on the mongodb storage. - * Notice that MongoDB does not support cross-document transactions - * so we have to do a update-if-previous-is-correct like operations. - * (Similar to what RxDB does with the revision system) - */ - var _proto = RxStorageInstanceMongoDB.prototype; - _proto.bulkWrite = function bulkWrite(documentWrites, context) { - this.writeQueue = this.writeQueue.then(async () => { - this.runningOperations.next(this.runningOperations.getValue() + 1); - var mongoCollection = await this.mongoCollectionPromise; - if (this.closed) { - return Promise.reject(new Error('already closed')); - } - var primaryPath = this.primaryPath; - var ret = { - success: [], - error: [] - }; - var docIds = documentWrites.map(d => d.document[primaryPath]); - var documentStates = await this.findDocumentsById(docIds, true); - var documentStatesMap = new Map(); - documentStates.forEach(doc => { - var docId = doc[primaryPath]; - documentStatesMap.set(docId, doc); - }); - var categorized = categorizeBulkWriteRows(this, primaryPath, documentStatesMap, documentWrites, context); - var changeByDocId = new Map(); - categorized.eventBulk.events.forEach(change => { - 
changeByDocId.set(change.documentId, change); - }); - ret.error = categorized.errors; - - /** - * Reset the event bulk because - * conflicts can still appear after the categorization - */ - var eventBulk = categorized.eventBulk; - eventBulk.events = []; - await Promise.all([ - /** - * Inserts - * @link https://sparkbyexamples.com/mongodb/mongodb-insert-if-not-exists/ - */ - Promise.all(categorized.bulkInsertDocs.map(async writeRow => { - var docId = writeRow.document[primaryPath]; - var writeResult = await mongoCollection.findOneAndUpdate({ - [this.inMongoPrimaryPath]: docId - }, { - $setOnInsert: swapRxDocToMongo(writeRow.document) - }, { - upsert: true, - includeResultMetadata: true - }); - if (writeResult.value) { - // had insert conflict - var conflictError = { - status: 409, - documentId: docId, - writeRow, - documentInDb: swapMongoToRxDoc(ensureNotFalsy(writeResult.value)), - isError: true - }; - ret.error.push(conflictError); - } else { - var event = changeByDocId.get(docId); - if (event) { - eventBulk.events.push(event); - } - ret.success.push(writeRow.document); - } - })), - /** - * Updates - */ - Promise.all(categorized.bulkUpdateDocs.map(async writeRow => { - var docId = writeRow.document[primaryPath]; - var writeResult = await mongoCollection.findOneAndReplace({ - [this.inMongoPrimaryPath]: docId, - _rev: ensureNotFalsy(writeRow.previous)._rev - }, swapRxDocToMongo(writeRow.document), { - includeResultMetadata: true, - upsert: false, - returnDocument: 'before' - }); - if (!writeResult.ok) { - var currentDocState = await this.findDocumentsById([docId], true); - var currentDoc = currentDocState[0]; - // had insert conflict - var conflictError = { - status: 409, - documentId: docId, - writeRow, - documentInDb: ensureNotFalsy(currentDoc), - isError: true - }; - ret.error.push(conflictError); - } else { - var event = getFromMapOrThrow(changeByDocId, docId); - eventBulk.events.push(event); - ret.success.push(writeRow.document); - } - }))]); - if 
(categorized.eventBulk.events.length > 0) { - var lastState = ensureNotFalsy(categorized.newestRow).document; - categorized.eventBulk.checkpoint = { - id: lastState[primaryPath], - lwt: lastState._meta.lwt - }; - categorized.eventBulk.endTime = now(); - this.changes$.next(categorized.eventBulk); - } - this.runningOperations.next(this.runningOperations.getValue() - 1); - return ret; - }); - return this.writeQueue; - }; - _proto.findDocumentsById = async function findDocumentsById(docIds, withDeleted, session) { - this.runningOperations.next(this.runningOperations.getValue() + 1); - var mongoCollection = await this.mongoCollectionPromise; - var primaryPath = this.primaryPath; - var plainQuery = { - [primaryPath]: { - $in: docIds - } - }; - if (!withDeleted) { - plainQuery._deleted = false; - } - var result = []; - var queryResult = await mongoCollection.find(plainQuery, { - session - }).toArray(); - queryResult.forEach(row => { - result.push(swapMongoToRxDoc(row)); - }); - this.runningOperations.next(this.runningOperations.getValue() - 1); - return result; - }; - _proto.query = async function query(originalPreparedQuery) { - var preparedQuery = prepareMongoDBQuery(this.schema, originalPreparedQuery.query); - this.runningOperations.next(this.runningOperations.getValue() + 1); - await this.writeQueue; - var mongoCollection = await this.mongoCollectionPromise; - var query = mongoCollection.find(preparedQuery.mongoSelector); - if (preparedQuery.query.skip) { - query = query.skip(preparedQuery.query.skip); - } - if (preparedQuery.query.limit) { - query = query.limit(preparedQuery.query.limit); - } - if (preparedQuery.query.sort) { - query = query.sort(preparedQuery.mongoSort); - } - var resultDocs = await query.toArray(); - this.runningOperations.next(this.runningOperations.getValue() - 1); - return { - documents: resultDocs.map(d => swapMongoToRxDoc(d)) - }; - }; - _proto.count = async function count(originalPreparedQuery) { - var preparedQuery = 
prepareMongoDBQuery(this.schema, originalPreparedQuery.query); - this.runningOperations.next(this.runningOperations.getValue() + 1); - await this.writeQueue; - var mongoCollection = await this.mongoCollectionPromise; - var count = await mongoCollection.countDocuments(preparedQuery.mongoSelector); - this.runningOperations.next(this.runningOperations.getValue() - 1); - return { - count, - mode: 'fast' - }; - }; - _proto.cleanup = async function cleanup(minimumDeletedTime) { - this.runningOperations.next(this.runningOperations.getValue() + 1); - var mongoCollection = await this.mongoCollectionPromise; - var maxDeletionTime = now() - minimumDeletedTime; - await mongoCollection.deleteMany({ - _deleted: true, - '_meta.lwt': { - $lt: maxDeletionTime - } - }); - this.runningOperations.next(this.runningOperations.getValue() - 1); - return true; - }; - _proto.getAttachmentData = async function getAttachmentData(_documentId, _attachmentId, _digest) { - await this.mongoCollectionPromise; - throw new Error('attachments not implemented, make a PR'); - }; - _proto.changeStream = function changeStream() { - return this.changes$; - }; - _proto.remove = async function remove() { - if (this.closed) { - throw new Error('already closed'); - } - this.runningOperations.next(this.runningOperations.getValue() + 1); - var mongoCollection = await this.mongoCollectionPromise; - await mongoCollection.drop(); - this.runningOperations.next(this.runningOperations.getValue() - 1); - await this.close(); - }; - _proto.close = async function close() { - // TODO without this next-tick we have random fails in the tests - await requestIdlePromise(200); - if (this.closed) { - return this.closed; - } - this.closed = (async () => { - await this.mongoCollectionPromise; - await firstValueFrom(this.runningOperations.pipe(filter(c => c === 0))); - // await ensureNotFalsy(this.mongoChangeStream).close(); - await this.mongoClient.close(); - })(); - return this.closed; - }; - _proto.conflictResultionTasks = 
function conflictResultionTasks() { - return new Subject(); - }; - _proto.resolveConflictResultionTask = async function resolveConflictResultionTask(_taskSolution) {}; - return RxStorageInstanceMongoDB; -}(); -export function createMongoDBStorageInstance(storage, params, settings) { - var instance = new RxStorageInstanceMongoDB(storage, params.databaseName, params.collectionName, params.schema, {}, params.options, settings); - return Promise.resolve(instance); -} -//# sourceMappingURL=rx-storage-instance-mongodb.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-mongodb/rx-storage-instance-mongodb.js.map b/dist/esm/plugins/storage-mongodb/rx-storage-instance-mongodb.js.map deleted file mode 100644 index 0a7159089dc..00000000000 --- a/dist/esm/plugins/storage-mongodb/rx-storage-instance-mongodb.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-storage-instance-mongodb.js","names":["BehaviorSubject","Subject","filter","firstValueFrom","getPrimaryFieldOfPrimaryKey","ensureNotFalsy","getFromMapOrThrow","isMaybeReadonlyArray","now","PROMISE_RESOLVE_VOID","requestIdlePromise","MongoClient","categorizeBulkWriteRows","MONGO_ID_SUBSTITUTE_FIELDNAME","getMongoDBIndexName","prepareMongoDBQuery","swapMongoToRxDoc","swapRxDocToMongo","RxStorageInstanceMongoDB","storage","databaseName","collectionName","schema","internals","options","settings","changes$","runningOperations","writeQueue","mongoObjectIdCache","WeakMap","attachments","Error","primaryPath","primaryKey","inMongoPrimaryPath","mongoClient","databaseSettings","connection","mongoDatabase","db","version","indexes","slice","map","index","arIndex","push","mongoCollectionPromise","createCollection","then","mongoCollection","createIndexes","mongoIndex","forEach","field","name","key","_proto","prototype","bulkWrite","documentWrites","context","next","getValue","closed","Promise","reject","ret","success","error","docIds","d","document","documentStates","findDocumentsById","documentStatesMap","Map"
,"doc","docId","set","categorized","changeByDocId","eventBulk","events","change","documentId","errors","all","bulkInsertDocs","writeRow","writeResult","findOneAndUpdate","$setOnInsert","upsert","includeResultMetadata","value","conflictError","status","documentInDb","isError","event","get","bulkUpdateDocs","findOneAndReplace","_rev","previous","returnDocument","ok","currentDocState","currentDoc","length","lastState","newestRow","checkpoint","id","lwt","_meta","endTime","withDeleted","session","plainQuery","$in","_deleted","result","queryResult","find","toArray","row","query","originalPreparedQuery","preparedQuery","mongoSelector","skip","limit","sort","mongoSort","resultDocs","documents","count","countDocuments","mode","cleanup","minimumDeletedTime","maxDeletionTime","deleteMany","$lt","getAttachmentData","_documentId","_attachmentId","_digest","changeStream","remove","drop","close","pipe","c","conflictResultionTasks","resolveConflictResultionTask","_taskSolution","createMongoDBStorageInstance","params","instance","resolve"],"sources":["../../../../src/plugins/storage-mongodb/rx-storage-instance-mongodb.ts"],"sourcesContent":["import {\n BehaviorSubject,\n Observable,\n Subject,\n filter,\n firstValueFrom\n} from 'rxjs';\nimport { getPrimaryFieldOfPrimaryKey } from '../../rx-schema-helper.ts';\nimport type {\n BulkWriteRow,\n EventBulk,\n PreparedQuery,\n RxConflictResultionTask,\n RxConflictResultionTaskSolution,\n RxDocumentData,\n RxJsonSchema,\n RxStorageBulkWriteResponse,\n RxStorageChangeEvent,\n RxStorageCountResult,\n RxStorageDefaultCheckpoint,\n RxStorageInstance,\n RxStorageInstanceCreationParams,\n RxStorageQueryResult,\n RxStorageWriteErrorConflict,\n StringKeys\n} from '../../types/index.d.ts';\nimport {\n ensureNotFalsy,\n getFromMapOrThrow,\n isMaybeReadonlyArray,\n now,\n PROMISE_RESOLVE_VOID,\n requestIdlePromise\n} from '../../plugins/utils/index.ts';\nimport {\n MongoDBPreparedQuery,\n MongoDBStorageInternals,\n MongoQuerySelector,\n 
RxStorageMongoDBInstanceCreationOptions,\n RxStorageMongoDBSettings\n} from './mongodb-types.ts';\nimport { RxStorageMongoDB } from './rx-storage-mongodb.ts';\nimport {\n Db as MongoDatabase,\n Collection as MongoCollection,\n MongoClient,\n ObjectId,\n ClientSession\n} from 'mongodb';\nimport { categorizeBulkWriteRows } from '../../rx-storage-helper.ts';\nimport {\n MONGO_ID_SUBSTITUTE_FIELDNAME,\n getMongoDBIndexName,\n prepareMongoDBQuery,\n swapMongoToRxDoc,\n swapRxDocToMongo\n} from './mongodb-helper.ts';\n\nexport class RxStorageInstanceMongoDB implements RxStorageInstance<\n RxDocType,\n MongoDBStorageInternals,\n RxStorageMongoDBInstanceCreationOptions,\n RxStorageDefaultCheckpoint\n> {\n\n public readonly primaryPath: StringKeys>;\n public readonly inMongoPrimaryPath: string;\n public closed?: Promise;\n private readonly changes$: Subject>, RxStorageDefaultCheckpoint>> = new Subject();\n public readonly mongoClient: MongoClient;\n public readonly mongoDatabase: MongoDatabase;\n public readonly mongoCollectionPromise: Promise | any>>;\n // public mongoChangeStream?: MongoChangeStream>;\n\n\n /**\n * Closing the connection must not happen when\n * an operation is running, otherwise we get an error.\n * So we store all running operations here so that\n * they can be awaited.\n */\n public readonly runningOperations = new BehaviorSubject(0);\n public writeQueue: Promise = PROMISE_RESOLVE_VOID;\n\n /**\n * We use this to be able to still fetch\n * the objectId after transforming the document from mongo-style (with _id)\n * to RxDB\n */\n public readonly mongoObjectIdCache = new WeakMap, ObjectId>();\n\n constructor(\n public readonly storage: RxStorageMongoDB,\n public readonly databaseName: string,\n public readonly collectionName: string,\n public readonly schema: Readonly>>,\n public readonly internals: MongoDBStorageInternals,\n public readonly options: Readonly,\n public readonly settings: RxStorageMongoDBSettings\n ) {\n if (this.schema.attachments) {\n 
throw new Error('attachments not supported in mongodb storage, make a PR if you need that');\n }\n this.primaryPath = getPrimaryFieldOfPrimaryKey(this.schema.primaryKey);\n this.inMongoPrimaryPath = this.primaryPath === '_id' ? MONGO_ID_SUBSTITUTE_FIELDNAME : this.primaryPath;\n this.mongoClient = new MongoClient(storage.databaseSettings.connection);\n this.mongoDatabase = this.mongoClient.db(databaseName + '-v' + this.schema.version);\n\n const indexes = (this.schema.indexes ? this.schema.indexes.slice() : []).map(index => {\n const arIndex = isMaybeReadonlyArray(index) ? index.slice(0) : [index];\n return arIndex;\n });\n indexes.push([this.inMongoPrimaryPath]);\n\n this.mongoCollectionPromise = this.mongoDatabase.createCollection(collectionName)\n .then(async (mongoCollection) => {\n await mongoCollection.createIndexes(\n indexes.map(index => {\n const mongoIndex: any = {};\n index.forEach(field => mongoIndex[field] = 1);\n return { name: getMongoDBIndexName(index), key: mongoIndex };\n })\n );\n\n /**\n * TODO in a setup where multiple servers run node.js\n * processes that use the mongodb storage, we should propagate\n * events by listening to the mongodb changestream.\n * This maybe should be a premium feature.\n */\n // this.mongoChangeStream = mongoCollection.watch(\n // undefined, {\n // batchSize: 100\n // }\n // ).on('change', change => {\n\n\n // const eventBulkId = randomCouchString(10);\n // const newDocData: RxDocumentData = (change as any).fullDocument;\n // const documentId = newDocData[this.primaryPath] as any;\n\n // const eventBulk: EventBulk>, RxStorageDefaultCheckpoint> = {\n // checkpoint: {\n // id: newDocData[this.primaryPath] as any,\n // lwt: newDocData._meta.lwt\n // },\n // context: 'mongodb-write',\n // id: eventBulkId,\n // events: [{\n // documentData: newDocData,\n // documentId,\n // operation: 'INSERT',\n // previousDocumentData: undefined,\n // }],\n // startTime: now(),\n // endTime: now()\n // };\n\n // 
this.changes$.next(eventBulk);\n // });\n\n\n return mongoCollection;\n });\n\n\n }\n\n /**\n * Bulk writes on the mongodb storage.\n * Notice that MongoDB does not support cross-document transactions\n * so we have to do a update-if-previous-is-correct like operations.\n * (Similar to what RxDB does with the revision system)\n */\n bulkWrite(\n documentWrites: BulkWriteRow[],\n context: string\n ): Promise> {\n\n this.writeQueue = this.writeQueue.then(async () => {\n this.runningOperations.next(this.runningOperations.getValue() + 1);\n\n const mongoCollection = await this.mongoCollectionPromise;\n if (this.closed) {\n return Promise.reject(new Error('already closed'));\n }\n const primaryPath = this.primaryPath;\n const ret: RxStorageBulkWriteResponse = {\n success: [],\n error: []\n };\n\n\n const docIds = documentWrites.map(d => (d.document as any)[primaryPath]);\n const documentStates = await this.findDocumentsById(\n docIds,\n true\n );\n const documentStatesMap = new Map();\n documentStates.forEach(doc => {\n const docId = doc[primaryPath];\n documentStatesMap.set(docId, doc);\n });\n const categorized = categorizeBulkWriteRows(\n this,\n primaryPath as any,\n documentStatesMap,\n documentWrites,\n context\n );\n\n const changeByDocId = new Map>>();\n categorized.eventBulk.events.forEach(change => {\n changeByDocId.set(change.documentId, change);\n });\n\n\n ret.error = categorized.errors;\n\n /**\n * Reset the event bulk because\n * conflicts can still appear after the categorization\n */\n const eventBulk = categorized.eventBulk;\n eventBulk.events = [];\n\n await Promise.all([\n /**\n * Inserts\n * @link https://sparkbyexamples.com/mongodb/mongodb-insert-if-not-exists/\n */\n Promise.all(\n categorized.bulkInsertDocs.map(async (writeRow) => {\n const docId: string = writeRow.document[primaryPath] as any;\n const writeResult = await mongoCollection.findOneAndUpdate(\n {\n [this.inMongoPrimaryPath]: docId\n },\n {\n $setOnInsert: 
swapRxDocToMongo(writeRow.document)\n },\n {\n upsert: true,\n includeResultMetadata: true\n }\n );\n if (writeResult.value) {\n // had insert conflict\n const conflictError: RxStorageWriteErrorConflict = {\n status: 409,\n documentId: docId,\n writeRow,\n documentInDb: swapMongoToRxDoc(ensureNotFalsy(writeResult.value)),\n isError: true\n };\n ret.error.push(conflictError);\n } else {\n const event = changeByDocId.get(docId);\n if (event) {\n eventBulk.events.push(event);\n }\n ret.success.push(writeRow.document);\n }\n })\n ),\n /**\n * Updates\n */\n Promise.all(\n categorized.bulkUpdateDocs.map(async (writeRow) => {\n const docId = writeRow.document[primaryPath] as string;\n const writeResult = await mongoCollection.findOneAndReplace(\n {\n [this.inMongoPrimaryPath]: docId,\n _rev: ensureNotFalsy(writeRow.previous)._rev\n },\n swapRxDocToMongo(writeRow.document),\n {\n includeResultMetadata: true,\n upsert: false,\n returnDocument: 'before'\n }\n );\n if (!writeResult.ok) {\n const currentDocState = await this.findDocumentsById([docId], true);\n const currentDoc = currentDocState[0];\n // had insert conflict\n const conflictError: RxStorageWriteErrorConflict = {\n status: 409,\n documentId: docId,\n writeRow,\n documentInDb: ensureNotFalsy(currentDoc),\n isError: true\n };\n ret.error.push(conflictError);\n } else {\n const event = getFromMapOrThrow(changeByDocId, docId);\n eventBulk.events.push(event);\n ret.success.push(writeRow.document);\n }\n\n })\n )\n ]);\n\n if (categorized.eventBulk.events.length > 0) {\n const lastState = ensureNotFalsy(categorized.newestRow).document;\n categorized.eventBulk.checkpoint = {\n id: lastState[primaryPath],\n lwt: lastState._meta.lwt\n };\n categorized.eventBulk.endTime = now();\n this.changes$.next(categorized.eventBulk);\n }\n\n this.runningOperations.next(this.runningOperations.getValue() - 1);\n return ret;\n });\n return this.writeQueue;\n\n }\n\n async findDocumentsById(\n docIds: string[],\n withDeleted: boolean,\n 
session?: ClientSession\n ): Promise[]> {\n this.runningOperations.next(this.runningOperations.getValue() + 1);\n const mongoCollection = await this.mongoCollectionPromise;\n const primaryPath = this.primaryPath;\n\n const plainQuery: MongoQuerySelector = {\n [primaryPath]: {\n $in: docIds\n }\n };\n if (!withDeleted) {\n plainQuery._deleted = false;\n }\n const result: RxDocumentData[] = [];\n const queryResult = await mongoCollection.find(\n plainQuery,\n {\n session\n }\n ).toArray();\n queryResult.forEach(row => {\n result.push(\n swapMongoToRxDoc(\n row as any\n )\n );\n });\n this.runningOperations.next(this.runningOperations.getValue() - 1);\n return result;\n }\n\n async query(\n originalPreparedQuery: PreparedQuery\n ): Promise> {\n const preparedQuery = prepareMongoDBQuery(this.schema, originalPreparedQuery.query);\n\n this.runningOperations.next(this.runningOperations.getValue() + 1);\n await this.writeQueue;\n const mongoCollection = await this.mongoCollectionPromise;\n\n let query = mongoCollection.find(preparedQuery.mongoSelector);\n if (preparedQuery.query.skip) {\n query = query.skip(preparedQuery.query.skip);\n }\n if (preparedQuery.query.limit) {\n query = query.limit(preparedQuery.query.limit);\n }\n if (preparedQuery.query.sort) {\n query = query.sort(preparedQuery.mongoSort);\n }\n const resultDocs = await query.toArray();\n this.runningOperations.next(this.runningOperations.getValue() - 1);\n return {\n documents: resultDocs.map(d => swapMongoToRxDoc(d))\n };\n }\n\n async count(\n originalPreparedQuery: PreparedQuery\n ): Promise {\n const preparedQuery = prepareMongoDBQuery(this.schema, originalPreparedQuery.query);\n this.runningOperations.next(this.runningOperations.getValue() + 1);\n await this.writeQueue;\n const mongoCollection = await this.mongoCollectionPromise;\n const count = await mongoCollection.countDocuments(preparedQuery.mongoSelector);\n this.runningOperations.next(this.runningOperations.getValue() - 1);\n return {\n count,\n 
mode: 'fast'\n };\n }\n\n async cleanup(minimumDeletedTime: number): Promise {\n this.runningOperations.next(this.runningOperations.getValue() + 1);\n const mongoCollection = await this.mongoCollectionPromise;\n const maxDeletionTime = now() - minimumDeletedTime;\n await mongoCollection.deleteMany({\n _deleted: true,\n '_meta.lwt': {\n $lt: maxDeletionTime\n }\n });\n this.runningOperations.next(this.runningOperations.getValue() - 1);\n return true;\n }\n\n async getAttachmentData(\n _documentId: string,\n _attachmentId: string,\n _digest: string\n ): Promise {\n await this.mongoCollectionPromise;\n throw new Error('attachments not implemented, make a PR');\n }\n\n changeStream(): Observable>, RxStorageDefaultCheckpoint>> {\n return this.changes$;\n }\n\n async remove(): Promise {\n if (this.closed) {\n throw new Error('already closed');\n }\n this.runningOperations.next(this.runningOperations.getValue() + 1);\n const mongoCollection = await this.mongoCollectionPromise;\n await mongoCollection.drop();\n this.runningOperations.next(this.runningOperations.getValue() - 1);\n await this.close();\n }\n\n async close(): Promise {\n // TODO without this next-tick we have random fails in the tests\n await requestIdlePromise(200);\n\n if (this.closed) {\n return this.closed;\n }\n this.closed = (async () => {\n await this.mongoCollectionPromise;\n await firstValueFrom(this.runningOperations.pipe(filter(c => c === 0)));\n // await ensureNotFalsy(this.mongoChangeStream).close();\n await this.mongoClient.close();\n })();\n return this.closed;\n }\n\n conflictResultionTasks(): Observable> {\n return new Subject();\n }\n async resolveConflictResultionTask(_taskSolution: RxConflictResultionTaskSolution): Promise { }\n}\n\nexport function createMongoDBStorageInstance(\n storage: RxStorageMongoDB,\n params: RxStorageInstanceCreationParams,\n settings: RxStorageMongoDBSettings\n): Promise> {\n const instance = new RxStorageInstanceMongoDB(\n storage,\n params.databaseName,\n 
params.collectionName,\n params.schema,\n {},\n params.options,\n settings\n );\n return Promise.resolve(instance);\n}\n"],"mappings":"AAAA,SACIA,eAAe,EAEfC,OAAO,EACPC,MAAM,EACNC,cAAc,QACX,MAAM;AACb,SAASC,2BAA2B,QAAQ,2BAA2B;AAmBvE,SACIC,cAAc,EACdC,iBAAiB,EACjBC,oBAAoB,EACpBC,GAAG,EACHC,oBAAoB,EACpBC,kBAAkB,QACf,8BAA8B;AASrC,SAGIC,WAAW,QAGR,SAAS;AAChB,SAASC,uBAAuB,QAAQ,4BAA4B;AACpE,SACIC,6BAA6B,EAC7BC,mBAAmB,EACnBC,mBAAmB,EACnBC,gBAAgB,EAChBC,gBAAgB,QACb,qBAAqB;AAE5B,WAAaC,wBAAwB;EAcjC;;EAGA;AACJ;AACA;AACA;AACA;AACA;;EAII;AACJ;AACA;AACA;AACA;;EAGI,SAAAA,yBACoBC,OAAyB,EACzBC,YAAoB,EACpBC,cAAsB,EACtBC,MAAyD,EACzDC,SAAkC,EAClCC,OAA0D,EAC1DC,QAAkC,EACpD;IAAA,KA/BeC,QAAQ,GAAoG,IAAIzB,OAAO,CAAC,CAAC;IAAA,KAa1H0B,iBAAiB,GAAG,IAAI3B,eAAe,CAAC,CAAC,CAAC;IAAA,KACnD4B,UAAU,GAAiBnB,oBAAoB;IAAA,KAOtCoB,kBAAkB,GAAG,IAAIC,OAAO,CAAsC,CAAC;IAAA,KAGnEX,OAAyB,GAAzBA,OAAyB;IAAA,KACzBC,YAAoB,GAApBA,YAAoB;IAAA,KACpBC,cAAsB,GAAtBA,cAAsB;IAAA,KACtBC,MAAyD,GAAzDA,MAAyD;IAAA,KACzDC,SAAkC,GAAlCA,SAAkC;IAAA,KAClCC,OAA0D,GAA1DA,OAA0D;IAAA,KAC1DC,QAAkC,GAAlCA,QAAkC;IAElD,IAAI,IAAI,CAACH,MAAM,CAACS,WAAW,EAAE;MACzB,MAAM,IAAIC,KAAK,CAAC,0EAA0E,CAAC;IAC/F;IACA,IAAI,CAACC,WAAW,GAAG7B,2BAA2B,CAAC,IAAI,CAACkB,MAAM,CAACY,UAAU,CAAC;IACtE,IAAI,CAACC,kBAAkB,GAAG,IAAI,CAACF,WAAW,KAAK,KAAK,GAAGpB,6BAA6B,GAAG,IAAI,CAACoB,WAAW;IACvG,IAAI,CAACG,WAAW,GAAG,IAAIzB,WAAW,CAACQ,OAAO,CAACkB,gBAAgB,CAACC,UAAU,CAAC;IACvE,IAAI,CAACC,aAAa,GAAG,IAAI,CAACH,WAAW,CAACI,EAAE,CAACpB,YAAY,GAAG,IAAI,GAAG,IAAI,CAACE,MAAM,CAACmB,OAAO,CAAC;IAEnF,IAAMC,OAAO,GAAG,CAAC,IAAI,CAACpB,MAAM,CAACoB,OAAO,GAAG,IAAI,CAACpB,MAAM,CAACoB,OAAO,CAACC,KAAK,CAAC,CAAC,GAAG,EAAE,EAAEC,GAAG,CAACC,KAAK,IAAI;MAClF,IAAMC,OAAO,GAAGvC,oBAAoB,CAACsC,KAAK,CAAC,GAAGA,KAAK,CAACF,KAAK,CAAC,CAAC,CAAC,GAAG,CAACE,KAAK,CAAC;MACtE,OAAOC,OAAO;IAClB,CAAC,CAAC;IACFJ,OAAO,CAACK,IAAI,CAAC,CAAC,IAAI,CAACZ,kBAAkB,CAAC,CAAC;IAEvC,IAAI,CAACa,sBAAsB,GAAG,IAAI,CAACT,aAAa,CAACU,gBAAgB,CAAC5B,cAAc,CAAC,CAC5E6B,IAAI,CAAC,MAAOC,eAAe,IAAK;MAC7B,MAAMA,eAAe,CAACC,aAAa,CAC/BV,OAAO,CAACE,G
AAG,CAACC,KAAK,IAAI;QACjB,IAAMQ,UAAe,GAAG,CAAC,CAAC;QAC1BR,KAAK,CAACS,OAAO,CAACC,KAAK,IAAIF,UAAU,CAACE,KAAK,CAAC,GAAG,CAAC,CAAC;QAC7C,OAAO;UAAEC,IAAI,EAAE1C,mBAAmB,CAAC+B,KAAK,CAAC;UAAEY,GAAG,EAAEJ;QAAW,CAAC;MAChE,CAAC,CACL,CAAC;;MAED;AAChB;AACA;AACA;AACA;AACA;MACgB;MACA;MACA;MACA;MACA;;MAGA;MACA;MACA;;MAEA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;MACA;;MAEA;MACA;;MAGA,OAAOF,eAAe;IAC1B,CAAC,CAAC;EAGV;;EAEA;AACJ;AACA;AACA;AACA;AACA;EALI,IAAAO,MAAA,GAAAxC,wBAAA,CAAAyC,SAAA;EAAAD,MAAA,CAMAE,SAAS,GAAT,SAAAA,UACIC,cAAyC,EACzCC,OAAe,EAC+B;IAE9C,IAAI,CAAClC,UAAU,GAAG,IAAI,CAACA,UAAU,CAACsB,IAAI,CAAC,YAAY;MAC/C,IAAI,CAACvB,iBAAiB,CAACoC,IAAI,CAAC,IAAI,CAACpC,iBAAiB,CAACqC,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;MAElE,IAAMb,eAAe,GAAG,MAAM,IAAI,CAACH,sBAAsB;MACzD,IAAI,IAAI,CAACiB,MAAM,EAAE;QACb,OAAOC,OAAO,CAACC,MAAM,CAAC,IAAInC,KAAK,CAAC,gBAAgB,CAAC,CAAC;MACtD;MACA,IAAMC,WAAW,GAAG,IAAI,CAACA,WAAW;MACpC,IAAMmC,GAA0C,GAAG;QAC/CC,OAAO,EAAE,EAAE;QACXC,KAAK,EAAE;MACX,CAAC;MAGD,IAAMC,MAAM,GAAGV,cAAc,CAACjB,GAAG,CAAC4B,CAAC,IAAKA,CAAC,CAACC,QAAQ,CAASxC,WAAW,CAAC,CAAC;MACxE,IAAMyC,cAAc,GAAG,MAAM,IAAI,CAACC,iBAAiB,CAC/CJ,MAAM,EACN,IACJ,CAAC;MACD,IAAMK,iBAAiB,GAAG,IAAIC,GAAG,CAAC,CAAC;MACnCH,cAAc,CAACpB,OAAO,CAACwB,GAAG,IAAI;QAC1B,IAAMC,KAAK,GAAGD,GAAG,CAAC7C,WAAW,CAAC;QAC9B2C,iBAAiB,CAACI,GAAG,CAACD,KAAK,EAAED,GAAG,CAAC;MACrC,CAAC,CAAC;MACF,IAAMG,WAAW,GAAGrE,uBAAuB,CACvC,IAAI,EACJqB,WAAW,EACX2C,iBAAiB,EACjBf,cAAc,EACdC,OACJ,CAAC;MAED,IAAMoB,aAAa,GAAG,IAAIL,GAAG,CAA0D,CAAC;MACxFI,WAAW,CAACE,SAAS,CAACC,MAAM,CAAC9B,OAAO,CAAC+B,MAAM,IAAI;QAC3CH,aAAa,CAACF,GAAG,CAACK,MAAM,CAACC,UAAU,EAAED,MAAM,CAAC;MAChD,CAAC,CAAC;MAGFjB,GAAG,CAACE,KAAK,GAAGW,WAAW,CAACM,MAAM;;MAE9B;AACZ;AACA;AACA;MACY,IAAMJ,SAAS,GAAGF,WAAW,CAACE,SAAS;MACvCA,SAAS,CAACC,MAAM,GAAG,EAAE;MAErB,MAAMlB,OAAO,CAACsB,GAAG,CAAC;MACd;AAChB;AACA;AACA;MACgBtB,OAAO,CAACsB,GAAG,CACPP,WAAW,CAACQ,cAAc,CAAC7C,GAAG,CAAC,MAAO8C,QAAQ,IAAK;QAC/C,IAAMX,KAAa,GAAGW,QAAQ,CAACjB,QAAQ,CAACxC,WAAW,CAAQ;QAC3D,IAAM0D,WAAW,GAAG,MAAMxC,e
AAe,CAACyC,gBAAgB,CACtD;UACI,CAAC,IAAI,CAACzD,kBAAkB,GAAG4C;QAC/B,CAAC,EACD;UACIc,YAAY,EAAE5E,gBAAgB,CAACyE,QAAQ,CAACjB,QAAQ;QACpD,CAAC,EACD;UACIqB,MAAM,EAAE,IAAI;UACZC,qBAAqB,EAAE;QAC3B,CACJ,CAAC;QACD,IAAIJ,WAAW,CAACK,KAAK,EAAE;UACnB;UACA,IAAMC,aAAqD,GAAG;YAC1DC,MAAM,EAAE,GAAG;YACXZ,UAAU,EAAEP,KAAK;YACjBW,QAAQ;YACRS,YAAY,EAAEnF,gBAAgB,CAACX,cAAc,CAACsF,WAAW,CAACK,KAAK,CAAC,CAAC;YACjEI,OAAO,EAAE;UACb,CAAC;UACDhC,GAAG,CAACE,KAAK,CAACvB,IAAI,CAACkD,aAAa,CAAC;QACjC,CAAC,MAAM;UACH,IAAMI,KAAK,GAAGnB,aAAa,CAACoB,GAAG,CAACvB,KAAK,CAAC;UACtC,IAAIsB,KAAK,EAAE;YACPlB,SAAS,CAACC,MAAM,CAACrC,IAAI,CAACsD,KAAK,CAAC;UAChC;UACAjC,GAAG,CAACC,OAAO,CAACtB,IAAI,CAAC2C,QAAQ,CAACjB,QAAQ,CAAC;QACvC;MACJ,CAAC,CACL,CAAC;MACD;AAChB;AACA;MACgBP,OAAO,CAACsB,GAAG,CACPP,WAAW,CAACsB,cAAc,CAAC3D,GAAG,CAAC,MAAO8C,QAAQ,IAAK;QAC/C,IAAMX,KAAK,GAAGW,QAAQ,CAACjB,QAAQ,CAACxC,WAAW,CAAW;QACtD,IAAM0D,WAAW,GAAG,MAAMxC,eAAe,CAACqD,iBAAiB,CACvD;UACI,CAAC,IAAI,CAACrE,kBAAkB,GAAG4C,KAAK;UAChC0B,IAAI,EAAEpG,cAAc,CAACqF,QAAQ,CAACgB,QAAQ,CAAC,CAACD;QAC5C,CAAC,EACDxF,gBAAgB,CAACyE,QAAQ,CAACjB,QAAQ,CAAC,EACnC;UACIsB,qBAAqB,EAAE,IAAI;UAC3BD,MAAM,EAAE,KAAK;UACba,cAAc,EAAE;QACpB,CACJ,CAAC;QACD,IAAI,CAAChB,WAAW,CAACiB,EAAE,EAAE;UACjB,IAAMC,eAAe,GAAG,MAAM,IAAI,CAAClC,iBAAiB,CAAC,CAACI,KAAK,CAAC,EAAE,IAAI,CAAC;UACnE,IAAM+B,UAAU,GAAGD,eAAe,CAAC,CAAC,CAAC;UACrC;UACA,IAAMZ,aAAqD,GAAG;YAC1DC,MAAM,EAAE,GAAG;YACXZ,UAAU,EAAEP,KAAK;YACjBW,QAAQ;YACRS,YAAY,EAAE9F,cAAc,CAACyG,UAAU,CAAC;YACxCV,OAAO,EAAE;UACb,CAAC;UACDhC,GAAG,CAACE,KAAK,CAACvB,IAAI,CAACkD,aAAa,CAAC;QACjC,CAAC,MAAM;UACH,IAAMI,KAAK,GAAG/F,iBAAiB,CAAC4E,aAAa,EAAEH,KAAK,CAAC;UACrDI,SAAS,CAACC,MAAM,CAACrC,IAAI,CAACsD,KAAK,CAAC;UAC5BjC,GAAG,CAACC,OAAO,CAACtB,IAAI,CAAC2C,QAAQ,CAACjB,QAAQ,CAAC;QACvC;MAEJ,CAAC,CACL,CAAC,CACJ,CAAC;MAEF,IAAIQ,WAAW,CAACE,SAAS,CAACC,MAAM,CAAC2B,MAAM,GAAG,CAAC,EAAE;QACzC,IAAMC,SAAS,GAAG3G,cAAc,CAAC4E,WAAW,CAACgC,SAAS,CAAC,CAACxC,QAAQ;QAChEQ,WAAW,CAACE,SAAS,CAAC+B,UAAU,GAAG;UAC/BC,EAAE,EAAEH,SAAS,CAAC/E,WAAW,CAAC;UAC1BmF,GAAG,EAAEJ,SAAS,CAACK,KAAK,CA
ACD;QACzB,CAAC;QACDnC,WAAW,CAACE,SAAS,CAACmC,OAAO,GAAG9G,GAAG,CAAC,CAAC;QACrC,IAAI,CAACkB,QAAQ,CAACqC,IAAI,CAACkB,WAAW,CAACE,SAAS,CAAC;MAC7C;MAEA,IAAI,CAACxD,iBAAiB,CAACoC,IAAI,CAAC,IAAI,CAACpC,iBAAiB,CAACqC,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;MAClE,OAAOI,GAAG;IACd,CAAC,CAAC;IACF,OAAO,IAAI,CAACxC,UAAU;EAE1B,CAAC;EAAA8B,MAAA,CAEKiB,iBAAiB,GAAvB,eAAAA,kBACIJ,MAAgB,EAChBgD,WAAoB,EACpBC,OAAuB,EACa;IACpC,IAAI,CAAC7F,iBAAiB,CAACoC,IAAI,CAAC,IAAI,CAACpC,iBAAiB,CAACqC,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;IAClE,IAAMb,eAAe,GAAG,MAAM,IAAI,CAACH,sBAAsB;IACzD,IAAMf,WAAW,GAAG,IAAI,CAACA,WAAW;IAEpC,IAAMwF,UAAmC,GAAG;MACxC,CAACxF,WAAW,GAAG;QACXyF,GAAG,EAAEnD;MACT;IACJ,CAAC;IACD,IAAI,CAACgD,WAAW,EAAE;MACdE,UAAU,CAACE,QAAQ,GAAG,KAAK;IAC/B;IACA,IAAMC,MAAmC,GAAG,EAAE;IAC9C,IAAMC,WAAW,GAAG,MAAM1E,eAAe,CAAC2E,IAAI,CAC1CL,UAAU,EACV;MACID;IACJ,CACJ,CAAC,CAACO,OAAO,CAAC,CAAC;IACXF,WAAW,CAACvE,OAAO,CAAC0E,GAAG,IAAI;MACvBJ,MAAM,CAAC7E,IAAI,CACP/B,gBAAgB,CACZgH,GACJ,CACJ,CAAC;IACL,CAAC,CAAC;IACF,IAAI,CAACrG,iBAAiB,CAACoC,IAAI,CAAC,IAAI,CAACpC,iBAAiB,CAACqC,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;IAClE,OAAO4D,MAAM;EACjB,CAAC;EAAAlE,MAAA,CAEKuE,KAAK,GAAX,eAAAA,MACIC,qBAA+C,EACP;IACxC,IAAMC,aAAa,GAAGpH,mBAAmB,CAAC,IAAI,CAACO,MAAM,EAAE4G,qBAAqB,CAACD,KAAK,CAAC;IAEnF,IAAI,CAACtG,iBAAiB,CAACoC,IAAI,CAAC,IAAI,CAACpC,iBAAiB,CAACqC,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;IAClE,MAAM,IAAI,CAACpC,UAAU;IACrB,IAAMuB,eAAe,GAAG,MAAM,IAAI,CAACH,sBAAsB;IAEzD,IAAIiF,KAAK,GAAG9E,eAAe,CAAC2E,IAAI,CAACK,aAAa,CAACC,aAAa,CAAC;IAC7D,IAAID,aAAa,CAACF,KAAK,CAACI,IAAI,EAAE;MAC1BJ,KAAK,GAAGA,KAAK,CAACI,IAAI,CAACF,aAAa,CAACF,KAAK,CAACI,IAAI,CAAC;IAChD;IACA,IAAIF,aAAa,CAACF,KAAK,CAACK,KAAK,EAAE;MAC3BL,KAAK,GAAGA,KAAK,CAACK,KAAK,CAACH,aAAa,CAACF,KAAK,CAACK,KAAK,CAAC;IAClD;IACA,IAAIH,aAAa,CAACF,KAAK,CAACM,IAAI,EAAE;MAC1BN,KAAK,GAAGA,KAAK,CAACM,IAAI,CAACJ,aAAa,CAACK,SAAS,CAAC;IAC/C;IACA,IAAMC,UAAU,GAAG,MAAMR,KAAK,CAACF,OAAO,CAAC,CAAC;IACxC,IAAI,CAACpG,iBAAiB,CAACoC,IAAI,CAAC,IAAI,CAACpC,iBAAiB,CAACqC,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;IAClE,OAAO;MACH0E,SAAS,EAAED,
UAAU,CAAC7F,GAAG,CAAC4B,CAAC,IAAIxD,gBAAgB,CAACwD,CAAC,CAAC;IACtD,CAAC;EACL,CAAC;EAAAd,MAAA,CAEKiF,KAAK,GAAX,eAAAA,MACIT,qBAA+C,EAClB;IAC7B,IAAMC,aAAa,GAAGpH,mBAAmB,CAAC,IAAI,CAACO,MAAM,EAAE4G,qBAAqB,CAACD,KAAK,CAAC;IACnF,IAAI,CAACtG,iBAAiB,CAACoC,IAAI,CAAC,IAAI,CAACpC,iBAAiB,CAACqC,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;IAClE,MAAM,IAAI,CAACpC,UAAU;IACrB,IAAMuB,eAAe,GAAG,MAAM,IAAI,CAACH,sBAAsB;IACzD,IAAM2F,KAAK,GAAG,MAAMxF,eAAe,CAACyF,cAAc,CAACT,aAAa,CAACC,aAAa,CAAC;IAC/E,IAAI,CAACzG,iBAAiB,CAACoC,IAAI,CAAC,IAAI,CAACpC,iBAAiB,CAACqC,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;IAClE,OAAO;MACH2E,KAAK;MACLE,IAAI,EAAE;IACV,CAAC;EACL,CAAC;EAAAnF,MAAA,CAEKoF,OAAO,GAAb,eAAAA,QAAcC,kBAA0B,EAAoB;IACxD,IAAI,CAACpH,iBAAiB,CAACoC,IAAI,CAAC,IAAI,CAACpC,iBAAiB,CAACqC,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;IAClE,IAAMb,eAAe,GAAG,MAAM,IAAI,CAACH,sBAAsB;IACzD,IAAMgG,eAAe,GAAGxI,GAAG,CAAC,CAAC,GAAGuI,kBAAkB;IAClD,MAAM5F,eAAe,CAAC8F,UAAU,CAAC;MAC7BtB,QAAQ,EAAE,IAAI;MACd,WAAW,EAAE;QACTuB,GAAG,EAAEF;MACT;IACJ,CAAC,CAAC;IACF,IAAI,CAACrH,iBAAiB,CAACoC,IAAI,CAAC,IAAI,CAACpC,iBAAiB,CAACqC,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;IAClE,OAAO,IAAI;EACf,CAAC;EAAAN,MAAA,CAEKyF,iBAAiB,GAAvB,eAAAA,kBACIC,WAAmB,EACnBC,aAAqB,EACrBC,OAAe,EACA;IACf,MAAM,IAAI,CAACtG,sBAAsB;IACjC,MAAM,IAAIhB,KAAK,CAAC,wCAAwC,CAAC;EAC7D,CAAC;EAAA0B,MAAA,CAED6F,YAAY,GAAZ,SAAAA,aAAA,EAAmH;IAC/G,OAAO,IAAI,CAAC7H,QAAQ;EACxB,CAAC;EAAAgC,MAAA,CAEK8F,MAAM,GAAZ,eAAAA,OAAA,EAA8B;IAC1B,IAAI,IAAI,CAACvF,MAAM,EAAE;MACb,MAAM,IAAIjC,KAAK,CAAC,gBAAgB,CAAC;IACrC;IACA,IAAI,CAACL,iBAAiB,CAACoC,IAAI,CAAC,IAAI,CAACpC,iBAAiB,CAACqC,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;IAClE,IAAMb,eAAe,GAAG,MAAM,IAAI,CAACH,sBAAsB;IACzD,MAAMG,eAAe,CAACsG,IAAI,CAAC,CAAC;IAC5B,IAAI,CAAC9H,iBAAiB,CAACoC,IAAI,CAAC,IAAI,CAACpC,iBAAiB,CAACqC,QAAQ,CAAC,CAAC,GAAG,CAAC,CAAC;IAClE,MAAM,IAAI,CAAC0F,KAAK,CAAC,CAAC;EACtB,CAAC;EAAAhG,MAAA,CAEKgG,KAAK,GAAX,eAAAA,MAAA,EAA6B;IACzB;IACA,MAAMhJ,kBAAkB,CAAC,GAAG,CAAC;IAE7B,IAAI,IAAI,CAACuD,MAAM,EAAE;MACb,OAAO,IAAI,CAACA,MAAM;IACtB;IACA,IAAI,CAACA,MAAM,GAAG,CAAC,YAAY;MACvB,MAAM,IA
AI,CAACjB,sBAAsB;MACjC,MAAM7C,cAAc,CAAC,IAAI,CAACwB,iBAAiB,CAACgI,IAAI,CAACzJ,MAAM,CAAC0J,CAAC,IAAIA,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;MACvE;MACA,MAAM,IAAI,CAACxH,WAAW,CAACsH,KAAK,CAAC,CAAC;IAClC,CAAC,EAAE,CAAC;IACJ,OAAO,IAAI,CAACzF,MAAM;EACtB,CAAC;EAAAP,MAAA,CAEDmG,sBAAsB,GAAtB,SAAAA,uBAAA,EAAyE;IACrE,OAAO,IAAI5J,OAAO,CAAC,CAAC;EACxB,CAAC;EAAAyD,MAAA,CACKoG,4BAA4B,GAAlC,eAAAA,6BAAmCC,aAAyD,EAAiB,CAAE,CAAC;EAAA,OAAA7I,wBAAA;AAAA;AAGpH,OAAO,SAAS8I,4BAA4BA,CACxC7I,OAAyB,EACzB8I,MAA2F,EAC3FxI,QAAkC,EACU;EAC5C,IAAMyI,QAAQ,GAAG,IAAIhJ,wBAAwB,CACzCC,OAAO,EACP8I,MAAM,CAAC7I,YAAY,EACnB6I,MAAM,CAAC5I,cAAc,EACrB4I,MAAM,CAAC3I,MAAM,EACb,CAAC,CAAC,EACF2I,MAAM,CAACzI,OAAO,EACdC,QACJ,CAAC;EACD,OAAOyC,OAAO,CAACiG,OAAO,CAACD,QAAQ,CAAC;AACpC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-mongodb/rx-storage-mongodb.js b/dist/esm/plugins/storage-mongodb/rx-storage-mongodb.js deleted file mode 100644 index e7a6f80b82f..00000000000 --- a/dist/esm/plugins/storage-mongodb/rx-storage-mongodb.js +++ /dev/null @@ -1,22 +0,0 @@ -import { ensureRxStorageInstanceParamsAreCorrect } from "../../rx-storage-helper.js"; -import { RX_STORAGE_NAME_MONGODB } from "./mongodb-helper.js"; -import { createMongoDBStorageInstance } from "./rx-storage-instance-mongodb.js"; -import { RXDB_VERSION } from "../utils/utils-rxdb-version.js"; -export var RxStorageMongoDB = /*#__PURE__*/function () { - function RxStorageMongoDB(databaseSettings) { - this.name = RX_STORAGE_NAME_MONGODB; - this.rxdbVersion = RXDB_VERSION; - this.databaseSettings = databaseSettings; - } - var _proto = RxStorageMongoDB.prototype; - _proto.createStorageInstance = function createStorageInstance(params) { - ensureRxStorageInstanceParamsAreCorrect(params); - return createMongoDBStorageInstance(this, params, this.databaseSettings); - }; - return RxStorageMongoDB; -}(); -export function getRxStorageMongoDB(databaseSettings) { - var storage = new RxStorageMongoDB(databaseSettings); - return storage; -} -//# 
sourceMappingURL=rx-storage-mongodb.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-mongodb/rx-storage-mongodb.js.map b/dist/esm/plugins/storage-mongodb/rx-storage-mongodb.js.map deleted file mode 100644 index 1174b7e7176..00000000000 --- a/dist/esm/plugins/storage-mongodb/rx-storage-mongodb.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-storage-mongodb.js","names":["ensureRxStorageInstanceParamsAreCorrect","RX_STORAGE_NAME_MONGODB","createMongoDBStorageInstance","RXDB_VERSION","RxStorageMongoDB","databaseSettings","name","rxdbVersion","_proto","prototype","createStorageInstance","params","getRxStorageMongoDB","storage"],"sources":["../../../../src/plugins/storage-mongodb/rx-storage-mongodb.ts"],"sourcesContent":["import type {\n RxStorage,\n RxStorageInstanceCreationParams\n} from '../../types/index.d.ts';\n\nimport { ensureRxStorageInstanceParamsAreCorrect } from '../../rx-storage-helper.ts';\nimport { RX_STORAGE_NAME_MONGODB } from './mongodb-helper.ts';\nimport type { MongoDBDatabaseSettings, MongoDBSettings, MongoDBStorageInternals } from './mongodb-types.ts';\nimport { RxStorageInstanceMongoDB, createMongoDBStorageInstance } from './rx-storage-instance-mongodb.ts';\nimport { RXDB_VERSION } from '../utils/utils-rxdb-version.ts';\n\nexport class RxStorageMongoDB implements RxStorage {\n public name = RX_STORAGE_NAME_MONGODB;\n public readonly rxdbVersion = RXDB_VERSION;\n\n constructor(\n public databaseSettings: MongoDBDatabaseSettings\n ) { }\n\n public createStorageInstance(\n params: RxStorageInstanceCreationParams\n ): Promise> {\n ensureRxStorageInstanceParamsAreCorrect(params);\n return createMongoDBStorageInstance(this, params, this.databaseSettings);\n }\n}\n\nexport function getRxStorageMongoDB(\n databaseSettings: MongoDBDatabaseSettings\n): RxStorageMongoDB {\n const storage = new RxStorageMongoDB(databaseSettings);\n return 
storage;\n}\n"],"mappings":"AAKA,SAASA,uCAAuC,QAAQ,4BAA4B;AACpF,SAASC,uBAAuB,QAAQ,qBAAqB;AAE7D,SAAmCC,4BAA4B,QAAQ,kCAAkC;AACzG,SAASC,YAAY,QAAQ,gCAAgC;AAE7D,WAAaC,gBAAgB;EAIzB,SAAAA,iBACWC,gBAAyC,EAClD;IAAA,KALKC,IAAI,GAAGL,uBAAuB;IAAA,KACrBM,WAAW,GAAGJ,YAAY;IAAA,KAG/BE,gBAAyC,GAAzCA,gBAAyC;EAChD;EAAC,IAAAG,MAAA,GAAAJ,gBAAA,CAAAK,SAAA;EAAAD,MAAA,CAEEE,qBAAqB,GAA5B,SAAAA,sBACIC,MAAmE,EACvB;IAC5CX,uCAAuC,CAACW,MAAM,CAAC;IAC/C,OAAOT,4BAA4B,CAAC,IAAI,EAAES,MAAM,EAAE,IAAI,CAACN,gBAAgB,CAAC;EAC5E,CAAC;EAAA,OAAAD,gBAAA;AAAA;AAGL,OAAO,SAASQ,mBAAmBA,CAC/BP,gBAAyC,EACzB;EAChB,IAAMQ,OAAO,GAAG,IAAIT,gBAAgB,CAACC,gBAAgB,CAAC;EACtD,OAAOQ,OAAO;AAClB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-remote-websocket/index.js b/dist/esm/plugins/storage-remote-websocket/index.js deleted file mode 100644 index bc68ecc115f..00000000000 --- a/dist/esm/plugins/storage-remote-websocket/index.js +++ /dev/null @@ -1,73 +0,0 @@ -import { Subject } from 'rxjs'; -import { PROMISE_RESOLVE_VOID, getFromMapOrThrow } from "../../plugins/utils/index.js"; -import { createWebSocketClient, startSocketServer } from "../replication-websocket/index.js"; -import { exposeRxStorageRemote } from "../storage-remote/remote.js"; -import { getRxStorageRemote } from "../storage-remote/rx-storage-remote.js"; -import { createErrorAnswer } from "../storage-remote/storage-remote-helpers.js"; -export function startRxStorageRemoteWebsocketServer(options) { - var serverState = startSocketServer(options); - var websocketByConnectionId = new Map(); - var messages$ = new Subject(); - var exposeSettings = { - messages$: messages$.asObservable(), - storage: options.storage, - database: options.database, - customRequestHandler: options.customRequestHandler, - send(msg) { - var ws = getFromMapOrThrow(websocketByConnectionId, msg.connectionId); - ws.send(JSON.stringify(msg)); - } - }; - var exposeState = exposeRxStorageRemote(exposeSettings); - serverState.onConnection$.subscribe(ws => { - var 
onCloseHandlers = []; - ws.onclose = () => { - onCloseHandlers.map(fn => fn()); - }; - ws.on('message', messageString => { - var message = JSON.parse(messageString); - var connectionId = message.connectionId; - if (!websocketByConnectionId.has(connectionId)) { - /** - * If first message is not 'create', - * it is an error. - */ - if (message.method !== 'create' && message.method !== 'custom') { - ws.send(JSON.stringify(createErrorAnswer(message, new Error('First call must be a create call but is: ' + JSON.stringify(message))))); - return; - } - websocketByConnectionId.set(connectionId, ws); - } - messages$.next(message); - }); - }); - return { - serverState, - exposeState - }; -} -export function getRxStorageRemoteWebsocket(options) { - var identifier = [options.url, 'rx-remote-storage-websocket'].join(''); - var storage = getRxStorageRemote({ - identifier, - mode: options.mode, - async messageChannelCreator() { - var messages$ = new Subject(); - var websocketClient = await createWebSocketClient(options); - websocketClient.message$.subscribe(msg => messages$.next(msg)); - return { - messages$, - send(msg) { - return websocketClient.socket.send(JSON.stringify(msg)); - }, - close() { - websocketClient.socket.close(); - return PROMISE_RESOLVE_VOID; - } - }; - } - }); - return storage; -} -export * from "./types.js"; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-remote-websocket/index.js.map b/dist/esm/plugins/storage-remote-websocket/index.js.map deleted file mode 100644 index c4a1a283bb9..00000000000 --- a/dist/esm/plugins/storage-remote-websocket/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","names":["Subject","PROMISE_RESOLVE_VOID","getFromMapOrThrow","createWebSocketClient","startSocketServer","exposeRxStorageRemote","getRxStorageRemote","createErrorAnswer","startRxStorageRemoteWebsocketServer","options","serverState","websocketByConnectionId","Map","messages$","exposeSettings","asObservable","storage","database","customRequestHandler","send","msg","ws","connectionId","JSON","stringify","exposeState","onConnection$","subscribe","onCloseHandlers","onclose","map","fn","on","messageString","message","parse","has","method","Error","set","next","getRxStorageRemoteWebsocket","identifier","url","join","mode","messageChannelCreator","websocketClient","message$","socket","close"],"sources":["../../../../src/plugins/storage-remote-websocket/index.ts"],"sourcesContent":["import { Subject } from 'rxjs';\nimport type {\n WebSocket\n} from 'ws';\nimport {\n PROMISE_RESOLVE_VOID,\n getFromMapOrThrow\n} from '../../plugins/utils/index.ts';\nimport {\n createWebSocketClient,\n startSocketServer\n} from '../replication-websocket/index.ts';\nimport { exposeRxStorageRemote } from '../storage-remote/remote.ts';\nimport { getRxStorageRemote } from '../storage-remote/rx-storage-remote.ts';\nimport { createErrorAnswer } from '../storage-remote/storage-remote-helpers.ts';\nimport type {\n MessageFromRemote,\n MessageToRemote,\n RxStorageRemoteExposeSettings\n} from '../storage-remote/storage-remote-types.ts';\nimport type {\n RxStorageRemoteWebsocketClient,\n RxStorageRemoteWebsocketClientOptions,\n RxStorageRemoteWebsocketServerOptions,\n RxStorageRemoteWebsocketServerState\n} from './types.ts';\nexport function startRxStorageRemoteWebsocketServer(\n options: RxStorageRemoteWebsocketServerOptions\n): RxStorageRemoteWebsocketServerState {\n const serverState = startSocketServer(options);\n\n const websocketByConnectionId = new Map();\n const messages$ = new Subject();\n const exposeSettings: RxStorageRemoteExposeSettings = {\n messages$: 
messages$.asObservable(),\n storage: options.storage as any,\n database: options.database as any,\n customRequestHandler: options.customRequestHandler,\n send(msg) {\n const ws = getFromMapOrThrow(websocketByConnectionId, msg.connectionId);\n ws.send(JSON.stringify(msg));\n }\n };\n const exposeState = exposeRxStorageRemote(exposeSettings);\n\n serverState.onConnection$.subscribe(ws => {\n const onCloseHandlers: Function[] = [];\n ws.onclose = () => {\n onCloseHandlers.map(fn => fn());\n };\n ws.on('message', (messageString: string) => {\n const message: MessageToRemote = JSON.parse(messageString);\n const connectionId = message.connectionId;\n if (!websocketByConnectionId.has(connectionId)) {\n /**\n * If first message is not 'create',\n * it is an error.\n */\n if (\n message.method !== 'create' &&\n message.method !== 'custom'\n ) {\n ws.send(\n JSON.stringify(\n createErrorAnswer(message, new Error('First call must be a create call but is: ' + JSON.stringify(message)))\n )\n );\n return;\n }\n websocketByConnectionId.set(connectionId, ws);\n }\n messages$.next(message);\n });\n });\n\n return {\n serverState,\n exposeState\n };\n}\n\nexport function getRxStorageRemoteWebsocket(\n options: RxStorageRemoteWebsocketClientOptions\n): RxStorageRemoteWebsocketClient {\n const identifier = [\n options.url,\n 'rx-remote-storage-websocket'\n ].join('');\n const storage = getRxStorageRemote({\n identifier,\n mode: options.mode,\n async messageChannelCreator() {\n const messages$ = new Subject();\n const websocketClient = await createWebSocketClient(options as any);\n websocketClient.message$.subscribe(msg => messages$.next(msg));\n return {\n messages$,\n send(msg) {\n return websocketClient.socket.send(JSON.stringify(msg));\n },\n close() {\n websocketClient.socket.close();\n return PROMISE_RESOLVE_VOID;\n }\n };\n\n }\n });\n return storage;\n}\n\n\nexport * from 
'./types.ts';\n\n"],"mappings":"AAAA,SAASA,OAAO,QAAQ,MAAM;AAI9B,SACIC,oBAAoB,EACpBC,iBAAiB,QACd,8BAA8B;AACrC,SACIC,qBAAqB,EACrBC,iBAAiB,QACd,mCAAmC;AAC1C,SAASC,qBAAqB,QAAQ,6BAA6B;AACnE,SAASC,kBAAkB,QAAQ,wCAAwC;AAC3E,SAASC,iBAAiB,QAAQ,6CAA6C;AAY/E,OAAO,SAASC,mCAAmCA,CAC/CC,OAA8C,EACX;EACnC,IAAMC,WAAW,GAAGN,iBAAiB,CAACK,OAAO,CAAC;EAE9C,IAAME,uBAAuB,GAAG,IAAIC,GAAG,CAAoB,CAAC;EAC5D,IAAMC,SAAS,GAAG,IAAIb,OAAO,CAAkB,CAAC;EAChD,IAAMc,cAA6C,GAAG;IAClDD,SAAS,EAAEA,SAAS,CAACE,YAAY,CAAC,CAAC;IACnCC,OAAO,EAAEP,OAAO,CAACO,OAAc;IAC/BC,QAAQ,EAAER,OAAO,CAACQ,QAAe;IACjCC,oBAAoB,EAAET,OAAO,CAACS,oBAAoB;IAClDC,IAAIA,CAACC,GAAG,EAAE;MACN,IAAMC,EAAE,GAAGnB,iBAAiB,CAACS,uBAAuB,EAAES,GAAG,CAACE,YAAY,CAAC;MACvED,EAAE,CAACF,IAAI,CAACI,IAAI,CAACC,SAAS,CAACJ,GAAG,CAAC,CAAC;IAChC;EACJ,CAAC;EACD,IAAMK,WAAW,GAAGpB,qBAAqB,CAACS,cAAc,CAAC;EAEzDJ,WAAW,CAACgB,aAAa,CAACC,SAAS,CAACN,EAAE,IAAI;IACtC,IAAMO,eAA2B,GAAG,EAAE;IACtCP,EAAE,CAACQ,OAAO,GAAG,MAAM;MACfD,eAAe,CAACE,GAAG,CAACC,EAAE,IAAIA,EAAE,CAAC,CAAC,CAAC;IACnC,CAAC;IACDV,EAAE,CAACW,EAAE,CAAC,SAAS,EAAGC,aAAqB,IAAK;MACxC,IAAMC,OAAwB,GAAGX,IAAI,CAACY,KAAK,CAACF,aAAa,CAAC;MAC1D,IAAMX,YAAY,GAAGY,OAAO,CAACZ,YAAY;MACzC,IAAI,CAACX,uBAAuB,CAACyB,GAAG,CAACd,YAAY,CAAC,EAAE;QAC5C;AAChB;AACA;AACA;QACgB,IACIY,OAAO,CAACG,MAAM,KAAK,QAAQ,IAC3BH,OAAO,CAACG,MAAM,KAAK,QAAQ,EAC7B;UACEhB,EAAE,CAACF,IAAI,CACHI,IAAI,CAACC,SAAS,CACVjB,iBAAiB,CAAC2B,OAAO,EAAE,IAAII,KAAK,CAAC,2CAA2C,GAAGf,IAAI,CAACC,SAAS,CAACU,OAAO,CAAC,CAAC,CAC/G,CACJ,CAAC;UACD;QACJ;QACAvB,uBAAuB,CAAC4B,GAAG,CAACjB,YAAY,EAAED,EAAE,CAAC;MACjD;MACAR,SAAS,CAAC2B,IAAI,CAACN,OAAO,CAAC;IAC3B,CAAC,CAAC;EACN,CAAC,CAAC;EAEF,OAAO;IACHxB,WAAW;IACXe;EACJ,CAAC;AACL;AAEA,OAAO,SAASgB,2BAA2BA,CACvChC,OAA8C,EAChB;EAC9B,IAAMiC,UAAU,GAAG,CACfjC,OAAO,CAACkC,GAAG,EACX,6BAA6B,CAChC,CAACC,IAAI,CAAC,EAAE,CAAC;EACV,IAAM5B,OAAO,GAAGV,kBAAkB,CAAC;IAC/BoC,UAAU;IACVG,IAAI,EAAEpC,OAAO,CAACoC,IAAI;IAClB,MAAMC,qBAAqBA,CAAA,EAAG;MAC1B,IAAMjC,SAAS,GAAG,IAAIb,OAAO,CAAoB,CAAC;MAClD,IAAM+C,eAAe,GAAG,MAAM5C,qBAAqB,CAACM,OAAc,CAAC;MACnEsC,eA
Ae,CAACC,QAAQ,CAACrB,SAAS,CAACP,GAAG,IAAIP,SAAS,CAAC2B,IAAI,CAACpB,GAAG,CAAC,CAAC;MAC9D,OAAO;QACHP,SAAS;QACTM,IAAIA,CAACC,GAAG,EAAE;UACN,OAAO2B,eAAe,CAACE,MAAM,CAAC9B,IAAI,CAACI,IAAI,CAACC,SAAS,CAACJ,GAAG,CAAC,CAAC;QAC3D,CAAC;QACD8B,KAAKA,CAAA,EAAG;UACJH,eAAe,CAACE,MAAM,CAACC,KAAK,CAAC,CAAC;UAC9B,OAAOjD,oBAAoB;QAC/B;MACJ,CAAC;IAEL;EACJ,CAAC,CAAC;EACF,OAAOe,OAAO;AAClB;AAGA,cAAc,YAAY","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-remote-websocket/types.js b/dist/esm/plugins/storage-remote-websocket/types.js deleted file mode 100644 index f4623599160..00000000000 --- a/dist/esm/plugins/storage-remote-websocket/types.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-remote-websocket/types.js.map b/dist/esm/plugins/storage-remote-websocket/types.js.map deleted file mode 100644 index dd77baacfbb..00000000000 --- a/dist/esm/plugins/storage-remote-websocket/types.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"types.js","names":[],"sources":["../../../../src/plugins/storage-remote-websocket/types.ts"],"sourcesContent":["import type {\n WebsocketServerState\n} from '../replication-websocket/index.ts';\nimport type { ServerOptions, ClientOptions } from 'ws';\nimport type { RxDatabase, RxStorage } from '../../types/index.d.ts';\nimport type {\n CustomRequestHandler,\n RxStorageRemoteExposeType,\n RxStorageRemoteSettings\n} from '../storage-remote/storage-remote-types.ts';\nimport { RxStorageRemote } from '../storage-remote/index.ts';\n\nexport type RxStorageRemoteWebsocketServerOptions = ServerOptions & {\n storage?: RxStorage;\n database?: RxDatabase;\n customRequestHandler?: CustomRequestHandler;\n};\n\nexport type RxStorageRemoteWebsocketServerState = {\n serverState: WebsocketServerState;\n exposeState: RxStorageRemoteExposeType;\n};\n\nexport type RxStorageRemoteWebsocketClientOptions = ClientOptions & {\n url: 
string;\n mode: RxStorageRemoteSettings['mode'];\n};\n\nexport type RxStorageRemoteWebsocketClient = RxStorageRemote;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-remote/index.js b/dist/esm/plugins/storage-remote/index.js deleted file mode 100644 index f79310ad1eb..00000000000 --- a/dist/esm/plugins/storage-remote/index.js +++ /dev/null @@ -1,6 +0,0 @@ -export * from "./rx-storage-remote.js"; -export * from "./storage-remote-types.js"; -export * from "./storage-remote-helpers.js"; -export * from "./message-channel-cache.js"; -export * from "./remote.js"; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-remote/index.js.map b/dist/esm/plugins/storage-remote/index.js.map deleted file mode 100644 index a5d9c3482b9..00000000000 --- a/dist/esm/plugins/storage-remote/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":[],"sources":["../../../../src/plugins/storage-remote/index.ts"],"sourcesContent":["export * from './rx-storage-remote.ts';\nexport * from './storage-remote-types.ts';\nexport * from './storage-remote-helpers.ts';\nexport * from './message-channel-cache.ts';\nexport * from './remote.ts';\n"],"mappings":"AAAA,cAAc,wBAAwB;AACtC,cAAc,2BAA2B;AACzC,cAAc,6BAA6B;AAC3C,cAAc,4BAA4B;AAC1C,cAAc,aAAa","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-remote/message-channel-cache.js b/dist/esm/plugins/storage-remote/message-channel-cache.js deleted file mode 100644 index ddce2fd711a..00000000000 --- a/dist/esm/plugins/storage-remote/message-channel-cache.js +++ /dev/null @@ -1,44 +0,0 @@ -import { PROMISE_RESOLVE_VOID, getFromMapOrCreate, getFromMapOrThrow } from "../utils/index.js"; -export var MESSAGE_CHANNEL_CACHE_BY_IDENTIFIER = new Map(); -export var CACHE_ITEM_BY_MESSAGE_CHANNEL = new WeakMap(); -export var OPEN_REMOTE_MESSAGE_CHANNELS = new Set(); -function getMessageChannelCache(identifier) { - 
return getFromMapOrCreate(MESSAGE_CHANNEL_CACHE_BY_IDENTIFIER, identifier, () => new Map()); -} -export function getMessageChannel(settings, cacheKeys, keepAlive = false) { - var cacheKey = getCacheKey(settings, cacheKeys); - var cacheItem = getFromMapOrCreate(getMessageChannelCache(settings.identifier), cacheKey, () => { - var newCacheItem = { - identifier: settings.identifier, - cacheKey, - keepAlive, - refCount: 1, - messageChannel: settings.messageChannelCreator().then(messageChannel => { - OPEN_REMOTE_MESSAGE_CHANNELS.add(messageChannel); - CACHE_ITEM_BY_MESSAGE_CHANNEL.set(messageChannel, newCacheItem); - return messageChannel; - }) - }; - return newCacheItem; - }, existingCacheItem => { - existingCacheItem.refCount = existingCacheItem.refCount + 1; - }); - return cacheItem.messageChannel; -} -export function closeMessageChannel(messageChannel) { - var cacheItem = getFromMapOrThrow(CACHE_ITEM_BY_MESSAGE_CHANNEL, messageChannel); - cacheItem.refCount = cacheItem.refCount - 1; - if (cacheItem.refCount === 0 && !cacheItem.keepAlive) { - getMessageChannelCache(cacheItem.identifier).delete(cacheItem.cacheKey); - OPEN_REMOTE_MESSAGE_CHANNELS.delete(messageChannel); - return messageChannel.close(); - } else { - return PROMISE_RESOLVE_VOID; - } -} -function getCacheKey(settings, cacheKeys) { - cacheKeys = cacheKeys.slice(0); - cacheKeys.unshift(settings.identifier); - return cacheKeys.join('||'); -} -//# sourceMappingURL=message-channel-cache.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-remote/message-channel-cache.js.map b/dist/esm/plugins/storage-remote/message-channel-cache.js.map deleted file mode 100644 index 435cde06eda..00000000000 --- a/dist/esm/plugins/storage-remote/message-channel-cache.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"message-channel-cache.js","names":["PROMISE_RESOLVE_VOID","getFromMapOrCreate","getFromMapOrThrow","MESSAGE_CHANNEL_CACHE_BY_IDENTIFIER","Map","CACHE_ITEM_BY_MESSAGE_CHANNEL","WeakMap","OPEN_REMOTE_MESSAGE_CHANNELS","Set","getMessageChannelCache","identifier","getMessageChannel","settings","cacheKeys","keepAlive","cacheKey","getCacheKey","cacheItem","newCacheItem","refCount","messageChannel","messageChannelCreator","then","add","set","existingCacheItem","closeMessageChannel","delete","close","slice","unshift","join"],"sources":["../../../../src/plugins/storage-remote/message-channel-cache.ts"],"sourcesContent":["import {\n PROMISE_RESOLVE_VOID,\n getFromMapOrCreate,\n getFromMapOrThrow\n} from '../utils/index.ts';\nimport {\n RemoteMessageChannel,\n RxStorageRemoteSettings\n} from './storage-remote-types.ts';\n\nexport type RemoteMessageChannelCacheItem = {\n identifier: string;\n cacheKey: string;\n messageChannel: Promise;\n refCount: number;\n keepAlive: boolean;\n};\n\nexport const MESSAGE_CHANNEL_CACHE_BY_IDENTIFIER = new Map>();\nexport const CACHE_ITEM_BY_MESSAGE_CHANNEL = new WeakMap();\n\n\nexport const OPEN_REMOTE_MESSAGE_CHANNELS = new Set();\n\nfunction getMessageChannelCache(\n identifier: string\n) {\n return getFromMapOrCreate(\n MESSAGE_CHANNEL_CACHE_BY_IDENTIFIER,\n identifier,\n () => new Map()\n );\n}\n\nexport function getMessageChannel(\n settings: RxStorageRemoteSettings,\n cacheKeys: string[],\n keepAlive: boolean = false\n): Promise {\n const cacheKey = getCacheKey(settings, cacheKeys);\n const cacheItem = getFromMapOrCreate(\n getMessageChannelCache(settings.identifier),\n cacheKey,\n () => {\n const newCacheItem: RemoteMessageChannelCacheItem = {\n identifier: settings.identifier,\n cacheKey,\n keepAlive,\n refCount: 1,\n messageChannel: settings.messageChannelCreator()\n .then((messageChannel) => {\n OPEN_REMOTE_MESSAGE_CHANNELS.add(messageChannel);\n CACHE_ITEM_BY_MESSAGE_CHANNEL.set(messageChannel, newCacheItem);\n 
return messageChannel;\n }),\n };\n return newCacheItem;\n },\n (existingCacheItem) => {\n existingCacheItem.refCount = existingCacheItem.refCount + 1;\n }\n );\n return cacheItem.messageChannel;\n}\n\n\nexport function closeMessageChannel(\n messageChannel: RemoteMessageChannel\n): Promise {\n const cacheItem = getFromMapOrThrow(CACHE_ITEM_BY_MESSAGE_CHANNEL, messageChannel);\n cacheItem.refCount = cacheItem.refCount - 1;\n if (cacheItem.refCount === 0 && !cacheItem.keepAlive) {\n getMessageChannelCache(cacheItem.identifier).delete(cacheItem.cacheKey);\n OPEN_REMOTE_MESSAGE_CHANNELS.delete(messageChannel);\n return messageChannel.close();\n } else {\n return PROMISE_RESOLVE_VOID;\n }\n}\n\nfunction getCacheKey(\n settings: RxStorageRemoteSettings,\n cacheKeys: string[]\n): string {\n cacheKeys = cacheKeys.slice(0);\n cacheKeys.unshift(settings.identifier);\n return cacheKeys.join('||');\n}\n"],"mappings":"AAAA,SACIA,oBAAoB,EACpBC,kBAAkB,EAClBC,iBAAiB,QACd,mBAAmB;AAc1B,OAAO,IAAMC,mCAAmC,GAAG,IAAIC,GAAG,CAAqD,CAAC;AAChH,OAAO,IAAMC,6BAA6B,GAAG,IAAIC,OAAO,CAAsD,CAAC;AAG/G,OAAO,IAAMC,4BAA4B,GAAG,IAAIC,GAAG,CAAuB,CAAC;AAE3E,SAASC,sBAAsBA,CAC3BC,UAAkB,EACpB;EACE,OAAOT,kBAAkB,CACrBE,mCAAmC,EACnCO,UAAU,EACV,MAAM,IAAIN,GAAG,CAAC,CAClB,CAAC;AACL;AAEA,OAAO,SAASO,iBAAiBA,CAC7BC,QAAiC,EACjCC,SAAmB,EACnBC,SAAkB,GAAG,KAAK,EACG;EAC7B,IAAMC,QAAQ,GAAGC,WAAW,CAACJ,QAAQ,EAAEC,SAAS,CAAC;EACjD,IAAMI,SAAS,GAAGhB,kBAAkB,CAChCQ,sBAAsB,CAACG,QAAQ,CAACF,UAAU,CAAC,EAC3CK,QAAQ,EACR,MAAM;IACF,IAAMG,YAA2C,GAAG;MAChDR,UAAU,EAAEE,QAAQ,CAACF,UAAU;MAC/BK,QAAQ;MACRD,SAAS;MACTK,QAAQ,EAAE,CAAC;MACXC,cAAc,EAAER,QAAQ,CAACS,qBAAqB,CAAC,CAAC,CAC3CC,IAAI,CAAEF,cAAc,IAAK;QACtBb,4BAA4B,CAACgB,GAAG,CAACH,cAAc,CAAC;QAChDf,6BAA6B,CAACmB,GAAG,CAACJ,cAAc,EAAEF,YAAY,CAAC;QAC/D,OAAOE,cAAc;MACzB,CAAC;IACT,CAAC;IACD,OAAOF,YAAY;EACvB,CAAC,EACAO,iBAAiB,IAAK;IACnBA,iBAAiB,CAACN,QAAQ,GAAGM,iBAAiB,CAACN,QAAQ,GAAG,CAAC;EAC/D,CACJ,CAAC;EACD,OAAOF,SAAS,CAACG,cAAc;AACnC;AAGA,OAAO,SAASM,mBAAmBA,CAC/BN,cAAoC,EACvB;EACb,IAAMH,SAAS
,GAAGf,iBAAiB,CAACG,6BAA6B,EAAEe,cAAc,CAAC;EAClFH,SAAS,CAACE,QAAQ,GAAGF,SAAS,CAACE,QAAQ,GAAG,CAAC;EAC3C,IAAIF,SAAS,CAACE,QAAQ,KAAK,CAAC,IAAI,CAACF,SAAS,CAACH,SAAS,EAAE;IAClDL,sBAAsB,CAACQ,SAAS,CAACP,UAAU,CAAC,CAACiB,MAAM,CAACV,SAAS,CAACF,QAAQ,CAAC;IACvER,4BAA4B,CAACoB,MAAM,CAACP,cAAc,CAAC;IACnD,OAAOA,cAAc,CAACQ,KAAK,CAAC,CAAC;EACjC,CAAC,MAAM;IACH,OAAO5B,oBAAoB;EAC/B;AACJ;AAEA,SAASgB,WAAWA,CAChBJ,QAAiC,EACjCC,SAAmB,EACb;EACNA,SAAS,GAAGA,SAAS,CAACgB,KAAK,CAAC,CAAC,CAAC;EAC9BhB,SAAS,CAACiB,OAAO,CAAClB,QAAQ,CAACF,UAAU,CAAC;EACtC,OAAOG,SAAS,CAACkB,IAAI,CAAC,IAAI,CAAC;AAC/B","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-remote/remote.js b/dist/esm/plugins/storage-remote/remote.js deleted file mode 100644 index a0d921119f9..00000000000 --- a/dist/esm/plugins/storage-remote/remote.js +++ /dev/null @@ -1,202 +0,0 @@ -import { filter } from 'rxjs'; -import { deepEqual, ensureNotFalsy } from "../../plugins/utils/index.js"; -import { createAnswer, createErrorAnswer } from "./storage-remote-helpers.js"; -import { getChangedDocumentsSince } from "../../rx-storage-helper.js"; - -/** - * Run this on the 'remote' part, - * so that RxStorageMessageChannel can connect to it. 
- */ -export function exposeRxStorageRemote(settings) { - var instanceByFullName = new Map(); - settings.messages$.pipe(filter(msg => msg.method === 'custom')).subscribe(async msg => { - if (!settings.customRequestHandler) { - settings.send(createErrorAnswer(msg, new Error('Remote storage: cannot resolve custom request because settings.customRequestHandler is not set'))); - } else { - try { - var result = await settings.customRequestHandler(msg.params); - settings.send(createAnswer(msg, result)); - } catch (err) { - settings.send(createErrorAnswer(msg, err)); - } - } - }); - function getRxStorageInstance(params) { - if (settings.storage) { - return settings.storage.createStorageInstance(params); - } else if (settings.database) { - var storageInstances = Array.from(settings.database.storageInstances); - var collectionName = params.collectionName; - var storageInstance = storageInstances.find(instance => instance.collectionName === collectionName); - if (!storageInstance) { - console.dir(storageInstances); - throw new Error('storageInstance does not exist ' + JSON.stringify({ - collectionName - })); - } - var schema = params.schema; - if (!deepEqual(schema, storageInstance.schema)) { - throw new Error('Wrong schema ' + JSON.stringify({ - schema, - existingSchema: storageInstance.schema - })); - } - return Promise.resolve(storageInstance); - } else { - throw new Error('no base given'); - } - } - settings.messages$.pipe(filter(msg => msg.method === 'create')).subscribe(async msg => { - var connectionId = msg.connectionId; - - /** - * Do an isArray check here - * for runtime check types to ensure we have - * instance creation params and not method input params. - */ - if (Array.isArray(msg.params)) { - return; - } - var params = msg.params; - var collectionName = params.collectionName; - - /** - * We de-duplicate the storage instances. 
- * This makes sense in many environments like - * electron where on main process contains the storage - * for multiple renderer processes. Same goes for SharedWorkers etc. - */ - var fullName = [params.databaseName, params.collectionName, params.schema.version].join('|'); - var state = instanceByFullName.get(fullName); - if (!state) { - try { - state = { - /** - * We work with a promise here to ensure - * that parallel create-calls will still end up - * with exactly one instance and not more. - */ - storageInstancePromise: getRxStorageInstance(params), - connectionIds: new Set(), - params - }; - instanceByFullName.set(fullName, state); - - /** - * Must await the creation here - * so that in case of an error, - * it knows about the error message and can send - * that back to the main process. - */ - await state.storageInstancePromise; - } catch (err) { - settings.send(createErrorAnswer(msg, err)); - return; - } - } else { - // if instance already existed, ensure that the schema is equal - if (!deepEqual(params.schema, state.params.schema)) { - settings.send(createErrorAnswer(msg, new Error('Remote storage: schema not equal to existing storage'))); - return; - } - } - state.connectionIds.add(msg.connectionId); - var subs = []; - var storageInstance = await state.storageInstancePromise; - /** - * Automatically subscribe to the changeStream() - * because we always need them. 
- */ - subs.push(storageInstance.changeStream().subscribe(changes => { - var message = { - connectionId, - answerTo: 'changestream', - method: 'changeStream', - return: changes - }; - settings.send(message); - })); - subs.push(storageInstance.conflictResultionTasks().subscribe(conflicts => { - var message = { - connectionId, - answerTo: 'conflictResultionTasks', - method: 'conflictResultionTasks', - return: conflicts - }; - settings.send(message); - })); - var connectionClosed = false; - function closeThisConnection() { - if (connectionClosed) { - return; - } - connectionClosed = true; - subs.forEach(sub => sub.unsubscribe()); - ensureNotFalsy(state).connectionIds.delete(connectionId); - instanceByFullName.delete(fullName); - /** - * TODO how to notify the other ports on remove() ? - */ - } - - // also close the connection when the collection gets destroyed - if (settings.database) { - var database = settings.database; - var collection = database.collections[collectionName]; - if (collection) { - collection.onDestroy.push(() => closeThisConnection()); - } else { - database.onDestroy.push(() => closeThisConnection()); - } - } - subs.push(settings.messages$.pipe(filter(subMsg => subMsg.connectionId === connectionId)).subscribe(async plainMessage => { - var message = plainMessage; - if (message.method === 'create' || message.method === 'custom') { - return; - } - if (!Array.isArray(message.params)) { - return; - } - var result; - try { - if (message.method === 'close' && settings.database) { - /** - * Do not close the storageInstance if it was taken from - * a running RxDatabase. - * In that case we only close the instance - * when the RxDatabase gets destroyed. - */ - settings.send(createAnswer(message, null)); - return; - } - /** - * On calls to 'close()', - * we only close the main instance if there are no other - * ports connected. 
- */ - if (message.method === 'close' && ensureNotFalsy(state).connectionIds.size > 1) { - settings.send(createAnswer(message, null)); - ensureNotFalsy(state).connectionIds.delete(connectionId); - subs.forEach(sub => sub.unsubscribe()); - return; - } - if (message.method === 'getChangedDocumentsSince' && !storageInstance.getChangedDocumentsSince) { - result = await getChangedDocumentsSince(storageInstance, message.params[0], message.params[1]); - } else { - result = await storageInstance[message.method](message.params[0], message.params[1], message.params[2], message.params[3]); - } - if (message.method === 'close' || message.method === 'remove') { - closeThisConnection(); - } - settings.send(createAnswer(message, result)); - } catch (err) { - settings.send(createErrorAnswer(message, err)); - } - })); - settings.send(createAnswer(msg, 'ok')); - }); - return { - instanceByFullName - }; -} -//# sourceMappingURL=remote.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-remote/remote.js.map b/dist/esm/plugins/storage-remote/remote.js.map deleted file mode 100644 index 67de51abb60..00000000000 --- a/dist/esm/plugins/storage-remote/remote.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"remote.js","names":["filter","deepEqual","ensureNotFalsy","createAnswer","createErrorAnswer","getChangedDocumentsSince","exposeRxStorageRemote","settings","instanceByFullName","Map","messages$","pipe","msg","method","subscribe","customRequestHandler","send","Error","result","params","err","getRxStorageInstance","storage","createStorageInstance","database","storageInstances","Array","from","collectionName","storageInstance","find","instance","console","dir","JSON","stringify","schema","existingSchema","Promise","resolve","connectionId","isArray","fullName","databaseName","version","join","state","get","storageInstancePromise","connectionIds","Set","set","add","subs","push","changeStream","changes","message","answerTo","return","conflictResultionTasks","conflicts","connectionClosed","closeThisConnection","forEach","sub","unsubscribe","delete","collection","collections","onDestroy","subMsg","plainMessage","size"],"sources":["../../../../src/plugins/storage-remote/remote.ts"],"sourcesContent":["import { filter, Subscription } from 'rxjs';\nimport type {\n RxStorageInstance,\n RxStorageInstanceCreationParams\n} from '../../types/index.d.ts';\nimport {\n deepEqual,\n ensureNotFalsy\n} from '../../plugins/utils/index.ts';\nimport { createAnswer, createErrorAnswer } from './storage-remote-helpers.ts';\nimport type {\n MessageFromRemote,\n MessageToRemote,\n RxStorageRemoteExposeSettings,\n RxStorageRemoteExposeSettingsRxDatabase,\n RxStorageRemoteExposeSettingsRxStorage,\n RxStorageRemoteExposeType\n} from './storage-remote-types.ts';\nimport { getChangedDocumentsSince } from '../../rx-storage-helper.ts';\n\n/**\n * Run this on the 'remote' part,\n * so that RxStorageMessageChannel can connect to it.\n */\nexport function exposeRxStorageRemote(settings: RxStorageRemoteExposeSettings): RxStorageRemoteExposeType {\n type InstanceState = {\n storageInstancePromise: Promise>;\n connectionIds: Set;\n params: RxStorageInstanceCreationParams;\n };\n const 
instanceByFullName: Map = new Map();\n\n\n settings.messages$.pipe(\n filter(msg => msg.method === 'custom')\n ).subscribe(async (msg) => {\n if (!settings.customRequestHandler) {\n settings.send(createErrorAnswer(\n msg,\n new Error('Remote storage: cannot resolve custom request because settings.customRequestHandler is not set')\n ));\n } else {\n try {\n const result = await settings.customRequestHandler(msg.params);\n settings.send(createAnswer(msg, result));\n } catch (err: any) {\n settings.send(createErrorAnswer(\n msg,\n err\n ));\n }\n }\n });\n\n\n function getRxStorageInstance(params: any): Promise> {\n if ((settings as RxStorageRemoteExposeSettingsRxStorage).storage) {\n return (settings as RxStorageRemoteExposeSettingsRxStorage).storage.createStorageInstance(params);\n } else if ((settings as RxStorageRemoteExposeSettingsRxDatabase).database) {\n const storageInstances = Array.from((settings as RxStorageRemoteExposeSettingsRxDatabase).database.storageInstances);\n const collectionName = params.collectionName;\n const storageInstance = storageInstances.find(instance => instance.collectionName === collectionName);\n if (!storageInstance) {\n console.dir(storageInstances);\n throw new Error('storageInstance does not exist ' + JSON.stringify({\n collectionName\n }));\n }\n const schema = params.schema;\n if (!deepEqual(schema, storageInstance.schema)) {\n throw new Error('Wrong schema ' + JSON.stringify({\n schema,\n existingSchema: storageInstance.schema\n }));\n }\n return Promise.resolve(storageInstance);\n } else {\n throw new Error('no base given');\n }\n }\n\n settings.messages$.pipe(\n filter(msg => msg.method === 'create')\n ).subscribe(async (msg) => {\n const connectionId = msg.connectionId;\n\n /**\n * Do an isArray check here\n * for runtime check types to ensure we have\n * instance creation params and not method input params.\n */\n if (Array.isArray(msg.params)) {\n return;\n }\n const params = msg.params;\n const collectionName = 
params.collectionName;\n\n /**\n * We de-duplicate the storage instances.\n * This makes sense in many environments like\n * electron where on main process contains the storage\n * for multiple renderer processes. Same goes for SharedWorkers etc.\n */\n const fullName = [\n params.databaseName,\n params.collectionName,\n params.schema.version\n ].join('|');\n let state = instanceByFullName.get(fullName);\n if (!state) {\n try {\n state = {\n /**\n * We work with a promise here to ensure\n * that parallel create-calls will still end up\n * with exactly one instance and not more.\n */\n storageInstancePromise: getRxStorageInstance(params),\n connectionIds: new Set(),\n params\n };\n instanceByFullName.set(fullName, state);\n\n /**\n * Must await the creation here\n * so that in case of an error,\n * it knows about the error message and can send\n * that back to the main process. \n */\n await state.storageInstancePromise;\n } catch (err: any) {\n settings.send(createErrorAnswer(msg, err));\n return;\n }\n } else {\n // if instance already existed, ensure that the schema is equal\n if (!deepEqual(params.schema, state.params.schema)) {\n settings.send(createErrorAnswer(msg, new Error('Remote storage: schema not equal to existing storage')));\n return;\n }\n }\n state.connectionIds.add(msg.connectionId);\n const subs: Subscription[] = [];\n\n const storageInstance = await state.storageInstancePromise;\n /**\n * Automatically subscribe to the changeStream()\n * because we always need them.\n */\n subs.push(\n storageInstance.changeStream().subscribe(changes => {\n const message: MessageFromRemote = {\n connectionId,\n answerTo: 'changestream',\n method: 'changeStream',\n return: changes\n };\n settings.send(message);\n })\n );\n subs.push(\n storageInstance.conflictResultionTasks().subscribe(conflicts => {\n const message: MessageFromRemote = {\n connectionId,\n answerTo: 'conflictResultionTasks',\n method: 'conflictResultionTasks',\n return: conflicts\n };\n 
settings.send(message);\n })\n );\n\n\n let connectionClosed = false;\n function closeThisConnection() {\n if (connectionClosed) {\n return;\n }\n connectionClosed = true;\n subs.forEach(sub => sub.unsubscribe());\n ensureNotFalsy(state).connectionIds.delete(connectionId);\n instanceByFullName.delete(fullName);\n /**\n * TODO how to notify the other ports on remove() ?\n */\n }\n\n // also close the connection when the collection gets destroyed\n if ((settings as RxStorageRemoteExposeSettingsRxDatabase).database) {\n const database = (settings as RxStorageRemoteExposeSettingsRxDatabase).database;\n const collection = database.collections[collectionName];\n if (collection) {\n collection.onDestroy.push(() => closeThisConnection());\n } else {\n database.onDestroy.push(() => closeThisConnection());\n }\n }\n\n subs.push(\n settings.messages$.pipe(\n filter(subMsg => (subMsg as MessageToRemote).connectionId === connectionId)\n ).subscribe(async (plainMessage) => {\n const message: MessageToRemote = plainMessage as any;\n if (\n message.method === 'create' ||\n message.method === 'custom'\n ) {\n return;\n }\n if (!Array.isArray(message.params)) {\n return;\n }\n let result;\n try {\n if (\n message.method === 'close' &&\n (settings as RxStorageRemoteExposeSettingsRxDatabase).database\n ) {\n /**\n * Do not close the storageInstance if it was taken from\n * a running RxDatabase.\n * In that case we only close the instance\n * when the RxDatabase gets destroyed.\n */\n settings.send(createAnswer(message, null));\n return;\n }\n /**\n * On calls to 'close()',\n * we only close the main instance if there are no other\n * ports connected.\n */\n if (\n message.method === 'close' &&\n ensureNotFalsy(state).connectionIds.size > 1\n ) {\n settings.send(createAnswer(message, null));\n ensureNotFalsy(state).connectionIds.delete(connectionId);\n subs.forEach(sub => sub.unsubscribe());\n return;\n }\n\n if (message.method === 'getChangedDocumentsSince' && 
!storageInstance.getChangedDocumentsSince) {\n result = await getChangedDocumentsSince(\n storageInstance,\n message.params[0],\n message.params[1]\n );\n } else {\n result = await (storageInstance as any)[message.method](\n message.params[0],\n message.params[1],\n message.params[2],\n message.params[3]\n );\n }\n if (\n message.method === 'close' ||\n message.method === 'remove'\n ) {\n closeThisConnection();\n }\n settings.send(createAnswer(message, result));\n } catch (err: any) {\n settings.send(createErrorAnswer(message, err));\n }\n })\n );\n\n settings.send(createAnswer(msg, 'ok'));\n });\n\n return {\n instanceByFullName\n };\n}\n"],"mappings":"AAAA,SAASA,MAAM,QAAsB,MAAM;AAK3C,SACIC,SAAS,EACTC,cAAc,QACX,8BAA8B;AACrC,SAASC,YAAY,EAAEC,iBAAiB,QAAQ,6BAA6B;AAS7E,SAASC,wBAAwB,QAAQ,4BAA4B;;AAErE;AACA;AACA;AACA;AACA,OAAO,SAASC,qBAAqBA,CAACC,QAAuC,EAA6B;EAMtG,IAAMC,kBAA8C,GAAG,IAAIC,GAAG,CAAC,CAAC;EAGhEF,QAAQ,CAACG,SAAS,CAACC,IAAI,CACnBX,MAAM,CAACY,GAAG,IAAIA,GAAG,CAACC,MAAM,KAAK,QAAQ,CACzC,CAAC,CAACC,SAAS,CAAC,MAAOF,GAAG,IAAK;IACvB,IAAI,CAACL,QAAQ,CAACQ,oBAAoB,EAAE;MAChCR,QAAQ,CAACS,IAAI,CAACZ,iBAAiB,CAC3BQ,GAAG,EACH,IAAIK,KAAK,CAAC,gGAAgG,CAC9G,CAAC,CAAC;IACN,CAAC,MAAM;MACH,IAAI;QACA,IAAMC,MAAM,GAAG,MAAMX,QAAQ,CAACQ,oBAAoB,CAACH,GAAG,CAACO,MAAM,CAAC;QAC9DZ,QAAQ,CAACS,IAAI,CAACb,YAAY,CAACS,GAAG,EAAEM,MAAM,CAAC,CAAC;MAC5C,CAAC,CAAC,OAAOE,GAAQ,EAAE;QACfb,QAAQ,CAACS,IAAI,CAACZ,iBAAiB,CAC3BQ,GAAG,EACHQ,GACJ,CAAC,CAAC;MACN;IACJ;EACJ,CAAC,CAAC;EAGF,SAASC,oBAAoBA,CAAYF,MAAW,EAAwD;IACxG,IAAKZ,QAAQ,CAA4Ce,OAAO,EAAE;MAC9D,OAAQf,QAAQ,CAA4Ce,OAAO,CAACC,qBAAqB,CAACJ,MAAM,CAAC;IACrG,CAAC,MAAM,IAAKZ,QAAQ,CAA6CiB,QAAQ,EAAE;MACvE,IAAMC,gBAAgB,GAAGC,KAAK,CAACC,IAAI,CAAEpB,QAAQ,CAA6CiB,QAAQ,CAACC,gBAAgB,CAAC;MACpH,IAAMG,cAAc,GAAGT,MAAM,CAACS,cAAc;MAC5C,IAAMC,eAAe,GAAGJ,gBAAgB,CAACK,IAAI,CAACC,QAAQ,IAAIA,QAAQ,CAACH,cAAc,KAAKA,cAAc,CAAC;MACrG,IAAI,CAACC,eAAe,EAAE;QAClBG,OAAO,CAACC,GAAG,CAACR,gBAAgB,CAAC;QAC7B,MAAM,IAAIR,KAAK,CAAC,iCAAiC,GAAGiB,IAAI,CAACC,SAAS,CAAC;UAC/DP;QACJ,CAAC,CAAC
,CAAC;MACP;MACA,IAAMQ,MAAM,GAAGjB,MAAM,CAACiB,MAAM;MAC5B,IAAI,CAACnC,SAAS,CAACmC,MAAM,EAAEP,eAAe,CAACO,MAAM,CAAC,EAAE;QAC5C,MAAM,IAAInB,KAAK,CAAC,eAAe,GAAGiB,IAAI,CAACC,SAAS,CAAC;UAC7CC,MAAM;UACNC,cAAc,EAAER,eAAe,CAACO;QACpC,CAAC,CAAC,CAAC;MACP;MACA,OAAOE,OAAO,CAACC,OAAO,CAACV,eAAe,CAAC;IAC3C,CAAC,MAAM;MACH,MAAM,IAAIZ,KAAK,CAAC,eAAe,CAAC;IACpC;EACJ;EAEAV,QAAQ,CAACG,SAAS,CAACC,IAAI,CACnBX,MAAM,CAACY,GAAG,IAAIA,GAAG,CAACC,MAAM,KAAK,QAAQ,CACzC,CAAC,CAACC,SAAS,CAAC,MAAOF,GAAG,IAAK;IACvB,IAAM4B,YAAY,GAAG5B,GAAG,CAAC4B,YAAY;;IAErC;AACR;AACA;AACA;AACA;IACQ,IAAId,KAAK,CAACe,OAAO,CAAC7B,GAAG,CAACO,MAAM,CAAC,EAAE;MAC3B;IACJ;IACA,IAAMA,MAAM,GAAGP,GAAG,CAACO,MAAM;IACzB,IAAMS,cAAc,GAAGT,MAAM,CAACS,cAAc;;IAE5C;AACR;AACA;AACA;AACA;AACA;IACQ,IAAMc,QAAQ,GAAG,CACbvB,MAAM,CAACwB,YAAY,EACnBxB,MAAM,CAACS,cAAc,EACrBT,MAAM,CAACiB,MAAM,CAACQ,OAAO,CACxB,CAACC,IAAI,CAAC,GAAG,CAAC;IACX,IAAIC,KAAK,GAAGtC,kBAAkB,CAACuC,GAAG,CAACL,QAAQ,CAAC;IAC5C,IAAI,CAACI,KAAK,EAAE;MACR,IAAI;QACAA,KAAK,GAAG;UACJ;AACpB;AACA;AACA;AACA;UACoBE,sBAAsB,EAAE3B,oBAAoB,CAACF,MAAM,CAAC;UACpD8B,aAAa,EAAE,IAAIC,GAAG,CAAC,CAAC;UACxB/B;QACJ,CAAC;QACDX,kBAAkB,CAAC2C,GAAG,CAACT,QAAQ,EAAEI,KAAK,CAAC;;QAEvC;AAChB;AACA;AACA;AACA;AACA;QACgB,MAAMA,KAAK,CAACE,sBAAsB;MACtC,CAAC,CAAC,OAAO5B,GAAQ,EAAE;QACfb,QAAQ,CAACS,IAAI,CAACZ,iBAAiB,CAACQ,GAAG,EAAEQ,GAAG,CAAC,CAAC;QAC1C;MACJ;IACJ,CAAC,MAAM;MACH;MACA,IAAI,CAACnB,SAAS,CAACkB,MAAM,CAACiB,MAAM,EAAEU,KAAK,CAAC3B,MAAM,CAACiB,MAAM,CAAC,EAAE;QAChD7B,QAAQ,CAACS,IAAI,CAACZ,iBAAiB,CAACQ,GAAG,EAAE,IAAIK,KAAK,CAAC,sDAAsD,CAAC,CAAC,CAAC;QACxG;MACJ;IACJ;IACA6B,KAAK,CAACG,aAAa,CAACG,GAAG,CAACxC,GAAG,CAAC4B,YAAY,CAAC;IACzC,IAAMa,IAAoB,GAAG,EAAE;IAE/B,IAAMxB,eAAe,GAAG,MAAMiB,KAAK,CAACE,sBAAsB;IAC1D;AACR;AACA;AACA;IACQK,IAAI,CAACC,IAAI,CACLzB,eAAe,CAAC0B,YAAY,CAAC,CAAC,CAACzC,SAAS,CAAC0C,OAAO,IAAI;MAChD,IAAMC,OAA0B,GAAG;QAC/BjB,YAAY;QACZkB,QAAQ,EAAE,cAAc;QACxB7C,MAAM,EAAE,cAAc;QACtB8C,MAAM,EAAEH;MACZ,CAAC;MACDjD,QAAQ,CAACS,IAAI,CAACyC,OAAO,CAAC;IAC1B,CAAC,CACL,CAAC;IACDJ,IAAI,CAACC,IAAI,CACLzB,eAAe,C
AAC+B,sBAAsB,CAAC,CAAC,CAAC9C,SAAS,CAAC+C,SAAS,IAAI;MAC5D,IAAMJ,OAA0B,GAAG;QAC/BjB,YAAY;QACZkB,QAAQ,EAAE,wBAAwB;QAClC7C,MAAM,EAAE,wBAAwB;QAChC8C,MAAM,EAAEE;MACZ,CAAC;MACDtD,QAAQ,CAACS,IAAI,CAACyC,OAAO,CAAC;IAC1B,CAAC,CACL,CAAC;IAGD,IAAIK,gBAAgB,GAAG,KAAK;IAC5B,SAASC,mBAAmBA,CAAA,EAAG;MAC3B,IAAID,gBAAgB,EAAE;QAClB;MACJ;MACAA,gBAAgB,GAAG,IAAI;MACvBT,IAAI,CAACW,OAAO,CAACC,GAAG,IAAIA,GAAG,CAACC,WAAW,CAAC,CAAC,CAAC;MACtChE,cAAc,CAAC4C,KAAK,CAAC,CAACG,aAAa,CAACkB,MAAM,CAAC3B,YAAY,CAAC;MACxDhC,kBAAkB,CAAC2D,MAAM,CAACzB,QAAQ,CAAC;MACnC;AACZ;AACA;IACQ;;IAEA;IACA,IAAKnC,QAAQ,CAA6CiB,QAAQ,EAAE;MAChE,IAAMA,QAAQ,GAAIjB,QAAQ,CAA6CiB,QAAQ;MAC/E,IAAM4C,UAAU,GAAG5C,QAAQ,CAAC6C,WAAW,CAACzC,cAAc,CAAC;MACvD,IAAIwC,UAAU,EAAE;QACZA,UAAU,CAACE,SAAS,CAAChB,IAAI,CAAC,MAAMS,mBAAmB,CAAC,CAAC,CAAC;MAC1D,CAAC,MAAM;QACHvC,QAAQ,CAAC8C,SAAS,CAAChB,IAAI,CAAC,MAAMS,mBAAmB,CAAC,CAAC,CAAC;MACxD;IACJ;IAEAV,IAAI,CAACC,IAAI,CACL/C,QAAQ,CAACG,SAAS,CAACC,IAAI,CACnBX,MAAM,CAACuE,MAAM,IAAKA,MAAM,CAAqB/B,YAAY,KAAKA,YAAY,CAC9E,CAAC,CAAC1B,SAAS,CAAC,MAAO0D,YAAY,IAAK;MAChC,IAAMf,OAAwB,GAAGe,YAAmB;MACpD,IACIf,OAAO,CAAC5C,MAAM,KAAK,QAAQ,IAC3B4C,OAAO,CAAC5C,MAAM,KAAK,QAAQ,EAC7B;QACE;MACJ;MACA,IAAI,CAACa,KAAK,CAACe,OAAO,CAACgB,OAAO,CAACtC,MAAM,CAAC,EAAE;QAChC;MACJ;MACA,IAAID,MAAM;MACV,IAAI;QACA,IACIuC,OAAO,CAAC5C,MAAM,KAAK,OAAO,IACzBN,QAAQ,CAA6CiB,QAAQ,EAChE;UACE;AACxB;AACA;AACA;AACA;AACA;UACwBjB,QAAQ,CAACS,IAAI,CAACb,YAAY,CAACsD,OAAO,EAAE,IAAI,CAAC,CAAC;UAC1C;QACJ;QACA;AACpB;AACA;AACA;AACA;QACoB,IACIA,OAAO,CAAC5C,MAAM,KAAK,OAAO,IAC1BX,cAAc,CAAC4C,KAAK,CAAC,CAACG,aAAa,CAACwB,IAAI,GAAG,CAAC,EAC9C;UACElE,QAAQ,CAACS,IAAI,CAACb,YAAY,CAACsD,OAAO,EAAE,IAAI,CAAC,CAAC;UAC1CvD,cAAc,CAAC4C,KAAK,CAAC,CAACG,aAAa,CAACkB,MAAM,CAAC3B,YAAY,CAAC;UACxDa,IAAI,CAACW,OAAO,CAACC,GAAG,IAAIA,GAAG,CAACC,WAAW,CAAC,CAAC,CAAC;UACtC;QACJ;QAEA,IAAIT,OAAO,CAAC5C,MAAM,KAAK,0BAA0B,IAAI,CAACgB,eAAe,CAACxB,wBAAwB,EAAE;UAC5Fa,MAAM,GAAG,MAAMb,wBAAwB,CACnCwB,eAAe,EACf4B,OAAO,CAACtC,MAAM,CAAC,CAAC,CAAC,EACjBsC,OAAO,CAACtC,MAAM,CAAC,CAAC,CACpB,CAAC;QACL,C
AAC,MAAM;UACHD,MAAM,GAAG,MAAOW,eAAe,CAAS4B,OAAO,CAAC5C,MAAM,CAAC,CACnD4C,OAAO,CAACtC,MAAM,CAAC,CAAC,CAAC,EACjBsC,OAAO,CAACtC,MAAM,CAAC,CAAC,CAAC,EACjBsC,OAAO,CAACtC,MAAM,CAAC,CAAC,CAAC,EACjBsC,OAAO,CAACtC,MAAM,CAAC,CAAC,CACpB,CAAC;QACL;QACA,IACIsC,OAAO,CAAC5C,MAAM,KAAK,OAAO,IAC1B4C,OAAO,CAAC5C,MAAM,KAAK,QAAQ,EAC7B;UACEkD,mBAAmB,CAAC,CAAC;QACzB;QACAxD,QAAQ,CAACS,IAAI,CAACb,YAAY,CAACsD,OAAO,EAAEvC,MAAM,CAAC,CAAC;MAChD,CAAC,CAAC,OAAOE,GAAQ,EAAE;QACfb,QAAQ,CAACS,IAAI,CAACZ,iBAAiB,CAACqD,OAAO,EAAErC,GAAG,CAAC,CAAC;MAClD;IACJ,CAAC,CACL,CAAC;IAEDb,QAAQ,CAACS,IAAI,CAACb,YAAY,CAACS,GAAG,EAAE,IAAI,CAAC,CAAC;EAC1C,CAAC,CAAC;EAEF,OAAO;IACHJ;EACJ,CAAC;AACL","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-remote/rx-storage-remote.js b/dist/esm/plugins/storage-remote/rx-storage-remote.js deleted file mode 100644 index 61efcf37ee9..00000000000 --- a/dist/esm/plugins/storage-remote/rx-storage-remote.js +++ /dev/null @@ -1,200 +0,0 @@ -import { firstValueFrom, filter, Subject } from 'rxjs'; -import { RXDB_VERSION, randomCouchString } from "../../plugins/utils/index.js"; -import { closeMessageChannel, getMessageChannel } from "./message-channel-cache.js"; -export var RxStorageRemote = /*#__PURE__*/function () { - function RxStorageRemote(settings) { - this.name = 'remote'; - this.rxdbVersion = RXDB_VERSION; - this.seed = randomCouchString(10); - this.lastRequestId = 0; - this.settings = settings; - if (settings.mode === 'one') { - this.messageChannelIfOneMode = getMessageChannel(settings, [], true); - } - } - var _proto = RxStorageRemote.prototype; - _proto.getRequestId = function getRequestId() { - var newId = this.lastRequestId++; - return this.seed + '|' + newId; - }; - _proto.createStorageInstance = async function createStorageInstance(params) { - var connectionId = 'c|' + this.getRequestId(); - var cacheKeys = ['mode-' + this.settings.mode]; - switch (this.settings.mode) { - case 'collection': - cacheKeys.push('collection-' + 
params.collectionName); - // eslint-disable-next-line no-fallthrough - case 'database': - cacheKeys.push('database-' + params.databaseName); - // eslint-disable-next-line no-fallthrough - case 'storage': - cacheKeys.push('seed-' + this.seed); - } - var messageChannel = await (this.messageChannelIfOneMode ? this.messageChannelIfOneMode : getMessageChannel(this.settings, cacheKeys)); - var requestId = this.getRequestId(); - var waitForOkPromise = firstValueFrom(messageChannel.messages$.pipe(filter(msg => msg.answerTo === requestId))); - messageChannel.send({ - connectionId, - method: 'create', - requestId, - params - }); - var waitForOkResult = await waitForOkPromise; - if (waitForOkResult.error) { - await closeMessageChannel(messageChannel); - throw new Error('could not create instance ' + JSON.stringify(waitForOkResult.error)); - } - return new RxStorageInstanceRemote(this, params.databaseName, params.collectionName, params.schema, { - params, - connectionId, - messageChannel - }, params.options); - }; - _proto.customRequest = async function customRequest(data) { - var messageChannel = await this.settings.messageChannelCreator(); - var requestId = this.getRequestId(); - var connectionId = 'custom|request|' + requestId; - var waitForAnswerPromise = firstValueFrom(messageChannel.messages$.pipe(filter(msg => msg.answerTo === requestId))); - messageChannel.send({ - connectionId, - method: 'custom', - requestId, - params: data - }); - var response = await waitForAnswerPromise; - if (response.error) { - await messageChannel.close(); - throw new Error('could not run customRequest(): ' + JSON.stringify({ - data, - error: response.error - })); - } else { - await messageChannel.close(); - return response.return; - } - }; - return RxStorageRemote; -}(); - -/** - * Because postMessage() can be very slow on complex objects, - * and some RxStorage implementations do need a JSON-string internally - * anyway, it is allowed to transfer a string instead of an object - * which must 
then be JSON.parse()-ed before RxDB can use it. - * @link https://surma.dev/things/is-postmessage-slow/ - */ -function getMessageReturn(msg) { - if (msg.method === 'getAttachmentData') { - return msg.return; - } else { - if (typeof msg.return === 'string') { - return JSON.parse(msg.return); - } else { - return msg.return; - } - } -} -export var RxStorageInstanceRemote = /*#__PURE__*/function () { - function RxStorageInstanceRemote(storage, databaseName, collectionName, schema, internals, options) { - this.changes$ = new Subject(); - this.conflicts$ = new Subject(); - this.subs = []; - this.storage = storage; - this.databaseName = databaseName; - this.collectionName = collectionName; - this.schema = schema; - this.internals = internals; - this.options = options; - this.messages$ = this.internals.messageChannel.messages$.pipe(filter(msg => msg.connectionId === this.internals.connectionId)); - this.subs.push(this.messages$.subscribe(msg => { - if (msg.method === 'changeStream') { - this.changes$.next(getMessageReturn(msg)); - } - if (msg.method === 'conflictResultionTasks') { - this.conflicts$.next(msg.return); - } - })); - } - var _proto2 = RxStorageInstanceRemote.prototype; - _proto2.requestRemote = async function requestRemote(methodName, params) { - var requestId = this.storage.getRequestId(); - var responsePromise = firstValueFrom(this.messages$.pipe(filter(msg => msg.answerTo === requestId))); - var message = { - connectionId: this.internals.connectionId, - requestId, - method: methodName, - params - }; - this.internals.messageChannel.send(message); - var response = await responsePromise; - if (response.error) { - throw new Error('could not requestRemote: ' + JSON.stringify({ - methodName, - params, - error: response.error - }, null, 4)); - } else { - return getMessageReturn(response); - } - }; - _proto2.bulkWrite = function bulkWrite(documentWrites, context) { - return this.requestRemote('bulkWrite', [documentWrites, context]); - }; - _proto2.findDocumentsById 
= function findDocumentsById(ids, deleted) { - return this.requestRemote('findDocumentsById', [ids, deleted]); - }; - _proto2.query = function query(preparedQuery) { - return this.requestRemote('query', [preparedQuery]); - }; - _proto2.count = function count(preparedQuery) { - return this.requestRemote('count', [preparedQuery]); - }; - _proto2.getAttachmentData = function getAttachmentData(documentId, attachmentId, digest) { - return this.requestRemote('getAttachmentData', [documentId, attachmentId, digest]); - }; - _proto2.getChangedDocumentsSince = function getChangedDocumentsSince(limit, checkpoint) { - return this.requestRemote('getChangedDocumentsSince', [limit, checkpoint]); - }; - _proto2.changeStream = function changeStream() { - return this.changes$.asObservable(); - }; - _proto2.cleanup = function cleanup(minDeletedTime) { - return this.requestRemote('cleanup', [minDeletedTime]); - }; - _proto2.close = async function close() { - if (this.closed) { - return this.closed; - } - this.closed = (async () => { - this.subs.forEach(sub => sub.unsubscribe()); - this.changes$.complete(); - await this.requestRemote('close', []); - await closeMessageChannel(this.internals.messageChannel); - })(); - return this.closed; - }; - _proto2.remove = async function remove() { - if (this.closed) { - throw new Error('already closed'); - } - this.closed = (async () => { - await this.requestRemote('remove', []); - await closeMessageChannel(this.internals.messageChannel); - })(); - return this.closed; - }; - _proto2.conflictResultionTasks = function conflictResultionTasks() { - return this.conflicts$; - }; - _proto2.resolveConflictResultionTask = async function resolveConflictResultionTask(taskSolution) { - await this.requestRemote('resolveConflictResultionTask', [taskSolution]); - }; - return RxStorageInstanceRemote; -}(); -export function getRxStorageRemote(settings) { - var withDefaults = Object.assign({ - mode: 'storage' - }, settings); - return new 
RxStorageRemote(withDefaults); -} -//# sourceMappingURL=rx-storage-remote.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-remote/rx-storage-remote.js.map b/dist/esm/plugins/storage-remote/rx-storage-remote.js.map deleted file mode 100644 index ffbe2318d68..00000000000 --- a/dist/esm/plugins/storage-remote/rx-storage-remote.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-storage-remote.js","names":["firstValueFrom","filter","Subject","RXDB_VERSION","randomCouchString","closeMessageChannel","getMessageChannel","RxStorageRemote","settings","name","rxdbVersion","seed","lastRequestId","mode","messageChannelIfOneMode","_proto","prototype","getRequestId","newId","createStorageInstance","params","connectionId","cacheKeys","push","collectionName","databaseName","messageChannel","requestId","waitForOkPromise","messages$","pipe","msg","answerTo","send","method","waitForOkResult","error","Error","JSON","stringify","RxStorageInstanceRemote","schema","options","customRequest","data","messageChannelCreator","waitForAnswerPromise","response","close","return","getMessageReturn","parse","storage","internals","changes$","conflicts$","subs","subscribe","next","_proto2","requestRemote","methodName","responsePromise","message","bulkWrite","documentWrites","context","findDocumentsById","ids","deleted","query","preparedQuery","count","getAttachmentData","documentId","attachmentId","digest","getChangedDocumentsSince","limit","checkpoint","changeStream","asObservable","cleanup","minDeletedTime","closed","forEach","sub","unsubscribe","complete","remove","conflictResultionTasks","resolveConflictResultionTask","taskSolution","getRxStorageRemote","withDefaults","Object","assign"],"sources":["../../../../src/plugins/storage-remote/rx-storage-remote.ts"],"sourcesContent":["import {\n firstValueFrom,\n filter,\n Observable,\n Subject,\n Subscription\n} from 'rxjs';\nimport type {\n BulkWriteRow,\n EventBulk,\n RxConflictResultionTask,\n 
RxConflictResultionTaskSolution,\n RxDocumentData,\n RxJsonSchema,\n RxStorage,\n RxStorageBulkWriteResponse,\n RxStorageChangeEvent,\n RxStorageCountResult,\n RxStorageInstance,\n RxStorageInstanceCreationParams,\n RxStorageQueryResult\n} from '../../types/index.d.ts';\nimport {\n RXDB_VERSION,\n randomCouchString\n} from '../../plugins/utils/index.ts';\nimport type {\n MessageFromRemote,\n MessageToRemote,\n RemoteMessageChannel,\n RxStorageRemoteInternals,\n RxStorageRemoteSettings\n} from './storage-remote-types.ts';\nimport { closeMessageChannel, getMessageChannel } from './message-channel-cache.ts';\nimport { ensureRxStorageInstanceParamsAreCorrect } from '../../rx-storage-helper.ts';\n\n\nexport class RxStorageRemote implements RxStorage {\n public readonly name: string = 'remote';\n public readonly rxdbVersion = RXDB_VERSION;\n\n private seed: string = randomCouchString(10);\n private lastRequestId: number = 0;\n public messageChannelIfOneMode?: Promise;\n constructor(\n public readonly settings: RxStorageRemoteSettings\n ) {\n if (settings.mode === 'one') {\n this.messageChannelIfOneMode = getMessageChannel(\n settings,\n [],\n true\n );\n }\n }\n\n public getRequestId() {\n const newId = this.lastRequestId++;\n return this.seed + '|' + newId;\n }\n\n async createStorageInstance(\n params: RxStorageInstanceCreationParams\n ): Promise> {\n const connectionId = 'c|' + this.getRequestId();\n\n const cacheKeys: string[] = [\n 'mode-' + this.settings.mode\n ];\n switch (this.settings.mode) {\n case 'collection':\n cacheKeys.push('collection-' + params.collectionName);\n // eslint-disable-next-line no-fallthrough\n case 'database':\n cacheKeys.push('database-' + params.databaseName);\n // eslint-disable-next-line no-fallthrough\n case 'storage':\n cacheKeys.push('seed-' + this.seed);\n }\n const messageChannel = await (this.messageChannelIfOneMode ?\n this.messageChannelIfOneMode :\n getMessageChannel(\n this.settings,\n cacheKeys\n )\n );\n\n const requestId = 
this.getRequestId();\n const waitForOkPromise = firstValueFrom(messageChannel.messages$.pipe(\n filter(msg => msg.answerTo === requestId)\n ));\n messageChannel.send({\n connectionId,\n method: 'create',\n requestId,\n params\n });\n\n const waitForOkResult = await waitForOkPromise;\n if (waitForOkResult.error) {\n await closeMessageChannel(messageChannel);\n throw new Error('could not create instance ' + JSON.stringify(waitForOkResult.error));\n }\n\n return new RxStorageInstanceRemote(\n this,\n params.databaseName,\n params.collectionName,\n params.schema,\n {\n params,\n connectionId,\n messageChannel\n },\n params.options\n );\n }\n\n async customRequest(data: In): Promise {\n const messageChannel = await this.settings.messageChannelCreator();\n const requestId = this.getRequestId();\n const connectionId = 'custom|request|' + requestId;\n const waitForAnswerPromise = firstValueFrom(messageChannel.messages$.pipe(\n filter(msg => msg.answerTo === requestId)\n ));\n messageChannel.send({\n connectionId,\n method: 'custom',\n requestId,\n params: data\n });\n const response = await waitForAnswerPromise;\n if (response.error) {\n await messageChannel.close();\n throw new Error('could not run customRequest(): ' + JSON.stringify({\n data,\n error: response.error\n }));\n } else {\n await messageChannel.close();\n return response.return;\n }\n\n }\n}\n\n/**\n * Because postMessage() can be very slow on complex objects,\n * and some RxStorage implementations do need a JSON-string internally\n * anyway, it is allowed to transfer a string instead of an object\n * which must then be JSON.parse()-ed before RxDB can use it.\n * @link https://surma.dev/things/is-postmessage-slow/\n */\nfunction getMessageReturn(\n msg: MessageFromRemote\n) {\n if (msg.method === 'getAttachmentData') {\n return msg.return;\n } else {\n if (typeof msg.return === 'string') {\n return JSON.parse(msg.return);\n } else {\n return msg.return;\n }\n }\n}\n\nexport class RxStorageInstanceRemote 
implements RxStorageInstance {\n private changes$: Subject>, any>> = new Subject();\n private conflicts$: Subject> = new Subject();\n private subs: Subscription[] = [];\n\n private closed?: Promise;\n messages$: Observable;\n\n constructor(\n public readonly storage: RxStorageRemote,\n public readonly databaseName: string,\n public readonly collectionName: string,\n public readonly schema: Readonly>>,\n public readonly internals: RxStorageRemoteInternals,\n public readonly options: Readonly\n ) {\n this.messages$ = this.internals.messageChannel.messages$.pipe(\n filter(msg => msg.connectionId === this.internals.connectionId)\n );\n this.subs.push(\n this.messages$.subscribe(msg => {\n if (msg.method === 'changeStream') {\n this.changes$.next(getMessageReturn(msg));\n }\n if (msg.method === 'conflictResultionTasks') {\n this.conflicts$.next(msg.return);\n }\n })\n );\n }\n\n private async requestRemote(\n methodName: keyof RxStorageInstance,\n params: any\n ) {\n const requestId = this.storage.getRequestId();\n const responsePromise = firstValueFrom(\n this.messages$.pipe(\n filter(msg => msg.answerTo === requestId)\n )\n );\n const message: MessageToRemote = {\n connectionId: this.internals.connectionId,\n requestId,\n method: methodName,\n params\n };\n this.internals.messageChannel.send(message);\n const response = await responsePromise;\n if (response.error) {\n throw new Error('could not requestRemote: ' + JSON.stringify({\n methodName,\n params,\n error: response.error\n }, null, 4));\n } else {\n return getMessageReturn(response);\n }\n }\n bulkWrite(\n documentWrites: BulkWriteRow[],\n context: string\n ): Promise> {\n return this.requestRemote('bulkWrite', [documentWrites, context]);\n }\n findDocumentsById(ids: string[], deleted: boolean): Promise[]> {\n return this.requestRemote('findDocumentsById', [ids, deleted]);\n }\n query(preparedQuery: any): Promise> {\n return this.requestRemote('query', [preparedQuery]);\n }\n count(preparedQuery: any): Promise 
{\n return this.requestRemote('count', [preparedQuery]);\n }\n getAttachmentData(documentId: string, attachmentId: string, digest: string): Promise {\n return this.requestRemote('getAttachmentData', [documentId, attachmentId, digest]);\n }\n getChangedDocumentsSince(\n limit: number,\n checkpoint?: any\n ): Promise<\n {\n documents: RxDocumentData[];\n checkpoint: any;\n }> {\n return this.requestRemote('getChangedDocumentsSince', [limit, checkpoint]);\n }\n changeStream(): Observable>, any>> {\n return this.changes$.asObservable();\n }\n cleanup(minDeletedTime: number): Promise {\n return this.requestRemote('cleanup', [minDeletedTime]);\n }\n async close(): Promise {\n if (this.closed) {\n return this.closed;\n }\n this.closed = (async () => {\n this.subs.forEach(sub => sub.unsubscribe());\n this.changes$.complete();\n await this.requestRemote('close', []);\n await closeMessageChannel(this.internals.messageChannel);\n })();\n return this.closed;\n }\n async remove(): Promise {\n if (this.closed) {\n throw new Error('already closed');\n }\n this.closed = (async () => {\n await this.requestRemote('remove', []);\n await closeMessageChannel(this.internals.messageChannel);\n })();\n return this.closed;\n }\n conflictResultionTasks(): Observable> {\n return this.conflicts$;\n }\n async resolveConflictResultionTask(taskSolution: RxConflictResultionTaskSolution): Promise {\n await this.requestRemote('resolveConflictResultionTask', [taskSolution]);\n }\n}\n\nexport function getRxStorageRemote(settings: RxStorageRemoteSettings): RxStorageRemote {\n const withDefaults = Object.assign({\n mode: 'storage'\n }, settings);\n return new 
RxStorageRemote(withDefaults);\n}\n"],"mappings":"AAAA,SACIA,cAAc,EACdC,MAAM,EAENC,OAAO,QAEJ,MAAM;AAgBb,SACIC,YAAY,EACZC,iBAAiB,QACd,8BAA8B;AAQrC,SAASC,mBAAmB,EAAEC,iBAAiB,QAAQ,4BAA4B;AAInF,WAAaC,eAAe;EAOxB,SAAAA,gBACoBC,QAAiC,EACnD;IAAA,KARcC,IAAI,GAAW,QAAQ;IAAA,KACvBC,WAAW,GAAGP,YAAY;IAAA,KAElCQ,IAAI,GAAWP,iBAAiB,CAAC,EAAE,CAAC;IAAA,KACpCQ,aAAa,GAAW,CAAC;IAAA,KAGbJ,QAAiC,GAAjCA,QAAiC;IAEjD,IAAIA,QAAQ,CAACK,IAAI,KAAK,KAAK,EAAE;MACzB,IAAI,CAACC,uBAAuB,GAAGR,iBAAiB,CAC5CE,QAAQ,EACR,EAAE,EACF,IACJ,CAAC;IACL;EACJ;EAAC,IAAAO,MAAA,GAAAR,eAAA,CAAAS,SAAA;EAAAD,MAAA,CAEME,YAAY,GAAnB,SAAAA,aAAA,EAAsB;IAClB,IAAMC,KAAK,GAAG,IAAI,CAACN,aAAa,EAAE;IAClC,OAAO,IAAI,CAACD,IAAI,GAAG,GAAG,GAAGO,KAAK;EAClC,CAAC;EAAAH,MAAA,CAEKI,qBAAqB,GAA3B,eAAAA,sBACIC,MAAuD,EACZ;IAC3C,IAAMC,YAAY,GAAG,IAAI,GAAG,IAAI,CAACJ,YAAY,CAAC,CAAC;IAE/C,IAAMK,SAAmB,GAAG,CACxB,OAAO,GAAG,IAAI,CAACd,QAAQ,CAACK,IAAI,CAC/B;IACD,QAAQ,IAAI,CAACL,QAAQ,CAACK,IAAI;MACtB,KAAK,YAAY;QACbS,SAAS,CAACC,IAAI,CAAC,aAAa,GAAGH,MAAM,CAACI,cAAc,CAAC;MACzD;MACA,KAAK,UAAU;QACXF,SAAS,CAACC,IAAI,CAAC,WAAW,GAAGH,MAAM,CAACK,YAAY,CAAC;MACrD;MACA,KAAK,SAAS;QACVH,SAAS,CAACC,IAAI,CAAC,OAAO,GAAG,IAAI,CAACZ,IAAI,CAAC;IAC3C;IACA,IAAMe,cAAc,GAAG,OAAO,IAAI,CAACZ,uBAAuB,GACtD,IAAI,CAACA,uBAAuB,GAC5BR,iBAAiB,CACb,IAAI,CAACE,QAAQ,EACbc,SACJ,CAAC,CACJ;IAED,IAAMK,SAAS,GAAG,IAAI,CAACV,YAAY,CAAC,CAAC;IACrC,IAAMW,gBAAgB,GAAG5B,cAAc,CAAC0B,cAAc,CAACG,SAAS,CAACC,IAAI,CACjE7B,MAAM,CAAC8B,GAAG,IAAIA,GAAG,CAACC,QAAQ,KAAKL,SAAS,CAC5C,CAAC,CAAC;IACFD,cAAc,CAACO,IAAI,CAAC;MAChBZ,YAAY;MACZa,MAAM,EAAE,QAAQ;MAChBP,SAAS;MACTP;IACJ,CAAC,CAAC;IAEF,IAAMe,eAAe,GAAG,MAAMP,gBAAgB;IAC9C,IAAIO,eAAe,CAACC,KAAK,EAAE;MACvB,MAAM/B,mBAAmB,CAACqB,cAAc,CAAC;MACzC,MAAM,IAAIW,KAAK,CAAC,4BAA4B,GAAGC,IAAI,CAACC,SAAS,CAACJ,eAAe,CAACC,KAAK,CAAC,CAAC;IACzF;IAEA,OAAO,IAAII,uBAAuB,CAC9B,IAAI,EACJpB,MAAM,CAACK,YAAY,EACnBL,MAAM,CAACI,cAAc,EACrBJ,MAAM,CAACqB,MAAM,EACb;MACIrB,MAAM;MACNC,YAAY;MACZK;IACJ,CAAC,EACDN,MAAM,CAACsB,OACX,CAAC;EACL,CAAC;EAAA3B,MAAA,CAEK4B,aAAa,GAAnB,eAAAA,cAA6BC,IAAQ,EAA
gB;IACjD,IAAMlB,cAAc,GAAG,MAAM,IAAI,CAAClB,QAAQ,CAACqC,qBAAqB,CAAC,CAAC;IAClE,IAAMlB,SAAS,GAAG,IAAI,CAACV,YAAY,CAAC,CAAC;IACrC,IAAMI,YAAY,GAAG,iBAAiB,GAAGM,SAAS;IAClD,IAAMmB,oBAAoB,GAAG9C,cAAc,CAAC0B,cAAc,CAACG,SAAS,CAACC,IAAI,CACrE7B,MAAM,CAAC8B,GAAG,IAAIA,GAAG,CAACC,QAAQ,KAAKL,SAAS,CAC5C,CAAC,CAAC;IACFD,cAAc,CAACO,IAAI,CAAC;MAChBZ,YAAY;MACZa,MAAM,EAAE,QAAQ;MAChBP,SAAS;MACTP,MAAM,EAAEwB;IACZ,CAAC,CAAC;IACF,IAAMG,QAAQ,GAAG,MAAMD,oBAAoB;IAC3C,IAAIC,QAAQ,CAACX,KAAK,EAAE;MAChB,MAAMV,cAAc,CAACsB,KAAK,CAAC,CAAC;MAC5B,MAAM,IAAIX,KAAK,CAAC,iCAAiC,GAAGC,IAAI,CAACC,SAAS,CAAC;QAC/DK,IAAI;QACJR,KAAK,EAAEW,QAAQ,CAACX;MACpB,CAAC,CAAC,CAAC;IACP,CAAC,MAAM;MACH,MAAMV,cAAc,CAACsB,KAAK,CAAC,CAAC;MAC5B,OAAOD,QAAQ,CAACE,MAAM;IAC1B;EAEJ,CAAC;EAAA,OAAA1C,eAAA;AAAA;;AAGL;AACA;AACA;AACA;AACA;AACA;AACA;AACA,SAAS2C,gBAAgBA,CACrBnB,GAAsB,EACxB;EACE,IAAIA,GAAG,CAACG,MAAM,KAAK,mBAAmB,EAAE;IACpC,OAAOH,GAAG,CAACkB,MAAM;EACrB,CAAC,MAAM;IACH,IAAI,OAAOlB,GAAG,CAACkB,MAAM,KAAK,QAAQ,EAAE;MAChC,OAAOX,IAAI,CAACa,KAAK,CAACpB,GAAG,CAACkB,MAAM,CAAC;IACjC,CAAC,MAAM;MACH,OAAOlB,GAAG,CAACkB,MAAM;IACrB;EACJ;AACJ;AAEA,WAAaT,uBAAuB;EAQhC,SAAAA,wBACoBY,OAAwB,EACxB3B,YAAoB,EACpBD,cAAsB,EACtBiB,MAAyD,EACzDY,SAAmC,EACnCX,OAAsB,EACxC;IAAA,KAdMY,QAAQ,GAA6E,IAAIpD,OAAO,CAAC,CAAC;IAAA,KAClGqD,UAAU,GAAgD,IAAIrD,OAAO,CAAC,CAAC;IAAA,KACvEsD,IAAI,GAAmB,EAAE;IAAA,KAMbJ,OAAwB,GAAxBA,OAAwB;IAAA,KACxB3B,YAAoB,GAApBA,YAAoB;IAAA,KACpBD,cAAsB,GAAtBA,cAAsB;IAAA,KACtBiB,MAAyD,GAAzDA,MAAyD;IAAA,KACzDY,SAAmC,GAAnCA,SAAmC;IAAA,KACnCX,OAAsB,GAAtBA,OAAsB;IAEtC,IAAI,CAACb,SAAS,GAAG,IAAI,CAACwB,SAAS,CAAC3B,cAAc,CAACG,SAAS,CAACC,IAAI,CACzD7B,MAAM,CAAC8B,GAAG,IAAIA,GAAG,CAACV,YAAY,KAAK,IAAI,CAACgC,SAAS,CAAChC,YAAY,CAClE,CAAC;IACD,IAAI,CAACmC,IAAI,CAACjC,IAAI,CACV,IAAI,CAACM,SAAS,CAAC4B,SAAS,CAAC1B,GAAG,IAAI;MAC5B,IAAIA,GAAG,CAACG,MAAM,KAAK,cAAc,EAAE;QAC/B,IAAI,CAACoB,QAAQ,CAACI,IAAI,CAACR,gBAAgB,CAACnB,GAAG,CAAC,CAAC;MAC7C;MACA,IAAIA,GAAG,CAACG,MAAM,KAAK,wBAAwB,EAAE;QACzC,IAAI,CAACqB,UAAU,CAACG,IAAI,CAAC3B,GAAG,CAACkB,MAAM,CAAC;MACpC;IACJ,CA
AC,CACL,CAAC;EACL;EAAC,IAAAU,OAAA,GAAAnB,uBAAA,CAAAxB,SAAA;EAAA2C,OAAA,CAEaC,aAAa,GAA3B,eAAAA,cACIC,UAAkD,EAClDzC,MAAW,EACb;IACE,IAAMO,SAAS,GAAG,IAAI,CAACyB,OAAO,CAACnC,YAAY,CAAC,CAAC;IAC7C,IAAM6C,eAAe,GAAG9D,cAAc,CAClC,IAAI,CAAC6B,SAAS,CAACC,IAAI,CACf7B,MAAM,CAAC8B,GAAG,IAAIA,GAAG,CAACC,QAAQ,KAAKL,SAAS,CAC5C,CACJ,CAAC;IACD,IAAMoC,OAAwB,GAAG;MAC7B1C,YAAY,EAAE,IAAI,CAACgC,SAAS,CAAChC,YAAY;MACzCM,SAAS;MACTO,MAAM,EAAE2B,UAAU;MAClBzC;IACJ,CAAC;IACD,IAAI,CAACiC,SAAS,CAAC3B,cAAc,CAACO,IAAI,CAAC8B,OAAO,CAAC;IAC3C,IAAMhB,QAAQ,GAAG,MAAMe,eAAe;IACtC,IAAIf,QAAQ,CAACX,KAAK,EAAE;MAChB,MAAM,IAAIC,KAAK,CAAC,2BAA2B,GAAGC,IAAI,CAACC,SAAS,CAAC;QACzDsB,UAAU;QACVzC,MAAM;QACNgB,KAAK,EAAEW,QAAQ,CAACX;MACpB,CAAC,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;IAChB,CAAC,MAAM;MACH,OAAOc,gBAAgB,CAACH,QAAQ,CAAC;IACrC;EACJ,CAAC;EAAAY,OAAA,CACDK,SAAS,GAAT,SAAAA,UACIC,cAAyC,EACzCC,OAAe,EAC+B;IAC9C,OAAO,IAAI,CAACN,aAAa,CAAC,WAAW,EAAE,CAACK,cAAc,EAAEC,OAAO,CAAC,CAAC;EACrE,CAAC;EAAAP,OAAA,CACDQ,iBAAiB,GAAjB,SAAAA,kBAAkBC,GAAa,EAAEC,OAAgB,EAAwC;IACrF,OAAO,IAAI,CAACT,aAAa,CAAC,mBAAmB,EAAE,CAACQ,GAAG,EAAEC,OAAO,CAAC,CAAC;EAClE,CAAC;EAAAV,OAAA,CACDW,KAAK,GAAL,SAAAA,MAAMC,aAAkB,EAA4C;IAChE,OAAO,IAAI,CAACX,aAAa,CAAC,OAAO,EAAE,CAACW,aAAa,CAAC,CAAC;EACvD,CAAC;EAAAZ,OAAA,CACDa,KAAK,GAAL,SAAAA,MAAMD,aAAkB,EAAiC;IACrD,OAAO,IAAI,CAACX,aAAa,CAAC,OAAO,EAAE,CAACW,aAAa,CAAC,CAAC;EACvD,CAAC;EAAAZ,OAAA,CACDc,iBAAiB,GAAjB,SAAAA,kBAAkBC,UAAkB,EAAEC,YAAoB,EAAEC,MAAc,EAAmB;IACzF,OAAO,IAAI,CAAChB,aAAa,CAAC,mBAAmB,EAAE,CAACc,UAAU,EAAEC,YAAY,EAAEC,MAAM,CAAC,CAAC;EACtF,CAAC;EAAAjB,OAAA,CACDkB,wBAAwB,GAAxB,SAAAA,yBACIC,KAAa,EACbC,UAAgB,EAKb;IACH,OAAO,IAAI,CAACnB,aAAa,CAAC,0BAA0B,EAAE,CAACkB,KAAK,EAAEC,UAAU,CAAC,CAAC;EAC9E,CAAC;EAAApB,OAAA,CACDqB,YAAY,GAAZ,SAAAA,aAAA,EAA4F;IACxF,OAAO,IAAI,CAAC1B,QAAQ,CAAC2B,YAAY,CAAC,CAAC;EACvC,CAAC;EAAAtB,OAAA,CACDuB,OAAO,GAAP,SAAAA,QAAQC,cAAsB,EAAoB;IAC9C,OAAO,IAAI,CAACvB,aAAa,CAAC,SAAS,EAAE,CAACuB,cAAc,CAAC,CAAC;EAC1D,CAAC;EAAAxB,OAAA,CACKX,KAAK,GAAX,eAAAA,MAAA,EAA6B;IACzB,IAAI,IAAI,CAACoC,MAAM,EAAE;MACb,O
AAO,IAAI,CAACA,MAAM;IACtB;IACA,IAAI,CAACA,MAAM,GAAG,CAAC,YAAY;MACvB,IAAI,CAAC5B,IAAI,CAAC6B,OAAO,CAACC,GAAG,IAAIA,GAAG,CAACC,WAAW,CAAC,CAAC,CAAC;MAC3C,IAAI,CAACjC,QAAQ,CAACkC,QAAQ,CAAC,CAAC;MACxB,MAAM,IAAI,CAAC5B,aAAa,CAAC,OAAO,EAAE,EAAE,CAAC;MACrC,MAAMvD,mBAAmB,CAAC,IAAI,CAACgD,SAAS,CAAC3B,cAAc,CAAC;IAC5D,CAAC,EAAE,CAAC;IACJ,OAAO,IAAI,CAAC0D,MAAM;EACtB,CAAC;EAAAzB,OAAA,CACK8B,MAAM,GAAZ,eAAAA,OAAA,EAA8B;IAC1B,IAAI,IAAI,CAACL,MAAM,EAAE;MACb,MAAM,IAAI/C,KAAK,CAAC,gBAAgB,CAAC;IACrC;IACA,IAAI,CAAC+C,MAAM,GAAG,CAAC,YAAY;MACvB,MAAM,IAAI,CAACxB,aAAa,CAAC,QAAQ,EAAE,EAAE,CAAC;MACtC,MAAMvD,mBAAmB,CAAC,IAAI,CAACgD,SAAS,CAAC3B,cAAc,CAAC;IAC5D,CAAC,EAAE,CAAC;IACJ,OAAO,IAAI,CAAC0D,MAAM;EACtB,CAAC;EAAAzB,OAAA,CACD+B,sBAAsB,GAAtB,SAAAA,uBAAA,EAAyE;IACrE,OAAO,IAAI,CAACnC,UAAU;EAC1B,CAAC;EAAAI,OAAA,CACKgC,4BAA4B,GAAlC,eAAAA,6BAAmCC,YAAwD,EAAiB;IACxG,MAAM,IAAI,CAAChC,aAAa,CAAC,8BAA8B,EAAE,CAACgC,YAAY,CAAC,CAAC;EAC5E,CAAC;EAAA,OAAApD,uBAAA;AAAA;AAGL,OAAO,SAASqD,kBAAkBA,CAACrF,QAAiC,EAAmB;EACnF,IAAMsF,YAAY,GAAGC,MAAM,CAACC,MAAM,CAAC;IAC/BnF,IAAI,EAAE;EACV,CAAC,EAAEL,QAAQ,CAAC;EACZ,OAAO,IAAID,eAAe,CAACuF,YAAY,CAAC;AAC5C","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-remote/storage-remote-helpers.js b/dist/esm/plugins/storage-remote/storage-remote-helpers.js deleted file mode 100644 index 7e901b634e8..00000000000 --- a/dist/esm/plugins/storage-remote/storage-remote-helpers.js +++ /dev/null @@ -1,18 +0,0 @@ -import { errorToPlainJson } from "../../plugins/utils/index.js"; -export function createErrorAnswer(msg, error) { - return { - connectionId: msg.connectionId, - answerTo: msg.requestId, - method: msg.method, - error: errorToPlainJson(error) - }; -} -export function createAnswer(msg, ret) { - return { - connectionId: msg.connectionId, - answerTo: msg.requestId, - method: msg.method, - return: ret - }; -} -//# sourceMappingURL=storage-remote-helpers.js.map \ No newline at end of file diff --git 
a/dist/esm/plugins/storage-remote/storage-remote-helpers.js.map b/dist/esm/plugins/storage-remote/storage-remote-helpers.js.map deleted file mode 100644 index dcd31790da4..00000000000 --- a/dist/esm/plugins/storage-remote/storage-remote-helpers.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"storage-remote-helpers.js","names":["errorToPlainJson","createErrorAnswer","msg","error","connectionId","answerTo","requestId","method","createAnswer","ret","return"],"sources":["../../../../src/plugins/storage-remote/storage-remote-helpers.ts"],"sourcesContent":["import type {\n RxError,\n RxTypeError\n} from '../../types/index.d.ts';\nimport { errorToPlainJson } from '../../plugins/utils/index.ts';\nimport type {\n MessageFromRemote,\n MessageToRemote\n} from './storage-remote-types.ts';\n\nexport function createErrorAnswer(\n msg: MessageToRemote,\n error: Error | TypeError | RxError | RxTypeError\n): MessageFromRemote {\n return {\n connectionId: msg.connectionId,\n answerTo: msg.requestId,\n method: msg.method,\n error: errorToPlainJson(error)\n };\n}\n\nexport function createAnswer(\n msg: MessageToRemote,\n ret: any\n): MessageFromRemote {\n return {\n connectionId: msg.connectionId,\n answerTo: msg.requestId,\n method: msg.method,\n return: ret\n };\n}\n"],"mappings":"AAIA,SAASA,gBAAgB,QAAQ,8BAA8B;AAM/D,OAAO,SAASC,iBAAiBA,CAC7BC,GAAoB,EACpBC,KAAgD,EAC/B;EACjB,OAAO;IACHC,YAAY,EAAEF,GAAG,CAACE,YAAY;IAC9BC,QAAQ,EAAEH,GAAG,CAACI,SAAS;IACvBC,MAAM,EAAEL,GAAG,CAACK,MAAM;IAClBJ,KAAK,EAAEH,gBAAgB,CAACG,KAAK;EACjC,CAAC;AACL;AAEA,OAAO,SAASK,YAAYA,CACxBN,GAAoB,EACpBO,GAAQ,EACS;EACjB,OAAO;IACHL,YAAY,EAAEF,GAAG,CAACE,YAAY;IAC9BC,QAAQ,EAAEH,GAAG,CAACI,SAAS;IACvBC,MAAM,EAAEL,GAAG,CAACK,MAAM;IAClBG,MAAM,EAAED;EACZ,CAAC;AACL","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/storage-remote/storage-remote-types.js b/dist/esm/plugins/storage-remote/storage-remote-types.js deleted file mode 100644 index d254b57488d..00000000000 --- 
a/dist/esm/plugins/storage-remote/storage-remote-types.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=storage-remote-types.js.map \ No newline at end of file diff --git a/dist/esm/plugins/storage-remote/storage-remote-types.js.map b/dist/esm/plugins/storage-remote/storage-remote-types.js.map deleted file mode 100644 index c14574b1815..00000000000 --- a/dist/esm/plugins/storage-remote/storage-remote-types.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"storage-remote-types.js","names":[],"sources":["../../../../src/plugins/storage-remote/storage-remote-types.ts"],"sourcesContent":["import type { Observable } from 'rxjs';\nimport type {\n MaybePromise,\n PlainJsonError,\n RxDatabase,\n RxStorage,\n RxStorageInstance,\n RxStorageInstanceCreationParams\n} from '../../types/index.d.ts';\n\n\n\nexport type MessageFromRemote = {\n connectionId: string;\n answerTo: string; // id of the request\n method: keyof RxStorageInstance | 'create' | 'custom';\n error?: PlainJsonError;\n return?: any | string;\n};\n\nexport type MessageToRemote = {\n connectionId: string;\n /**\n * Unique ID of the request\n */\n requestId: string;\n method: keyof RxStorageInstance | 'create' | 'custom';\n params:\n RxStorageInstanceCreationParams | // used in the create call\n any[] | // used to call RxStorageInstance methods\n any; // used in custom requests\n};\n\n\n/**\n * A message channel represents a single\n * channel that is able to communicate with the remote.\n * For example a single websocket connection or WebWorker instance.\n * The storage must be able to open and close MessageChannels\n * according to the modes settings.\n */\nexport type RemoteMessageChannel = {\n send(msg: MessageToRemote): void;\n messages$: Observable;\n close(): Promise;\n};\n\nexport type RxStorageRemoteSettings = {\n identifier: string;\n /**\n * There are different modes\n * that determine how many message channels are used.\n * These modes can have different performance 
patterns.\n *\n * [default='storage']\n */\n mode?:\n // create exactly one RemoteMessageChannel and reuse that everywhere.\n | 'one'\n // storage: create one RemoteMessageChannel per call to getRxStorage...()\n | 'storage'\n // database: create one RemoteMessageChannel for each database\n | 'database'\n // collection: create one RemoteMessageChannel for each collection\n | 'collection';\n messageChannelCreator: () => Promise;\n};\n\nexport type RxStorageRemoteInternals = {\n params: RxStorageInstanceCreationParams;\n connectionId: string;\n messageChannel: RemoteMessageChannel;\n};\n\nexport type RxStorageRemoteExposeSettingsBase = {\n send(msg: MessageFromRemote): void;\n messages$: Observable;\n customRequestHandler?: CustomRequestHandler;\n};\n\nexport type RxStorageRemoteExposeSettingsRxDatabase = RxStorageRemoteExposeSettingsBase & {\n /**\n * The database which must be mapped to the remote storage server.\n */\n database: RxDatabase;\n};\n\nexport type RxStorageRemoteExposeSettingsRxStorage = RxStorageRemoteExposeSettingsBase & {\n /**\n * The original storage\n * which actually stores the data.\n */\n storage: RxStorage;\n};\n\nexport type RxStorageRemoteExposeSettings = RxStorageRemoteExposeSettingsRxDatabase | RxStorageRemoteExposeSettingsRxStorage;\n\nexport type RxStorageRemoteExposeType = {\n instanceByFullName: Map;\n};\n\n/**\n * If set, the clients can send RxDB-unrelated custom messages\n * to the remote storage and it will answer them.\n */\nexport type CustomRequestHandler = (data: In) => MaybePromise;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/test-utils/config.js b/dist/esm/plugins/test-utils/config.js deleted file mode 100644 index c2600626153..00000000000 --- a/dist/esm/plugins/test-utils/config.js +++ /dev/null @@ -1,106 +0,0 @@ -/// -import { ensureNotFalsy, isPromise, randomCouchString } from "../utils/index.js"; -import { enforceOptions as broadcastChannelEnforceOptions } from 
'broadcast-channel'; -import events from 'node:events'; -import { wrappedKeyEncryptionCryptoJsStorage } from "../encryption-crypto-js/index.js"; -export var isDeno = typeof window !== 'undefined' && 'Deno' in window; -export var isBun = typeof process !== 'undefined' && !!process.versions.bun; -export var isNode = !isDeno && !isBun && typeof window === 'undefined'; -var config; -export function setConfig(newConfig) { - config = newConfig; -} -var initDone = false; -export function getConfig() { - if (!initDone) { - initTestEnvironment(); - initDone = true; - } - return ensureNotFalsy(config, 'testConfig not set'); -} -function getEnvVariables() { - if (isDeno) { - var ret = {}; - ['DEFAULT_STORAGE', 'NODE_ENV'].forEach(k => { - ret[k] = Deno.env.get(k); - }); - return ret; - } - return isBun || isNode ? process.env : window.__karma__.config.env; -} -export var ENV_VARIABLES = getEnvVariables(); -export var DEFAULT_STORAGE = ENV_VARIABLES.DEFAULT_STORAGE; -export function isFastMode() { - try { - return ENV_VARIABLES.NODE_ENV === 'fast'; - } catch (err) { - return false; - } -} -export function initTestEnvironment() { - if (ENV_VARIABLES.NODE_ENV === 'fast') { - broadcastChannelEnforceOptions({ - type: 'simulate' - }); - } - - /** - * Overwrite the console for easier debugging - */ - var oldConsoleLog = console.log.bind(console); - var oldConsoleDir = console.dir.bind(console); - function newLog(value) { - if (isPromise(value)) { - oldConsoleDir(value); - throw new Error('cannot log Promise(), you should await it first'); - } - if (typeof value === 'string' || typeof value === 'number') { - oldConsoleLog(value); - return; - } - try { - JSON.stringify(value); - oldConsoleLog(JSON.stringify(value, null, 4)); - } catch (err) { - oldConsoleDir(value); - } - } - console.log = newLog.bind(console); - console.dir = newLog.bind(console); - console.log('DEFAULT_STORAGE: ' + DEFAULT_STORAGE); - if (isNode) { - process.setMaxListeners(100); - 
events.EventEmitter.defaultMaxListeners = 100; - - /** - * Add a global function to process, so we can debug timings - */ - process.startTime = performance.now(); - process.logTime = (msg = '') => { - var diff = performance.now() - process.startTime; - console.log('process logTime(' + msg + ') ' + diff + 'ms'); - }; - } -} -export function getEncryptedStorage(baseStorage = getConfig().storage.getStorage()) { - var ret = config.storage.hasEncryption ? baseStorage : wrappedKeyEncryptionCryptoJsStorage({ - storage: baseStorage - }); - return ret; -} -export function isNotOneOfTheseStorages(storageNames) { - var isName = getConfig().storage.name; - if (storageNames.includes(isName)) { - return false; - } else { - return true; - } -} -export function getPassword() { - if (getConfig().storage.hasEncryption) { - return ensureNotFalsy(getConfig().storage.hasEncryption)(); - } else { - return Promise.resolve('test-password-' + randomCouchString(10)); - } -} -//# sourceMappingURL=config.js.map \ No newline at end of file diff --git a/dist/esm/plugins/test-utils/config.js.map b/dist/esm/plugins/test-utils/config.js.map deleted file mode 100644 index 02de81ed57e..00000000000 --- a/dist/esm/plugins/test-utils/config.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"config.js","names":["ensureNotFalsy","isPromise","randomCouchString","enforceOptions","broadcastChannelEnforceOptions","events","wrappedKeyEncryptionCryptoJsStorage","isDeno","window","isBun","process","versions","bun","isNode","config","setConfig","newConfig","initDone","getConfig","initTestEnvironment","getEnvVariables","ret","forEach","k","Deno","env","get","__karma__","ENV_VARIABLES","DEFAULT_STORAGE","isFastMode","NODE_ENV","err","type","oldConsoleLog","console","log","bind","oldConsoleDir","dir","newLog","value","Error","JSON","stringify","setMaxListeners","EventEmitter","defaultMaxListeners","startTime","performance","now","logTime","msg","diff","getEncryptedStorage","baseStorage","storage","getStorage","hasEncryption","isNotOneOfTheseStorages","storageNames","isName","name","includes","getPassword","Promise","resolve"],"sources":["../../../../src/plugins/test-utils/config.ts"],"sourcesContent":["/// \nimport {\n ensureNotFalsy,\n isPromise,\n randomCouchString\n} from '../utils/index.ts';\nimport {\n enforceOptions as broadcastChannelEnforceOptions\n} from 'broadcast-channel';\nimport events from 'node:events';\nimport * as path from 'node:path';\nimport url from 'node:url';\nimport type { RxStorage, RxTestStorage } from '../../types';\nimport { wrappedKeyEncryptionCryptoJsStorage } from '../encryption-crypto-js/index.ts';\n\nexport type TestConfig = {\n storage: RxTestStorage;\n};\n\nexport const isDeno = typeof window !== 'undefined' && 'Deno' in window;\nexport const isBun = typeof process !== 'undefined' && !!process.versions.bun;\nexport const isNode = !isDeno && !isBun && typeof window === 'undefined';\n\nlet config: TestConfig;\n\nexport function setConfig(newConfig: TestConfig) {\n config = newConfig;\n}\n\nlet initDone = false;\nexport function getConfig() {\n if (!initDone) {\n initTestEnvironment();\n initDone = true;\n }\n return ensureNotFalsy(config, 'testConfig not set')\n}\n\n\ndeclare const Deno: any;\nfunction 
getEnvVariables() {\n if (isDeno) {\n const ret: any = {};\n [\n 'DEFAULT_STORAGE',\n 'NODE_ENV'\n ].forEach(k => {\n ret[k] = Deno.env.get(k);\n });\n return ret;\n }\n\n return isBun || isNode ? process.env : (window as any).__karma__.config.env;\n}\nexport const ENV_VARIABLES = getEnvVariables();\nexport const DEFAULT_STORAGE = ENV_VARIABLES.DEFAULT_STORAGE as string;\n\nexport function isFastMode(): boolean {\n try {\n return ENV_VARIABLES.NODE_ENV === 'fast';\n } catch (err) {\n return false;\n }\n}\n\nexport function initTestEnvironment() {\n if (ENV_VARIABLES.NODE_ENV === 'fast') {\n broadcastChannelEnforceOptions({\n type: 'simulate'\n });\n }\n\n /**\n * Overwrite the console for easier debugging\n */\n const oldConsoleLog = console.log.bind(console);\n const oldConsoleDir = console.dir.bind(console);\n function newLog(this: typeof console, value: any) {\n if (isPromise(value)) {\n oldConsoleDir(value);\n throw new Error('cannot log Promise(), you should await it first');\n }\n if (typeof value === 'string' || typeof value === 'number') {\n oldConsoleLog(value);\n return;\n }\n try {\n JSON.stringify(value);\n oldConsoleLog(JSON.stringify(value, null, 4));\n } catch (err) {\n oldConsoleDir(value);\n }\n }\n console.log = newLog.bind(console);\n console.dir = newLog.bind(console);\n\n console.log('DEFAULT_STORAGE: ' + DEFAULT_STORAGE);\n\n if (isNode) {\n process.setMaxListeners(100);\n\n events.EventEmitter.defaultMaxListeners = 100;\n\n /**\n * Add a global function to process, so we can debug timings\n */\n (process as any).startTime = performance.now();\n (process as any).logTime = (msg: string = '') => {\n const diff = performance.now() - (process as any).startTime;\n console.log('process logTime(' + msg + ') ' + diff + 'ms');\n };\n }\n}\n\nexport function getEncryptedStorage(baseStorage = getConfig().storage.getStorage()): RxStorage {\n const ret = config.storage.hasEncryption ?\n baseStorage :\n wrappedKeyEncryptionCryptoJsStorage({\n storage: 
baseStorage\n });\n return ret;\n}\n\nexport function isNotOneOfTheseStorages(storageNames: string[]) {\n const isName = getConfig().storage.name;\n if (storageNames.includes(isName)) {\n return false;\n } else {\n return true;\n }\n}\n\n\nexport function getPassword(): Promise {\n if (getConfig().storage.hasEncryption) {\n return ensureNotFalsy(getConfig().storage.hasEncryption)();\n } else {\n return Promise.resolve('test-password-' + randomCouchString(10));\n }\n}\n"],"mappings":"AAAA;AACA,SACIA,cAAc,EACdC,SAAS,EACTC,iBAAiB,QACd,mBAAmB;AAC1B,SACIC,cAAc,IAAIC,8BAA8B,QAC7C,mBAAmB;AAC1B,OAAOC,MAAM,MAAM,aAAa;AAIhC,SAASC,mCAAmC,QAAQ,kCAAkC;AAMtF,OAAO,IAAMC,MAAM,GAAG,OAAOC,MAAM,KAAK,WAAW,IAAI,MAAM,IAAIA,MAAM;AACvE,OAAO,IAAMC,KAAK,GAAG,OAAOC,OAAO,KAAK,WAAW,IAAI,CAAC,CAACA,OAAO,CAACC,QAAQ,CAACC,GAAG;AAC7E,OAAO,IAAMC,MAAM,GAAG,CAACN,MAAM,IAAI,CAACE,KAAK,IAAI,OAAOD,MAAM,KAAK,WAAW;AAExE,IAAIM,MAAkB;AAEtB,OAAO,SAASC,SAASA,CAACC,SAAqB,EAAE;EAC7CF,MAAM,GAAGE,SAAS;AACtB;AAEA,IAAIC,QAAQ,GAAG,KAAK;AACpB,OAAO,SAASC,SAASA,CAAA,EAAG;EACxB,IAAI,CAACD,QAAQ,EAAE;IACXE,mBAAmB,CAAC,CAAC;IACrBF,QAAQ,GAAG,IAAI;EACnB;EACA,OAAOjB,cAAc,CAACc,MAAM,EAAE,oBAAoB,CAAC;AACvD;AAIA,SAASM,eAAeA,CAAA,EAAG;EACvB,IAAIb,MAAM,EAAE;IACR,IAAMc,GAAQ,GAAG,CAAC,CAAC;IACnB,CACI,iBAAiB,EACjB,UAAU,CACb,CAACC,OAAO,CAACC,CAAC,IAAI;MACXF,GAAG,CAACE,CAAC,CAAC,GAAGC,IAAI,CAACC,GAAG,CAACC,GAAG,CAACH,CAAC,CAAC;IAC5B,CAAC,CAAC;IACF,OAAOF,GAAG;EACd;EAEA,OAAOZ,KAAK,IAAII,MAAM,GAAGH,OAAO,CAACe,GAAG,GAAIjB,MAAM,CAASmB,SAAS,CAACb,MAAM,CAACW,GAAG;AAC/E;AACA,OAAO,IAAMG,aAAa,GAAGR,eAAe,CAAC,CAAC;AAC9C,OAAO,IAAMS,eAAe,GAAGD,aAAa,CAACC,eAAyB;AAEtE,OAAO,SAASC,UAAUA,CAAA,EAAY;EAClC,IAAI;IACA,OAAOF,aAAa,CAACG,QAAQ,KAAK,MAAM;EAC5C,CAAC,CAAC,OAAOC,GAAG,EAAE;IACV,OAAO,KAAK;EAChB;AACJ;AAEA,OAAO,SAASb,mBAAmBA,CAAA,EAAG;EAClC,IAAIS,aAAa,CAACG,QAAQ,KAAK,MAAM,EAAE;IACnC3B,8BAA8B,CAAC;MAC3B6B,IAAI,EAAE;IACV,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;EACI,IAAMC,aAAa,GAAGC,OAAO,CAACC,GAAG,CAACC,IAAI,CAACF,OAAO,CAAC;EAC/C,IAAMG,aAAa,GAAGH,OAAO,CAACI,GAAG,CAA
CF,IAAI,CAACF,OAAO,CAAC;EAC/C,SAASK,MAAMA,CAAuBC,KAAU,EAAE;IAC9C,IAAIxC,SAAS,CAACwC,KAAK,CAAC,EAAE;MAClBH,aAAa,CAACG,KAAK,CAAC;MACpB,MAAM,IAAIC,KAAK,CAAC,iDAAiD,CAAC;IACtE;IACA,IAAI,OAAOD,KAAK,KAAK,QAAQ,IAAI,OAAOA,KAAK,KAAK,QAAQ,EAAE;MACxDP,aAAa,CAACO,KAAK,CAAC;MACpB;IACJ;IACA,IAAI;MACAE,IAAI,CAACC,SAAS,CAACH,KAAK,CAAC;MACrBP,aAAa,CAACS,IAAI,CAACC,SAAS,CAACH,KAAK,EAAE,IAAI,EAAE,CAAC,CAAC,CAAC;IACjD,CAAC,CAAC,OAAOT,GAAG,EAAE;MACVM,aAAa,CAACG,KAAK,CAAC;IACxB;EACJ;EACAN,OAAO,CAACC,GAAG,GAAGI,MAAM,CAACH,IAAI,CAACF,OAAO,CAAC;EAClCA,OAAO,CAACI,GAAG,GAAGC,MAAM,CAACH,IAAI,CAACF,OAAO,CAAC;EAElCA,OAAO,CAACC,GAAG,CAAC,mBAAmB,GAAGP,eAAe,CAAC;EAElD,IAAIhB,MAAM,EAAE;IACRH,OAAO,CAACmC,eAAe,CAAC,GAAG,CAAC;IAE5BxC,MAAM,CAACyC,YAAY,CAACC,mBAAmB,GAAG,GAAG;;IAE7C;AACR;AACA;IACSrC,OAAO,CAASsC,SAAS,GAAGC,WAAW,CAACC,GAAG,CAAC,CAAC;IAC7CxC,OAAO,CAASyC,OAAO,GAAG,CAACC,GAAW,GAAG,EAAE,KAAK;MAC7C,IAAMC,IAAI,GAAGJ,WAAW,CAACC,GAAG,CAAC,CAAC,GAAIxC,OAAO,CAASsC,SAAS;MAC3Db,OAAO,CAACC,GAAG,CAAC,kBAAkB,GAAGgB,GAAG,GAAG,IAAI,GAAGC,IAAI,GAAG,IAAI,CAAC;IAC9D,CAAC;EACL;AACJ;AAEA,OAAO,SAASC,mBAAmBA,CAACC,WAAW,GAAGrC,SAAS,CAAC,CAAC,CAACsC,OAAO,CAACC,UAAU,CAAC,CAAC,EAAuB;EACrG,IAAMpC,GAAG,GAAGP,MAAM,CAAC0C,OAAO,CAACE,aAAa,GACpCH,WAAW,GACXjD,mCAAmC,CAAC;IAChCkD,OAAO,EAAED;EACb,CAAC,CAAC;EACN,OAAOlC,GAAG;AACd;AAEA,OAAO,SAASsC,uBAAuBA,CAACC,YAAsB,EAAE;EAC5D,IAAMC,MAAM,GAAG3C,SAAS,CAAC,CAAC,CAACsC,OAAO,CAACM,IAAI;EACvC,IAAIF,YAAY,CAACG,QAAQ,CAACF,MAAM,CAAC,EAAE;IAC/B,OAAO,KAAK;EAChB,CAAC,MAAM;IACH,OAAO,IAAI;EACf;AACJ;AAGA,OAAO,SAASG,WAAWA,CAAA,EAAoB;EAC3C,IAAI9C,SAAS,CAAC,CAAC,CAACsC,OAAO,CAACE,aAAa,EAAE;IACnC,OAAO1D,cAAc,CAACkB,SAAS,CAAC,CAAC,CAACsC,OAAO,CAACE,aAAa,CAAC,CAAC,CAAC;EAC9D,CAAC,MAAM;IACH,OAAOO,OAAO,CAACC,OAAO,CAAC,gBAAgB,GAAGhE,iBAAiB,CAAC,EAAE,CAAC,CAAC;EACpE;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/test-utils/humans-collection.js b/dist/esm/plugins/test-utils/humans-collection.js deleted file mode 100644 index fb4756ef32e..00000000000 --- 
a/dist/esm/plugins/test-utils/humans-collection.js +++ /dev/null @@ -1,370 +0,0 @@ -import clone from 'clone'; -import * as schemas from "./schemas.js"; -import * as schemaObjects from "./schema-objects.js"; -import { getConfig } from "./config.js"; -import assert from 'assert'; -import { createRxDatabase, randomCouchString } from "../../index.js"; -export async function create(size = 20, collectionName = 'human', multiInstance = true, eventReduce = true, storage = getConfig().storage.getStorage()) { - var db = await createRxDatabase({ - name: randomCouchString(10), - storage, - multiInstance, - eventReduce, - ignoreDuplicate: true, - localDocuments: true - }); - var collections = await db.addCollections({ - [collectionName]: { - schema: schemas.human, - localDocuments: true - } - }); - - // insert data - if (size > 0) { - var docsData = new Array(size).fill(0).map(() => schemaObjects.humanData()); - var writeResult = await collections[collectionName].bulkInsert(docsData); - assert.deepStrictEqual(writeResult.error, []); - } - return collections[collectionName]; -} -export async function createBySchema(schema, name = 'human', storage = getConfig().storage.getStorage(), migrationStrategies) { - var db = await createRxDatabase({ - name: randomCouchString(10), - storage, - multiInstance: true, - eventReduce: true, - ignoreDuplicate: true - }); - var collections = await db.addCollections({ - [name]: { - schema, - migrationStrategies - } - }); - return collections[name]; -} -export async function createAttachments(size = 20, name = 'human', multiInstance = true) { - if (!name) { - name = 'human'; - } - var db = await createRxDatabase({ - name: randomCouchString(10), - storage: getConfig().storage.getStorage(), - multiInstance, - eventReduce: true, - ignoreDuplicate: true - }); - var schemaJson = clone(schemas.human); - schemaJson.attachments = {}; - var collections = await db.addCollections({ - [name]: { - schema: schemaJson - } - }); - - // insert data - if (size > 0) 
{ - var docsData = new Array(size).fill(0).map(() => schemaObjects.humanData()); - await collections[name].bulkInsert(docsData); - } - return collections[name]; -} -export async function createNoCompression(size = 20, name = 'human') { - var db = await createRxDatabase({ - name: randomCouchString(10), - storage: getConfig().storage.getStorage(), - eventReduce: true, - ignoreDuplicate: true - }); - var schemaJSON = clone(schemas.human); - schemaJSON.keyCompression = false; - // setTimeout(() => db.destroy(), dbLifetime); - var collections = await db.addCollections({ - [name]: { - schema: schemaJSON - } - }); - - // insert data - if (size > 0) { - var docsData = new Array(size).fill(0).map(() => schemaObjects.humanData()); - await collections[name].bulkInsert(docsData); - } - return collections[name]; -} -export async function createAgeIndex(amount = 20) { - var db = await createRxDatabase({ - name: randomCouchString(10), - storage: getConfig().storage.getStorage(), - eventReduce: true, - ignoreDuplicate: true - }); - // setTimeout(() => db.destroy(), dbLifetime); - var collections = await db.addCollections({ - humana: { - schema: schemas.humanAgeIndex - } - }); - - // insert data - if (amount > 0) { - var docsData = new Array(amount).fill(0).map(() => schemaObjects.humanData()); - await collections.humana.bulkInsert(docsData); - } - return collections.humana; -} -export async function multipleOnSameDB(size = 10) { - var db = await createRxDatabase({ - name: randomCouchString(10), - storage: getConfig().storage.getStorage(), - eventReduce: true, - ignoreDuplicate: true - }); - // setTimeout(() => db.destroy(), dbLifetime); - var collections = await db.addCollections({ - human: { - schema: schemas.human - }, - human2: { - schema: schemas.human - } - }); - - // insert data - if (size > 0) { - var docsData = new Array(size).fill(0).map(() => schemaObjects.humanData()); - await collections.human.bulkInsert(docsData); - var docsData2 = new Array(size).fill(0).map(() => 
schemaObjects.humanData()); - await collections.human2.bulkInsert(docsData2); - } - return { - db, - collection: collections.human, - collection2: collections.human2 - }; -} -export async function createNested(amount = 5) { - var db = await createRxDatabase({ - name: randomCouchString(10), - storage: getConfig().storage.getStorage(), - eventReduce: true, - ignoreDuplicate: true - }); - // setTimeout(() => db.destroy(), dbLifetime); - var collections = await db.addCollections({ - nestedhuman: { - schema: schemas.nestedHuman - } - }); - - // insert data - if (amount > 0) { - var docsData = new Array(amount).fill(0).map(() => schemaObjects.nestedHumanData()); - await collections.nestedhuman.bulkInsert(docsData); - } - return collections.nestedhuman; -} -export async function createDeepNested(amount = 5) { - var db = await createRxDatabase({ - name: randomCouchString(10), - storage: getConfig().storage.getStorage(), - eventReduce: true - }); - // setTimeout(() => db.destroy(), dbLifetime); - var collections = await db.addCollections({ - nestedhuman: { - schema: schemas.deepNestedHuman - } - }); - - // insert data - if (amount > 0) { - var docsData = new Array(amount).fill(0).map(() => schemaObjects.deepNestedHumanData()); - await collections.nestedhuman.bulkInsert(docsData); - } - return collections.nestedhuman; -} -export async function createMultiInstance(name, amount = 0, password = undefined, storage = getConfig().storage.getStorage()) { - if (!getConfig().storage.hasMultiInstance) { - throw new Error('createMultiInstance() cannot be called on a storage with hasMultiInstance:false'); - } - var db = await createRxDatabase({ - name, - storage, - password, - multiInstance: true, - eventReduce: true, - ignoreDuplicate: true, - localDocuments: true - }); - // setTimeout(() => db.destroy(), dbLifetime); - var collections = await db.addCollections({ - human: { - schema: schemas.human, - localDocuments: true - } - }); - // insert data - if (amount > 0) { - var docsData = 
new Array(amount).fill(0).map(() => schemaObjects.humanData()); - await collections.human.bulkInsert(docsData); - } - return collections.human; -} -export async function createPrimary(amount = 10, name = randomCouchString(10)) { - var db = await createRxDatabase({ - name, - storage: getConfig().storage.getStorage(), - multiInstance: true, - eventReduce: true, - ignoreDuplicate: true - }); - // setTimeout(() => db.destroy(), dbLifetime); - var collections = await db.addCollections({ - human: { - schema: schemas.primaryHuman - } - }); - - // insert data - if (amount > 0) { - var docsData = new Array(amount).fill(0).map(() => schemaObjects.simpleHumanData()); - await collections.human.bulkInsert(docsData); - } - return collections.human; -} -export async function createHumanWithTimestamp(amount = 0, databaseName = randomCouchString(10), multiInstance = true, storage = getConfig().storage.getStorage()) { - var db = await createRxDatabase({ - name: databaseName, - storage, - multiInstance, - eventReduce: true, - ignoreDuplicate: true - }); - // setTimeout(() => db.destroy(), dbLifetime); - var collections = await db.addCollections({ - humans: { - schema: schemas.humanWithTimestamp - } - }); - - // insert data - if (amount > 0) { - var docsData = new Array(amount).fill(0).map(() => schemaObjects.humanWithTimestampData()); - await collections.humans.bulkInsert(docsData); - } - return collections.humans; -} -export async function createMigrationCollection(amount = 0, addMigrationStrategies = {}, name = randomCouchString(10), autoMigrate = false, attachment) { - var migrationStrategies = Object.assign({ - 1: doc => doc, - 2: doc => doc, - 3: doc => doc - }, addMigrationStrategies); - var colName = 'human'; - var db = await createRxDatabase({ - name, - storage: getConfig().storage.getStorage(), - eventReduce: true, - ignoreDuplicate: true - }); - var cols = await db.addCollections({ - [colName]: { - schema: attachment !== undefined ? 
{ - ...schemas.simpleHuman, - attachments: {} - } : schemas.simpleHuman, - autoMigrate: false - } - }); - await Promise.all(new Array(amount).fill(0).map(() => cols[colName].insert(schemaObjects.simpleHumanAge()).then(doc => { - if (attachment !== undefined) { - return doc.putAttachment(attachment); - } - }))); - await db.destroy(); - var db2 = await createRxDatabase({ - name, - storage: getConfig().storage.getStorage(), - eventReduce: true, - ignoreDuplicate: true - }); - var cols2 = await db2.addCollections({ - [colName]: { - schema: attachment !== undefined ? { - ...schemas.simpleHumanV3, - attachments: {} - } : schemas.simpleHumanV3, - autoMigrate, - migrationStrategies - } - }); - return cols2[colName]; -} -export async function createRelated(name = randomCouchString(10)) { - var db = await createRxDatabase({ - name, - storage: getConfig().storage.getStorage(), - multiInstance: true, - eventReduce: true, - ignoreDuplicate: true - }); - // setTimeout(() => db.destroy(), dbLifetime); - var collections = await db.addCollections({ - human: { - schema: schemas.refHuman - } - }); - var doc1 = schemaObjects.refHumanData(); - var doc2 = schemaObjects.refHumanData(doc1.name); - doc1.bestFriend = doc2.name; // cross-relation - - await collections.human.insert(doc1); - await collections.human.insert(doc2); - return collections.human; -} -export async function createRelatedNested(name = randomCouchString(10)) { - var db = await createRxDatabase({ - name, - storage: getConfig().storage.getStorage(), - multiInstance: true, - eventReduce: true, - ignoreDuplicate: true - }); - // setTimeout(() => db.destroy(), dbLifetime); - var collections = await db.addCollections({ - human: { - schema: schemas.refHumanNested - } - }); - var doc1 = schemaObjects.refHumanNestedData(); - var doc2 = schemaObjects.refHumanNestedData(doc1.name); - doc1.foo.bestFriend = doc2.name; // cross-relation - - await collections.human.insert(doc1); - await collections.human.insert(doc2); - return 
collections.human; -} -export async function createIdAndAgeIndex(amount = 20) { - var db = await createRxDatabase({ - name: randomCouchString(10), - storage: getConfig().storage.getStorage(), - eventReduce: true, - ignoreDuplicate: true - }); - // setTimeout(() => db.destroy(), dbLifetime); - var collections = await db.addCollections({ - humana: { - schema: schemas.humanIdAndAgeIndex - } - }); - - // insert data - if (amount > 0) { - var docsData = new Array(amount).fill(0).map(() => schemaObjects.humanWithIdAndAgeIndexDocumentType()); - await collections.humana.bulkInsert(docsData); - } - return collections.humana; -} -//# sourceMappingURL=humans-collection.js.map \ No newline at end of file diff --git a/dist/esm/plugins/test-utils/humans-collection.js.map b/dist/esm/plugins/test-utils/humans-collection.js.map deleted file mode 100644 index a1a5015653a..00000000000 --- a/dist/esm/plugins/test-utils/humans-collection.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"humans-collection.js","names":["clone","schemas","schemaObjects","getConfig","assert","createRxDatabase","randomCouchString","create","size","collectionName","multiInstance","eventReduce","storage","getStorage","db","name","ignoreDuplicate","localDocuments","collections","addCollections","schema","human","docsData","Array","fill","map","humanData","writeResult","bulkInsert","deepStrictEqual","error","createBySchema","migrationStrategies","createAttachments","schemaJson","attachments","createNoCompression","schemaJSON","keyCompression","createAgeIndex","amount","humana","humanAgeIndex","multipleOnSameDB","human2","docsData2","collection","collection2","createNested","nestedhuman","nestedHuman","nestedHumanData","createDeepNested","deepNestedHuman","deepNestedHumanData","createMultiInstance","password","undefined","hasMultiInstance","Error","createPrimary","primaryHuman","simpleHumanData","createHumanWithTimestamp","databaseName","humans","humanWithTimestamp","humanWithTimestampData","createMigratio
nCollection","addMigrationStrategies","autoMigrate","attachment","Object","assign","doc","colName","cols","simpleHuman","Promise","all","insert","simpleHumanAge","then","putAttachment","destroy","db2","cols2","simpleHumanV3","createRelated","refHuman","doc1","refHumanData","doc2","bestFriend","createRelatedNested","refHumanNested","refHumanNestedData","foo","createIdAndAgeIndex","humanIdAndAgeIndex","humanWithIdAndAgeIndexDocumentType"],"sources":["../../../../src/plugins/test-utils/humans-collection.ts"],"sourcesContent":["import clone from 'clone';\nimport * as schemas from './schemas.ts';\nimport * as schemaObjects from './schema-objects.ts';\nimport { getConfig } from './config.ts';\nimport assert from 'assert';\n\nimport {\n createRxDatabase,\n RxJsonSchema,\n RxCollection,\n RxDatabase,\n randomCouchString,\n MigrationStrategies,\n RxAttachmentCreator,\n RxStorage\n} from '../../index.ts';\n\nimport { HumanDocumentType } from './schemas.ts';\n\nexport async function create(\n size: number = 20,\n collectionName: string = 'human',\n multiInstance: boolean = true,\n eventReduce: boolean = true,\n storage: RxStorage = getConfig().storage.getStorage()\n\n): Promise> {\n const db = await createRxDatabase<{ human: RxCollection; }>({\n name: randomCouchString(10),\n storage,\n multiInstance,\n eventReduce,\n ignoreDuplicate: true,\n localDocuments: true\n });\n\n const collections = await db.addCollections({\n [collectionName]: {\n schema: schemas.human,\n localDocuments: true\n }\n });\n\n // insert data\n if (size > 0) {\n const docsData = new Array(size)\n .fill(0)\n .map(() => schemaObjects.humanData());\n const writeResult = await collections[collectionName].bulkInsert(docsData);\n assert.deepStrictEqual(writeResult.error, []);\n }\n return collections[collectionName];\n}\n\nexport async function createBySchema(\n schema: RxJsonSchema,\n name = 'human',\n storage = getConfig().storage.getStorage(),\n migrationStrategies?: MigrationStrategies\n): Promise> {\n 
const db = await createRxDatabase<{ [prop: string]: RxCollection; }>({\n name: randomCouchString(10),\n storage,\n multiInstance: true,\n eventReduce: true,\n ignoreDuplicate: true\n });\n\n const collections = await db.addCollections({\n [name]: {\n schema,\n migrationStrategies\n }\n });\n\n return collections[name];\n}\n\nexport async function createAttachments(\n size = 20,\n name = 'human',\n multiInstance = true\n): Promise> {\n if (!name) {\n name = 'human';\n }\n const db = await createRxDatabase<{ [prop: string]: RxCollection; }>({\n name: randomCouchString(10),\n storage: getConfig().storage.getStorage(),\n multiInstance,\n eventReduce: true,\n ignoreDuplicate: true\n });\n\n const schemaJson = clone(schemas.human);\n schemaJson.attachments = {};\n\n const collections = await db.addCollections({\n [name]: {\n schema: schemaJson\n }\n });\n\n // insert data\n if (size > 0) {\n const docsData = new Array(size)\n .fill(0)\n .map(() => schemaObjects.humanData());\n await collections[name].bulkInsert(docsData);\n }\n\n return collections[name];\n}\n\nexport async function createNoCompression(\n size = 20,\n name = 'human'\n): Promise> {\n const db = await createRxDatabase<{ [prop: string]: RxCollection; }>({\n name: randomCouchString(10),\n storage: getConfig().storage.getStorage(),\n eventReduce: true,\n ignoreDuplicate: true\n });\n const schemaJSON = clone(schemas.human);\n schemaJSON.keyCompression = false;\n // setTimeout(() => db.destroy(), dbLifetime);\n const collections = await db.addCollections({\n [name]: {\n schema: schemaJSON\n }\n });\n\n // insert data\n if (size > 0) {\n const docsData = new Array(size)\n .fill(0)\n .map(() => schemaObjects.humanData());\n await collections[name].bulkInsert(docsData);\n }\n\n return collections[name];\n}\n\nexport async function createAgeIndex(\n amount = 20\n): Promise> {\n const db = await createRxDatabase<{ humana: RxCollection; }>({\n name: randomCouchString(10),\n storage: 
getConfig().storage.getStorage(),\n eventReduce: true,\n ignoreDuplicate: true\n });\n // setTimeout(() => db.destroy(), dbLifetime);\n const collections = await db.addCollections({\n humana: {\n schema: schemas.humanAgeIndex\n }\n });\n\n // insert data\n if (amount > 0) {\n const docsData = new Array(amount)\n .fill(0)\n .map(() => schemaObjects.humanData());\n await collections.humana.bulkInsert(docsData);\n }\n\n return collections.humana;\n}\n\nexport async function multipleOnSameDB(\n size = 10\n): Promise<{\n db: RxDatabase<{\n human: RxCollection;\n human2: RxCollection;\n }>;\n collection: RxCollection;\n collection2: RxCollection;\n}> {\n const db = await createRxDatabase<{\n human: RxCollection;\n human2: RxCollection;\n }>({\n name: randomCouchString(10),\n storage: getConfig().storage.getStorage(),\n eventReduce: true,\n ignoreDuplicate: true\n });\n // setTimeout(() => db.destroy(), dbLifetime);\n const collections = await db.addCollections({\n human: {\n schema: schemas.human\n },\n human2: {\n schema: schemas.human\n }\n });\n\n // insert data\n if (size > 0) {\n const docsData = new Array(size)\n .fill(0)\n .map(() => schemaObjects.humanData());\n await collections.human.bulkInsert(docsData);\n\n const docsData2 = new Array(size)\n .fill(0)\n .map(() => schemaObjects.humanData());\n await collections.human2.bulkInsert(docsData2);\n }\n\n return {\n db,\n collection: collections.human,\n collection2: collections.human2\n };\n}\n\nexport async function createNested(\n amount = 5\n): Promise> {\n const db = await createRxDatabase<{ nestedhuman: RxCollection; }>({\n name: randomCouchString(10),\n storage: getConfig().storage.getStorage(),\n eventReduce: true,\n ignoreDuplicate: true\n });\n // setTimeout(() => db.destroy(), dbLifetime);\n const collections = await db.addCollections({\n nestedhuman: {\n schema: schemas.nestedHuman\n }\n });\n\n // insert data\n if (amount > 0) {\n const docsData = new Array(amount)\n .fill(0)\n .map(() => 
schemaObjects.nestedHumanData());\n await collections.nestedhuman.bulkInsert(docsData);\n }\n\n return collections.nestedhuman;\n}\n\nexport async function createDeepNested(\n amount = 5\n): Promise> {\n const db = await createRxDatabase<{ nestedhuman: RxCollection; }>({\n name: randomCouchString(10),\n storage: getConfig().storage.getStorage(),\n eventReduce: true,\n });\n // setTimeout(() => db.destroy(), dbLifetime);\n const collections = await db.addCollections({\n nestedhuman: {\n schema: schemas.deepNestedHuman\n }\n });\n\n // insert data\n if (amount > 0) {\n const docsData = new Array(amount)\n .fill(0)\n .map(() => schemaObjects.deepNestedHumanData());\n await collections.nestedhuman.bulkInsert(docsData);\n }\n\n return collections.nestedhuman;\n}\n\nexport async function createMultiInstance(\n name: string,\n amount = 0,\n password = undefined,\n storage: RxStorage = getConfig().storage.getStorage()\n): Promise> {\n if (!getConfig().storage.hasMultiInstance) {\n throw new Error('createMultiInstance() cannot be called on a storage with hasMultiInstance:false');\n }\n\n const db = await createRxDatabase<{ human: RxCollection; }>({\n name,\n storage,\n password,\n multiInstance: true,\n eventReduce: true,\n ignoreDuplicate: true,\n localDocuments: true\n });\n // setTimeout(() => db.destroy(), dbLifetime);\n const collections = await db.addCollections({\n human: {\n schema: schemas.human,\n localDocuments: true\n }\n });\n // insert data\n if (amount > 0) {\n const docsData = new Array(amount)\n .fill(0)\n .map(() => schemaObjects.humanData());\n await collections.human.bulkInsert(docsData);\n }\n\n return collections.human;\n}\n\nexport async function createPrimary(\n amount = 10,\n name = randomCouchString(10)\n): Promise> {\n\n const db = await createRxDatabase<{ human: RxCollection; }>({\n name,\n storage: getConfig().storage.getStorage(),\n multiInstance: true,\n eventReduce: true,\n ignoreDuplicate: true\n });\n // setTimeout(() => db.destroy(), 
dbLifetime);\n const collections = await db.addCollections({\n human: {\n schema: schemas.primaryHuman\n }\n });\n\n // insert data\n if (amount > 0) {\n const docsData = new Array(amount)\n .fill(0)\n .map(() => schemaObjects.simpleHumanData());\n await collections.human.bulkInsert(docsData);\n }\n\n return collections.human;\n}\n\nexport async function createHumanWithTimestamp(\n amount = 0,\n databaseName = randomCouchString(10),\n multiInstance = true,\n storage = getConfig().storage.getStorage()\n): Promise> {\n\n const db = await createRxDatabase<{ humans: RxCollection; }>({\n name: databaseName,\n storage,\n multiInstance,\n eventReduce: true,\n ignoreDuplicate: true\n });\n // setTimeout(() => db.destroy(), dbLifetime);\n const collections = await db.addCollections({\n humans: {\n schema: schemas.humanWithTimestamp\n }\n });\n\n // insert data\n if (amount > 0) {\n const docsData = new Array(amount)\n .fill(0)\n .map(() => schemaObjects.humanWithTimestampData());\n await collections.humans.bulkInsert(docsData);\n }\n\n return collections.humans;\n}\n\nexport async function createMigrationCollection(\n amount = 0,\n addMigrationStrategies: MigrationStrategies = {},\n name = randomCouchString(10),\n autoMigrate = false,\n attachment?: RxAttachmentCreator\n): Promise> {\n\n const migrationStrategies: any = Object.assign(\n {\n 1: (doc: any) => doc,\n 2: (doc: any) => doc,\n 3: (doc: any) => doc\n },\n addMigrationStrategies\n );\n\n\n const colName = 'human';\n const db = await createRxDatabase<{ human: RxCollection; }>({\n name,\n storage: getConfig().storage.getStorage(),\n eventReduce: true,\n ignoreDuplicate: true\n });\n const cols = await db.addCollections({\n [colName]: {\n schema: attachment !== undefined ? 
{ ...schemas.simpleHuman, attachments: {} } : schemas.simpleHuman,\n autoMigrate: false\n }\n });\n\n await Promise.all(\n new Array(amount)\n .fill(0)\n .map(() => cols[colName].insert(schemaObjects.simpleHumanAge()).then(doc => {\n if (attachment !== undefined) {\n return doc.putAttachment(attachment);\n }\n }))\n );\n await db.destroy();\n\n const db2 = await createRxDatabase<{ human: RxCollection; }>({\n name,\n storage: getConfig().storage.getStorage(),\n eventReduce: true,\n ignoreDuplicate: true\n });\n const cols2 = await db2.addCollections({\n [colName]: {\n schema: attachment !== undefined ? { ...schemas.simpleHumanV3, attachments: {} } : schemas.simpleHumanV3,\n autoMigrate,\n migrationStrategies\n }\n });\n\n return cols2[colName];\n}\n\nexport async function createRelated(\n name = randomCouchString(10)\n): Promise> {\n const db = await createRxDatabase<{ human: RxCollection; }>({\n name,\n storage: getConfig().storage.getStorage(),\n multiInstance: true,\n eventReduce: true,\n ignoreDuplicate: true\n });\n // setTimeout(() => db.destroy(), dbLifetime);\n const collections = await db.addCollections({\n human: {\n schema: schemas.refHuman\n }\n });\n\n const doc1 = schemaObjects.refHumanData();\n const doc2 = schemaObjects.refHumanData(doc1.name);\n doc1.bestFriend = doc2.name; // cross-relation\n\n await collections.human.insert(doc1);\n await collections.human.insert(doc2);\n\n return collections.human;\n}\n\nexport async function createRelatedNested(\n name = randomCouchString(10)\n): Promise> {\n\n const db = await createRxDatabase<{ human: RxCollection; }>({\n name,\n storage: getConfig().storage.getStorage(),\n multiInstance: true,\n eventReduce: true,\n ignoreDuplicate: true\n });\n // setTimeout(() => db.destroy(), dbLifetime);\n const collections = await db.addCollections({\n human: {\n schema: schemas.refHumanNested\n }\n });\n\n const doc1 = schemaObjects.refHumanNestedData();\n const doc2 = schemaObjects.refHumanNestedData(doc1.name);\n 
doc1.foo.bestFriend = doc2.name; // cross-relation\n\n await collections.human.insert(doc1);\n await collections.human.insert(doc2);\n\n return collections.human;\n}\n\nexport async function createIdAndAgeIndex(\n amount = 20\n): Promise> {\n const db = await createRxDatabase<{ humana: RxCollection; }>({\n name: randomCouchString(10),\n storage: getConfig().storage.getStorage(),\n eventReduce: true,\n ignoreDuplicate: true\n });\n // setTimeout(() => db.destroy(), dbLifetime);\n const collections = await db.addCollections({\n humana: {\n schema: schemas.humanIdAndAgeIndex\n }\n });\n\n // insert data\n if (amount > 0) {\n const docsData = new Array(amount)\n .fill(0)\n .map(() => schemaObjects.humanWithIdAndAgeIndexDocumentType());\n await collections.humana.bulkInsert(docsData);\n }\n\n return collections.humana;\n}\n"],"mappings":"AAAA,OAAOA,KAAK,MAAM,OAAO;AACzB,OAAO,KAAKC,OAAO,MAAM,cAAc;AACvC,OAAO,KAAKC,aAAa,MAAM,qBAAqB;AACpD,SAASC,SAAS,QAAQ,aAAa;AACvC,OAAOC,MAAM,MAAM,QAAQ;AAE3B,SACIC,gBAAgB,EAIhBC,iBAAiB,QAId,gBAAgB;AAIvB,OAAO,eAAeC,MAAMA,CACxBC,IAAY,GAAG,EAAE,EACjBC,cAAsB,GAAG,OAAO,EAChCC,aAAsB,GAAG,IAAI,EAC7BC,WAAoB,GAAG,IAAI,EAC3BC,OAA4B,GAAGT,SAAS,CAAC,CAAC,CAACS,OAAO,CAACC,UAAU,CAAC,CAAC,EAEf;EAChD,IAAMC,EAAE,GAAG,MAAMT,gBAAgB,CAA8C;IAC3EU,IAAI,EAAET,iBAAiB,CAAC,EAAE,CAAC;IAC3BM,OAAO;IACPF,aAAa;IACbC,WAAW;IACXK,eAAe,EAAE,IAAI;IACrBC,cAAc,EAAE;EACpB,CAAC,CAAC;EAEF,IAAMC,WAAW,GAAG,MAAMJ,EAAE,CAACK,cAAc,CAAC;IACxC,CAACV,cAAc,GAAG;MACdW,MAAM,EAAEnB,OAAO,CAACoB,KAAK;MACrBJ,cAAc,EAAE;IACpB;EACJ,CAAC,CAAC;;EAEF;EACA,IAAIT,IAAI,GAAG,CAAC,EAAE;IACV,IAAMc,QAAQ,GAAG,IAAIC,KAAK,CAACf,IAAI,CAAC,CAC3BgB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAMvB,aAAa,CAACwB,SAAS,CAAC,CAAC,CAAC;IACzC,IAAMC,WAAW,GAAG,MAAMT,WAAW,CAACT,cAAc,CAAC,CAACmB,UAAU,CAACN,QAAQ,CAAC;IAC1ElB,MAAM,CAACyB,eAAe,CAACF,WAAW,CAACG,KAAK,EAAE,EAAE,CAAC;EACjD;EACA,OAAOZ,WAAW,CAACT,cAAc,CAAC;AACtC;AAEA,OAAO,eAAesB,cAAcA,CAChCX,MAAoC,EACpCL,IAAI,GAAG,OAAO,EACdH,OAAO,GAAGT,SAAS,CAAC,CAAC,CAACS,OAAO,CAACC,UAAU,CAAC
,CAAC,EAC1CmB,mBAAyC,EACI;EAC7C,IAAMlB,EAAE,GAAG,MAAMT,gBAAgB,CAAoD;IACjFU,IAAI,EAAET,iBAAiB,CAAC,EAAE,CAAC;IAC3BM,OAAO;IACPF,aAAa,EAAE,IAAI;IACnBC,WAAW,EAAE,IAAI;IACjBK,eAAe,EAAE;EACrB,CAAC,CAAC;EAEF,IAAME,WAAW,GAAG,MAAMJ,EAAE,CAACK,cAAc,CAAC;IACxC,CAACJ,IAAI,GAAG;MACJK,MAAM;MACNY;IACJ;EACJ,CAAC,CAAC;EAEF,OAAOd,WAAW,CAACH,IAAI,CAAC;AAC5B;AAEA,OAAO,eAAekB,iBAAiBA,CACnCzB,IAAI,GAAG,EAAE,EACTO,IAAI,GAAG,OAAO,EACdL,aAAa,GAAG,IAAI,EAC4B;EAChD,IAAI,CAACK,IAAI,EAAE;IACPA,IAAI,GAAG,OAAO;EAClB;EACA,IAAMD,EAAE,GAAG,MAAMT,gBAAgB,CAAuD;IACpFU,IAAI,EAAET,iBAAiB,CAAC,EAAE,CAAC;IAC3BM,OAAO,EAAET,SAAS,CAAC,CAAC,CAACS,OAAO,CAACC,UAAU,CAAC,CAAC;IACzCH,aAAa;IACbC,WAAW,EAAE,IAAI;IACjBK,eAAe,EAAE;EACrB,CAAC,CAAC;EAEF,IAAMkB,UAAU,GAAGlC,KAAK,CAACC,OAAO,CAACoB,KAAK,CAAC;EACvCa,UAAU,CAACC,WAAW,GAAG,CAAC,CAAC;EAE3B,IAAMjB,WAAW,GAAG,MAAMJ,EAAE,CAACK,cAAc,CAAC;IACxC,CAACJ,IAAI,GAAG;MACJK,MAAM,EAAEc;IACZ;EACJ,CAAC,CAAC;;EAEF;EACA,IAAI1B,IAAI,GAAG,CAAC,EAAE;IACV,IAAMc,QAAQ,GAAG,IAAIC,KAAK,CAACf,IAAI,CAAC,CAC3BgB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAMvB,aAAa,CAACwB,SAAS,CAAC,CAAC,CAAC;IACzC,MAAMR,WAAW,CAACH,IAAI,CAAC,CAACa,UAAU,CAACN,QAAQ,CAAC;EAChD;EAEA,OAAOJ,WAAW,CAACH,IAAI,CAAC;AAC5B;AAEA,OAAO,eAAeqB,mBAAmBA,CACrC5B,IAAI,GAAG,EAAE,EACTO,IAAI,GAAG,OAAO,EAC0B;EACxC,IAAMD,EAAE,GAAG,MAAMT,gBAAgB,CAAuD;IACpFU,IAAI,EAAET,iBAAiB,CAAC,EAAE,CAAC;IAC3BM,OAAO,EAAET,SAAS,CAAC,CAAC,CAACS,OAAO,CAACC,UAAU,CAAC,CAAC;IACzCF,WAAW,EAAE,IAAI;IACjBK,eAAe,EAAE;EACrB,CAAC,CAAC;EACF,IAAMqB,UAAU,GAAGrC,KAAK,CAACC,OAAO,CAACoB,KAAK,CAAC;EACvCgB,UAAU,CAACC,cAAc,GAAG,KAAK;EACjC;EACA,IAAMpB,WAAW,GAAG,MAAMJ,EAAE,CAACK,cAAc,CAAC;IACxC,CAACJ,IAAI,GAAG;MACJK,MAAM,EAAEiB;IACZ;EACJ,CAAC,CAAC;;EAEF;EACA,IAAI7B,IAAI,GAAG,CAAC,EAAE;IACV,IAAMc,QAAQ,GAAG,IAAIC,KAAK,CAACf,IAAI,CAAC,CAC3BgB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAMvB,aAAa,CAACwB,SAAS,CAAC,CAAC,CAAC;IACzC,MAAMR,WAAW,CAACH,IAAI,CAAC,CAACa,UAAU,CAACN,QAAQ,CAAC;EAChD;EAEA,OAAOJ,WAAW,CAACH,IAAI,CAAC;AAC5B;AAEA,OAAO,eAAewB,cAAcA,CAChCC,MAAM,GAAG,EAAE,EAC6B;EACxC,IAAM1B,EA
AE,GAAG,MAAMT,gBAAgB,CAA+C;IAC5EU,IAAI,EAAET,iBAAiB,CAAC,EAAE,CAAC;IAC3BM,OAAO,EAAET,SAAS,CAAC,CAAC,CAACS,OAAO,CAACC,UAAU,CAAC,CAAC;IACzCF,WAAW,EAAE,IAAI;IACjBK,eAAe,EAAE;EACrB,CAAC,CAAC;EACF;EACA,IAAME,WAAW,GAAG,MAAMJ,EAAE,CAACK,cAAc,CAAC;IACxCsB,MAAM,EAAE;MACJrB,MAAM,EAAEnB,OAAO,CAACyC;IACpB;EACJ,CAAC,CAAC;;EAEF;EACA,IAAIF,MAAM,GAAG,CAAC,EAAE;IACZ,IAAMlB,QAAQ,GAAG,IAAIC,KAAK,CAACiB,MAAM,CAAC,CAC7BhB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAMvB,aAAa,CAACwB,SAAS,CAAC,CAAC,CAAC;IACzC,MAAMR,WAAW,CAACuB,MAAM,CAACb,UAAU,CAACN,QAAQ,CAAC;EACjD;EAEA,OAAOJ,WAAW,CAACuB,MAAM;AAC7B;AAEA,OAAO,eAAeE,gBAAgBA,CAClCnC,IAAI,GAAG,EAAE,EAQV;EACC,IAAMM,EAAE,GAAG,MAAMT,gBAAgB,CAG9B;IACCU,IAAI,EAAET,iBAAiB,CAAC,EAAE,CAAC;IAC3BM,OAAO,EAAET,SAAS,CAAC,CAAC,CAACS,OAAO,CAACC,UAAU,CAAC,CAAC;IACzCF,WAAW,EAAE,IAAI;IACjBK,eAAe,EAAE;EACrB,CAAC,CAAC;EACF;EACA,IAAME,WAAW,GAAG,MAAMJ,EAAE,CAACK,cAAc,CAAC;IACxCE,KAAK,EAAE;MACHD,MAAM,EAAEnB,OAAO,CAACoB;IACpB,CAAC;IACDuB,MAAM,EAAE;MACJxB,MAAM,EAAEnB,OAAO,CAACoB;IACpB;EACJ,CAAC,CAAC;;EAEF;EACA,IAAIb,IAAI,GAAG,CAAC,EAAE;IACV,IAAMc,QAAQ,GAAG,IAAIC,KAAK,CAACf,IAAI,CAAC,CAC3BgB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAMvB,aAAa,CAACwB,SAAS,CAAC,CAAC,CAAC;IACzC,MAAMR,WAAW,CAACG,KAAK,CAACO,UAAU,CAACN,QAAQ,CAAC;IAE5C,IAAMuB,SAAS,GAAG,IAAItB,KAAK,CAACf,IAAI,CAAC,CAC5BgB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAMvB,aAAa,CAACwB,SAAS,CAAC,CAAC,CAAC;IACzC,MAAMR,WAAW,CAAC0B,MAAM,CAAChB,UAAU,CAACiB,SAAS,CAAC;EAClD;EAEA,OAAO;IACH/B,EAAE;IACFgC,UAAU,EAAE5B,WAAW,CAACG,KAAK;IAC7B0B,WAAW,EAAE7B,WAAW,CAAC0B;EAC7B,CAAC;AACL;AAEA,OAAO,eAAeI,YAAYA,CAC9BR,MAAM,GAAG,CAAC,EACkD;EAC5D,IAAM1B,EAAE,GAAG,MAAMT,gBAAgB,CAAwE;IACrGU,IAAI,EAAET,iBAAiB,CAAC,EAAE,CAAC;IAC3BM,OAAO,EAAET,SAAS,CAAC,CAAC,CAACS,OAAO,CAACC,UAAU,CAAC,CAAC;IACzCF,WAAW,EAAE,IAAI;IACjBK,eAAe,EAAE;EACrB,CAAC,CAAC;EACF;EACA,IAAME,WAAW,GAAG,MAAMJ,EAAE,CAACK,cAAc,CAAC;IACxC8B,WAAW,EAAE;MACT7B,MAAM,EAAEnB,OAAO,CAACiD;IACpB;EACJ,CAAC,CAAC;;EAEF;EACA,IAAIV,MAAM,GAAG,CAAC,EAAE;IACZ,IAAMlB,QAAQ,GAAG,IAAIC,KAAK,CAACiB,MAAM,CAAC,CAC7B
hB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAMvB,aAAa,CAACiD,eAAe,CAAC,CAAC,CAAC;IAC/C,MAAMjC,WAAW,CAAC+B,WAAW,CAACrB,UAAU,CAACN,QAAQ,CAAC;EACtD;EAEA,OAAOJ,WAAW,CAAC+B,WAAW;AAClC;AAEA,OAAO,eAAeG,gBAAgBA,CAClCZ,MAAM,GAAG,CAAC,EACsD;EAChE,IAAM1B,EAAE,GAAG,MAAMT,gBAAgB,CAA4E;IACzGU,IAAI,EAAET,iBAAiB,CAAC,EAAE,CAAC;IAC3BM,OAAO,EAAET,SAAS,CAAC,CAAC,CAACS,OAAO,CAACC,UAAU,CAAC,CAAC;IACzCF,WAAW,EAAE;EACjB,CAAC,CAAC;EACF;EACA,IAAMO,WAAW,GAAG,MAAMJ,EAAE,CAACK,cAAc,CAAC;IACxC8B,WAAW,EAAE;MACT7B,MAAM,EAAEnB,OAAO,CAACoD;IACpB;EACJ,CAAC,CAAC;;EAEF;EACA,IAAIb,MAAM,GAAG,CAAC,EAAE;IACZ,IAAMlB,QAAQ,GAAG,IAAIC,KAAK,CAACiB,MAAM,CAAC,CAC7BhB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAMvB,aAAa,CAACoD,mBAAmB,CAAC,CAAC,CAAC;IACnD,MAAMpC,WAAW,CAAC+B,WAAW,CAACrB,UAAU,CAACN,QAAQ,CAAC;EACtD;EAEA,OAAOJ,WAAW,CAAC+B,WAAW;AAClC;AAEA,OAAO,eAAeM,mBAAmBA,CACrCxC,IAAY,EACZyB,MAAM,GAAG,CAAC,EACVgB,QAAQ,GAAGC,SAAS,EACpB7C,OAA4B,GAAGT,SAAS,CAAC,CAAC,CAACS,OAAO,CAACC,UAAU,CAAC,CAAC,EACf;EAChD,IAAI,CAACV,SAAS,CAAC,CAAC,CAACS,OAAO,CAAC8C,gBAAgB,EAAE;IACvC,MAAM,IAAIC,KAAK,CAAC,iFAAiF,CAAC;EACtG;EAEA,IAAM7C,EAAE,GAAG,MAAMT,gBAAgB,CAA8C;IAC3EU,IAAI;IACJH,OAAO;IACP4C,QAAQ;IACR9C,aAAa,EAAE,IAAI;IACnBC,WAAW,EAAE,IAAI;IACjBK,eAAe,EAAE,IAAI;IACrBC,cAAc,EAAE;EACpB,CAAC,CAAC;EACF;EACA,IAAMC,WAAW,GAAG,MAAMJ,EAAE,CAACK,cAAc,CAAC;IACxCE,KAAK,EAAE;MACHD,MAAM,EAAEnB,OAAO,CAACoB,KAAK;MACrBJ,cAAc,EAAE;IACpB;EACJ,CAAC,CAAC;EACF;EACA,IAAIuB,MAAM,GAAG,CAAC,EAAE;IACZ,IAAMlB,QAAQ,GAAG,IAAIC,KAAK,CAACiB,MAAM,CAAC,CAC7BhB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAMvB,aAAa,CAACwB,SAAS,CAAC,CAAC,CAAC;IACzC,MAAMR,WAAW,CAACG,KAAK,CAACO,UAAU,CAACN,QAAQ,CAAC;EAChD;EAEA,OAAOJ,WAAW,CAACG,KAAK;AAC5B;AAEA,OAAO,eAAeuC,aAAaA,CAC/BpB,MAAM,GAAG,EAAE,EACXzB,IAAI,GAAGT,iBAAiB,CAAC,EAAE,CAAC,EACgC;EAE5D,IAAMQ,EAAE,GAAG,MAAMT,gBAAgB,CAAkE;IAC/FU,IAAI;IACJH,OAAO,EAAET,SAAS,CAAC,CAAC,CAACS,OAAO,CAACC,UAAU,CAAC,CAAC;IACzCH,aAAa,EAAE,IAAI;IACnBC,WAAW,EAAE,IAAI;IACjBK,eAAe,EAAE;EACrB,CAAC,CAAC;EACF;EACA,IAAME,WAAW,GAAG,MAAMJ,EAAE,CAACK,cAAc,CAAC;IACxCE,KAAK,EAAE;
MACHD,MAAM,EAAEnB,OAAO,CAAC4D;IACpB;EACJ,CAAC,CAAC;;EAEF;EACA,IAAIrB,MAAM,GAAG,CAAC,EAAE;IACZ,IAAMlB,QAAQ,GAAG,IAAIC,KAAK,CAACiB,MAAM,CAAC,CAC7BhB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAMvB,aAAa,CAAC4D,eAAe,CAAC,CAAC,CAAC;IAC/C,MAAM5C,WAAW,CAACG,KAAK,CAACO,UAAU,CAACN,QAAQ,CAAC;EAChD;EAEA,OAAOJ,WAAW,CAACG,KAAK;AAC5B;AAEA,OAAO,eAAe0C,wBAAwBA,CAC1CvB,MAAM,GAAG,CAAC,EACVwB,YAAY,GAAG1D,iBAAiB,CAAC,EAAE,CAAC,EACpCI,aAAa,GAAG,IAAI,EACpBE,OAAO,GAAGT,SAAS,CAAC,CAAC,CAACS,OAAO,CAACC,UAAU,CAAC,CAAC,EACyB;EAEnE,IAAMC,EAAE,GAAG,MAAMT,gBAAgB,CAA0E;IACvGU,IAAI,EAAEiD,YAAY;IAClBpD,OAAO;IACPF,aAAa;IACbC,WAAW,EAAE,IAAI;IACjBK,eAAe,EAAE;EACrB,CAAC,CAAC;EACF;EACA,IAAME,WAAW,GAAG,MAAMJ,EAAE,CAACK,cAAc,CAAC;IACxC8C,MAAM,EAAE;MACJ7C,MAAM,EAAEnB,OAAO,CAACiE;IACpB;EACJ,CAAC,CAAC;;EAEF;EACA,IAAI1B,MAAM,GAAG,CAAC,EAAE;IACZ,IAAMlB,QAAQ,GAAG,IAAIC,KAAK,CAACiB,MAAM,CAAC,CAC7BhB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAMvB,aAAa,CAACiE,sBAAsB,CAAC,CAAC,CAAC;IACtD,MAAMjD,WAAW,CAAC+C,MAAM,CAACrC,UAAU,CAACN,QAAQ,CAAC;EACjD;EAEA,OAAOJ,WAAW,CAAC+C,MAAM;AAC7B;AAEA,OAAO,eAAeG,yBAAyBA,CAC3C5B,MAAM,GAAG,CAAC,EACV6B,sBAA2C,GAAG,CAAC,CAAC,EAChDtD,IAAI,GAAGT,iBAAiB,CAAC,EAAE,CAAC,EAC5BgE,WAAW,GAAG,KAAK,EACnBC,UAAgC,EAC8B;EAE9D,IAAMvC,mBAAwB,GAAGwC,MAAM,CAACC,MAAM,CAC1C;IACI,CAAC,EAAGC,GAAQ,IAAKA,GAAG;IACpB,CAAC,EAAGA,GAAQ,IAAKA,GAAG;IACpB,CAAC,EAAGA,GAAQ,IAAKA;EACrB,CAAC,EACDL,sBACJ,CAAC;EAGD,IAAMM,OAAO,GAAG,OAAO;EACvB,IAAM7D,EAAE,GAAG,MAAMT,gBAAgB,CAAqE;IAClGU,IAAI;IACJH,OAAO,EAAET,SAAS,CAAC,CAAC,CAACS,OAAO,CAACC,UAAU,CAAC,CAAC;IACzCF,WAAW,EAAE,IAAI;IACjBK,eAAe,EAAE;EACrB,CAAC,CAAC;EACF,IAAM4D,IAAI,GAAG,MAAM9D,EAAE,CAACK,cAAc,CAAC;IACjC,CAACwD,OAAO,GAAG;MACPvD,MAAM,EAAEmD,UAAU,KAAKd,SAAS,GAAG;QAAE,GAAGxD,OAAO,CAAC4E,WAAW;QAAE1C,WAAW,EAAE,CAAC;MAAE,CAAC,GAAGlC,OAAO,CAAC4E,WAAW;MACpGP,WAAW,EAAE;IACjB;EACJ,CAAC,CAAC;EAEF,MAAMQ,OAAO,CAACC,GAAG,CACb,IAAIxD,KAAK,CAACiB,MAAM,CAAC,CACZhB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAMmD,IAAI,CAACD,OAAO,CAAC,CAACK,MAAM,CAAC9E,aAAa,CAAC+E,cAAc,CAAC,CAAC,CAAC,CAACC,IAAI,CAACR,G
AAG,IAAI;IACxE,IAAIH,UAAU,KAAKd,SAAS,EAAE;MAC1B,OAAOiB,GAAG,CAACS,aAAa,CAACZ,UAAU,CAAC;IACxC;EACJ,CAAC,CAAC,CACV,CAAC;EACD,MAAMzD,EAAE,CAACsE,OAAO,CAAC,CAAC;EAElB,IAAMC,GAAG,GAAG,MAAMhF,gBAAgB,CAAoE;IAClGU,IAAI;IACJH,OAAO,EAAET,SAAS,CAAC,CAAC,CAACS,OAAO,CAACC,UAAU,CAAC,CAAC;IACzCF,WAAW,EAAE,IAAI;IACjBK,eAAe,EAAE;EACrB,CAAC,CAAC;EACF,IAAMsE,KAAK,GAAG,MAAMD,GAAG,CAAClE,cAAc,CAAC;IACnC,CAACwD,OAAO,GAAG;MACPvD,MAAM,EAAEmD,UAAU,KAAKd,SAAS,GAAG;QAAE,GAAGxD,OAAO,CAACsF,aAAa;QAAEpD,WAAW,EAAE,CAAC;MAAE,CAAC,GAAGlC,OAAO,CAACsF,aAAa;MACxGjB,WAAW;MACXtC;IACJ;EACJ,CAAC,CAAC;EAEF,OAAOsD,KAAK,CAACX,OAAO,CAAC;AACzB;AAEA,OAAO,eAAea,aAAaA,CAC/BzE,IAAI,GAAGT,iBAAiB,CAAC,EAAE,CAAC,EAC6B;EACzD,IAAMQ,EAAE,GAAG,MAAMT,gBAAgB,CAA+D;IAC5FU,IAAI;IACJH,OAAO,EAAET,SAAS,CAAC,CAAC,CAACS,OAAO,CAACC,UAAU,CAAC,CAAC;IACzCH,aAAa,EAAE,IAAI;IACnBC,WAAW,EAAE,IAAI;IACjBK,eAAe,EAAE;EACrB,CAAC,CAAC;EACF;EACA,IAAME,WAAW,GAAG,MAAMJ,EAAE,CAACK,cAAc,CAAC;IACxCE,KAAK,EAAE;MACHD,MAAM,EAAEnB,OAAO,CAACwF;IACpB;EACJ,CAAC,CAAC;EAEF,IAAMC,IAAI,GAAGxF,aAAa,CAACyF,YAAY,CAAC,CAAC;EACzC,IAAMC,IAAI,GAAG1F,aAAa,CAACyF,YAAY,CAACD,IAAI,CAAC3E,IAAI,CAAC;EAClD2E,IAAI,CAACG,UAAU,GAAGD,IAAI,CAAC7E,IAAI,CAAC,CAAC;;EAE7B,MAAMG,WAAW,CAACG,KAAK,CAAC2D,MAAM,CAACU,IAAI,CAAC;EACpC,MAAMxE,WAAW,CAACG,KAAK,CAAC2D,MAAM,CAACY,IAAI,CAAC;EAEpC,OAAO1E,WAAW,CAACG,KAAK;AAC5B;AAEA,OAAO,eAAeyE,mBAAmBA,CACrC/E,IAAI,GAAGT,iBAAiB,CAAC,EAAE,CAAC,EACmC;EAE/D,IAAMQ,EAAE,GAAG,MAAMT,gBAAgB,CAAqE;IAClGU,IAAI;IACJH,OAAO,EAAET,SAAS,CAAC,CAAC,CAACS,OAAO,CAACC,UAAU,CAAC,CAAC;IACzCH,aAAa,EAAE,IAAI;IACnBC,WAAW,EAAE,IAAI;IACjBK,eAAe,EAAE;EACrB,CAAC,CAAC;EACF;EACA,IAAME,WAAW,GAAG,MAAMJ,EAAE,CAACK,cAAc,CAAC;IACxCE,KAAK,EAAE;MACHD,MAAM,EAAEnB,OAAO,CAAC8F;IACpB;EACJ,CAAC,CAAC;EAEF,IAAML,IAAI,GAAGxF,aAAa,CAAC8F,kBAAkB,CAAC,CAAC;EAC/C,IAAMJ,IAAI,GAAG1F,aAAa,CAAC8F,kBAAkB,CAACN,IAAI,CAAC3E,IAAI,CAAC;EACxD2E,IAAI,CAACO,GAAG,CAACJ,UAAU,GAAGD,IAAI,CAAC7E,IAAI,CAAC,CAAC;;EAEjC,MAAMG,WAAW,CAACG,KAAK,CAAC2D,MAAM,CAACU,IAAI,CAAC;EACpC,MAAMxE,WAAW,CAACG,KAAK,CAAC2D,MAAM,CAACY,IA
AI,CAAC;EAEpC,OAAO1E,WAAW,CAACG,KAAK;AAC5B;AAEA,OAAO,eAAe6E,mBAAmBA,CACrC1D,MAAM,GAAG,EAAE,EAC4D;EACvE,IAAM1B,EAAE,GAAG,MAAMT,gBAAgB,CAA8E;IAC3GU,IAAI,EAAET,iBAAiB,CAAC,EAAE,CAAC;IAC3BM,OAAO,EAAET,SAAS,CAAC,CAAC,CAACS,OAAO,CAACC,UAAU,CAAC,CAAC;IACzCF,WAAW,EAAE,IAAI;IACjBK,eAAe,EAAE;EACrB,CAAC,CAAC;EACF;EACA,IAAME,WAAW,GAAG,MAAMJ,EAAE,CAACK,cAAc,CAAC;IACxCsB,MAAM,EAAE;MACJrB,MAAM,EAAEnB,OAAO,CAACkG;IACpB;EACJ,CAAC,CAAC;;EAEF;EACA,IAAI3D,MAAM,GAAG,CAAC,EAAE;IACZ,IAAMlB,QAAQ,GAAG,IAAIC,KAAK,CAACiB,MAAM,CAAC,CAC7BhB,IAAI,CAAC,CAAC,CAAC,CACPC,GAAG,CAAC,MAAMvB,aAAa,CAACkG,kCAAkC,CAAC,CAAC,CAAC;IAClE,MAAMlF,WAAW,CAACuB,MAAM,CAACb,UAAU,CAACN,QAAQ,CAAC;EACjD;EAEA,OAAOJ,WAAW,CAACuB,MAAM;AAC7B","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/test-utils/index.js b/dist/esm/plugins/test-utils/index.js deleted file mode 100644 index 71554f145f4..00000000000 --- a/dist/esm/plugins/test-utils/index.js +++ /dev/null @@ -1,20 +0,0 @@ -/** - * This plugins contains thing that are needed for testing - * in RxDB related context. Mostly used in the unit tests and - * also in the tests for the premium and the server repository. 
- */ - -export * from "./config.js"; -export * from "./humans-collection.js"; -export * from "./port-manager.js"; -export * from "./revisions.js"; -export * from "./test-util.js"; -export * from "./schema-objects.js"; -export * from "./schemas.js"; -import * as humansCollectionConst from "./humans-collection.js"; -export var humansCollection = humansCollectionConst; -import * as schemasConst from "./schemas.js"; -export var schemas = schemasConst; -import * as schemaObjectsConst from "./schema-objects.js"; -export var schemaObjects = schemaObjectsConst; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/test-utils/index.js.map b/dist/esm/plugins/test-utils/index.js.map deleted file mode 100644 index 1bae5bc4ab1..00000000000 --- a/dist/esm/plugins/test-utils/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["humansCollectionConst","humansCollection","schemasConst","schemas","schemaObjectsConst","schemaObjects"],"sources":["../../../../src/plugins/test-utils/index.ts"],"sourcesContent":["/**\n * This plugins contains thing that are needed for testing\n * in RxDB related context. 
Mostly used in the unit tests and\n * also in the tests for the premium and the server repository.\n */\n\nexport * from './config.ts';\nexport * from './humans-collection.ts';\nexport * from './port-manager.ts';\nexport * from './revisions.ts';\nexport * from './test-util.ts';\n\nexport * from './schema-objects.ts';\nexport * from './schemas.ts';\n\nimport * as humansCollectionConst from './humans-collection.ts';\nexport const humansCollection = humansCollectionConst;\n\nimport * as schemasConst from './schemas.ts';\nexport const schemas = schemasConst;\nimport * as schemaObjectsConst from './schema-objects.ts';\nexport const schemaObjects = schemaObjectsConst;\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;;AAEA,cAAc,aAAa;AAC3B,cAAc,wBAAwB;AACtC,cAAc,mBAAmB;AACjC,cAAc,gBAAgB;AAC9B,cAAc,gBAAgB;AAE9B,cAAc,qBAAqB;AACnC,cAAc,cAAc;AAE5B,OAAO,KAAKA,qBAAqB,MAAM,wBAAwB;AAC/D,OAAO,IAAMC,gBAAgB,GAAGD,qBAAqB;AAErD,OAAO,KAAKE,YAAY,MAAM,cAAc;AAC5C,OAAO,IAAMC,OAAO,GAAGD,YAAY;AACnC,OAAO,KAAKE,kBAAkB,MAAM,qBAAqB;AACzD,OAAO,IAAMC,aAAa,GAAGD,kBAAkB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/test-utils/port-manager.js b/dist/esm/plugins/test-utils/port-manager.js deleted file mode 100644 index eef64d76a9d..00000000000 --- a/dist/esm/plugins/test-utils/port-manager.js +++ /dev/null @@ -1,28 +0,0 @@ -import getPort, { makeRange } from 'get-port'; -import { PROMISE_RESOLVE_VOID } from "../utils/index.js"; - -/** - * For easier debugging, we increase the port each time - * to ensure that no port is reused in the tests. - */ -var startPort = 18669; -var PORT_MAX = 65535; -var portQueue = PROMISE_RESOLVE_VOID; - -/** - * Returns an unused port. - * Used to ensure that different tests - * do not accidentally use the same port. 
- */ -export function nextPort() { - portQueue = portQueue.then(async () => { - var port = await getPort({ - port: makeRange(startPort, PORT_MAX), - host: '0.0.0.0' - }); - startPort = port + 1; - return port; - }); - return portQueue; -} -//# sourceMappingURL=port-manager.js.map \ No newline at end of file diff --git a/dist/esm/plugins/test-utils/port-manager.js.map b/dist/esm/plugins/test-utils/port-manager.js.map deleted file mode 100644 index 49657976914..00000000000 --- a/dist/esm/plugins/test-utils/port-manager.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"port-manager.js","names":["getPort","makeRange","PROMISE_RESOLVE_VOID","startPort","PORT_MAX","portQueue","nextPort","then","port","host"],"sources":["../../../../src/plugins/test-utils/port-manager.ts"],"sourcesContent":["import getPort, { makeRange } from 'get-port';\nimport { PROMISE_RESOLVE_VOID } from '../utils/index.ts';\n\n/**\n * For easier debugging, we increase the port each time\n * to ensure that no port is reused in the tests.\n */\nlet startPort = 18669;\n\nconst PORT_MAX = 65535;\nlet portQueue: Promise = PROMISE_RESOLVE_VOID as any;\n\n/**\n * Returns an unused port.\n * Used to ensure that different tests\n * do not accidentally use the same port.\n */\nexport function nextPort(): Promise {\n portQueue = portQueue.then(async () => {\n const port = await getPort({\n port: makeRange(startPort, PORT_MAX),\n host: '0.0.0.0',\n });\n startPort = port + 1;\n return port;\n });\n return 
portQueue;\n}\n"],"mappings":"AAAA,OAAOA,OAAO,IAAIC,SAAS,QAAQ,UAAU;AAC7C,SAASC,oBAAoB,QAAQ,mBAAmB;;AAExD;AACA;AACA;AACA;AACA,IAAIC,SAAS,GAAG,KAAK;AAErB,IAAMC,QAAQ,GAAG,KAAK;AACtB,IAAIC,SAA0B,GAAGH,oBAA2B;;AAE5D;AACA;AACA;AACA;AACA;AACA,OAAO,SAASI,QAAQA,CAAA,EAAoB;EACxCD,SAAS,GAAGA,SAAS,CAACE,IAAI,CAAC,YAAY;IACnC,IAAMC,IAAI,GAAG,MAAMR,OAAO,CAAC;MACvBQ,IAAI,EAAEP,SAAS,CAACE,SAAS,EAAEC,QAAQ,CAAC;MACpCK,IAAI,EAAE;IACV,CAAC,CAAC;IACFN,SAAS,GAAGK,IAAI,GAAG,CAAC;IACpB,OAAOA,IAAI;EACf,CAAC,CAAC;EACF,OAAOH,SAAS;AACpB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/test-utils/revisions.js b/dist/esm/plugins/test-utils/revisions.js deleted file mode 100644 index 33b2a7ccfad..00000000000 --- a/dist/esm/plugins/test-utils/revisions.js +++ /dev/null @@ -1,5 +0,0 @@ -export var EXAMPLE_REVISION_1 = '1-12080c42d471e3d2625e49dcca3b8e1a'; -export var EXAMPLE_REVISION_2 = '2-22080c42d471e3d2625e49dcca3b8e2b'; -export var EXAMPLE_REVISION_3 = '3-32080c42d471e3d2625e49dcca3b8e3c'; -export var EXAMPLE_REVISION_4 = '4-42080c42d471e3d2625e49dcca3b8e3c'; -//# sourceMappingURL=revisions.js.map \ No newline at end of file diff --git a/dist/esm/plugins/test-utils/revisions.js.map b/dist/esm/plugins/test-utils/revisions.js.map deleted file mode 100644 index 017c25ccabf..00000000000 --- a/dist/esm/plugins/test-utils/revisions.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"revisions.js","names":["EXAMPLE_REVISION_1","EXAMPLE_REVISION_2","EXAMPLE_REVISION_3","EXAMPLE_REVISION_4"],"sources":["../../../../src/plugins/test-utils/revisions.ts"],"sourcesContent":["export const EXAMPLE_REVISION_1 = '1-12080c42d471e3d2625e49dcca3b8e1a';\nexport const EXAMPLE_REVISION_2 = '2-22080c42d471e3d2625e49dcca3b8e2b';\nexport const EXAMPLE_REVISION_3 = '3-32080c42d471e3d2625e49dcca3b8e3c';\nexport const EXAMPLE_REVISION_4 = 
'4-42080c42d471e3d2625e49dcca3b8e3c';\n"],"mappings":"AAAA,OAAO,IAAMA,kBAAkB,GAAG,oCAAoC;AACtE,OAAO,IAAMC,kBAAkB,GAAG,oCAAoC;AACtE,OAAO,IAAMC,kBAAkB,GAAG,oCAAoC;AACtE,OAAO,IAAMC,kBAAkB,GAAG,oCAAoC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/test-utils/schema-objects.js b/dist/esm/plugins/test-utils/schema-objects.js deleted file mode 100644 index 3a5ba8ffdc9..00000000000 --- a/dist/esm/plugins/test-utils/schema-objects.js +++ /dev/null @@ -1,241 +0,0 @@ -/** - * this file contains objects which match the schemas in schemas.js - */ - -import { faker } from '@faker-js/faker'; -import { randomNumber, randomString } from 'async-test-util'; -import * as schemas from "./schemas.js"; -import { ensureNotFalsy, lastOfArray } from "../utils/index.js"; - -/** - * Some storages had problems with umlauts and other special chars. - * So we add these to all test strings. - * TODO add emojis - */ -export var TEST_DATA_CHARSET = '0987654321ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzäöüÖÄßÜ[]{}\''; -export var TEST_DATA_CHARSET_LAST_SORTED = ensureNotFalsy(lastOfArray(TEST_DATA_CHARSET.split('').sort())); -// const someEmojis = '😊💩👵🍌'; -export function randomStringWithSpecialChars(length) { - return randomString(length, TEST_DATA_CHARSET); -} -export function humanData(passportId = randomStringWithSpecialChars(12), age = randomNumber(10, 50), firstName = faker.person.firstName()) { - return { - passportId: passportId, - firstName, - lastName: faker.person.lastName(), - age - }; -} -export function simpleHumanData() { - return { - passportId: randomStringWithSpecialChars(12), - firstName: faker.person.firstName(), - lastName: faker.person.lastName() - }; -} -export function simpleHumanV3Data(partial = {}) { - var defaultObj = { - passportId: randomStringWithSpecialChars(12), - age: randomNumber(10, 50) - }; - return Object.assign(defaultObj, partial); -} -export function simpleHumanAge(partial = {}) { - var defaultObj = { - passportId: 
randomStringWithSpecialChars(12), - age: randomNumber(10, 50) + '' - }; - return Object.assign(defaultObj, partial); -} -export function humanWithSubOther() { - return { - passportId: randomStringWithSpecialChars(12), - other: { - age: randomNumber(10, 50) - } - }; -} -export function NoIndexHuman() { - return { - firstName: faker.person.firstName(), - lastName: faker.person.lastName() - }; -} -export function nestedHumanData(partial = {}) { - var defaultObj = { - passportId: randomStringWithSpecialChars(12), - firstName: faker.person.firstName(), - mainSkill: { - name: randomStringWithSpecialChars(6), - level: 5 - } - }; - return Object.assign(defaultObj, partial); -} -export function deepNestedHumanData() { - return { - passportId: randomStringWithSpecialChars(12), - mainSkill: { - name: randomStringWithSpecialChars(6), - attack: { - good: false, - count: 5 - } - } - }; -} -export function bigHumanDocumentType() { - return { - passportId: randomStringWithSpecialChars(12), - dnaHash: randomStringWithSpecialChars(12), - firstName: faker.person.firstName(), - lastName: faker.person.lastName(), - age: randomNumber(10, 50) - }; -} -export function heroArrayData() { - return { - name: randomStringWithSpecialChars(6), - skills: new Array(3).fill(0).map(() => { - return { - name: randomStringWithSpecialChars(6), - damage: randomNumber(10, 50) - }; - }) - }; -} -export function simpleHeroArray(partial = {}) { - var defaultObj = { - name: randomStringWithSpecialChars(6), - skills: new Array(3).fill(0).map(() => randomStringWithSpecialChars(6)) - }; - return Object.assign(defaultObj, partial); -} -export function encryptedHumanData(secret = randomStringWithSpecialChars(12)) { - return { - passportId: randomStringWithSpecialChars(12), - firstName: faker.person.firstName(), - secret - }; -} -export function encryptedObjectHumanData() { - return { - passportId: randomStringWithSpecialChars(12), - firstName: faker.person.firstName(), - secret: { - name: 
randomStringWithSpecialChars(12), - subname: randomStringWithSpecialChars(12) - } - }; -} -export function encryptedDeepHumanDocumentType() { - return { - passportId: randomStringWithSpecialChars(12), - firstName: faker.person.firstName(), - firstLevelPassword: randomStringWithSpecialChars(12), - secretData: { - pw: randomStringWithSpecialChars(12) - }, - deepSecret: { - darkhole: { - pw: randomStringWithSpecialChars(12) - } - }, - nestedSecret: { - darkhole: { - pw: randomStringWithSpecialChars(12) - } - } - }; -} -export function compoundIndexData() { - return { - passportId: randomStringWithSpecialChars(12), - passportCountry: randomStringWithSpecialChars(12), - age: randomNumber(10, 50) - }; -} -export function compoundIndexNoStringData() { - return { - passportId: randomStringWithSpecialChars(12), - passportCountry: { - [randomStringWithSpecialChars(12)]: randomStringWithSpecialChars(12) - }, - age: randomNumber(10, 50) - }; -} -export function nostringIndex() { - return { - passportId: {}, - firstName: faker.person.firstName() - }; -} -export function refHumanData(bestFriend) { - return { - name: randomStringWithSpecialChars(12), - bestFriend - }; -} -export function refHumanNestedData(bestFriend) { - return { - name: randomStringWithSpecialChars(12), - foo: { - bestFriend - } - }; -} -export function humanWithTimestampData(givenData = {}) { - var ret = { - id: randomStringWithSpecialChars(12), - name: faker.person.firstName(), - age: randomNumber(1, 100), - // use some time in the past week - updatedAt: Date.now() - }; - ret = Object.assign({}, ret, givenData); - return ret; -} -var averageSchemaForFieldLength = schemas.averageSchema(); -export function averageSchemaData(partial = {}) { - return Object.assign({}, { - id: randomStringWithSpecialChars(ensureNotFalsy(averageSchemaForFieldLength.properties.id.maxLength)), - var1: randomStringWithSpecialChars(ensureNotFalsy(averageSchemaForFieldLength.properties.var1.maxLength)), - var2: randomNumber(100, 
ensureNotFalsy(averageSchemaForFieldLength.properties.var2.maximum)), - deep: { - deep1: randomStringWithSpecialChars(ensureNotFalsy(averageSchemaForFieldLength.properties.deep.properties.deep1.maxLength)), - deep2: randomStringWithSpecialChars(ensureNotFalsy(averageSchemaForFieldLength.properties.deep.properties.deep2.maxLength)), - deeper: { - deepNr: randomNumber(0, 10) - } - }, - list: new Array(5).fill(0).map(() => ({ - deep1: randomStringWithSpecialChars(5), - deep2: randomStringWithSpecialChars(8) - })) - }, partial); -} -export function pointData() { - return { - id: randomStringWithSpecialChars(12), - x: faker.number.int(), - y: faker.number.int() - }; -} -export function humanWithIdAndAgeIndexDocumentType(age = randomNumber(1, 100)) { - return { - id: randomStringWithSpecialChars(12), - name: faker.person.firstName(), - age - }; -} -export function humanWithCompositePrimary(partial = {}) { - var defaultObj = { - firstName: faker.person.firstName(), - lastName: faker.person.lastName(), - info: { - age: randomNumber(10, 50) - } - }; - return Object.assign(defaultObj, partial); -} -//# sourceMappingURL=schema-objects.js.map \ No newline at end of file diff --git a/dist/esm/plugins/test-utils/schema-objects.js.map b/dist/esm/plugins/test-utils/schema-objects.js.map deleted file mode 100644 index 2b848340e88..00000000000 --- a/dist/esm/plugins/test-utils/schema-objects.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"schema-objects.js","names":["faker","randomNumber","randomString","schemas","ensureNotFalsy","lastOfArray","TEST_DATA_CHARSET","TEST_DATA_CHARSET_LAST_SORTED","split","sort","randomStringWithSpecialChars","length","humanData","passportId","age","firstName","person","lastName","simpleHumanData","simpleHumanV3Data","partial","defaultObj","Object","assign","simpleHumanAge","humanWithSubOther","other","NoIndexHuman","nestedHumanData","mainSkill","name","level","deepNestedHumanData","attack","good","count","bigHumanDocumentType","dnaHash","heroArrayData","skills","Array","fill","map","damage","simpleHeroArray","encryptedHumanData","secret","encryptedObjectHumanData","subname","encryptedDeepHumanDocumentType","firstLevelPassword","secretData","pw","deepSecret","darkhole","nestedSecret","compoundIndexData","passportCountry","compoundIndexNoStringData","nostringIndex","refHumanData","bestFriend","refHumanNestedData","foo","humanWithTimestampData","givenData","ret","id","updatedAt","Date","now","averageSchemaForFieldLength","averageSchema","averageSchemaData","properties","maxLength","var1","var2","maximum","deep","deep1","deep2","deeper","deepNr","list","pointData","x","number","int","y","humanWithIdAndAgeIndexDocumentType","humanWithCompositePrimary","info"],"sources":["../../../../src/plugins/test-utils/schema-objects.ts"],"sourcesContent":["/**\n * this file contains objects which match the schemas in schemas.js\n */\n\nimport { faker } from '@faker-js/faker';\n\nimport {\n randomNumber,\n randomString\n} from 'async-test-util';\nimport { HumanDocumentType } from './schemas.ts';\nimport * as schemas from './schemas.ts';\nimport { ensureNotFalsy, lastOfArray } from '../utils/index.ts';\n\n/**\n * Some storages had problems with umlauts and other special chars.\n * So we add these to all test strings.\n * TODO add emojis\n */\nexport const TEST_DATA_CHARSET = '0987654321ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzäöüÖÄßÜ[]{}\\'';\nexport 
const TEST_DATA_CHARSET_LAST_SORTED = ensureNotFalsy(lastOfArray(TEST_DATA_CHARSET.split('').sort()));\n// const someEmojis = '😊💩👵🍌';\nexport function randomStringWithSpecialChars(length: number) {\n return randomString(length, TEST_DATA_CHARSET);\n}\n\n\nexport interface SimpleHumanDocumentType {\n passportId: string;\n firstName: string;\n lastName: string;\n}\n\nexport function humanData(\n passportId: string = randomStringWithSpecialChars(12),\n age: number = randomNumber(10, 50),\n firstName: string = faker.person.firstName()\n): HumanDocumentType {\n return {\n passportId: passportId,\n firstName,\n lastName: faker.person.lastName(),\n age\n };\n}\n\nexport function simpleHumanData(): SimpleHumanDocumentType {\n return {\n passportId: randomStringWithSpecialChars(12),\n firstName: faker.person.firstName(),\n lastName: faker.person.lastName()\n };\n}\n\nexport interface SimpleHumanV3DocumentType {\n passportId: string;\n age: number;\n oneOptional?: string;\n}\nexport function simpleHumanV3Data(partial: Partial = {}): SimpleHumanV3DocumentType {\n const defaultObj = {\n passportId: randomStringWithSpecialChars(12),\n age: randomNumber(10, 50)\n };\n return Object.assign(\n defaultObj,\n partial\n );\n}\n\nexport interface SimpleHumanAgeDocumentType {\n passportId: string;\n age: string;\n}\nexport function simpleHumanAge(partial: Partial = {}): SimpleHumanAgeDocumentType {\n const defaultObj = {\n passportId: randomStringWithSpecialChars(12),\n age: randomNumber(10, 50) + ''\n };\n return Object.assign(\n defaultObj,\n partial\n );\n}\n\nexport interface HumanWithSubOtherDocumentType {\n passportId: string;\n other: {\n age: number;\n };\n}\nexport function humanWithSubOther(): HumanWithSubOtherDocumentType {\n return {\n passportId: randomStringWithSpecialChars(12),\n other: {\n age: randomNumber(10, 50)\n }\n };\n}\n\nexport interface NoIndexHumanDocumentType {\n firstName: string;\n lastName: string;\n}\nexport function NoIndexHuman(): 
NoIndexHumanDocumentType {\n return {\n firstName: faker.person.firstName(),\n lastName: faker.person.lastName()\n };\n}\n\nexport interface NestedHumanDocumentType {\n passportId: string;\n firstName: string;\n mainSkill: {\n name: string;\n level: number;\n };\n}\nexport function nestedHumanData(partial: Partial = {}): NestedHumanDocumentType {\n const defaultObj = {\n passportId: randomStringWithSpecialChars(12),\n firstName: faker.person.firstName(),\n mainSkill: {\n name: randomStringWithSpecialChars(6),\n level: 5\n }\n };\n return Object.assign(\n defaultObj,\n partial\n );\n}\n\nexport interface DeepNestedHumanDocumentType {\n passportId: string;\n mainSkill: {\n name: string;\n attack: {\n good: boolean;\n count: number;\n };\n };\n}\nexport function deepNestedHumanData(): DeepNestedHumanDocumentType {\n return {\n passportId: randomStringWithSpecialChars(12),\n mainSkill: {\n name: randomStringWithSpecialChars(6),\n attack: {\n good: false,\n count: 5\n }\n }\n };\n}\n\nexport interface BigHumanDocumentType {\n passportId: string;\n dnaHash: string;\n firstName: string;\n lastName: string;\n age: number;\n}\nexport function bigHumanDocumentType(): BigHumanDocumentType {\n return {\n passportId: randomStringWithSpecialChars(12),\n dnaHash: randomStringWithSpecialChars(12),\n firstName: faker.person.firstName(),\n lastName: faker.person.lastName(),\n age: randomNumber(10, 50)\n };\n}\n\nexport interface HeroArrayDocumentType {\n name: string;\n skills: {\n name: string;\n damage: number;\n }[];\n}\nexport function heroArrayData(): HeroArrayDocumentType {\n return {\n name: randomStringWithSpecialChars(6),\n skills: new Array(3).fill(0).map(() => {\n return {\n name: randomStringWithSpecialChars(6),\n damage: randomNumber(10, 50)\n };\n })\n };\n}\n\nexport interface SimpleHeroArrayDocumentType {\n name: string;\n skills: string[];\n}\nexport function simpleHeroArray(partial: Partial = {}): SimpleHeroArrayDocumentType {\n const defaultObj = {\n name: 
randomStringWithSpecialChars(6),\n skills: new Array(3).fill(0).map(() => randomStringWithSpecialChars(6))\n };\n return Object.assign(\n defaultObj,\n partial\n );\n}\n\nexport interface EncryptedHumanDocumentType {\n passportId: string;\n firstName: string;\n secret: string;\n}\nexport function encryptedHumanData(secret = randomStringWithSpecialChars(12)): EncryptedHumanDocumentType {\n return {\n passportId: randomStringWithSpecialChars(12),\n firstName: faker.person.firstName(),\n secret\n };\n}\n\nexport interface EncryptedObjectHumanDocumentType {\n passportId: string;\n firstName: string;\n secret: {\n name: string;\n subname: string;\n };\n}\nexport function encryptedObjectHumanData(): EncryptedObjectHumanDocumentType {\n return {\n passportId: randomStringWithSpecialChars(12),\n firstName: faker.person.firstName(),\n secret: {\n name: randomStringWithSpecialChars(12),\n subname: randomStringWithSpecialChars(12)\n }\n };\n}\n\nexport interface EncryptedDeepHumanDocumentType {\n passportId: string;\n firstName: string;\n firstLevelPassword: string;\n secretData: {\n pw: string;\n };\n deepSecret: {\n darkhole: {\n pw: string;\n };\n };\n nestedSecret: {\n darkhole: {\n pw: string;\n };\n };\n}\nexport function encryptedDeepHumanDocumentType(): EncryptedDeepHumanDocumentType {\n return {\n passportId: randomStringWithSpecialChars(12),\n firstName: faker.person.firstName(),\n firstLevelPassword: randomStringWithSpecialChars(12),\n secretData: {\n pw: randomStringWithSpecialChars(12)\n },\n deepSecret: {\n darkhole: {\n pw: randomStringWithSpecialChars(12)\n }\n },\n nestedSecret: {\n darkhole: {\n pw: randomStringWithSpecialChars(12)\n }\n }\n };\n}\n\nexport interface CompoundIndexDocumentType {\n passportId: string;\n passportCountry: string;\n age: number;\n}\nexport function compoundIndexData(): CompoundIndexDocumentType {\n return {\n passportId: randomStringWithSpecialChars(12),\n passportCountry: randomStringWithSpecialChars(12),\n age: randomNumber(10, 
50)\n };\n}\n\nexport interface CompoundIndexNoStringDocumentType {\n passportId: string;\n passportCountry: { [prop: string]: string; };\n age: number;\n}\nexport function compoundIndexNoStringData(): CompoundIndexNoStringDocumentType {\n return {\n passportId: randomStringWithSpecialChars(12),\n passportCountry: { [randomStringWithSpecialChars(12)]: randomStringWithSpecialChars(12) },\n age: randomNumber(10, 50)\n };\n}\n\nexport interface NostringIndexDocumentType {\n passportId: {};\n firstName: string;\n}\nexport function nostringIndex(): NostringIndexDocumentType {\n return {\n passportId: {},\n firstName: faker.person.firstName()\n };\n}\n\nexport interface RefHumanDocumentType {\n name: string;\n bestFriend: string;\n}\nexport function refHumanData(bestFriend?: string): RefHumanDocumentType {\n return {\n name: randomStringWithSpecialChars(12),\n bestFriend\n } as any;\n}\n\nexport interface RefHumanNestedDocumentType {\n name: string;\n foo: {\n bestFriend: string;\n };\n}\nexport function refHumanNestedData(bestFriend?: string): RefHumanNestedDocumentType {\n return {\n name: randomStringWithSpecialChars(12),\n foo: {\n bestFriend\n } as any\n };\n}\nexport interface HumanWithTimestampNestedDocumentType extends HumanWithTimestampDocumentType {\n address?: {\n street: string;\n suite: string;\n city: string;\n zipcode: string;\n geo: {\n lat: string;\n lng: string;\n };\n }\n}\n\nexport interface HumanWithTimestampDocumentType {\n id: string;\n name: string;\n age: number;\n updatedAt: number;\n deletedAt?: number;\n}\nexport function humanWithTimestampData(givenData: Partial = {}): HumanWithTimestampDocumentType {\n let ret = {\n id: randomStringWithSpecialChars(12),\n name: faker.person.firstName(),\n age: randomNumber(1, 100),\n // use some time in the past week\n updatedAt: Date.now()\n };\n ret = Object.assign({}, ret, givenData);\n return ret;\n}\n\nexport interface AverageSchemaDocumentType {\n id: string;\n var1: string;\n var2: number;\n deep: {\n 
deep1: string;\n deep2: string;\n deeper: {\n deepNr: number;\n };\n };\n list: {\n deep1: string;\n deep2: string;\n }[];\n}\n\n\nconst averageSchemaForFieldLength = schemas.averageSchema() as any;\nexport function averageSchemaData(\n partial: Partial = {}\n): AverageSchemaDocumentType {\n return Object.assign(\n {},\n {\n id: randomStringWithSpecialChars(ensureNotFalsy(averageSchemaForFieldLength.properties.id.maxLength)),\n var1: randomStringWithSpecialChars(ensureNotFalsy(averageSchemaForFieldLength.properties.var1.maxLength)),\n var2: randomNumber(100, ensureNotFalsy(averageSchemaForFieldLength.properties.var2.maximum)),\n deep: {\n deep1: randomStringWithSpecialChars(ensureNotFalsy(averageSchemaForFieldLength.properties.deep.properties.deep1.maxLength)),\n deep2: randomStringWithSpecialChars(ensureNotFalsy(averageSchemaForFieldLength.properties.deep.properties.deep2.maxLength)),\n deeper: {\n deepNr: randomNumber(0, 10)\n }\n },\n list: new Array(5).fill(0).map(() => ({\n deep1: randomStringWithSpecialChars(5),\n deep2: randomStringWithSpecialChars(8)\n }))\n },\n partial\n );\n}\n\nexport interface PointDocumentType {\n id: string;\n x: number;\n y: number;\n}\nexport function pointData(): PointDocumentType {\n return {\n id: randomStringWithSpecialChars(12),\n x: faker.number.int(),\n y: faker.number.int()\n };\n}\n\nexport interface HumanWithIdAndAgeIndexDocumentType {\n id: string;\n name: string;\n age: number;\n}\nexport function humanWithIdAndAgeIndexDocumentType(\n age: number = randomNumber(1, 100)\n): HumanWithIdAndAgeIndexDocumentType {\n return {\n id: randomStringWithSpecialChars(12),\n name: faker.person.firstName(),\n age\n };\n}\n\nexport type HumanWithCompositePrimary = {\n // optional because it might be created by RxDB and not known before\n id?: string;\n firstName: string;\n lastName: string;\n info: {\n age: number;\n };\n};\nexport function humanWithCompositePrimary(partial: Partial = {}): HumanWithCompositePrimary {\n const defaultObj 
= {\n firstName: faker.person.firstName(),\n lastName: faker.person.lastName(),\n info: {\n age: randomNumber(10, 50)\n }\n };\n return Object.assign(\n defaultObj,\n partial\n );\n}\n"],"mappings":"AAAA;AACA;AACA;;AAEA,SAASA,KAAK,QAAQ,iBAAiB;AAEvC,SACIC,YAAY,EACZC,YAAY,QACT,iBAAiB;AAExB,OAAO,KAAKC,OAAO,MAAM,cAAc;AACvC,SAASC,cAAc,EAAEC,WAAW,QAAQ,mBAAmB;;AAE/D;AACA;AACA;AACA;AACA;AACA,OAAO,IAAMC,iBAAiB,GAAG,6EAA6E;AAC9G,OAAO,IAAMC,6BAA6B,GAAGH,cAAc,CAACC,WAAW,CAACC,iBAAiB,CAACE,KAAK,CAAC,EAAE,CAAC,CAACC,IAAI,CAAC,CAAC,CAAC,CAAC;AAC5G;AACA,OAAO,SAASC,4BAA4BA,CAACC,MAAc,EAAE;EACzD,OAAOT,YAAY,CAACS,MAAM,EAAEL,iBAAiB,CAAC;AAClD;AASA,OAAO,SAASM,SAASA,CACrBC,UAAkB,GAAGH,4BAA4B,CAAC,EAAE,CAAC,EACrDI,GAAW,GAAGb,YAAY,CAAC,EAAE,EAAE,EAAE,CAAC,EAClCc,SAAiB,GAAGf,KAAK,CAACgB,MAAM,CAACD,SAAS,CAAC,CAAC,EAC3B;EACjB,OAAO;IACHF,UAAU,EAAEA,UAAU;IACtBE,SAAS;IACTE,QAAQ,EAAEjB,KAAK,CAACgB,MAAM,CAACC,QAAQ,CAAC,CAAC;IACjCH;EACJ,CAAC;AACL;AAEA,OAAO,SAASI,eAAeA,CAAA,EAA4B;EACvD,OAAO;IACHL,UAAU,EAAEH,4BAA4B,CAAC,EAAE,CAAC;IAC5CK,SAAS,EAAEf,KAAK,CAACgB,MAAM,CAACD,SAAS,CAAC,CAAC;IACnCE,QAAQ,EAAEjB,KAAK,CAACgB,MAAM,CAACC,QAAQ,CAAC;EACpC,CAAC;AACL;AAOA,OAAO,SAASE,iBAAiBA,CAACC,OAA2C,GAAG,CAAC,CAAC,EAA6B;EAC3G,IAAMC,UAAU,GAAG;IACfR,UAAU,EAAEH,4BAA4B,CAAC,EAAE,CAAC;IAC5CI,GAAG,EAAEb,YAAY,CAAC,EAAE,EAAE,EAAE;EAC5B,CAAC;EACD,OAAOqB,MAAM,CAACC,MAAM,CAChBF,UAAU,EACVD,OACJ,CAAC;AACL;AAMA,OAAO,SAASI,cAAcA,CAACJ,OAA4C,GAAG,CAAC,CAAC,EAA8B;EAC1G,IAAMC,UAAU,GAAG;IACfR,UAAU,EAAEH,4BAA4B,CAAC,EAAE,CAAC;IAC5CI,GAAG,EAAEb,YAAY,CAAC,EAAE,EAAE,EAAE,CAAC,GAAG;EAChC,CAAC;EACD,OAAOqB,MAAM,CAACC,MAAM,CAChBF,UAAU,EACVD,OACJ,CAAC;AACL;AAQA,OAAO,SAASK,iBAAiBA,CAAA,EAAkC;EAC/D,OAAO;IACHZ,UAAU,EAAEH,4BAA4B,CAAC,EAAE,CAAC;IAC5CgB,KAAK,EAAE;MACHZ,GAAG,EAAEb,YAAY,CAAC,EAAE,EAAE,EAAE;IAC5B;EACJ,CAAC;AACL;AAMA,OAAO,SAAS0B,YAAYA,CAAA,EAA6B;EACrD,OAAO;IACHZ,SAAS,EAAEf,KAAK,CAACgB,MAAM,CAACD,SAAS,CAAC,CAAC;IACnCE,QAAQ,EAAEjB,KAAK,CAACgB,MAAM,CAACC,QAAQ,CAAC;EACpC,CAAC;AACL;AAUA,OAAO,SAASW,eAAeA,CAACR,OAAyC,GAAG,CAAC,CAAC,EAA2B;EA
CrG,IAAMC,UAAU,GAAG;IACfR,UAAU,EAAEH,4BAA4B,CAAC,EAAE,CAAC;IAC5CK,SAAS,EAAEf,KAAK,CAACgB,MAAM,CAACD,SAAS,CAAC,CAAC;IACnCc,SAAS,EAAE;MACPC,IAAI,EAAEpB,4BAA4B,CAAC,CAAC,CAAC;MACrCqB,KAAK,EAAE;IACX;EACJ,CAAC;EACD,OAAOT,MAAM,CAACC,MAAM,CAChBF,UAAU,EACVD,OACJ,CAAC;AACL;AAYA,OAAO,SAASY,mBAAmBA,CAAA,EAAgC;EAC/D,OAAO;IACHnB,UAAU,EAAEH,4BAA4B,CAAC,EAAE,CAAC;IAC5CmB,SAAS,EAAE;MACPC,IAAI,EAAEpB,4BAA4B,CAAC,CAAC,CAAC;MACrCuB,MAAM,EAAE;QACJC,IAAI,EAAE,KAAK;QACXC,KAAK,EAAE;MACX;IACJ;EACJ,CAAC;AACL;AASA,OAAO,SAASC,oBAAoBA,CAAA,EAAyB;EACzD,OAAO;IACHvB,UAAU,EAAEH,4BAA4B,CAAC,EAAE,CAAC;IAC5C2B,OAAO,EAAE3B,4BAA4B,CAAC,EAAE,CAAC;IACzCK,SAAS,EAAEf,KAAK,CAACgB,MAAM,CAACD,SAAS,CAAC,CAAC;IACnCE,QAAQ,EAAEjB,KAAK,CAACgB,MAAM,CAACC,QAAQ,CAAC,CAAC;IACjCH,GAAG,EAAEb,YAAY,CAAC,EAAE,EAAE,EAAE;EAC5B,CAAC;AACL;AASA,OAAO,SAASqC,aAAaA,CAAA,EAA0B;EACnD,OAAO;IACHR,IAAI,EAAEpB,4BAA4B,CAAC,CAAC,CAAC;IACrC6B,MAAM,EAAE,IAAIC,KAAK,CAAC,CAAC,CAAC,CAACC,IAAI,CAAC,CAAC,CAAC,CAACC,GAAG,CAAC,MAAM;MACnC,OAAO;QACHZ,IAAI,EAAEpB,4BAA4B,CAAC,CAAC,CAAC;QACrCiC,MAAM,EAAE1C,YAAY,CAAC,EAAE,EAAE,EAAE;MAC/B,CAAC;IACL,CAAC;EACL,CAAC;AACL;AAMA,OAAO,SAAS2C,eAAeA,CAACxB,OAA6C,GAAG,CAAC,CAAC,EAA+B;EAC7G,IAAMC,UAAU,GAAG;IACfS,IAAI,EAAEpB,4BAA4B,CAAC,CAAC,CAAC;IACrC6B,MAAM,EAAE,IAAIC,KAAK,CAAC,CAAC,CAAC,CAACC,IAAI,CAAC,CAAC,CAAC,CAACC,GAAG,CAAC,MAAMhC,4BAA4B,CAAC,CAAC,CAAC;EAC1E,CAAC;EACD,OAAOY,MAAM,CAACC,MAAM,CAChBF,UAAU,EACVD,OACJ,CAAC;AACL;AAOA,OAAO,SAASyB,kBAAkBA,CAACC,MAAM,GAAGpC,4BAA4B,CAAC,EAAE,CAAC,EAA8B;EACtG,OAAO;IACHG,UAAU,EAAEH,4BAA4B,CAAC,EAAE,CAAC;IAC5CK,SAAS,EAAEf,KAAK,CAACgB,MAAM,CAACD,SAAS,CAAC,CAAC;IACnC+B;EACJ,CAAC;AACL;AAUA,OAAO,SAASC,wBAAwBA,CAAA,EAAqC;EACzE,OAAO;IACHlC,UAAU,EAAEH,4BAA4B,CAAC,EAAE,CAAC;IAC5CK,SAAS,EAAEf,KAAK,CAACgB,MAAM,CAACD,SAAS,CAAC,CAAC;IACnC+B,MAAM,EAAE;MACJhB,IAAI,EAAEpB,4BAA4B,CAAC,EAAE,CAAC;MACtCsC,OAAO,EAAEtC,4BAA4B,CAAC,EAAE;IAC5C;EACJ,CAAC;AACL;AAoBA,OAAO,SAASuC,8BAA8BA,CAAA,EAAmC;EAC7E,OAAO;IACHpC,UAAU,EAAEH,4BAA4B,CAAC,EAAE,CAAC;IAC5CK,SAAS,EAAEf,KAAK,CAACgB,MAAM,CAACD,SAAS,CAAC,
CAAC;IACnCmC,kBAAkB,EAAExC,4BAA4B,CAAC,EAAE,CAAC;IACpDyC,UAAU,EAAE;MACRC,EAAE,EAAE1C,4BAA4B,CAAC,EAAE;IACvC,CAAC;IACD2C,UAAU,EAAE;MACRC,QAAQ,EAAE;QACNF,EAAE,EAAE1C,4BAA4B,CAAC,EAAE;MACvC;IACJ,CAAC;IACD6C,YAAY,EAAE;MACVD,QAAQ,EAAE;QACNF,EAAE,EAAE1C,4BAA4B,CAAC,EAAE;MACvC;IACJ;EACJ,CAAC;AACL;AAOA,OAAO,SAAS8C,iBAAiBA,CAAA,EAA8B;EAC3D,OAAO;IACH3C,UAAU,EAAEH,4BAA4B,CAAC,EAAE,CAAC;IAC5C+C,eAAe,EAAE/C,4BAA4B,CAAC,EAAE,CAAC;IACjDI,GAAG,EAAEb,YAAY,CAAC,EAAE,EAAE,EAAE;EAC5B,CAAC;AACL;AAOA,OAAO,SAASyD,yBAAyBA,CAAA,EAAsC;EAC3E,OAAO;IACH7C,UAAU,EAAEH,4BAA4B,CAAC,EAAE,CAAC;IAC5C+C,eAAe,EAAE;MAAE,CAAC/C,4BAA4B,CAAC,EAAE,CAAC,GAAGA,4BAA4B,CAAC,EAAE;IAAE,CAAC;IACzFI,GAAG,EAAEb,YAAY,CAAC,EAAE,EAAE,EAAE;EAC5B,CAAC;AACL;AAMA,OAAO,SAAS0D,aAAaA,CAAA,EAA8B;EACvD,OAAO;IACH9C,UAAU,EAAE,CAAC,CAAC;IACdE,SAAS,EAAEf,KAAK,CAACgB,MAAM,CAACD,SAAS,CAAC;EACtC,CAAC;AACL;AAMA,OAAO,SAAS6C,YAAYA,CAACC,UAAmB,EAAwB;EACpE,OAAO;IACH/B,IAAI,EAAEpB,4BAA4B,CAAC,EAAE,CAAC;IACtCmD;EACJ,CAAC;AACL;AAQA,OAAO,SAASC,kBAAkBA,CAACD,UAAmB,EAA8B;EAChF,OAAO;IACH/B,IAAI,EAAEpB,4BAA4B,CAAC,EAAE,CAAC;IACtCqD,GAAG,EAAE;MACDF;IACJ;EACJ,CAAC;AACL;AAqBA,OAAO,SAASG,sBAAsBA,CAACC,SAAkD,GAAG,CAAC,CAAC,EAAkC;EAC5H,IAAIC,GAAG,GAAG;IACNC,EAAE,EAAEzD,4BAA4B,CAAC,EAAE,CAAC;IACpCoB,IAAI,EAAE9B,KAAK,CAACgB,MAAM,CAACD,SAAS,CAAC,CAAC;IAC9BD,GAAG,EAAEb,YAAY,CAAC,CAAC,EAAE,GAAG,CAAC;IACzB;IACAmE,SAAS,EAAEC,IAAI,CAACC,GAAG,CAAC;EACxB,CAAC;EACDJ,GAAG,GAAG5C,MAAM,CAACC,MAAM,CAAC,CAAC,CAAC,EAAE2C,GAAG,EAAED,SAAS,CAAC;EACvC,OAAOC,GAAG;AACd;AAoBA,IAAMK,2BAA2B,GAAGpE,OAAO,CAACqE,aAAa,CAAC,CAAQ;AAClE,OAAO,SAASC,iBAAiBA,CAC7BrD,OAA2C,GAAG,CAAC,CAAC,EACvB;EACzB,OAAOE,MAAM,CAACC,MAAM,CAChB,CAAC,CAAC,EACF;IACI4C,EAAE,EAAEzD,4BAA4B,CAACN,cAAc,CAACmE,2BAA2B,CAACG,UAAU,CAACP,EAAE,CAACQ,SAAS,CAAC,CAAC;IACrGC,IAAI,EAAElE,4BAA4B,CAACN,cAAc,CAACmE,2BAA2B,CAACG,UAAU,CAACE,IAAI,CAACD,SAAS,CAAC,CAAC;IACzGE,IAAI,EAAE5E,YAAY,CAAC,GAAG,EAAEG,cAAc,CAACmE,2BAA2B,CAACG,UAAU,CAACG,IAAI,CAACC,OAAO,CAAC,CAAC;IAC5FC,IAAI,EAAE;MACFC,KAAK,EAAEtE,4BAA4B,CAACN,cAAc,CAACmE,2BAA2
B,CAACG,UAAU,CAACK,IAAI,CAACL,UAAU,CAACM,KAAK,CAACL,SAAS,CAAC,CAAC;MAC3HM,KAAK,EAAEvE,4BAA4B,CAACN,cAAc,CAACmE,2BAA2B,CAACG,UAAU,CAACK,IAAI,CAACL,UAAU,CAACO,KAAK,CAACN,SAAS,CAAC,CAAC;MAC3HO,MAAM,EAAE;QACJC,MAAM,EAAElF,YAAY,CAAC,CAAC,EAAE,EAAE;MAC9B;IACJ,CAAC;IACDmF,IAAI,EAAE,IAAI5C,KAAK,CAAC,CAAC,CAAC,CAACC,IAAI,CAAC,CAAC,CAAC,CAACC,GAAG,CAAC,OAAO;MAClCsC,KAAK,EAAEtE,4BAA4B,CAAC,CAAC,CAAC;MACtCuE,KAAK,EAAEvE,4BAA4B,CAAC,CAAC;IACzC,CAAC,CAAC;EACN,CAAC,EACDU,OACJ,CAAC;AACL;AAOA,OAAO,SAASiE,SAASA,CAAA,EAAsB;EAC3C,OAAO;IACHlB,EAAE,EAAEzD,4BAA4B,CAAC,EAAE,CAAC;IACpC4E,CAAC,EAAEtF,KAAK,CAACuF,MAAM,CAACC,GAAG,CAAC,CAAC;IACrBC,CAAC,EAAEzF,KAAK,CAACuF,MAAM,CAACC,GAAG,CAAC;EACxB,CAAC;AACL;AAOA,OAAO,SAASE,kCAAkCA,CAC9C5E,GAAW,GAAGb,YAAY,CAAC,CAAC,EAAE,GAAG,CAAC,EACA;EAClC,OAAO;IACHkE,EAAE,EAAEzD,4BAA4B,CAAC,EAAE,CAAC;IACpCoB,IAAI,EAAE9B,KAAK,CAACgB,MAAM,CAACD,SAAS,CAAC,CAAC;IAC9BD;EACJ,CAAC;AACL;AAWA,OAAO,SAAS6E,yBAAyBA,CAACvE,OAA2C,GAAG,CAAC,CAAC,EAA6B;EACnH,IAAMC,UAAU,GAAG;IACfN,SAAS,EAAEf,KAAK,CAACgB,MAAM,CAACD,SAAS,CAAC,CAAC;IACnCE,QAAQ,EAAEjB,KAAK,CAACgB,MAAM,CAACC,QAAQ,CAAC,CAAC;IACjC2E,IAAI,EAAE;MACF9E,GAAG,EAAEb,YAAY,CAAC,EAAE,EAAE,EAAE;IAC5B;EACJ,CAAC;EACD,OAAOqB,MAAM,CAACC,MAAM,CAChBF,UAAU,EACVD,OACJ,CAAC;AACL","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/test-utils/schemas.js b/dist/esm/plugins/test-utils/schemas.js deleted file mode 100644 index ac471f773d4..00000000000 --- a/dist/esm/plugins/test-utils/schemas.js +++ /dev/null @@ -1,1205 +0,0 @@ -import AsyncTestUtil from 'async-test-util'; -import { overwritable } from "../../overwritable.js"; -import { toTypedRxJsonSchema } from "../../rx-schema.js"; -import { flatClone } from "../utils/index.js"; -export var humanSchemaLiteral = overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - description: 'describes a human being', - version: 0, - keyCompression: false, - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 
- }, - firstName: { - type: 'string', - maxLength: 100 - }, - lastName: { - type: 'string', - maxLength: 100 - }, - age: { - description: 'age in years', - type: 'integer', - minimum: 0, - maximum: 150, - multipleOf: 1 - } - }, - required: ['firstName', 'lastName', 'passportId', 'age'], - indexes: ['firstName'] -}); -var humanSchemaTyped = toTypedRxJsonSchema(humanSchemaLiteral); -export var human = humanSchemaLiteral; -export var humanDefault = overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - version: 0, - description: 'describes a human being', - keyCompression: false, - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - firstName: { - type: 'string', - maxLength: 100 - }, - lastName: { - type: 'string', - maxLength: 100 - }, - age: { - description: 'age in years', - type: 'integer', - minimum: 0, - maximum: 150, - default: 20 - } - }, - indexes: [], - required: ['passportId'] -}); -export var humanFinal = overwritable.deepFreezeWhenDevMode({ - title: 'human schema with age set final', - version: 0, - keyCompression: false, - type: 'object', - primaryKey: 'passportId', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - firstName: { - type: 'string' - }, - lastName: { - type: 'string' - }, - age: { - type: 'integer', - minimum: 0, - maximum: 150, - final: true - } - }, - required: ['passportId'] -}); -export var simpleHuman = overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - version: 0, - keyCompression: false, - description: 'describes a simple human being', - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - age: { - type: 'string', - maxLength: 100 - }, - oneOptional: { - type: 'string' - } - }, - indexes: ['age'], - required: ['passportId', 'age'] -}); -export var simpleHumanV3 = overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - version: 3, - keyCompression: 
false, - description: 'describes a simple human being', - type: 'object', - primaryKey: 'passportId', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - age: { - type: 'number', - minimum: 0, - maximum: 1000, - multipleOf: 1 - }, - oneOptional: { - type: 'string' - } - }, - indexes: ['age'], - required: ['passportId', 'age'] -}); -export var humanAgeIndex = overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - version: 0, - keyCompression: false, - description: 'describes a human being', - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - firstName: { - type: 'string' - }, - lastName: { - type: 'string' - }, - age: { - description: 'Age in years', - type: 'integer', - minimum: 0, - maximum: 150, - multipleOf: 1 - } - }, - required: ['firstName', 'lastName', 'age'], - indexes: ['age'] -}); -export var humanSubIndex = overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - version: 0, - description: 'describes a human being where other.age is index', - keyCompression: false, - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - other: { - type: 'object', - properties: { - age: { - description: 'Age in years', - type: 'integer', - minimum: 0, - maximum: 150, - multipleOf: 1 - } - } - } - }, - required: ['passportId'], - indexes: ['other.age'] -}); - -/** - * each field is an index, - * use this to slow down inserts in tests - */ -export var humanWithAllIndex = overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - description: 'describes a human being', - version: 0, - keyCompression: false, - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - firstName: { - type: 'string', - maxLength: 100 - }, - lastName: { - type: 'string', - maxLength: 100 - }, - age: { - description: 'age in years', - type: 'integer', - minimum: 0, - 
maximum: 150, - multipleOf: 1 - } - }, - indexes: ['firstName', 'lastName', 'age'], - required: ['firstName', 'lastName'] -}); -export var nestedHuman = { - title: 'human nested', - version: 0, - description: 'describes a human being with a nested field', - keyCompression: false, - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - firstName: { - type: 'string', - maxLength: 100 - }, - mainSkill: { - type: 'object', - properties: { - name: { - type: 'string', - maxLength: 10 - }, - level: { - type: 'number', - minimum: 0, - maximum: 10, - multipleOf: 1 - } - }, - required: ['name', 'level'], - additionalProperties: false - } - }, - required: ['firstName'], - indexes: [] -}; -export var deepNestedHuman = { - title: 'deep human nested', - version: 0, - keyCompression: false, - description: 'describes a human being with a nested field', - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - mainSkill: { - type: 'object', - properties: { - name: { - type: 'string' - }, - attack: { - type: 'object', - properties: { - good: { - type: 'boolean' - }, - count: { - type: 'number' - } - } - } - }, - required: ['name'] - } - }, - indexes: [], - required: ['mainSkill'] -}; -export var noIndexHuman = overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - version: 0, - description: 'this schema has no index', - keyCompression: false, - primaryKey: 'firstName', - type: 'object', - properties: { - firstName: { - type: 'string', - maxLength: 100 - }, - lastName: { - type: 'string' - } - }, - required: ['lastName'] -}); -export var noStringIndex = overwritable.deepFreezeWhenDevMode({ - description: 'the index has no type:string', - version: 0, - keyCompression: false, - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'object', - maxLength: 100 - }, - firstName: { - type: 'string' - } - }, - required: 
['firstName', 'passportId'], - indexes: [] -}); -export var bigHuman = overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - version: 0, - description: 'describes a human being with 2 indexes', - keyCompression: false, - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - dnaHash: { - type: 'string', - maxLength: 100 - }, - firstName: { - type: 'string', - maxLength: 100 - }, - lastName: { - type: 'string' - }, - age: { - description: 'Age in years', - type: 'integer', - minimum: 0 - } - }, - required: ['firstName', 'lastName'], - indexes: ['firstName', 'dnaHash'] -}); -export var encryptedHuman = { - title: 'human encrypted', - version: 0, - description: 'uses an encrypted field', - primaryKey: 'passportId', - type: 'object', - keyCompression: false, - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - firstName: { - type: 'string' - }, - secret: { - type: 'string' - } - }, - indexes: [], - required: ['firstName', 'secret'], - encrypted: ['secret'] -}; -export var encryptedObjectHuman = { - title: 'human encrypted', - version: 0, - keyCompression: false, - description: 'uses an encrypted field', - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - firstName: { - type: 'string' - }, - secret: { - type: 'object', - properties: { - name: { - type: 'string' - }, - subname: { - type: 'string' - } - } - } - }, - indexes: [], - required: ['firstName', 'secret'], - encrypted: ['secret'] -}; -export var encryptedDeepHuman = { - title: 'human encrypted', - version: 0, - keyCompression: false, - description: 'uses an encrypted field', - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - firstName: { - type: 'string' - }, - firstLevelPassword: { - type: 'string' - }, - secretData: { - type: 'object', - properties: { - pw: { - type: 'string' - } - } 
- }, - deepSecret: { - type: 'object', - properties: { - darkhole: { - type: 'object', - properties: { - pw: { - type: 'string' - } - } - } - } - }, - nestedSecret: { - type: 'object', - properties: { - darkhole: { - type: 'object', - properties: { - pw: { - type: 'string' - } - } - } - } - } - }, - indexes: [], - required: ['firstName', 'secretData'], - encrypted: ['firstLevelPassword', 'secretData', 'deepSecret.darkhole.pw', 'nestedSecret.darkhole.pw'] -}; -export var notExistingIndex = { - title: 'index', - version: 0, - description: 'this schema has a specified index which does not exists', - primaryKey: 'passportId', - type: 'object', - keyCompression: false, - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - address: { - type: 'object', - properties: { - street: { - type: 'string' - } - } - } - }, - required: ['passportId'], - indexes: ['address.apartment'] -}; -export var compoundIndex = overwritable.deepFreezeWhenDevMode({ - title: 'compound index', - version: 0, - description: 'this schema has a compoundIndex', - primaryKey: 'passportId', - type: 'object', - keyCompression: false, - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - passportCountry: { - type: 'string', - maxLength: 100 - }, - age: { - type: 'integer', - minimum: 0, - maximum: 150, - multipleOf: 1 - } - }, - required: ['passportId'], - indexes: [['age', 'passportCountry']] -}); -export var compoundIndexNoString = { - title: 'compound index', - version: 0, - description: 'this schema has a compoundIndex', - primaryKey: 'passportId', - keyCompression: false, - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - passportCountry: { - type: 'object' - }, - age: { - type: 'integer' - } - }, - indexes: [[10, 'passportCountry']] -}; -export var empty = { - title: 'empty schema', - version: 0, - type: 'object', - primaryKey: 'id', - properties: { - id: { - type: 'string', - maxLength: 100 - } - }, - required: ['id'] 
-}; -export var heroArray = overwritable.deepFreezeWhenDevMode({ - title: 'hero schema', - version: 0, - keyCompression: false, - description: 'describes a hero with an array-field', - primaryKey: 'name', - type: 'object', - properties: { - name: { - type: 'string', - maxLength: 100 - }, - skills: { - type: 'array', - maxItems: 5, - uniqueItems: true, - items: { - type: 'object', - properties: { - name: { - type: 'string' - }, - damage: { - type: 'number' - } - } - } - } - }, - required: ['name'] -}); -export var simpleArrayHero = overwritable.deepFreezeWhenDevMode({ - title: 'hero schema', - version: 0, - description: 'describes a hero with a string-array-field', - keyCompression: false, - primaryKey: 'name', - type: 'object', - properties: { - name: { - type: 'string', - maxLength: 100 - }, - skills: { - type: 'array', - maxItems: 5, - uniqueItems: true, - items: { - type: 'string' - } - } - }, - required: ['name'] -}); -export var primaryHumanLiteral = overwritable.deepFreezeWhenDevMode({ - title: 'human schema with primary', - version: 0, - description: 'describes a human being with passportID as primary', - keyCompression: false, - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - minLength: 4, - maxLength: 100 - }, - firstName: { - type: 'string', - maxLength: 100 - }, - lastName: { - type: 'string', - maxLength: 500 - }, - age: { - type: 'integer', - minimum: 0, - maximum: 150, - multipleOf: 1 - } - }, - required: ['passportId', 'firstName', 'lastName'] -}); -var primaryHumanTypedSchema = toTypedRxJsonSchema(primaryHumanLiteral); -export var primaryHuman = primaryHumanLiteral; -export var humanNormalizeSchema1Literal = overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - version: 0, - keyCompression: false, - description: 'describes a human being', - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - minLength: 4, - maxLength: 100 - }, - age: { - 
description: 'age in years', - type: 'integer', - minimum: 0, - maximum: 150, - multipleOf: 1 - } - }, - required: ['age', 'passportId'] -}); -var humanNormalizeSchema1Typed = toTypedRxJsonSchema(humanNormalizeSchema1Literal); -export var humanNormalizeSchema1 = humanNormalizeSchema1Literal; -export var humanNormalizeSchema2 = overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - version: 0, - keyCompression: false, - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - minLength: 4, - maxLength: 100 - }, - age: { - minimum: 0, - type: 'integer', - description: 'age in years', - maximum: 150, - multipleOf: 1 - } - }, - description: 'describes a human being', - required: ['age', 'passportId'] -}); -export var refHuman = overwritable.deepFreezeWhenDevMode({ - title: 'human related to other human', - version: 0, - keyCompression: false, - primaryKey: 'name', - type: 'object', - properties: { - name: { - type: 'string', - maxLength: 100 - }, - bestFriend: { - ref: 'human', - type: 'string' - } - }, - required: ['name'] -}); -export var humanCompositePrimary = { - title: 'human schema', - description: 'describes a human being', - version: 0, - keyCompression: false, - primaryKey: { - key: 'id', - fields: ['firstName', 'info.age'], - separator: '|' - }, - type: 'object', - properties: { - id: { - type: 'string', - maxLength: 100 - }, - firstName: { - type: 'string', - maxLength: 100 - }, - lastName: { - type: 'string' - }, - info: { - type: 'object', - properties: { - age: { - description: 'age in years', - type: 'integer', - minimum: 0, - maximum: 150 - } - }, - required: ['age'] - } - }, - required: ['id', 'firstName', 'lastName', 'info'], - indexes: ['firstName'] -}; -export var humanCompositePrimarySchemaLiteral = overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - description: 'describes a human being', - version: 0, - keyCompression: false, - primaryKey: { - key: 'id', - fields: ['firstName', 
'info.age'], - separator: '|' - }, - encrypted: [], - type: 'object', - properties: { - id: { - type: 'string', - maxLength: 100 - }, - firstName: { - type: 'string', - maxLength: 100 - }, - lastName: { - type: 'string' - }, - info: { - type: 'object', - properties: { - age: { - description: 'age in years', - type: 'integer', - minimum: 0, - maximum: 150 - } - }, - required: ['age'] - }, - readonlyProps: { - allOf: [], - anyOf: [], - oneOf: [], - type: [], - dependencies: { - someDep: ['asd'] - }, - items: [], - required: [], - enum: [] - } - }, - required: ['id', 'firstName', 'lastName', 'info'], - indexes: ['firstName'] -}); -var humanCompositePrimarySchemaTyped = toTypedRxJsonSchema(humanCompositePrimarySchemaLiteral); -export var refHumanNested = overwritable.deepFreezeWhenDevMode({ - title: 'human related to other human', - version: 0, - keyCompression: false, - primaryKey: 'name', - type: 'object', - properties: { - name: { - type: 'string', - maxLength: 100 - }, - foo: { - type: 'object', - properties: { - bestFriend: { - ref: 'human', - type: 'string' - } - } - } - }, - required: ['name'] -}); - -/** - * an average schema used in performance-tests - */ -export function averageSchema() { - var ret = { - title: 'averageSchema_' + AsyncTestUtil.randomString(5), - // randomisation used so hash differs - version: 0, - primaryKey: 'id', - type: 'object', - keyCompression: false, - properties: { - id: { - type: 'string', - maxLength: 12 - }, - var1: { - type: 'string', - maxLength: 12 - }, - var2: { - type: 'number', - minimum: 0, - maximum: 50000, - multipleOf: 1 - }, - deep: { - type: 'object', - properties: { - deep1: { - type: 'string', - maxLength: 10 - }, - deep2: { - type: 'string', - maxLength: 10 - } - } - }, - list: { - type: 'array', - items: { - type: 'object', - properties: { - deep1: { - type: 'string' - }, - deep2: { - type: 'string' - } - } - } - } - }, - required: ['id'], - indexes: ['var1', 'var2', 'deep.deep1', - // one compound index - ['var2', 
'var1']], - sharding: { - shards: 6, - mode: 'collection' - } - }; - return ret; -} -export var point = overwritable.deepFreezeWhenDevMode({ - title: 'point schema', - version: 0, - description: 'describes coordinates in 2d space', - primaryKey: 'id', - type: 'object', - properties: { - id: { - type: 'string', - maxLength: 100 - }, - x: { - type: 'number' - }, - y: { - type: 'number' - } - }, - required: ['x', 'y'] -}); -export var humanMinimal = overwritable.deepFreezeWhenDevMode({ - title: 'human schema', - description: 'describes a human being', - version: 0, - keyCompression: false, - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - age: { - type: 'integer' - }, - oneOptional: { - type: 'string' - } - }, - indexes: [], - required: ['passportId', 'age'] -}); -export var humanMinimalBroken = { - title: 'human schema', - description: 'describes a human being', - version: 0, - keyCompression: false, - primaryKey: 'passportId', - type: 'object', - properties: { - passportId: { - type: 'string', - maxLength: 100 - }, - broken: { - type: 'integer' - } - }, - indexes: [], - required: ['passportId', 'broken'] -}; - -/** - * used in the graphql-test - * contains timestamp - */ -export var humanWithTimestamp = overwritable.deepFreezeWhenDevMode({ - version: 0, - type: 'object', - primaryKey: 'id', - properties: { - id: { - type: 'string', - maxLength: 100 - }, - name: { - type: 'string', - maxLength: 1000 - }, - age: { - type: 'number' - }, - updatedAt: { - type: 'number', - minimum: 0, - maximum: 10000000000000000, - multipleOf: 1 - }, - deletedAt: { - type: 'number' - } - }, - indexes: ['updatedAt'], - required: ['id', 'name', 'age', 'updatedAt'] -}); -export var humanWithTimestampNested = overwritable.deepFreezeWhenDevMode({ - version: 0, - type: 'object', - primaryKey: 'id', - properties: { - id: { - type: 'string', - maxLength: 100 - }, - name: { - type: 'string', - maxLength: 1000 - }, - age: { - 
type: 'number' - }, - updatedAt: { - type: 'number', - minimum: 0, - maximum: 10000000000000000, - multipleOf: 1 - }, - deletedAt: { - type: 'number' - }, - address: { - type: 'object', - properties: { - street: { - type: 'string' - }, - suite: { - type: 'string' - }, - city: { - type: 'string' - }, - zipcode: { - type: 'string' - }, - geo: { - type: 'object', - properties: { - lat: { - type: 'string' - }, - lng: { - type: 'string' - } - } - } - } - } - }, - indexes: ['updatedAt'], - required: ['id', 'name', 'age', 'updatedAt'] -}); - -/** - * each field is an index, - * use this to slow down inserts in tests - */ -export var humanWithTimestampAllIndex = overwritable.deepFreezeWhenDevMode({ - version: 0, - type: 'object', - primaryKey: 'id', - properties: { - id: { - type: 'string', - maxLength: 100 - }, - name: { - type: 'string', - maxLength: 100 - }, - age: { - type: 'number', - minimum: 0, - maximum: 1500, - multipleOf: 1 - }, - updatedAt: { - type: 'number', - minimum: 0, - maximum: 10000000000000000, - multipleOf: 1 - }, - deletedAt: { - type: 'number' - } - }, - indexes: ['name', 'age', 'updatedAt'], - required: ['id', 'name', 'age', 'updatedAt'] -}); -export var humanWithSimpleAndCompoundIndexes = overwritable.deepFreezeWhenDevMode({ - version: 0, - primaryKey: 'id', - type: 'object', - properties: { - id: { - type: 'string', - maxLength: 100 - }, - name: { - type: 'string', - maxLength: 100 - }, - age: { - type: 'number', - minimum: 0, - maximum: 1500, - multipleOf: 1 - }, - createdAt: { - type: 'number', - minimum: 0, - maximum: 10000000000000000, - multipleOf: 1 - }, - updatedAt: { - type: 'number', - minimum: 0, - maximum: 10000000000000000, - multipleOf: 1 - } - }, - indexes: [['name', 'id'], ['age', 'id'], ['createdAt', 'updatedAt', 'id']], - required: ['id', 'name', 'age', 'updatedAt'] -}); -export var humanWithDeepNestedIndexes = overwritable.deepFreezeWhenDevMode({ - version: 0, - primaryKey: 'id', - type: 'object', - properties: { - id: { - type: 
'string', - maxLength: 100 - }, - name: { - type: 'string', - maxLength: 100 - }, - job: { - type: 'object', - properties: { - name: { - type: 'string', - maxLength: 100 - }, - manager: { - type: 'object', - properties: { - fullName: { - type: 'string', - maxLength: 100 - }, - previousJobs: { - type: 'array', - items: { - type: 'object', - properties: { - name: { - type: 'string', - maxLength: 100 - } - } - } - } - } - } - } - } - }, - required: ['id'], - indexes: ['name', 'job.name', 'job.manager.fullName'] -}); -export var humanIdAndAgeIndex = overwritable.deepFreezeWhenDevMode({ - version: 0, - description: 'uses a compound index with id as lowest level', - primaryKey: 'id', - type: 'object', - properties: { - id: { - type: 'string', - maxLength: 100 - }, - name: { - type: 'string' - }, - age: { - description: 'Age in years', - type: 'integer', - minimum: 0, - maximum: 150, - multipleOf: 1 - } - }, - required: ['id', 'name', 'age'], - indexes: [['age', 'id']] -}); -export function enableKeyCompression(schema) { - var ret = flatClone(schema); - ret.keyCompression = true; - return ret; -} -//# sourceMappingURL=schemas.js.map \ No newline at end of file diff --git a/dist/esm/plugins/test-utils/schemas.js.map b/dist/esm/plugins/test-utils/schemas.js.map deleted file mode 100644 index 8301400614b..00000000000 --- a/dist/esm/plugins/test-utils/schemas.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"schemas.js","names":["AsyncTestUtil","overwritable","toTypedRxJsonSchema","flatClone","humanSchemaLiteral","deepFreezeWhenDevMode","title","description","version","keyCompression","primaryKey","type","properties","passportId","maxLength","firstName","lastName","age","minimum","maximum","multipleOf","required","indexes","humanSchemaTyped","human","humanDefault","default","humanFinal","final","simpleHuman","oneOptional","simpleHumanV3","humanAgeIndex","humanSubIndex","other","humanWithAllIndex","nestedHuman","mainSkill","name","level","additionalProperties","deepNestedHuman","attack","good","count","noIndexHuman","noStringIndex","bigHuman","dnaHash","encryptedHuman","secret","encrypted","encryptedObjectHuman","subname","encryptedDeepHuman","firstLevelPassword","secretData","pw","deepSecret","darkhole","nestedSecret","notExistingIndex","address","street","compoundIndex","passportCountry","compoundIndexNoString","empty","id","heroArray","skills","maxItems","uniqueItems","items","damage","simpleArrayHero","primaryHumanLiteral","minLength","primaryHumanTypedSchema","primaryHuman","humanNormalizeSchema1Literal","humanNormalizeSchema1Typed","humanNormalizeSchema1","humanNormalizeSchema2","refHuman","bestFriend","ref","humanCompositePrimary","key","fields","separator","info","humanCompositePrimarySchemaLiteral","readonlyProps","allOf","anyOf","oneOf","dependencies","someDep","enum","humanCompositePrimarySchemaTyped","refHumanNested","foo","averageSchema","ret","randomString","var1","var2","deep","deep1","deep2","list","sharding","shards","mode","point","x","y","humanMinimal","humanMinimalBroken","broken","humanWithTimestamp","updatedAt","deletedAt","humanWithTimestampNested","suite","city","zipcode","geo","lat","lng","humanWithTimestampAllIndex","humanWithSimpleAndCompoundIndexes","createdAt","humanWithDeepNestedIndexes","job","manager","fullName","previousJobs","humanIdAndAgeIndex","enableKeyCompression","schema"],"sources":["../../../../src/plugins/te
st-utils/schemas.ts"],"sourcesContent":["import AsyncTestUtil from 'async-test-util';\n\nimport {\n SimpleHumanV3DocumentType,\n HumanWithSubOtherDocumentType,\n NestedHumanDocumentType,\n DeepNestedHumanDocumentType,\n EncryptedHumanDocumentType,\n EncryptedObjectHumanDocumentType,\n EncryptedDeepHumanDocumentType,\n CompoundIndexDocumentType,\n CompoundIndexNoStringDocumentType,\n HeroArrayDocumentType,\n SimpleHeroArrayDocumentType,\n RefHumanDocumentType,\n RefHumanNestedDocumentType,\n AverageSchemaDocumentType,\n PointDocumentType,\n HumanWithTimestampDocumentType,\n BigHumanDocumentType,\n NostringIndexDocumentType,\n NoIndexHumanDocumentType,\n HumanWithCompositePrimary,\n HumanWithTimestampNestedDocumentType\n} from './schema-objects.ts';\nimport { overwritable } from '../../overwritable.ts';\nimport { toTypedRxJsonSchema } from '../../rx-schema.ts';\nimport type {\n ExtractDocumentTypeFromTypedRxJsonSchema,\n RxJsonSchema\n} from '../../types/rx-schema';\nimport { flatClone } from '../utils/index.ts';\n\n\nexport const humanSchemaLiteral = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n description: 'describes a human being',\n version: 0,\n keyCompression: false,\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string',\n maxLength: 100\n },\n lastName: {\n type: 'string',\n maxLength: 100\n },\n age: {\n description: 'age in years',\n type: 'integer',\n minimum: 0,\n maximum: 150,\n multipleOf: 1\n }\n },\n required: ['firstName', 'lastName', 'passportId', 'age'],\n indexes: ['firstName']\n} as const);\nconst humanSchemaTyped = toTypedRxJsonSchema(humanSchemaLiteral);\nexport type HumanDocumentType = ExtractDocumentTypeFromTypedRxJsonSchema;\nexport const human: RxJsonSchema = humanSchemaLiteral;\n\n\nexport const humanDefault: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n version: 0,\n description: 'describes a 
human being',\n keyCompression: false,\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string',\n maxLength: 100\n },\n lastName: {\n type: 'string',\n maxLength: 100\n },\n age: {\n description: 'age in years',\n type: 'integer',\n minimum: 0,\n maximum: 150,\n default: 20\n }\n },\n indexes: [],\n required: ['passportId']\n});\n\nexport const humanFinal: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human schema with age set final',\n version: 0,\n keyCompression: false,\n type: 'object',\n primaryKey: 'passportId',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string'\n },\n lastName: {\n type: 'string'\n },\n age: {\n type: 'integer',\n minimum: 0,\n maximum: 150,\n final: true\n }\n },\n required: [\n 'passportId'\n ]\n});\n\nexport const simpleHuman: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n version: 0,\n keyCompression: false,\n description: 'describes a simple human being',\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n age: {\n type: 'string',\n maxLength: 100\n },\n oneOptional: {\n type: 'string'\n }\n },\n indexes: ['age'],\n required: ['passportId', 'age']\n});\n\nexport const simpleHumanV3: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n version: 3,\n keyCompression: false,\n description: 'describes a simple human being',\n type: 'object',\n primaryKey: 'passportId',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n age: {\n type: 'number',\n minimum: 0,\n maximum: 1000,\n multipleOf: 1\n },\n oneOptional: {\n type: 'string'\n }\n },\n indexes: ['age'],\n required: ['passportId', 'age']\n});\n\nexport const humanAgeIndex: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n version: 0,\n keyCompression: 
false,\n description: 'describes a human being',\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string'\n },\n lastName: {\n type: 'string'\n },\n age: {\n description: 'Age in years',\n type: 'integer',\n minimum: 0,\n maximum: 150,\n multipleOf: 1\n }\n },\n required: ['firstName', 'lastName', 'age'],\n indexes: ['age']\n});\n\nexport const humanSubIndex: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n version: 0,\n description: 'describes a human being where other.age is index',\n keyCompression: false,\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n other: {\n type: 'object',\n properties: {\n age: {\n description: 'Age in years',\n type: 'integer',\n minimum: 0,\n maximum: 150,\n multipleOf: 1\n }\n }\n }\n },\n required: [\n 'passportId'\n ],\n indexes: ['other.age']\n});\n\n/**\n * each field is an index,\n * use this to slow down inserts in tests\n */\nexport const humanWithAllIndex: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n description: 'describes a human being',\n version: 0,\n keyCompression: false,\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string',\n maxLength: 100\n },\n lastName: {\n type: 'string',\n maxLength: 100\n },\n age: {\n description: 'age in years',\n type: 'integer',\n minimum: 0,\n maximum: 150,\n multipleOf: 1\n }\n },\n indexes: ['firstName', 'lastName', 'age'],\n required: ['firstName', 'lastName']\n});\n\nexport const nestedHuman: RxJsonSchema = {\n title: 'human nested',\n version: 0,\n description: 'describes a human being with a nested field',\n keyCompression: false,\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n 
type: 'string',\n maxLength: 100\n },\n mainSkill: {\n type: 'object',\n properties: {\n name: {\n type: 'string',\n maxLength: 10\n },\n level: {\n type: 'number',\n minimum: 0,\n maximum: 10,\n multipleOf: 1\n }\n },\n required: ['name', 'level'],\n additionalProperties: false\n }\n },\n required: ['firstName'],\n indexes: []\n};\n\nexport const deepNestedHuman: RxJsonSchema = {\n title: 'deep human nested',\n version: 0,\n keyCompression: false,\n description: 'describes a human being with a nested field',\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n mainSkill: {\n type: 'object',\n properties: {\n name: {\n type: 'string'\n },\n attack: {\n type: 'object',\n properties: {\n good: {\n type: 'boolean'\n },\n count: {\n type: 'number'\n }\n }\n }\n },\n required: ['name']\n }\n },\n indexes: [],\n required: ['mainSkill']\n};\n\nexport const noIndexHuman: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n version: 0,\n description: 'this schema has no index',\n keyCompression: false,\n primaryKey: 'firstName',\n type: 'object',\n properties: {\n firstName: {\n type: 'string',\n maxLength: 100\n },\n lastName: {\n type: 'string'\n }\n },\n required: ['lastName']\n});\n\nexport const noStringIndex: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n description: 'the index has no type:string',\n version: 0,\n keyCompression: false,\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'object',\n maxLength: 100\n },\n firstName: {\n type: 'string'\n }\n },\n required: ['firstName', 'passportId'],\n indexes: []\n});\n\n\nexport const bigHuman: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n version: 0,\n description: 'describes a human being with 2 indexes',\n keyCompression: false,\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n 
dnaHash: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string',\n maxLength: 100\n },\n lastName: {\n type: 'string'\n },\n age: {\n description: 'Age in years',\n type: 'integer',\n minimum: 0\n }\n },\n required: ['firstName', 'lastName'],\n indexes: ['firstName', 'dnaHash']\n});\n\nexport const encryptedHuman: RxJsonSchema = {\n title: 'human encrypted',\n version: 0,\n description: 'uses an encrypted field',\n primaryKey: 'passportId',\n type: 'object',\n keyCompression: false,\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string'\n },\n secret: {\n type: 'string'\n }\n },\n indexes: [],\n required: ['firstName', 'secret'],\n encrypted: ['secret']\n};\n\nexport const encryptedObjectHuman: RxJsonSchema = {\n title: 'human encrypted',\n version: 0,\n keyCompression: false,\n description: 'uses an encrypted field',\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string'\n },\n secret: {\n type: 'object',\n properties: {\n name: {\n type: 'string'\n },\n subname: {\n type: 'string'\n }\n }\n }\n },\n indexes: [],\n required: ['firstName', 'secret'],\n encrypted: ['secret']\n};\n\nexport const encryptedDeepHuman: RxJsonSchema = {\n title: 'human encrypted',\n version: 0,\n keyCompression: false,\n description: 'uses an encrypted field',\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string'\n },\n firstLevelPassword: {\n type: 'string',\n },\n secretData: {\n type: 'object',\n properties: {\n pw: {\n type: 'string'\n }\n }\n },\n deepSecret: {\n type: 'object',\n properties: {\n darkhole: {\n type: 'object',\n properties: {\n pw: {\n type: 'string'\n }\n }\n }\n }\n },\n nestedSecret: {\n type: 'object',\n properties: {\n darkhole: {\n type: 'object',\n properties: {\n pw: {\n type: 'string'\n }\n }\n }\n }\n 
}\n\n },\n indexes: [],\n required: ['firstName', 'secretData'],\n encrypted: [\n 'firstLevelPassword',\n 'secretData',\n 'deepSecret.darkhole.pw',\n 'nestedSecret.darkhole.pw'\n ]\n};\n\nexport const notExistingIndex: RxJsonSchema<{ passportId: string; address: { street: string; }; }> = {\n title: 'index',\n version: 0,\n description: 'this schema has a specified index which does not exists',\n primaryKey: 'passportId',\n type: 'object',\n keyCompression: false,\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n address: {\n type: 'object',\n properties: {\n street: { type: 'string' }\n }\n }\n },\n required: [\n 'passportId'\n ],\n indexes: ['address.apartment']\n};\n\nexport const compoundIndex: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'compound index',\n version: 0,\n description: 'this schema has a compoundIndex',\n primaryKey: 'passportId',\n type: 'object',\n keyCompression: false,\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n passportCountry: {\n type: 'string',\n maxLength: 100\n },\n age: {\n type: 'integer',\n minimum: 0,\n maximum: 150,\n multipleOf: 1\n }\n },\n required: [\n 'passportId'\n ],\n indexes: [\n ['age', 'passportCountry']\n ]\n});\n\nexport const compoundIndexNoString: RxJsonSchema = {\n title: 'compound index',\n version: 0,\n description: 'this schema has a compoundIndex',\n primaryKey: 'passportId',\n keyCompression: false,\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n passportCountry: {\n type: 'object'\n },\n age: {\n type: 'integer'\n }\n },\n indexes: [\n [10, 'passportCountry']\n ]\n} as RxJsonSchema;\n\nexport const empty: RxJsonSchema = {\n title: 'empty schema',\n version: 0,\n type: 'object',\n primaryKey: 'id',\n properties: {\n id: {\n type: 'string',\n maxLength: 100\n }\n },\n required: ['id']\n};\n\nexport const heroArray: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'hero schema',\n version: 
0,\n keyCompression: false,\n description: 'describes a hero with an array-field',\n primaryKey: 'name',\n type: 'object',\n properties: {\n name: {\n type: 'string',\n maxLength: 100\n },\n skills: {\n type: 'array',\n maxItems: 5,\n uniqueItems: true,\n items: {\n type: 'object',\n properties: {\n name: {\n type: 'string'\n },\n damage: {\n type: 'number'\n }\n }\n }\n }\n },\n required: [\n 'name'\n ]\n});\n\nexport const simpleArrayHero: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'hero schema',\n version: 0,\n description: 'describes a hero with a string-array-field',\n keyCompression: false,\n primaryKey: 'name',\n type: 'object',\n properties: {\n name: {\n type: 'string',\n maxLength: 100\n },\n skills: {\n type: 'array',\n maxItems: 5,\n uniqueItems: true,\n items: {\n type: 'string',\n }\n }\n },\n required: [\n 'name'\n ]\n});\n\nexport const primaryHumanLiteral = overwritable.deepFreezeWhenDevMode({\n title: 'human schema with primary',\n version: 0,\n description: 'describes a human being with passportID as primary',\n keyCompression: false,\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n minLength: 4,\n maxLength: 100\n },\n firstName: {\n type: 'string',\n maxLength: 100\n },\n lastName: {\n type: 'string',\n maxLength: 500\n },\n age: {\n type: 'integer',\n minimum: 0,\n maximum: 150,\n multipleOf: 1\n }\n },\n required: ['passportId', 'firstName', 'lastName']\n} as const);\nconst primaryHumanTypedSchema = toTypedRxJsonSchema(primaryHumanLiteral);\nexport type PrimaryHumanDocType = ExtractDocumentTypeFromTypedRxJsonSchema;\nexport const primaryHuman: RxJsonSchema = primaryHumanLiteral;\n\nexport const humanNormalizeSchema1Literal = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n version: 0,\n keyCompression: false,\n description: 'describes a human being',\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n minLength: 4,\n 
maxLength: 100\n },\n age: {\n description: 'age in years',\n type: 'integer',\n minimum: 0,\n maximum: 150,\n multipleOf: 1\n }\n },\n required: ['age', 'passportId']\n} as const);\nconst humanNormalizeSchema1Typed = toTypedRxJsonSchema(humanNormalizeSchema1Literal);\nexport type AgeHumanDocumentType = ExtractDocumentTypeFromTypedRxJsonSchema;\nexport const humanNormalizeSchema1: RxJsonSchema = humanNormalizeSchema1Literal;\n\nexport const humanNormalizeSchema2: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n version: 0,\n keyCompression: false,\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n minLength: 4,\n maxLength: 100\n },\n age: {\n minimum: 0,\n type: 'integer',\n description: 'age in years',\n maximum: 150,\n multipleOf: 1\n }\n },\n description: 'describes a human being',\n required: ['age', 'passportId']\n});\n\nexport const refHuman: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human related to other human',\n version: 0,\n keyCompression: false,\n primaryKey: 'name',\n type: 'object',\n properties: {\n name: {\n type: 'string',\n maxLength: 100\n },\n bestFriend: {\n ref: 'human',\n type: 'string'\n }\n },\n required: [\n 'name'\n ]\n});\n\nexport const humanCompositePrimary: RxJsonSchema = {\n title: 'human schema',\n description: 'describes a human being',\n version: 0,\n keyCompression: false,\n primaryKey: {\n key: 'id',\n fields: [\n 'firstName',\n 'info.age'\n ],\n separator: '|'\n },\n type: 'object',\n properties: {\n id: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string',\n maxLength: 100\n },\n lastName: {\n type: 'string'\n },\n info: {\n type: 'object',\n properties: {\n age: {\n description: 'age in years',\n type: 'integer',\n minimum: 0,\n maximum: 150\n }\n },\n required: ['age']\n }\n },\n required: [\n 'id',\n 'firstName',\n 'lastName',\n 'info'\n ],\n indexes: ['firstName']\n};\n\nexport const 
humanCompositePrimarySchemaLiteral = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n description: 'describes a human being',\n version: 0,\n keyCompression: false,\n primaryKey: {\n key: 'id',\n fields: [\n 'firstName',\n 'info.age'\n ],\n separator: '|'\n },\n encrypted: [],\n type: 'object',\n properties: {\n id: {\n type: 'string',\n maxLength: 100\n },\n firstName: {\n type: 'string',\n maxLength: 100\n },\n lastName: {\n type: 'string'\n },\n info: {\n type: 'object',\n properties: {\n age: {\n description: 'age in years',\n type: 'integer',\n minimum: 0,\n maximum: 150\n }\n },\n required: ['age']\n },\n readonlyProps: {\n allOf: [],\n anyOf: [],\n oneOf: [],\n type: [],\n dependencies: {\n someDep: ['asd'],\n },\n items: [],\n required: [],\n enum: [],\n }\n },\n required: [\n 'id',\n 'firstName',\n 'lastName',\n 'info'\n ],\n indexes: ['firstName']\n} as const);\n\nconst humanCompositePrimarySchemaTyped = toTypedRxJsonSchema(humanCompositePrimarySchemaLiteral);\nexport type HumanCompositePrimaryDocType = ExtractDocumentTypeFromTypedRxJsonSchema;\n\nexport const refHumanNested: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human related to other human',\n version: 0,\n keyCompression: false,\n primaryKey: 'name',\n type: 'object',\n properties: {\n name: {\n type: 'string',\n maxLength: 100\n },\n foo: {\n type: 'object',\n properties: {\n bestFriend: {\n ref: 'human',\n type: 'string'\n }\n }\n }\n },\n required: [\n 'name'\n ]\n});\n\n/**\n * an average schema used in performance-tests\n */\nexport function averageSchema(): RxJsonSchema {\n const ret: RxJsonSchema = {\n title: 'averageSchema_' + AsyncTestUtil.randomString(5), // randomisation used so hash differs\n version: 0,\n primaryKey: 'id',\n type: 'object',\n keyCompression: false,\n properties: {\n id: {\n type: 'string',\n maxLength: 12\n },\n var1: {\n type: 'string',\n maxLength: 12\n },\n var2: {\n type: 'number',\n minimum: 0,\n maximum: 50000,\n multipleOf: 1\n 
},\n deep: {\n type: 'object',\n properties: {\n deep1: {\n type: 'string',\n maxLength: 10\n },\n deep2: {\n type: 'string',\n maxLength: 10\n }\n }\n },\n list: {\n type: 'array',\n items: {\n type: 'object',\n properties: {\n deep1: {\n type: 'string'\n },\n deep2: {\n type: 'string'\n }\n }\n }\n }\n },\n required: [\n 'id'\n ],\n indexes: [\n 'var1',\n 'var2',\n 'deep.deep1',\n // one compound index\n [\n 'var2',\n 'var1'\n ]\n ],\n sharding: {\n shards: 6,\n mode: 'collection'\n }\n };\n return ret;\n}\n\nexport const point: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'point schema',\n version: 0,\n description: 'describes coordinates in 2d space',\n primaryKey: 'id',\n type: 'object',\n properties: {\n id: {\n type: 'string',\n maxLength: 100\n },\n x: {\n type: 'number'\n },\n y: {\n type: 'number'\n }\n },\n required: ['x', 'y']\n});\n\nexport const humanMinimal: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n title: 'human schema',\n description: 'describes a human being',\n version: 0,\n keyCompression: false,\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n age: {\n type: 'integer'\n },\n oneOptional: {\n type: 'string'\n }\n },\n indexes: [],\n required: ['passportId', 'age']\n});\n\nexport const humanMinimalBroken: RxJsonSchema<{ passportId: string; broken: number; }> = {\n title: 'human schema',\n description: 'describes a human being',\n version: 0,\n keyCompression: false,\n primaryKey: 'passportId',\n type: 'object',\n properties: {\n passportId: {\n type: 'string',\n maxLength: 100\n },\n broken: {\n type: 'integer'\n }\n },\n indexes: [],\n required: ['passportId', 'broken']\n} as unknown as RxJsonSchema;\n\n\n/**\n * used in the graphql-test\n * contains timestamp\n */\nexport const humanWithTimestamp: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n version: 0,\n type: 'object',\n primaryKey: 'id',\n properties: {\n id: {\n type: 'string',\n 
maxLength: 100\n },\n name: {\n type: 'string',\n maxLength: 1000\n },\n age: {\n type: 'number'\n },\n updatedAt: {\n type: 'number',\n minimum: 0,\n maximum: 10000000000000000,\n multipleOf: 1\n\n },\n deletedAt: {\n type: 'number'\n }\n },\n indexes: ['updatedAt'],\n required: ['id', 'name', 'age', 'updatedAt']\n});\n\nexport const humanWithTimestampNested: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n version: 0,\n type: 'object',\n primaryKey: 'id',\n properties: {\n id: {\n type: 'string',\n maxLength: 100\n },\n name: {\n type: 'string',\n maxLength: 1000\n },\n age: {\n type: 'number'\n },\n updatedAt: {\n type: 'number',\n minimum: 0,\n maximum: 10000000000000000,\n multipleOf: 1\n\n },\n deletedAt: {\n type: 'number'\n },\n address: {\n type: 'object',\n properties: {\n street: {\n type: 'string',\n },\n suite: {\n type: 'string',\n },\n city: {\n type: 'string',\n },\n zipcode: {\n type: 'string',\n },\n geo: {\n type: 'object',\n properties: {\n lat: {\n type: 'string',\n },\n lng: {\n type: 'string',\n },\n },\n },\n },\n },\n },\n indexes: ['updatedAt'],\n required: ['id', 'name', 'age', 'updatedAt']\n});\n\n\n/**\n * each field is an index,\n * use this to slow down inserts in tests\n */\nexport const humanWithTimestampAllIndex: RxJsonSchema = overwritable.deepFreezeWhenDevMode({\n version: 0,\n type: 'object',\n primaryKey: 'id',\n properties: {\n id: {\n type: 'string',\n maxLength: 100\n },\n name: {\n type: 'string',\n maxLength: 100\n },\n age: {\n type: 'number',\n minimum: 0,\n maximum: 1500,\n multipleOf: 1\n },\n updatedAt: {\n type: 'number',\n minimum: 0,\n maximum: 10000000000000000,\n multipleOf: 1\n },\n deletedAt: {\n type: 'number'\n }\n },\n indexes: ['name', 'age', 'updatedAt'],\n required: ['id', 'name', 'age', 'updatedAt']\n});\n\nexport const humanWithSimpleAndCompoundIndexes: RxJsonSchema<{\n id: string;\n name: string;\n age: number;\n createdAt: number;\n updatedAt: number;\n}> = overwritable.deepFreezeWhenDevMode({\n 
version: 0,\n primaryKey: 'id',\n type: 'object',\n properties: {\n id: {\n type: 'string',\n maxLength: 100\n },\n name: {\n type: 'string',\n maxLength: 100\n },\n age: {\n type: 'number',\n minimum: 0,\n maximum: 1500,\n multipleOf: 1\n },\n createdAt: {\n type: 'number',\n minimum: 0,\n maximum: 10000000000000000,\n multipleOf: 1\n },\n updatedAt: {\n type: 'number',\n minimum: 0,\n maximum: 10000000000000000,\n multipleOf: 1\n }\n },\n indexes: [\n ['name', 'id'],\n ['age', 'id'],\n ['createdAt', 'updatedAt', 'id']\n ],\n required: ['id', 'name', 'age', 'updatedAt']\n});\n\nexport const humanWithDeepNestedIndexes: RxJsonSchema<{ id: string; name: string; job: any; }> = overwritable.deepFreezeWhenDevMode({\n version: 0,\n primaryKey: 'id',\n type: 'object',\n properties: {\n id: {\n type: 'string',\n maxLength: 100\n },\n name: {\n type: 'string',\n maxLength: 100\n },\n job: {\n type: 'object',\n properties: {\n name: {\n type: 'string',\n maxLength: 100\n },\n manager: {\n type: 'object',\n properties: {\n fullName: {\n type: 'string',\n maxLength: 100\n },\n previousJobs: {\n type: 'array',\n items: {\n type: 'object',\n properties: {\n name: {\n type: 'string',\n maxLength: 100\n }\n }\n }\n }\n }\n }\n }\n }\n },\n required: [\n 'id'\n ],\n indexes: [\n 'name',\n 'job.name',\n 'job.manager.fullName'\n ]\n});\n\nexport const humanIdAndAgeIndex: RxJsonSchema<{ id: string; name: string; age: number; }> = overwritable.deepFreezeWhenDevMode({\n version: 0,\n description: 'uses a compound index with id as lowest level',\n primaryKey: 'id',\n type: 'object',\n properties: {\n id: {\n type: 'string',\n maxLength: 100\n },\n name: {\n type: 'string'\n },\n age: {\n description: 'Age in years',\n type: 'integer',\n minimum: 0,\n maximum: 150,\n multipleOf: 1\n }\n },\n required: ['id', 'name', 'age'],\n indexes: [\n ['age', 'id']\n ]\n});\n\n\nexport function enableKeyCompression(\n schema: RxJsonSchema\n): RxJsonSchema {\n const ret = flatClone(schema);\n 
ret.keyCompression = true;\n return ret;\n}\n"],"mappings":"AAAA,OAAOA,aAAa,MAAM,iBAAiB;AAyB3C,SAASC,YAAY,QAAQ,uBAAuB;AACpD,SAASC,mBAAmB,QAAQ,oBAAoB;AAKxD,SAASC,SAAS,QAAQ,mBAAmB;AAG7C,OAAO,IAAMC,kBAAkB,GAAGH,YAAY,CAACI,qBAAqB,CAAC;EACjEC,KAAK,EAAE,cAAc;EACrBC,WAAW,EAAE,yBAAyB;EACtCC,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDE,QAAQ,EAAE;MACNL,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDV,WAAW,EAAE,cAAc;MAC3BI,IAAI,EAAE,SAAS;MACfO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,GAAG;MACZC,UAAU,EAAE;IAChB;EACJ,CAAC;EACDC,QAAQ,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,YAAY,EAAE,KAAK,CAAC;EACxDC,OAAO,EAAE,CAAC,WAAW;AACzB,CAAU,CAAC;AACX,IAAMC,gBAAgB,GAAGrB,mBAAmB,CAACE,kBAAkB,CAAC;AAEhE,OAAO,IAAMoB,KAAsC,GAAGpB,kBAAkB;AAGxE,OAAO,IAAMqB,YAA6C,GAAGxB,YAAY,CAACI,qBAAqB,CAAC;EAC5FC,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,yBAAyB;EACtCE,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDE,QAAQ,EAAE;MACNL,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDV,WAAW,EAAE,cAAc;MAC3BI,IAAI,EAAE,SAAS;MACfO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,GAAG;MACZO,OAAO,EAAE;IACb;EACJ,CAAC;EACDJ,OAAO,EAAE,EAAE;EACXD,QAAQ,EAAE,CAAC,YAAY;AAC3B,CAAC,CAAC;AAEF,OAAO,IAAMM,UAA2C,GAAG1B,YAAY,CAACI,qBAAqB,CAAC;EAC1FC,KAAK,EAAE,iCAAiC;EACxCE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBE,IAAI,EAAE,QAAQ;EACdD,UAAU,EAAE,YAAY;EACxBE,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE;IACV,CAAC;IACDK,QAAQ,EAAE;MACNL,IAAI,EAAE;IACV,CAAC;IACDM,GAAG,EAAE;MACDN,IAAI,EAAE,SAAS;MACfO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,GAAG;MACZS,KAAK,EAAE;IACX;EACJ,CAAC;EACDP,QAAQ,EAAE,CACN,YAAY;AAEpB,CAAC,CAAC;AAEF,OAAO,IAAMQ,WAAoD,GAAG5B,YAAY,CAACI,q
BAAqB,CAAC;EACnGC,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBF,WAAW,EAAE,gCAAgC;EAC7CG,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDN,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDgB,WAAW,EAAE;MACTnB,IAAI,EAAE;IACV;EACJ,CAAC;EACDW,OAAO,EAAE,CAAC,KAAK,CAAC;EAChBD,QAAQ,EAAE,CAAC,YAAY,EAAE,KAAK;AAClC,CAAC,CAAC;AAEF,OAAO,IAAMU,aAAsD,GAAG9B,YAAY,CAACI,qBAAqB,CAAC;EACrGC,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBF,WAAW,EAAE,gCAAgC;EAC7CI,IAAI,EAAE,QAAQ;EACdD,UAAU,EAAE,YAAY;EACxBE,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDN,IAAI,EAAE,QAAQ;MACdO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,IAAI;MACbC,UAAU,EAAE;IAChB,CAAC;IACDU,WAAW,EAAE;MACTnB,IAAI,EAAE;IACV;EACJ,CAAC;EACDW,OAAO,EAAE,CAAC,KAAK,CAAC;EAChBD,QAAQ,EAAE,CAAC,YAAY,EAAE,KAAK;AAClC,CAAC,CAAC;AAEF,OAAO,IAAMW,aAA8C,GAAG/B,YAAY,CAACI,qBAAqB,CAAC;EAC7FC,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBF,WAAW,EAAE,yBAAyB;EACtCG,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE;IACV,CAAC;IACDK,QAAQ,EAAE;MACNL,IAAI,EAAE;IACV,CAAC;IACDM,GAAG,EAAE;MACDV,WAAW,EAAE,cAAc;MAC3BI,IAAI,EAAE,SAAS;MACfO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,GAAG;MACZC,UAAU,EAAE;IAChB;EACJ,CAAC;EACDC,QAAQ,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,KAAK,CAAC;EAC1CC,OAAO,EAAE,CAAC,KAAK;AACnB,CAAC,CAAC;AAEF,OAAO,IAAMW,aAA0D,GAAGhC,YAAY,CAACI,qBAAqB,CAAC;EACzGC,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,kDAAkD;EAC/DE,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDoB,KAAK,EAAE;MACHvB,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACRK,GAAG,EAAE;UACDV,WAAW,EAAE,cAAc;UAC3BI,IAAI,EAAE,SAAS;UACfO,OAAO,EAAE,CAAC;UACVC,OAAO,EAAE,GAAG;UACZC,UAAU,EAAE;QAChB;MACJ;IACJ;EACJ,CAAC;EACDC,QAAQ,EAAE,CACN,YAAY,CACf;EACDC,OAAO,
EAAE,CAAC,WAAW;AACzB,CAAC,CAAC;;AAEF;AACA;AACA;AACA;AACA,OAAO,IAAMa,iBAAkD,GAAGlC,YAAY,CAACI,qBAAqB,CAAC;EACjGC,KAAK,EAAE,cAAc;EACrBC,WAAW,EAAE,yBAAyB;EACtCC,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDE,QAAQ,EAAE;MACNL,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDV,WAAW,EAAE,cAAc;MAC3BI,IAAI,EAAE,SAAS;MACfO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,GAAG;MACZC,UAAU,EAAE;IAChB;EACJ,CAAC;EACDE,OAAO,EAAE,CAAC,WAAW,EAAE,UAAU,EAAE,KAAK,CAAC;EACzCD,QAAQ,EAAE,CAAC,WAAW,EAAE,UAAU;AACtC,CAAC,CAAC;AAEF,OAAO,IAAMe,WAAkD,GAAG;EAC9D9B,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,6CAA6C;EAC1DE,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDuB,SAAS,EAAE;MACP1B,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACR0B,IAAI,EAAE;UACF3B,IAAI,EAAE,QAAQ;UACdG,SAAS,EAAE;QACf,CAAC;QACDyB,KAAK,EAAE;UACH5B,IAAI,EAAE,QAAQ;UACdO,OAAO,EAAE,CAAC;UACVC,OAAO,EAAE,EAAE;UACXC,UAAU,EAAE;QAChB;MACJ,CAAC;MACDC,QAAQ,EAAE,CAAC,MAAM,EAAE,OAAO,CAAC;MAC3BmB,oBAAoB,EAAE;IAC1B;EACJ,CAAC;EACDnB,QAAQ,EAAE,CAAC,WAAW,CAAC;EACvBC,OAAO,EAAE;AACb,CAAC;AAED,OAAO,IAAMmB,eAA0D,GAAG;EACtEnC,KAAK,EAAE,mBAAmB;EAC1BE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBF,WAAW,EAAE,6CAA6C;EAC1DG,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDuB,SAAS,EAAE;MACP1B,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACR0B,IAAI,EAAE;UACF3B,IAAI,EAAE;QACV,CAAC;QACD+B,MAAM,EAAE;UACJ/B,IAAI,EAAE,QAAQ;UACdC,UAAU,EAAE;YACR+B,IAAI,EAAE;cACFhC,IAAI,EAAE;YACV,CAAC;YACDiC,KAAK,EAAE;cACHjC,IAAI,EAAE;YACV;UACJ;QACJ;MACJ,CAAC;MACDU,QAAQ,EAAE,CAAC,MAAM;IACrB;EACJ,CAAC;EACDC,OAAO,EAAE,EAAE;EACXD,QAAQ,EAAE,CAAC,WAAW;AAC1B,CAAC;AAED,OAAO,IAAMwB,YAAoD,GAAG5C,YAAY,CAACI,qBAAqB,
CAAC;EACnGC,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,0BAA0B;EACvCE,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,WAAW;EACvBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRG,SAAS,EAAE;MACPJ,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDE,QAAQ,EAAE;MACNL,IAAI,EAAE;IACV;EACJ,CAAC;EACDU,QAAQ,EAAE,CAAC,UAAU;AACzB,CAAC,CAAC;AAEF,OAAO,IAAMyB,aAAsD,GAAG7C,YAAY,CAACI,qBAAqB,CAAC;EACrGE,WAAW,EAAE,8BAA8B;EAC3CC,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE;IACV;EACJ,CAAC;EACDU,QAAQ,EAAE,CAAC,WAAW,EAAE,YAAY,CAAC;EACrCC,OAAO,EAAE;AACb,CAAC,CAAC;AAGF,OAAO,IAAMyB,QAA4C,GAAG9C,YAAY,CAACI,qBAAqB,CAAC;EAC3FC,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,wCAAwC;EACrDE,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDkC,OAAO,EAAE;MACLrC,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDE,QAAQ,EAAE;MACNL,IAAI,EAAE;IACV,CAAC;IACDM,GAAG,EAAE;MACDV,WAAW,EAAE,cAAc;MAC3BI,IAAI,EAAE,SAAS;MACfO,OAAO,EAAE;IACb;EACJ,CAAC;EACDG,QAAQ,EAAE,CAAC,WAAW,EAAE,UAAU,CAAC;EACnCC,OAAO,EAAE,CAAC,WAAW,EAAE,SAAS;AACpC,CAAC,CAAC;AAEF,OAAO,IAAM2B,cAAwD,GAAG;EACpE3C,KAAK,EAAE,iBAAiB;EACxBE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,yBAAyB;EACtCG,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdF,cAAc,EAAE,KAAK;EACrBG,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE;IACV,CAAC;IACDuC,MAAM,EAAE;MACJvC,IAAI,EAAE;IACV;EACJ,CAAC;EACDW,OAAO,EAAE,EAAE;EACXD,QAAQ,EAAE,CAAC,WAAW,EAAE,QAAQ,CAAC;EACjC8B,SAAS,EAAE,CAAC,QAAQ;AACxB,CAAC;AAED,OAAO,IAAMC,oBAAoE,GAAG;EAChF9C,KAAK,EAAE,iBAAiB;EACxBE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBF,WAAW,EAAE,yBAAyB;EACtCG,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE;IA
CV,CAAC;IACDuC,MAAM,EAAE;MACJvC,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACR0B,IAAI,EAAE;UACF3B,IAAI,EAAE;QACV,CAAC;QACD0C,OAAO,EAAE;UACL1C,IAAI,EAAE;QACV;MACJ;IACJ;EACJ,CAAC;EACDW,OAAO,EAAE,EAAE;EACXD,QAAQ,EAAE,CAAC,WAAW,EAAE,QAAQ,CAAC;EACjC8B,SAAS,EAAE,CAAC,QAAQ;AACxB,CAAC;AAED,OAAO,IAAMG,kBAAgE,GAAG;EAC5EhD,KAAK,EAAE,iBAAiB;EACxBE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBF,WAAW,EAAE,yBAAyB;EACtCG,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE;IACV,CAAC;IACD4C,kBAAkB,EAAE;MAChB5C,IAAI,EAAE;IACV,CAAC;IACD6C,UAAU,EAAE;MACR7C,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACR6C,EAAE,EAAE;UACA9C,IAAI,EAAE;QACV;MACJ;IACJ,CAAC;IACD+C,UAAU,EAAE;MACR/C,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACR+C,QAAQ,EAAE;UACNhD,IAAI,EAAE,QAAQ;UACdC,UAAU,EAAE;YACR6C,EAAE,EAAE;cACA9C,IAAI,EAAE;YACV;UACJ;QACJ;MACJ;IACJ,CAAC;IACDiD,YAAY,EAAE;MACVjD,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACR+C,QAAQ,EAAE;UACNhD,IAAI,EAAE,QAAQ;UACdC,UAAU,EAAE;YACR6C,EAAE,EAAE;cACA9C,IAAI,EAAE;YACV;UACJ;QACJ;MACJ;IACJ;EAEJ,CAAC;EACDW,OAAO,EAAE,EAAE;EACXD,QAAQ,EAAE,CAAC,WAAW,EAAE,YAAY,CAAC;EACrC8B,SAAS,EAAE,CACP,oBAAoB,EACpB,YAAY,EACZ,wBAAwB,EACxB,0BAA0B;AAElC,CAAC;AAED,OAAO,IAAMU,gBAAqF,GAAG;EACjGvD,KAAK,EAAE,OAAO;EACdE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,yDAAyD;EACtEG,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdF,cAAc,EAAE,KAAK;EACrBG,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDgD,OAAO,EAAE;MACLnD,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACRmD,MAAM,EAAE;UAAEpD,IAAI,EAAE;QAAS;MAC7B;IACJ;EACJ,CAAC;EACDU,QAAQ,EAAE,CACN,YAAY,CACf;EACDC,OAAO,EAAE,CAAC,mBAAmB;AACjC,CAAC;AAED,OAAO,IAAM0C,aAAsD,GAAG/D,YAAY,CAACI,qBAAqB,CAAC;EACrGC,KAAK,EAAE,gBAAgB;EACvBE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,iCAAiC;EAC9CG,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdF,cAAc,EAAE,KAAK;EACrBG,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDmD,eAAe,EAAE;MACbtD,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDN,IAAI,EAAE,SAAS;MACfO,
OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,GAAG;MACZC,UAAU,EAAE;IAChB;EACJ,CAAC;EACDC,QAAQ,EAAE,CACN,YAAY,CACf;EACDC,OAAO,EAAE,CACL,CAAC,KAAK,EAAE,iBAAiB,CAAC;AAElC,CAAC,CAAC;AAEF,OAAO,IAAM4C,qBAAsE,GAAG;EAClF5D,KAAK,EAAE,gBAAgB;EACvBE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,iCAAiC;EAC9CG,UAAU,EAAE,YAAY;EACxBD,cAAc,EAAE,KAAK;EACrBE,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDmD,eAAe,EAAE;MACbtD,IAAI,EAAE;IACV,CAAC;IACDM,GAAG,EAAE;MACDN,IAAI,EAAE;IACV;EACJ,CAAC;EACDW,OAAO,EAAE,CACL,CAAC,EAAE,EAAE,iBAAiB,CAAC;AAE/B,CAAoD;AAEpD,OAAO,IAAM6C,KAAwB,GAAG;EACpC7D,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVG,IAAI,EAAE,QAAQ;EACdD,UAAU,EAAE,IAAI;EAChBE,UAAU,EAAE;IACRwD,EAAE,EAAE;MACAzD,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf;EACJ,CAAC;EACDO,QAAQ,EAAE,CAAC,IAAI;AACnB,CAAC;AAED,OAAO,IAAMgD,SAA8C,GAAGpE,YAAY,CAACI,qBAAqB,CAAC;EAC7FC,KAAK,EAAE,aAAa;EACpBE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBF,WAAW,EAAE,sCAAsC;EACnDG,UAAU,EAAE,MAAM;EAClBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACR0B,IAAI,EAAE;MACF3B,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDwD,MAAM,EAAE;MACJ3D,IAAI,EAAE,OAAO;MACb4D,QAAQ,EAAE,CAAC;MACXC,WAAW,EAAE,IAAI;MACjBC,KAAK,EAAE;QACH9D,IAAI,EAAE,QAAQ;QACdC,UAAU,EAAE;UACR0B,IAAI,EAAE;YACF3B,IAAI,EAAE;UACV,CAAC;UACD+D,MAAM,EAAE;YACJ/D,IAAI,EAAE;UACV;QACJ;MACJ;IACJ;EACJ,CAAC;EACDU,QAAQ,EAAE,CACN,MAAM;AAEd,CAAC,CAAC;AAEF,OAAO,IAAMsD,eAA0D,GAAG1E,YAAY,CAACI,qBAAqB,CAAC;EACzGC,KAAK,EAAE,aAAa;EACpBE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,4CAA4C;EACzDE,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,MAAM;EAClBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACR0B,IAAI,EAAE;MACF3B,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDwD,MAAM,EAAE;MACJ3D,IAAI,EAAE,OAAO;MACb4D,QAAQ,EAAE,CAAC;MACXC,WAAW,EAAE,IAAI;MACjBC,KAAK,EAAE;QACH9D,IAAI,EAAE;MACV;IACJ;EACJ,CAAC;EACDU,QAAQ,EAAE,CACN,MAAM;AAEd,CAAC,CAAC;AAEF,OAAO,IAAMuD,mBAAmB,GAAG3E,YAAY,CAACI,qBAAqB,CAAC;EAClEC,KAAK,EAAE,2BAA2B;EAClCE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,oDAAoD;EACjEE,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;
IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdkE,SAAS,EAAE,CAAC;MACZ/D,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDE,QAAQ,EAAE;MACNL,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDN,IAAI,EAAE,SAAS;MACfO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,GAAG;MACZC,UAAU,EAAE;IAChB;EACJ,CAAC;EACDC,QAAQ,EAAE,CAAC,YAAY,EAAE,WAAW,EAAE,UAAU;AACpD,CAAU,CAAC;AACX,IAAMyD,uBAAuB,GAAG5E,mBAAmB,CAAC0E,mBAAmB,CAAC;AAExE,OAAO,IAAMG,YAA+C,GAAGH,mBAAmB;AAElF,OAAO,IAAMI,4BAA4B,GAAG/E,YAAY,CAACI,qBAAqB,CAAC;EAC3EC,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBF,WAAW,EAAE,yBAAyB;EACtCG,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdkE,SAAS,EAAE,CAAC;MACZ/D,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDV,WAAW,EAAE,cAAc;MAC3BI,IAAI,EAAE,SAAS;MACfO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,GAAG;MACZC,UAAU,EAAE;IAChB;EACJ,CAAC;EACDC,QAAQ,EAAE,CAAC,KAAK,EAAE,YAAY;AAClC,CAAU,CAAC;AACX,IAAM4D,0BAA0B,GAAG/E,mBAAmB,CAAC8E,4BAA4B,CAAC;AAEpF,OAAO,IAAME,qBAAyD,GAAGF,4BAA4B;AAErG,OAAO,IAAMG,qBAAyD,GAAGlF,YAAY,CAACI,qBAAqB,CAAC;EACxGC,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdkE,SAAS,EAAE,CAAC;MACZ/D,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDC,OAAO,EAAE,CAAC;MACVP,IAAI,EAAE,SAAS;MACfJ,WAAW,EAAE,cAAc;MAC3BY,OAAO,EAAE,GAAG;MACZC,UAAU,EAAE;IAChB;EACJ,CAAC;EACDb,WAAW,EAAE,yBAAyB;EACtCc,QAAQ,EAAE,CAAC,KAAK,EAAE,YAAY;AAClC,CAAC,CAAC;AAEF,OAAO,IAAM+D,QAA4C,GAAGnF,YAAY,CAACI,qBAAqB,CAAC;EAC3FC,KAAK,EAAE,8BAA8B;EACrCE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,MAAM;EAClBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACR0B,IAAI,EAAE;MACF3B,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDuE,UAAU,EAAE;MACRC,GAAG,EAAE,OAAO;MACZ3E,IAAI,EAAE;IACV;EACJ,CAAC;EACDU,QAAQ,EAAE,CACN,MAAM;AAEd,CAAC,CAAC;AAEF,OAAO,IAAMkE,qBAA8D,GAAG;EAC1EjF,KAAK,EAAE,cAAc;EACrBC,WAAW,EAAE,yBAAyB;EACtCC,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBC,UAAU,EAA
E;IACR8E,GAAG,EAAE,IAAI;IACTC,MAAM,EAAE,CACJ,WAAW,EACX,UAAU,CACb;IACDC,SAAS,EAAE;EACf,CAAC;EACD/E,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRwD,EAAE,EAAE;MACAzD,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDE,QAAQ,EAAE;MACNL,IAAI,EAAE;IACV,CAAC;IACDgF,IAAI,EAAE;MACFhF,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACRK,GAAG,EAAE;UACDV,WAAW,EAAE,cAAc;UAC3BI,IAAI,EAAE,SAAS;UACfO,OAAO,EAAE,CAAC;UACVC,OAAO,EAAE;QACb;MACJ,CAAC;MACDE,QAAQ,EAAE,CAAC,KAAK;IACpB;EACJ,CAAC;EACDA,QAAQ,EAAE,CACN,IAAI,EACJ,WAAW,EACX,UAAU,EACV,MAAM,CACT;EACDC,OAAO,EAAE,CAAC,WAAW;AACzB,CAAC;AAED,OAAO,IAAMsE,kCAAkC,GAAG3F,YAAY,CAACI,qBAAqB,CAAC;EACjFC,KAAK,EAAE,cAAc;EACrBC,WAAW,EAAE,yBAAyB;EACtCC,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE;IACR8E,GAAG,EAAE,IAAI;IACTC,MAAM,EAAE,CACJ,WAAW,EACX,UAAU,CACb;IACDC,SAAS,EAAE;EACf,CAAC;EACDvC,SAAS,EAAE,EAAE;EACbxC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRwD,EAAE,EAAE;MACAzD,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDC,SAAS,EAAE;MACPJ,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDE,QAAQ,EAAE;MACNL,IAAI,EAAE;IACV,CAAC;IACDgF,IAAI,EAAE;MACFhF,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACRK,GAAG,EAAE;UACDV,WAAW,EAAE,cAAc;UAC3BI,IAAI,EAAE,SAAS;UACfO,OAAO,EAAE,CAAC;UACVC,OAAO,EAAE;QACb;MACJ,CAAC;MACDE,QAAQ,EAAE,CAAC,KAAK;IACpB,CAAC;IACDwE,aAAa,EAAE;MACXC,KAAK,EAAE,EAAE;MACTC,KAAK,EAAE,EAAE;MACTC,KAAK,EAAE,EAAE;MACTrF,IAAI,EAAE,EAAE;MACRsF,YAAY,EAAE;QACVC,OAAO,EAAE,CAAC,KAAK;MACnB,CAAC;MACDzB,KAAK,EAAE,EAAE;MACTpD,QAAQ,EAAE,EAAE;MACZ8E,IAAI,EAAE;IACV;EACJ,CAAC;EACD9E,QAAQ,EAAE,CACN,IAAI,EACJ,WAAW,EACX,UAAU,EACV,MAAM,CACT;EACDC,OAAO,EAAE,CAAC,WAAW;AACzB,CAAU,CAAC;AAEX,IAAM8E,gCAAgC,GAAGlG,mBAAmB,CAAC0F,kCAAkC,CAAC;AAGhG,OAAO,IAAMS,cAAwD,GAAGpG,YAAY,CAACI,qBAAqB,CAAC;EACvGC,KAAK,EAAE,8BAA8B;EACrCE,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,MAAM;EAClBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACR0B,IAAI,EAAE;MACF3B,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDwF,GAAG,EAAE;MACD3F,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACRyE,UAAU,EAAE;UACRC,GA
AG,EAAE,OAAO;UACZ3E,IAAI,EAAE;QACV;MACJ;IACJ;EACJ,CAAC;EACDU,QAAQ,EAAE,CACN,MAAM;AAEd,CAAC,CAAC;;AAEF;AACA;AACA;AACA,OAAO,SAASkF,aAAaA,CAAA,EAA4C;EACrE,IAAMC,GAA4C,GAAG;IACjDlG,KAAK,EAAE,gBAAgB,GAAGN,aAAa,CAACyG,YAAY,CAAC,CAAC,CAAC;IAAE;IACzDjG,OAAO,EAAE,CAAC;IACVE,UAAU,EAAE,IAAI;IAChBC,IAAI,EAAE,QAAQ;IACdF,cAAc,EAAE,KAAK;IACrBG,UAAU,EAAE;MACRwD,EAAE,EAAE;QACAzD,IAAI,EAAE,QAAQ;QACdG,SAAS,EAAE;MACf,CAAC;MACD4F,IAAI,EAAE;QACF/F,IAAI,EAAE,QAAQ;QACdG,SAAS,EAAE;MACf,CAAC;MACD6F,IAAI,EAAE;QACFhG,IAAI,EAAE,QAAQ;QACdO,OAAO,EAAE,CAAC;QACVC,OAAO,EAAE,KAAK;QACdC,UAAU,EAAE;MAChB,CAAC;MACDwF,IAAI,EAAE;QACFjG,IAAI,EAAE,QAAQ;QACdC,UAAU,EAAE;UACRiG,KAAK,EAAE;YACHlG,IAAI,EAAE,QAAQ;YACdG,SAAS,EAAE;UACf,CAAC;UACDgG,KAAK,EAAE;YACHnG,IAAI,EAAE,QAAQ;YACdG,SAAS,EAAE;UACf;QACJ;MACJ,CAAC;MACDiG,IAAI,EAAE;QACFpG,IAAI,EAAE,OAAO;QACb8D,KAAK,EAAE;UACH9D,IAAI,EAAE,QAAQ;UACdC,UAAU,EAAE;YACRiG,KAAK,EAAE;cACHlG,IAAI,EAAE;YACV,CAAC;YACDmG,KAAK,EAAE;cACHnG,IAAI,EAAE;YACV;UACJ;QACJ;MACJ;IACJ,CAAC;IACDU,QAAQ,EAAE,CACN,IAAI,CACP;IACDC,OAAO,EAAE,CACL,MAAM,EACN,MAAM,EACN,YAAY;IACZ;IACA,CACI,MAAM,EACN,MAAM,CACT,CACJ;IACD0F,QAAQ,EAAE;MACNC,MAAM,EAAE,CAAC;MACTC,IAAI,EAAE;IACV;EACJ,CAAC;EACD,OAAOV,GAAG;AACd;AAEA,OAAO,IAAMW,KAAsC,GAAGlH,YAAY,CAACI,qBAAqB,CAAC;EACrFC,KAAK,EAAE,cAAc;EACrBE,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,mCAAmC;EAChDG,UAAU,EAAE,IAAI;EAChBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRwD,EAAE,EAAE;MACAzD,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDsG,CAAC,EAAE;MACCzG,IAAI,EAAE;IACV,CAAC;IACD0G,CAAC,EAAE;MACC1G,IAAI,EAAE;IACV;EACJ,CAAC;EACDU,QAAQ,EAAE,CAAC,GAAG,EAAE,GAAG;AACvB,CAAC,CAAC;AAEF,OAAO,IAAMiG,YAAqD,GAAGrH,YAAY,CAACI,qBAAqB,CAAC;EACpGC,KAAK,EAAE,cAAc;EACrBC,WAAW,EAAE,yBAAyB;EACtCC,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDN,IAAI,EAAE;IACV,CAAC;IACDmB,WAAW,EAAE;MACTnB,IAAI,EAAE;IACV;EACJ,CAAC;EACDW,OAAO,EAAE,EAAE;EACXD,QAAQ,EAAE,CAAC,YAAY,EAAE,KAAK;AAClC,CAAC,CAAC
;AAEF,OAAO,IAAMkG,kBAAyE,GAAG;EACrFjH,KAAK,EAAE,cAAc;EACrBC,WAAW,EAAE,yBAAyB;EACtCC,OAAO,EAAE,CAAC;EACVC,cAAc,EAAE,KAAK;EACrBC,UAAU,EAAE,YAAY;EACxBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,UAAU,EAAE;MACRF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACD0G,MAAM,EAAE;MACJ7G,IAAI,EAAE;IACV;EACJ,CAAC;EACDW,OAAO,EAAE,EAAE;EACXD,QAAQ,EAAE,CAAC,YAAY,EAAE,QAAQ;AACrC,CAAiC;;AAGjC;AACA;AACA;AACA;AACA,OAAO,IAAMoG,kBAAgE,GAAGxH,YAAY,CAACI,qBAAqB,CAAC;EAC/GG,OAAO,EAAE,CAAC;EACVG,IAAI,EAAE,QAAQ;EACdD,UAAU,EAAE,IAAI;EAChBE,UAAU,EAAE;IACRwD,EAAE,EAAE;MACAzD,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDwB,IAAI,EAAE;MACF3B,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDN,IAAI,EAAE;IACV,CAAC;IACD+G,SAAS,EAAE;MACP/G,IAAI,EAAE,QAAQ;MACdO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,iBAAiB;MAC1BC,UAAU,EAAE;IAEhB,CAAC;IACDuG,SAAS,EAAE;MACPhH,IAAI,EAAE;IACV;EACJ,CAAC;EACDW,OAAO,EAAE,CAAC,WAAW,CAAC;EACtBD,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,WAAW;AAC/C,CAAC,CAAC;AAEF,OAAO,IAAMuG,wBAA4E,GAAG3H,YAAY,CAACI,qBAAqB,CAAC;EAC3HG,OAAO,EAAE,CAAC;EACVG,IAAI,EAAE,QAAQ;EACdD,UAAU,EAAE,IAAI;EAChBE,UAAU,EAAE;IACRwD,EAAE,EAAE;MACAzD,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDwB,IAAI,EAAE;MACF3B,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDN,IAAI,EAAE;IACV,CAAC;IACD+G,SAAS,EAAE;MACP/G,IAAI,EAAE,QAAQ;MACdO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,iBAAiB;MAC1BC,UAAU,EAAE;IAEhB,CAAC;IACDuG,SAAS,EAAE;MACPhH,IAAI,EAAE;IACV,CAAC;IACDmD,OAAO,EAAE;MACLnD,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACRmD,MAAM,EAAE;UACJpD,IAAI,EAAE;QACV,CAAC;QACDkH,KAAK,EAAE;UACHlH,IAAI,EAAE;QACV,CAAC;QACDmH,IAAI,EAAE;UACFnH,IAAI,EAAE;QACV,CAAC;QACDoH,OAAO,EAAE;UACLpH,IAAI,EAAE;QACV,CAAC;QACDqH,GAAG,EAAE;UACDrH,IAAI,EAAE,QAAQ;UACdC,UAAU,EAAE;YACRqH,GAAG,EAAE;cACDtH,IAAI,EAAE;YACV,CAAC;YACDuH,GAAG,EAAE;cACDvH,IAAI,EAAE;YACV;UACJ;QACJ;MACJ;IACJ;EACJ,CAAC;EACDW,OAAO,EAAE,CAAC,WAAW,CAAC;EACtBD,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,WAAW;AAC/C,CAAC,CAAC;;AAGF;AACA;AACA;AACA;AACA,OAAO,IAAM8G,0BAAwE,GAAGlI,YAAY,CAACI,qBAAqB,CAAC;EACvHG
,OAAO,EAAE,CAAC;EACVG,IAAI,EAAE,QAAQ;EACdD,UAAU,EAAE,IAAI;EAChBE,UAAU,EAAE;IACRwD,EAAE,EAAE;MACAzD,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDwB,IAAI,EAAE;MACF3B,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDN,IAAI,EAAE,QAAQ;MACdO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,IAAI;MACbC,UAAU,EAAE;IAChB,CAAC;IACDsG,SAAS,EAAE;MACP/G,IAAI,EAAE,QAAQ;MACdO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,iBAAiB;MAC1BC,UAAU,EAAE;IAChB,CAAC;IACDuG,SAAS,EAAE;MACPhH,IAAI,EAAE;IACV;EACJ,CAAC;EACDW,OAAO,EAAE,CAAC,MAAM,EAAE,KAAK,EAAE,WAAW,CAAC;EACrCD,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,WAAW;AAC/C,CAAC,CAAC;AAEF,OAAO,IAAM+G,iCAMX,GAAGnI,YAAY,CAACI,qBAAqB,CAAC;EACpCG,OAAO,EAAE,CAAC;EACVE,UAAU,EAAE,IAAI;EAChBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRwD,EAAE,EAAE;MACAzD,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDwB,IAAI,EAAE;MACF3B,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDG,GAAG,EAAE;MACDN,IAAI,EAAE,QAAQ;MACdO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,IAAI;MACbC,UAAU,EAAE;IAChB,CAAC;IACDiH,SAAS,EAAE;MACP1H,IAAI,EAAE,QAAQ;MACdO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,iBAAiB;MAC1BC,UAAU,EAAE;IAChB,CAAC;IACDsG,SAAS,EAAE;MACP/G,IAAI,EAAE,QAAQ;MACdO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,iBAAiB;MAC1BC,UAAU,EAAE;IAChB;EACJ,CAAC;EACDE,OAAO,EAAE,CACL,CAAC,MAAM,EAAE,IAAI,CAAC,EACd,CAAC,KAAK,EAAE,IAAI,CAAC,EACb,CAAC,WAAW,EAAE,WAAW,EAAE,IAAI,CAAC,CACnC;EACDD,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,EAAE,WAAW;AAC/C,CAAC,CAAC;AAEF,OAAO,IAAMiH,0BAAiF,GAAGrI,YAAY,CAACI,qBAAqB,CAAC;EAChIG,OAAO,EAAE,CAAC;EACVE,UAAU,EAAE,IAAI;EAChBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRwD,EAAE,EAAE;MACAzD,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDwB,IAAI,EAAE;MACF3B,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDyH,GAAG,EAAE;MACD5H,IAAI,EAAE,QAAQ;MACdC,UAAU,EAAE;QACR0B,IAAI,EAAE;UACF3B,IAAI,EAAE,QAAQ;UACdG,SAAS,EAAE;QACf,CAAC;QACD0H,OAAO,EAAE;UACL7H,IAAI,EAAE,QAAQ;UACdC,UAAU,EAAE;YACR6H,QAAQ,EAAE;cACN9H,IAAI,EAAE,QAAQ;cACdG,SAAS,EAAE;YACf,CAAC;YACD4H,YAAY,EAAE;cACV/H,IAAI,EAAE,OAAO;cACb8D,KAAK,EAAE;gBACH9D,IAAI,EAAE,QAAQ;gBACdC,UAAU,EAAE;kBACR0B,IAAI,
EAAE;oBACF3B,IAAI,EAAE,QAAQ;oBACdG,SAAS,EAAE;kBACf;gBACJ;cACJ;YACJ;UACJ;QACJ;MACJ;IACJ;EACJ,CAAC;EACDO,QAAQ,EAAE,CACN,IAAI,CACP;EACDC,OAAO,EAAE,CACL,MAAM,EACN,UAAU,EACV,sBAAsB;AAE9B,CAAC,CAAC;AAEF,OAAO,IAAMqH,kBAA4E,GAAG1I,YAAY,CAACI,qBAAqB,CAAC;EAC3HG,OAAO,EAAE,CAAC;EACVD,WAAW,EAAE,+CAA+C;EAC5DG,UAAU,EAAE,IAAI;EAChBC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRwD,EAAE,EAAE;MACAzD,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDwB,IAAI,EAAE;MACF3B,IAAI,EAAE;IACV,CAAC;IACDM,GAAG,EAAE;MACDV,WAAW,EAAE,cAAc;MAC3BI,IAAI,EAAE,SAAS;MACfO,OAAO,EAAE,CAAC;MACVC,OAAO,EAAE,GAAG;MACZC,UAAU,EAAE;IAChB;EACJ,CAAC;EACDC,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,KAAK,CAAC;EAC/BC,OAAO,EAAE,CACL,CAAC,KAAK,EAAE,IAAI,CAAC;AAErB,CAAC,CAAC;AAGF,OAAO,SAASsH,oBAAoBA,CAChCC,MAA+B,EACR;EACvB,IAAMrC,GAAG,GAAGrG,SAAS,CAAC0I,MAAM,CAAC;EAC7BrC,GAAG,CAAC/F,cAAc,GAAG,IAAI;EACzB,OAAO+F,GAAG;AACd","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/test-utils/test-util.js b/dist/esm/plugins/test-utils/test-util.js deleted file mode 100644 index 5ba1df4b56b..00000000000 --- a/dist/esm/plugins/test-utils/test-util.js +++ /dev/null @@ -1,40 +0,0 @@ -import assert from 'assert'; -import { requestIdlePromise } from "../utils/index.js"; -export function testMultipleTimes(times, title, test) { - new Array(times).fill(0).forEach(() => { - it(title, test); - }); -} -export async function ensureCollectionsHaveEqualState(c1, c2) { - await requestIdlePromise(); - var getJson = async collection => { - var docs = await collection.find().exec(); - return docs.map(d => d.toJSON()); - }; - var json1 = await getJson(c1); - var json2 = await getJson(c2); - try { - assert.deepStrictEqual(json1, json2); - } catch (err) { - console.error('ensureCollectionsHaveEqualState() states not equal:'); - console.dir({ - [c1.name]: json1, - [c2.name]: json2 - }); - throw err; - } -} -export function ensureReplicationHasNoErrors(replicationState) { - /** - * We do not have to unsubscribe because the observable will cancel anyway. 
- */ - replicationState.error$.subscribe(err => { - console.error('ensureReplicationHasNoErrors() has error:'); - console.log(err); - if (err?.parameters?.errors) { - throw err.parameters.errors[0]; - } - throw err; - }); -} -//# sourceMappingURL=test-util.js.map \ No newline at end of file diff --git a/dist/esm/plugins/test-utils/test-util.js.map b/dist/esm/plugins/test-utils/test-util.js.map deleted file mode 100644 index 1bc64bb8b01..00000000000 --- a/dist/esm/plugins/test-utils/test-util.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"test-util.js","names":["assert","requestIdlePromise","testMultipleTimes","times","title","test","Array","fill","forEach","it","ensureCollectionsHaveEqualState","c1","c2","getJson","collection","docs","find","exec","map","d","toJSON","json1","json2","deepStrictEqual","err","console","error","dir","name","ensureReplicationHasNoErrors","replicationState","error$","subscribe","log","parameters","errors"],"sources":["../../../../src/plugins/test-utils/test-util.ts"],"sourcesContent":["import type { Func } from 'mocha';\nimport assert from 'assert';\nimport type { RxCollection } from '../../types';\nimport { requestIdlePromise } from '../utils/index.ts';\nimport type { RxReplicationState } from '../replication/index.ts';\n\nexport function testMultipleTimes(times: number, title: string, test: Func) {\n new Array(times).fill(0).forEach(() => {\n it(title, test);\n });\n}\n\nexport async function ensureCollectionsHaveEqualState(\n c1: RxCollection,\n c2: RxCollection\n) {\n await requestIdlePromise();\n const getJson = async (collection: RxCollection) => {\n const docs = await collection.find().exec();\n return docs.map(d => d.toJSON());\n };\n const json1 = await getJson(c1);\n const json2 = await getJson(c2);\n try {\n assert.deepStrictEqual(\n json1,\n json2\n );\n } catch (err) {\n console.error('ensureCollectionsHaveEqualState() states not equal:');\n console.dir({\n [c1.name]: json1,\n [c2.name]: json2\n });\n throw err;\n 
}\n}\n\nexport function ensureReplicationHasNoErrors(replicationState: RxReplicationState) {\n /**\n * We do not have to unsubscribe because the observable will cancel anyway.\n */\n replicationState.error$.subscribe(err => {\n console.error('ensureReplicationHasNoErrors() has error:');\n console.log(err);\n if (err?.parameters?.errors) {\n throw err.parameters.errors[0];\n }\n throw err;\n });\n}\n"],"mappings":"AACA,OAAOA,MAAM,MAAM,QAAQ;AAE3B,SAASC,kBAAkB,QAAQ,mBAAmB;AAGtD,OAAO,SAASC,iBAAiBA,CAACC,KAAa,EAAEC,KAAa,EAAEC,IAAU,EAAE;EACxE,IAAIC,KAAK,CAACH,KAAK,CAAC,CAACI,IAAI,CAAC,CAAC,CAAC,CAACC,OAAO,CAAC,MAAM;IACnCC,EAAE,CAACL,KAAK,EAAEC,IAAI,CAAC;EACnB,CAAC,CAAC;AACN;AAEA,OAAO,eAAeK,+BAA+BA,CACjDC,EAA2B,EAC3BC,EAA2B,EAC7B;EACE,MAAMX,kBAAkB,CAAC,CAAC;EAC1B,IAAMY,OAAO,GAAG,MAAOC,UAAmC,IAAK;IAC3D,IAAMC,IAAI,GAAG,MAAMD,UAAU,CAACE,IAAI,CAAC,CAAC,CAACC,IAAI,CAAC,CAAC;IAC3C,OAAOF,IAAI,CAACG,GAAG,CAACC,CAAC,IAAIA,CAAC,CAACC,MAAM,CAAC,CAAC,CAAC;EACpC,CAAC;EACD,IAAMC,KAAK,GAAG,MAAMR,OAAO,CAACF,EAAE,CAAC;EAC/B,IAAMW,KAAK,GAAG,MAAMT,OAAO,CAACD,EAAE,CAAC;EAC/B,IAAI;IACAZ,MAAM,CAACuB,eAAe,CAClBF,KAAK,EACLC,KACJ,CAAC;EACL,CAAC,CAAC,OAAOE,GAAG,EAAE;IACVC,OAAO,CAACC,KAAK,CAAC,qDAAqD,CAAC;IACpED,OAAO,CAACE,GAAG,CAAC;MACR,CAAChB,EAAE,CAACiB,IAAI,GAAGP,KAAK;MAChB,CAACT,EAAE,CAACgB,IAAI,GAAGN;IACf,CAAC,CAAC;IACF,MAAME,GAAG;EACb;AACJ;AAEA,OAAO,SAASK,4BAA4BA,CAACC,gBAA8C,EAAE;EACzF;AACJ;AACA;EACIA,gBAAgB,CAACC,MAAM,CAACC,SAAS,CAACR,GAAG,IAAI;IACrCC,OAAO,CAACC,KAAK,CAAC,2CAA2C,CAAC;IAC1DD,OAAO,CAACQ,GAAG,CAACT,GAAG,CAAC;IAChB,IAAIA,GAAG,EAAEU,UAAU,EAAEC,MAAM,EAAE;MACzB,MAAMX,GAAG,CAACU,UAAU,CAACC,MAAM,CAAC,CAAC,CAAC;IAClC;IACA,MAAMX,GAAG;EACb,CAAC,CAAC;AACN","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/update/index.js b/dist/esm/plugins/update/index.js deleted file mode 100644 index 086062cf0f1..00000000000 --- a/dist/esm/plugins/update/index.js +++ /dev/null @@ -1,35 +0,0 @@ -/** - * this plugin allows delta-updates with mongo-like-syntax - * It's using 
mingo internally - * @link https://github.com/kofrasa/mingo - */ -import { runQueryUpdateFunction } from "../../rx-query-helper.js"; -import { mingoUpdater } from "./mingo-updater.js"; -export function incrementalUpdate(updateObj) { - return this.incrementalModify(docData => { - var newDocData = mingoUpdater(docData, updateObj); - return newDocData; - }); -} -export function update(updateObj) { - var oldDocData = this._data; - var newDocData = mingoUpdater(oldDocData, updateObj); - return this._saveData(newDocData, oldDocData); -} -export async function RxQueryUpdate(updateObj) { - return runQueryUpdateFunction(this.asRxQuery, doc => doc.update(updateObj)); -} -export var RxDBUpdatePlugin = { - name: 'update', - rxdb: true, - prototypes: { - RxDocument: proto => { - proto.update = update; - proto.incrementalUpdate = incrementalUpdate; - }, - RxQuery: proto => { - proto.update = RxQueryUpdate; - } - } -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/update/index.js.map b/dist/esm/plugins/update/index.js.map deleted file mode 100644 index 1a8eb31ce2d..00000000000 --- a/dist/esm/plugins/update/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["runQueryUpdateFunction","mingoUpdater","incrementalUpdate","updateObj","incrementalModify","docData","newDocData","update","oldDocData","_data","_saveData","RxQueryUpdate","asRxQuery","doc","RxDBUpdatePlugin","name","rxdb","prototypes","RxDocument","proto","RxQuery"],"sources":["../../../../src/plugins/update/index.ts"],"sourcesContent":["/**\n * this plugin allows delta-updates with mongo-like-syntax\n * It's using mingo internally\n * @link https://github.com/kofrasa/mingo\n */\nimport { runQueryUpdateFunction } from '../../rx-query-helper.ts';\nimport type {\n RxDocument,\n RxQuery,\n RxPlugin,\n UpdateQuery\n} from '../../types/index.d.ts';\nimport { mingoUpdater } from './mingo-updater.ts';\n\nexport function incrementalUpdate(\n this: 
RxDocument,\n updateObj: UpdateQuery\n): Promise> {\n return this.incrementalModify((docData) => {\n const newDocData = mingoUpdater(docData, updateObj);\n return newDocData;\n });\n}\n\nexport function update(\n this: RxDocument,\n updateObj: UpdateQuery\n): Promise> {\n const oldDocData = this._data;\n const newDocData = mingoUpdater(oldDocData, updateObj);\n return this._saveData(newDocData, oldDocData);\n}\n\nexport async function RxQueryUpdate(\n this: RxQuery,\n updateObj: UpdateQuery\n): Promise {\n return runQueryUpdateFunction(\n this.asRxQuery,\n (doc) => doc.update(updateObj),\n );\n}\n\n\nexport const RxDBUpdatePlugin: RxPlugin = {\n name: 'update',\n rxdb: true,\n prototypes: {\n RxDocument: (proto: any) => {\n proto.update = update;\n proto.incrementalUpdate = incrementalUpdate;\n },\n RxQuery: (proto: any) => {\n proto.update = RxQueryUpdate;\n }\n }\n};\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA,SAASA,sBAAsB,QAAQ,0BAA0B;AAOjE,SAASC,YAAY,QAAQ,oBAAoB;AAEjD,OAAO,SAASC,iBAAiBA,CAE7BC,SAAiC,EACH;EAC9B,OAAO,IAAI,CAACC,iBAAiB,CAAEC,OAAO,IAAK;IACvC,IAAMC,UAAU,GAAGL,YAAY,CAAYI,OAAO,EAAEF,SAAS,CAAC;IAC9D,OAAOG,UAAU;EACrB,CAAC,CAAC;AACN;AAEA,OAAO,SAASC,MAAMA,CAElBJ,SAAiC,EACH;EAC9B,IAAMK,UAAU,GAAG,IAAI,CAACC,KAAK;EAC7B,IAAMH,UAAU,GAAGL,YAAY,CAACO,UAAU,EAAEL,SAAS,CAAC;EACtD,OAAO,IAAI,CAACO,SAAS,CAACJ,UAAU,EAAEE,UAAU,CAAC;AACjD;AAEA,OAAO,eAAeG,aAAaA,CAE/BR,SAA2B,EACf;EACZ,OAAOH,sBAAsB,CACzB,IAAI,CAACY,SAAS,EACbC,GAAG,IAAKA,GAAG,CAACN,MAAM,CAACJ,SAAS,CACjC,CAAC;AACL;AAGA,OAAO,IAAMW,gBAA0B,GAAG;EACtCC,IAAI,EAAE,QAAQ;EACdC,IAAI,EAAE,IAAI;EACVC,UAAU,EAAE;IACRC,UAAU,EAAGC,KAAU,IAAK;MACxBA,KAAK,CAACZ,MAAM,GAAGA,MAAM;MACrBY,KAAK,CAACjB,iBAAiB,GAAGA,iBAAiB;IAC/C,CAAC;IACDkB,OAAO,EAAGD,KAAU,IAAK;MACrBA,KAAK,CAACZ,MAAM,GAAGI,aAAa;IAChC;EACJ;AACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/update/mingo-updater.js b/dist/esm/plugins/update/mingo-updater.js deleted file mode 100644 index e9f95c9f0a8..00000000000 --- 
a/dist/esm/plugins/update/mingo-updater.js +++ /dev/null @@ -1,21 +0,0 @@ -/** - * Custom build of the mingo updater for smaller build size - */ - -import { createUpdater } from "mingo/updater"; -import { clone } from "../utils/index.js"; -var updater; -export function mingoUpdater(d, op) { - if (!updater) { - var updateObject = createUpdater({ - cloneMode: "none" - }); - updater = (d, op) => { - var cloned = clone(d); - updateObject(cloned, op); - return cloned; - }; - } - return updater(d, op); -} -//# sourceMappingURL=mingo-updater.js.map \ No newline at end of file diff --git a/dist/esm/plugins/update/mingo-updater.js.map b/dist/esm/plugins/update/mingo-updater.js.map deleted file mode 100644 index aecb0112dfe..00000000000 --- a/dist/esm/plugins/update/mingo-updater.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"mingo-updater.js","names":["createUpdater","clone","updater","mingoUpdater","d","op","updateObject","cloneMode","cloned"],"sources":["../../../../src/plugins/update/mingo-updater.ts"],"sourcesContent":["/**\n * Custom build of the mingo updater for smaller build size\n */\n\nimport {\n createUpdater\n} from \"mingo/updater\";\nimport {\n clone\n} from '../utils/index.ts';\nimport type {\n UpdateQuery\n} from '../../types/index';\n\nlet updater: any;\nexport function mingoUpdater(\n d: T, op: UpdateQuery\n): T {\n if (!updater) {\n const updateObject = createUpdater({ cloneMode: \"none\" });\n updater = (d: T, op: UpdateQuery) => {\n const cloned = clone(d);\n updateObject(cloned as any, op as any);\n return cloned;\n }\n }\n return updater(d, 
op);\n}\n"],"mappings":"AAAA;AACA;AACA;;AAEA,SACIA,aAAa,QACV,eAAe;AACtB,SACIC,KAAK,QACF,mBAAmB;AAK1B,IAAIC,OAAY;AAChB,OAAO,SAASC,YAAYA,CACxBC,CAAI,EAAEC,EAAkB,EACvB;EACD,IAAI,CAACH,OAAO,EAAE;IACV,IAAMI,YAAY,GAAGN,aAAa,CAAC;MAAEO,SAAS,EAAE;IAAO,CAAC,CAAC;IACzDL,OAAO,GAAGA,CAACE,CAAI,EAAEC,EAAkB,KAAK;MACpC,IAAMG,MAAM,GAAGP,KAAK,CAACG,CAAC,CAAC;MACvBE,YAAY,CAACE,MAAM,EAASH,EAAS,CAAC;MACtC,OAAOG,MAAM;IACjB,CAAC;EACL;EACA,OAAON,OAAO,CAACE,CAAC,EAAEC,EAAE,CAAC;AACzB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/utils/index.js b/dist/esm/plugins/utils/index.js deleted file mode 100644 index d08023c51c8..00000000000 --- a/dist/esm/plugins/utils/index.js +++ /dev/null @@ -1,19 +0,0 @@ -export * from "./utils-array.js"; -export * from "./utils-blob.js"; -export * from "./utils-base64.js"; -export * from "./utils-revision.js"; -export * from "./utils-document.js"; -export * from "./utils-hash.js"; -export * from "./utils-promise.js"; -export * from "./utils-regex.js"; -export * from "./utils-string.js"; -export * from "./utils-object-deep-equal.js"; -export * from "./utils-object-dot-prop.js"; -export * from "./utils-object.js"; -export * from "./utils-map.js"; -export * from "./utils-error.js"; -export * from "./utils-time.js"; -export * from "./utils-other.js"; -export * from "./utils-rxdb-version.js"; -export * from "./utils-global.js"; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/utils/index.js.map b/dist/esm/plugins/utils/index.js.map deleted file mode 100644 index bb12209e2c7..00000000000 --- a/dist/esm/plugins/utils/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":[],"sources":["../../../../src/plugins/utils/index.ts"],"sourcesContent":["export * from './utils-array.ts';\nexport * from './utils-blob.ts';\nexport * from './utils-base64.ts';\nexport * from './utils-revision.ts';\nexport * from './utils-document.ts';\nexport * from './utils-hash.ts';\nexport * 
from './utils-promise.ts';\nexport * from './utils-regex.ts';\nexport * from './utils-string.ts';\nexport * from './utils-object-deep-equal.ts';\nexport * from './utils-object-dot-prop.ts';\nexport * from './utils-object.ts';\nexport * from './utils-map.ts';\nexport * from './utils-error.ts';\nexport * from './utils-time.ts';\nexport * from './utils-other.ts';\nexport * from './utils-rxdb-version.ts';\nexport * from './utils-global.ts';\n"],"mappings":"AAAA,cAAc,kBAAkB;AAChC,cAAc,iBAAiB;AAC/B,cAAc,mBAAmB;AACjC,cAAc,qBAAqB;AACnC,cAAc,qBAAqB;AACnC,cAAc,iBAAiB;AAC/B,cAAc,oBAAoB;AAClC,cAAc,kBAAkB;AAChC,cAAc,mBAAmB;AACjC,cAAc,8BAA8B;AAC5C,cAAc,4BAA4B;AAC1C,cAAc,mBAAmB;AACjC,cAAc,gBAAgB;AAC9B,cAAc,kBAAkB;AAChC,cAAc,iBAAiB;AAC/B,cAAc,kBAAkB;AAChC,cAAc,yBAAyB;AACvC,cAAc,mBAAmB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-array.js b/dist/esm/plugins/utils/utils-array.js deleted file mode 100644 index e8299c44801..00000000000 --- a/dist/esm/plugins/utils/utils-array.js +++ /dev/null @@ -1,140 +0,0 @@ -export function lastOfArray(ar) { - return ar[ar.length - 1]; -} - -/** - * shuffle the given array - */ -export function shuffleArray(arr) { - return arr.slice(0).sort(() => Math.random() - 0.5); -} -export function randomOfArray(arr) { - var randomElement = arr[Math.floor(Math.random() * arr.length)]; - return randomElement; -} -export function toArray(input) { - return Array.isArray(input) ? 
input.slice(0) : [input]; -} - -/** - * Split array with items into smaller arrays with items - * @link https://stackoverflow.com/a/7273794/3443137 - */ -export function batchArray(array, batchSize) { - array = array.slice(0); - var ret = []; - while (array.length) { - var batch = array.splice(0, batchSize); - ret.push(batch); - } - return ret; -} - -/** - * @link https://stackoverflow.com/a/15996017 - */ -export function removeOneFromArrayIfMatches(ar, condition) { - ar = ar.slice(); - var i = ar.length; - var done = false; - while (i-- && !done) { - if (condition(ar[i])) { - done = true; - ar.splice(i, 1); - } - } - return ar; -} - -/** - * returns true if the supplied argument is either an Array or a Readonly> - */ -export function isMaybeReadonlyArray(x) { - // While this looks strange, it's a workaround for an issue in TypeScript: - // https://github.com/microsoft/TypeScript/issues/17002 - // - // The problem is that `Array.isArray` as a type guard returns `false` for a readonly array, - // but at runtime the object is an array and the runtime call to `Array.isArray` would return `true`. - // The type predicate here allows for both `Array` and `Readonly>` to pass a type check while - // still performing runtime type inspection. - return Array.isArray(x); -} -export function isOneItemOfArrayInOtherArray(ar1, ar2) { - for (var i = 0; i < ar1.length; i++) { - var el = ar1[i]; - var has = ar2.includes(el); - if (has) { - return true; - } - } - return false; -} - -/** - * Use this in array.filter() to remove all empty slots - * and have the correct typings afterwards. 
- * @link https://stackoverflow.com/a/46700791/3443137 - */ -export function arrayFilterNotEmpty(value) { - if (value === null || value === undefined) { - return false; - } - return true; -} -export function countUntilNotMatching(ar, matchingFn) { - var count = 0; - var idx = -1; - for (var item of ar) { - idx = idx + 1; - var matching = matchingFn(item, idx); - if (matching) { - count = count + 1; - } else { - break; - } - } - return count; -} -export async function asyncFilter(array, predicate) { - var filters = await Promise.all(array.map(predicate)); - return array.filter((...[, index]) => filters[index]); -} - -/** - * @link https://stackoverflow.com/a/3762735 - */ -export function sumNumberArray(array) { - var count = 0; - for (var i = array.length; i--;) { - count += array[i]; - } - return count; -} -export function maxOfNumbers(arr) { - return Math.max(...arr); -} - -/** - * Appends the given documents to the given array. - * This will mutate the first given array. - * Mostly used as faster alternative to Array.concat() - * because .concat() is so slow. 
- * @link https://www.measurethat.net/Benchmarks/Show/4223/0/array-concat-vs-spread-operator-vs-push#latest_results_block - */ -export function appendToArray(ar, add) { - var amount = add.length; - for (var i = 0; i < amount; ++i) { - var element = add[i]; - ar.push(element); - } -} - -/** - * @link https://gist.github.com/telekosmos/3b62a31a5c43f40849bb - */ -export function uniqueArray(arrArg) { - return arrArg.filter(function (elem, pos, arr) { - return arr.indexOf(elem) === pos; - }); -} -//# sourceMappingURL=utils-array.js.map \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-array.js.map b/dist/esm/plugins/utils/utils-array.js.map deleted file mode 100644 index 215412769f2..00000000000 --- a/dist/esm/plugins/utils/utils-array.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-array.js","names":["lastOfArray","ar","length","shuffleArray","arr","slice","sort","Math","random","randomOfArray","randomElement","floor","toArray","input","Array","isArray","batchArray","array","batchSize","ret","batch","splice","push","removeOneFromArrayIfMatches","condition","i","done","isMaybeReadonlyArray","x","isOneItemOfArrayInOtherArray","ar1","ar2","el","has","includes","arrayFilterNotEmpty","value","undefined","countUntilNotMatching","matchingFn","count","idx","item","matching","asyncFilter","predicate","filters","Promise","all","map","filter","index","sumNumberArray","maxOfNumbers","max","appendToArray","add","amount","element","uniqueArray","arrArg","elem","pos","indexOf"],"sources":["../../../../src/plugins/utils/utils-array.ts"],"sourcesContent":["import type {\n MaybePromise,\n MaybeReadonly\n} from '../../types/index.d.ts';\n\nexport function lastOfArray(ar: T[]): T | undefined {\n return ar[ar.length - 1];\n}\n\n/**\n * shuffle the given array\n */\nexport function shuffleArray(arr: T[]): T[] {\n return arr.slice(0).sort(() => (Math.random() - 0.5));\n}\n\nexport function randomOfArray(arr: T[]): T {\n const randomElement = 
arr[Math.floor(Math.random() * arr.length)];\n return randomElement;\n}\n\n\nexport function toArray(input: T | T[] | Readonly | Readonly): T[] {\n return Array.isArray(input) ? (input as any[]).slice(0) : [input];\n}\n\n/**\n * Split array with items into smaller arrays with items\n * @link https://stackoverflow.com/a/7273794/3443137\n */\nexport function batchArray(array: T[], batchSize: number): T[][] {\n array = array.slice(0);\n const ret: T[][] = [];\n while (array.length) {\n const batch = array.splice(0, batchSize);\n ret.push(batch);\n }\n return ret;\n}\n\n/**\n * @link https://stackoverflow.com/a/15996017\n */\nexport function removeOneFromArrayIfMatches(ar: T[], condition: (x: T) => boolean): T[] {\n ar = ar.slice();\n let i = ar.length;\n let done = false;\n while (i-- && !done) {\n if (condition(ar[i])) {\n done = true;\n ar.splice(i, 1);\n }\n }\n return ar;\n}\n\n/**\n * returns true if the supplied argument is either an Array or a Readonly>\n */\nexport function isMaybeReadonlyArray(x: any): x is MaybeReadonly {\n // While this looks strange, it's a workaround for an issue in TypeScript:\n // https://github.com/microsoft/TypeScript/issues/17002\n //\n // The problem is that `Array.isArray` as a type guard returns `false` for a readonly array,\n // but at runtime the object is an array and the runtime call to `Array.isArray` would return `true`.\n // The type predicate here allows for both `Array` and `Readonly>` to pass a type check while\n // still performing runtime type inspection.\n return Array.isArray(x);\n}\n\n\n\nexport function isOneItemOfArrayInOtherArray(ar1: T[], ar2: T[]): boolean {\n for (let i = 0; i < ar1.length; i++) {\n const el = ar1[i];\n const has = ar2.includes(el);\n if (has) {\n return true;\n }\n }\n return false;\n}\n\n\n/**\n * Use this in array.filter() to remove all empty slots\n * and have the correct typings afterwards.\n * @link https://stackoverflow.com/a/46700791/3443137\n */\nexport function 
arrayFilterNotEmpty(value: TValue | null | undefined): value is TValue {\n if (value === null || value === undefined) {\n return false;\n }\n return true;\n}\n\nexport function countUntilNotMatching(\n ar: T[],\n matchingFn: (v: T, idx: number) => boolean\n): number {\n let count = 0;\n let idx = -1;\n for (const item of ar) {\n idx = idx + 1;\n const matching = matchingFn(item, idx);\n if (matching) {\n count = count + 1;\n } else {\n break;\n }\n }\n return count;\n}\n\nexport async function asyncFilter(array: T[], predicate: (item: T, index: number, a: T[]) => MaybePromise): Promise {\n const filters = await Promise.all(\n array.map(predicate)\n );\n\n return array.filter((...[, index]) => filters[index]);\n}\n\n/**\n * @link https://stackoverflow.com/a/3762735\n */\nexport function sumNumberArray(array: number[]): number {\n let count = 0;\n for (let i = array.length; i--;) {\n count += array[i];\n }\n return count;\n}\n\nexport function maxOfNumbers(arr: number[]): number {\n return Math.max(...arr);\n}\n\n\n/**\n * Appends the given documents to the given array.\n * This will mutate the first given array.\n * Mostly used as faster alternative to Array.concat()\n * because .concat() is so slow.\n * @link https://www.measurethat.net/Benchmarks/Show/4223/0/array-concat-vs-spread-operator-vs-push#latest_results_block\n */\nexport function appendToArray(ar: T[], add: T[] | readonly T[]): void {\n const amount = add.length;\n for (let i = 0; i < amount; ++i) {\n const element = add[i];\n ar.push(element);\n }\n}\n\n/**\n * @link https://gist.github.com/telekosmos/3b62a31a5c43f40849bb\n */\nexport function uniqueArray(arrArg: string[]): string[] {\n return arrArg.filter(function (elem, pos, arr) {\n return arr.indexOf(elem) === pos;\n 
});\n}\n\n"],"mappings":"AAKA,OAAO,SAASA,WAAWA,CAAIC,EAAO,EAAiB;EACnD,OAAOA,EAAE,CAACA,EAAE,CAACC,MAAM,GAAG,CAAC,CAAC;AAC5B;;AAEA;AACA;AACA;AACA,OAAO,SAASC,YAAYA,CAAIC,GAAQ,EAAO;EAC3C,OAAOA,GAAG,CAACC,KAAK,CAAC,CAAC,CAAC,CAACC,IAAI,CAAC,MAAOC,IAAI,CAACC,MAAM,CAAC,CAAC,GAAG,GAAI,CAAC;AACzD;AAEA,OAAO,SAASC,aAAaA,CAAIL,GAAQ,EAAK;EAC1C,IAAMM,aAAa,GAAGN,GAAG,CAACG,IAAI,CAACI,KAAK,CAACJ,IAAI,CAACC,MAAM,CAAC,CAAC,GAAGJ,GAAG,CAACF,MAAM,CAAC,CAAC;EACjE,OAAOQ,aAAa;AACxB;AAGA,OAAO,SAASE,OAAOA,CAAIC,KAA4C,EAAO;EAC1E,OAAOC,KAAK,CAACC,OAAO,CAACF,KAAK,CAAC,GAAIA,KAAK,CAAWR,KAAK,CAAC,CAAC,CAAC,GAAG,CAACQ,KAAK,CAAC;AACrE;;AAEA;AACA;AACA;AACA;AACA,OAAO,SAASG,UAAUA,CAAIC,KAAU,EAAEC,SAAiB,EAAS;EAChED,KAAK,GAAGA,KAAK,CAACZ,KAAK,CAAC,CAAC,CAAC;EACtB,IAAMc,GAAU,GAAG,EAAE;EACrB,OAAOF,KAAK,CAACf,MAAM,EAAE;IACjB,IAAMkB,KAAK,GAAGH,KAAK,CAACI,MAAM,CAAC,CAAC,EAAEH,SAAS,CAAC;IACxCC,GAAG,CAACG,IAAI,CAACF,KAAK,CAAC;EACnB;EACA,OAAOD,GAAG;AACd;;AAEA;AACA;AACA;AACA,OAAO,SAASI,2BAA2BA,CAAItB,EAAO,EAAEuB,SAA4B,EAAO;EACvFvB,EAAE,GAAGA,EAAE,CAACI,KAAK,CAAC,CAAC;EACf,IAAIoB,CAAC,GAAGxB,EAAE,CAACC,MAAM;EACjB,IAAIwB,IAAI,GAAG,KAAK;EAChB,OAAOD,CAAC,EAAE,IAAI,CAACC,IAAI,EAAE;IACjB,IAAIF,SAAS,CAACvB,EAAE,CAACwB,CAAC,CAAC,CAAC,EAAE;MAClBC,IAAI,GAAG,IAAI;MACXzB,EAAE,CAACoB,MAAM,CAACI,CAAC,EAAE,CAAC,CAAC;IACnB;EACJ;EACA,OAAOxB,EAAE;AACb;;AAEA;AACA;AACA;AACA,OAAO,SAAS0B,oBAAoBA,CAACC,CAAM,EAA6B;EACpE;EACA;EACA;EACA;EACA;EACA;EACA;EACA,OAAOd,KAAK,CAACC,OAAO,CAACa,CAAC,CAAC;AAC3B;AAIA,OAAO,SAASC,4BAA4BA,CAAIC,GAAQ,EAAEC,GAAQ,EAAW;EACzE,KAAK,IAAIN,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGK,GAAG,CAAC5B,MAAM,EAAEuB,CAAC,EAAE,EAAE;IACjC,IAAMO,EAAE,GAAGF,GAAG,CAACL,CAAC,CAAC;IACjB,IAAMQ,GAAG,GAAGF,GAAG,CAACG,QAAQ,CAACF,EAAE,CAAC;IAC5B,IAAIC,GAAG,EAAE;MACL,OAAO,IAAI;IACf;EACJ;EACA,OAAO,KAAK;AAChB;;AAGA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASE,mBAAmBA,CAASC,KAAgC,EAAmB;EAC3F,IAAIA,KAAK,KAAK,IAAI,IAAIA,KAAK,KAAKC,SAAS,EAAE;IACvC,OAAO,KAAK;EAChB;EACA,OAAO,IAAI;AACf;AAEA,OAAO,SAASC,qBAAqBA,CACjCrC,EAAO,EACPsC,UAA0C,EACpC;EACN,IAAIC,KAAK,GAAG
,CAAC;EACb,IAAIC,GAAG,GAAG,CAAC,CAAC;EACZ,KAAK,IAAMC,IAAI,IAAIzC,EAAE,EAAE;IACnBwC,GAAG,GAAGA,GAAG,GAAG,CAAC;IACb,IAAME,QAAQ,GAAGJ,UAAU,CAACG,IAAI,EAAED,GAAG,CAAC;IACtC,IAAIE,QAAQ,EAAE;MACVH,KAAK,GAAGA,KAAK,GAAG,CAAC;IACrB,CAAC,MAAM;MACH;IACJ;EACJ;EACA,OAAOA,KAAK;AAChB;AAEA,OAAO,eAAeI,WAAWA,CAAI3B,KAAU,EAAE4B,SAAoE,EAAgB;EACjI,IAAMC,OAAO,GAAG,MAAMC,OAAO,CAACC,GAAG,CAC7B/B,KAAK,CAACgC,GAAG,CAACJ,SAAS,CACvB,CAAC;EAED,OAAO5B,KAAK,CAACiC,MAAM,CAAC,CAAC,GAAG,GAAGC,KAAK,CAAC,KAAKL,OAAO,CAACK,KAAK,CAAC,CAAC;AACzD;;AAEA;AACA;AACA;AACA,OAAO,SAASC,cAAcA,CAACnC,KAAe,EAAU;EACpD,IAAIuB,KAAK,GAAG,CAAC;EACb,KAAK,IAAIf,CAAC,GAAGR,KAAK,CAACf,MAAM,EAAEuB,CAAC,EAAE,GAAG;IAC7Be,KAAK,IAAIvB,KAAK,CAACQ,CAAC,CAAC;EACrB;EACA,OAAOe,KAAK;AAChB;AAEA,OAAO,SAASa,YAAYA,CAACjD,GAAa,EAAU;EAChD,OAAOG,IAAI,CAAC+C,GAAG,CAAC,GAAGlD,GAAG,CAAC;AAC3B;;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASmD,aAAaA,CAAItD,EAAO,EAAEuD,GAAuB,EAAQ;EACrE,IAAMC,MAAM,GAAGD,GAAG,CAACtD,MAAM;EACzB,KAAK,IAAIuB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGgC,MAAM,EAAE,EAAEhC,CAAC,EAAE;IAC7B,IAAMiC,OAAO,GAAGF,GAAG,CAAC/B,CAAC,CAAC;IACtBxB,EAAE,CAACqB,IAAI,CAACoC,OAAO,CAAC;EACpB;AACJ;;AAEA;AACA;AACA;AACA,OAAO,SAASC,WAAWA,CAACC,MAAgB,EAAY;EACpD,OAAOA,MAAM,CAACV,MAAM,CAAC,UAAUW,IAAI,EAAEC,GAAG,EAAE1D,GAAG,EAAE;IAC3C,OAAOA,GAAG,CAAC2D,OAAO,CAACF,IAAI,CAAC,KAAKC,GAAG;EACpC,CAAC,CAAC;AACN","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-base64.js b/dist/esm/plugins/utils/utils-base64.js deleted file mode 100644 index 67ede549c56..00000000000 --- a/dist/esm/plugins/utils/utils-base64.js +++ /dev/null @@ -1,48 +0,0 @@ -/** - * NO! We cannot just use btoa() and atob() - * because they do not work correctly with binary data. - * @link https://stackoverflow.com/q/30106476/3443137 - */ -import { encode, decode } from 'js-base64'; - -/** - * atob() and btoa() do not work well with non ascii chars, - * so we have to use these helper methods instead. 
- * @link https://stackoverflow.com/a/30106551/3443137 - */ -// Encoding UTF8 -> base64 -export function b64EncodeUnicode(str) { - return encode(str); -} - -// Decoding base64 -> UTF8 -export function b64DecodeUnicode(str) { - return decode(str); -} - -/** - * @link https://stackoverflow.com/a/9458996/3443137 - */ -export function arrayBufferToBase64(buffer) { - var binary = ''; - var bytes = new Uint8Array(buffer); - var len = bytes.byteLength; - for (var i = 0; i < len; i++) { - binary += String.fromCharCode(bytes[i]); - } - return btoa(binary); -} - -/** - * @link https://stackoverflow.com/a/21797381 - */ -export function base64ToArrayBuffer(base64) { - var binary_string = atob(base64); - var len = binary_string.length; - var bytes = new Uint8Array(len); - for (var i = 0; i < len; i++) { - bytes[i] = binary_string.charCodeAt(i); - } - return bytes.buffer; -} -//# sourceMappingURL=utils-base64.js.map \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-base64.js.map b/dist/esm/plugins/utils/utils-base64.js.map deleted file mode 100644 index c44d4f4e66d..00000000000 --- a/dist/esm/plugins/utils/utils-base64.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-base64.js","names":["encode","decode","b64EncodeUnicode","str","b64DecodeUnicode","arrayBufferToBase64","buffer","binary","bytes","Uint8Array","len","byteLength","i","String","fromCharCode","btoa","base64ToArrayBuffer","base64","binary_string","atob","length","charCodeAt"],"sources":["../../../../src/plugins/utils/utils-base64.ts"],"sourcesContent":["\n/**\n * NO! 
We cannot just use btoa() and atob()\n * because they do not work correctly with binary data.\n * @link https://stackoverflow.com/q/30106476/3443137\n */\nimport { encode, decode } from 'js-base64';\n\n/**\n * atob() and btoa() do not work well with non ascii chars,\n * so we have to use these helper methods instead.\n * @link https://stackoverflow.com/a/30106551/3443137\n */\n// Encoding UTF8 -> base64\nexport function b64EncodeUnicode(str: string) {\n return encode(str);\n}\n\n// Decoding base64 -> UTF8\nexport function b64DecodeUnicode(str: string) {\n return decode(str);\n}\n\n/**\n * @link https://stackoverflow.com/a/9458996/3443137\n */\nexport function arrayBufferToBase64(buffer: ArrayBuffer) {\n let binary = '';\n const bytes = new Uint8Array(buffer);\n const len = bytes.byteLength;\n for (let i = 0; i < len; i++) {\n binary += String.fromCharCode(bytes[i]);\n }\n return btoa(binary);\n}\n\n\n/**\n * @link https://stackoverflow.com/a/21797381\n */\nexport function base64ToArrayBuffer(base64: string): ArrayBuffer {\n const binary_string = atob(base64);\n const len = binary_string.length;\n const bytes = new Uint8Array(len);\n for (let i = 0; i < len; i++) {\n bytes[i] = binary_string.charCodeAt(i);\n }\n return 
bytes.buffer;\n}\n"],"mappings":"AACA;AACA;AACA;AACA;AACA;AACA,SAASA,MAAM,EAAEC,MAAM,QAAQ,WAAW;;AAE1C;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASC,gBAAgBA,CAACC,GAAW,EAAE;EAC1C,OAAOH,MAAM,CAACG,GAAG,CAAC;AACtB;;AAEA;AACA,OAAO,SAASC,gBAAgBA,CAACD,GAAW,EAAE;EAC1C,OAAOF,MAAM,CAACE,GAAG,CAAC;AACtB;;AAEA;AACA;AACA;AACA,OAAO,SAASE,mBAAmBA,CAACC,MAAmB,EAAE;EACrD,IAAIC,MAAM,GAAG,EAAE;EACf,IAAMC,KAAK,GAAG,IAAIC,UAAU,CAACH,MAAM,CAAC;EACpC,IAAMI,GAAG,GAAGF,KAAK,CAACG,UAAU;EAC5B,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGF,GAAG,EAAEE,CAAC,EAAE,EAAE;IAC1BL,MAAM,IAAIM,MAAM,CAACC,YAAY,CAACN,KAAK,CAACI,CAAC,CAAC,CAAC;EAC3C;EACA,OAAOG,IAAI,CAACR,MAAM,CAAC;AACvB;;AAGA;AACA;AACA;AACA,OAAO,SAASS,mBAAmBA,CAACC,MAAc,EAAe;EAC7D,IAAMC,aAAa,GAAGC,IAAI,CAACF,MAAM,CAAC;EAClC,IAAMP,GAAG,GAAGQ,aAAa,CAACE,MAAM;EAChC,IAAMZ,KAAK,GAAG,IAAIC,UAAU,CAACC,GAAG,CAAC;EACjC,KAAK,IAAIE,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGF,GAAG,EAAEE,CAAC,EAAE,EAAE;IAC1BJ,KAAK,CAACI,CAAC,CAAC,GAAGM,aAAa,CAACG,UAAU,CAACT,CAAC,CAAC;EAC1C;EACA,OAAOJ,KAAK,CAACF,MAAM;AACvB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-blob.js b/dist/esm/plugins/utils/utils-blob.js deleted file mode 100644 index 042d71568fb..00000000000 --- a/dist/esm/plugins/utils/utils-blob.js +++ /dev/null @@ -1,57 +0,0 @@ -import { arrayBufferToBase64 } from "./utils-base64.js"; - -/** - * Since RxDB 13.0.0 we only use Blob instead of falling back to Buffer, - * because Node.js >18 supports Blobs anyway. 
- */ -/** - * depending if we are on node or browser, - * we have to use Buffer(node) or Blob(browser) - */ -export function createBlob(data, type) { - var blob = new Blob([data], { - type - }); - return blob; -} -export async function createBlobFromBase64(base64String, type) { - var base64Response = await fetch("data:" + type + ";base64," + base64String); - var blob = await base64Response.blob(); - return blob; -} -export function blobToString(blob) { - /** - * in the electron-renderer we have a typed array instead of a blob - * so we have to transform it. - * @link https://github.com/pubkey/rxdb/issues/1371 - */ - var blobType = Object.prototype.toString.call(blob); - if (blobType === '[object Uint8Array]') { - blob = new Blob([blob]); - } - if (typeof blob === 'string') { - return Promise.resolve(blob); - } - return blob.text(); -} -export async function blobToBase64String(blob) { - if (typeof blob === 'string') { - return blob; - } - - /** - * in the electron-renderer we have a typed array instead of a blob - * so we have to transform it. 
- * @link https://github.com/pubkey/rxdb/issues/1371 - */ - var blobType = Object.prototype.toString.call(blob); - if (blobType === '[object Uint8Array]') { - blob = new Blob([blob]); - } - var arrayBuffer = await blob.arrayBuffer(); - return arrayBufferToBase64(arrayBuffer); -} -export function getBlobSize(blob) { - return blob.size; -} -//# sourceMappingURL=utils-blob.js.map \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-blob.js.map b/dist/esm/plugins/utils/utils-blob.js.map deleted file mode 100644 index abce8beacc2..00000000000 --- a/dist/esm/plugins/utils/utils-blob.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-blob.js","names":["arrayBufferToBase64","createBlob","data","type","blob","Blob","createBlobFromBase64","base64String","base64Response","fetch","blobToString","blobType","Object","prototype","toString","call","Promise","resolve","text","blobToBase64String","arrayBuffer","getBlobSize","size"],"sources":["../../../../src/plugins/utils/utils-blob.ts"],"sourcesContent":["import { arrayBufferToBase64 } from './utils-base64.ts';\n\n/**\n * Since RxDB 13.0.0 we only use Blob instead of falling back to Buffer,\n * because Node.js >18 supports Blobs anyway.\n */\n/**\n * depending if we are on node or browser,\n * we have to use Buffer(node) or Blob(browser)\n */\nexport function createBlob(\n data: string,\n type: string\n): Blob {\n const blob = new Blob([data], {\n type\n });\n return blob;\n}\n\nexport async function createBlobFromBase64(\n base64String: string,\n type: string\n): Promise {\n const base64Response = await fetch(`data:${type};base64,${base64String}`);\n const blob = await base64Response.blob();\n return blob;\n}\n\nexport function blobToString(blob: Blob | string): Promise {\n /**\n * in the electron-renderer we have a typed array instead of a blob\n * so we have to transform it.\n * @link https://github.com/pubkey/rxdb/issues/1371\n */\n const blobType = Object.prototype.toString.call(blob);\n if 
(blobType === '[object Uint8Array]') {\n blob = new Blob([blob]);\n }\n if (typeof blob === 'string') {\n return Promise.resolve(blob);\n }\n\n return blob.text();\n}\n\nexport async function blobToBase64String(blob: Blob | string): Promise {\n if (typeof blob === 'string') {\n return blob;\n }\n\n /**\n * in the electron-renderer we have a typed array instead of a blob\n * so we have to transform it.\n * @link https://github.com/pubkey/rxdb/issues/1371\n */\n const blobType = Object.prototype.toString.call(blob);\n if (blobType === '[object Uint8Array]') {\n blob = new Blob([blob]);\n }\n\n const arrayBuffer = await blob.arrayBuffer();\n return arrayBufferToBase64(arrayBuffer);\n}\n\nexport function getBlobSize(blob: Blob): number {\n return blob.size;\n}\n"],"mappings":"AAAA,SAASA,mBAAmB,QAAQ,mBAAmB;;AAEvD;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASC,UAAUA,CACtBC,IAAY,EACZC,IAAY,EACR;EACJ,IAAMC,IAAI,GAAG,IAAIC,IAAI,CAAC,CAACH,IAAI,CAAC,EAAE;IAC1BC;EACJ,CAAC,CAAC;EACF,OAAOC,IAAI;AACf;AAEA,OAAO,eAAeE,oBAAoBA,CACtCC,YAAoB,EACpBJ,IAAY,EACC;EACb,IAAMK,cAAc,GAAG,MAAMC,KAAK,WAASN,IAAI,gBAAWI,YAAc,CAAC;EACzE,IAAMH,IAAI,GAAG,MAAMI,cAAc,CAACJ,IAAI,CAAC,CAAC;EACxC,OAAOA,IAAI;AACf;AAEA,OAAO,SAASM,YAAYA,CAACN,IAAmB,EAAmB;EAC/D;AACJ;AACA;AACA;AACA;EACI,IAAMO,QAAQ,GAAGC,MAAM,CAACC,SAAS,CAACC,QAAQ,CAACC,IAAI,CAACX,IAAI,CAAC;EACrD,IAAIO,QAAQ,KAAK,qBAAqB,EAAE;IACpCP,IAAI,GAAG,IAAIC,IAAI,CAAC,CAACD,IAAI,CAAC,CAAC;EAC3B;EACA,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;IAC1B,OAAOY,OAAO,CAACC,OAAO,CAACb,IAAI,CAAC;EAChC;EAEA,OAAOA,IAAI,CAACc,IAAI,CAAC,CAAC;AACtB;AAEA,OAAO,eAAeC,kBAAkBA,CAACf,IAAmB,EAAmB;EAC3E,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;IAC1B,OAAOA,IAAI;EACf;;EAEA;AACJ;AACA;AACA;AACA;EACI,IAAMO,QAAQ,GAAGC,MAAM,CAACC,SAAS,CAACC,QAAQ,CAACC,IAAI,CAACX,IAAI,CAAC;EACrD,IAAIO,QAAQ,KAAK,qBAAqB,EAAE;IACpCP,IAAI,GAAG,IAAIC,IAAI,CAAC,CAACD,IAAI,CAAC,CAAC;EAC3B;EAEA,IAAMgB,WAAW,GAAG,MAAMhB,IAAI,CAACgB,WAAW,CAAC,CAAC;EAC5C,OAAOpB,mBAAmB,CAACoB,WAAW,CAAC;AAC3C;AAEA,OAAO,SAASC,WAAWA,CAACjB,IAAU,EAAU;EAC5
C,OAAOA,IAAI,CAACkB,IAAI;AACpB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-document.js b/dist/esm/plugins/utils/utils-document.js deleted file mode 100644 index 745d6821498..00000000000 --- a/dist/esm/plugins/utils/utils-document.js +++ /dev/null @@ -1,87 +0,0 @@ -import { flatClone } from "./utils-object.js"; -/** - * We use 1 as minimum so that the value is never falsy. - * This const is used in several places because querying - * with a value lower then the minimum could give false results. - */ -export var RX_META_LWT_MINIMUM = 1; -export function getDefaultRxDocumentMeta() { - return { - /** - * Set this to 1 to not waste performance - * while calling new Date().. - * The storage wrappers will anyway update - * the lastWrite time while calling transformDocumentDataFromRxDBToRxStorage() - */ - lwt: RX_META_LWT_MINIMUM - }; -} - -/** - * Returns a revision that is not valid. - * Use this to have correct typings - * while the storage wrapper anyway will overwrite the revision. - */ -export function getDefaultRevision() { - /** - * Use a non-valid revision format, - * to ensure that the RxStorage will throw - * when the revision is not replaced downstream. - */ - return ''; -} -export function stripMetaDataFromDocument(docData) { - return Object.assign({}, docData, { - _meta: undefined, - _deleted: undefined, - _rev: undefined - }); -} - -/** - * Faster way to check the equality of document lists - * compared to doing a deep-equal. - * Here we only check the ids and revisions. 
- */ -export function areRxDocumentArraysEqual(primaryPath, ar1, ar2) { - if (ar1.length !== ar2.length) { - return false; - } - var i = 0; - var len = ar1.length; - while (i < len) { - var row1 = ar1[i]; - var row2 = ar2[i]; - i++; - if (row1._rev !== row2._rev || row1[primaryPath] !== row2[primaryPath]) { - return false; - } - } - return true; -} -export function getSortDocumentsByLastWriteTimeComparator(primaryPath) { - return (a, b) => { - if (a._meta.lwt === b._meta.lwt) { - if (b[primaryPath] < a[primaryPath]) { - return 1; - } else { - return -1; - } - } else { - return a._meta.lwt - b._meta.lwt; - } - }; -} -export function sortDocumentsByLastWriteTime(primaryPath, docs) { - return docs.sort(getSortDocumentsByLastWriteTimeComparator(primaryPath)); -} -export function toWithDeleted(docData) { - docData = flatClone(docData); - docData._deleted = !!docData._deleted; - return Object.assign(docData, { - _attachments: undefined, - _meta: undefined, - _rev: undefined - }); -} -//# sourceMappingURL=utils-document.js.map \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-document.js.map b/dist/esm/plugins/utils/utils-document.js.map deleted file mode 100644 index 84d9ab4734c..00000000000 --- a/dist/esm/plugins/utils/utils-document.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-document.js","names":["flatClone","RX_META_LWT_MINIMUM","getDefaultRxDocumentMeta","lwt","getDefaultRevision","stripMetaDataFromDocument","docData","Object","assign","_meta","undefined","_deleted","_rev","areRxDocumentArraysEqual","primaryPath","ar1","ar2","length","i","len","row1","row2","getSortDocumentsByLastWriteTimeComparator","a","b","sortDocumentsByLastWriteTime","docs","sort","toWithDeleted","_attachments"],"sources":["../../../../src/plugins/utils/utils-document.ts"],"sourcesContent":["import type {\n DeepReadonly,\n RxDocumentData,\n RxDocumentMeta,\n StringKeys,\n WithDeleted,\n WithDeletedAndAttachments\n} from '../../types/index.d.ts';\nimport { 
flatClone } from './utils-object.ts';\n/**\n * We use 1 as minimum so that the value is never falsy.\n * This const is used in several places because querying\n * with a value lower then the minimum could give false results.\n */\nexport const RX_META_LWT_MINIMUM = 1;\n\nexport function getDefaultRxDocumentMeta(): RxDocumentMeta {\n return {\n /**\n * Set this to 1 to not waste performance\n * while calling new Date()..\n * The storage wrappers will anyway update\n * the lastWrite time while calling transformDocumentDataFromRxDBToRxStorage()\n */\n lwt: RX_META_LWT_MINIMUM\n };\n}\n\n/**\n * Returns a revision that is not valid.\n * Use this to have correct typings\n * while the storage wrapper anyway will overwrite the revision.\n */\nexport function getDefaultRevision(): string {\n /**\n * Use a non-valid revision format,\n * to ensure that the RxStorage will throw\n * when the revision is not replaced downstream.\n */\n return '';\n}\n\n\nexport function stripMetaDataFromDocument(docData: RxDocumentData): RxDocType {\n return Object.assign({}, docData, {\n _meta: undefined,\n _deleted: undefined,\n _rev: undefined\n });\n}\n\n\n/**\n * Faster way to check the equality of document lists\n * compared to doing a deep-equal.\n * Here we only check the ids and revisions.\n */\nexport function areRxDocumentArraysEqual(\n primaryPath: StringKeys>,\n ar1: RxDocumentData[],\n ar2: RxDocumentData[]\n): boolean {\n if (ar1.length !== ar2.length) {\n return false;\n }\n let i = 0;\n const len = ar1.length;\n while (i < len) {\n const row1 = ar1[i];\n const row2 = ar2[i];\n i++;\n\n if (\n row1._rev !== row2._rev ||\n row1[primaryPath] !== row2[primaryPath]\n ) {\n return false;\n }\n }\n return true;\n}\n\n\n\nexport function getSortDocumentsByLastWriteTimeComparator(primaryPath: string) {\n return (a: RxDocumentData, b: RxDocumentData) => {\n if (a._meta.lwt === b._meta.lwt) {\n if ((b as any)[primaryPath] < (a as any)[primaryPath]) {\n return 1;\n } else {\n return -1;\n 
}\n } else {\n return a._meta.lwt - b._meta.lwt;\n }\n };\n}\nexport function sortDocumentsByLastWriteTime(\n primaryPath: string,\n docs: RxDocumentData[]\n): RxDocumentData[] {\n return docs.sort(getSortDocumentsByLastWriteTimeComparator(primaryPath));\n}\n\ntype AnyDocFormat = RxDocType | WithDeleted | RxDocumentData | WithDeletedAndAttachments;\nexport function toWithDeleted(\n docData: AnyDocFormat | DeepReadonly>\n): WithDeleted {\n docData = flatClone(docData);\n (docData as any)._deleted = !!(docData as any)._deleted;\n return Object.assign(docData as any, {\n _attachments: undefined,\n _meta: undefined,\n _rev: undefined\n }) as any;\n}\n"],"mappings":"AAQA,SAASA,SAAS,QAAQ,mBAAmB;AAC7C;AACA;AACA;AACA;AACA;AACA,OAAO,IAAMC,mBAAmB,GAAG,CAAC;AAEpC,OAAO,SAASC,wBAAwBA,CAAA,EAAmB;EACvD,OAAO;IACH;AACR;AACA;AACA;AACA;AACA;IACQC,GAAG,EAAEF;EACT,CAAC;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASG,kBAAkBA,CAAA,EAAW;EACzC;AACJ;AACA;AACA;AACA;EACI,OAAO,EAAE;AACb;AAGA,OAAO,SAASC,yBAAyBA,CAAYC,OAAkC,EAAa;EAChG,OAAOC,MAAM,CAACC,MAAM,CAAC,CAAC,CAAC,EAAEF,OAAO,EAAE;IAC9BG,KAAK,EAAEC,SAAS;IAChBC,QAAQ,EAAED,SAAS;IACnBE,IAAI,EAAEF;EACV,CAAC,CAAC;AACN;;AAGA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASG,wBAAwBA,CACpCC,WAAkD,EAClDC,GAAgC,EAChCC,GAAgC,EACzB;EACP,IAAID,GAAG,CAACE,MAAM,KAAKD,GAAG,CAACC,MAAM,EAAE;IAC3B,OAAO,KAAK;EAChB;EACA,IAAIC,CAAC,GAAG,CAAC;EACT,IAAMC,GAAG,GAAGJ,GAAG,CAACE,MAAM;EACtB,OAAOC,CAAC,GAAGC,GAAG,EAAE;IACZ,IAAMC,IAAI,GAAGL,GAAG,CAACG,CAAC,CAAC;IACnB,IAAMG,IAAI,GAAGL,GAAG,CAACE,CAAC,CAAC;IACnBA,CAAC,EAAE;IAEH,IACIE,IAAI,CAACR,IAAI,KAAKS,IAAI,CAACT,IAAI,IACvBQ,IAAI,CAACN,WAAW,CAAC,KAAKO,IAAI,CAACP,WAAW,CAAC,EACzC;MACE,OAAO,KAAK;IAChB;EACJ;EACA,OAAO,IAAI;AACf;AAIA,OAAO,SAASQ,yCAAyCA,CAAYR,WAAmB,EAAE;EACtF,OAAO,CAACS,CAA4B,EAAEC,CAA4B,KAAK;IACnE,IAAID,CAAC,CAACd,KAAK,CAACN,GAAG,KAAKqB,CAAC,CAACf,KAAK,CAACN,GAAG,EAAE;MAC7B,IAAKqB,CAAC,CAASV,WAAW,CAAC,GAAIS,CAAC,CAAST,WAAW,CAAC,EAAE;QACnD,OAAO,CAAC;MACZ,CAAC,MAAM;QACH,OAAO,CAAC,CAAC;MACb;IACJ,CAAC,MAAM;MACH,OAAOS,CAAC,CAAC
d,KAAK,CAACN,GAAG,GAAGqB,CAAC,CAACf,KAAK,CAACN,GAAG;IACpC;EACJ,CAAC;AACL;AACA,OAAO,SAASsB,4BAA4BA,CACxCX,WAAmB,EACnBY,IAAiC,EACN;EAC3B,OAAOA,IAAI,CAACC,IAAI,CAACL,yCAAyC,CAACR,WAAW,CAAC,CAAC;AAC5E;AAGA,OAAO,SAASc,aAAaA,CACzBtB,OAAwE,EAClD;EACtBA,OAAO,GAAGN,SAAS,CAACM,OAAO,CAAC;EAC3BA,OAAO,CAASK,QAAQ,GAAG,CAAC,CAAEL,OAAO,CAASK,QAAQ;EACvD,OAAOJ,MAAM,CAACC,MAAM,CAACF,OAAO,EAAS;IACjCuB,YAAY,EAAEnB,SAAS;IACvBD,KAAK,EAAEC,SAAS;IAChBE,IAAI,EAAEF;EACV,CAAC,CAAC;AACN","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-error.js b/dist/esm/plugins/utils/utils-error.js deleted file mode 100644 index 6e1701073af..00000000000 --- a/dist/esm/plugins/utils/utils-error.js +++ /dev/null @@ -1,36 +0,0 @@ -import { ucfirst } from "./utils-string.js"; - -/** - * Returns an error that indicates that a plugin is missing - * We do not throw a RxError because this should not be handled - * programmatically but by using the correct import - */ -export function pluginMissing(pluginKey) { - var keyParts = pluginKey.split('-'); - var pluginName = 'RxDB'; - keyParts.forEach(part => { - pluginName += ucfirst(part); - }); - pluginName += 'Plugin'; - return new Error("You are using a function which must be overwritten by a plugin.\n You should either prevent the usage of this function or add the plugin via:\n import { " + pluginName + " } from 'rxdb/plugins/" + pluginKey + "';\n addRxPlugin(" + pluginName + ");\n "); -} -export function errorToPlainJson(err) { - var ret = { - name: err.name, - message: err.message, - rxdb: err.rxdb, - parameters: err.parameters, - extensions: err.extensions, - code: err.code, - url: err.url, - /** - * stack must be last to make it easier to read the json in a console. - * Also we ensure that each linebreak is spaced so that the chrome devtools - * shows urls to the source code that can be clicked to inspect - * the correct place in the code. - */ - stack: !err.stack ? 
undefined : err.stack.replace(/\n/g, ' \n ') - }; - return ret; -} -//# sourceMappingURL=utils-error.js.map \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-error.js.map b/dist/esm/plugins/utils/utils-error.js.map deleted file mode 100644 index 84c345365da..00000000000 --- a/dist/esm/plugins/utils/utils-error.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-error.js","names":["ucfirst","pluginMissing","pluginKey","keyParts","split","pluginName","forEach","part","Error","errorToPlainJson","err","ret","name","message","rxdb","parameters","extensions","code","url","stack","undefined","replace"],"sources":["../../../../src/plugins/utils/utils-error.ts"],"sourcesContent":["import type {\n PlainJsonError,\n RxError,\n RxTypeError\n} from '../../types/index.d.ts';\nimport { ucfirst } from './utils-string.ts';\n\n\n\n/**\n * Returns an error that indicates that a plugin is missing\n * We do not throw a RxError because this should not be handled\n * programmatically but by using the correct import\n */\nexport function pluginMissing(\n pluginKey: string\n): Error {\n const keyParts = pluginKey.split('-');\n let pluginName = 'RxDB';\n keyParts.forEach(part => {\n pluginName += ucfirst(part);\n });\n pluginName += 'Plugin';\n return new Error(\n `You are using a function which must be overwritten by a plugin.\n You should either prevent the usage of this function or add the plugin via:\n import { ${pluginName} } from 'rxdb/plugins/${pluginKey}';\n addRxPlugin(${pluginName});\n `\n );\n}\n\n\n\nexport function errorToPlainJson(err: Error | TypeError | RxError | RxTypeError): PlainJsonError {\n const ret: PlainJsonError = {\n name: err.name,\n message: err.message,\n rxdb: (err as any).rxdb,\n parameters: (err as RxError).parameters,\n extensions: (err as any).extensions,\n code: (err as RxError).code,\n url: (err as RxError).url,\n /**\n * stack must be last to make it easier to read the json in a console.\n * Also we ensure that each 
linebreak is spaced so that the chrome devtools\n * shows urls to the source code that can be clicked to inspect\n * the correct place in the code.\n */\n stack: !err.stack ? undefined : err.stack.replace(/\\n/g, ' \\n ')\n };\n return ret;\n}\n"],"mappings":"AAKA,SAASA,OAAO,QAAQ,mBAAmB;;AAI3C;AACA;AACA;AACA;AACA;AACA,OAAO,SAASC,aAAaA,CACzBC,SAAiB,EACZ;EACL,IAAMC,QAAQ,GAAGD,SAAS,CAACE,KAAK,CAAC,GAAG,CAAC;EACrC,IAAIC,UAAU,GAAG,MAAM;EACvBF,QAAQ,CAACG,OAAO,CAACC,IAAI,IAAI;IACrBF,UAAU,IAAIL,OAAO,CAACO,IAAI,CAAC;EAC/B,CAAC,CAAC;EACFF,UAAU,IAAI,QAAQ;EACtB,OAAO,IAAIG,KAAK,iLAGGH,UAAU,8BAAyBH,SAAS,oCACzCG,UAAU,iBAEhC,CAAC;AACL;AAIA,OAAO,SAASI,gBAAgBA,CAACC,GAA8C,EAAkB;EAC7F,IAAMC,GAAmB,GAAG;IACxBC,IAAI,EAAEF,GAAG,CAACE,IAAI;IACdC,OAAO,EAAEH,GAAG,CAACG,OAAO;IACpBC,IAAI,EAAGJ,GAAG,CAASI,IAAI;IACvBC,UAAU,EAAGL,GAAG,CAAaK,UAAU;IACvCC,UAAU,EAAGN,GAAG,CAASM,UAAU;IACnCC,IAAI,EAAGP,GAAG,CAAaO,IAAI;IAC3BC,GAAG,EAAGR,GAAG,CAAaQ,GAAG;IACzB;AACR;AACA;AACA;AACA;AACA;IACQC,KAAK,EAAE,CAACT,GAAG,CAACS,KAAK,GAAGC,SAAS,GAAGV,GAAG,CAACS,KAAK,CAACE,OAAO,CAAC,KAAK,EAAE,MAAM;EACnE,CAAC;EACD,OAAOV,GAAG;AACd","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-global.js b/dist/esm/plugins/utils/utils-global.js deleted file mode 100644 index e1a6283b45d..00000000000 --- a/dist/esm/plugins/utils/utils-global.js +++ /dev/null @@ -1,7 +0,0 @@ -/** - * Can be used by some plugins to have a "global" object that - * can be imported and mutated at will. 
- */ -export var RXDB_UTILS_GLOBAL = {}; -export var PREMIUM_FLAG_HASH = '6da4936d1425ff3a5c44c02342c6daf791d266be3ae8479b8ec59e261df41b93'; -//# sourceMappingURL=utils-global.js.map \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-global.js.map b/dist/esm/plugins/utils/utils-global.js.map deleted file mode 100644 index 1324087f520..00000000000 --- a/dist/esm/plugins/utils/utils-global.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-global.js","names":["RXDB_UTILS_GLOBAL","PREMIUM_FLAG_HASH"],"sources":["../../../../src/plugins/utils/utils-global.ts"],"sourcesContent":["/**\n * Can be used by some plugins to have a \"global\" object that\n * can be imported and mutated at will.\n */\nexport const RXDB_UTILS_GLOBAL: any = {};\n\n\nexport const PREMIUM_FLAG_HASH = '6da4936d1425ff3a5c44c02342c6daf791d266be3ae8479b8ec59e261df41b93';\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA,OAAO,IAAMA,iBAAsB,GAAG,CAAC,CAAC;AAGxC,OAAO,IAAMC,iBAAiB,GAAG,kEAAkE","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-hash.js b/dist/esm/plugins/utils/utils-hash.js deleted file mode 100644 index 4ea8e8a4d49..00000000000 --- a/dist/esm/plugins/utils/utils-hash.js +++ /dev/null @@ -1,40 +0,0 @@ -import { sha256 } from 'ohash'; -/** - * TODO in the future we should no longer provide a - * fallback to crypto.subtle.digest. - * Instead users without crypto.subtle.digest support, should have to provide their own - * hash function. 
- */ -export function jsSha256(input) { - return Promise.resolve(sha256(input)); -} -export async function nativeSha256(input) { - var data = new TextEncoder().encode(input); - var hashBuffer = await crypto.subtle.digest('SHA-256', data); - /** - * @link https://jameshfisher.com/2017/10/30/web-cryptography-api-hello-world/ - */ - var hash = Array.prototype.map.call(new Uint8Array(hashBuffer), x => ('00' + x.toString(16)).slice(-2)).join(''); - return hash; -} -export var canUseCryptoSubtle = typeof crypto !== 'undefined' && typeof crypto.subtle !== 'undefined' && typeof crypto.subtle.digest === 'function'; - -/** - * Default hash method used to hash - * strings and do equal comparisons. - * - * IMPORTANT: Changing the default hashing method - * requires a BREAKING change! - */ - -export var defaultHashSha256 = canUseCryptoSubtle ? nativeSha256 : jsSha256; -export function hashStringToNumber(str) { - var nr = 0; - var len = str.length; - for (var i = 0; i < len; i++) { - nr = nr + str.charCodeAt(i); - nr |= 0; // Convert to 32bit integer, improves performance - } - return nr; -} -//# sourceMappingURL=utils-hash.js.map \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-hash.js.map b/dist/esm/plugins/utils/utils-hash.js.map deleted file mode 100644 index 2d4b380b66d..00000000000 --- a/dist/esm/plugins/utils/utils-hash.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-hash.js","names":["sha256","jsSha256","input","Promise","resolve","nativeSha256","data","TextEncoder","encode","hashBuffer","crypto","subtle","digest","hash","Array","prototype","map","call","Uint8Array","x","toString","slice","join","canUseCryptoSubtle","defaultHashSha256","hashStringToNumber","str","nr","len","length","i","charCodeAt"],"sources":["../../../../src/plugins/utils/utils-hash.ts"],"sourcesContent":["import { sha256 } from 'ohash';\nimport type { HashFunction } from '../../types/index.d.ts';\n\n\n/**\n * TODO in the future we should no longer provide a\n * 
fallback to crypto.subtle.digest.\n * Instead users without crypto.subtle.digest support, should have to provide their own\n * hash function.\n */\nexport function jsSha256(input: string) {\n return Promise.resolve(sha256(input));\n}\n\nexport async function nativeSha256(input: string) {\n const data = new TextEncoder().encode(input);\n const hashBuffer = await crypto.subtle.digest('SHA-256', data);\n /**\n * @link https://jameshfisher.com/2017/10/30/web-cryptography-api-hello-world/\n */\n const hash = Array.prototype.map.call(\n new Uint8Array(hashBuffer),\n x => (('00' + x.toString(16)).slice(-2))\n ).join('');\n return hash;\n}\n\n\nexport const canUseCryptoSubtle = typeof crypto !== 'undefined' &&\n typeof crypto.subtle !== 'undefined' &&\n typeof crypto.subtle.digest === 'function';\n\n/**\n * Default hash method used to hash\n * strings and do equal comparisons.\n *\n * IMPORTANT: Changing the default hashing method\n * requires a BREAKING change!\n */\n\nexport const defaultHashSha256: HashFunction = canUseCryptoSubtle ? 
nativeSha256 : jsSha256;\n\n\nexport function hashStringToNumber(str: string): number {\n let nr = 0;\n const len = str.length;\n for (let i = 0; i < len; i++) {\n nr = nr + str.charCodeAt(i);\n nr |= 0; // Convert to 32bit integer, improves performance\n }\n return nr;\n}\n"],"mappings":"AAAA,SAASA,MAAM,QAAQ,OAAO;AAI9B;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASC,QAAQA,CAACC,KAAa,EAAE;EACpC,OAAOC,OAAO,CAACC,OAAO,CAACJ,MAAM,CAACE,KAAK,CAAC,CAAC;AACzC;AAEA,OAAO,eAAeG,YAAYA,CAACH,KAAa,EAAE;EAC9C,IAAMI,IAAI,GAAG,IAAIC,WAAW,CAAC,CAAC,CAACC,MAAM,CAACN,KAAK,CAAC;EAC5C,IAAMO,UAAU,GAAG,MAAMC,MAAM,CAACC,MAAM,CAACC,MAAM,CAAC,SAAS,EAAEN,IAAI,CAAC;EAC9D;AACJ;AACA;EACI,IAAMO,IAAI,GAAGC,KAAK,CAACC,SAAS,CAACC,GAAG,CAACC,IAAI,CACjC,IAAIC,UAAU,CAACT,UAAU,CAAC,EAC1BU,CAAC,IAAK,CAAC,IAAI,GAAGA,CAAC,CAACC,QAAQ,CAAC,EAAE,CAAC,EAAEC,KAAK,CAAC,CAAC,CAAC,CAC1C,CAAC,CAACC,IAAI,CAAC,EAAE,CAAC;EACV,OAAOT,IAAI;AACf;AAGA,OAAO,IAAMU,kBAAkB,GAAG,OAAOb,MAAM,KAAK,WAAW,IAC3D,OAAOA,MAAM,CAACC,MAAM,KAAK,WAAW,IACpC,OAAOD,MAAM,CAACC,MAAM,CAACC,MAAM,KAAK,UAAU;;AAE9C;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,OAAO,IAAMY,iBAA+B,GAAGD,kBAAkB,GAAGlB,YAAY,GAAGJ,QAAQ;AAG3F,OAAO,SAASwB,kBAAkBA,CAACC,GAAW,EAAU;EACpD,IAAIC,EAAE,GAAG,CAAC;EACV,IAAMC,GAAG,GAAGF,GAAG,CAACG,MAAM;EACtB,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGF,GAAG,EAAEE,CAAC,EAAE,EAAE;IAC1BH,EAAE,GAAGA,EAAE,GAAGD,GAAG,CAACK,UAAU,CAACD,CAAC,CAAC;IAC3BH,EAAE,IAAI,CAAC,CAAC,CAAC;EACb;EACA,OAAOA,EAAE;AACb","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-map.js b/dist/esm/plugins/utils/utils-map.js deleted file mode 100644 index eb91f208e97..00000000000 --- a/dist/esm/plugins/utils/utils-map.js +++ /dev/null @@ -1,18 +0,0 @@ -export function getFromMapOrThrow(map, key) { - var val = map.get(key); - if (typeof val === 'undefined') { - throw new Error('missing value from map ' + key); - } - return val; -} -export function getFromMapOrCreate(map, index, creator, ifWasThere) { - var value = map.get(index); - if (typeof value === 
'undefined') { - value = creator(); - map.set(index, value); - } else if (ifWasThere) { - ifWasThere(value); - } - return value; -} -//# sourceMappingURL=utils-map.js.map \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-map.js.map b/dist/esm/plugins/utils/utils-map.js.map deleted file mode 100644 index 3535bacb1d2..00000000000 --- a/dist/esm/plugins/utils/utils-map.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-map.js","names":["getFromMapOrThrow","map","key","val","get","Error","getFromMapOrCreate","index","creator","ifWasThere","value","set"],"sources":["../../../../src/plugins/utils/utils-map.ts"],"sourcesContent":["\n\n\nexport function getFromMapOrThrow(map: Map | WeakMap, key: K): V {\n const val = map.get(key);\n if (typeof val === 'undefined') {\n throw new Error('missing value from map ' + key);\n }\n return val;\n}\n\nexport function getFromMapOrCreate(\n map: Map | WeakMap,\n index: MapIndex,\n creator: () => MapValue,\n ifWasThere?: (value: MapValue) => void\n): MapValue {\n let value = map.get(index);\n if (typeof value === 'undefined') {\n value = creator();\n map.set(index, value);\n } else if (ifWasThere) {\n ifWasThere(value);\n }\n return value;\n}\n"],"mappings":"AAGA,OAAO,SAASA,iBAAiBA,CAAOC,GAAgC,EAAEC,GAAM,EAAK;EACjF,IAAMC,GAAG,GAAGF,GAAG,CAACG,GAAG,CAACF,GAAG,CAAC;EACxB,IAAI,OAAOC,GAAG,KAAK,WAAW,EAAE;IAC5B,MAAM,IAAIE,KAAK,CAAC,yBAAyB,GAAGH,GAAG,CAAC;EACpD;EACA,OAAOC,GAAG;AACd;AAEA,OAAO,SAASG,kBAAkBA,CAC9BL,GAAqD,EACrDM,KAAe,EACfC,OAAuB,EACvBC,UAAsC,EAC9B;EACR,IAAIC,KAAK,GAAGT,GAAG,CAACG,GAAG,CAACG,KAAK,CAAC;EAC1B,IAAI,OAAOG,KAAK,KAAK,WAAW,EAAE;IAC9BA,KAAK,GAAGF,OAAO,CAAC,CAAC;IACjBP,GAAG,CAACU,GAAG,CAACJ,KAAK,EAAEG,KAAK,CAAC;EACzB,CAAC,MAAM,IAAID,UAAU,EAAE;IACnBA,UAAU,CAACC,KAAK,CAAC;EACrB;EACA,OAAOA,KAAK;AAChB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-number.js b/dist/esm/plugins/utils/utils-number.js deleted file mode 100644 index 7e7178d4e83..00000000000 --- 
a/dist/esm/plugins/utils/utils-number.js +++ /dev/null @@ -1,2 +0,0 @@ - -//# sourceMappingURL=utils-number.js.map \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-number.js.map b/dist/esm/plugins/utils/utils-number.js.map deleted file mode 100644 index fcfcf02d8f5..00000000000 --- a/dist/esm/plugins/utils/utils-number.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-number.js","names":[],"sources":["../../../../src/plugins/utils/utils-number.ts"],"sourcesContent":[""],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-object-deep-equal.js b/dist/esm/plugins/utils/utils-object-deep-equal.js deleted file mode 100644 index 0f27cc229c5..00000000000 --- a/dist/esm/plugins/utils/utils-object-deep-equal.js +++ /dev/null @@ -1,36 +0,0 @@ -/** - * Copied from the fast-deep-equal package - * because it does not support es modules and causes optimization bailouts. - * TODO use the npm package again when this is merged: - * @link https://github.com/epoberezkin/fast-deep-equal/pull/105 - */ -export function deepEqual(a, b) { - if (a === b) return true; - if (a && b && typeof a == 'object' && typeof b == 'object') { - if (a.constructor !== b.constructor) return false; - var length; - var i; - if (Array.isArray(a)) { - length = a.length; - if (length !== b.length) return false; - for (i = length; i-- !== 0;) if (!deepEqual(a[i], b[i])) return false; - return true; - } - if (a.constructor === RegExp) return a.source === b.source && a.flags === b.flags; - if (a.valueOf !== Object.prototype.valueOf) return a.valueOf() === b.valueOf(); - if (a.toString !== Object.prototype.toString) return a.toString() === b.toString(); - var keys = Object.keys(a); - length = keys.length; - if (length !== Object.keys(b).length) return false; - for (i = length; i-- !== 0;) if (!Object.prototype.hasOwnProperty.call(b, keys[i])) return false; - for (i = length; i-- !== 0;) { - var key = keys[i]; - if 
(!deepEqual(a[key], b[key])) return false; - } - return true; - } - - // true if both NaN, false otherwise - return a !== a && b !== b; -} -//# sourceMappingURL=utils-object-deep-equal.js.map \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-object-deep-equal.js.map b/dist/esm/plugins/utils/utils-object-deep-equal.js.map deleted file mode 100644 index 807bc3afa81..00000000000 --- a/dist/esm/plugins/utils/utils-object-deep-equal.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-object-deep-equal.js","names":["deepEqual","a","b","constructor","length","i","Array","isArray","RegExp","source","flags","valueOf","Object","prototype","toString","keys","hasOwnProperty","call","key"],"sources":["../../../../src/plugins/utils/utils-object-deep-equal.ts"],"sourcesContent":["\n/**\n * Copied from the fast-deep-equal package\n * because it does not support es modules and causes optimization bailouts.\n * TODO use the npm package again when this is merged:\n * @link https://github.com/epoberezkin/fast-deep-equal/pull/105\n */\nexport function deepEqual(a: any, b: any): boolean {\n if (a === b) return true;\n\n if (a && b && typeof a == 'object' && typeof b == 'object') {\n if (a.constructor !== b.constructor) return false;\n\n let length;\n let i;\n if (Array.isArray(a)) {\n length = a.length;\n if (length !== b.length) return false;\n for (i = length; i-- !== 0;)\n if (!deepEqual(a[i], b[i])) return false;\n return true;\n }\n\n\n if (a.constructor === RegExp) return a.source === b.source && a.flags === b.flags;\n if (a.valueOf !== Object.prototype.valueOf) return a.valueOf() === b.valueOf();\n if (a.toString !== Object.prototype.toString) return a.toString() === b.toString();\n\n const keys = Object.keys(a);\n length = keys.length;\n if (length !== Object.keys(b).length) return false;\n\n for (i = length; i-- !== 0;)\n if (!Object.prototype.hasOwnProperty.call(b, keys[i])) return false;\n\n for (i = length; i-- !== 0;) {\n const key = 
keys[i];\n if (!deepEqual(a[key], b[key])) return false;\n }\n\n return true;\n }\n\n // true if both NaN, false otherwise\n return a !== a && b !== b;\n}\n"],"mappings":"AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASA,SAASA,CAACC,CAAM,EAAEC,CAAM,EAAW;EAC/C,IAAID,CAAC,KAAKC,CAAC,EAAE,OAAO,IAAI;EAExB,IAAID,CAAC,IAAIC,CAAC,IAAI,OAAOD,CAAC,IAAI,QAAQ,IAAI,OAAOC,CAAC,IAAI,QAAQ,EAAE;IACxD,IAAID,CAAC,CAACE,WAAW,KAAKD,CAAC,CAACC,WAAW,EAAE,OAAO,KAAK;IAEjD,IAAIC,MAAM;IACV,IAAIC,CAAC;IACL,IAAIC,KAAK,CAACC,OAAO,CAACN,CAAC,CAAC,EAAE;MAClBG,MAAM,GAAGH,CAAC,CAACG,MAAM;MACjB,IAAIA,MAAM,KAAKF,CAAC,CAACE,MAAM,EAAE,OAAO,KAAK;MACrC,KAAKC,CAAC,GAAGD,MAAM,EAAEC,CAAC,EAAE,KAAK,CAAC,GACtB,IAAI,CAACL,SAAS,CAACC,CAAC,CAACI,CAAC,CAAC,EAAEH,CAAC,CAACG,CAAC,CAAC,CAAC,EAAE,OAAO,KAAK;MAC5C,OAAO,IAAI;IACf;IAGA,IAAIJ,CAAC,CAACE,WAAW,KAAKK,MAAM,EAAE,OAAOP,CAAC,CAACQ,MAAM,KAAKP,CAAC,CAACO,MAAM,IAAIR,CAAC,CAACS,KAAK,KAAKR,CAAC,CAACQ,KAAK;IACjF,IAAIT,CAAC,CAACU,OAAO,KAAKC,MAAM,CAACC,SAAS,CAACF,OAAO,EAAE,OAAOV,CAAC,CAACU,OAAO,CAAC,CAAC,KAAKT,CAAC,CAACS,OAAO,CAAC,CAAC;IAC9E,IAAIV,CAAC,CAACa,QAAQ,KAAKF,MAAM,CAACC,SAAS,CAACC,QAAQ,EAAE,OAAOb,CAAC,CAACa,QAAQ,CAAC,CAAC,KAAKZ,CAAC,CAACY,QAAQ,CAAC,CAAC;IAElF,IAAMC,IAAI,GAAGH,MAAM,CAACG,IAAI,CAACd,CAAC,CAAC;IAC3BG,MAAM,GAAGW,IAAI,CAACX,MAAM;IACpB,IAAIA,MAAM,KAAKQ,MAAM,CAACG,IAAI,CAACb,CAAC,CAAC,CAACE,MAAM,EAAE,OAAO,KAAK;IAElD,KAAKC,CAAC,GAAGD,MAAM,EAAEC,CAAC,EAAE,KAAK,CAAC,GACtB,IAAI,CAACO,MAAM,CAACC,SAAS,CAACG,cAAc,CAACC,IAAI,CAACf,CAAC,EAAEa,IAAI,CAACV,CAAC,CAAC,CAAC,EAAE,OAAO,KAAK;IAEvE,KAAKA,CAAC,GAAGD,MAAM,EAAEC,CAAC,EAAE,KAAK,CAAC,GAAG;MACzB,IAAMa,GAAG,GAAGH,IAAI,CAACV,CAAC,CAAC;MACnB,IAAI,CAACL,SAAS,CAACC,CAAC,CAACiB,GAAG,CAAC,EAAEhB,CAAC,CAACgB,GAAG,CAAC,CAAC,EAAE,OAAO,KAAK;IAChD;IAEA,OAAO,IAAI;EACf;;EAEA;EACA,OAAOjB,CAAC,KAAKA,CAAC,IAAIC,CAAC,KAAKA,CAAC;AAC7B","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-object-dot-prop.js b/dist/esm/plugins/utils/utils-object-dot-prop.js deleted file mode 100644 index 
8da7a61e19b..00000000000 --- a/dist/esm/plugins/utils/utils-object-dot-prop.js +++ /dev/null @@ -1,296 +0,0 @@ -/** - * Copied from - * @link https://github.com/sindresorhus/dot-prop/blob/main/index.js - * because it is currently an esm only module. - * TODO use the npm package again when RxDB is also fully esm. - */ - -var isObject = value => { - var type = typeof value; - return value !== null && (type === 'object' || type === 'function'); -}; -var disallowedKeys = new Set(['__proto__', 'prototype', 'constructor']); -var digits = new Set('0123456789'); -function getPathSegments(path) { - var parts = []; - var currentSegment = ''; - var currentPart = 'start'; - var isIgnoring = false; - for (var character of path) { - switch (character) { - case '\\': - { - if (currentPart === 'index') { - throw new Error('Invalid character in an index'); - } - if (currentPart === 'indexEnd') { - throw new Error('Invalid character after an index'); - } - if (isIgnoring) { - currentSegment += character; - } - currentPart = 'property'; - isIgnoring = !isIgnoring; - break; - } - case '.': - { - if (currentPart === 'index') { - throw new Error('Invalid character in an index'); - } - if (currentPart === 'indexEnd') { - currentPart = 'property'; - break; - } - if (isIgnoring) { - isIgnoring = false; - currentSegment += character; - break; - } - if (disallowedKeys.has(currentSegment)) { - return []; - } - parts.push(currentSegment); - currentSegment = ''; - currentPart = 'property'; - break; - } - case '[': - { - if (currentPart === 'index') { - throw new Error('Invalid character in an index'); - } - if (currentPart === 'indexEnd') { - currentPart = 'index'; - break; - } - if (isIgnoring) { - isIgnoring = false; - currentSegment += character; - break; - } - if (currentPart === 'property') { - if (disallowedKeys.has(currentSegment)) { - return []; - } - parts.push(currentSegment); - currentSegment = ''; - } - currentPart = 'index'; - break; - } - case ']': - { - if (currentPart === 
'index') { - parts.push(Number.parseInt(currentSegment, 10)); - currentSegment = ''; - currentPart = 'indexEnd'; - break; - } - if (currentPart === 'indexEnd') { - throw new Error('Invalid character after an index'); - } - - // Falls through - } - default: - { - if (currentPart === 'index' && !digits.has(character)) { - throw new Error('Invalid character in an index'); - } - if (currentPart === 'indexEnd') { - throw new Error('Invalid character after an index'); - } - if (currentPart === 'start') { - currentPart = 'property'; - } - if (isIgnoring) { - isIgnoring = false; - currentSegment += '\\'; - } - currentSegment += character; - } - } - } - if (isIgnoring) { - currentSegment += '\\'; - } - switch (currentPart) { - case 'property': - { - if (disallowedKeys.has(currentSegment)) { - return []; - } - parts.push(currentSegment); - break; - } - case 'index': - { - throw new Error('Index was not closed'); - } - case 'start': - { - parts.push(''); - break; - } - // No default - } - return parts; -} -function isStringIndex(object, key) { - if (typeof key !== 'number' && Array.isArray(object)) { - var index = Number.parseInt(key, 10); - return Number.isInteger(index) && object[index] === object[key]; - } - return false; -} -function assertNotStringIndex(object, key) { - if (isStringIndex(object, key)) { - throw new Error('Cannot use string index'); - } -} - -/** - * TODO we need some performance tests and improvements here. - */ -export function getProperty(object, path, value) { - if (Array.isArray(path)) { - path = path.join('.'); - } - - /** - * Performance shortcut. - * In most cases we just have a simple property name - * so we can directly return it. - */ - if (!path.includes('.') && !path.includes('[')) { - return object[path]; - } - if (!isObject(object) || typeof path !== 'string') { - return value === undefined ? 
object : value; - } - var pathArray = getPathSegments(path); - if (pathArray.length === 0) { - return value; - } - for (var index = 0; index < pathArray.length; index++) { - var key = pathArray[index]; - if (isStringIndex(object, key)) { - object = index === pathArray.length - 1 ? undefined : null; - } else { - object = object[key]; - } - if (object === undefined || object === null) { - // `object` is either `undefined` or `null` so we want to stop the loop, and - // if this is not the last bit of the path, and - // if it didn't return `undefined` - // it would return `null` if `object` is `null` - // but we want `get({foo: null}, 'foo.bar')` to equal `undefined`, or the supplied value, not `null` - if (index !== pathArray.length - 1) { - return value; - } - break; - } - } - return object === undefined ? value : object; -} -export function setProperty(object, path, value) { - if (Array.isArray(path)) { - path = path.join('.'); - } - if (!isObject(object) || typeof path !== 'string') { - return object; - } - var root = object; - var pathArray = getPathSegments(path); - for (var index = 0; index < pathArray.length; index++) { - var key = pathArray[index]; - assertNotStringIndex(object, key); - if (index === pathArray.length - 1) { - object[key] = value; - } else if (!isObject(object[key])) { - object[key] = typeof pathArray[index + 1] === 'number' ? 
[] : {}; - } - object = object[key]; - } - return root; -} -export function deleteProperty(object, path) { - if (!isObject(object) || typeof path !== 'string') { - return false; - } - var pathArray = getPathSegments(path); - for (var index = 0; index < pathArray.length; index++) { - var key = pathArray[index]; - assertNotStringIndex(object, key); - if (index === pathArray.length - 1) { - delete object[key]; - return true; - } - object = object[key]; - if (!isObject(object)) { - return false; - } - } -} -export function hasProperty(object, path) { - if (!isObject(object) || typeof path !== 'string') { - return false; - } - var pathArray = getPathSegments(path); - if (pathArray.length === 0) { - return false; - } - for (var key of pathArray) { - if (!isObject(object) || !(key in object) || isStringIndex(object, key)) { - return false; - } - object = object[key]; - } - return true; -} - -// TODO: Backslashes with no effect should not be escaped -function escapePath(path) { - if (typeof path !== 'string') { - throw new TypeError('Expected a string'); - } - return path.replace(/[\\.[]/g, '\\$&'); -} - -// The keys returned by Object.entries() for arrays are strings -function entries(value) { - if (Array.isArray(value)) { - return value.map((v, index) => [index, v]); - } - return Object.entries(value); -} -function stringifyPath(pathSegments) { - var result = ''; - - // eslint-disable-next-line prefer-const - for (var [index, segment] of entries(pathSegments)) { - if (typeof segment === 'number') { - result += "[" + segment + "]"; - } else { - segment = escapePath(segment); - result += index === 0 ? segment : "." 
+ segment; - } - } - return result; -} -function* deepKeysIterator(object, currentPath = []) { - if (!isObject(object)) { - if (currentPath.length > 0) { - yield stringifyPath(currentPath); - } - return; - } - for (var [key, value] of entries(object)) { - yield* deepKeysIterator(value, [...currentPath, key]); - } -} -export function deepKeys(object) { - return [...deepKeysIterator(object)]; -} -//# sourceMappingURL=utils-object-dot-prop.js.map \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-object-dot-prop.js.map b/dist/esm/plugins/utils/utils-object-dot-prop.js.map deleted file mode 100644 index db524a9dd68..00000000000 --- a/dist/esm/plugins/utils/utils-object-dot-prop.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-object-dot-prop.js","names":["isObject","value","type","disallowedKeys","Set","digits","getPathSegments","path","parts","currentSegment","currentPart","isIgnoring","character","Error","has","push","Number","parseInt","isStringIndex","object","key","Array","isArray","index","isInteger","assertNotStringIndex","getProperty","join","includes","undefined","pathArray","length","setProperty","root","deleteProperty","hasProperty","escapePath","TypeError","replace","entries","map","v","Object","stringifyPath","pathSegments","result","segment","deepKeysIterator","currentPath","deepKeys"],"sources":["../../../../src/plugins/utils/utils-object-dot-prop.ts"],"sourcesContent":["/**\n * Copied from\n * @link https://github.com/sindresorhus/dot-prop/blob/main/index.js\n * because it is currently an esm only module.\n * TODO use the npm package again when RxDB is also fully esm.\n */\n\nconst isObject = (value: null) => {\n const type = typeof value;\n return value !== null && (type === 'object' || type === 'function');\n};\n\nconst disallowedKeys = new Set([\n '__proto__',\n 'prototype',\n 'constructor',\n]);\n\nconst digits = new Set('0123456789');\n\nfunction getPathSegments(path: string) {\n const parts = [];\n let 
currentSegment = '';\n let currentPart = 'start';\n let isIgnoring = false;\n\n for (const character of path) {\n switch (character) {\n case '\\\\': {\n if (currentPart === 'index') {\n throw new Error('Invalid character in an index');\n }\n\n if (currentPart === 'indexEnd') {\n throw new Error('Invalid character after an index');\n }\n\n if (isIgnoring) {\n currentSegment += character;\n }\n\n currentPart = 'property';\n isIgnoring = !isIgnoring;\n break;\n }\n\n case '.': {\n if (currentPart === 'index') {\n throw new Error('Invalid character in an index');\n }\n\n if (currentPart === 'indexEnd') {\n currentPart = 'property';\n break;\n }\n\n if (isIgnoring) {\n isIgnoring = false;\n currentSegment += character;\n break;\n }\n\n if (disallowedKeys.has(currentSegment)) {\n return [];\n }\n\n parts.push(currentSegment);\n currentSegment = '';\n currentPart = 'property';\n break;\n }\n\n case '[': {\n if (currentPart === 'index') {\n throw new Error('Invalid character in an index');\n }\n\n if (currentPart === 'indexEnd') {\n currentPart = 'index';\n break;\n }\n\n if (isIgnoring) {\n isIgnoring = false;\n currentSegment += character;\n break;\n }\n\n if (currentPart === 'property') {\n if (disallowedKeys.has(currentSegment)) {\n return [];\n }\n\n parts.push(currentSegment);\n currentSegment = '';\n }\n\n currentPart = 'index';\n break;\n }\n\n case ']': {\n if (currentPart === 'index') {\n parts.push(Number.parseInt(currentSegment, 10));\n currentSegment = '';\n currentPart = 'indexEnd';\n break;\n }\n\n if (currentPart === 'indexEnd') {\n throw new Error('Invalid character after an index');\n }\n\n // Falls through\n }\n\n default: {\n if (currentPart === 'index' && !digits.has(character)) {\n throw new Error('Invalid character in an index');\n }\n\n if (currentPart === 'indexEnd') {\n throw new Error('Invalid character after an index');\n }\n\n if (currentPart === 'start') {\n currentPart = 'property';\n }\n\n if (isIgnoring) {\n isIgnoring = false;\n 
currentSegment += '\\\\';\n }\n\n currentSegment += character;\n }\n }\n }\n\n if (isIgnoring) {\n currentSegment += '\\\\';\n }\n\n switch (currentPart) {\n case 'property': {\n if (disallowedKeys.has(currentSegment)) {\n return [];\n }\n\n parts.push(currentSegment);\n\n break;\n }\n\n case 'index': {\n throw new Error('Index was not closed');\n }\n\n case 'start': {\n parts.push('');\n\n break;\n }\n // No default\n }\n\n return parts;\n}\n\nfunction isStringIndex(object: any[], key: string) {\n if (typeof key !== 'number' && Array.isArray(object)) {\n const index = Number.parseInt(key, 10);\n return Number.isInteger(index) && object[index] === object[key as any];\n }\n\n return false;\n}\n\nfunction assertNotStringIndex(object: any, key: string | number) {\n if (isStringIndex(object, key as any)) {\n throw new Error('Cannot use string index');\n }\n}\n\n/**\n * TODO we need some performance tests and improvements here.\n */\nexport function getProperty(object: any, path: string | string[], value?: any) {\n if (Array.isArray(path)) {\n path = path.join('.');\n }\n\n /**\n * Performance shortcut.\n * In most cases we just have a simple property name\n * so we can directly return it.\n */\n if (\n !path.includes('.') &&\n !path.includes('[')\n ) {\n return object[path];\n }\n\n if (!isObject(object as any) || typeof path !== 'string') {\n return value === undefined ? object : value;\n }\n\n const pathArray = getPathSegments(path);\n if (pathArray.length === 0) {\n return value;\n }\n\n for (let index = 0; index < pathArray.length; index++) {\n const key = pathArray[index];\n\n if (isStringIndex(object as any, key as any)) {\n object = index === pathArray.length - 1 ? 
undefined : null;\n } else {\n object = (object as any)[key];\n }\n\n if (object === undefined || object === null) {\n // `object` is either `undefined` or `null` so we want to stop the loop, and\n // if this is not the last bit of the path, and\n // if it didn't return `undefined`\n // it would return `null` if `object` is `null`\n // but we want `get({foo: null}, 'foo.bar')` to equal `undefined`, or the supplied value, not `null`\n if (index !== pathArray.length - 1) {\n return value;\n }\n\n break;\n }\n }\n\n return object === undefined ? value : object;\n}\n\nexport function setProperty(object: any, path: string, value: any) {\n if (Array.isArray(path)) {\n path = path.join('.');\n }\n\n if (!isObject(object as any) || typeof path !== 'string') {\n return object;\n }\n\n const root = object;\n const pathArray = getPathSegments(path);\n\n for (let index = 0; index < pathArray.length; index++) {\n const key = pathArray[index];\n\n assertNotStringIndex(object, key);\n\n if (index === pathArray.length - 1) {\n object[key] = value;\n } else if (!isObject(object[key])) {\n object[key] = typeof pathArray[index + 1] === 'number' ? 
[] : {};\n }\n\n object = object[key];\n }\n\n return root;\n}\n\nexport function deleteProperty(object: any, path: string) {\n if (!isObject(object as any) || typeof path !== 'string') {\n return false;\n }\n\n const pathArray = getPathSegments(path);\n\n for (let index = 0; index < pathArray.length; index++) {\n const key = pathArray[index];\n\n assertNotStringIndex(object, key);\n\n if (index === pathArray.length - 1) {\n delete object[key];\n return true;\n }\n\n object = object[key];\n\n if (!isObject(object as any)) {\n return false;\n }\n }\n}\n\nexport function hasProperty(object: any, path: string) {\n if (!isObject(object) || typeof path !== 'string') {\n return false;\n }\n\n const pathArray = getPathSegments(path);\n if (pathArray.length === 0) {\n return false;\n }\n\n for (const key of pathArray) {\n if (!isObject(object) || !(key in object) || isStringIndex(object, key as any)) {\n return false;\n }\n\n object = object[key];\n }\n\n return true;\n}\n\n// TODO: Backslashes with no effect should not be escaped\nfunction escapePath(path: string) {\n if (typeof path !== 'string') {\n throw new TypeError('Expected a string');\n }\n\n return path.replace(/[\\\\.[]/g, '\\\\$&');\n}\n\n// The keys returned by Object.entries() for arrays are strings\nfunction entries(value: any) {\n if (Array.isArray(value)) {\n return value.map((v, index) => [index, v]);\n }\n\n return Object.entries(value);\n}\n\nfunction stringifyPath(pathSegments: never[]) {\n let result = '';\n\n // eslint-disable-next-line prefer-const\n for (let [index, segment] of entries(pathSegments)) {\n if (typeof segment === 'number') {\n result += `[${segment}]`;\n } else {\n segment = escapePath(segment);\n result += index === 0 ? 
segment : `.${segment}`;\n }\n }\n\n return result;\n}\n\nfunction* deepKeysIterator(object: any, currentPath = []): any {\n if (!isObject(object)) {\n if (currentPath.length > 0) {\n yield stringifyPath(currentPath);\n }\n\n return;\n }\n\n for (const [key, value] of entries(object)) {\n yield* deepKeysIterator(value, [...currentPath, key] as any);\n }\n}\n\nexport function deepKeys(object: any) {\n return [...deepKeysIterator(object)];\n}\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;;AAEA,IAAMA,QAAQ,GAAIC,KAAW,IAAK;EAC9B,IAAMC,IAAI,GAAG,OAAOD,KAAK;EACzB,OAAOA,KAAK,KAAK,IAAI,KAAKC,IAAI,KAAK,QAAQ,IAAIA,IAAI,KAAK,UAAU,CAAC;AACvE,CAAC;AAED,IAAMC,cAAc,GAAG,IAAIC,GAAG,CAAC,CAC3B,WAAW,EACX,WAAW,EACX,aAAa,CAChB,CAAC;AAEF,IAAMC,MAAM,GAAG,IAAID,GAAG,CAAC,YAAY,CAAC;AAEpC,SAASE,eAAeA,CAACC,IAAY,EAAE;EACnC,IAAMC,KAAK,GAAG,EAAE;EAChB,IAAIC,cAAc,GAAG,EAAE;EACvB,IAAIC,WAAW,GAAG,OAAO;EACzB,IAAIC,UAAU,GAAG,KAAK;EAEtB,KAAK,IAAMC,SAAS,IAAIL,IAAI,EAAE;IAC1B,QAAQK,SAAS;MACb,KAAK,IAAI;QAAE;UACP,IAAIF,WAAW,KAAK,OAAO,EAAE;YACzB,MAAM,IAAIG,KAAK,CAAC,+BAA+B,CAAC;UACpD;UAEA,IAAIH,WAAW,KAAK,UAAU,EAAE;YAC5B,MAAM,IAAIG,KAAK,CAAC,kCAAkC,CAAC;UACvD;UAEA,IAAIF,UAAU,EAAE;YACZF,cAAc,IAAIG,SAAS;UAC/B;UAEAF,WAAW,GAAG,UAAU;UACxBC,UAAU,GAAG,CAACA,UAAU;UACxB;QACJ;MAEA,KAAK,GAAG;QAAE;UACN,IAAID,WAAW,KAAK,OAAO,EAAE;YACzB,MAAM,IAAIG,KAAK,CAAC,+BAA+B,CAAC;UACpD;UAEA,IAAIH,WAAW,KAAK,UAAU,EAAE;YAC5BA,WAAW,GAAG,UAAU;YACxB;UACJ;UAEA,IAAIC,UAAU,EAAE;YACZA,UAAU,GAAG,KAAK;YAClBF,cAAc,IAAIG,SAAS;YAC3B;UACJ;UAEA,IAAIT,cAAc,CAACW,GAAG,CAACL,cAAc,CAAC,EAAE;YACpC,OAAO,EAAE;UACb;UAEAD,KAAK,CAACO,IAAI,CAACN,cAAc,CAAC;UAC1BA,cAAc,GAAG,EAAE;UACnBC,WAAW,GAAG,UAAU;UACxB;QACJ;MAEA,KAAK,GAAG;QAAE;UACN,IAAIA,WAAW,KAAK,OAAO,EAAE;YACzB,MAAM,IAAIG,KAAK,CAAC,+BAA+B,CAAC;UACpD;UAEA,IAAIH,WAAW,KAAK,UAAU,EAAE;YAC5BA,WAAW,GAAG,OAAO;YACrB;UACJ;UAEA,IAAIC,UAAU,EAAE;YACZA,UAAU,GAAG,KAAK;YAClBF,cAAc,IAAIG,SAAS;YAC3B;UACJ;UAEA,IAAIF,WAAW,KAAK,UAAU,EAAE;YAC5B,IAAIP,cAAc,CAACW,GAAG,CAACL,cAAc,CAAC,EAAE;cACpC,OAAO,EAAE;YACb;YAEAD,KAAK,CAACO,IA
AI,CAACN,cAAc,CAAC;YAC1BA,cAAc,GAAG,EAAE;UACvB;UAEAC,WAAW,GAAG,OAAO;UACrB;QACJ;MAEA,KAAK,GAAG;QAAE;UACN,IAAIA,WAAW,KAAK,OAAO,EAAE;YACzBF,KAAK,CAACO,IAAI,CAACC,MAAM,CAACC,QAAQ,CAACR,cAAc,EAAE,EAAE,CAAC,CAAC;YAC/CA,cAAc,GAAG,EAAE;YACnBC,WAAW,GAAG,UAAU;YACxB;UACJ;UAEA,IAAIA,WAAW,KAAK,UAAU,EAAE;YAC5B,MAAM,IAAIG,KAAK,CAAC,kCAAkC,CAAC;UACvD;;UAEA;QACJ;MAEA;QAAS;UACL,IAAIH,WAAW,KAAK,OAAO,IAAI,CAACL,MAAM,CAACS,GAAG,CAACF,SAAS,CAAC,EAAE;YACnD,MAAM,IAAIC,KAAK,CAAC,+BAA+B,CAAC;UACpD;UAEA,IAAIH,WAAW,KAAK,UAAU,EAAE;YAC5B,MAAM,IAAIG,KAAK,CAAC,kCAAkC,CAAC;UACvD;UAEA,IAAIH,WAAW,KAAK,OAAO,EAAE;YACzBA,WAAW,GAAG,UAAU;UAC5B;UAEA,IAAIC,UAAU,EAAE;YACZA,UAAU,GAAG,KAAK;YAClBF,cAAc,IAAI,IAAI;UAC1B;UAEAA,cAAc,IAAIG,SAAS;QAC/B;IACJ;EACJ;EAEA,IAAID,UAAU,EAAE;IACZF,cAAc,IAAI,IAAI;EAC1B;EAEA,QAAQC,WAAW;IACf,KAAK,UAAU;MAAE;QACb,IAAIP,cAAc,CAACW,GAAG,CAACL,cAAc,CAAC,EAAE;UACpC,OAAO,EAAE;QACb;QAEAD,KAAK,CAACO,IAAI,CAACN,cAAc,CAAC;QAE1B;MACJ;IAEA,KAAK,OAAO;MAAE;QACV,MAAM,IAAII,KAAK,CAAC,sBAAsB,CAAC;MAC3C;IAEA,KAAK,OAAO;MAAE;QACVL,KAAK,CAACO,IAAI,CAAC,EAAE,CAAC;QAEd;MACJ;IACA;EACJ;EAEA,OAAOP,KAAK;AAChB;AAEA,SAASU,aAAaA,CAACC,MAAa,EAAEC,GAAW,EAAE;EAC/C,IAAI,OAAOA,GAAG,KAAK,QAAQ,IAAIC,KAAK,CAACC,OAAO,CAACH,MAAM,CAAC,EAAE;IAClD,IAAMI,KAAK,GAAGP,MAAM,CAACC,QAAQ,CAACG,GAAG,EAAE,EAAE,CAAC;IACtC,OAAOJ,MAAM,CAACQ,SAAS,CAACD,KAAK,CAAC,IAAIJ,MAAM,CAACI,KAAK,CAAC,KAAKJ,MAAM,CAACC,GAAG,CAAQ;EAC1E;EAEA,OAAO,KAAK;AAChB;AAEA,SAASK,oBAAoBA,CAACN,MAAW,EAAEC,GAAoB,EAAE;EAC7D,IAAIF,aAAa,CAACC,MAAM,EAAEC,GAAU,CAAC,EAAE;IACnC,MAAM,IAAIP,KAAK,CAAC,yBAAyB,CAAC;EAC9C;AACJ;;AAEA;AACA;AACA;AACA,OAAO,SAASa,WAAWA,CAACP,MAAW,EAAEZ,IAAuB,EAAEN,KAAW,EAAE;EAC3E,IAAIoB,KAAK,CAACC,OAAO,CAACf,IAAI,CAAC,EAAE;IACrBA,IAAI,GAAGA,IAAI,CAACoB,IAAI,CAAC,GAAG,CAAC;EACzB;;EAEA;AACJ;AACA;AACA;AACA;EACI,IACI,CAACpB,IAAI,CAACqB,QAAQ,CAAC,GAAG,CAAC,IACnB,CAACrB,IAAI,CAACqB,QAAQ,CAAC,GAAG,CAAC,EACrB;IACE,OAAOT,MAAM,CAACZ,IAAI,CAAC;EACvB;EAEA,IAAI,CAACP,QAAQ,CAACmB,MAAa,CAAC,IAAI,OAAOZ,IAAI,KAAK,QAAQ,EAAE;IACtD,OAAON,KAAK,KAAK4B,SAAS,GAAGV,MAAM,GAAG
lB,KAAK;EAC/C;EAEA,IAAM6B,SAAS,GAAGxB,eAAe,CAACC,IAAI,CAAC;EACvC,IAAIuB,SAAS,CAACC,MAAM,KAAK,CAAC,EAAE;IACxB,OAAO9B,KAAK;EAChB;EAEA,KAAK,IAAIsB,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGO,SAAS,CAACC,MAAM,EAAER,KAAK,EAAE,EAAE;IACnD,IAAMH,GAAG,GAAGU,SAAS,CAACP,KAAK,CAAC;IAE5B,IAAIL,aAAa,CAACC,MAAM,EAASC,GAAU,CAAC,EAAE;MAC1CD,MAAM,GAAGI,KAAK,KAAKO,SAAS,CAACC,MAAM,GAAG,CAAC,GAAGF,SAAS,GAAG,IAAI;IAC9D,CAAC,MAAM;MACHV,MAAM,GAAIA,MAAM,CAASC,GAAG,CAAC;IACjC;IAEA,IAAID,MAAM,KAAKU,SAAS,IAAIV,MAAM,KAAK,IAAI,EAAE;MACzC;MACA;MACA;MACA;MACA;MACA,IAAII,KAAK,KAAKO,SAAS,CAACC,MAAM,GAAG,CAAC,EAAE;QAChC,OAAO9B,KAAK;MAChB;MAEA;IACJ;EACJ;EAEA,OAAOkB,MAAM,KAAKU,SAAS,GAAG5B,KAAK,GAAGkB,MAAM;AAChD;AAEA,OAAO,SAASa,WAAWA,CAACb,MAAW,EAAEZ,IAAY,EAAEN,KAAU,EAAE;EAC/D,IAAIoB,KAAK,CAACC,OAAO,CAACf,IAAI,CAAC,EAAE;IACrBA,IAAI,GAAGA,IAAI,CAACoB,IAAI,CAAC,GAAG,CAAC;EACzB;EAEA,IAAI,CAAC3B,QAAQ,CAACmB,MAAa,CAAC,IAAI,OAAOZ,IAAI,KAAK,QAAQ,EAAE;IACtD,OAAOY,MAAM;EACjB;EAEA,IAAMc,IAAI,GAAGd,MAAM;EACnB,IAAMW,SAAS,GAAGxB,eAAe,CAACC,IAAI,CAAC;EAEvC,KAAK,IAAIgB,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGO,SAAS,CAACC,MAAM,EAAER,KAAK,EAAE,EAAE;IACnD,IAAMH,GAAG,GAAGU,SAAS,CAACP,KAAK,CAAC;IAE5BE,oBAAoB,CAACN,MAAM,EAAEC,GAAG,CAAC;IAEjC,IAAIG,KAAK,KAAKO,SAAS,CAACC,MAAM,GAAG,CAAC,EAAE;MAChCZ,MAAM,CAACC,GAAG,CAAC,GAAGnB,KAAK;IACvB,CAAC,MAAM,IAAI,CAACD,QAAQ,CAACmB,MAAM,CAACC,GAAG,CAAC,CAAC,EAAE;MAC/BD,MAAM,CAACC,GAAG,CAAC,GAAG,OAAOU,SAAS,CAACP,KAAK,GAAG,CAAC,CAAC,KAAK,QAAQ,GAAG,EAAE,GAAG,CAAC,CAAC;IACpE;IAEAJ,MAAM,GAAGA,MAAM,CAACC,GAAG,CAAC;EACxB;EAEA,OAAOa,IAAI;AACf;AAEA,OAAO,SAASC,cAAcA,CAACf,MAAW,EAAEZ,IAAY,EAAE;EACtD,IAAI,CAACP,QAAQ,CAACmB,MAAa,CAAC,IAAI,OAAOZ,IAAI,KAAK,QAAQ,EAAE;IACtD,OAAO,KAAK;EAChB;EAEA,IAAMuB,SAAS,GAAGxB,eAAe,CAACC,IAAI,CAAC;EAEvC,KAAK,IAAIgB,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGO,SAAS,CAACC,MAAM,EAAER,KAAK,EAAE,EAAE;IACnD,IAAMH,GAAG,GAAGU,SAAS,CAACP,KAAK,CAAC;IAE5BE,oBAAoB,CAACN,MAAM,EAAEC,GAAG,CAAC;IAEjC,IAAIG,KAAK,KAAKO,SAAS,CAACC,MAAM,GAAG,CAAC,EAAE;MAChC,OAAOZ,MAAM,CAACC,GAAG,CAAC;MAClB,OAAO,IAAI;IACf;IAEAD,MAAM,GAAGA,MAA
M,CAACC,GAAG,CAAC;IAEpB,IAAI,CAACpB,QAAQ,CAACmB,MAAa,CAAC,EAAE;MAC1B,OAAO,KAAK;IAChB;EACJ;AACJ;AAEA,OAAO,SAASgB,WAAWA,CAAChB,MAAW,EAAEZ,IAAY,EAAE;EACnD,IAAI,CAACP,QAAQ,CAACmB,MAAM,CAAC,IAAI,OAAOZ,IAAI,KAAK,QAAQ,EAAE;IAC/C,OAAO,KAAK;EAChB;EAEA,IAAMuB,SAAS,GAAGxB,eAAe,CAACC,IAAI,CAAC;EACvC,IAAIuB,SAAS,CAACC,MAAM,KAAK,CAAC,EAAE;IACxB,OAAO,KAAK;EAChB;EAEA,KAAK,IAAMX,GAAG,IAAIU,SAAS,EAAE;IACzB,IAAI,CAAC9B,QAAQ,CAACmB,MAAM,CAAC,IAAI,EAAEC,GAAG,IAAID,MAAM,CAAC,IAAID,aAAa,CAACC,MAAM,EAAEC,GAAU,CAAC,EAAE;MAC5E,OAAO,KAAK;IAChB;IAEAD,MAAM,GAAGA,MAAM,CAACC,GAAG,CAAC;EACxB;EAEA,OAAO,IAAI;AACf;;AAEA;AACA,SAASgB,UAAUA,CAAC7B,IAAY,EAAE;EAC9B,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;IAC1B,MAAM,IAAI8B,SAAS,CAAC,mBAAmB,CAAC;EAC5C;EAEA,OAAO9B,IAAI,CAAC+B,OAAO,CAAC,SAAS,EAAE,MAAM,CAAC;AAC1C;;AAEA;AACA,SAASC,OAAOA,CAACtC,KAAU,EAAE;EACzB,IAAIoB,KAAK,CAACC,OAAO,CAACrB,KAAK,CAAC,EAAE;IACtB,OAAOA,KAAK,CAACuC,GAAG,CAAC,CAACC,CAAC,EAAElB,KAAK,KAAK,CAACA,KAAK,EAAEkB,CAAC,CAAC,CAAC;EAC9C;EAEA,OAAOC,MAAM,CAACH,OAAO,CAACtC,KAAK,CAAC;AAChC;AAEA,SAAS0C,aAAaA,CAACC,YAAqB,EAAE;EAC1C,IAAIC,MAAM,GAAG,EAAE;;EAEf;EACA,KAAK,IAAI,CAACtB,KAAK,EAAEuB,OAAO,CAAC,IAAIP,OAAO,CAACK,YAAY,CAAC,EAAE;IAChD,IAAI,OAAOE,OAAO,KAAK,QAAQ,EAAE;MAC7BD,MAAM,UAAQC,OAAO,MAAG;IAC5B,CAAC,MAAM;MACHA,OAAO,GAAGV,UAAU,CAACU,OAAO,CAAC;MAC7BD,MAAM,IAAItB,KAAK,KAAK,CAAC,GAAGuB,OAAO,SAAOA,OAAS;IACnD;EACJ;EAEA,OAAOD,MAAM;AACjB;AAEA,UAAUE,gBAAgBA,CAAC5B,MAAW,EAAE6B,WAAW,GAAG,EAAE,EAAO;EAC3D,IAAI,CAAChD,QAAQ,CAACmB,MAAM,CAAC,EAAE;IACnB,IAAI6B,WAAW,CAACjB,MAAM,GAAG,CAAC,EAAE;MACxB,MAAMY,aAAa,CAACK,WAAW,CAAC;IACpC;IAEA;EACJ;EAEA,KAAK,IAAM,CAAC5B,GAAG,EAAEnB,KAAK,CAAC,IAAIsC,OAAO,CAACpB,MAAM,CAAC,EAAE;IACxC,OAAO4B,gBAAgB,CAAC9C,KAAK,EAAE,CAAC,GAAG+C,WAAW,EAAE5B,GAAG,CAAQ,CAAC;EAChE;AACJ;AAEA,OAAO,SAAS6B,QAAQA,CAAC9B,MAAW,EAAE;EAClC,OAAO,CAAC,GAAG4B,gBAAgB,CAAC5B,MAAM,CAAC,CAAC;AACxC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-object.js b/dist/esm/plugins/utils/utils-object.js deleted file mode 100644 index 
87842a8ff1e..00000000000 --- a/dist/esm/plugins/utils/utils-object.js +++ /dev/null @@ -1,189 +0,0 @@ -export function deepFreeze(o) { - Object.freeze(o); - Object.getOwnPropertyNames(o).forEach(function (prop) { - if (Object.prototype.hasOwnProperty.call(o, prop) && o[prop] !== null && (typeof o[prop] === 'object' || typeof o[prop] === 'function') && !Object.isFrozen(o[prop])) { - deepFreeze(o[prop]); - } - }); - return o; -} - -/** - * To get specific nested path values from objects, - * RxDB normally uses the 'dot-prop' npm module. - * But when performance is really relevant, this is not fast enough. - * Instead we use a monad that can prepare some stuff up front - * and we can reuse the generated function. - */ - -export function objectPathMonad(objectPath) { - var split = objectPath.split('.'); - - // reuse this variable for better performance. - var splitLength = split.length; - - /** - * Performance shortcut, - * if no nested path is used, - * directly return the field of the object. - */ - if (splitLength === 1) { - return obj => obj[objectPath]; - } - return obj => { - var currentVal = obj; - for (var i = 0; i < splitLength; ++i) { - var subPath = split[i]; - currentVal = currentVal[subPath]; - if (typeof currentVal === 'undefined') { - return currentVal; - } - } - return currentVal; - }; -} -export function getFromObjectOrThrow(obj, key) { - var val = obj[key]; - if (!val) { - throw new Error('missing value from object ' + key); - } - return val; -} - -/** - * returns a flattened object - * @link https://gist.github.com/penguinboy/762197 - */ -export function flattenObject(ob) { - var toReturn = {}; - for (var i in ob) { - if (!Object.prototype.hasOwnProperty.call(ob, i)) continue; - if (typeof ob[i] === 'object') { - var flatObject = flattenObject(ob[i]); - for (var x in flatObject) { - if (!Object.prototype.hasOwnProperty.call(flatObject, x)) continue; - toReturn[i + '.' 
+ x] = flatObject[x]; - } - } else { - toReturn[i] = ob[i]; - } - } - return toReturn; -} - -/** - * does a flat copy on the objects, - * is about 3 times faster then using deepClone - * @link https://jsperf.com/object-rest-spread-vs-clone/2 - */ -export function flatClone(obj) { - return Object.assign({}, obj); -} - -/** - * @link https://stackoverflow.com/a/11509718/3443137 - */ -export function firstPropertyNameOfObject(obj) { - return Object.keys(obj)[0]; -} -export function firstPropertyValueOfObject(obj) { - var key = Object.keys(obj)[0]; - return obj[key]; -} - -/** - * deep-sort an object so its attributes are in lexical order. - * Also sorts the arrays inside of the object if no-array-sort not set - */ -export function sortObject(obj, noArraySort = false) { - if (!obj) return obj; // do not sort null, false or undefined - - // array - if (!noArraySort && Array.isArray(obj)) { - return obj.sort((a, b) => { - if (typeof a === 'string' && typeof b === 'string') return a.localeCompare(b); - if (typeof a === 'object') return 1;else return -1; - }).map(i => sortObject(i, noArraySort)); - } - - // object - // array is also of type object - if (typeof obj === 'object' && !Array.isArray(obj)) { - var out = {}; - Object.keys(obj).sort((a, b) => a.localeCompare(b)).forEach(key => { - out[key] = sortObject(obj[key], noArraySort); - }); - return out; - } - - // everything else - return obj; -} - -/** - * Deep clone a plain json object. - * Does not work with recursive stuff - * or non-plain-json. - * IMPORTANT: Performance of this is very important, - * do not change it without running performance tests! 
- * - * @link https://github.com/zxdong262/deep-copy/blob/master/src/index.ts - */ -function deepClone(src) { - if (!src) { - return src; - } - if (src === null || typeof src !== 'object') { - return src; - } - if (Array.isArray(src)) { - var ret = new Array(src.length); - var i = ret.length; - while (i--) { - ret[i] = deepClone(src[i]); - } - return ret; - } - var dest = {}; - // eslint-disable-next-line guard-for-in - for (var key in src) { - dest[key] = deepClone(src[key]); - } - return dest; -} -export var clone = deepClone; - -/** - * overwrites the getter with the actual value - * Mostly used for caching stuff on the first run - */ -export function overwriteGetterForCaching(obj, getterName, value) { - Object.defineProperty(obj, getterName, { - get: function () { - return value; - } - }); - return value; -} -export function hasDeepProperty(obj, property) { - if (obj.hasOwnProperty(property)) { - return true; - } - if (Array.isArray(obj)) { - var has = !!obj.find(item => hasDeepProperty(item, property)); - return has; - } - - // Recursively check for property in nested objects - for (var key in obj) { - if (typeof obj[key] === 'object' && obj[key] !== null) { - if (hasDeepProperty(obj[key], property)) { - return true; - } - } - } - - // Return false if 'foobar' is not found at any level - return false; -} -//# sourceMappingURL=utils-object.js.map \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-object.js.map b/dist/esm/plugins/utils/utils-object.js.map deleted file mode 100644 index 657a40afd3f..00000000000 --- a/dist/esm/plugins/utils/utils-object.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"utils-object.js","names":["deepFreeze","o","Object","freeze","getOwnPropertyNames","forEach","prop","prototype","hasOwnProperty","call","isFrozen","objectPathMonad","objectPath","split","splitLength","length","obj","currentVal","i","subPath","getFromObjectOrThrow","key","val","Error","flattenObject","ob","toReturn","flatObject","x","flatClone","assign","firstPropertyNameOfObject","keys","firstPropertyValueOfObject","sortObject","noArraySort","Array","isArray","sort","a","b","localeCompare","map","out","deepClone","src","ret","dest","clone","overwriteGetterForCaching","getterName","value","defineProperty","get","hasDeepProperty","property","has","find","item"],"sources":["../../../../src/plugins/utils/utils-object.ts"],"sourcesContent":["import type {\n DeepReadonlyObject\n} from '../../types/index.d.ts';\n\nexport function deepFreeze(o: T): T {\n Object.freeze(o);\n Object.getOwnPropertyNames(o).forEach(function (prop) {\n if (\n Object.prototype.hasOwnProperty.call(o, prop) &&\n (o as any)[prop] !== null &&\n (\n typeof (o as any)[prop] === 'object'\n ||\n typeof (o as any)[prop] === 'function'\n ) &&\n !Object.isFrozen((o as any)[prop])\n ) {\n deepFreeze((o as any)[prop]);\n }\n });\n return o;\n}\n\n\n\n/**\n * To get specific nested path values from objects,\n * RxDB normally uses the 'dot-prop' npm module.\n * But when performance is really relevant, this is not fast enough.\n * Instead we use a monad that can prepare some stuff up front\n * and we can reuse the generated function.\n */\nexport type ObjectPathMonadFunction = (obj: T) => R;\nexport function objectPathMonad(objectPath: string): ObjectPathMonadFunction {\n const split = objectPath.split('.');\n\n // reuse this variable for better performance.\n const splitLength = split.length;\n\n /**\n * Performance shortcut,\n * if no nested path is used,\n * directly return the field of the object.\n */\n if (splitLength === 1) {\n return (obj: T) => (obj as any)[objectPath];\n }\n\n\n 
return (obj: T) => {\n let currentVal: any = obj;\n for (let i = 0; i < splitLength; ++i) {\n const subPath = split[i];\n currentVal = currentVal[subPath];\n if (typeof currentVal === 'undefined') {\n return currentVal;\n }\n }\n return currentVal;\n };\n}\n\n\nexport function getFromObjectOrThrow(\n obj: { [k: string]: V; },\n key: string\n): V {\n const val = obj[key];\n if (!val) {\n throw new Error('missing value from object ' + key);\n }\n return val;\n}\n\n/**\n * returns a flattened object\n * @link https://gist.github.com/penguinboy/762197\n */\nexport function flattenObject(ob: any) {\n const toReturn: any = {};\n\n for (const i in ob) {\n if (!Object.prototype.hasOwnProperty.call(ob, i)) continue;\n if ((typeof ob[i]) === 'object') {\n const flatObject = flattenObject(ob[i]);\n for (const x in flatObject) {\n if (!Object.prototype.hasOwnProperty.call(flatObject, x)) continue;\n toReturn[i + '.' + x] = flatObject[x];\n }\n } else {\n toReturn[i] = ob[i];\n }\n }\n return toReturn;\n}\n\n\n/**\n * does a flat copy on the objects,\n * is about 3 times faster then using deepClone\n * @link https://jsperf.com/object-rest-spread-vs-clone/2\n */\nexport function flatClone(obj: T | DeepReadonlyObject | Readonly): T {\n return Object.assign({}, obj) as any;\n}\n\n/**\n * @link https://stackoverflow.com/a/11509718/3443137\n */\nexport function firstPropertyNameOfObject(obj: any): string {\n return Object.keys(obj)[0];\n}\nexport function firstPropertyValueOfObject(obj: { [k: string]: T; }): T {\n const key = Object.keys(obj)[0];\n return obj[key];\n}\n\n\n/**\n * deep-sort an object so its attributes are in lexical order.\n * Also sorts the arrays inside of the object if no-array-sort not set\n */\nexport function sortObject(obj: any, noArraySort = false): any {\n if (!obj) return obj; // do not sort null, false or undefined\n\n // array\n if (!noArraySort && Array.isArray(obj)) {\n return obj\n .sort((a, b) => {\n if (typeof a === 'string' && typeof b === 
'string')\n return a.localeCompare(b);\n\n if (typeof a === 'object') return 1;\n else return -1;\n })\n .map(i => sortObject(i, noArraySort));\n }\n\n // object\n // array is also of type object\n if (typeof obj === 'object' && !Array.isArray(obj)) {\n const out: any = {};\n Object.keys(obj)\n .sort((a, b) => a.localeCompare(b))\n .forEach(key => {\n out[key] = sortObject(obj[key], noArraySort);\n });\n return out;\n }\n\n // everything else\n return obj;\n}\n\n\n\n/**\n * Deep clone a plain json object.\n * Does not work with recursive stuff\n * or non-plain-json.\n * IMPORTANT: Performance of this is very important,\n * do not change it without running performance tests!\n *\n * @link https://github.com/zxdong262/deep-copy/blob/master/src/index.ts\n */\nfunction deepClone(src: T | DeepReadonlyObject): T {\n if (!src) {\n return src;\n }\n if (src === null || typeof (src) !== 'object') {\n return src;\n }\n if (Array.isArray(src)) {\n const ret = new Array(src.length);\n let i = ret.length;\n while (i--) {\n ret[i] = deepClone(src[i]);\n }\n return ret as any;\n }\n const dest: any = {};\n // eslint-disable-next-line guard-for-in\n for (const key in src) {\n dest[key] = deepClone(src[key]);\n }\n return dest;\n}\nexport const clone = deepClone;\n\n\n\n/**\n * overwrites the getter with the actual value\n * Mostly used for caching stuff on the first run\n */\nexport function overwriteGetterForCaching(\n obj: any,\n getterName: string,\n value: ValueType\n): ValueType {\n Object.defineProperty(obj, getterName, {\n get: function () {\n return value;\n }\n });\n return value;\n}\n\n\nexport function hasDeepProperty(obj: any, property: string): boolean {\n if (obj.hasOwnProperty(property)) {\n return true;\n }\n\n if (Array.isArray(obj)) {\n const has = !!obj.find(item => hasDeepProperty(item, property));\n return has;\n }\n\n // Recursively check for property in nested objects\n for (const key in obj) {\n if (typeof obj[key] === 'object' && obj[key] !== null) {\n if 
(hasDeepProperty(obj[key], property)) {\n return true;\n }\n }\n }\n\n // Return false if 'foobar' is not found at any level\n return false;\n}\n"],"mappings":"AAIA,OAAO,SAASA,UAAUA,CAAIC,CAAI,EAAK;EACnCC,MAAM,CAACC,MAAM,CAACF,CAAC,CAAC;EAChBC,MAAM,CAACE,mBAAmB,CAACH,CAAC,CAAC,CAACI,OAAO,CAAC,UAAUC,IAAI,EAAE;IAClD,IACIJ,MAAM,CAACK,SAAS,CAACC,cAAc,CAACC,IAAI,CAACR,CAAC,EAAEK,IAAI,CAAC,IAC5CL,CAAC,CAASK,IAAI,CAAC,KAAK,IAAI,KAErB,OAAQL,CAAC,CAASK,IAAI,CAAC,KAAK,QAAQ,IAEpC,OAAQL,CAAC,CAASK,IAAI,CAAC,KAAK,UAAU,CACzC,IACD,CAACJ,MAAM,CAACQ,QAAQ,CAAET,CAAC,CAASK,IAAI,CAAC,CAAC,EACpC;MACEN,UAAU,CAAEC,CAAC,CAASK,IAAI,CAAC,CAAC;IAChC;EACJ,CAAC,CAAC;EACF,OAAOL,CAAC;AACZ;;AAIA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,OAAO,SAASU,eAAeA,CAAaC,UAAkB,EAAiC;EAC3F,IAAMC,KAAK,GAAGD,UAAU,CAACC,KAAK,CAAC,GAAG,CAAC;;EAEnC;EACA,IAAMC,WAAW,GAAGD,KAAK,CAACE,MAAM;;EAEhC;AACJ;AACA;AACA;AACA;EACI,IAAID,WAAW,KAAK,CAAC,EAAE;IACnB,OAAQE,GAAM,IAAMA,GAAG,CAASJ,UAAU,CAAC;EAC/C;EAGA,OAAQI,GAAM,IAAK;IACf,IAAIC,UAAe,GAAGD,GAAG;IACzB,KAAK,IAAIE,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGJ,WAAW,EAAE,EAAEI,CAAC,EAAE;MAClC,IAAMC,OAAO,GAAGN,KAAK,CAACK,CAAC,CAAC;MACxBD,UAAU,GAAGA,UAAU,CAACE,OAAO,CAAC;MAChC,IAAI,OAAOF,UAAU,KAAK,WAAW,EAAE;QACnC,OAAOA,UAAU;MACrB;IACJ;IACA,OAAOA,UAAU;EACrB,CAAC;AACL;AAGA,OAAO,SAASG,oBAAoBA,CAChCJ,GAAwB,EACxBK,GAAW,EACV;EACD,IAAMC,GAAG,GAAGN,GAAG,CAACK,GAAG,CAAC;EACpB,IAAI,CAACC,GAAG,EAAE;IACN,MAAM,IAAIC,KAAK,CAAC,4BAA4B,GAAGF,GAAG,CAAC;EACvD;EACA,OAAOC,GAAG;AACd;;AAEA;AACA;AACA;AACA;AACA,OAAO,SAASE,aAAaA,CAACC,EAAO,EAAE;EACnC,IAAMC,QAAa,GAAG,CAAC,CAAC;EAExB,KAAK,IAAMR,CAAC,IAAIO,EAAE,EAAE;IAChB,IAAI,CAACvB,MAAM,CAACK,SAAS,CAACC,cAAc,CAACC,IAAI,CAACgB,EAAE,EAAEP,CAAC,CAAC,EAAE;IAClD,IAAK,OAAOO,EAAE,CAACP,CAAC,CAAC,KAAM,QAAQ,EAAE;MAC7B,IAAMS,UAAU,GAAGH,aAAa,CAACC,EAAE,CAACP,CAAC,CAAC,CAAC;MACvC,KAAK,IAAMU,CAAC,IAAID,UAAU,EAAE;QACxB,IAAI,CAACzB,MAAM,CAACK,SAAS,CAACC,cAAc,CAACC,IAAI,CAACkB,UAAU,EAAEC,CAAC,CAAC,EAAE;QAC1DF,QAAQ,CAACR,CAAC,GAAG,GAAG,GAAGU,CAAC,CAAC,GAAGD,UAAU,CAACC,CAAC,CAAC;MACzC;IACJ,C
AAC,MAAM;MACHF,QAAQ,CAACR,CAAC,CAAC,GAAGO,EAAE,CAACP,CAAC,CAAC;IACvB;EACJ;EACA,OAAOQ,QAAQ;AACnB;;AAGA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASG,SAASA,CAAIb,GAA4C,EAAK;EAC1E,OAAOd,MAAM,CAAC4B,MAAM,CAAC,CAAC,CAAC,EAAEd,GAAG,CAAC;AACjC;;AAEA;AACA;AACA;AACA,OAAO,SAASe,yBAAyBA,CAACf,GAAQ,EAAU;EACxD,OAAOd,MAAM,CAAC8B,IAAI,CAAChB,GAAG,CAAC,CAAC,CAAC,CAAC;AAC9B;AACA,OAAO,SAASiB,0BAA0BA,CAAIjB,GAAwB,EAAK;EACvE,IAAMK,GAAG,GAAGnB,MAAM,CAAC8B,IAAI,CAAChB,GAAG,CAAC,CAAC,CAAC,CAAC;EAC/B,OAAOA,GAAG,CAACK,GAAG,CAAC;AACnB;;AAGA;AACA;AACA;AACA;AACA,OAAO,SAASa,UAAUA,CAAClB,GAAQ,EAAEmB,WAAW,GAAG,KAAK,EAAO;EAC3D,IAAI,CAACnB,GAAG,EAAE,OAAOA,GAAG,CAAC,CAAC;;EAEtB;EACA,IAAI,CAACmB,WAAW,IAAIC,KAAK,CAACC,OAAO,CAACrB,GAAG,CAAC,EAAE;IACpC,OAAOA,GAAG,CACLsB,IAAI,CAAC,CAACC,CAAC,EAAEC,CAAC,KAAK;MACZ,IAAI,OAAOD,CAAC,KAAK,QAAQ,IAAI,OAAOC,CAAC,KAAK,QAAQ,EAC9C,OAAOD,CAAC,CAACE,aAAa,CAACD,CAAC,CAAC;MAE7B,IAAI,OAAOD,CAAC,KAAK,QAAQ,EAAE,OAAO,CAAC,CAAC,KAC/B,OAAO,CAAC,CAAC;IAClB,CAAC,CAAC,CACDG,GAAG,CAACxB,CAAC,IAAIgB,UAAU,CAAChB,CAAC,EAAEiB,WAAW,CAAC,CAAC;EAC7C;;EAEA;EACA;EACA,IAAI,OAAOnB,GAAG,KAAK,QAAQ,IAAI,CAACoB,KAAK,CAACC,OAAO,CAACrB,GAAG,CAAC,EAAE;IAChD,IAAM2B,GAAQ,GAAG,CAAC,CAAC;IACnBzC,MAAM,CAAC8B,IAAI,CAAChB,GAAG,CAAC,CACXsB,IAAI,CAAC,CAACC,CAAC,EAAEC,CAAC,KAAKD,CAAC,CAACE,aAAa,CAACD,CAAC,CAAC,CAAC,CAClCnC,OAAO,CAACgB,GAAG,IAAI;MACZsB,GAAG,CAACtB,GAAG,CAAC,GAAGa,UAAU,CAAClB,GAAG,CAACK,GAAG,CAAC,EAAEc,WAAW,CAAC;IAChD,CAAC,CAAC;IACN,OAAOQ,GAAG;EACd;;EAEA;EACA,OAAO3B,GAAG;AACd;;AAIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,SAAS4B,SAASA,CAAIC,GAA8B,EAAK;EACrD,IAAI,CAACA,GAAG,EAAE;IACN,OAAOA,GAAG;EACd;EACA,IAAIA,GAAG,KAAK,IAAI,IAAI,OAAQA,GAAI,KAAK,QAAQ,EAAE;IAC3C,OAAOA,GAAG;EACd;EACA,IAAIT,KAAK,CAACC,OAAO,CAACQ,GAAG,CAAC,EAAE;IACpB,IAAMC,GAAG,GAAG,IAAIV,KAAK,CAACS,GAAG,CAAC9B,MAAM,CAAC;IACjC,IAAIG,CAAC,GAAG4B,GAAG,CAAC/B,MAAM;IAClB,OAAOG,CAAC,EAAE,EAAE;MACR4B,GAAG,CAAC5B,CAAC,CAAC,GAAG0B,SAAS,CAACC,GAAG,CAAC3B,CAAC,CAAC,CAAC;IAC9B;IACA,OAAO4B,GAAG;EACd;EACA,IAAMC,IAAS,GAAG,CAAC,CAAC;EACpB;EACA,KAAK,IA
AM1B,GAAG,IAAIwB,GAAG,EAAE;IACnBE,IAAI,CAAC1B,GAAG,CAAC,GAAGuB,SAAS,CAACC,GAAG,CAACxB,GAAG,CAAC,CAAC;EACnC;EACA,OAAO0B,IAAI;AACf;AACA,OAAO,IAAMC,KAAK,GAAGJ,SAAS;;AAI9B;AACA;AACA;AACA;AACA,OAAO,SAASK,yBAAyBA,CACrCjC,GAAQ,EACRkC,UAAkB,EAClBC,KAAgB,EACP;EACTjD,MAAM,CAACkD,cAAc,CAACpC,GAAG,EAAEkC,UAAU,EAAE;IACnCG,GAAG,EAAE,SAAAA,CAAA,EAAY;MACb,OAAOF,KAAK;IAChB;EACJ,CAAC,CAAC;EACF,OAAOA,KAAK;AAChB;AAGA,OAAO,SAASG,eAAeA,CAACtC,GAAQ,EAAEuC,QAAgB,EAAW;EACjE,IAAIvC,GAAG,CAACR,cAAc,CAAC+C,QAAQ,CAAC,EAAE;IAC9B,OAAO,IAAI;EACf;EAEA,IAAInB,KAAK,CAACC,OAAO,CAACrB,GAAG,CAAC,EAAE;IACpB,IAAMwC,GAAG,GAAG,CAAC,CAACxC,GAAG,CAACyC,IAAI,CAACC,IAAI,IAAIJ,eAAe,CAACI,IAAI,EAAEH,QAAQ,CAAC,CAAC;IAC/D,OAAOC,GAAG;EACd;;EAEA;EACA,KAAK,IAAMnC,GAAG,IAAIL,GAAG,EAAE;IACnB,IAAI,OAAOA,GAAG,CAACK,GAAG,CAAC,KAAK,QAAQ,IAAIL,GAAG,CAACK,GAAG,CAAC,KAAK,IAAI,EAAE;MACnD,IAAIiC,eAAe,CAACtC,GAAG,CAACK,GAAG,CAAC,EAAEkC,QAAQ,CAAC,EAAE;QACrC,OAAO,IAAI;MACf;IACJ;EACJ;;EAEA;EACA,OAAO,KAAK;AAChB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-other.js b/dist/esm/plugins/utils/utils-other.js deleted file mode 100644 index d397f6c3af7..00000000000 --- a/dist/esm/plugins/utils/utils-other.js +++ /dev/null @@ -1,30 +0,0 @@ -export function runXTimes(xTimes, fn) { - new Array(xTimes).fill(0).forEach((_v, idx) => fn(idx)); -} -export function ensureNotFalsy(obj, message) { - if (!obj) { - if (!message) { - message = ''; - } - throw new Error('ensureNotFalsy() is falsy: ' + message); - } - return obj; -} -export function ensureInteger(obj) { - if (!Number.isInteger(obj)) { - throw new Error('ensureInteger() is falsy'); - } - return obj; -} - -/** - * Using shareReplay() without settings will not unsubscribe - * if there are no more subscribers. - * So we use these defaults. 
- * @link https://cartant.medium.com/rxjs-whats-changed-with-sharereplay-65c098843e95 - */ -export var RXJS_SHARE_REPLAY_DEFAULTS = { - bufferSize: 1, - refCount: true -}; -//# sourceMappingURL=utils-other.js.map \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-other.js.map b/dist/esm/plugins/utils/utils-other.js.map deleted file mode 100644 index ea2c0c3364b..00000000000 --- a/dist/esm/plugins/utils/utils-other.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-other.js","names":["runXTimes","xTimes","fn","Array","fill","forEach","_v","idx","ensureNotFalsy","obj","message","Error","ensureInteger","Number","isInteger","RXJS_SHARE_REPLAY_DEFAULTS","bufferSize","refCount"],"sources":["../../../../src/plugins/utils/utils-other.ts"],"sourcesContent":["export function runXTimes(xTimes: number, fn: (idx: number) => void) {\n new Array(xTimes).fill(0).forEach((_v, idx) => fn(idx));\n}\n\nexport function ensureNotFalsy(obj: T | false | undefined | null, message?: string): T {\n if (!obj) {\n if (!message) {\n message = '';\n }\n throw new Error('ensureNotFalsy() is falsy: ' + message);\n }\n return obj;\n}\n\nexport function ensureInteger(obj: unknown): number {\n if (!Number.isInteger(obj)) {\n throw new Error('ensureInteger() is falsy');\n }\n return obj as number;\n}\n\n/**\n * Using shareReplay() without settings will not unsubscribe\n * if there are no more subscribers.\n * So we use these defaults.\n * @link https://cartant.medium.com/rxjs-whats-changed-with-sharereplay-65c098843e95\n */\nexport const RXJS_SHARE_REPLAY_DEFAULTS = {\n bufferSize: 1,\n refCount: 
true\n};\n"],"mappings":"AAAA,OAAO,SAASA,SAASA,CAACC,MAAc,EAAEC,EAAyB,EAAE;EACjE,IAAIC,KAAK,CAACF,MAAM,CAAC,CAACG,IAAI,CAAC,CAAC,CAAC,CAACC,OAAO,CAAC,CAACC,EAAE,EAAEC,GAAG,KAAKL,EAAE,CAACK,GAAG,CAAC,CAAC;AAC3D;AAEA,OAAO,SAASC,cAAcA,CAAIC,GAAiC,EAAEC,OAAgB,EAAK;EACtF,IAAI,CAACD,GAAG,EAAE;IACN,IAAI,CAACC,OAAO,EAAE;MACVA,OAAO,GAAG,EAAE;IAChB;IACA,MAAM,IAAIC,KAAK,CAAC,6BAA6B,GAAGD,OAAO,CAAC;EAC5D;EACA,OAAOD,GAAG;AACd;AAEA,OAAO,SAASG,aAAaA,CAACH,GAAY,EAAU;EAChD,IAAI,CAACI,MAAM,CAACC,SAAS,CAACL,GAAG,CAAC,EAAE;IACxB,MAAM,IAAIE,KAAK,CAAC,0BAA0B,CAAC;EAC/C;EACA,OAAOF,GAAG;AACd;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,IAAMM,0BAA0B,GAAG;EACtCC,UAAU,EAAE,CAAC;EACbC,QAAQ,EAAE;AACd,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-promise.js b/dist/esm/plugins/utils/utils-promise.js deleted file mode 100644 index bcf025721a3..00000000000 --- a/dist/esm/plugins/utils/utils-promise.js +++ /dev/null @@ -1,104 +0,0 @@ -/** - * returns a promise that resolves on the next tick - */ -export function nextTick() { - return new Promise(res => setTimeout(res, 0)); -} -export function promiseWait(ms = 0) { - return new Promise(res => setTimeout(res, ms)); -} -export function toPromise(maybePromise) { - if (maybePromise && typeof maybePromise.then === 'function') { - // is promise - return maybePromise; - } else { - return Promise.resolve(maybePromise); - } -} - -/** - * returns true if promise is given - */ -export function isPromise(value) { - if (typeof value !== 'undefined' && typeof value.then === 'function') { - return true; - } - return false; -} - -/** - * Reusing resolved promises has a better - * performance than creating new ones each time. 
- */ -export var PROMISE_RESOLVE_TRUE = Promise.resolve(true); -export var PROMISE_RESOLVE_FALSE = Promise.resolve(false); -export var PROMISE_RESOLVE_NULL = Promise.resolve(null); -export var PROMISE_RESOLVE_VOID = Promise.resolve(); -export function requestIdlePromiseNoQueue( -/** - * We always set a timeout! - * RxDB might be used on the server side where the - * server runs 24/4 on 99% CPU. So without a timeout - * this would never resolve which could cause a memory leak. - */ -timeout = 10000) { - /** - * Do not use window.requestIdleCallback - * because some javascript runtimes like react-native, - * do not have a window object, but still have a global - * requestIdleCallback function. - * @link https://github.com/pubkey/rxdb/issues/4804 - */ - if (typeof requestIdleCallback === 'function') { - return new Promise(res => { - requestIdleCallback(() => res(), { - timeout - }); - }); - } else { - return promiseWait(0); - } -} - -/** - * If multiple operations wait for an requestIdlePromise - * we do not want them to resolve all at the same time. - * So we have to queue the calls. - */ -var idlePromiseQueue = PROMISE_RESOLVE_VOID; -export function requestIdlePromise(timeout = undefined) { - idlePromiseQueue = idlePromiseQueue.then(() => { - return requestIdlePromiseNoQueue(timeout); - }); - return idlePromiseQueue; -} - -/** - * run the callback if requestIdleCallback available - * do nothing if not - * @link https://developer.mozilla.org/de/docs/Web/API/Window/requestIdleCallback - */ -export function requestIdleCallbackIfAvailable(fun) { - /** - * Do not use window.requestIdleCallback - * because some javascript runtimes like react-native, - * do not have a window object, but still have a global - * requestIdleCallback function. 
- * @link https://github.com/pubkey/rxdb/issues/4804 - */ - if (typeof requestIdleCallback === 'function') { - requestIdleCallback(() => { - fun(); - }); - } -} - -/** - * like Promise.all() but runs in series instead of parallel - * @link https://github.com/egoist/promise.series/blob/master/index.js - * @param tasks array with functions that return a promise - */ -export function promiseSeries(tasks, initial) { - return tasks.reduce((current, next) => current.then(next), Promise.resolve(initial)); -} -//# sourceMappingURL=utils-promise.js.map \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-promise.js.map b/dist/esm/plugins/utils/utils-promise.js.map deleted file mode 100644 index 0c34e65caa1..00000000000 --- a/dist/esm/plugins/utils/utils-promise.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-promise.js","names":["nextTick","Promise","res","setTimeout","promiseWait","ms","toPromise","maybePromise","then","resolve","isPromise","value","PROMISE_RESOLVE_TRUE","PROMISE_RESOLVE_FALSE","PROMISE_RESOLVE_NULL","PROMISE_RESOLVE_VOID","requestIdlePromiseNoQueue","timeout","requestIdleCallback","idlePromiseQueue","requestIdlePromise","undefined","requestIdleCallbackIfAvailable","fun","promiseSeries","tasks","initial","reduce","current","next"],"sources":["../../../../src/plugins/utils/utils-promise.ts"],"sourcesContent":["/**\n * returns a promise that resolves on the next tick\n */\nexport function nextTick(): Promise {\n return new Promise(res => setTimeout(res, 0));\n}\n\nexport function promiseWait(ms: number = 0): Promise {\n return new Promise(res => setTimeout(res, ms));\n}\n\nexport function toPromise(maybePromise: Promise | T): Promise {\n if (maybePromise && typeof (maybePromise as any).then === 'function') {\n // is promise\n return maybePromise as any;\n } else {\n return Promise.resolve(maybePromise);\n }\n}\n\n/**\n * returns true if promise is given\n */\nexport function isPromise(value: any): boolean {\n if (\n typeof 
value !== 'undefined' &&\n typeof value.then === 'function'\n ) {\n return true;\n }\n return false;\n}\n\n/**\n * Reusing resolved promises has a better\n * performance than creating new ones each time.\n */\nexport const PROMISE_RESOLVE_TRUE: Promise = Promise.resolve(true);\nexport const PROMISE_RESOLVE_FALSE: Promise = Promise.resolve(false);\nexport const PROMISE_RESOLVE_NULL: Promise = Promise.resolve(null);\nexport const PROMISE_RESOLVE_VOID: Promise = Promise.resolve();\n\n\nexport function requestIdlePromiseNoQueue(\n /**\n * We always set a timeout!\n * RxDB might be used on the server side where the\n * server runs 24/4 on 99% CPU. So without a timeout\n * this would never resolve which could cause a memory leak.\n */\n timeout: number | undefined = 10000\n) {\n /**\n * Do not use window.requestIdleCallback\n * because some javascript runtimes like react-native,\n * do not have a window object, but still have a global\n * requestIdleCallback function.\n * @link https://github.com/pubkey/rxdb/issues/4804\n */\n if (\n typeof requestIdleCallback === 'function'\n ) {\n return new Promise(res => {\n requestIdleCallback(\n () => res(),\n {\n timeout\n }\n );\n });\n } else {\n return promiseWait(0);\n }\n}\n\n/**\n * If multiple operations wait for an requestIdlePromise\n * we do not want them to resolve all at the same time.\n * So we have to queue the calls.\n */\nlet idlePromiseQueue = PROMISE_RESOLVE_VOID;\nexport function requestIdlePromise(\n timeout: number | undefined = undefined\n) {\n idlePromiseQueue = idlePromiseQueue.then(() => {\n return requestIdlePromiseNoQueue(timeout);\n });\n return idlePromiseQueue;\n}\n\n\n/**\n * run the callback if requestIdleCallback available\n * do nothing if not\n * @link https://developer.mozilla.org/de/docs/Web/API/Window/requestIdleCallback\n */\nexport function requestIdleCallbackIfAvailable(fun: Function): void {\n /**\n * Do not use window.requestIdleCallback\n * because some javascript runtimes like 
react-native,\n * do not have a window object, but still have a global\n * requestIdleCallback function.\n * @link https://github.com/pubkey/rxdb/issues/4804\n */\n if (\n typeof requestIdleCallback === 'function'\n ) {\n requestIdleCallback(() => {\n fun();\n });\n }\n}\n\n\n/**\n * like Promise.all() but runs in series instead of parallel\n * @link https://github.com/egoist/promise.series/blob/master/index.js\n * @param tasks array with functions that return a promise\n */\nexport function promiseSeries(\n tasks: Function[],\n initial?: any\n): Promise {\n return tasks\n .reduce(\n (current, next) => (current as any).then(next),\n Promise.resolve(initial)\n );\n}\n"],"mappings":"AAAA;AACA;AACA;AACA,OAAO,SAASA,QAAQA,CAAA,EAAkB;EACtC,OAAO,IAAIC,OAAO,CAACC,GAAG,IAAIC,UAAU,CAACD,GAAG,EAAE,CAAC,CAAC,CAAC;AACjD;AAEA,OAAO,SAASE,WAAWA,CAACC,EAAU,GAAG,CAAC,EAAiB;EACvD,OAAO,IAAIJ,OAAO,CAACC,GAAG,IAAIC,UAAU,CAACD,GAAG,EAAEG,EAAE,CAAC,CAAC;AAClD;AAEA,OAAO,SAASC,SAASA,CAAIC,YAA4B,EAAc;EACnE,IAAIA,YAAY,IAAI,OAAQA,YAAY,CAASC,IAAI,KAAK,UAAU,EAAE;IAClE;IACA,OAAOD,YAAY;EACvB,CAAC,MAAM;IACH,OAAON,OAAO,CAACQ,OAAO,CAACF,YAAY,CAAC;EACxC;AACJ;;AAEA;AACA;AACA;AACA,OAAO,SAASG,SAASA,CAACC,KAAU,EAAW;EAC3C,IACI,OAAOA,KAAK,KAAK,WAAW,IAC5B,OAAOA,KAAK,CAACH,IAAI,KAAK,UAAU,EAClC;IACE,OAAO,IAAI;EACf;EACA,OAAO,KAAK;AAChB;;AAEA;AACA;AACA;AACA;AACA,OAAO,IAAMI,oBAAmC,GAAGX,OAAO,CAACQ,OAAO,CAAC,IAAI,CAAC;AACxE,OAAO,IAAMI,qBAAqC,GAAGZ,OAAO,CAACQ,OAAO,CAAC,KAAK,CAAC;AAC3E,OAAO,IAAMK,oBAAmC,GAAGb,OAAO,CAACQ,OAAO,CAAC,IAAI,CAAC;AACxE,OAAO,IAAMM,oBAAmC,GAAGd,OAAO,CAACQ,OAAO,CAAC,CAAC;AAGpE,OAAO,SAASO,yBAAyBA;AACrC;AACJ;AACA;AACA;AACA;AACA;AACIC,OAA2B,GAAG,KAAK,EACrC;EACE;AACJ;AACA;AACA;AACA;AACA;AACA;EACI,IACI,OAAOC,mBAAmB,KAAK,UAAU,EAC3C;IACE,OAAO,IAAIjB,OAAO,CAAOC,GAAG,IAAI;MAC5BgB,mBAAmB,CACf,MAAMhB,GAAG,CAAC,CAAC,EACX;QACIe;MACJ,CACJ,CAAC;IACL,CAAC,CAAC;EACN,CAAC,MAAM;IACH,OAAOb,WAAW,CAAC,CAAC,CAAC;EACzB;AACJ;;AAEA;AACA;AACA;AACA;AACA;AACA,IAAIe,gBAAgB,GAAGJ,oBAAoB;AAC3C,OAAO,SAASK,kBAAkBA,CAC9BH,OAA2B
,GAAGI,SAAS,EACzC;EACEF,gBAAgB,GAAGA,gBAAgB,CAACX,IAAI,CAAC,MAAM;IAC3C,OAAOQ,yBAAyB,CAACC,OAAO,CAAC;EAC7C,CAAC,CAAC;EACF,OAAOE,gBAAgB;AAC3B;;AAGA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASG,8BAA8BA,CAACC,GAAa,EAAQ;EAChE;AACJ;AACA;AACA;AACA;AACA;AACA;EACI,IACI,OAAOL,mBAAmB,KAAK,UAAU,EAC3C;IACEA,mBAAmB,CAAC,MAAM;MACtBK,GAAG,CAAC,CAAC;IACT,CAAC,CAAC;EACN;AACJ;;AAGA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASC,aAAaA,CACzBC,KAAiB,EACjBC,OAAa,EACC;EACd,OAAOD,KAAK,CACPE,MAAM,CACH,CAACC,OAAO,EAAEC,IAAI,KAAMD,OAAO,CAASpB,IAAI,CAACqB,IAAI,CAAC,EAC9C5B,OAAO,CAACQ,OAAO,CAACiB,OAAO,CAC3B,CAAC;AACT","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-regex.js b/dist/esm/plugins/utils/utils-regex.js deleted file mode 100644 index c2495237495..00000000000 --- a/dist/esm/plugins/utils/utils-regex.js +++ /dev/null @@ -1,3 +0,0 @@ -export var REGEX_ALL_DOTS = /\./g; -export var REGEX_ALL_PIPES = /\|/g; -//# sourceMappingURL=utils-regex.js.map \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-regex.js.map b/dist/esm/plugins/utils/utils-regex.js.map deleted file mode 100644 index a18ff848691..00000000000 --- a/dist/esm/plugins/utils/utils-regex.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-regex.js","names":["REGEX_ALL_DOTS","REGEX_ALL_PIPES"],"sources":["../../../../src/plugins/utils/utils-regex.ts"],"sourcesContent":["export const REGEX_ALL_DOTS = /\\./g;\nexport const REGEX_ALL_PIPES = /\\|/g;\n"],"mappings":"AAAA,OAAO,IAAMA,cAAc,GAAG,KAAK;AACnC,OAAO,IAAMC,eAAe,GAAG,KAAK","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-revision.js b/dist/esm/plugins/utils/utils-revision.js deleted file mode 100644 index 3ebfe6a64fa..00000000000 --- a/dist/esm/plugins/utils/utils-revision.js +++ /dev/null @@ -1,42 +0,0 @@ -/** - * Parses the full revision. - * Do NOT use this if you only need the revision height, - * then use getHeightOfRevision() instead which is faster. 
- */ -export function parseRevision(revision) { - var split = revision.split('-'); - if (split.length !== 2) { - throw new Error('malformatted revision: ' + revision); - } - return { - height: parseInt(split[0], 10), - hash: split[1] - }; -} - -/** - * @hotPath Performance is very important here - * because we need to parse the revision height very often. - * Do not use `parseInt(revision.split('-')[0], 10)` because - * only fetching the start-number chars is faster. - */ -export function getHeightOfRevision(revision) { - var useChars = ''; - for (var index = 0; index < revision.length; index++) { - var char = revision[index]; - if (char === '-') { - return parseInt(useChars, 10); - } - useChars += char; - } - throw new Error('malformatted revision: ' + revision); -} - -/** - * Creates the next write revision for a given document. - */ -export function createRevision(databaseInstanceToken, previousDocData) { - var newRevisionHeight = !previousDocData ? 1 : getHeightOfRevision(previousDocData._rev) + 1; - return newRevisionHeight + '-' + databaseInstanceToken; -} -//# sourceMappingURL=utils-revision.js.map \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-revision.js.map b/dist/esm/plugins/utils/utils-revision.js.map deleted file mode 100644 index 9b8242a02a3..00000000000 --- a/dist/esm/plugins/utils/utils-revision.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-revision.js","names":["parseRevision","revision","split","length","Error","height","parseInt","hash","getHeightOfRevision","useChars","index","char","createRevision","databaseInstanceToken","previousDocData","newRevisionHeight","_rev"],"sources":["../../../../src/plugins/utils/utils-revision.ts"],"sourcesContent":["import type {\n RxDocumentData\n} from '../../types/index.d.ts';\n\n/**\n * Parses the full revision.\n * Do NOT use this if you only need the revision height,\n * then use getHeightOfRevision() instead which is faster.\n */\nexport function 
parseRevision(revision: string): { height: number; hash: string; } {\n const split = revision.split('-');\n if (split.length !== 2) {\n throw new Error('malformatted revision: ' + revision);\n }\n return {\n height: parseInt(split[0], 10),\n hash: split[1]\n };\n}\n\n/**\n * @hotPath Performance is very important here\n * because we need to parse the revision height very often.\n * Do not use `parseInt(revision.split('-')[0], 10)` because\n * only fetching the start-number chars is faster.\n */\nexport function getHeightOfRevision(revision: string): number {\n let useChars = '';\n for (let index = 0; index < revision.length; index++) {\n const char = revision[index];\n if (char === '-') {\n return parseInt(useChars, 10);\n }\n useChars += char;\n }\n throw new Error('malformatted revision: ' + revision);\n}\n\n\n/**\n * Creates the next write revision for a given document.\n */\nexport function createRevision(\n databaseInstanceToken: string,\n previousDocData?: RxDocumentData\n): string {\n const newRevisionHeight = !previousDocData ? 
1 : getHeightOfRevision(previousDocData._rev) + 1\n return newRevisionHeight + '-' + databaseInstanceToken;\n}\n\n"],"mappings":"AAIA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASA,aAAaA,CAACC,QAAgB,EAAqC;EAC/E,IAAMC,KAAK,GAAGD,QAAQ,CAACC,KAAK,CAAC,GAAG,CAAC;EACjC,IAAIA,KAAK,CAACC,MAAM,KAAK,CAAC,EAAE;IACpB,MAAM,IAAIC,KAAK,CAAC,yBAAyB,GAAGH,QAAQ,CAAC;EACzD;EACA,OAAO;IACHI,MAAM,EAAEC,QAAQ,CAACJ,KAAK,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC;IAC9BK,IAAI,EAAEL,KAAK,CAAC,CAAC;EACjB,CAAC;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASM,mBAAmBA,CAACP,QAAgB,EAAU;EAC1D,IAAIQ,QAAQ,GAAG,EAAE;EACjB,KAAK,IAAIC,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGT,QAAQ,CAACE,MAAM,EAAEO,KAAK,EAAE,EAAE;IAClD,IAAMC,IAAI,GAAGV,QAAQ,CAACS,KAAK,CAAC;IAC5B,IAAIC,IAAI,KAAK,GAAG,EAAE;MACd,OAAOL,QAAQ,CAACG,QAAQ,EAAE,EAAE,CAAC;IACjC;IACAA,QAAQ,IAAIE,IAAI;EACpB;EACA,MAAM,IAAIP,KAAK,CAAC,yBAAyB,GAAGH,QAAQ,CAAC;AACzD;;AAGA;AACA;AACA;AACA,OAAO,SAASW,cAAcA,CAC1BC,qBAA6B,EAC7BC,eAA2C,EACrC;EACN,IAAMC,iBAAiB,GAAG,CAACD,eAAe,GAAG,CAAC,GAAGN,mBAAmB,CAACM,eAAe,CAACE,IAAI,CAAC,GAAG,CAAC;EAC9F,OAAOD,iBAAiB,GAAG,GAAG,GAAGF,qBAAqB;AAC1D","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-rxdb-version.js b/dist/esm/plugins/utils/utils-rxdb-version.js deleted file mode 100644 index ea07ba69ffe..00000000000 --- a/dist/esm/plugins/utils/utils-rxdb-version.js +++ /dev/null @@ -1,5 +0,0 @@ -/** - * This file is replaced in the 'npm run build:version' script. 
- */ -export var RXDB_VERSION = '15.24.0'; -//# sourceMappingURL=utils-rxdb-version.js.map \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-rxdb-version.js.map b/dist/esm/plugins/utils/utils-rxdb-version.js.map deleted file mode 100644 index ce71eb27f10..00000000000 --- a/dist/esm/plugins/utils/utils-rxdb-version.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-rxdb-version.js","names":["RXDB_VERSION"],"sources":["../../../../src/plugins/utils/utils-rxdb-version.ts"],"sourcesContent":["/**\n * This file is replaced in the 'npm run build:version' script.\n */\nexport const RXDB_VERSION = '15.24.0';\n"],"mappings":"AAAA;AACA;AACA;AACA,OAAO,IAAMA,YAAY,GAAG,SAAS","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-rxdb-version.template.js b/dist/esm/plugins/utils/utils-rxdb-version.template.js deleted file mode 100644 index 71002cf6740..00000000000 --- a/dist/esm/plugins/utils/utils-rxdb-version.template.js +++ /dev/null @@ -1,5 +0,0 @@ -/** - * This file is replaced in the 'npm run build:version' script. 
- */ -export var RXDB_VERSION = '|PLACEHOLDER|'; -//# sourceMappingURL=utils-rxdb-version.template.js.map \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-rxdb-version.template.js.map b/dist/esm/plugins/utils/utils-rxdb-version.template.js.map deleted file mode 100644 index e1efdea2cd7..00000000000 --- a/dist/esm/plugins/utils/utils-rxdb-version.template.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-rxdb-version.template.js","names":["RXDB_VERSION"],"sources":["../../../../src/plugins/utils/utils-rxdb-version.template.ts"],"sourcesContent":["/**\n * This file is replaced in the 'npm run build:version' script.\n */\nexport const RXDB_VERSION = '|PLACEHOLDER|';\n"],"mappings":"AAAA;AACA;AACA;AACA,OAAO,IAAMA,YAAY,GAAG,eAAe","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-string.js b/dist/esm/plugins/utils/utils-string.js deleted file mode 100644 index 8ea3efdeb0e..00000000000 --- a/dist/esm/plugins/utils/utils-string.js +++ /dev/null @@ -1,91 +0,0 @@ -var COUCH_NAME_CHARS = 'abcdefghijklmnopqrstuvwxyz'; -/** - * get a random string which can be used with couchdb - * @link http://stackoverflow.com/a/1349426/3443137 - */ -export function randomCouchString(length = 10) { - var text = ''; - for (var i = 0; i < length; i++) { - text += COUCH_NAME_CHARS.charAt(Math.floor(Math.random() * COUCH_NAME_CHARS.length)); - } - return text; -} - -/** - * A random string that is never inside of any storage - */ -export var RANDOM_STRING = 'Fz7SZXPmYJujkzjY1rpXWvlWBqoGAfAX'; - -/** - * uppercase first char - */ -export function ucfirst(str) { - str += ''; - var f = str.charAt(0).toUpperCase(); - return f + str.substr(1); -} - -/** - * removes trailing and ending dots from the string - */ -export function trimDots(str) { - // start - while (str.charAt(0) === '.') { - str = str.substr(1); - } - - // end - while (str.slice(-1) === '.') { - str = str.slice(0, -1); - } - return str; -} - -/** - * @link 
https://stackoverflow.com/a/44950500/3443137 - */ -export function lastCharOfString(str) { - return str.charAt(str.length - 1); -} - -/** - * returns true if the given name is likely a folder path - */ -export function isFolderPath(name) { - // do not check, if foldername is given - if (name.includes('/') || - // unix - name.includes('\\') // windows - ) { - return true; - } else { - return false; - } -} - -/** - * @link https://gist.github.com/andreburgaud/6f73fd2d690b629346b8 - * @link https://stackoverflow.com/a/76240378/3443137 - */ -export function arrayBufferToString(arrayBuffer) { - var chunkSize = 8192; - var str = ''; - var len = arrayBuffer.byteLength; - for (var i = 0; i < len; i += chunkSize) { - var chunk = new Uint8Array(arrayBuffer, i, Math.min(chunkSize, len - i)); - str += String.fromCharCode.apply(null, chunk); - } - return str; -} -export function stringToArrayBuffer(str) { - var buf = new ArrayBuffer(str.length); - var bufView = new Uint8Array(buf); - for (var i = 0, strLen = str.length; i < strLen; i++) { - bufView[i] = str.charCodeAt(i); - } - return buf; -} -export function normalizeString(str) { - return str.trim().replace(/[\n\s]+/g, ''); -} -//# sourceMappingURL=utils-string.js.map \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-string.js.map b/dist/esm/plugins/utils/utils-string.js.map deleted file mode 100644 index b61b42fbeab..00000000000 --- a/dist/esm/plugins/utils/utils-string.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"utils-string.js","names":["COUCH_NAME_CHARS","randomCouchString","length","text","i","charAt","Math","floor","random","RANDOM_STRING","ucfirst","str","f","toUpperCase","substr","trimDots","slice","lastCharOfString","isFolderPath","name","includes","arrayBufferToString","arrayBuffer","chunkSize","len","byteLength","chunk","Uint8Array","min","String","fromCharCode","apply","stringToArrayBuffer","buf","ArrayBuffer","bufView","strLen","charCodeAt","normalizeString","trim","replace"],"sources":["../../../../src/plugins/utils/utils-string.ts"],"sourcesContent":["const COUCH_NAME_CHARS = 'abcdefghijklmnopqrstuvwxyz';\n/**\n * get a random string which can be used with couchdb\n * @link http://stackoverflow.com/a/1349426/3443137\n */\nexport function randomCouchString(length: number = 10): string {\n let text = '';\n\n for (let i = 0; i < length; i++) {\n text += COUCH_NAME_CHARS.charAt(Math.floor(Math.random() * COUCH_NAME_CHARS.length));\n }\n\n return text;\n}\n\n\n/**\n * A random string that is never inside of any storage\n */\nexport const RANDOM_STRING = 'Fz7SZXPmYJujkzjY1rpXWvlWBqoGAfAX';\n\n/**\n * uppercase first char\n */\nexport function ucfirst(str: string): string {\n str += '';\n const f = str.charAt(0)\n .toUpperCase();\n return f + str.substr(1);\n}\n\n/**\n * removes trailing and ending dots from the string\n */\nexport function trimDots(str: string): string {\n // start\n while (str.charAt(0) === '.') {\n str = str.substr(1);\n }\n\n // end\n while (str.slice(-1) === '.') {\n str = str.slice(0, -1);\n }\n\n return str;\n}\n\n/**\n * @link https://stackoverflow.com/a/44950500/3443137\n */\nexport function lastCharOfString(str: string): string {\n return str.charAt(str.length - 1);\n}\n\n/**\n * returns true if the given name is likely a folder path\n */\nexport function isFolderPath(name: string) {\n // do not check, if foldername is given\n if (\n name.includes('/') || // unix\n name.includes('\\\\') // windows\n ) {\n return 
true;\n } else {\n return false;\n }\n}\n\n\n/**\n * @link https://gist.github.com/andreburgaud/6f73fd2d690b629346b8\n * @link https://stackoverflow.com/a/76240378/3443137\n */\nexport function arrayBufferToString(arrayBuffer: ArrayBuffer): string {\n const chunkSize = 8192;\n let str = '';\n var len = arrayBuffer.byteLength;\n for (let i = 0; i < len; i += chunkSize) {\n const chunk = new Uint8Array(\n arrayBuffer,\n i,\n Math.min(chunkSize, len - i)\n );\n str += String.fromCharCode.apply(null, chunk as any);\n }\n return str;\n}\n\nexport function stringToArrayBuffer(str: string): ArrayBuffer {\n const buf = new ArrayBuffer(str.length);\n const bufView = new Uint8Array(buf);\n for (let i = 0, strLen = str.length; i < strLen; i++) {\n bufView[i] = str.charCodeAt(i);\n }\n return buf;\n}\n\n\nexport function normalizeString(str: string) : string {\n return str.trim().replace(/[\\n\\s]+/g, '');\n}\n"],"mappings":"AAAA,IAAMA,gBAAgB,GAAG,4BAA4B;AACrD;AACA;AACA;AACA;AACA,OAAO,SAASC,iBAAiBA,CAACC,MAAc,GAAG,EAAE,EAAU;EAC3D,IAAIC,IAAI,GAAG,EAAE;EAEb,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGF,MAAM,EAAEE,CAAC,EAAE,EAAE;IAC7BD,IAAI,IAAIH,gBAAgB,CAACK,MAAM,CAACC,IAAI,CAACC,KAAK,CAACD,IAAI,CAACE,MAAM,CAAC,CAAC,GAAGR,gBAAgB,CAACE,MAAM,CAAC,CAAC;EACxF;EAEA,OAAOC,IAAI;AACf;;AAGA;AACA;AACA;AACA,OAAO,IAAMM,aAAa,GAAG,kCAAkC;;AAE/D;AACA;AACA;AACA,OAAO,SAASC,OAAOA,CAACC,GAAW,EAAU;EACzCA,GAAG,IAAI,EAAE;EACT,IAAMC,CAAC,GAAGD,GAAG,CAACN,MAAM,CAAC,CAAC,CAAC,CAClBQ,WAAW,CAAC,CAAC;EAClB,OAAOD,CAAC,GAAGD,GAAG,CAACG,MAAM,CAAC,CAAC,CAAC;AAC5B;;AAEA;AACA;AACA;AACA,OAAO,SAASC,QAAQA,CAACJ,GAAW,EAAU;EAC1C;EACA,OAAOA,GAAG,CAACN,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;IAC1BM,GAAG,GAAGA,GAAG,CAACG,MAAM,CAAC,CAAC,CAAC;EACvB;;EAEA;EACA,OAAOH,GAAG,CAACK,KAAK,CAAC,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;IAC1BL,GAAG,GAAGA,GAAG,CAACK,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;EAC1B;EAEA,OAAOL,GAAG;AACd;;AAEA;AACA;AACA;AACA,OAAO,SAASM,gBAAgBA,CAACN,GAAW,EAAU;EAClD,OAAOA,GAAG,CAACN,MAAM,CAACM,GAAG,CAACT,MAAM,GAAG,CAAC,CAAC;AACrC;;
AAEA;AACA;AACA;AACA,OAAO,SAASgB,YAAYA,CAACC,IAAY,EAAE;EACvC;EACA,IACIA,IAAI,CAACC,QAAQ,CAAC,GAAG,CAAC;EAAI;EACtBD,IAAI,CAACC,QAAQ,CAAC,IAAI,CAAC,CAAC;EAAA,EACtB;IACE,OAAO,IAAI;EACf,CAAC,MAAM;IACH,OAAO,KAAK;EAChB;AACJ;;AAGA;AACA;AACA;AACA;AACA,OAAO,SAASC,mBAAmBA,CAACC,WAAwB,EAAU;EAClE,IAAMC,SAAS,GAAG,IAAI;EACtB,IAAIZ,GAAG,GAAG,EAAE;EACZ,IAAIa,GAAG,GAAGF,WAAW,CAACG,UAAU;EAChC,KAAK,IAAIrB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGoB,GAAG,EAAEpB,CAAC,IAAImB,SAAS,EAAE;IACrC,IAAMG,KAAK,GAAG,IAAIC,UAAU,CACxBL,WAAW,EACXlB,CAAC,EACDE,IAAI,CAACsB,GAAG,CAACL,SAAS,EAAEC,GAAG,GAAGpB,CAAC,CAC/B,CAAC;IACDO,GAAG,IAAIkB,MAAM,CAACC,YAAY,CAACC,KAAK,CAAC,IAAI,EAAEL,KAAY,CAAC;EACxD;EACA,OAAOf,GAAG;AACd;AAEA,OAAO,SAASqB,mBAAmBA,CAACrB,GAAW,EAAe;EAC1D,IAAMsB,GAAG,GAAG,IAAIC,WAAW,CAACvB,GAAG,CAACT,MAAM,CAAC;EACvC,IAAMiC,OAAO,GAAG,IAAIR,UAAU,CAACM,GAAG,CAAC;EACnC,KAAK,IAAI7B,CAAC,GAAG,CAAC,EAAEgC,MAAM,GAAGzB,GAAG,CAACT,MAAM,EAAEE,CAAC,GAAGgC,MAAM,EAAEhC,CAAC,EAAE,EAAE;IAClD+B,OAAO,CAAC/B,CAAC,CAAC,GAAGO,GAAG,CAAC0B,UAAU,CAACjC,CAAC,CAAC;EAClC;EACA,OAAO6B,GAAG;AACd;AAGA,OAAO,SAASK,eAAeA,CAAC3B,GAAW,EAAW;EAClD,OAAOA,GAAG,CAAC4B,IAAI,CAAC,CAAC,CAACC,OAAO,CAAC,UAAU,EAAE,EAAE,CAAC;AAC7C","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-time.js b/dist/esm/plugins/utils/utils-time.js deleted file mode 100644 index bfc224f2419..00000000000 --- a/dist/esm/plugins/utils/utils-time.js +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Returns the current unix time in milliseconds (with two decimals!) - * Because the accuracy of getTime() in javascript is bad, - * and we cannot rely on performance.now() on all platforms, - * this method implements a way to never return the same value twice. - * This ensures that when now() is called often, we do not loose the information - * about which call came first and which came after. 
- * - * We had to move from having no decimals, to having two decimal - * because it turned out that some storages are such fast that - * calling this method too often would return 'the future'. - */ -var _lastNow = 0; -/** - * Returns the current time in milliseconds, - * also ensures to not return the same value twice. - */ -export function now() { - var ret = Date.now(); - ret = ret + 0.01; - if (ret <= _lastNow) { - ret = _lastNow + 0.01; - } - - /** - * Strip the returned number to max two decimals. - * In theory we would not need this but - * in practice JavaScript has no such good number precision - * so rounding errors could add another decimal place. - */ - var twoDecimals = parseFloat(ret.toFixed(2)); - _lastNow = twoDecimals; - return twoDecimals; -} -//# sourceMappingURL=utils-time.js.map \ No newline at end of file diff --git a/dist/esm/plugins/utils/utils-time.js.map b/dist/esm/plugins/utils/utils-time.js.map deleted file mode 100644 index 650b9a6723f..00000000000 --- a/dist/esm/plugins/utils/utils-time.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"utils-time.js","names":["_lastNow","now","ret","Date","twoDecimals","parseFloat","toFixed"],"sources":["../../../../src/plugins/utils/utils-time.ts"],"sourcesContent":["\n/**\n * Returns the current unix time in milliseconds (with two decimals!)\n * Because the accuracy of getTime() in javascript is bad,\n * and we cannot rely on performance.now() on all platforms,\n * this method implements a way to never return the same value twice.\n * This ensures that when now() is called often, we do not loose the information\n * about which call came first and which came after.\n *\n * We had to move from having no decimals, to having two decimal\n * because it turned out that some storages are such fast that\n * calling this method too often would return 'the future'.\n */\nlet _lastNow: number = 0;\n/**\n * Returns the current time in milliseconds,\n * also ensures to not return the same value twice.\n 
*/\nexport function now(): number {\n let ret = Date.now();\n ret = ret + 0.01;\n if (ret <= _lastNow) {\n ret = _lastNow + 0.01;\n }\n\n /**\n * Strip the returned number to max two decimals.\n * In theory we would not need this but\n * in practice JavaScript has no such good number precision\n * so rounding errors could add another decimal place.\n */\n const twoDecimals = parseFloat(ret.toFixed(2));\n\n _lastNow = twoDecimals;\n return twoDecimals;\n}\n"],"mappings":"AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAIA,QAAgB,GAAG,CAAC;AACxB;AACA;AACA;AACA;AACA,OAAO,SAASC,GAAGA,CAAA,EAAW;EAC1B,IAAIC,GAAG,GAAGC,IAAI,CAACF,GAAG,CAAC,CAAC;EACpBC,GAAG,GAAGA,GAAG,GAAG,IAAI;EAChB,IAAIA,GAAG,IAAIF,QAAQ,EAAE;IACjBE,GAAG,GAAGF,QAAQ,GAAG,IAAI;EACzB;;EAEA;AACJ;AACA;AACA;AACA;AACA;EACI,IAAMI,WAAW,GAAGC,UAAU,CAACH,GAAG,CAACI,OAAO,CAAC,CAAC,CAAC,CAAC;EAE9CN,QAAQ,GAAGI,WAAW;EACtB,OAAOA,WAAW;AACtB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/validate-ajv/index.js b/dist/esm/plugins/validate-ajv/index.js deleted file mode 100644 index 4b78aee760a..00000000000 --- a/dist/esm/plugins/validate-ajv/index.js +++ /dev/null @@ -1,24 +0,0 @@ -/** - * this plugin validates documents before they can be inserted into the RxCollection. 
- * It's using ajv as jsonschema-validator - * @link https://github.com/epoberezkin/ajv - * @link https://github.com/ajv-validator/ajv/issues/2132#issuecomment-1537224620 - */ -import Ajv from 'ajv'; -import { wrappedValidateStorageFactory } from "../../plugin-helpers.js"; -var ajv = new Ajv({ - strict: false -}); -export function getValidator(schema) { - var validator = ajv.compile(schema); - return docData => { - var isValid = validator(docData); - if (isValid) { - return []; - } else { - return validator.errors; - } - }; -} -export var wrappedValidateAjvStorage = wrappedValidateStorageFactory(getValidator, 'ajv'); -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/validate-ajv/index.js.map b/dist/esm/plugins/validate-ajv/index.js.map deleted file mode 100644 index f269ae40408..00000000000 --- a/dist/esm/plugins/validate-ajv/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["Ajv","wrappedValidateStorageFactory","ajv","strict","getValidator","schema","validator","compile","docData","isValid","errors","wrappedValidateAjvStorage"],"sources":["../../../../src/plugins/validate-ajv/index.ts"],"sourcesContent":["/**\n * this plugin validates documents before they can be inserted into the RxCollection.\n * It's using ajv as jsonschema-validator\n * @link https://github.com/epoberezkin/ajv\n * @link https://github.com/ajv-validator/ajv/issues/2132#issuecomment-1537224620\n */\nimport Ajv from 'ajv';\nimport type {\n RxDocumentData,\n RxJsonSchema\n} from '../../types/index.d.ts';\nimport { wrappedValidateStorageFactory } from '../../plugin-helpers.ts';\n\n\nconst ajv = new Ajv({\n strict: false\n});\n\n\nexport function getValidator(\n schema: RxJsonSchema\n) {\n const validator = ajv.compile(schema);\n return (docData: RxDocumentData) => {\n const isValid = validator(docData);\n if (isValid) {\n return [];\n } else {\n return validator.errors as any;\n }\n };\n}\n\nexport const 
wrappedValidateAjvStorage = wrappedValidateStorageFactory(\n getValidator,\n 'ajv'\n);\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA,OAAOA,GAAG,MAAM,KAAK;AAKrB,SAASC,6BAA6B,QAAQ,yBAAyB;AAGvE,IAAMC,GAAG,GAAG,IAAIF,GAAG,CAAC;EAChBG,MAAM,EAAE;AACZ,CAAC,CAAC;AAGF,OAAO,SAASC,YAAYA,CACxBC,MAAyB,EAC3B;EACE,IAAMC,SAAS,GAAGJ,GAAG,CAACK,OAAO,CAACF,MAAM,CAAC;EACrC,OAAQG,OAA4B,IAAK;IACrC,IAAMC,OAAO,GAAGH,SAAS,CAACE,OAAO,CAAC;IAClC,IAAIC,OAAO,EAAE;MACT,OAAO,EAAE;IACb,CAAC,MAAM;MACH,OAAOH,SAAS,CAACI,MAAM;IAC3B;EACJ,CAAC;AACL;AAEA,OAAO,IAAMC,yBAAyB,GAAGV,6BAA6B,CAClEG,YAAY,EACZ,KACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/validate-is-my-json-valid/index.js b/dist/esm/plugins/validate-is-my-json-valid/index.js deleted file mode 100644 index 5d57372fc6f..00000000000 --- a/dist/esm/plugins/validate-is-my-json-valid/index.js +++ /dev/null @@ -1,20 +0,0 @@ -/** - * this plugin validates documents before they can be inserted into the RxCollection. - * It's using is-my-json-valid as jsonschema-validator - * @link https://github.com/mafintosh/is-my-json-valid - */ -import isMyJsonValid from 'is-my-json-valid'; -import { wrappedValidateStorageFactory } from "../../plugin-helpers.js"; -export function getValidator(schema) { - var validator = isMyJsonValid(schema); - return docData => { - var isValid = validator(docData); - if (isValid) { - return []; - } else { - return validator.errors; - } - }; -} -export var wrappedValidateIsMyJsonValidStorage = wrappedValidateStorageFactory(getValidator, 'is-my-json-valid'); -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/validate-is-my-json-valid/index.js.map b/dist/esm/plugins/validate-is-my-json-valid/index.js.map deleted file mode 100644 index b7bd5cd27b8..00000000000 --- a/dist/esm/plugins/validate-is-my-json-valid/index.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"index.js","names":["isMyJsonValid","wrappedValidateStorageFactory","getValidator","schema","validator","docData","isValid","errors","wrappedValidateIsMyJsonValidStorage"],"sources":["../../../../src/plugins/validate-is-my-json-valid/index.ts"],"sourcesContent":["/**\n * this plugin validates documents before they can be inserted into the RxCollection.\n * It's using is-my-json-valid as jsonschema-validator\n * @link https://github.com/mafintosh/is-my-json-valid\n */\nimport isMyJsonValid from 'is-my-json-valid';\nimport type {\n RxJsonSchema\n} from '../../types/index.d.ts';\nimport { wrappedValidateStorageFactory } from '../../plugin-helpers.ts';\n\n\nexport function getValidator(\n schema: RxJsonSchema\n) {\n const validator = isMyJsonValid(schema as any);\n return (docData: any) => {\n const isValid = validator(docData);\n if (isValid) {\n return [];\n } else {\n return validator.errors as any;\n }\n };\n}\n\nexport const wrappedValidateIsMyJsonValidStorage = wrappedValidateStorageFactory(\n getValidator,\n 'is-my-json-valid'\n);\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA,OAAOA,aAAa,MAAM,kBAAkB;AAI5C,SAASC,6BAA6B,QAAQ,yBAAyB;AAGvE,OAAO,SAASC,YAAYA,CACxBC,MAAyB,EAC3B;EACE,IAAMC,SAAS,GAAGJ,aAAa,CAACG,MAAa,CAAC;EAC9C,OAAQE,OAAY,IAAK;IACrB,IAAMC,OAAO,GAAGF,SAAS,CAACC,OAAO,CAAC;IAClC,IAAIC,OAAO,EAAE;MACT,OAAO,EAAE;IACb,CAAC,MAAM;MACH,OAAOF,SAAS,CAACG,MAAM;IAC3B;EACJ,CAAC;AACL;AAEA,OAAO,IAAMC,mCAAmC,GAAGP,6BAA6B,CAC5EC,YAAY,EACZ,kBACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/plugins/validate-z-schema/index.js b/dist/esm/plugins/validate-z-schema/index.js deleted file mode 100644 index fcc3396d447..00000000000 --- a/dist/esm/plugins/validate-z-schema/index.js +++ /dev/null @@ -1,39 +0,0 @@ -/** - * this plugin validates documents before they can be inserted into the RxCollection. 
- * It's using z-schema as jsonschema-validator - * @link https://github.com/zaggino/z-schema - */ -import ZSchema from 'z-schema'; -import { wrappedValidateStorageFactory } from "../../plugin-helpers.js"; -export function getValidator(schema) { - var validatorInstance = new ZSchema(); - var validator = obj => { - validatorInstance.validate(obj, schema); - return validatorInstance; - }; - return docData => { - var useValidator = validator(docData); - if (useValidator === true) { - return; - } - var errors = useValidator.getLastErrors(); - if (errors) { - var formattedZSchemaErrors = errors.map(({ - title, - description, - message, - path - }) => ({ - title, - description, - message, - path - })); - return formattedZSchemaErrors; - } else { - return []; - } - }; -} -export var wrappedValidateZSchemaStorage = wrappedValidateStorageFactory(getValidator, 'z-schema'); -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/plugins/validate-z-schema/index.js.map b/dist/esm/plugins/validate-z-schema/index.js.map deleted file mode 100644 index 4d1657ec531..00000000000 --- a/dist/esm/plugins/validate-z-schema/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["ZSchema","wrappedValidateStorageFactory","getValidator","schema","validatorInstance","validator","obj","validate","docData","useValidator","errors","getLastErrors","formattedZSchemaErrors","map","title","description","message","path","wrappedValidateZSchemaStorage"],"sources":["../../../../src/plugins/validate-z-schema/index.ts"],"sourcesContent":["/**\n * this plugin validates documents before they can be inserted into the RxCollection.\n * It's using z-schema as jsonschema-validator\n * @link https://github.com/zaggino/z-schema\n */\nimport ZSchema from 'z-schema';\nimport type { RxJsonSchema } from '../../types/index.d.ts';\nimport { wrappedValidateStorageFactory } from '../../plugin-helpers.ts';\n\n\nexport function getValidator(\n schema: RxJsonSchema\n) 
{\n const validatorInstance = new (ZSchema as any)();\n const validator = (obj: any) => {\n validatorInstance.validate(obj, schema);\n return validatorInstance;\n };\n return (docData: any) => {\n const useValidator = validator(docData);\n if (useValidator === true) {\n return;\n }\n const errors: ZSchema.SchemaErrorDetail[] = (useValidator as any).getLastErrors();\n if (errors) {\n const formattedZSchemaErrors = (errors as any).map(({\n title,\n description,\n message,\n path\n }: any) => ({\n title,\n description,\n message,\n path\n }));\n return formattedZSchemaErrors;\n } else {\n return [];\n }\n };\n}\n\nexport const wrappedValidateZSchemaStorage = wrappedValidateStorageFactory(\n getValidator,\n 'z-schema'\n);\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA,OAAOA,OAAO,MAAM,UAAU;AAE9B,SAASC,6BAA6B,QAAQ,yBAAyB;AAGvE,OAAO,SAASC,YAAYA,CACxBC,MAAyB,EAC3B;EACE,IAAMC,iBAAiB,GAAG,IAAKJ,OAAO,CAAS,CAAC;EAChD,IAAMK,SAAS,GAAIC,GAAQ,IAAK;IAC5BF,iBAAiB,CAACG,QAAQ,CAACD,GAAG,EAAEH,MAAM,CAAC;IACvC,OAAOC,iBAAiB;EAC5B,CAAC;EACD,OAAQI,OAAY,IAAK;IACrB,IAAMC,YAAY,GAAGJ,SAAS,CAACG,OAAO,CAAC;IACvC,IAAIC,YAAY,KAAK,IAAI,EAAE;MACvB;IACJ;IACA,IAAMC,MAAmC,GAAID,YAAY,CAASE,aAAa,CAAC,CAAC;IACjF,IAAID,MAAM,EAAE;MACR,IAAME,sBAAsB,GAAIF,MAAM,CAASG,GAAG,CAAC,CAAC;QAChDC,KAAK;QACLC,WAAW;QACXC,OAAO;QACPC;MACC,CAAC,MAAM;QACRH,KAAK;QACLC,WAAW;QACXC,OAAO;QACPC;MACJ,CAAC,CAAC,CAAC;MACH,OAAOL,sBAAsB;IACjC,CAAC,MAAM;MACH,OAAO,EAAE;IACb;EACJ,CAAC;AACL;AAEA,OAAO,IAAMM,6BAA6B,GAAGjB,6BAA6B,CACtEC,YAAY,EACZ,UACJ,CAAC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/query-cache.js b/dist/esm/query-cache.js deleted file mode 100644 index 1e87528d36f..00000000000 --- a/dist/esm/query-cache.js +++ /dev/null @@ -1,98 +0,0 @@ -/** - * the query-cache makes sure that on every query-state, exactly one instance can exist - * if you use the same mango-query more then once, it will reuse the first RxQuery - */ - -import { getFromMapOrCreate, nextTick, now, requestIdlePromise } from 
"./plugins/utils/index.js"; -export var QueryCache = /*#__PURE__*/function () { - function QueryCache() { - this._map = new Map(); - } - var _proto = QueryCache.prototype; - /** - * check if an equal query is in the cache, - * if true, return the cached one, - * if false, save the given one and return it - */ - _proto.getByQuery = function getByQuery(rxQuery) { - var stringRep = rxQuery.toString(); - return getFromMapOrCreate(this._map, stringRep, () => rxQuery); - }; - return QueryCache; -}(); -export function createQueryCache() { - return new QueryCache(); -} -export function uncacheRxQuery(queryCache, rxQuery) { - rxQuery.uncached = true; - var stringRep = rxQuery.toString(); - queryCache._map.delete(stringRep); -} -export function countRxQuerySubscribers(rxQuery) { - return rxQuery.refCount$.observers.length; -} -export var DEFAULT_TRY_TO_KEEP_MAX = 100; -export var DEFAULT_UNEXECUTED_LIFETIME = 30 * 1000; - -/** - * The default cache replacement policy - * See docs-src/query-cache.md to learn how it should work. 
- * Notice that this runs often and should block the cpu as less as possible - * This is a monad which makes it easier to unit test - */ -export var defaultCacheReplacementPolicyMonad = (tryToKeepMax, unExecutedLifetime) => (_collection, queryCache) => { - if (queryCache._map.size < tryToKeepMax) { - return; - } - var minUnExecutedLifetime = now() - unExecutedLifetime; - var maybeUncache = []; - var queriesInCache = Array.from(queryCache._map.values()); - for (var rxQuery of queriesInCache) { - // filter out queries with subscribers - if (countRxQuerySubscribers(rxQuery) > 0) { - continue; - } - // directly uncache queries that never executed and are older then unExecutedLifetime - if (rxQuery._lastEnsureEqual === 0 && rxQuery._creationTime < minUnExecutedLifetime) { - uncacheRxQuery(queryCache, rxQuery); - continue; - } - maybeUncache.push(rxQuery); - } - var mustUncache = maybeUncache.length - tryToKeepMax; - if (mustUncache <= 0) { - return; - } - var sortedByLastUsage = maybeUncache.sort((a, b) => a._lastEnsureEqual - b._lastEnsureEqual); - var toRemove = sortedByLastUsage.slice(0, mustUncache); - toRemove.forEach(rxQuery => uncacheRxQuery(queryCache, rxQuery)); -}; -export var defaultCacheReplacementPolicy = defaultCacheReplacementPolicyMonad(DEFAULT_TRY_TO_KEEP_MAX, DEFAULT_UNEXECUTED_LIFETIME); -export var COLLECTIONS_WITH_RUNNING_CLEANUP = new WeakSet(); - -/** - * Triggers the cache replacement policy after waitTime has passed. - * We do not run this directly because at exactly the time a query is created, - * we need all CPU to minimize latency. - * Also this should not be triggered multiple times when waitTime is still waiting. 
- */ -export function triggerCacheReplacement(rxCollection) { - if (COLLECTIONS_WITH_RUNNING_CLEANUP.has(rxCollection)) { - // already started - return; - } - COLLECTIONS_WITH_RUNNING_CLEANUP.add(rxCollection); - - /** - * Do not run directly to not reduce result latency of a new query - */ - nextTick() // wait at least one tick - .then(() => requestIdlePromise(200)) // and then wait for the CPU to be idle - .then(() => { - if (!rxCollection.destroyed) { - rxCollection.cacheReplacementPolicy(rxCollection, rxCollection._queryCache); - } - COLLECTIONS_WITH_RUNNING_CLEANUP.delete(rxCollection); - }); -} -//# sourceMappingURL=query-cache.js.map \ No newline at end of file diff --git a/dist/esm/query-cache.js.map b/dist/esm/query-cache.js.map deleted file mode 100644 index eea48af0fb3..00000000000 --- a/dist/esm/query-cache.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"query-cache.js","names":["getFromMapOrCreate","nextTick","now","requestIdlePromise","QueryCache","_map","Map","_proto","prototype","getByQuery","rxQuery","stringRep","toString","createQueryCache","uncacheRxQuery","queryCache","uncached","delete","countRxQuerySubscribers","refCount$","observers","length","DEFAULT_TRY_TO_KEEP_MAX","DEFAULT_UNEXECUTED_LIFETIME","defaultCacheReplacementPolicyMonad","tryToKeepMax","unExecutedLifetime","_collection","size","minUnExecutedLifetime","maybeUncache","queriesInCache","Array","from","values","_lastEnsureEqual","_creationTime","push","mustUncache","sortedByLastUsage","sort","a","b","toRemove","slice","forEach","defaultCacheReplacementPolicy","COLLECTIONS_WITH_RUNNING_CLEANUP","WeakSet","triggerCacheReplacement","rxCollection","has","add","then","destroyed","cacheReplacementPolicy","_queryCache"],"sources":["../../src/query-cache.ts"],"sourcesContent":["/**\n * the query-cache makes sure that on every query-state, exactly one instance can exist\n * if you use the same mango-query more then once, it will reuse the first RxQuery\n */\nimport type {\n RxQuery,\n 
RxCacheReplacementPolicy,\n RxCollection\n} from './types/index.d.ts';\nimport {\n getFromMapOrCreate,\n nextTick,\n now,\n requestIdlePromise\n} from './plugins/utils/index.ts';\n\nexport class QueryCache {\n public _map: Map = new Map();\n\n /**\n * check if an equal query is in the cache,\n * if true, return the cached one,\n * if false, save the given one and return it\n */\n getByQuery(rxQuery: RxQuery): RxQuery {\n const stringRep = rxQuery.toString();\n return getFromMapOrCreate(\n this._map,\n stringRep,\n () => rxQuery\n );\n }\n}\n\nexport function createQueryCache() {\n return new QueryCache();\n}\n\n\nexport function uncacheRxQuery(queryCache: QueryCache, rxQuery: RxQuery) {\n rxQuery.uncached = true;\n const stringRep = rxQuery.toString();\n queryCache._map.delete(stringRep);\n\n}\n\n\nexport function countRxQuerySubscribers(rxQuery: RxQuery): number {\n return rxQuery.refCount$.observers.length;\n}\n\n\nexport const DEFAULT_TRY_TO_KEEP_MAX = 100;\nexport const DEFAULT_UNEXECUTED_LIFETIME = 30 * 1000;\n\n/**\n * The default cache replacement policy\n * See docs-src/query-cache.md to learn how it should work.\n * Notice that this runs often and should block the cpu as less as possible\n * This is a monad which makes it easier to unit test\n */\nexport const defaultCacheReplacementPolicyMonad: (\n tryToKeepMax: number,\n unExecutedLifetime: number\n) => RxCacheReplacementPolicy = (\n tryToKeepMax,\n unExecutedLifetime\n) => (\n _collection: RxCollection,\n queryCache: QueryCache\n) => {\n if (queryCache._map.size < tryToKeepMax) {\n return;\n }\n\n const minUnExecutedLifetime = now() - unExecutedLifetime;\n const maybeUncache: RxQuery[] = [];\n\n const queriesInCache = Array.from(queryCache._map.values());\n for (const rxQuery of queriesInCache) {\n // filter out queries with subscribers\n if (countRxQuerySubscribers(rxQuery) > 0) {\n continue;\n }\n // directly uncache queries that never executed and are older then unExecutedLifetime\n if 
(rxQuery._lastEnsureEqual === 0 && rxQuery._creationTime < minUnExecutedLifetime) {\n uncacheRxQuery(queryCache, rxQuery);\n continue;\n }\n maybeUncache.push(rxQuery);\n }\n\n const mustUncache = maybeUncache.length - tryToKeepMax;\n if (mustUncache <= 0) {\n return;\n }\n\n const sortedByLastUsage = maybeUncache.sort((a, b) => a._lastEnsureEqual - b._lastEnsureEqual);\n const toRemove = sortedByLastUsage.slice(0, mustUncache);\n toRemove.forEach(rxQuery => uncacheRxQuery(queryCache, rxQuery));\n };\n\n\nexport const defaultCacheReplacementPolicy: RxCacheReplacementPolicy = defaultCacheReplacementPolicyMonad(\n DEFAULT_TRY_TO_KEEP_MAX,\n DEFAULT_UNEXECUTED_LIFETIME\n);\n\nexport const COLLECTIONS_WITH_RUNNING_CLEANUP: WeakSet = new WeakSet();\n\n/**\n * Triggers the cache replacement policy after waitTime has passed.\n * We do not run this directly because at exactly the time a query is created,\n * we need all CPU to minimize latency.\n * Also this should not be triggered multiple times when waitTime is still waiting.\n */\nexport function triggerCacheReplacement(\n rxCollection: RxCollection\n) {\n if (COLLECTIONS_WITH_RUNNING_CLEANUP.has(rxCollection)) {\n // already started\n return;\n }\n\n COLLECTIONS_WITH_RUNNING_CLEANUP.add(rxCollection);\n\n /**\n * Do not run directly to not reduce result latency of a new query\n */\n nextTick() // wait at least one tick\n .then(() => requestIdlePromise(200)) // and then wait for the CPU to be idle\n .then(() => {\n if (!rxCollection.destroyed) {\n rxCollection.cacheReplacementPolicy(rxCollection, rxCollection._queryCache);\n }\n COLLECTIONS_WITH_RUNNING_CLEANUP.delete(rxCollection);\n 
});\n}\n"],"mappings":"AAAA;AACA;AACA;AACA;;AAMA,SACIA,kBAAkB,EAClBC,QAAQ,EACRC,GAAG,EACHC,kBAAkB,QACf,0BAA0B;AAEjC,WAAaC,UAAU;EAAA,SAAAA,WAAA;IAAA,KACZC,IAAI,GAAyB,IAAIC,GAAG,CAAC,CAAC;EAAA;EAAA,IAAAC,MAAA,GAAAH,UAAA,CAAAI,SAAA;EAE7C;AACJ;AACA;AACA;AACA;EAJID,MAAA,CAKAE,UAAU,GAAV,SAAAA,WAAWC,OAAgB,EAAW;IAClC,IAAMC,SAAS,GAAGD,OAAO,CAACE,QAAQ,CAAC,CAAC;IACpC,OAAOZ,kBAAkB,CACrB,IAAI,CAACK,IAAI,EACTM,SAAS,EACT,MAAMD,OACV,CAAC;EACL,CAAC;EAAA,OAAAN,UAAA;AAAA;AAGL,OAAO,SAASS,gBAAgBA,CAAA,EAAG;EAC/B,OAAO,IAAIT,UAAU,CAAC,CAAC;AAC3B;AAGA,OAAO,SAASU,cAAcA,CAACC,UAAsB,EAAEL,OAAgB,EAAE;EACrEA,OAAO,CAACM,QAAQ,GAAG,IAAI;EACvB,IAAML,SAAS,GAAGD,OAAO,CAACE,QAAQ,CAAC,CAAC;EACpCG,UAAU,CAACV,IAAI,CAACY,MAAM,CAACN,SAAS,CAAC;AAErC;AAGA,OAAO,SAASO,uBAAuBA,CAACR,OAAgB,EAAU;EAC9D,OAAOA,OAAO,CAACS,SAAS,CAACC,SAAS,CAACC,MAAM;AAC7C;AAGA,OAAO,IAAMC,uBAAuB,GAAG,GAAG;AAC1C,OAAO,IAAMC,2BAA2B,GAAG,EAAE,GAAG,IAAI;;AAEpD;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,IAAMC,kCAGgB,GAAGA,CAC5BC,YAAY,EACZC,kBAAkB,KACjB,CACDC,WAAyB,EACzBZ,UAAsB,KACrB;EACO,IAAIA,UAAU,CAACV,IAAI,CAACuB,IAAI,GAAGH,YAAY,EAAE;IACrC;EACJ;EAEA,IAAMI,qBAAqB,GAAG3B,GAAG,CAAC,CAAC,GAAGwB,kBAAkB;EACxD,IAAMI,YAAuB,GAAG,EAAE;EAElC,IAAMC,cAAc,GAAGC,KAAK,CAACC,IAAI,CAAClB,UAAU,CAACV,IAAI,CAAC6B,MAAM,CAAC,CAAC,CAAC;EAC3D,KAAK,IAAMxB,OAAO,IAAIqB,cAAc,EAAE;IAClC;IACA,IAAIb,uBAAuB,CAACR,OAAO,CAAC,GAAG,CAAC,EAAE;MACtC;IACJ;IACA;IACA,IAAIA,OAAO,CAACyB,gBAAgB,KAAK,CAAC,IAAIzB,OAAO,CAAC0B,aAAa,GAAGP,qBAAqB,EAAE;MACjFf,cAAc,CAACC,UAAU,EAAEL,OAAO,CAAC;MACnC;IACJ;IACAoB,YAAY,CAACO,IAAI,CAAC3B,OAAO,CAAC;EAC9B;EAEA,IAAM4B,WAAW,GAAGR,YAAY,CAACT,MAAM,GAAGI,YAAY;EACtD,IAAIa,WAAW,IAAI,CAAC,EAAE;IAClB;EACJ;EAEA,IAAMC,iBAAiB,GAAGT,YAAY,CAACU,IAAI,CAAC,CAACC,CAAC,EAAEC,CAAC,KAAKD,CAAC,CAACN,gBAAgB,GAAGO,CAAC,CAACP,gBAAgB,CAAC;EAC9F,IAAMQ,QAAQ,GAAGJ,iBAAiB,CAACK,KAAK,CAAC,CAAC,EAAEN,WAAW,CAAC;EACxDK,QAAQ,CAACE,OAAO,CAACnC,OAAO,IAAII,cAAc,CAACC,UAAU,EAAEL,OAAO,CAAC,CAAC;AACpE,CAAC;AAGT,OAAO,IAAMoC,6BAAuD,GAAGtB,kCAAkC,CACrGF,uBAAuB,EACvBC,2BACJ,CAAC;AAED,OAAO,IAAMwB,gC
AAuD,GAAG,IAAIC,OAAO,CAAC,CAAC;;AAEpF;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASC,uBAAuBA,CACnCC,YAA0B,EAC5B;EACE,IAAIH,gCAAgC,CAACI,GAAG,CAACD,YAAY,CAAC,EAAE;IACpD;IACA;EACJ;EAEAH,gCAAgC,CAACK,GAAG,CAACF,YAAY,CAAC;;EAElD;AACJ;AACA;EACIjD,QAAQ,CAAC,CAAC,CAAC;EAAA,CACNoD,IAAI,CAAC,MAAMlD,kBAAkB,CAAC,GAAG,CAAC,CAAC,CAAC;EAAA,CACpCkD,IAAI,CAAC,MAAM;IACR,IAAI,CAACH,YAAY,CAACI,SAAS,EAAE;MACzBJ,YAAY,CAACK,sBAAsB,CAACL,YAAY,EAAEA,YAAY,CAACM,WAAW,CAAC;IAC/E;IACAT,gCAAgC,CAAC9B,MAAM,CAACiC,YAAY,CAAC;EACzD,CAAC,CAAC;AACV","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/query-planner.js b/dist/esm/query-planner.js deleted file mode 100644 index ba6b4ab203c..00000000000 --- a/dist/esm/query-planner.js +++ /dev/null @@ -1,296 +0,0 @@ -import { countUntilNotMatching } from "./plugins/utils/index.js"; -import { newRxError } from "./rx-error.js"; -import { getSchemaByObjectPath } from "./rx-schema-helper.js"; -export var INDEX_MAX = String.fromCharCode(65535); - -/** - * Do not use -Infinity here because it would be - * transformed to null on JSON.stringify() which can break things - * when the query plan is send to the storage as json. - * @link https://stackoverflow.com/a/16644751 - * Notice that for IndexedDB IDBKeyRange we have - * to transform the value back to -Infinity - * before we can use it in IDBKeyRange.bound. - */ -export var INDEX_MIN = Number.MIN_SAFE_INTEGER; - -/** - * Returns the query plan which contains - * information about how to run the query - * and which indexes to use. - * - * This is used in some storage like Memory, dexie.js and IndexedDB. - */ -export function getQueryPlan(schema, query) { - var selector = query.selector; - var indexes = schema.indexes ? schema.indexes.slice(0) : []; - if (query.index) { - indexes = [query.index]; - } - - /** - * Most storages do not support descending indexes - * so having a 'desc' in the sorting, means we always have to re-sort the results. 
- */ - var hasDescSorting = !!query.sort.find(sortField => Object.values(sortField)[0] === 'desc'); - - /** - * Some fields can be part of the selector while not being relevant for sorting - * because their selector operators specify that in all cases all matching docs - * would have the same value. - * For example the boolean field _deleted. - * TODO similar thing could be done for enums. - */ - var sortIrrelevevantFields = new Set(); - Object.keys(selector).forEach(fieldName => { - var schemaPart = getSchemaByObjectPath(schema, fieldName); - if (schemaPart && schemaPart.type === 'boolean' && Object.prototype.hasOwnProperty.call(selector[fieldName], '$eq')) { - sortIrrelevevantFields.add(fieldName); - } - }); - var optimalSortIndex = query.sort.map(sortField => Object.keys(sortField)[0]); - var optimalSortIndexCompareString = optimalSortIndex.filter(f => !sortIrrelevevantFields.has(f)).join(','); - var currentBestQuality = -1; - var currentBestQueryPlan; - - /** - * Calculate one query plan for each index - * and then test which of the plans is best. - */ - indexes.forEach(index => { - var inclusiveEnd = true; - var inclusiveStart = true; - var opts = index.map(indexField => { - var matcher = selector[indexField]; - var operators = matcher ? Object.keys(matcher) : []; - var matcherOpts = {}; - if (!matcher || !operators.length) { - var startKey = inclusiveStart ? INDEX_MIN : INDEX_MAX; - matcherOpts = { - startKey, - endKey: inclusiveEnd ? 
INDEX_MAX : INDEX_MIN, - inclusiveStart: true, - inclusiveEnd: true - }; - } else { - operators.forEach(operator => { - if (LOGICAL_OPERATORS.has(operator)) { - var operatorValue = matcher[operator]; - var partialOpts = getMatcherQueryOpts(operator, operatorValue); - matcherOpts = Object.assign(matcherOpts, partialOpts); - } - }); - } - - // fill missing attributes - if (typeof matcherOpts.startKey === 'undefined') { - matcherOpts.startKey = INDEX_MIN; - } - if (typeof matcherOpts.endKey === 'undefined') { - matcherOpts.endKey = INDEX_MAX; - } - if (typeof matcherOpts.inclusiveStart === 'undefined') { - matcherOpts.inclusiveStart = true; - } - if (typeof matcherOpts.inclusiveEnd === 'undefined') { - matcherOpts.inclusiveEnd = true; - } - if (inclusiveStart && !matcherOpts.inclusiveStart) { - inclusiveStart = false; - } - if (inclusiveEnd && !matcherOpts.inclusiveEnd) { - inclusiveEnd = false; - } - return matcherOpts; - }); - var startKeys = opts.map(opt => opt.startKey); - var endKeys = opts.map(opt => opt.endKey); - var queryPlan = { - index, - startKeys, - endKeys, - inclusiveEnd, - inclusiveStart, - sortSatisfiedByIndex: !hasDescSorting && optimalSortIndexCompareString === index.filter(f => !sortIrrelevevantFields.has(f)).join(','), - selectorSatisfiedByIndex: isSelectorSatisfiedByIndex(index, query.selector, startKeys, endKeys) - }; - var quality = rateQueryPlan(schema, query, queryPlan); - if (quality >= currentBestQuality || query.index) { - currentBestQuality = quality; - currentBestQueryPlan = queryPlan; - } - }); - - /** - * In all cases and index must be found - */ - if (!currentBestQueryPlan) { - throw newRxError('SNH', { - query - }); - } - return currentBestQueryPlan; -} -export var LOGICAL_OPERATORS = new Set(['$eq', '$gt', '$gte', '$lt', '$lte']); -export var LOWER_BOUND_LOGICAL_OPERATORS = new Set(['$eq', '$gt', '$gte']); -export var UPPER_BOUND_LOGICAL_OPERATORS = new Set(['$eq', '$lt', '$lte']); -export function isSelectorSatisfiedByIndex(index, 
selector, startKeys, endKeys) { - /** - * Not satisfied if one or more operators are non-logical - * operators that can never be satisfied by an index. - */ - var selectorEntries = Object.entries(selector); - var hasNonMatchingOperator = selectorEntries.find(([fieldName, operation]) => { - if (!index.includes(fieldName)) { - return true; - } - var hasNonLogicOperator = Object.entries(operation).find(([op, _value]) => !LOGICAL_OPERATORS.has(op)); - return hasNonLogicOperator; - }); - if (hasNonMatchingOperator) { - return false; - } - - /** - * Not satisfied if contains $and or $or operations. - */ - if (selector.$and || selector.$or) { - return false; - } - - // ensure all lower bound in index - var satisfieldLowerBound = []; - var lowerOperatorFieldNames = new Set(); - for (var [fieldName, operation] of Object.entries(selector)) { - if (!index.includes(fieldName)) { - return false; - } - - // If more then one logic op on the same field, we have to selector-match. - var lowerLogicOps = Object.keys(operation).filter(key => LOWER_BOUND_LOGICAL_OPERATORS.has(key)); - if (lowerLogicOps.length > 1) { - return false; - } - var hasLowerLogicOp = lowerLogicOps[0]; - if (hasLowerLogicOp) { - lowerOperatorFieldNames.add(fieldName); - } - if (hasLowerLogicOp !== '$eq') { - if (satisfieldLowerBound.length > 0) { - return false; - } else { - satisfieldLowerBound.push(hasLowerLogicOp); - } - } - } - - // ensure all upper bound in index - var satisfieldUpperBound = []; - var upperOperatorFieldNames = new Set(); - for (var [_fieldName, _operation] of Object.entries(selector)) { - if (!index.includes(_fieldName)) { - return false; - } - - // If more then one logic op on the same field, we have to selector-match. 
- var upperLogicOps = Object.keys(_operation).filter(key => UPPER_BOUND_LOGICAL_OPERATORS.has(key)); - if (upperLogicOps.length > 1) { - return false; - } - var hasUperLogicOp = upperLogicOps[0]; - if (hasUperLogicOp) { - upperOperatorFieldNames.add(_fieldName); - } - if (hasUperLogicOp !== '$eq') { - if (satisfieldUpperBound.length > 0) { - return false; - } else { - satisfieldUpperBound.push(hasUperLogicOp); - } - } - } - - /** - * If the index contains a non-relevant field between - * the relevant fields, then the index is not satisfying. - */ - var i = 0; - for (var _fieldName2 of index) { - for (var set of [lowerOperatorFieldNames, upperOperatorFieldNames]) { - if (!set.has(_fieldName2) && set.size > 0) { - return false; - } - set.delete(_fieldName2); - } - var startKey = startKeys[i]; - var endKey = endKeys[i]; - if (startKey !== endKey && lowerOperatorFieldNames.size > 0 && upperOperatorFieldNames.size > 0) { - return false; - } - i++; - } - return true; -} -export function getMatcherQueryOpts(operator, operatorValue) { - switch (operator) { - case '$eq': - return { - startKey: operatorValue, - endKey: operatorValue, - inclusiveEnd: true, - inclusiveStart: true - }; - case '$lte': - return { - endKey: operatorValue, - inclusiveEnd: true - }; - case '$gte': - return { - startKey: operatorValue, - inclusiveStart: true - }; - case '$lt': - return { - endKey: operatorValue, - inclusiveEnd: false - }; - case '$gt': - return { - startKey: operatorValue, - inclusiveStart: false - }; - default: - throw new Error('SNH'); - } -} - -/** - * Returns a number that determines the quality of the query plan. - * Higher number means better query plan. 
- */ -export function rateQueryPlan(schema, query, queryPlan) { - var quality = 0; - var addQuality = value => { - if (value > 0) { - quality = quality + value; - } - }; - var pointsPerMatchingKey = 10; - var nonMinKeyCount = countUntilNotMatching(queryPlan.startKeys, keyValue => keyValue !== INDEX_MIN && keyValue !== INDEX_MAX); - addQuality(nonMinKeyCount * pointsPerMatchingKey); - var nonMaxKeyCount = countUntilNotMatching(queryPlan.startKeys, keyValue => keyValue !== INDEX_MAX && keyValue !== INDEX_MIN); - addQuality(nonMaxKeyCount * pointsPerMatchingKey); - var equalKeyCount = countUntilNotMatching(queryPlan.startKeys, (keyValue, idx) => { - if (keyValue === queryPlan.endKeys[idx]) { - return true; - } else { - return false; - } - }); - addQuality(equalKeyCount * pointsPerMatchingKey * 1.5); - var pointsIfNoReSortMustBeDone = queryPlan.sortSatisfiedByIndex ? 5 : 0; - addQuality(pointsIfNoReSortMustBeDone); - return quality; -} -//# sourceMappingURL=query-planner.js.map \ No newline at end of file diff --git a/dist/esm/query-planner.js.map b/dist/esm/query-planner.js.map deleted file mode 100644 index ee1b313a5d4..00000000000 --- a/dist/esm/query-planner.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"query-planner.js","names":["countUntilNotMatching","newRxError","getSchemaByObjectPath","INDEX_MAX","String","fromCharCode","INDEX_MIN","Number","MIN_SAFE_INTEGER","getQueryPlan","schema","query","selector","indexes","slice","index","hasDescSorting","sort","find","sortField","Object","values","sortIrrelevevantFields","Set","keys","forEach","fieldName","schemaPart","type","prototype","hasOwnProperty","call","add","optimalSortIndex","map","optimalSortIndexCompareString","filter","f","has","join","currentBestQuality","currentBestQueryPlan","inclusiveEnd","inclusiveStart","opts","indexField","matcher","operators","matcherOpts","length","startKey","endKey","operator","LOGICAL_OPERATORS","operatorValue","partialOpts","getMatcherQueryOpts","assign","startKeys","opt","endKeys","queryPlan","sortSatisfiedByIndex","selectorSatisfiedByIndex","isSelectorSatisfiedByIndex","quality","rateQueryPlan","LOWER_BOUND_LOGICAL_OPERATORS","UPPER_BOUND_LOGICAL_OPERATORS","selectorEntries","entries","hasNonMatchingOperator","operation","includes","hasNonLogicOperator","op","_value","$and","$or","satisfieldLowerBound","lowerOperatorFieldNames","lowerLogicOps","key","hasLowerLogicOp","push","satisfieldUpperBound","upperOperatorFieldNames","upperLogicOps","hasUperLogicOp","i","set","size","delete","Error","addQuality","value","pointsPerMatchingKey","nonMinKeyCount","keyValue","nonMaxKeyCount","equalKeyCount","idx","pointsIfNoReSortMustBeDone"],"sources":["../../src/query-planner.ts"],"sourcesContent":["import { countUntilNotMatching } from './plugins/utils/index.ts';\nimport { newRxError } from './rx-error.ts';\nimport { getSchemaByObjectPath } from './rx-schema-helper.ts';\nimport type {\n FilledMangoQuery,\n MangoQuerySelector,\n RxDocumentData,\n RxJsonSchema,\n RxQueryPlan,\n RxQueryPlanKey,\n RxQueryPlanerOpts\n} from './types/index.d.ts';\n\nexport const INDEX_MAX = String.fromCharCode(65535);\n\n/**\n * Do not use -Infinity here because it would be\n * transformed 
to null on JSON.stringify() which can break things\n * when the query plan is send to the storage as json.\n * @link https://stackoverflow.com/a/16644751\n * Notice that for IndexedDB IDBKeyRange we have\n * to transform the value back to -Infinity\n * before we can use it in IDBKeyRange.bound.\n */\nexport const INDEX_MIN = Number.MIN_SAFE_INTEGER;\n\n/**\n * Returns the query plan which contains\n * information about how to run the query\n * and which indexes to use.\n *\n * This is used in some storage like Memory, dexie.js and IndexedDB.\n */\nexport function getQueryPlan(\n schema: RxJsonSchema>,\n query: FilledMangoQuery\n): RxQueryPlan {\n const selector = query.selector;\n\n let indexes: string[][] = schema.indexes ? schema.indexes.slice(0) as any : [];\n if (query.index) {\n indexes = [query.index];\n }\n\n /**\n * Most storages do not support descending indexes\n * so having a 'desc' in the sorting, means we always have to re-sort the results.\n */\n const hasDescSorting = !!query.sort.find(sortField => Object.values(sortField)[0] === 'desc');\n\n /**\n * Some fields can be part of the selector while not being relevant for sorting\n * because their selector operators specify that in all cases all matching docs\n * would have the same value.\n * For example the boolean field _deleted.\n * TODO similar thing could be done for enums.\n */\n const sortIrrelevevantFields = new Set();\n Object.keys(selector).forEach(fieldName => {\n const schemaPart = getSchemaByObjectPath(schema, fieldName);\n if (\n schemaPart &&\n schemaPart.type === 'boolean' &&\n Object.prototype.hasOwnProperty.call((selector as any)[fieldName], '$eq')\n ) {\n sortIrrelevevantFields.add(fieldName);\n }\n });\n\n\n const optimalSortIndex = query.sort.map(sortField => Object.keys(sortField)[0]);\n const optimalSortIndexCompareString = optimalSortIndex\n .filter(f => !sortIrrelevevantFields.has(f))\n .join(',');\n\n let currentBestQuality = -1;\n let currentBestQueryPlan: RxQueryPlan | 
undefined;\n\n /**\n * Calculate one query plan for each index\n * and then test which of the plans is best.\n */\n indexes.forEach((index) => {\n let inclusiveEnd = true;\n let inclusiveStart = true;\n const opts: RxQueryPlanerOpts[] = index.map(indexField => {\n const matcher = (selector as any)[indexField];\n const operators = matcher ? Object.keys(matcher) : [];\n\n let matcherOpts: RxQueryPlanerOpts = {} as any;\n if (\n !matcher ||\n !operators.length\n ) {\n const startKey = inclusiveStart ? INDEX_MIN : INDEX_MAX;\n matcherOpts = {\n startKey,\n endKey: inclusiveEnd ? INDEX_MAX : INDEX_MIN,\n inclusiveStart: true,\n inclusiveEnd: true\n };\n } else {\n operators.forEach(operator => {\n if (LOGICAL_OPERATORS.has(operator)) {\n const operatorValue = matcher[operator];\n const partialOpts = getMatcherQueryOpts(operator, operatorValue);\n matcherOpts = Object.assign(matcherOpts, partialOpts);\n }\n });\n }\n\n // fill missing attributes\n if (typeof matcherOpts.startKey === 'undefined') {\n matcherOpts.startKey = INDEX_MIN;\n }\n if (typeof matcherOpts.endKey === 'undefined') {\n matcherOpts.endKey = INDEX_MAX;\n }\n if (typeof matcherOpts.inclusiveStart === 'undefined') {\n matcherOpts.inclusiveStart = true;\n }\n if (typeof matcherOpts.inclusiveEnd === 'undefined') {\n matcherOpts.inclusiveEnd = true;\n }\n\n if (inclusiveStart && !matcherOpts.inclusiveStart) {\n inclusiveStart = false;\n }\n if (inclusiveEnd && !matcherOpts.inclusiveEnd) {\n inclusiveEnd = false;\n }\n\n return matcherOpts;\n });\n\n\n const startKeys = opts.map(opt => opt.startKey);\n const endKeys = opts.map(opt => opt.endKey);\n const queryPlan: RxQueryPlan = {\n index,\n startKeys,\n endKeys,\n inclusiveEnd,\n inclusiveStart,\n sortSatisfiedByIndex: !hasDescSorting && optimalSortIndexCompareString === index.filter(f => !sortIrrelevevantFields.has(f)).join(','),\n selectorSatisfiedByIndex: isSelectorSatisfiedByIndex(index, query.selector, startKeys, endKeys)\n };\n const quality = 
rateQueryPlan(\n schema,\n query,\n queryPlan\n );\n if (\n (\n quality >= currentBestQuality\n ) ||\n query.index\n ) {\n currentBestQuality = quality;\n currentBestQueryPlan = queryPlan;\n }\n });\n\n /**\n * In all cases and index must be found\n */\n if (!currentBestQueryPlan) {\n throw newRxError('SNH', {\n query\n });\n }\n\n return currentBestQueryPlan;\n}\n\nexport const LOGICAL_OPERATORS = new Set(['$eq', '$gt', '$gte', '$lt', '$lte']);\nexport const LOWER_BOUND_LOGICAL_OPERATORS = new Set(['$eq', '$gt', '$gte']);\nexport const UPPER_BOUND_LOGICAL_OPERATORS = new Set(['$eq', '$lt', '$lte']);\n\n\nexport function isSelectorSatisfiedByIndex(\n index: string[],\n selector: MangoQuerySelector,\n startKeys: RxQueryPlanKey[],\n endKeys: RxQueryPlanKey[]\n): boolean {\n\n\n /**\n * Not satisfied if one or more operators are non-logical\n * operators that can never be satisfied by an index.\n */\n const selectorEntries = Object.entries(selector);\n const hasNonMatchingOperator = selectorEntries\n .find(([fieldName, operation]) => {\n if (!index.includes(fieldName)) {\n return true;\n }\n const hasNonLogicOperator = Object.entries(operation as any)\n .find(([op, _value]) => !LOGICAL_OPERATORS.has(op));\n return hasNonLogicOperator;\n });\n\n if (hasNonMatchingOperator) {\n return false;\n }\n\n /**\n * Not satisfied if contains $and or $or operations.\n */\n if (selector.$and || selector.$or) {\n return false;\n }\n\n\n\n // ensure all lower bound in index\n const satisfieldLowerBound: string[] = [];\n const lowerOperatorFieldNames = new Set();\n for (const [fieldName, operation] of Object.entries(selector)) {\n if (!index.includes(fieldName)) {\n return false;\n }\n\n // If more then one logic op on the same field, we have to selector-match.\n const lowerLogicOps = Object.keys(operation as any).filter(key => LOWER_BOUND_LOGICAL_OPERATORS.has(key));\n if (lowerLogicOps.length > 1) {\n return false;\n }\n\n const hasLowerLogicOp = lowerLogicOps[0];\n if 
(hasLowerLogicOp) {\n lowerOperatorFieldNames.add(fieldName);\n }\n if (hasLowerLogicOp !== '$eq') {\n if (satisfieldLowerBound.length > 0) {\n return false;\n } else {\n satisfieldLowerBound.push(hasLowerLogicOp);\n }\n }\n }\n\n // ensure all upper bound in index\n const satisfieldUpperBound: string[] = [];\n const upperOperatorFieldNames = new Set();\n for (const [fieldName, operation] of Object.entries(selector)) {\n if (!index.includes(fieldName)) {\n return false;\n }\n\n // If more then one logic op on the same field, we have to selector-match.\n const upperLogicOps = Object.keys(operation as any).filter(key => UPPER_BOUND_LOGICAL_OPERATORS.has(key));\n if (upperLogicOps.length > 1) {\n return false;\n }\n\n const hasUperLogicOp = upperLogicOps[0];\n if (hasUperLogicOp) {\n upperOperatorFieldNames.add(fieldName);\n }\n if (hasUperLogicOp !== '$eq') {\n if (satisfieldUpperBound.length > 0) {\n return false;\n } else {\n satisfieldUpperBound.push(hasUperLogicOp);\n }\n }\n }\n\n\n /**\n * If the index contains a non-relevant field between\n * the relevant fields, then the index is not satisfying.\n */\n let i = 0;\n for (const fieldName of index) {\n for (const set of [\n lowerOperatorFieldNames,\n upperOperatorFieldNames\n ]) {\n if (\n !set.has(fieldName) &&\n set.size > 0\n ) {\n return false;\n }\n set.delete(fieldName);\n }\n\n const startKey = startKeys[i];\n const endKey = endKeys[i];\n\n if (\n startKey !== endKey && (\n lowerOperatorFieldNames.size > 0 &&\n upperOperatorFieldNames.size > 0\n )\n ) {\n return false;\n }\n\n i++;\n }\n\n return true;\n}\n\nexport function getMatcherQueryOpts(\n operator: string,\n operatorValue: any\n): Partial {\n switch (operator) {\n case '$eq':\n return {\n startKey: operatorValue,\n endKey: operatorValue,\n inclusiveEnd: true,\n inclusiveStart: true\n };\n case '$lte':\n return {\n endKey: operatorValue,\n inclusiveEnd: true\n };\n case '$gte':\n return {\n startKey: operatorValue,\n inclusiveStart: true\n };\n 
case '$lt':\n return {\n endKey: operatorValue,\n inclusiveEnd: false\n };\n case '$gt':\n return {\n startKey: operatorValue,\n inclusiveStart: false\n };\n default:\n throw new Error('SNH');\n }\n}\n\n\n/**\n * Returns a number that determines the quality of the query plan.\n * Higher number means better query plan.\n */\nexport function rateQueryPlan(\n schema: RxJsonSchema>,\n query: FilledMangoQuery,\n queryPlan: RxQueryPlan\n): number {\n let quality: number = 0;\n const addQuality = (value: number) => {\n if (value > 0) {\n quality = quality + value;\n }\n };\n\n const pointsPerMatchingKey = 10;\n\n const nonMinKeyCount = countUntilNotMatching(queryPlan.startKeys, keyValue => keyValue !== INDEX_MIN && keyValue !== INDEX_MAX);\n addQuality(nonMinKeyCount * pointsPerMatchingKey);\n\n const nonMaxKeyCount = countUntilNotMatching(queryPlan.startKeys, keyValue => keyValue !== INDEX_MAX && keyValue !== INDEX_MIN);\n addQuality(nonMaxKeyCount * pointsPerMatchingKey);\n\n const equalKeyCount = countUntilNotMatching(queryPlan.startKeys, (keyValue, idx) => {\n if (keyValue === queryPlan.endKeys[idx]) {\n return true;\n } else {\n return false;\n }\n });\n addQuality(equalKeyCount * pointsPerMatchingKey * 1.5);\n\n const pointsIfNoReSortMustBeDone = queryPlan.sortSatisfiedByIndex ? 
5 : 0;\n addQuality(pointsIfNoReSortMustBeDone);\n\n return quality;\n}\n"],"mappings":"AAAA,SAASA,qBAAqB,QAAQ,0BAA0B;AAChE,SAASC,UAAU,QAAQ,eAAe;AAC1C,SAASC,qBAAqB,QAAQ,uBAAuB;AAW7D,OAAO,IAAMC,SAAS,GAAGC,MAAM,CAACC,YAAY,CAAC,KAAK,CAAC;;AAEnD;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,IAAMC,SAAS,GAAGC,MAAM,CAACC,gBAAgB;;AAEhD;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASC,YAAYA,CACxBC,MAA+C,EAC/CC,KAAkC,EACvB;EACX,IAAMC,QAAQ,GAAGD,KAAK,CAACC,QAAQ;EAE/B,IAAIC,OAAmB,GAAGH,MAAM,CAACG,OAAO,GAAGH,MAAM,CAACG,OAAO,CAACC,KAAK,CAAC,CAAC,CAAC,GAAU,EAAE;EAC9E,IAAIH,KAAK,CAACI,KAAK,EAAE;IACbF,OAAO,GAAG,CAACF,KAAK,CAACI,KAAK,CAAC;EAC3B;;EAEA;AACJ;AACA;AACA;EACI,IAAMC,cAAc,GAAG,CAAC,CAACL,KAAK,CAACM,IAAI,CAACC,IAAI,CAACC,SAAS,IAAIC,MAAM,CAACC,MAAM,CAACF,SAAS,CAAC,CAAC,CAAC,CAAC,KAAK,MAAM,CAAC;;EAE7F;AACJ;AACA;AACA;AACA;AACA;AACA;EACI,IAAMG,sBAAsB,GAAG,IAAIC,GAAG,CAAC,CAAC;EACxCH,MAAM,CAACI,IAAI,CAACZ,QAAQ,CAAC,CAACa,OAAO,CAACC,SAAS,IAAI;IACvC,IAAMC,UAAU,GAAGzB,qBAAqB,CAACQ,MAAM,EAAEgB,SAAS,CAAC;IAC3D,IACIC,UAAU,IACVA,UAAU,CAACC,IAAI,KAAK,SAAS,IAC7BR,MAAM,CAACS,SAAS,CAACC,cAAc,CAACC,IAAI,CAAEnB,QAAQ,CAASc,SAAS,CAAC,EAAE,KAAK,CAAC,EAC3E;MACEJ,sBAAsB,CAACU,GAAG,CAACN,SAAS,CAAC;IACzC;EACJ,CAAC,CAAC;EAGF,IAAMO,gBAAgB,GAAGtB,KAAK,CAACM,IAAI,CAACiB,GAAG,CAACf,SAAS,IAAIC,MAAM,CAACI,IAAI,CAACL,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;EAC/E,IAAMgB,6BAA6B,GAAGF,gBAAgB,CACjDG,MAAM,CAACC,CAAC,IAAI,CAACf,sBAAsB,CAACgB,GAAG,CAACD,CAAC,CAAC,CAAC,CAC3CE,IAAI,CAAC,GAAG,CAAC;EAEd,IAAIC,kBAAkB,GAAG,CAAC,CAAC;EAC3B,IAAIC,oBAA6C;;EAEjD;AACJ;AACA;AACA;EACI5B,OAAO,CAACY,OAAO,CAAEV,KAAK,IAAK;IACvB,IAAI2B,YAAY,GAAG,IAAI;IACvB,IAAIC,cAAc,GAAG,IAAI;IACzB,IAAMC,IAAyB,GAAG7B,KAAK,CAACmB,GAAG,CAACW,UAAU,IAAI;MACtD,IAAMC,OAAO,GAAIlC,QAAQ,CAASiC,UAAU,CAAC;MAC7C,IAAME,SAAS,GAAGD,OAAO,GAAG1B,MAAM,CAACI,IAAI,CAACsB,OAAO,CAAC,GAAG,EAAE;MAErD,IAAIE,WAA8B,GAAG,CAAC,CAAQ;MAC9C,IACI,CAACF,OAAO,IACR,CAACC,SAAS,CAACE,MAAM,EACnB;QACE,IAAMC,QAAQ,GAAGP,cAAc,GAAGrC,SAAS,GAAGH,SAAS;QACvD6C,WAAW,GAAG;UACVE,QAAQ;UACRC,MAAM,EAAET,YAAY,
GAAGvC,SAAS,GAAGG,SAAS;UAC5CqC,cAAc,EAAE,IAAI;UACpBD,YAAY,EAAE;QAClB,CAAC;MACL,CAAC,MAAM;QACHK,SAAS,CAACtB,OAAO,CAAC2B,QAAQ,IAAI;UAC1B,IAAIC,iBAAiB,CAACf,GAAG,CAACc,QAAQ,CAAC,EAAE;YACjC,IAAME,aAAa,GAAGR,OAAO,CAACM,QAAQ,CAAC;YACvC,IAAMG,WAAW,GAAGC,mBAAmB,CAACJ,QAAQ,EAAEE,aAAa,CAAC;YAChEN,WAAW,GAAG5B,MAAM,CAACqC,MAAM,CAACT,WAAW,EAAEO,WAAW,CAAC;UACzD;QACJ,CAAC,CAAC;MACN;;MAEA;MACA,IAAI,OAAOP,WAAW,CAACE,QAAQ,KAAK,WAAW,EAAE;QAC7CF,WAAW,CAACE,QAAQ,GAAG5C,SAAS;MACpC;MACA,IAAI,OAAO0C,WAAW,CAACG,MAAM,KAAK,WAAW,EAAE;QAC3CH,WAAW,CAACG,MAAM,GAAGhD,SAAS;MAClC;MACA,IAAI,OAAO6C,WAAW,CAACL,cAAc,KAAK,WAAW,EAAE;QACnDK,WAAW,CAACL,cAAc,GAAG,IAAI;MACrC;MACA,IAAI,OAAOK,WAAW,CAACN,YAAY,KAAK,WAAW,EAAE;QACjDM,WAAW,CAACN,YAAY,GAAG,IAAI;MACnC;MAEA,IAAIC,cAAc,IAAI,CAACK,WAAW,CAACL,cAAc,EAAE;QAC/CA,cAAc,GAAG,KAAK;MAC1B;MACA,IAAID,YAAY,IAAI,CAACM,WAAW,CAACN,YAAY,EAAE;QAC3CA,YAAY,GAAG,KAAK;MACxB;MAEA,OAAOM,WAAW;IACtB,CAAC,CAAC;IAGF,IAAMU,SAAS,GAAGd,IAAI,CAACV,GAAG,CAACyB,GAAG,IAAIA,GAAG,CAACT,QAAQ,CAAC;IAC/C,IAAMU,OAAO,GAAGhB,IAAI,CAACV,GAAG,CAACyB,GAAG,IAAIA,GAAG,CAACR,MAAM,CAAC;IAC3C,IAAMU,SAAsB,GAAG;MAC3B9C,KAAK;MACL2C,SAAS;MACTE,OAAO;MACPlB,YAAY;MACZC,cAAc;MACdmB,oBAAoB,EAAE,CAAC9C,cAAc,IAAImB,6BAA6B,KAAKpB,KAAK,CAACqB,MAAM,CAACC,CAAC,IAAI,CAACf,sBAAsB,CAACgB,GAAG,CAACD,CAAC,CAAC,CAAC,CAACE,IAAI,CAAC,GAAG,CAAC;MACtIwB,wBAAwB,EAAEC,0BAA0B,CAACjD,KAAK,EAAEJ,KAAK,CAACC,QAAQ,EAAE8C,SAAS,EAAEE,OAAO;IAClG,CAAC;IACD,IAAMK,OAAO,GAAGC,aAAa,CACzBxD,MAAM,EACNC,KAAK,EACLkD,SACJ,CAAC;IACD,IAEQI,OAAO,IAAIzB,kBAAkB,IAEjC7B,KAAK,CAACI,KAAK,EACb;MACEyB,kBAAkB,GAAGyB,OAAO;MAC5BxB,oBAAoB,GAAGoB,SAAS;IACpC;EACJ,CAAC,CAAC;;EAEF;AACJ;AACA;EACI,IAAI,CAACpB,oBAAoB,EAAE;IACvB,MAAMxC,UAAU,CAAC,KAAK,EAAE;MACpBU;IACJ,CAAC,CAAC;EACN;EAEA,OAAO8B,oBAAoB;AAC/B;AAEA,OAAO,IAAMY,iBAAiB,GAAG,IAAI9B,GAAG,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;AAC/E,OAAO,IAAM4C,6BAA6B,GAAG,IAAI5C,GAAG,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM,CAAC,CAAC;AAC5E,OAAO,IAAM6C,6BAA6B,GAAG,IAAI7C,GAAG,CAAC,CAAC,KAAK,EAAE,KAAK,EAAE,MAAM
,CAAC,CAAC;AAG5E,OAAO,SAASyC,0BAA0BA,CACtCjD,KAAe,EACfH,QAAiC,EACjC8C,SAA2B,EAC3BE,OAAyB,EAClB;EAGP;AACJ;AACA;AACA;EACI,IAAMS,eAAe,GAAGjD,MAAM,CAACkD,OAAO,CAAC1D,QAAQ,CAAC;EAChD,IAAM2D,sBAAsB,GAAGF,eAAe,CACzCnD,IAAI,CAAC,CAAC,CAACQ,SAAS,EAAE8C,SAAS,CAAC,KAAK;IAC9B,IAAI,CAACzD,KAAK,CAAC0D,QAAQ,CAAC/C,SAAS,CAAC,EAAE;MAC5B,OAAO,IAAI;IACf;IACA,IAAMgD,mBAAmB,GAAGtD,MAAM,CAACkD,OAAO,CAACE,SAAgB,CAAC,CACvDtD,IAAI,CAAC,CAAC,CAACyD,EAAE,EAAEC,MAAM,CAAC,KAAK,CAACvB,iBAAiB,CAACf,GAAG,CAACqC,EAAE,CAAC,CAAC;IACvD,OAAOD,mBAAmB;EAC9B,CAAC,CAAC;EAEN,IAAIH,sBAAsB,EAAE;IACxB,OAAO,KAAK;EAChB;;EAEA;AACJ;AACA;EACI,IAAI3D,QAAQ,CAACiE,IAAI,IAAIjE,QAAQ,CAACkE,GAAG,EAAE;IAC/B,OAAO,KAAK;EAChB;;EAIA;EACA,IAAMC,oBAA8B,GAAG,EAAE;EACzC,IAAMC,uBAAuB,GAAG,IAAIzD,GAAG,CAAS,CAAC;EACjD,KAAK,IAAM,CAACG,SAAS,EAAE8C,SAAS,CAAC,IAAIpD,MAAM,CAACkD,OAAO,CAAC1D,QAAQ,CAAC,EAAE;IAC3D,IAAI,CAACG,KAAK,CAAC0D,QAAQ,CAAC/C,SAAS,CAAC,EAAE;MAC5B,OAAO,KAAK;IAChB;;IAEA;IACA,IAAMuD,aAAa,GAAG7D,MAAM,CAACI,IAAI,CAACgD,SAAgB,CAAC,CAACpC,MAAM,CAAC8C,GAAG,IAAIf,6BAA6B,CAAC7B,GAAG,CAAC4C,GAAG,CAAC,CAAC;IACzG,IAAID,aAAa,CAAChC,MAAM,GAAG,CAAC,EAAE;MAC1B,OAAO,KAAK;IAChB;IAEA,IAAMkC,eAAe,GAAGF,aAAa,CAAC,CAAC,CAAC;IACxC,IAAIE,eAAe,EAAE;MACjBH,uBAAuB,CAAChD,GAAG,CAACN,SAAS,CAAC;IAC1C;IACA,IAAIyD,eAAe,KAAK,KAAK,EAAE;MAC3B,IAAIJ,oBAAoB,CAAC9B,MAAM,GAAG,CAAC,EAAE;QACjC,OAAO,KAAK;MAChB,CAAC,MAAM;QACH8B,oBAAoB,CAACK,IAAI,CAACD,eAAe,CAAC;MAC9C;IACJ;EACJ;;EAEA;EACA,IAAME,oBAA8B,GAAG,EAAE;EACzC,IAAMC,uBAAuB,GAAG,IAAI/D,GAAG,CAAS,CAAC;EACjD,KAAK,IAAM,CAACG,UAAS,EAAE8C,UAAS,CAAC,IAAIpD,MAAM,CAACkD,OAAO,CAAC1D,QAAQ,CAAC,EAAE;IAC3D,IAAI,CAACG,KAAK,CAAC0D,QAAQ,CAAC/C,UAAS,CAAC,EAAE;MAC5B,OAAO,KAAK;IAChB;;IAEA;IACA,IAAM6D,aAAa,GAAGnE,MAAM,CAACI,IAAI,CAACgD,UAAgB,CAAC,CAACpC,MAAM,CAAC8C,GAAG,IAAId,6BAA6B,CAAC9B,GAAG,CAAC4C,GAAG,CAAC,CAAC;IACzG,IAAIK,aAAa,CAACtC,MAAM,GAAG,CAAC,EAAE;MAC1B,OAAO,KAAK;IAChB;IAEA,IAAMuC,cAAc,GAAGD,aAAa,CAAC,CAAC,CAAC;IACvC,IAAIC,cAAc,EAAE;MAChBF,uBAAuB,CAACtD,GAAG,CAACN,UAAS,CAAC;IAC1C;IACA,IAAI8D,cAAc,KAAK,KAAK,EAAE;M
AC1B,IAAIH,oBAAoB,CAACpC,MAAM,GAAG,CAAC,EAAE;QACjC,OAAO,KAAK;MAChB,CAAC,MAAM;QACHoC,oBAAoB,CAACD,IAAI,CAACI,cAAc,CAAC;MAC7C;IACJ;EACJ;;EAGA;AACJ;AACA;AACA;EACI,IAAIC,CAAC,GAAG,CAAC;EACT,KAAK,IAAM/D,WAAS,IAAIX,KAAK,EAAE;IAC3B,KAAK,IAAM2E,GAAG,IAAI,CACdV,uBAAuB,EACvBM,uBAAuB,CAC1B,EAAE;MACC,IACI,CAACI,GAAG,CAACpD,GAAG,CAACZ,WAAS,CAAC,IACnBgE,GAAG,CAACC,IAAI,GAAG,CAAC,EACd;QACE,OAAO,KAAK;MAChB;MACAD,GAAG,CAACE,MAAM,CAAClE,WAAS,CAAC;IACzB;IAEA,IAAMwB,QAAQ,GAAGQ,SAAS,CAAC+B,CAAC,CAAC;IAC7B,IAAMtC,MAAM,GAAGS,OAAO,CAAC6B,CAAC,CAAC;IAEzB,IACIvC,QAAQ,KAAKC,MAAM,IACf6B,uBAAuB,CAACW,IAAI,GAAG,CAAC,IAChCL,uBAAuB,CAACK,IAAI,GAAG,CAClC,EACH;MACE,OAAO,KAAK;IAChB;IAEAF,CAAC,EAAE;EACP;EAEA,OAAO,IAAI;AACf;AAEA,OAAO,SAASjC,mBAAmBA,CAC/BJ,QAAgB,EAChBE,aAAkB,EACQ;EAC1B,QAAQF,QAAQ;IACZ,KAAK,KAAK;MACN,OAAO;QACHF,QAAQ,EAAEI,aAAa;QACvBH,MAAM,EAAEG,aAAa;QACrBZ,YAAY,EAAE,IAAI;QAClBC,cAAc,EAAE;MACpB,CAAC;IACL,KAAK,MAAM;MACP,OAAO;QACHQ,MAAM,EAAEG,aAAa;QACrBZ,YAAY,EAAE;MAClB,CAAC;IACL,KAAK,MAAM;MACP,OAAO;QACHQ,QAAQ,EAAEI,aAAa;QACvBX,cAAc,EAAE;MACpB,CAAC;IACL,KAAK,KAAK;MACN,OAAO;QACHQ,MAAM,EAAEG,aAAa;QACrBZ,YAAY,EAAE;MAClB,CAAC;IACL,KAAK,KAAK;MACN,OAAO;QACHQ,QAAQ,EAAEI,aAAa;QACvBX,cAAc,EAAE;MACpB,CAAC;IACL;MACI,MAAM,IAAIkD,KAAK,CAAC,KAAK,CAAC;EAC9B;AACJ;;AAGA;AACA;AACA;AACA;AACA,OAAO,SAAS3B,aAAaA,CACzBxD,MAA+C,EAC/CC,KAAkC,EAClCkD,SAAsB,EAChB;EACN,IAAII,OAAe,GAAG,CAAC;EACvB,IAAM6B,UAAU,GAAIC,KAAa,IAAK;IAClC,IAAIA,KAAK,GAAG,CAAC,EAAE;MACX9B,OAAO,GAAGA,OAAO,GAAG8B,KAAK;IAC7B;EACJ,CAAC;EAED,IAAMC,oBAAoB,GAAG,EAAE;EAE/B,IAAMC,cAAc,GAAGjG,qBAAqB,CAAC6D,SAAS,CAACH,SAAS,EAAEwC,QAAQ,IAAIA,QAAQ,KAAK5F,SAAS,IAAI4F,QAAQ,KAAK/F,SAAS,CAAC;EAC/H2F,UAAU,CAACG,cAAc,GAAGD,oBAAoB,CAAC;EAEjD,IAAMG,cAAc,GAAGnG,qBAAqB,CAAC6D,SAAS,CAACH,SAAS,EAAEwC,QAAQ,IAAIA,QAAQ,KAAK/F,SAAS,IAAI+F,QAAQ,KAAK5F,SAAS,CAAC;EAC/HwF,UAAU,CAACK,cAAc,GAAGH,oBAAoB,CAAC;EAEjD,IAAMI,aAAa,GAAGpG,qBAAqB,CAAC6D,SAAS,CAACH,SAAS,EAAE,CAACwC,QAAQ,EAAEG,GAAG,KAAK;IAChF,IAAIH,QAAQ,KAAKrC,SAAS,CAACD,OAAO,CAACyC,GAAG,CAAC,EAAE;MACrC,OAAO,IAAI;IACf,CAAC,
MAAM;MACH,OAAO,KAAK;IAChB;EACJ,CAAC,CAAC;EACFP,UAAU,CAACM,aAAa,GAAGJ,oBAAoB,GAAG,GAAG,CAAC;EAEtD,IAAMM,0BAA0B,GAAGzC,SAAS,CAACC,oBAAoB,GAAG,CAAC,GAAG,CAAC;EACzEgC,UAAU,CAACQ,0BAA0B,CAAC;EAEtC,OAAOrC,OAAO;AAClB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/replication-protocol/checkpoint.js b/dist/esm/replication-protocol/checkpoint.js deleted file mode 100644 index 76729e19d3d..00000000000 --- a/dist/esm/replication-protocol/checkpoint.js +++ /dev/null @@ -1,94 +0,0 @@ -import { getComposedPrimaryKeyOfDocumentData } from "../rx-schema-helper.js"; -import { stackCheckpoints } from "../rx-storage-helper.js"; -import { createRevision, ensureNotFalsy, getDefaultRevision, getDefaultRxDocumentMeta, now } from "../plugins/utils/index.js"; -export async function getLastCheckpointDoc(state, direction) { - var checkpointDocId = getComposedPrimaryKeyOfDocumentData(state.input.metaInstance.schema, { - isCheckpoint: '1', - itemId: direction - }); - var checkpointResult = await state.input.metaInstance.findDocumentsById([checkpointDocId], false); - var checkpointDoc = checkpointResult[0]; - state.lastCheckpointDoc[direction] = checkpointDoc; - if (checkpointDoc) { - return checkpointDoc.checkpointData; - } else { - return undefined; - } -} - -/** - * Sets the checkpoint, - * automatically resolves conflicts that appear. - */ -export async function setCheckpoint(state, direction, checkpoint) { - state.checkpointQueue = state.checkpointQueue.then(async () => { - var previousCheckpointDoc = state.lastCheckpointDoc[direction]; - if (checkpoint && - /** - * If the replication is already canceled, - * we do not write a checkpoint - * because that could mean we write a checkpoint - * for data that has been fetched from the master - * but not been written to the child. - */ - !state.events.canceled.getValue() && ( - /** - * Only write checkpoint if it is different from before - * to have less writes to the storage. 
- */ - - !previousCheckpointDoc || JSON.stringify(previousCheckpointDoc.checkpointData) !== JSON.stringify(checkpoint))) { - var newDoc = { - id: '', - isCheckpoint: '1', - itemId: direction, - _deleted: false, - _attachments: {}, - checkpointData: checkpoint, - _meta: getDefaultRxDocumentMeta(), - _rev: getDefaultRevision() - }; - newDoc.id = getComposedPrimaryKeyOfDocumentData(state.input.metaInstance.schema, newDoc); - while (!state.events.canceled.getValue()) { - /** - * Instead of just storing the new checkpoint, - * we have to stack up the checkpoint with the previous one. - * This is required for plugins like the sharding RxStorage - * where the changeStream events only contain a Partial of the - * checkpoint. - */ - if (previousCheckpointDoc) { - newDoc.checkpointData = stackCheckpoints([previousCheckpointDoc.checkpointData, newDoc.checkpointData]); - } - newDoc._meta.lwt = now(); - newDoc._rev = createRevision(await state.checkpointKey, previousCheckpointDoc); - if (state.events.canceled.getValue()) { - return; - } - var result = await state.input.metaInstance.bulkWrite([{ - previous: previousCheckpointDoc, - document: newDoc - }], 'replication-set-checkpoint'); - var sucessDoc = result.success[0]; - if (sucessDoc) { - state.lastCheckpointDoc[direction] = sucessDoc; - return; - } else { - var error = result.error[0]; - if (error.status !== 409) { - throw error; - } else { - previousCheckpointDoc = ensureNotFalsy(error.documentInDb); - newDoc._rev = createRevision(await state.checkpointKey, previousCheckpointDoc); - } - } - } - } - }); - await state.checkpointQueue; -} -export async function getCheckpointKey(input) { - var hash = await input.hashFunction([input.identifier, input.forkInstance.databaseName, input.forkInstance.collectionName].join('||')); - return 'rx_storage_replication_' + hash; -} -//# sourceMappingURL=checkpoint.js.map \ No newline at end of file diff --git a/dist/esm/replication-protocol/checkpoint.js.map 
b/dist/esm/replication-protocol/checkpoint.js.map deleted file mode 100644 index b371a09805d..00000000000 --- a/dist/esm/replication-protocol/checkpoint.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"checkpoint.js","names":["getComposedPrimaryKeyOfDocumentData","stackCheckpoints","createRevision","ensureNotFalsy","getDefaultRevision","getDefaultRxDocumentMeta","now","getLastCheckpointDoc","state","direction","checkpointDocId","input","metaInstance","schema","isCheckpoint","itemId","checkpointResult","findDocumentsById","checkpointDoc","lastCheckpointDoc","checkpointData","undefined","setCheckpoint","checkpoint","checkpointQueue","then","previousCheckpointDoc","events","canceled","getValue","JSON","stringify","newDoc","id","_deleted","_attachments","_meta","_rev","lwt","checkpointKey","result","bulkWrite","previous","document","sucessDoc","success","error","status","documentInDb","getCheckpointKey","hash","hashFunction","identifier","forkInstance","databaseName","collectionName","join"],"sources":["../../../src/replication-protocol/checkpoint.ts"],"sourcesContent":["import { getComposedPrimaryKeyOfDocumentData } from '../rx-schema-helper.ts';\nimport { stackCheckpoints } from '../rx-storage-helper.ts';\nimport type {\n RxDocumentData,\n RxStorageInstanceReplicationInput,\n RxStorageInstanceReplicationState,\n RxStorageReplicationDirection,\n RxStorageReplicationMeta\n} from '../types/index.d.ts';\nimport {\n createRevision,\n ensureNotFalsy,\n getDefaultRevision,\n getDefaultRxDocumentMeta,\n now\n} from '../plugins/utils/index.ts';\n\nexport async function getLastCheckpointDoc(\n state: RxStorageInstanceReplicationState,\n direction: RxStorageReplicationDirection\n): Promise {\n const checkpointDocId = getComposedPrimaryKeyOfDocumentData(\n state.input.metaInstance.schema,\n {\n isCheckpoint: '1',\n itemId: direction\n }\n );\n const checkpointResult = await state.input.metaInstance.findDocumentsById(\n [\n checkpointDocId\n ],\n false\n );\n\n const 
checkpointDoc = checkpointResult[0];\n state.lastCheckpointDoc[direction] = checkpointDoc;\n if (checkpointDoc) {\n return checkpointDoc.checkpointData;\n } else {\n return undefined;\n }\n}\n\n\n/**\n * Sets the checkpoint,\n * automatically resolves conflicts that appear.\n */\nexport async function setCheckpoint(\n state: RxStorageInstanceReplicationState,\n direction: RxStorageReplicationDirection,\n checkpoint: CheckpointType\n) {\n state.checkpointQueue = state.checkpointQueue.then(async () => {\n let previousCheckpointDoc = state.lastCheckpointDoc[direction];\n if (\n checkpoint &&\n /**\n * If the replication is already canceled,\n * we do not write a checkpoint\n * because that could mean we write a checkpoint\n * for data that has been fetched from the master\n * but not been written to the child.\n */\n !state.events.canceled.getValue() &&\n /**\n * Only write checkpoint if it is different from before\n * to have less writes to the storage.\n */\n (\n !previousCheckpointDoc ||\n JSON.stringify(previousCheckpointDoc.checkpointData) !== JSON.stringify(checkpoint)\n )\n ) {\n const newDoc: RxDocumentData> = {\n id: '',\n isCheckpoint: '1',\n itemId: direction,\n _deleted: false,\n _attachments: {},\n checkpointData: checkpoint,\n _meta: getDefaultRxDocumentMeta(),\n _rev: getDefaultRevision()\n };\n newDoc.id = getComposedPrimaryKeyOfDocumentData(\n state.input.metaInstance.schema,\n newDoc\n );\n while (!state.events.canceled.getValue()) {\n /**\n * Instead of just storing the new checkpoint,\n * we have to stack up the checkpoint with the previous one.\n * This is required for plugins like the sharding RxStorage\n * where the changeStream events only contain a Partial of the\n * checkpoint.\n */\n if (previousCheckpointDoc) {\n newDoc.checkpointData = stackCheckpoints([\n previousCheckpointDoc.checkpointData,\n newDoc.checkpointData\n ]);\n }\n newDoc._meta.lwt = now();\n newDoc._rev = createRevision(\n await state.checkpointKey,\n previousCheckpointDoc\n 
);\n\n if (state.events.canceled.getValue()) {\n return;\n }\n\n const result = await state.input.metaInstance.bulkWrite([{\n previous: previousCheckpointDoc,\n document: newDoc\n }], 'replication-set-checkpoint');\n\n const sucessDoc = result.success[0];\n if (sucessDoc) {\n state.lastCheckpointDoc[direction] = sucessDoc;\n return;\n } else {\n const error = result.error[0];\n if (error.status !== 409) {\n throw error;\n } else {\n previousCheckpointDoc = ensureNotFalsy(error.documentInDb);\n newDoc._rev = createRevision(\n await state.checkpointKey,\n previousCheckpointDoc\n );\n }\n }\n }\n }\n });\n await state.checkpointQueue;\n}\n\nexport async function getCheckpointKey(\n input: RxStorageInstanceReplicationInput\n): Promise {\n const hash = await input.hashFunction([\n input.identifier,\n input.forkInstance.databaseName,\n input.forkInstance.collectionName\n ].join('||'));\n return 'rx_storage_replication_' + hash;\n}\n"],"mappings":"AAAA,SAASA,mCAAmC,QAAQ,wBAAwB;AAC5E,SAASC,gBAAgB,QAAQ,yBAAyB;AAQ1D,SACIC,cAAc,EACdC,cAAc,EACdC,kBAAkB,EAClBC,wBAAwB,EACxBC,GAAG,QACA,2BAA2B;AAElC,OAAO,eAAeC,oBAAoBA,CACtCC,KAAmD,EACnDC,SAAwC,EACL;EACnC,IAAMC,eAAe,GAAGV,mCAAmC,CACvDQ,KAAK,CAACG,KAAK,CAACC,YAAY,CAACC,MAAM,EAC/B;IACIC,YAAY,EAAE,GAAG;IACjBC,MAAM,EAAEN;EACZ,CACJ,CAAC;EACD,IAAMO,gBAAgB,GAAG,MAAMR,KAAK,CAACG,KAAK,CAACC,YAAY,CAACK,iBAAiB,CACrE,CACIP,eAAe,CAClB,EACD,KACJ,CAAC;EAED,IAAMQ,aAAa,GAAGF,gBAAgB,CAAC,CAAC,CAAC;EACzCR,KAAK,CAACW,iBAAiB,CAACV,SAAS,CAAC,GAAGS,aAAa;EAClD,IAAIA,aAAa,EAAE;IACf,OAAOA,aAAa,CAACE,cAAc;EACvC,CAAC,MAAM;IACH,OAAOC,SAAS;EACpB;AACJ;;AAGA;AACA;AACA;AACA;AACA,OAAO,eAAeC,aAAaA,CAC/Bd,KAAmD,EACnDC,SAAwC,EACxCc,UAA0B,EAC5B;EACEf,KAAK,CAACgB,eAAe,GAAGhB,KAAK,CAACgB,eAAe,CAACC,IAAI,CAAC,YAAY;IAC3D,IAAIC,qBAAqB,GAAGlB,KAAK,CAACW,iBAAiB,CAACV,SAAS,CAAC;IAC9D,IACIc,UAAU;IACV;AACZ;AACA;AACA;AACA;AACA;AACA;IACY,CAACf,KAAK,CAACmB,MAAM,CAACC,QAAQ,CAACC,QAAQ,CAAC,CAAC;IACjC;AACZ;AACA;AACA;;IAEgB,CAACH,qBAAqB,IACtBI,IAAI,CAACC,SAAS,CAACL,qBAAqB,CAACN,cAAc,CAA
C,KAAKU,IAAI,CAACC,SAAS,CAACR,UAAU,CAAC,CACtF,EACH;MACE,IAAMS,MAA2E,GAAG;QAChFC,EAAE,EAAE,EAAE;QACNnB,YAAY,EAAE,GAAG;QACjBC,MAAM,EAAEN,SAAS;QACjByB,QAAQ,EAAE,KAAK;QACfC,YAAY,EAAE,CAAC,CAAC;QAChBf,cAAc,EAAEG,UAAU;QAC1Ba,KAAK,EAAE/B,wBAAwB,CAAC,CAAC;QACjCgC,IAAI,EAAEjC,kBAAkB,CAAC;MAC7B,CAAC;MACD4B,MAAM,CAACC,EAAE,GAAGjC,mCAAmC,CAC3CQ,KAAK,CAACG,KAAK,CAACC,YAAY,CAACC,MAAM,EAC/BmB,MACJ,CAAC;MACD,OAAO,CAACxB,KAAK,CAACmB,MAAM,CAACC,QAAQ,CAACC,QAAQ,CAAC,CAAC,EAAE;QACtC;AAChB;AACA;AACA;AACA;AACA;AACA;QACgB,IAAIH,qBAAqB,EAAE;UACvBM,MAAM,CAACZ,cAAc,GAAGnB,gBAAgB,CAAC,CACrCyB,qBAAqB,CAACN,cAAc,EACpCY,MAAM,CAACZ,cAAc,CACxB,CAAC;QACN;QACAY,MAAM,CAACI,KAAK,CAACE,GAAG,GAAGhC,GAAG,CAAC,CAAC;QACxB0B,MAAM,CAACK,IAAI,GAAGnC,cAAc,CACxB,MAAMM,KAAK,CAAC+B,aAAa,EACzBb,qBACJ,CAAC;QAED,IAAIlB,KAAK,CAACmB,MAAM,CAACC,QAAQ,CAACC,QAAQ,CAAC,CAAC,EAAE;UAClC;QACJ;QAEA,IAAMW,MAAM,GAAG,MAAMhC,KAAK,CAACG,KAAK,CAACC,YAAY,CAAC6B,SAAS,CAAC,CAAC;UACrDC,QAAQ,EAAEhB,qBAAqB;UAC/BiB,QAAQ,EAAEX;QACd,CAAC,CAAC,EAAE,4BAA4B,CAAC;QAEjC,IAAMY,SAAS,GAAGJ,MAAM,CAACK,OAAO,CAAC,CAAC,CAAC;QACnC,IAAID,SAAS,EAAE;UACXpC,KAAK,CAACW,iBAAiB,CAACV,SAAS,CAAC,GAAGmC,SAAS;UAC9C;QACJ,CAAC,MAAM;UACH,IAAME,KAAK,GAAGN,MAAM,CAACM,KAAK,CAAC,CAAC,CAAC;UAC7B,IAAIA,KAAK,CAACC,MAAM,KAAK,GAAG,EAAE;YACtB,MAAMD,KAAK;UACf,CAAC,MAAM;YACHpB,qBAAqB,GAAGvB,cAAc,CAAC2C,KAAK,CAACE,YAAY,CAAC;YAC1DhB,MAAM,CAACK,IAAI,GAAGnC,cAAc,CACxB,MAAMM,KAAK,CAAC+B,aAAa,EACzBb,qBACJ,CAAC;UACL;QACJ;MACJ;IACJ;EACJ,CAAC,CAAC;EACF,MAAMlB,KAAK,CAACgB,eAAe;AAC/B;AAEA,OAAO,eAAeyB,gBAAgBA,CAClCtC,KAAmD,EACpC;EACf,IAAMuC,IAAI,GAAG,MAAMvC,KAAK,CAACwC,YAAY,CAAC,CAClCxC,KAAK,CAACyC,UAAU,EAChBzC,KAAK,CAAC0C,YAAY,CAACC,YAAY,EAC/B3C,KAAK,CAAC0C,YAAY,CAACE,cAAc,CACpC,CAACC,IAAI,CAAC,IAAI,CAAC,CAAC;EACb,OAAO,yBAAyB,GAAGN,IAAI;AAC3C","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/replication-protocol/conflicts.js b/dist/esm/replication-protocol/conflicts.js deleted file mode 100644 index 89c6184d9a2..00000000000 --- a/dist/esm/replication-protocol/conflicts.js 
+++ /dev/null @@ -1,69 +0,0 @@ -import { getDefaultRevision, createRevision, now, flatClone, deepEqual } from "../plugins/utils/index.js"; -import { stripAttachmentsDataFromDocument } from "../rx-storage-helper.js"; -export var defaultConflictHandler = function (i, _context) { - var newDocumentState = stripAttachmentsDataFromDocument(i.newDocumentState); - var realMasterState = stripAttachmentsDataFromDocument(i.realMasterState); - - /** - * If the documents are deep equal, - * we have no conflict. - * On your custom conflict handler you might only - * check some properties, like the updatedAt time, - * for better performance, because deepEqual is expensive. - */ - if (deepEqual(newDocumentState, realMasterState)) { - return Promise.resolve({ - isEqual: true - }); - } - - /** - * The default conflict handler will always - * drop the fork state and use the master state instead. - */ - return Promise.resolve({ - isEqual: false, - documentData: i.realMasterState - }); -}; - -/** - * Resolves a conflict error or determines that the given document states are equal. - * Returns the resolved document that must be written to the fork. - * Then the new document state can be pushed upstream. - * If document is not in conflict, returns undefined. - * If error is non-409, it throws an error. - * Conflicts are only solved in the upstream, never in the downstream. - */ -export async function resolveConflictError(state, input, forkState) { - var conflictHandler = state.input.conflictHandler; - var conflictHandlerOutput = await conflictHandler(input, 'replication-resolve-conflict'); - if (conflictHandlerOutput.isEqual) { - /** - * Documents are equal, - * so this is not a conflict -> do nothing. - */ - return undefined; - } else { - /** - * We have a resolved conflict, - * use the resolved document data. 
- */ - var resolvedDoc = Object.assign({}, conflictHandlerOutput.documentData, { - /** - * Because the resolved conflict is written to the fork, - * we have to keep/update the forks _meta data, not the masters. - */ - _meta: flatClone(forkState._meta), - _rev: getDefaultRevision(), - _attachments: flatClone(forkState._attachments) - }); - resolvedDoc._meta.lwt = now(); - resolvedDoc._rev = createRevision(await state.checkpointKey, forkState); - return { - resolvedDoc, - output: conflictHandlerOutput - }; - } -} -//# sourceMappingURL=conflicts.js.map \ No newline at end of file diff --git a/dist/esm/replication-protocol/conflicts.js.map b/dist/esm/replication-protocol/conflicts.js.map deleted file mode 100644 index 03f84195baa..00000000000 --- a/dist/esm/replication-protocol/conflicts.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"conflicts.js","names":["getDefaultRevision","createRevision","now","flatClone","deepEqual","stripAttachmentsDataFromDocument","defaultConflictHandler","i","_context","newDocumentState","realMasterState","Promise","resolve","isEqual","documentData","resolveConflictError","state","input","forkState","conflictHandler","conflictHandlerOutput","undefined","resolvedDoc","Object","assign","_meta","_rev","_attachments","lwt","checkpointKey","output"],"sources":["../../../src/replication-protocol/conflicts.ts"],"sourcesContent":["import type {\n RxConflictHandler,\n RxConflictHandlerInput,\n RxConflictHandlerOutput,\n RxDocumentData,\n RxStorageInstanceReplicationState\n} from '../types/index.d.ts';\nimport {\n getDefaultRevision,\n createRevision,\n now,\n flatClone,\n deepEqual\n} from '../plugins/utils/index.ts';\nimport { stripAttachmentsDataFromDocument } from '../rx-storage-helper.ts';\n\nexport const defaultConflictHandler: RxConflictHandler = function (\n i: RxConflictHandlerInput,\n _context: string\n): Promise> {\n const newDocumentState = stripAttachmentsDataFromDocument(i.newDocumentState);\n const realMasterState = 
stripAttachmentsDataFromDocument(i.realMasterState);\n\n /**\n * If the documents are deep equal,\n * we have no conflict.\n * On your custom conflict handler you might only\n * check some properties, like the updatedAt time,\n * for better performance, because deepEqual is expensive.\n */\n if (deepEqual(\n newDocumentState,\n realMasterState\n )) {\n return Promise.resolve({\n isEqual: true\n });\n }\n\n /**\n * The default conflict handler will always\n * drop the fork state and use the master state instead.\n */\n return Promise.resolve({\n isEqual: false,\n documentData: i.realMasterState\n });\n};\n\n\n/**\n * Resolves a conflict error or determines that the given document states are equal.\n * Returns the resolved document that must be written to the fork.\n * Then the new document state can be pushed upstream.\n * If document is not in conflict, returns undefined.\n * If error is non-409, it throws an error.\n * Conflicts are only solved in the upstream, never in the downstream.\n */\nexport async function resolveConflictError(\n state: RxStorageInstanceReplicationState,\n input: RxConflictHandlerInput,\n forkState: RxDocumentData\n): Promise<{\n resolvedDoc: RxDocumentData;\n output: RxConflictHandlerOutput;\n} | undefined> {\n const conflictHandler: RxConflictHandler = state.input.conflictHandler;\n const conflictHandlerOutput = await conflictHandler(input, 'replication-resolve-conflict');\n\n if (conflictHandlerOutput.isEqual) {\n /**\n * Documents are equal,\n * so this is not a conflict -> do nothing.\n */\n return undefined;\n } else {\n /**\n * We have a resolved conflict,\n * use the resolved document data.\n */\n const resolvedDoc: RxDocumentData = Object.assign(\n {},\n conflictHandlerOutput.documentData,\n {\n /**\n * Because the resolved conflict is written to the fork,\n * we have to keep/update the forks _meta data, not the masters.\n */\n _meta: flatClone(forkState._meta),\n _rev: getDefaultRevision(),\n _attachments: 
flatClone(forkState._attachments)\n }\n ) as any;\n resolvedDoc._meta.lwt = now();\n resolvedDoc._rev = createRevision(\n await state.checkpointKey,\n forkState\n );\n return {\n resolvedDoc,\n output: conflictHandlerOutput\n };\n }\n}\n"],"mappings":"AAOA,SACIA,kBAAkB,EAClBC,cAAc,EACdC,GAAG,EACHC,SAAS,EACTC,SAAS,QACN,2BAA2B;AAClC,SAASC,gCAAgC,QAAQ,yBAAyB;AAE1E,OAAO,IAAMC,sBAA8C,GAAG,SAAAA,CAC1DC,CAA8B,EAC9BC,QAAgB,EACqB;EACrC,IAAMC,gBAAgB,GAAGJ,gCAAgC,CAACE,CAAC,CAACE,gBAAgB,CAAC;EAC7E,IAAMC,eAAe,GAAGL,gCAAgC,CAACE,CAAC,CAACG,eAAe,CAAC;;EAE3E;AACJ;AACA;AACA;AACA;AACA;AACA;EACI,IAAIN,SAAS,CACTK,gBAAgB,EAChBC,eACJ,CAAC,EAAE;IACC,OAAOC,OAAO,CAACC,OAAO,CAAC;MACnBC,OAAO,EAAE;IACb,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA;EACI,OAAOF,OAAO,CAACC,OAAO,CAAC;IACnBC,OAAO,EAAE,KAAK;IACdC,YAAY,EAAEP,CAAC,CAACG;EACpB,CAAC,CAAC;AACN,CAAC;;AAGD;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,eAAeK,oBAAoBA,CACtCC,KAAmD,EACnDC,KAAwC,EACxCC,SAAoC,EAIzB;EACX,IAAMC,eAA6C,GAAGH,KAAK,CAACC,KAAK,CAACE,eAAe;EACjF,IAAMC,qBAAqB,GAAG,MAAMD,eAAe,CAACF,KAAK,EAAE,8BAA8B,CAAC;EAE1F,IAAIG,qBAAqB,CAACP,OAAO,EAAE;IAC/B;AACR;AACA;AACA;IACQ,OAAOQ,SAAS;EACpB,CAAC,MAAM;IACH;AACR;AACA;AACA;IACQ,IAAMC,WAAsC,GAAGC,MAAM,CAACC,MAAM,CACxD,CAAC,CAAC,EACFJ,qBAAqB,CAACN,YAAY,EAClC;MACI;AAChB;AACA;AACA;MACgBW,KAAK,EAAEtB,SAAS,CAACe,SAAS,CAACO,KAAK,CAAC;MACjCC,IAAI,EAAE1B,kBAAkB,CAAC,CAAC;MAC1B2B,YAAY,EAAExB,SAAS,CAACe,SAAS,CAACS,YAAY;IAClD,CACJ,CAAQ;IACRL,WAAW,CAACG,KAAK,CAACG,GAAG,GAAG1B,GAAG,CAAC,CAAC;IAC7BoB,WAAW,CAACI,IAAI,GAAGzB,cAAc,CAC7B,MAAMe,KAAK,CAACa,aAAa,EACzBX,SACJ,CAAC;IACD,OAAO;MACHI,WAAW;MACXQ,MAAM,EAAEV;IACZ,CAAC;EACL;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/replication-protocol/downstream.js b/dist/esm/replication-protocol/downstream.js deleted file mode 100644 index 29d502ab3cb..00000000000 --- a/dist/esm/replication-protocol/downstream.js +++ /dev/null @@ -1,325 +0,0 @@ -import { firstValueFrom, filter, mergeMap } from 'rxjs'; -import { newRxError } from 
"../rx-error.js"; -import { stackCheckpoints } from "../rx-storage-helper.js"; -import { appendToArray, createRevision, ensureNotFalsy, flatClone, getDefaultRevision, getHeightOfRevision, now, PROMISE_RESOLVE_VOID } from "../plugins/utils/index.js"; -import { getLastCheckpointDoc, setCheckpoint } from "./checkpoint.js"; -import { stripAttachmentsDataFromMetaWriteRows, writeDocToDocState } from "./helper.js"; -import { getAssumedMasterState, getMetaWriteRow } from "./meta-instance.js"; - -/** - * Writes all documents from the master to the fork. - * The downstream has two operation modes - * - Sync by iterating over the checkpoints via downstreamResyncOnce() - * - Sync by listening to the changestream via downstreamProcessChanges() - * We need this to be able to do initial syncs - * and still can have fast event based sync when the client is not offline. - */ -export async function startReplicationDownstream(state) { - if (state.input.initialCheckpoint && state.input.initialCheckpoint.downstream) { - var checkpointDoc = await getLastCheckpointDoc(state, 'down'); - if (!checkpointDoc) { - await setCheckpoint(state, 'down', state.input.initialCheckpoint.downstream); - } - } - var identifierHash = await state.input.hashFunction(state.input.identifier); - var replicationHandler = state.input.replicationHandler; - - // used to detect which tasks etc can in it at which order. - var timer = 0; - var openTasks = []; - function addNewTask(task) { - state.stats.down.addNewTask = state.stats.down.addNewTask + 1; - var taskWithTime = { - time: timer++, - task - }; - openTasks.push(taskWithTime); - state.streamQueue.down = state.streamQueue.down.then(() => { - var useTasks = []; - while (openTasks.length > 0) { - state.events.active.down.next(true); - var innerTaskWithTime = ensureNotFalsy(openTasks.shift()); - - /** - * If the task came in before the last time we started the pull - * from the master, then we can drop the task. 
- */ - if (innerTaskWithTime.time < lastTimeMasterChangesRequested) { - continue; - } - if (innerTaskWithTime.task === 'RESYNC') { - if (useTasks.length === 0) { - useTasks.push(innerTaskWithTime.task); - break; - } else { - break; - } - } - useTasks.push(innerTaskWithTime.task); - } - if (useTasks.length === 0) { - return; - } - if (useTasks[0] === 'RESYNC') { - return downstreamResyncOnce(); - } else { - return downstreamProcessChanges(useTasks); - } - }).then(() => { - state.events.active.down.next(false); - if (!state.firstSyncDone.down.getValue() && !state.events.canceled.getValue()) { - state.firstSyncDone.down.next(true); - } - }); - } - addNewTask('RESYNC'); - - /** - * If a write on the master happens, we have to trigger the downstream. - * Only do this if not canceled yet, otherwise firstValueFrom errors - * when running on a completed observable. - */ - if (!state.events.canceled.getValue()) { - var sub = replicationHandler.masterChangeStream$.pipe(mergeMap(async ev => { - /** - * While a push is running, we have to delay all incoming - * events from the server to not mix up the replication state. - */ - await firstValueFrom(state.events.active.up.pipe(filter(s => !s))); - return ev; - })).subscribe(task => { - state.stats.down.masterChangeStreamEmit = state.stats.down.masterChangeStreamEmit + 1; - addNewTask(task); - }); - firstValueFrom(state.events.canceled.pipe(filter(canceled => !!canceled))).then(() => sub.unsubscribe()); - } - - /** - * For faster performance, we directly start each write - * and then await all writes at the end. 
- */ - var lastTimeMasterChangesRequested = -1; - async function downstreamResyncOnce() { - state.stats.down.downstreamResyncOnce = state.stats.down.downstreamResyncOnce + 1; - if (state.events.canceled.getValue()) { - return; - } - state.checkpointQueue = state.checkpointQueue.then(() => getLastCheckpointDoc(state, 'down')); - var lastCheckpoint = await state.checkpointQueue; - var promises = []; - while (!state.events.canceled.getValue()) { - lastTimeMasterChangesRequested = timer++; - var downResult = await replicationHandler.masterChangesSince(lastCheckpoint, state.input.pullBatchSize); - if (downResult.documents.length === 0) { - break; - } - lastCheckpoint = stackCheckpoints([lastCheckpoint, downResult.checkpoint]); - promises.push(persistFromMaster(downResult.documents, lastCheckpoint)); - - /** - * By definition we stop pull when the pulled documents - * do not fill up the pullBatchSize because we - * can assume that the remote has no more documents. - */ - if (downResult.documents.length < state.input.pullBatchSize) { - break; - } - } - await Promise.all(promises); - } - function downstreamProcessChanges(tasks) { - state.stats.down.downstreamProcessChanges = state.stats.down.downstreamProcessChanges + 1; - var docsOfAllTasks = []; - var lastCheckpoint = null; - tasks.forEach(task => { - if (task === 'RESYNC') { - throw new Error('SNH'); - } - appendToArray(docsOfAllTasks, task.documents); - lastCheckpoint = stackCheckpoints([lastCheckpoint, task.checkpoint]); - }); - return persistFromMaster(docsOfAllTasks, ensureNotFalsy(lastCheckpoint)); - } - - /** - * It can happen that the calls to masterChangesSince() or the changeStream() - * are way faster then how fast the documents can be persisted. - * Therefore we merge all incoming downResults into the nonPersistedFromMaster object - * and process them together if possible. - * This often bundles up single writes and improves performance - * by processing the documents in bulks. 
- */ - var persistenceQueue = PROMISE_RESOLVE_VOID; - var nonPersistedFromMaster = { - docs: {} - }; - function persistFromMaster(docs, checkpoint) { - var primaryPath = state.primaryPath; - state.stats.down.persistFromMaster = state.stats.down.persistFromMaster + 1; - - /** - * Add the new docs to the non-persistent list - */ - docs.forEach(docData => { - var docId = docData[primaryPath]; - nonPersistedFromMaster.docs[docId] = docData; - }); - nonPersistedFromMaster.checkpoint = checkpoint; - - /** - * Run in the queue - * with all open documents from nonPersistedFromMaster. - */ - persistenceQueue = persistenceQueue.then(() => { - var downDocsById = nonPersistedFromMaster.docs; - nonPersistedFromMaster.docs = {}; - var useCheckpoint = nonPersistedFromMaster.checkpoint; - var docIds = Object.keys(downDocsById); - if (state.events.canceled.getValue() || docIds.length === 0) { - return PROMISE_RESOLVE_VOID; - } - var writeRowsToFork = []; - var writeRowsToForkById = {}; - var writeRowsToMeta = {}; - var useMetaWriteRows = []; - return Promise.all([state.input.forkInstance.findDocumentsById(docIds, true), getAssumedMasterState(state, docIds)]).then(([currentForkStateList, assumedMasterState]) => { - var currentForkState = new Map(); - currentForkStateList.forEach(doc => currentForkState.set(doc[primaryPath], doc)); - return Promise.all(docIds.map(async docId => { - var forkStateFullDoc = currentForkState.get(docId); - var forkStateDocData = forkStateFullDoc ? writeDocToDocState(forkStateFullDoc, state.hasAttachments, false) : undefined; - var masterState = downDocsById[docId]; - var assumedMaster = assumedMasterState[docId]; - if (assumedMaster && forkStateFullDoc && assumedMaster.metaDocument.isResolvedConflict === forkStateFullDoc._rev) { - /** - * The current fork state represents a resolved conflict - * that first must be send to the master in the upstream. - * All conflicts are resolved by the upstream. 
- */ - // return PROMISE_RESOLVE_VOID; - await state.streamQueue.up; - } - var isAssumedMasterEqualToForkState = !assumedMaster || !forkStateDocData ? false : await state.input.conflictHandler({ - realMasterState: assumedMaster.docData, - newDocumentState: forkStateDocData - }, 'downstream-check-if-equal-0').then(r => r.isEqual); - if (!isAssumedMasterEqualToForkState && assumedMaster && assumedMaster.docData._rev && forkStateFullDoc && forkStateFullDoc._meta[state.input.identifier] && getHeightOfRevision(forkStateFullDoc._rev) === forkStateFullDoc._meta[state.input.identifier]) { - isAssumedMasterEqualToForkState = true; - } - if (forkStateFullDoc && assumedMaster && isAssumedMasterEqualToForkState === false || forkStateFullDoc && !assumedMaster) { - /** - * We have a non-upstream-replicated - * local write to the fork. - * This means we ignore the downstream of this document - * because anyway the upstream will first resolve the conflict. - */ - return PROMISE_RESOLVE_VOID; - } - var areStatesExactlyEqual = !forkStateDocData ? false : await state.input.conflictHandler({ - realMasterState: masterState, - newDocumentState: forkStateDocData - }, 'downstream-check-if-equal-1').then(r => r.isEqual); - if (forkStateDocData && areStatesExactlyEqual) { - /** - * Document states are exactly equal. - * This can happen when the replication is shut down - * unexpected like when the user goes offline. - * - * Only when the assumedMaster is different from the forkState, - * we have to patch the document in the meta instance. - */ - if (!assumedMaster || isAssumedMasterEqualToForkState === false) { - useMetaWriteRows.push(await getMetaWriteRow(state, forkStateDocData, assumedMaster ? assumedMaster.metaDocument : undefined)); - } - return PROMISE_RESOLVE_VOID; - } - - /** - * All other master states need to be written to the forkInstance - * and metaInstance. - */ - var newForkState = Object.assign({}, masterState, forkStateFullDoc ? 
{ - _meta: flatClone(forkStateFullDoc._meta), - _attachments: state.hasAttachments && masterState._attachments ? masterState._attachments : {}, - _rev: getDefaultRevision() - } : { - _meta: { - lwt: now() - }, - _rev: getDefaultRevision(), - _attachments: state.hasAttachments && masterState._attachments ? masterState._attachments : {} - }); - /** - * If the remote works with revisions, - * we store the height of the next fork-state revision - * inside of the documents meta data. - * By doing so we can filter it out in the upstream - * and detect the document as being equal to master or not. - * This is used for example in the CouchDB replication plugin. - */ - if (masterState._rev) { - var nextRevisionHeight = !forkStateFullDoc ? 1 : getHeightOfRevision(forkStateFullDoc._rev) + 1; - newForkState._meta[state.input.identifier] = nextRevisionHeight; - if (state.input.keepMeta) { - newForkState._rev = masterState._rev; - } - } - if (state.input.keepMeta && masterState._meta) { - newForkState._meta = masterState._meta; - } - var forkWriteRow = { - previous: forkStateFullDoc, - document: newForkState - }; - forkWriteRow.document._rev = forkWriteRow.document._rev ? forkWriteRow.document._rev : createRevision(identifierHash, forkWriteRow.previous); - writeRowsToFork.push(forkWriteRow); - writeRowsToForkById[docId] = forkWriteRow; - writeRowsToMeta[docId] = await getMetaWriteRow(state, masterState, assumedMaster ? 
assumedMaster.metaDocument : undefined); - })); - }).then(async () => { - if (writeRowsToFork.length > 0) { - return state.input.forkInstance.bulkWrite(writeRowsToFork, await state.downstreamBulkWriteFlag).then(forkWriteResult => { - forkWriteResult.success.forEach(doc => { - var docId = doc[primaryPath]; - state.events.processed.down.next(writeRowsToForkById[docId]); - useMetaWriteRows.push(writeRowsToMeta[docId]); - }); - forkWriteResult.error.forEach(error => { - /** - * We do not have to care about downstream conflict errors here - * because on conflict, it will be solved locally and result in another write. - */ - if (error.status === 409) { - return; - } - // other non-conflict errors must be handled - state.events.error.next(newRxError('RC_PULL', { - writeError: error - })); - }); - }); - } - }).then(() => { - if (useMetaWriteRows.length > 0) { - return state.input.metaInstance.bulkWrite(stripAttachmentsDataFromMetaWriteRows(state, useMetaWriteRows), 'replication-down-write-meta').then(metaWriteResult => { - metaWriteResult.error.forEach(writeError => { - state.events.error.next(newRxError('RC_PULL', { - id: writeError.documentId, - writeError - })); - }); - }); - } - }).then(() => { - /** - * For better performance we do not await checkpoint writes, - * but to ensure order on parallel checkpoint writes, - * we have to use a queue. 
- */ - setCheckpoint(state, 'down', useCheckpoint); - }); - }).catch(unhandledError => state.events.error.next(unhandledError)); - return persistenceQueue; - } -} -//# sourceMappingURL=downstream.js.map \ No newline at end of file diff --git a/dist/esm/replication-protocol/downstream.js.map b/dist/esm/replication-protocol/downstream.js.map deleted file mode 100644 index 8a3ad2dddea..00000000000 --- a/dist/esm/replication-protocol/downstream.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"downstream.js","names":["firstValueFrom","filter","mergeMap","newRxError","stackCheckpoints","appendToArray","createRevision","ensureNotFalsy","flatClone","getDefaultRevision","getHeightOfRevision","now","PROMISE_RESOLVE_VOID","getLastCheckpointDoc","setCheckpoint","stripAttachmentsDataFromMetaWriteRows","writeDocToDocState","getAssumedMasterState","getMetaWriteRow","startReplicationDownstream","state","input","initialCheckpoint","downstream","checkpointDoc","identifierHash","hashFunction","identifier","replicationHandler","timer","openTasks","addNewTask","task","stats","down","taskWithTime","time","push","streamQueue","then","useTasks","length","events","active","next","innerTaskWithTime","shift","lastTimeMasterChangesRequested","downstreamResyncOnce","downstreamProcessChanges","firstSyncDone","getValue","canceled","sub","masterChangeStream$","pipe","ev","up","s","subscribe","masterChangeStreamEmit","unsubscribe","checkpointQueue","lastCheckpoint","promises","downResult","masterChangesSince","pullBatchSize","documents","checkpoint","persistFromMaster","Promise","all","tasks","docsOfAllTasks","forEach","Error","persistenceQueue","nonPersistedFromMaster","docs","primaryPath","docData","docId","downDocsById","useCheckpoint","docIds","Object","keys","writeRowsToFork","writeRowsToForkById","writeRowsToMeta","useMetaWriteRows","forkInstance","findDocumentsById","currentForkStateList","assumedMasterState","currentForkState","Map","doc","set","map","forkStateFullDoc","get","forkSt
ateDocData","hasAttachments","undefined","masterState","assumedMaster","metaDocument","isResolvedConflict","_rev","isAssumedMasterEqualToForkState","conflictHandler","realMasterState","newDocumentState","r","isEqual","_meta","areStatesExactlyEqual","newForkState","assign","_attachments","lwt","nextRevisionHeight","keepMeta","forkWriteRow","previous","document","bulkWrite","downstreamBulkWriteFlag","forkWriteResult","success","processed","error","status","writeError","metaInstance","metaWriteResult","id","documentId","catch","unhandledError"],"sources":["../../../src/replication-protocol/downstream.ts"],"sourcesContent":["import {\n firstValueFrom,\n filter,\n mergeMap\n} from 'rxjs';\nimport { newRxError } from '../rx-error.ts';\nimport { stackCheckpoints } from '../rx-storage-helper.ts';\nimport type {\n RxStorageInstanceReplicationState,\n BulkWriteRow,\n BulkWriteRowById,\n RxStorageReplicationMeta,\n RxDocumentData,\n ById,\n WithDeleted,\n DocumentsWithCheckpoint,\n WithDeletedAndAttachments\n} from '../types/index.d.ts';\nimport {\n appendToArray,\n createRevision,\n ensureNotFalsy,\n flatClone,\n getDefaultRevision,\n getHeightOfRevision,\n now,\n PROMISE_RESOLVE_VOID\n} from '../plugins/utils/index.ts';\nimport {\n getLastCheckpointDoc,\n setCheckpoint\n} from './checkpoint.ts';\nimport {\n stripAttachmentsDataFromMetaWriteRows,\n writeDocToDocState\n} from './helper.ts';\nimport {\n getAssumedMasterState,\n getMetaWriteRow\n} from './meta-instance.ts';\n\n/**\n * Writes all documents from the master to the fork.\n * The downstream has two operation modes\n * - Sync by iterating over the checkpoints via downstreamResyncOnce()\n * - Sync by listening to the changestream via downstreamProcessChanges()\n * We need this to be able to do initial syncs\n * and still can have fast event based sync when the client is not offline.\n */\nexport async function startReplicationDownstream(\n state: RxStorageInstanceReplicationState\n) {\n if (\n 
state.input.initialCheckpoint &&\n state.input.initialCheckpoint.downstream\n ) {\n const checkpointDoc = await getLastCheckpointDoc(state, 'down');\n if (!checkpointDoc) {\n await setCheckpoint(\n state,\n 'down',\n state.input.initialCheckpoint.downstream\n );\n }\n }\n\n const identifierHash = await state.input.hashFunction(state.input.identifier);\n const replicationHandler = state.input.replicationHandler;\n\n // used to detect which tasks etc can in it at which order.\n let timer = 0;\n\n\n type Task = DocumentsWithCheckpoint | 'RESYNC';\n type TaskWithTime = {\n time: number;\n task: Task;\n };\n const openTasks: TaskWithTime[] = [];\n\n\n function addNewTask(task: Task): void {\n state.stats.down.addNewTask = state.stats.down.addNewTask + 1;\n const taskWithTime = {\n time: timer++,\n task\n };\n openTasks.push(taskWithTime);\n state.streamQueue.down = state.streamQueue.down\n .then(() => {\n const useTasks: Task[] = [];\n while (openTasks.length > 0) {\n state.events.active.down.next(true);\n const innerTaskWithTime = ensureNotFalsy(openTasks.shift());\n\n /**\n * If the task came in before the last time we started the pull\n * from the master, then we can drop the task.\n */\n if (innerTaskWithTime.time < lastTimeMasterChangesRequested) {\n continue;\n }\n\n if (innerTaskWithTime.task === 'RESYNC') {\n if (useTasks.length === 0) {\n useTasks.push(innerTaskWithTime.task);\n break;\n } else {\n break;\n }\n }\n\n useTasks.push(innerTaskWithTime.task);\n }\n if (useTasks.length === 0) {\n return;\n }\n\n if (useTasks[0] === 'RESYNC') {\n return downstreamResyncOnce();\n } else {\n return downstreamProcessChanges(useTasks);\n }\n }).then(() => {\n state.events.active.down.next(false);\n if (\n !state.firstSyncDone.down.getValue() &&\n !state.events.canceled.getValue()\n ) {\n state.firstSyncDone.down.next(true);\n }\n });\n }\n addNewTask('RESYNC');\n\n /**\n * If a write on the master happens, we have to trigger the downstream.\n * Only do this if not 
canceled yet, otherwise firstValueFrom errors\n * when running on a completed observable.\n */\n if (!state.events.canceled.getValue()) {\n const sub = replicationHandler\n .masterChangeStream$\n .pipe(\n mergeMap(async (ev) => {\n /**\n * While a push is running, we have to delay all incoming\n * events from the server to not mix up the replication state.\n */\n await firstValueFrom(\n state.events.active.up.pipe(filter(s => !s))\n );\n return ev;\n })\n )\n .subscribe((task: Task) => {\n state.stats.down.masterChangeStreamEmit = state.stats.down.masterChangeStreamEmit + 1;\n addNewTask(task);\n });\n firstValueFrom(\n state.events.canceled.pipe(\n filter(canceled => !!canceled)\n )\n ).then(() => sub.unsubscribe());\n }\n\n\n /**\n * For faster performance, we directly start each write\n * and then await all writes at the end.\n */\n let lastTimeMasterChangesRequested: number = -1;\n async function downstreamResyncOnce() {\n state.stats.down.downstreamResyncOnce = state.stats.down.downstreamResyncOnce + 1;\n if (state.events.canceled.getValue()) {\n return;\n }\n\n state.checkpointQueue = state.checkpointQueue.then(() => getLastCheckpointDoc(state, 'down'));\n let lastCheckpoint: CheckpointType = await state.checkpointQueue;\n\n\n const promises: Promise[] = [];\n while (!state.events.canceled.getValue()) {\n lastTimeMasterChangesRequested = timer++;\n const downResult = await replicationHandler.masterChangesSince(\n lastCheckpoint,\n state.input.pullBatchSize\n );\n\n if (downResult.documents.length === 0) {\n break;\n }\n\n lastCheckpoint = stackCheckpoints([lastCheckpoint, downResult.checkpoint]);\n\n promises.push(\n persistFromMaster(\n downResult.documents,\n lastCheckpoint\n )\n );\n\n /**\n * By definition we stop pull when the pulled documents\n * do not fill up the pullBatchSize because we\n * can assume that the remote has no more documents.\n */\n if (downResult.documents.length < state.input.pullBatchSize) {\n break;\n }\n\n }\n await 
Promise.all(promises);\n }\n\n\n function downstreamProcessChanges(tasks: Task[]) {\n state.stats.down.downstreamProcessChanges = state.stats.down.downstreamProcessChanges + 1;\n const docsOfAllTasks: WithDeleted[] = [];\n let lastCheckpoint: CheckpointType | undefined = null as any;\n\n tasks.forEach(task => {\n if (task === 'RESYNC') {\n throw new Error('SNH');\n }\n appendToArray(docsOfAllTasks, task.documents);\n lastCheckpoint = stackCheckpoints([lastCheckpoint, task.checkpoint]);\n });\n return persistFromMaster(\n docsOfAllTasks,\n ensureNotFalsy(lastCheckpoint)\n );\n }\n\n\n /**\n * It can happen that the calls to masterChangesSince() or the changeStream()\n * are way faster then how fast the documents can be persisted.\n * Therefore we merge all incoming downResults into the nonPersistedFromMaster object\n * and process them together if possible.\n * This often bundles up single writes and improves performance\n * by processing the documents in bulks.\n */\n let persistenceQueue = PROMISE_RESOLVE_VOID;\n const nonPersistedFromMaster: {\n checkpoint?: CheckpointType;\n docs: ById>;\n } = {\n docs: {}\n };\n\n function persistFromMaster(\n docs: WithDeleted[],\n checkpoint: CheckpointType\n ): Promise {\n const primaryPath = state.primaryPath;\n state.stats.down.persistFromMaster = state.stats.down.persistFromMaster + 1;\n\n /**\n * Add the new docs to the non-persistent list\n */\n docs.forEach(docData => {\n const docId: string = (docData as any)[primaryPath];\n nonPersistedFromMaster.docs[docId] = docData;\n });\n nonPersistedFromMaster.checkpoint = checkpoint;\n\n /**\n * Run in the queue\n * with all open documents from nonPersistedFromMaster.\n */\n persistenceQueue = persistenceQueue.then(() => {\n\n const downDocsById: ById> = nonPersistedFromMaster.docs;\n nonPersistedFromMaster.docs = {};\n const useCheckpoint = nonPersistedFromMaster.checkpoint;\n const docIds = Object.keys(downDocsById);\n\n if (\n state.events.canceled.getValue() ||\n 
docIds.length === 0\n ) {\n return PROMISE_RESOLVE_VOID;\n }\n\n const writeRowsToFork: BulkWriteRow[] = [];\n const writeRowsToForkById: ById> = {};\n const writeRowsToMeta: BulkWriteRowById> = {};\n const useMetaWriteRows: BulkWriteRow>[] = [];\n\n return Promise.all([\n state.input.forkInstance.findDocumentsById(docIds, true),\n getAssumedMasterState(\n state,\n docIds\n )\n ]).then(([\n currentForkStateList,\n assumedMasterState\n ]) => {\n const currentForkState = new Map>();\n currentForkStateList.forEach(doc => currentForkState.set((doc as any)[primaryPath], doc));\n return Promise.all(\n docIds.map(async (docId) => {\n const forkStateFullDoc: RxDocumentData | undefined = currentForkState.get(docId);\n const forkStateDocData: WithDeletedAndAttachments | undefined = forkStateFullDoc\n ? writeDocToDocState(forkStateFullDoc, state.hasAttachments, false)\n : undefined\n ;\n const masterState = downDocsById[docId];\n const assumedMaster = assumedMasterState[docId];\n\n if (\n assumedMaster &&\n forkStateFullDoc &&\n assumedMaster.metaDocument.isResolvedConflict === forkStateFullDoc._rev\n ) {\n /**\n * The current fork state represents a resolved conflict\n * that first must be send to the master in the upstream.\n * All conflicts are resolved by the upstream.\n */\n // return PROMISE_RESOLVE_VOID;\n await state.streamQueue.up;\n }\n\n let isAssumedMasterEqualToForkState = !assumedMaster || !forkStateDocData ?\n false :\n await state.input.conflictHandler({\n realMasterState: assumedMaster.docData,\n newDocumentState: forkStateDocData\n }, 'downstream-check-if-equal-0').then(r => r.isEqual);\n if (\n !isAssumedMasterEqualToForkState &&\n (\n assumedMaster &&\n (assumedMaster.docData as any)._rev &&\n forkStateFullDoc &&\n forkStateFullDoc._meta[state.input.identifier] &&\n getHeightOfRevision(forkStateFullDoc._rev) === forkStateFullDoc._meta[state.input.identifier]\n )\n ) {\n isAssumedMasterEqualToForkState = true;\n }\n if (\n (\n forkStateFullDoc &&\n 
assumedMaster &&\n isAssumedMasterEqualToForkState === false\n ) ||\n (\n forkStateFullDoc && !assumedMaster\n )\n ) {\n /**\n * We have a non-upstream-replicated\n * local write to the fork.\n * This means we ignore the downstream of this document\n * because anyway the upstream will first resolve the conflict.\n */\n return PROMISE_RESOLVE_VOID;\n }\n\n const areStatesExactlyEqual = !forkStateDocData\n ? false\n : await state.input.conflictHandler(\n {\n realMasterState: masterState,\n newDocumentState: forkStateDocData\n },\n 'downstream-check-if-equal-1'\n ).then(r => r.isEqual);\n if (\n forkStateDocData &&\n areStatesExactlyEqual\n ) {\n /**\n * Document states are exactly equal.\n * This can happen when the replication is shut down\n * unexpected like when the user goes offline.\n *\n * Only when the assumedMaster is different from the forkState,\n * we have to patch the document in the meta instance.\n */\n if (\n !assumedMaster ||\n isAssumedMasterEqualToForkState === false\n ) {\n useMetaWriteRows.push(\n await getMetaWriteRow(\n state,\n forkStateDocData,\n assumedMaster ? assumedMaster.metaDocument : undefined\n )\n );\n }\n return PROMISE_RESOLVE_VOID;\n }\n\n /**\n * All other master states need to be written to the forkInstance\n * and metaInstance.\n */\n const newForkState = Object.assign(\n {},\n masterState,\n forkStateFullDoc ? {\n _meta: flatClone(forkStateFullDoc._meta),\n _attachments: state.hasAttachments && masterState._attachments ? masterState._attachments : {},\n _rev: getDefaultRevision()\n } : {\n _meta: {\n lwt: now()\n },\n _rev: getDefaultRevision(),\n _attachments: state.hasAttachments && masterState._attachments ? 
masterState._attachments : {}\n }\n );\n /**\n * If the remote works with revisions,\n * we store the height of the next fork-state revision\n * inside of the documents meta data.\n * By doing so we can filter it out in the upstream\n * and detect the document as being equal to master or not.\n * This is used for example in the CouchDB replication plugin.\n */\n if ((masterState as any)._rev) {\n const nextRevisionHeight = !forkStateFullDoc ? 1 : getHeightOfRevision(forkStateFullDoc._rev) + 1;\n newForkState._meta[state.input.identifier] = nextRevisionHeight;\n if (state.input.keepMeta) {\n newForkState._rev = (masterState as any)._rev;\n }\n }\n if (\n state.input.keepMeta &&\n (masterState as any)._meta\n ) {\n newForkState._meta = (masterState as any)._meta;\n }\n\n const forkWriteRow = {\n previous: forkStateFullDoc,\n document: newForkState\n };\n\n forkWriteRow.document._rev = forkWriteRow.document._rev ? forkWriteRow.document._rev : createRevision(\n identifierHash,\n forkWriteRow.previous\n );\n writeRowsToFork.push(forkWriteRow);\n writeRowsToForkById[docId] = forkWriteRow;\n writeRowsToMeta[docId] = await getMetaWriteRow(\n state,\n masterState,\n assumedMaster ? 
assumedMaster.metaDocument : undefined\n );\n })\n );\n }).then(async () => {\n if (writeRowsToFork.length > 0) {\n return state.input.forkInstance.bulkWrite(\n writeRowsToFork,\n await state.downstreamBulkWriteFlag\n ).then((forkWriteResult) => {\n forkWriteResult.success.forEach(doc => {\n const docId = (doc as any)[primaryPath];\n state.events.processed.down.next(writeRowsToForkById[docId]);\n useMetaWriteRows.push(writeRowsToMeta[docId]);\n });\n forkWriteResult.error.forEach(error => {\n /**\n * We do not have to care about downstream conflict errors here\n * because on conflict, it will be solved locally and result in another write.\n */\n if (error.status === 409) {\n return;\n }\n // other non-conflict errors must be handled\n state.events.error.next(newRxError('RC_PULL', {\n writeError: error\n }));\n });\n });\n }\n }).then(() => {\n if (useMetaWriteRows.length > 0) {\n return state.input.metaInstance.bulkWrite(\n stripAttachmentsDataFromMetaWriteRows(state, useMetaWriteRows),\n 'replication-down-write-meta'\n ).then(metaWriteResult => {\n metaWriteResult.error\n .forEach(writeError => {\n state.events.error.next(newRxError('RC_PULL', {\n id: writeError.documentId,\n writeError\n }));\n });\n });\n }\n }).then(() => {\n /**\n * For better performance we do not await checkpoint writes,\n * but to ensure order on parallel checkpoint writes,\n * we have to use a queue.\n */\n setCheckpoint(\n state,\n 'down',\n useCheckpoint\n );\n });\n }).catch(unhandledError => state.events.error.next(unhandledError));\n return persistenceQueue;\n 
}\n}\n"],"mappings":"AAAA,SACIA,cAAc,EACdC,MAAM,EACNC,QAAQ,QACL,MAAM;AACb,SAASC,UAAU,QAAQ,gBAAgB;AAC3C,SAASC,gBAAgB,QAAQ,yBAAyB;AAY1D,SACIC,aAAa,EACbC,cAAc,EACdC,cAAc,EACdC,SAAS,EACTC,kBAAkB,EAClBC,mBAAmB,EACnBC,GAAG,EACHC,oBAAoB,QACjB,2BAA2B;AAClC,SACIC,oBAAoB,EACpBC,aAAa,QACV,iBAAiB;AACxB,SACIC,qCAAqC,EACrCC,kBAAkB,QACf,aAAa;AACpB,SACIC,qBAAqB,EACrBC,eAAe,QACZ,oBAAoB;;AAE3B;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,eAAeC,0BAA0BA,CAC5CC,KAAmD,EACrD;EACE,IACIA,KAAK,CAACC,KAAK,CAACC,iBAAiB,IAC7BF,KAAK,CAACC,KAAK,CAACC,iBAAiB,CAACC,UAAU,EAC1C;IACE,IAAMC,aAAa,GAAG,MAAMX,oBAAoB,CAACO,KAAK,EAAE,MAAM,CAAC;IAC/D,IAAI,CAACI,aAAa,EAAE;MAChB,MAAMV,aAAa,CACfM,KAAK,EACL,MAAM,EACNA,KAAK,CAACC,KAAK,CAACC,iBAAiB,CAACC,UAClC,CAAC;IACL;EACJ;EAEA,IAAME,cAAc,GAAG,MAAML,KAAK,CAACC,KAAK,CAACK,YAAY,CAACN,KAAK,CAACC,KAAK,CAACM,UAAU,CAAC;EAC7E,IAAMC,kBAAkB,GAAGR,KAAK,CAACC,KAAK,CAACO,kBAAkB;;EAEzD;EACA,IAAIC,KAAK,GAAG,CAAC;EAQb,IAAMC,SAAyB,GAAG,EAAE;EAGpC,SAASC,UAAUA,CAACC,IAAU,EAAQ;IAClCZ,KAAK,CAACa,KAAK,CAACC,IAAI,CAACH,UAAU,GAAGX,KAAK,CAACa,KAAK,CAACC,IAAI,CAACH,UAAU,GAAG,CAAC;IAC7D,IAAMI,YAAY,GAAG;MACjBC,IAAI,EAAEP,KAAK,EAAE;MACbG;IACJ,CAAC;IACDF,SAAS,CAACO,IAAI,CAACF,YAAY,CAAC;IAC5Bf,KAAK,CAACkB,WAAW,CAACJ,IAAI,GAAGd,KAAK,CAACkB,WAAW,CAACJ,IAAI,CAC1CK,IAAI,CAAC,MAAM;MACR,IAAMC,QAAgB,GAAG,EAAE;MAC3B,OAAOV,SAAS,CAACW,MAAM,GAAG,CAAC,EAAE;QACzBrB,KAAK,CAACsB,MAAM,CAACC,MAAM,CAACT,IAAI,CAACU,IAAI,CAAC,IAAI,CAAC;QACnC,IAAMC,iBAAiB,GAAGtC,cAAc,CAACuB,SAAS,CAACgB,KAAK,CAAC,CAAC,CAAC;;QAE3D;AACpB;AACA;AACA;QACoB,IAAID,iBAAiB,CAACT,IAAI,GAAGW,8BAA8B,EAAE;UACzD;QACJ;QAEA,IAAIF,iBAAiB,CAACb,IAAI,KAAK,QAAQ,EAAE;UACrC,IAAIQ,QAAQ,CAACC,MAAM,KAAK,CAAC,EAAE;YACvBD,QAAQ,CAACH,IAAI,CAACQ,iBAAiB,CAACb,IAAI,CAAC;YACrC;UACJ,CAAC,MAAM;YACH;UACJ;QACJ;QAEAQ,QAAQ,CAACH,IAAI,CAACQ,iBAAiB,CAACb,IAAI,CAAC;MACzC;MACA,IAAIQ,QAAQ,CAACC,MAAM,KAAK,CAAC,EAAE;QACvB;MACJ;MAEA,IAAID,QAAQ,CAAC,CAAC,CAAC,KAAK,QAAQ,EAAE;QAC1B,OAAOQ,oBAAoB,CAAC,CAAC;MACjC,CAAC,MAAM;QACH,OAAOC,wBAAwB,CAACT,QAAQ,CAAC;MAC7C;IACJ,CAAC,CAAC,CAA
CD,IAAI,CAAC,MAAM;MACVnB,KAAK,CAACsB,MAAM,CAACC,MAAM,CAACT,IAAI,CAACU,IAAI,CAAC,KAAK,CAAC;MACpC,IACI,CAACxB,KAAK,CAAC8B,aAAa,CAAChB,IAAI,CAACiB,QAAQ,CAAC,CAAC,IACpC,CAAC/B,KAAK,CAACsB,MAAM,CAACU,QAAQ,CAACD,QAAQ,CAAC,CAAC,EACnC;QACE/B,KAAK,CAAC8B,aAAa,CAAChB,IAAI,CAACU,IAAI,CAAC,IAAI,CAAC;MACvC;IACJ,CAAC,CAAC;EACV;EACAb,UAAU,CAAC,QAAQ,CAAC;;EAEpB;AACJ;AACA;AACA;AACA;EACI,IAAI,CAACX,KAAK,CAACsB,MAAM,CAACU,QAAQ,CAACD,QAAQ,CAAC,CAAC,EAAE;IACnC,IAAME,GAAG,GAAGzB,kBAAkB,CACzB0B,mBAAmB,CACnBC,IAAI,CACDrD,QAAQ,CAAC,MAAOsD,EAAE,IAAK;MACnB;AACpB;AACA;AACA;MACoB,MAAMxD,cAAc,CAChBoB,KAAK,CAACsB,MAAM,CAACC,MAAM,CAACc,EAAE,CAACF,IAAI,CAACtD,MAAM,CAACyD,CAAC,IAAI,CAACA,CAAC,CAAC,CAC/C,CAAC;MACD,OAAOF,EAAE;IACb,CAAC,CACL,CAAC,CACAG,SAAS,CAAE3B,IAAU,IAAK;MACvBZ,KAAK,CAACa,KAAK,CAACC,IAAI,CAAC0B,sBAAsB,GAAGxC,KAAK,CAACa,KAAK,CAACC,IAAI,CAAC0B,sBAAsB,GAAG,CAAC;MACrF7B,UAAU,CAACC,IAAI,CAAC;IACpB,CAAC,CAAC;IACNhC,cAAc,CACVoB,KAAK,CAACsB,MAAM,CAACU,QAAQ,CAACG,IAAI,CACtBtD,MAAM,CAACmD,QAAQ,IAAI,CAAC,CAACA,QAAQ,CACjC,CACJ,CAAC,CAACb,IAAI,CAAC,MAAMc,GAAG,CAACQ,WAAW,CAAC,CAAC,CAAC;EACnC;;EAGA;AACJ;AACA;AACA;EACI,IAAId,8BAAsC,GAAG,CAAC,CAAC;EAC/C,eAAeC,oBAAoBA,CAAA,EAAG;IAClC5B,KAAK,CAACa,KAAK,CAACC,IAAI,CAACc,oBAAoB,GAAG5B,KAAK,CAACa,KAAK,CAACC,IAAI,CAACc,oBAAoB,GAAG,CAAC;IACjF,IAAI5B,KAAK,CAACsB,MAAM,CAACU,QAAQ,CAACD,QAAQ,CAAC,CAAC,EAAE;MAClC;IACJ;IAEA/B,KAAK,CAAC0C,eAAe,GAAG1C,KAAK,CAAC0C,eAAe,CAACvB,IAAI,CAAC,MAAM1B,oBAAoB,CAACO,KAAK,EAAE,MAAM,CAAC,CAAC;IAC7F,IAAI2C,cAA8B,GAAG,MAAM3C,KAAK,CAAC0C,eAAe;IAGhE,IAAME,QAAwB,GAAG,EAAE;IACnC,OAAO,CAAC5C,KAAK,CAACsB,MAAM,CAACU,QAAQ,CAACD,QAAQ,CAAC,CAAC,EAAE;MACtCJ,8BAA8B,GAAGlB,KAAK,EAAE;MACxC,IAAMoC,UAAU,GAAG,MAAMrC,kBAAkB,CAACsC,kBAAkB,CAC1DH,cAAc,EACd3C,KAAK,CAACC,KAAK,CAAC8C,aAChB,CAAC;MAED,IAAIF,UAAU,CAACG,SAAS,CAAC3B,MAAM,KAAK,CAAC,EAAE;QACnC;MACJ;MAEAsB,cAAc,GAAG3D,gBAAgB,CAAC,CAAC2D,cAAc,EAAEE,UAAU,CAACI,UAAU,CAAC,CAAC;MAE1EL,QAAQ,CAAC3B,IAAI,CACTiC,iBAAiB,CACbL,UAAU,CAACG,SAAS,EACpBL,cACJ,CACJ,CAAC;;MAED;AACZ;AACA;AACA;AACA;MACY,IAAIE,UAA
U,CAACG,SAAS,CAAC3B,MAAM,GAAGrB,KAAK,CAACC,KAAK,CAAC8C,aAAa,EAAE;QACzD;MACJ;IAEJ;IACA,MAAMI,OAAO,CAACC,GAAG,CAACR,QAAQ,CAAC;EAC/B;EAGA,SAASf,wBAAwBA,CAACwB,KAAa,EAAE;IAC7CrD,KAAK,CAACa,KAAK,CAACC,IAAI,CAACe,wBAAwB,GAAG7B,KAAK,CAACa,KAAK,CAACC,IAAI,CAACe,wBAAwB,GAAG,CAAC;IACzF,IAAMyB,cAAwC,GAAG,EAAE;IACnD,IAAIX,cAA0C,GAAG,IAAW;IAE5DU,KAAK,CAACE,OAAO,CAAC3C,IAAI,IAAI;MAClB,IAAIA,IAAI,KAAK,QAAQ,EAAE;QACnB,MAAM,IAAI4C,KAAK,CAAC,KAAK,CAAC;MAC1B;MACAvE,aAAa,CAACqE,cAAc,EAAE1C,IAAI,CAACoC,SAAS,CAAC;MAC7CL,cAAc,GAAG3D,gBAAgB,CAAC,CAAC2D,cAAc,EAAE/B,IAAI,CAACqC,UAAU,CAAC,CAAC;IACxE,CAAC,CAAC;IACF,OAAOC,iBAAiB,CACpBI,cAAc,EACdnE,cAAc,CAACwD,cAAc,CACjC,CAAC;EACL;;EAGA;AACJ;AACA;AACA;AACA;AACA;AACA;AACA;EACI,IAAIc,gBAAgB,GAAGjE,oBAAoB;EAC3C,IAAMkE,sBAGL,GAAG;IACAC,IAAI,EAAE,CAAC;EACX,CAAC;EAED,SAAST,iBAAiBA,CACtBS,IAA8B,EAC9BV,UAA0B,EACb;IACb,IAAMW,WAAW,GAAG5D,KAAK,CAAC4D,WAAW;IACrC5D,KAAK,CAACa,KAAK,CAACC,IAAI,CAACoC,iBAAiB,GAAGlD,KAAK,CAACa,KAAK,CAACC,IAAI,CAACoC,iBAAiB,GAAG,CAAC;;IAE3E;AACR;AACA;IACQS,IAAI,CAACJ,OAAO,CAACM,OAAO,IAAI;MACpB,IAAMC,KAAa,GAAID,OAAO,CAASD,WAAW,CAAC;MACnDF,sBAAsB,CAACC,IAAI,CAACG,KAAK,CAAC,GAAGD,OAAO;IAChD,CAAC,CAAC;IACFH,sBAAsB,CAACT,UAAU,GAAGA,UAAU;;IAE9C;AACR;AACA;AACA;IACQQ,gBAAgB,GAAGA,gBAAgB,CAACtC,IAAI,CAAC,MAAM;MAE3C,IAAM4C,YAAwD,GAAGL,sBAAsB,CAACC,IAAI;MAC5FD,sBAAsB,CAACC,IAAI,GAAG,CAAC,CAAC;MAChC,IAAMK,aAAa,GAAGN,sBAAsB,CAACT,UAAU;MACvD,IAAMgB,MAAM,GAAGC,MAAM,CAACC,IAAI,CAACJ,YAAY,CAAC;MAExC,IACI/D,KAAK,CAACsB,MAAM,CAACU,QAAQ,CAACD,QAAQ,CAAC,CAAC,IAChCkC,MAAM,CAAC5C,MAAM,KAAK,CAAC,EACrB;QACE,OAAO7B,oBAAoB;MAC/B;MAEA,IAAM4E,eAA0C,GAAG,EAAE;MACrD,IAAMC,mBAAkD,GAAG,CAAC,CAAC;MAC7D,IAAMC,eAAsF,GAAG,CAAC,CAAC;MACjG,IAAMC,gBAAqF,GAAG,EAAE;MAEhG,OAAOpB,OAAO,CAACC,GAAG,CAAC,CACfpD,KAAK,CAACC,KAAK,CAACuE,YAAY,CAACC,iBAAiB,CAACR,MAAM,EAAE,IAAI,CAAC,EACxDpE,qBAAqB,CACjBG,KAAK,EACLiE,MACJ,CAAC,CACJ,CAAC,CAAC9C,IAAI,CAAC,CAAC,CACLuD,oBAAoB,EACpBC,kBAAkB,CACrB,KAAK;QACF,IAAMC,gBAAgB,GAAG,IAAIC,GAAG,CAAoC,CAAC;QACrEH,oBAAoB,CAACnB,OAAO,CAACuB,GAAG,IAAIF
,gBAAgB,CAACG,GAAG,CAAED,GAAG,CAASlB,WAAW,CAAC,EAAEkB,GAAG,CAAC,CAAC;QACzF,OAAO3B,OAAO,CAACC,GAAG,CACda,MAAM,CAACe,GAAG,CAAC,MAAOlB,KAAK,IAAK;UACxB,IAAMmB,gBAAuD,GAAGL,gBAAgB,CAACM,GAAG,CAACpB,KAAK,CAAC;UAC3F,IAAMqB,gBAAkE,GAAGF,gBAAgB,GACrFrF,kBAAkB,CAACqF,gBAAgB,EAAEjF,KAAK,CAACoF,cAAc,EAAE,KAAK,CAAC,GACjEC,SAAS;UAEf,IAAMC,WAAW,GAAGvB,YAAY,CAACD,KAAK,CAAC;UACvC,IAAMyB,aAAa,GAAGZ,kBAAkB,CAACb,KAAK,CAAC;UAE/C,IACIyB,aAAa,IACbN,gBAAgB,IAChBM,aAAa,CAACC,YAAY,CAACC,kBAAkB,KAAKR,gBAAgB,CAACS,IAAI,EACzE;YACE;AAC5B;AACA;AACA;AACA;YAC4B;YACA,MAAM1F,KAAK,CAACkB,WAAW,CAACmB,EAAE;UAC9B;UAEA,IAAIsD,+BAA+B,GAAG,CAACJ,aAAa,IAAI,CAACJ,gBAAgB,GACrE,KAAK,GACL,MAAMnF,KAAK,CAACC,KAAK,CAAC2F,eAAe,CAAC;YAC9BC,eAAe,EAAEN,aAAa,CAAC1B,OAAO;YACtCiC,gBAAgB,EAAEX;UACtB,CAAC,EAAE,6BAA6B,CAAC,CAAChE,IAAI,CAAC4E,CAAC,IAAIA,CAAC,CAACC,OAAO,CAAC;UAC1D,IACI,CAACL,+BAA+B,IAE5BJ,aAAa,IACZA,aAAa,CAAC1B,OAAO,CAAS6B,IAAI,IACnCT,gBAAgB,IAChBA,gBAAgB,CAACgB,KAAK,CAACjG,KAAK,CAACC,KAAK,CAACM,UAAU,CAAC,IAC9CjB,mBAAmB,CAAC2F,gBAAgB,CAACS,IAAI,CAAC,KAAKT,gBAAgB,CAACgB,KAAK,CAACjG,KAAK,CAACC,KAAK,CAACM,UAAU,CAC/F,EACH;YACEoF,+BAA+B,GAAG,IAAI;UAC1C;UACA,IAEQV,gBAAgB,IAChBM,aAAa,IACbI,+BAA+B,KAAK,KAAK,IAGzCV,gBAAgB,IAAI,CAACM,aACxB,EACH;YACE;AAC5B;AACA;AACA;AACA;AACA;YAC4B,OAAO/F,oBAAoB;UAC/B;UAEA,IAAM0G,qBAAqB,GAAG,CAACf,gBAAgB,GACzC,KAAK,GACL,MAAMnF,KAAK,CAACC,KAAK,CAAC2F,eAAe,CAC/B;YACIC,eAAe,EAAEP,WAAW;YAC5BQ,gBAAgB,EAAEX;UACtB,CAAC,EACD,6BACJ,CAAC,CAAChE,IAAI,CAAC4E,CAAC,IAAIA,CAAC,CAACC,OAAO,CAAC;UAC1B,IACIb,gBAAgB,IAChBe,qBAAqB,EACvB;YACE;AAC5B;AACA;AACA;AACA;AACA;AACA;AACA;YAC4B,IACI,CAACX,aAAa,IACdI,+BAA+B,KAAK,KAAK,EAC3C;cACEpB,gBAAgB,CAACtD,IAAI,CACjB,MAAMnB,eAAe,CACjBE,KAAK,EACLmF,gBAAgB,EAChBI,aAAa,GAAGA,aAAa,CAACC,YAAY,GAAGH,SACjD,CACJ,CAAC;YACL;YACA,OAAO7F,oBAAoB;UAC/B;;UAEA;AACxB;AACA;AACA;UACwB,IAAM2G,YAAY,GAAGjC,MAAM,CAACkC,MAAM,CAC9B,CAAC,CAAC,EACFd,WAAW,EACXL,gBAAgB,GAAG;YACfgB,KAAK,EAAE7G,SAAS,CAAC6F,gBAAgB,CAACgB,KAAK,CAAC;YACxCI,YAAY,EAAErG,KAAK,CAACoF,cAAc,IAAIE,WAAW,CAACe,YAAY,GAAGf,WAA
W,CAACe,YAAY,GAAG,CAAC,CAAC;YAC9FX,IAAI,EAAErG,kBAAkB,CAAC;UAC7B,CAAC,GAAG;YACA4G,KAAK,EAAE;cACHK,GAAG,EAAE/G,GAAG,CAAC;YACb,CAAC;YACDmG,IAAI,EAAErG,kBAAkB,CAAC,CAAC;YAC1BgH,YAAY,EAAErG,KAAK,CAACoF,cAAc,IAAIE,WAAW,CAACe,YAAY,GAAGf,WAAW,CAACe,YAAY,GAAG,CAAC;UACjG,CACJ,CAAC;UACD;AACxB;AACA;AACA;AACA;AACA;AACA;AACA;UACwB,IAAKf,WAAW,CAASI,IAAI,EAAE;YAC3B,IAAMa,kBAAkB,GAAG,CAACtB,gBAAgB,GAAG,CAAC,GAAG3F,mBAAmB,CAAC2F,gBAAgB,CAACS,IAAI,CAAC,GAAG,CAAC;YACjGS,YAAY,CAACF,KAAK,CAACjG,KAAK,CAACC,KAAK,CAACM,UAAU,CAAC,GAAGgG,kBAAkB;YAC/D,IAAIvG,KAAK,CAACC,KAAK,CAACuG,QAAQ,EAAE;cACtBL,YAAY,CAACT,IAAI,GAAIJ,WAAW,CAASI,IAAI;YACjD;UACJ;UACA,IACI1F,KAAK,CAACC,KAAK,CAACuG,QAAQ,IACnBlB,WAAW,CAASW,KAAK,EAC5B;YACEE,YAAY,CAACF,KAAK,GAAIX,WAAW,CAASW,KAAK;UACnD;UAEA,IAAMQ,YAAY,GAAG;YACjBC,QAAQ,EAAEzB,gBAAgB;YAC1B0B,QAAQ,EAAER;UACd,CAAC;UAEDM,YAAY,CAACE,QAAQ,CAACjB,IAAI,GAAGe,YAAY,CAACE,QAAQ,CAACjB,IAAI,GAAGe,YAAY,CAACE,QAAQ,CAACjB,IAAI,GAAGxG,cAAc,CACjGmB,cAAc,EACdoG,YAAY,CAACC,QACjB,CAAC;UACDtC,eAAe,CAACnD,IAAI,CAACwF,YAAY,CAAC;UAClCpC,mBAAmB,CAACP,KAAK,CAAC,GAAG2C,YAAY;UACzCnC,eAAe,CAACR,KAAK,CAAC,GAAG,MAAMhE,eAAe,CAC1CE,KAAK,EACLsF,WAAW,EACXC,aAAa,GAAGA,aAAa,CAACC,YAAY,GAAGH,SACjD,CAAC;QACL,CAAC,CACL,CAAC;MACL,CAAC,CAAC,CAAClE,IAAI,CAAC,YAAY;QAChB,IAAIiD,eAAe,CAAC/C,MAAM,GAAG,CAAC,EAAE;UAC5B,OAAOrB,KAAK,CAACC,KAAK,CAACuE,YAAY,CAACoC,SAAS,CACrCxC,eAAe,EACf,MAAMpE,KAAK,CAAC6G,uBAChB,CAAC,CAAC1F,IAAI,CAAE2F,eAAe,IAAK;YACxBA,eAAe,CAACC,OAAO,CAACxD,OAAO,CAACuB,GAAG,IAAI;cACnC,IAAMhB,KAAK,GAAIgB,GAAG,CAASlB,WAAW,CAAC;cACvC5D,KAAK,CAACsB,MAAM,CAAC0F,SAAS,CAAClG,IAAI,CAACU,IAAI,CAAC6C,mBAAmB,CAACP,KAAK,CAAC,CAAC;cAC5DS,gBAAgB,CAACtD,IAAI,CAACqD,eAAe,CAACR,KAAK,CAAC,CAAC;YACjD,CAAC,CAAC;YACFgD,eAAe,CAACG,KAAK,CAAC1D,OAAO,CAAC0D,KAAK,IAAI;cACnC;AAC5B;AACA;AACA;cAC4B,IAAIA,KAAK,CAACC,MAAM,KAAK,GAAG,EAAE;gBACtB;cACJ;cACA;cACAlH,KAAK,CAACsB,MAAM,CAAC2F,KAAK,CAACzF,IAAI,CAACzC,UAAU,CAAC,SAAS,EAAE;gBAC1CoI,UAAU,EAAEF;cAChB,CAAC,CAAC,CAAC;YACP,CAAC,CAAC;UACN,CAAC,CAAC;QACN;MACJ,CAAC,CAAC,CAAC9F,IAAI,CAA
C,MAAM;QACV,IAAIoD,gBAAgB,CAAClD,MAAM,GAAG,CAAC,EAAE;UAC7B,OAAOrB,KAAK,CAACC,KAAK,CAACmH,YAAY,CAACR,SAAS,CACrCjH,qCAAqC,CAACK,KAAK,EAAEuE,gBAAgB,CAAC,EAC9D,6BACJ,CAAC,CAACpD,IAAI,CAACkG,eAAe,IAAI;YACtBA,eAAe,CAACJ,KAAK,CAChB1D,OAAO,CAAC4D,UAAU,IAAI;cACnBnH,KAAK,CAACsB,MAAM,CAAC2F,KAAK,CAACzF,IAAI,CAACzC,UAAU,CAAC,SAAS,EAAE;gBAC1CuI,EAAE,EAAEH,UAAU,CAACI,UAAU;gBACzBJ;cACJ,CAAC,CAAC,CAAC;YACP,CAAC,CAAC;UACV,CAAC,CAAC;QACN;MACJ,CAAC,CAAC,CAAChG,IAAI,CAAC,MAAM;QACV;AAChB;AACA;AACA;AACA;QACgBzB,aAAa,CACTM,KAAK,EACL,MAAM,EACNgE,aACJ,CAAC;MACL,CAAC,CAAC;IACN,CAAC,CAAC,CAACwD,KAAK,CAACC,cAAc,IAAIzH,KAAK,CAACsB,MAAM,CAAC2F,KAAK,CAACzF,IAAI,CAACiG,cAAc,CAAC,CAAC;IACnE,OAAOhE,gBAAgB;EAC3B;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/replication-protocol/helper.js b/dist/esm/replication-protocol/helper.js deleted file mode 100644 index a920daa4b15..00000000000 --- a/dist/esm/replication-protocol/helper.js +++ /dev/null @@ -1,49 +0,0 @@ -import { clone, createRevision, flatClone, getDefaultRevision, now } from "../plugins/utils/index.js"; -import { stripAttachmentsDataFromDocument } from "../rx-storage-helper.js"; -export function docStateToWriteDoc(databaseInstanceToken, hasAttachments, keepMeta, docState, previous) { - var docData = Object.assign({}, docState, { - _attachments: hasAttachments && docState._attachments ? docState._attachments : {}, - _meta: keepMeta ? docState._meta : Object.assign({}, previous ? previous._meta : {}, { - lwt: now() - }), - _rev: keepMeta ? 
docState._rev : getDefaultRevision() - }); - if (!docData._rev) { - docData._rev = createRevision(databaseInstanceToken, previous); - } - return docData; -} -export function writeDocToDocState(writeDoc, keepAttachments, keepMeta) { - var ret = flatClone(writeDoc); - if (!keepAttachments) { - delete ret._attachments; - } - if (!keepMeta) { - delete ret._meta; - delete ret._rev; - } - return ret; -} -export function stripAttachmentsDataFromMetaWriteRows(state, rows) { - if (!state.hasAttachments) { - return rows; - } - return rows.map(row => { - var document = clone(row.document); - document.docData = stripAttachmentsDataFromDocument(document.docData); - return { - document, - previous: row.previous - }; - }); -} -export function getUnderlyingPersistentStorage(instance) { - while (true) { - if (instance.underlyingPersistentStorage) { - instance = instance.underlyingPersistentStorage; - } else { - return instance; - } - } -} -//# sourceMappingURL=helper.js.map \ No newline at end of file diff --git a/dist/esm/replication-protocol/helper.js.map b/dist/esm/replication-protocol/helper.js.map deleted file mode 100644 index e50f2e7cf2d..00000000000 --- a/dist/esm/replication-protocol/helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"helper.js","names":["clone","createRevision","flatClone","getDefaultRevision","now","stripAttachmentsDataFromDocument","docStateToWriteDoc","databaseInstanceToken","hasAttachments","keepMeta","docState","previous","docData","Object","assign","_attachments","_meta","lwt","_rev","writeDocToDocState","writeDoc","keepAttachments","ret","stripAttachmentsDataFromMetaWriteRows","state","rows","map","row","document","getUnderlyingPersistentStorage","instance","underlyingPersistentStorage"],"sources":["../../../src/replication-protocol/helper.ts"],"sourcesContent":["import type {\n BulkWriteRow,\n RxDocumentData,\n RxDocumentWriteData,\n RxStorageInstance,\n RxStorageInstanceReplicationState,\n RxStorageReplicationMeta,\n 
WithDeletedAndAttachments\n} from '../types/index.d.ts';\nimport {\n clone,\n createRevision,\n flatClone,\n getDefaultRevision,\n now\n} from '../plugins/utils/index.ts';\nimport { stripAttachmentsDataFromDocument } from '../rx-storage-helper.ts';\n\nexport function docStateToWriteDoc(\n databaseInstanceToken: string,\n hasAttachments: boolean,\n keepMeta: boolean,\n docState: WithDeletedAndAttachments,\n previous?: RxDocumentData\n): RxDocumentWriteData {\n const docData: RxDocumentWriteData = Object.assign(\n {},\n docState,\n {\n _attachments: hasAttachments && docState._attachments ? docState._attachments : {},\n _meta: keepMeta ? (docState as any)._meta : Object.assign(\n {},\n previous ? previous._meta : {},\n {\n lwt: now()\n }\n ),\n _rev: keepMeta ? (docState as any)._rev : getDefaultRevision()\n }\n );\n if (!docData._rev) {\n docData._rev = createRevision(\n databaseInstanceToken,\n previous\n );\n }\n\n return docData;\n}\n\nexport function writeDocToDocState(\n writeDoc: RxDocumentData,\n keepAttachments: boolean,\n keepMeta: boolean\n): WithDeletedAndAttachments {\n const ret = flatClone(writeDoc);\n\n if (!keepAttachments) {\n delete (ret as any)._attachments;\n }\n if (!keepMeta) {\n delete (ret as any)._meta;\n delete (ret as any)._rev;\n }\n return ret;\n}\n\n\nexport function stripAttachmentsDataFromMetaWriteRows(\n state: RxStorageInstanceReplicationState,\n rows: BulkWriteRow>[]\n): BulkWriteRow>[] {\n if (!state.hasAttachments) {\n return rows;\n }\n return rows.map(row => {\n const document = clone(row.document);\n document.docData = stripAttachmentsDataFromDocument(document.docData);\n return {\n document,\n previous: row.previous\n };\n });\n}\n\nexport function getUnderlyingPersistentStorage(\n instance: RxStorageInstance\n): RxStorageInstance {\n while (true) {\n if (instance.underlyingPersistentStorage) {\n instance = instance.underlyingPersistentStorage;\n } else {\n return instance;\n }\n 
}\n}\n"],"mappings":"AASA,SACIA,KAAK,EACLC,cAAc,EACdC,SAAS,EACTC,kBAAkB,EAClBC,GAAG,QACA,2BAA2B;AAClC,SAASC,gCAAgC,QAAQ,yBAAyB;AAE1E,OAAO,SAASC,kBAAkBA,CAC9BC,qBAA6B,EAC7BC,cAAuB,EACvBC,QAAiB,EACjBC,QAA8C,EAC9CC,QAAoC,EACN;EAC9B,IAAMC,OAAuC,GAAGC,MAAM,CAACC,MAAM,CACzD,CAAC,CAAC,EACFJ,QAAQ,EACR;IACIK,YAAY,EAAEP,cAAc,IAAIE,QAAQ,CAACK,YAAY,GAAGL,QAAQ,CAACK,YAAY,GAAG,CAAC,CAAC;IAClFC,KAAK,EAAEP,QAAQ,GAAIC,QAAQ,CAASM,KAAK,GAAGH,MAAM,CAACC,MAAM,CACrD,CAAC,CAAC,EACFH,QAAQ,GAAGA,QAAQ,CAACK,KAAK,GAAG,CAAC,CAAC,EAC9B;MACIC,GAAG,EAAEb,GAAG,CAAC;IACb,CACJ,CAAC;IACDc,IAAI,EAAET,QAAQ,GAAIC,QAAQ,CAASQ,IAAI,GAAGf,kBAAkB,CAAC;EACjE,CACJ,CAAC;EACD,IAAI,CAACS,OAAO,CAACM,IAAI,EAAE;IACfN,OAAO,CAACM,IAAI,GAAGjB,cAAc,CACzBM,qBAAqB,EACrBI,QACJ,CAAC;EACL;EAEA,OAAOC,OAAO;AAClB;AAEA,OAAO,SAASO,kBAAkBA,CAC9BC,QAAmC,EACnCC,eAAwB,EACxBZ,QAAiB,EACmB;EACpC,IAAMa,GAAG,GAAGpB,SAAS,CAACkB,QAAQ,CAAC;EAE/B,IAAI,CAACC,eAAe,EAAE;IAClB,OAAQC,GAAG,CAASP,YAAY;EACpC;EACA,IAAI,CAACN,QAAQ,EAAE;IACX,OAAQa,GAAG,CAASN,KAAK;IACzB,OAAQM,GAAG,CAASJ,IAAI;EAC5B;EACA,OAAOI,GAAG;AACd;AAGA,OAAO,SAASC,qCAAqCA,CACjDC,KAA6C,EAC7CC,IAA8D,EACN;EACxD,IAAI,CAACD,KAAK,CAAChB,cAAc,EAAE;IACvB,OAAOiB,IAAI;EACf;EACA,OAAOA,IAAI,CAACC,GAAG,CAACC,GAAG,IAAI;IACnB,IAAMC,QAAQ,GAAG5B,KAAK,CAAC2B,GAAG,CAACC,QAAQ,CAAC;IACpCA,QAAQ,CAAChB,OAAO,GAAGP,gCAAgC,CAACuB,QAAQ,CAAChB,OAAO,CAAC;IACrE,OAAO;MACHgB,QAAQ;MACRjB,QAAQ,EAAEgB,GAAG,CAAChB;IAClB,CAAC;EACL,CAAC,CAAC;AACN;AAEA,OAAO,SAASkB,8BAA8BA,CAC1CC,QAAqD,EACV;EAC3C,OAAO,IAAI,EAAE;IACT,IAAIA,QAAQ,CAACC,2BAA2B,EAAE;MACtCD,QAAQ,GAAGA,QAAQ,CAACC,2BAA2B;IACnD,CAAC,MAAM;MACH,OAAOD,QAAQ;IACnB;EACJ;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/replication-protocol/index.js b/dist/esm/replication-protocol/index.js deleted file mode 100644 index 71af456b785..00000000000 --- a/dist/esm/replication-protocol/index.js +++ /dev/null @@ -1,209 +0,0 @@ -/** - * These files contain the replication protocol. 
- * It can be used to replicated RxStorageInstances or RxCollections - * or even to do a client(s)-server replication. - */ - -import { BehaviorSubject, combineLatest, filter, firstValueFrom, mergeMap, Subject } from 'rxjs'; -import { getPrimaryFieldOfPrimaryKey } from "../rx-schema-helper.js"; -import { clone, ensureNotFalsy, flatClone, PROMISE_RESOLVE_VOID } from "../plugins/utils/index.js"; -import { getCheckpointKey } from "./checkpoint.js"; -import { startReplicationDownstream } from "./downstream.js"; -import { docStateToWriteDoc, getUnderlyingPersistentStorage, writeDocToDocState } from "./helper.js"; -import { startReplicationUpstream } from "./upstream.js"; -import { fillWriteDataForAttachmentsChange } from "../plugins/attachments/index.js"; -import { getChangedDocumentsSince } from "../rx-storage-helper.js"; -export * from "./checkpoint.js"; -export * from "./downstream.js"; -export * from "./upstream.js"; -export * from "./meta-instance.js"; -export * from "./conflicts.js"; -export * from "./helper.js"; -export function replicateRxStorageInstance(input) { - input = flatClone(input); - input.forkInstance = getUnderlyingPersistentStorage(input.forkInstance); - input.metaInstance = getUnderlyingPersistentStorage(input.metaInstance); - var checkpointKeyPromise = getCheckpointKey(input); - var state = { - primaryPath: getPrimaryFieldOfPrimaryKey(input.forkInstance.schema.primaryKey), - hasAttachments: !!input.forkInstance.schema.attachments, - input, - checkpointKey: checkpointKeyPromise, - downstreamBulkWriteFlag: checkpointKeyPromise.then(checkpointKey => 'replication-downstream-' + checkpointKey), - events: { - canceled: new BehaviorSubject(false), - active: { - down: new BehaviorSubject(true), - up: new BehaviorSubject(true) - }, - processed: { - down: new Subject(), - up: new Subject() - }, - resolvedConflicts: new Subject(), - error: new Subject() - }, - stats: { - down: { - addNewTask: 0, - downstreamProcessChanges: 0, - downstreamResyncOnce: 0, - 
masterChangeStreamEmit: 0, - persistFromMaster: 0 - }, - up: { - forkChangeStreamEmit: 0, - persistToMaster: 0, - persistToMasterConflictWrites: 0, - persistToMasterHadConflicts: 0, - processTasks: 0, - upstreamInitialSync: 0 - } - }, - firstSyncDone: { - down: new BehaviorSubject(false), - up: new BehaviorSubject(false) - }, - streamQueue: { - down: PROMISE_RESOLVE_VOID, - up: PROMISE_RESOLVE_VOID - }, - checkpointQueue: PROMISE_RESOLVE_VOID, - lastCheckpointDoc: {} - }; - startReplicationDownstream(state); - startReplicationUpstream(state); - return state; -} -export function awaitRxStorageReplicationFirstInSync(state) { - return firstValueFrom(combineLatest([state.firstSyncDone.down.pipe(filter(v => !!v)), state.firstSyncDone.up.pipe(filter(v => !!v))])).then(() => {}); -} -export function awaitRxStorageReplicationInSync(replicationState) { - return Promise.all([replicationState.streamQueue.up, replicationState.streamQueue.down, replicationState.checkpointQueue]); -} -export async function awaitRxStorageReplicationIdle(state) { - await awaitRxStorageReplicationFirstInSync(state); - while (true) { - var { - down, - up - } = state.streamQueue; - await Promise.all([up, down]); - /** - * If the Promises have not been reassigned - * after awaiting them, we know that the replication - * is in idle state at this point in time. - */ - if (down === state.streamQueue.down && up === state.streamQueue.up) { - return; - } - } -} -export function rxStorageInstanceToReplicationHandler(instance, conflictHandler, databaseInstanceToken, -/** - * If set to true, - * the _meta.lwt from the pushed documents is kept. 
- * (Used in the migration to ensure checkpoints are still valid) - */ -keepMeta = false) { - instance = getUnderlyingPersistentStorage(instance); - var hasAttachments = !!instance.schema.attachments; - var primaryPath = getPrimaryFieldOfPrimaryKey(instance.schema.primaryKey); - var replicationHandler = { - masterChangeStream$: instance.changeStream().pipe(mergeMap(async eventBulk => { - var ret = { - checkpoint: eventBulk.checkpoint, - documents: await Promise.all(eventBulk.events.map(async event => { - var docData = writeDocToDocState(event.documentData, hasAttachments, keepMeta); - if (hasAttachments) { - docData = await fillWriteDataForAttachmentsChange(primaryPath, instance, clone(docData), - /** - * Notice that the master never knows - * the client state of the document. - * Therefore we always send all attachments data. - */ - undefined); - } - return docData; - })) - }; - return ret; - })), - masterChangesSince(checkpoint, batchSize) { - return getChangedDocumentsSince(instance, batchSize, checkpoint).then(async result => { - return { - checkpoint: result.documents.length > 0 ? result.checkpoint : checkpoint, - documents: await Promise.all(result.documents.map(async plainDocumentData => { - var docData = writeDocToDocState(plainDocumentData, hasAttachments, keepMeta); - if (hasAttachments) { - docData = await fillWriteDataForAttachmentsChange(primaryPath, instance, clone(docData), - /** - * Notice the the master never knows - * the client state of the document. - * Therefore we always send all attachments data. 
- */ - undefined); - } - return docData; - })) - }; - }); - }, - async masterWrite(rows) { - var rowById = {}; - rows.forEach(row => { - var docId = row.newDocumentState[primaryPath]; - rowById[docId] = row; - }); - var ids = Object.keys(rowById); - var masterDocsStateList = await instance.findDocumentsById(ids, true); - var masterDocsState = new Map(); - masterDocsStateList.forEach(doc => masterDocsState.set(doc[primaryPath], doc)); - var conflicts = []; - var writeRows = []; - await Promise.all(Object.entries(rowById).map(async ([id, row]) => { - var masterState = masterDocsState.get(id); - if (!masterState) { - writeRows.push({ - document: docStateToWriteDoc(databaseInstanceToken, hasAttachments, keepMeta, row.newDocumentState) - }); - } else if (masterState && !row.assumedMasterState) { - conflicts.push(writeDocToDocState(masterState, hasAttachments, keepMeta)); - } else if ((await conflictHandler({ - realMasterState: writeDocToDocState(masterState, hasAttachments, keepMeta), - newDocumentState: ensureNotFalsy(row.assumedMasterState) - }, 'rxStorageInstanceToReplicationHandler-masterWrite')).isEqual === true) { - writeRows.push({ - previous: masterState, - document: docStateToWriteDoc(databaseInstanceToken, hasAttachments, keepMeta, row.newDocumentState, masterState) - }); - } else { - conflicts.push(writeDocToDocState(masterState, hasAttachments, keepMeta)); - } - })); - if (writeRows.length > 0) { - var result = await instance.bulkWrite(writeRows, 'replication-master-write'); - result.error.forEach(err => { - if (err.status !== 409) { - throw new Error('non conflict error'); - } else { - conflicts.push(writeDocToDocState(ensureNotFalsy(err.documentInDb), hasAttachments, keepMeta)); - } - }); - } - return conflicts; - } - }; - return replicationHandler; -} -export async function cancelRxStorageReplication(replicationState) { - replicationState.events.canceled.next(true); - replicationState.events.active.up.complete(); - 
replicationState.events.active.down.complete(); - replicationState.events.processed.up.complete(); - replicationState.events.processed.down.complete(); - replicationState.events.resolvedConflicts.complete(); - replicationState.events.canceled.complete(); - await replicationState.checkpointQueue; -} -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/dist/esm/replication-protocol/index.js.map b/dist/esm/replication-protocol/index.js.map deleted file mode 100644 index 2bc6951ef7e..00000000000 --- a/dist/esm/replication-protocol/index.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.js","names":["BehaviorSubject","combineLatest","filter","firstValueFrom","mergeMap","Subject","getPrimaryFieldOfPrimaryKey","clone","ensureNotFalsy","flatClone","PROMISE_RESOLVE_VOID","getCheckpointKey","startReplicationDownstream","docStateToWriteDoc","getUnderlyingPersistentStorage","writeDocToDocState","startReplicationUpstream","fillWriteDataForAttachmentsChange","getChangedDocumentsSince","replicateRxStorageInstance","input","forkInstance","metaInstance","checkpointKeyPromise","state","primaryPath","schema","primaryKey","hasAttachments","attachments","checkpointKey","downstreamBulkWriteFlag","then","events","canceled","active","down","up","processed","resolvedConflicts","error","stats","addNewTask","downstreamProcessChanges","downstreamResyncOnce","masterChangeStreamEmit","persistFromMaster","forkChangeStreamEmit","persistToMaster","persistToMasterConflictWrites","persistToMasterHadConflicts","processTasks","upstreamInitialSync","firstSyncDone","streamQueue","checkpointQueue","lastCheckpointDoc","awaitRxStorageReplicationFirstInSync","pipe","v","awaitRxStorageReplicationInSync","replicationState","Promise","all","awaitRxStorageReplicationIdle","rxStorageInstanceToReplicationHandler","instance","conflictHandler","databaseInstanceToken","keepMeta","replicationHandler","masterChangeStream$","changeStream","eventBulk","ret","checkpoint","documents","ma
p","event","docData","documentData","undefined","masterChangesSince","batchSize","result","length","plainDocumentData","masterWrite","rows","rowById","forEach","row","docId","newDocumentState","ids","Object","keys","masterDocsStateList","findDocumentsById","masterDocsState","Map","doc","set","conflicts","writeRows","entries","id","masterState","get","push","document","assumedMasterState","realMasterState","isEqual","previous","bulkWrite","err","status","Error","documentInDb","cancelRxStorageReplication","next","complete"],"sources":["../../../src/replication-protocol/index.ts"],"sourcesContent":["/**\n * These files contain the replication protocol.\n * It can be used to replicated RxStorageInstances or RxCollections\n * or even to do a client(s)-server replication.\n */\n\n\nimport {\n BehaviorSubject,\n combineLatest,\n filter,\n firstValueFrom,\n mergeMap,\n Subject\n} from 'rxjs';\nimport {\n getPrimaryFieldOfPrimaryKey\n} from '../rx-schema-helper.ts';\nimport type {\n BulkWriteRow,\n ById,\n DocumentsWithCheckpoint,\n RxConflictHandler,\n RxDocumentData,\n RxReplicationHandler,\n RxReplicationWriteToMasterRow,\n RxStorageInstance,\n RxStorageInstanceReplicationInput,\n RxStorageInstanceReplicationState,\n WithDeleted\n} from '../types/index.d.ts';\nimport {\n clone,\n ensureNotFalsy,\n flatClone,\n PROMISE_RESOLVE_VOID\n} from '../plugins/utils/index.ts';\nimport {\n getCheckpointKey\n} from './checkpoint.ts';\nimport { startReplicationDownstream } from './downstream.ts';\nimport { docStateToWriteDoc, getUnderlyingPersistentStorage, writeDocToDocState } from './helper.ts';\nimport { startReplicationUpstream } from './upstream.ts';\nimport { fillWriteDataForAttachmentsChange } from '../plugins/attachments/index.ts';\nimport { getChangedDocumentsSince } from '../rx-storage-helper.ts';\n\n\nexport * from './checkpoint.ts';\nexport * from './downstream.ts';\nexport * from './upstream.ts';\nexport * from './meta-instance.ts';\nexport * from 
'./conflicts.ts';\nexport * from './helper.ts';\n\n\nexport function replicateRxStorageInstance(\n input: RxStorageInstanceReplicationInput\n): RxStorageInstanceReplicationState {\n input = flatClone(input);\n input.forkInstance = getUnderlyingPersistentStorage(input.forkInstance);\n input.metaInstance = getUnderlyingPersistentStorage(input.metaInstance);\n const checkpointKeyPromise = getCheckpointKey(input);\n const state: RxStorageInstanceReplicationState = {\n primaryPath: getPrimaryFieldOfPrimaryKey(input.forkInstance.schema.primaryKey),\n hasAttachments: !!input.forkInstance.schema.attachments,\n input,\n checkpointKey: checkpointKeyPromise,\n downstreamBulkWriteFlag: checkpointKeyPromise.then(checkpointKey => 'replication-downstream-' + checkpointKey),\n events: {\n canceled: new BehaviorSubject(false),\n active: {\n down: new BehaviorSubject(true),\n up: new BehaviorSubject(true)\n },\n processed: {\n down: new Subject(),\n up: new Subject()\n },\n resolvedConflicts: new Subject(),\n error: new Subject()\n },\n stats: {\n down: {\n addNewTask: 0,\n downstreamProcessChanges: 0,\n downstreamResyncOnce: 0,\n masterChangeStreamEmit: 0,\n persistFromMaster: 0\n },\n up: {\n forkChangeStreamEmit: 0,\n persistToMaster: 0,\n persistToMasterConflictWrites: 0,\n persistToMasterHadConflicts: 0,\n processTasks: 0,\n upstreamInitialSync: 0\n }\n },\n firstSyncDone: {\n down: new BehaviorSubject(false),\n up: new BehaviorSubject(false)\n },\n streamQueue: {\n down: PROMISE_RESOLVE_VOID,\n up: PROMISE_RESOLVE_VOID\n },\n checkpointQueue: PROMISE_RESOLVE_VOID,\n lastCheckpointDoc: {}\n };\n\n startReplicationDownstream(state);\n startReplicationUpstream(state);\n return state;\n}\n\nexport function awaitRxStorageReplicationFirstInSync(\n state: RxStorageInstanceReplicationState\n): Promise {\n return firstValueFrom(\n combineLatest([\n state.firstSyncDone.down.pipe(\n filter(v => !!v)\n ),\n state.firstSyncDone.up.pipe(\n filter(v => !!v)\n )\n ])\n ).then(() => { 
});\n}\n\nexport function awaitRxStorageReplicationInSync(\n replicationState: RxStorageInstanceReplicationState\n) {\n return Promise.all([\n replicationState.streamQueue.up,\n replicationState.streamQueue.down,\n replicationState.checkpointQueue\n ]);\n}\n\n\nexport async function awaitRxStorageReplicationIdle(\n state: RxStorageInstanceReplicationState\n) {\n await awaitRxStorageReplicationFirstInSync(state);\n while (true) {\n const { down, up } = state.streamQueue;\n await Promise.all([\n up,\n down\n ]);\n /**\n * If the Promises have not been reassigned\n * after awaiting them, we know that the replication\n * is in idle state at this point in time.\n */\n if (\n down === state.streamQueue.down &&\n up === state.streamQueue.up\n ) {\n return;\n }\n }\n}\n\n\nexport function rxStorageInstanceToReplicationHandler(\n instance: RxStorageInstance,\n conflictHandler: RxConflictHandler,\n databaseInstanceToken: string,\n /**\n * If set to true,\n * the _meta.lwt from the pushed documents is kept.\n * (Used in the migration to ensure checkpoints are still valid)\n */\n keepMeta: boolean = false\n): RxReplicationHandler {\n instance = getUnderlyingPersistentStorage(instance);\n\n const hasAttachments = !!instance.schema.attachments;\n const primaryPath = getPrimaryFieldOfPrimaryKey(instance.schema.primaryKey);\n const replicationHandler: RxReplicationHandler = {\n masterChangeStream$: instance.changeStream().pipe(\n mergeMap(async (eventBulk) => {\n const ret: DocumentsWithCheckpoint = {\n checkpoint: eventBulk.checkpoint,\n documents: await Promise.all(\n eventBulk.events.map(async (event) => {\n let docData = writeDocToDocState(event.documentData, hasAttachments, keepMeta);\n if (hasAttachments) {\n docData = await fillWriteDataForAttachmentsChange(\n primaryPath,\n instance,\n clone(docData),\n /**\n * Notice that the master never knows\n * the client state of the document.\n * Therefore we always send all attachments data.\n */\n undefined\n );\n }\n\n return 
docData;\n })\n )\n };\n return ret;\n })\n ),\n masterChangesSince(\n checkpoint,\n batchSize\n ) {\n return getChangedDocumentsSince(\n instance,\n batchSize,\n checkpoint\n ).then(async (result) => {\n return {\n checkpoint: result.documents.length > 0 ? result.checkpoint : checkpoint,\n documents: await Promise.all(\n result.documents.map(async (plainDocumentData) => {\n let docData = writeDocToDocState(plainDocumentData, hasAttachments, keepMeta);\n if (hasAttachments) {\n docData = await fillWriteDataForAttachmentsChange(\n primaryPath,\n instance,\n clone(docData),\n /**\n * Notice the the master never knows\n * the client state of the document.\n * Therefore we always send all attachments data.\n */\n undefined\n );\n }\n return docData;\n })\n )\n };\n });\n },\n async masterWrite(\n rows\n ) {\n const rowById: ById> = {};\n rows.forEach(row => {\n const docId: string = (row.newDocumentState as any)[primaryPath];\n rowById[docId] = row;\n });\n const ids = Object.keys(rowById);\n\n const masterDocsStateList = await instance.findDocumentsById(\n ids,\n true\n );\n const masterDocsState = new Map>();\n masterDocsStateList.forEach(doc => masterDocsState.set((doc as any)[primaryPath], doc));\n const conflicts: WithDeleted[] = [];\n const writeRows: BulkWriteRow[] = [];\n await Promise.all(\n Object.entries(rowById)\n .map(async ([id, row]) => {\n const masterState = masterDocsState.get(id);\n if (!masterState) {\n writeRows.push({\n document: docStateToWriteDoc(databaseInstanceToken, hasAttachments, keepMeta, row.newDocumentState)\n });\n } else if (\n masterState &&\n !row.assumedMasterState\n ) {\n conflicts.push(writeDocToDocState(masterState, hasAttachments, keepMeta));\n } else if (\n (await conflictHandler({\n realMasterState: writeDocToDocState(masterState, hasAttachments, keepMeta),\n newDocumentState: ensureNotFalsy(row.assumedMasterState)\n }, 'rxStorageInstanceToReplicationHandler-masterWrite')).isEqual === true\n ) {\n writeRows.push({\n previous: 
masterState,\n document: docStateToWriteDoc(databaseInstanceToken, hasAttachments, keepMeta, row.newDocumentState, masterState)\n });\n } else {\n conflicts.push(writeDocToDocState(masterState, hasAttachments, keepMeta));\n }\n })\n );\n\n if (writeRows.length > 0) {\n const result = await instance.bulkWrite(\n writeRows,\n 'replication-master-write'\n );\n\n result.error.forEach(err => {\n if (err.status !== 409) {\n throw new Error('non conflict error');\n } else {\n conflicts.push(\n writeDocToDocState(ensureNotFalsy(err.documentInDb), hasAttachments, keepMeta)\n );\n }\n });\n }\n return conflicts;\n }\n };\n\n return replicationHandler;\n}\n\n\nexport async function cancelRxStorageReplication(\n replicationState: RxStorageInstanceReplicationState\n) {\n replicationState.events.canceled.next(true);\n replicationState.events.active.up.complete();\n replicationState.events.active.down.complete();\n replicationState.events.processed.up.complete();\n replicationState.events.processed.down.complete();\n replicationState.events.resolvedConflicts.complete();\n replicationState.events.canceled.complete();\n await 
replicationState.checkpointQueue;\n}\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;;AAGA,SACIA,eAAe,EACfC,aAAa,EACbC,MAAM,EACNC,cAAc,EACdC,QAAQ,EACRC,OAAO,QACJ,MAAM;AACb,SACIC,2BAA2B,QACxB,wBAAwB;AAc/B,SACIC,KAAK,EACLC,cAAc,EACdC,SAAS,EACTC,oBAAoB,QACjB,2BAA2B;AAClC,SACIC,gBAAgB,QACb,iBAAiB;AACxB,SAASC,0BAA0B,QAAQ,iBAAiB;AAC5D,SAASC,kBAAkB,EAAEC,8BAA8B,EAAEC,kBAAkB,QAAQ,aAAa;AACpG,SAASC,wBAAwB,QAAQ,eAAe;AACxD,SAASC,iCAAiC,QAAQ,iCAAiC;AACnF,SAASC,wBAAwB,QAAQ,yBAAyB;AAGlE,cAAc,iBAAiB;AAC/B,cAAc,iBAAiB;AAC/B,cAAc,eAAe;AAC7B,cAAc,oBAAoB;AAClC,cAAc,gBAAgB;AAC9B,cAAc,aAAa;AAG3B,OAAO,SAASC,0BAA0BA,CACtCC,KAAmD,EACP;EAC5CA,KAAK,GAAGX,SAAS,CAACW,KAAK,CAAC;EACxBA,KAAK,CAACC,YAAY,GAAGP,8BAA8B,CAACM,KAAK,CAACC,YAAY,CAAC;EACvED,KAAK,CAACE,YAAY,GAAGR,8BAA8B,CAACM,KAAK,CAACE,YAAY,CAAC;EACvE,IAAMC,oBAAoB,GAAGZ,gBAAgB,CAACS,KAAK,CAAC;EACpD,IAAMI,KAAmD,GAAG;IACxDC,WAAW,EAAEnB,2BAA2B,CAACc,KAAK,CAACC,YAAY,CAACK,MAAM,CAACC,UAAU,CAAC;IAC9EC,cAAc,EAAE,CAAC,CAACR,KAAK,CAACC,YAAY,CAACK,MAAM,CAACG,WAAW;IACvDT,KAAK;IACLU,aAAa,EAAEP,oBAAoB;IACnCQ,uBAAuB,EAAER,oBAAoB,CAACS,IAAI,CAACF,aAAa,IAAI,yBAAyB,GAAGA,aAAa,CAAC;IAC9GG,MAAM,EAAE;MACJC,QAAQ,EAAE,IAAIlC,eAAe,CAAU,KAAK,CAAC;MAC7CmC,MAAM,EAAE;QACJC,IAAI,EAAE,IAAIpC,eAAe,CAAU,IAAI,CAAC;QACxCqC,EAAE,EAAE,IAAIrC,eAAe,CAAU,IAAI;MACzC,CAAC;MACDsC,SAAS,EAAE;QACPF,IAAI,EAAE,IAAI/B,OAAO,CAAC,CAAC;QACnBgC,EAAE,EAAE,IAAIhC,OAAO,CAAC;MACpB,CAAC;MACDkC,iBAAiB,EAAE,IAAIlC,OAAO,CAAC,CAAC;MAChCmC,KAAK,EAAE,IAAInC,OAAO,CAAC;IACvB,CAAC;IACDoC,KAAK,EAAE;MACHL,IAAI,EAAE;QACFM,UAAU,EAAE,CAAC;QACbC,wBAAwB,EAAE,CAAC;QAC3BC,oBAAoB,EAAE,CAAC;QACvBC,sBAAsB,EAAE,CAAC;QACzBC,iBAAiB,EAAE;MACvB,CAAC;MACDT,EAAE,EAAE;QACAU,oBAAoB,EAAE,CAAC;QACvBC,eAAe,EAAE,CAAC;QAClBC,6BAA6B,EAAE,CAAC;QAChCC,2BAA2B,EAAE,CAAC;QAC9BC,YAAY,EAAE,CAAC;QACfC,mBAAmB,EAAE;MACzB;IACJ,CAAC;IACDC,aAAa,EAAE;MACXjB,IAAI,EAAE,IAAIpC,eAAe,CAAU,KAAK,CAAC;MACzCqC,EAAE,EAAE,IAAIrC,eAAe,CAAU,KAAK;IAC1C,CAAC;IACDsD,WAAW,EAAE;MACTlB,IAAI,EAAE1B,oBAAoB;MAC1B2B,EAAE,EAAE3B;IACR,CAAC;IACD6C,eAAe,EAAE7C,oBAAoB;IACrC8C,i
BAAiB,EAAE,CAAC;EACxB,CAAC;EAED5C,0BAA0B,CAACY,KAAK,CAAC;EACjCR,wBAAwB,CAACQ,KAAK,CAAC;EAC/B,OAAOA,KAAK;AAChB;AAEA,OAAO,SAASiC,oCAAoCA,CAChDjC,KAA6C,EAChC;EACb,OAAOrB,cAAc,CACjBF,aAAa,CAAC,CACVuB,KAAK,CAAC6B,aAAa,CAACjB,IAAI,CAACsB,IAAI,CACzBxD,MAAM,CAACyD,CAAC,IAAI,CAAC,CAACA,CAAC,CACnB,CAAC,EACDnC,KAAK,CAAC6B,aAAa,CAAChB,EAAE,CAACqB,IAAI,CACvBxD,MAAM,CAACyD,CAAC,IAAI,CAAC,CAACA,CAAC,CACnB,CAAC,CACJ,CACL,CAAC,CAAC3B,IAAI,CAAC,MAAM,CAAE,CAAC,CAAC;AACrB;AAEA,OAAO,SAAS4B,+BAA+BA,CAC3CC,gBAAwD,EAC1D;EACE,OAAOC,OAAO,CAACC,GAAG,CAAC,CACfF,gBAAgB,CAACP,WAAW,CAACjB,EAAE,EAC/BwB,gBAAgB,CAACP,WAAW,CAAClB,IAAI,EACjCyB,gBAAgB,CAACN,eAAe,CACnC,CAAC;AACN;AAGA,OAAO,eAAeS,6BAA6BA,CAC/CxC,KAA6C,EAC/C;EACE,MAAMiC,oCAAoC,CAACjC,KAAK,CAAC;EACjD,OAAO,IAAI,EAAE;IACT,IAAM;MAAEY,IAAI;MAAEC;IAAG,CAAC,GAAGb,KAAK,CAAC8B,WAAW;IACtC,MAAMQ,OAAO,CAACC,GAAG,CAAC,CACd1B,EAAE,EACFD,IAAI,CACP,CAAC;IACF;AACR;AACA;AACA;AACA;IACQ,IACIA,IAAI,KAAKZ,KAAK,CAAC8B,WAAW,CAAClB,IAAI,IAC/BC,EAAE,KAAKb,KAAK,CAAC8B,WAAW,CAACjB,EAAE,EAC7B;MACE;IACJ;EACJ;AACJ;AAGA,OAAO,SAAS4B,qCAAqCA,CACjDC,QAAsE,EACtEC,eAA6C,EAC7CC,qBAA6B;AAC7B;AACJ;AACA;AACA;AACA;AACIC,QAAiB,GAAG,KAAK,EAC4B;EACrDH,QAAQ,GAAGpD,8BAA8B,CAACoD,QAAQ,CAAC;EAEnD,IAAMtC,cAAc,GAAG,CAAC,CAACsC,QAAQ,CAACxC,MAAM,CAACG,WAAW;EACpD,IAAMJ,WAAW,GAAGnB,2BAA2B,CAAC4D,QAAQ,CAACxC,MAAM,CAACC,UAAU,CAAC;EAC3E,IAAM2C,kBAAyE,GAAG;IAC9EC,mBAAmB,EAAEL,QAAQ,CAACM,YAAY,CAAC,CAAC,CAACd,IAAI,CAC7CtD,QAAQ,CAAC,MAAOqE,SAAS,IAAK;MAC1B,IAAMC,GAA6D,GAAG;QAClEC,UAAU,EAAEF,SAAS,CAACE,UAAU;QAChCC,SAAS,EAAE,MAAMd,OAAO,CAACC,GAAG,CACxBU,SAAS,CAACxC,MAAM,CAAC4C,GAAG,CAAC,MAAOC,KAAK,IAAK;UAClC,IAAIC,OAAO,GAAGhE,kBAAkB,CAAC+D,KAAK,CAACE,YAAY,EAAEpD,cAAc,EAAEyC,QAAQ,CAAC;UAC9E,IAAIzC,cAAc,EAAE;YAChBmD,OAAO,GAAG,MAAM9D,iCAAiC,CAC7CQ,WAAW,EACXyC,QAAQ,EACR3D,KAAK,CAACwE,OAAO,CAAC;YACd;AACpC;AACA;AACA;AACA;YACoCE,SACJ,CAAC;UACL;UAEA,OAAOF,OAAO;QAClB,CAAC,CACL;MACJ,CAAC;MACD,OAAOL,GAAG;IACd,CAAC,CACL,CAAC;IACDQ,kBAAkBA,CACdP,UAAU,EACVQ,SAAS,EACX;MACE,OAAOjE,wBAAwB,CAC3BgD,QAAQ,EACRiB,SAAS,EACTR,
UACJ,CAAC,CAAC3C,IAAI,CAAC,MAAOoD,MAAM,IAAK;QACrB,OAAO;UACHT,UAAU,EAAES,MAAM,CAACR,SAAS,CAACS,MAAM,GAAG,CAAC,GAAGD,MAAM,CAACT,UAAU,GAAGA,UAAU;UACxEC,SAAS,EAAE,MAAMd,OAAO,CAACC,GAAG,CACxBqB,MAAM,CAACR,SAAS,CAACC,GAAG,CAAC,MAAOS,iBAAiB,IAAK;YAC9C,IAAIP,OAAO,GAAGhE,kBAAkB,CAACuE,iBAAiB,EAAE1D,cAAc,EAAEyC,QAAQ,CAAC;YAC7E,IAAIzC,cAAc,EAAE;cAChBmD,OAAO,GAAG,MAAM9D,iCAAiC,CAC7CQ,WAAW,EACXyC,QAAQ,EACR3D,KAAK,CAACwE,OAAO,CAAC;cACd;AACpC;AACA;AACA;AACA;cACoCE,SACJ,CAAC;YACL;YACA,OAAOF,OAAO;UAClB,CAAC,CACL;QACJ,CAAC;MACL,CAAC,CAAC;IACN,CAAC;IACD,MAAMQ,WAAWA,CACbC,IAAI,EACN;MACE,IAAMC,OAAuD,GAAG,CAAC,CAAC;MAClED,IAAI,CAACE,OAAO,CAACC,GAAG,IAAI;QAChB,IAAMC,KAAa,GAAID,GAAG,CAACE,gBAAgB,CAASpE,WAAW,CAAC;QAChEgE,OAAO,CAACG,KAAK,CAAC,GAAGD,GAAG;MACxB,CAAC,CAAC;MACF,IAAMG,GAAG,GAAGC,MAAM,CAACC,IAAI,CAACP,OAAO,CAAC;MAEhC,IAAMQ,mBAAmB,GAAG,MAAM/B,QAAQ,CAACgC,iBAAiB,CACxDJ,GAAG,EACH,IACJ,CAAC;MACD,IAAMK,eAAe,GAAG,IAAIC,GAAG,CAAoC,CAAC;MACpEH,mBAAmB,CAACP,OAAO,CAACW,GAAG,IAAIF,eAAe,CAACG,GAAG,CAAED,GAAG,CAAS5E,WAAW,CAAC,EAAE4E,GAAG,CAAC,CAAC;MACvF,IAAME,SAAmC,GAAG,EAAE;MAC9C,IAAMC,SAAoC,GAAG,EAAE;MAC/C,MAAM1C,OAAO,CAACC,GAAG,CACbgC,MAAM,CAACU,OAAO,CAAChB,OAAO,CAAC,CAClBZ,GAAG,CAAC,OAAO,CAAC6B,EAAE,EAAEf,GAAG,CAAC,KAAK;QACtB,IAAMgB,WAAW,GAAGR,eAAe,CAACS,GAAG,CAACF,EAAE,CAAC;QAC3C,IAAI,CAACC,WAAW,EAAE;UACdH,SAAS,CAACK,IAAI,CAAC;YACXC,QAAQ,EAAEjG,kBAAkB,CAACuD,qBAAqB,EAAExC,cAAc,EAAEyC,QAAQ,EAAEsB,GAAG,CAACE,gBAAgB;UACtG,CAAC,CAAC;QACN,CAAC,MAAM,IACHc,WAAW,IACX,CAAChB,GAAG,CAACoB,kBAAkB,EACzB;UACER,SAAS,CAACM,IAAI,CAAC9F,kBAAkB,CAAC4F,WAAW,EAAE/E,cAAc,EAAEyC,QAAQ,CAAC,CAAC;QAC7E,CAAC,MAAM,IACH,CAAC,MAAMF,eAAe,CAAC;UACnB6C,eAAe,EAAEjG,kBAAkB,CAAC4F,WAAW,EAAE/E,cAAc,EAAEyC,QAAQ,CAAC;UAC1EwB,gBAAgB,EAAErF,cAAc,CAACmF,GAAG,CAACoB,kBAAkB;QAC3D,CAAC,EAAE,mDAAmD,CAAC,EAAEE,OAAO,KAAK,IAAI,EAC3E;UACET,SAAS,CAACK,IAAI,CAAC;YACXK,QAAQ,EAAEP,WAAW;YACrBG,QAAQ,EAAEjG,kBAAkB,CAACuD,qBAAqB,EAAExC,cAAc,EAAEyC,QAAQ,EAAEsB,GAAG,CAACE,gBAAgB,EAAEc,WAAW;UACnH,CAAC,CAAC;QACN,CAAC,MAAM;UACHJ,SAAS,CAACM,IAAI,CAAC9
F,kBAAkB,CAAC4F,WAAW,EAAE/E,cAAc,EAAEyC,QAAQ,CAAC,CAAC;QAC7E;MACJ,CAAC,CACT,CAAC;MAED,IAAImC,SAAS,CAACnB,MAAM,GAAG,CAAC,EAAE;QACtB,IAAMD,MAAM,GAAG,MAAMlB,QAAQ,CAACiD,SAAS,CACnCX,SAAS,EACT,0BACJ,CAAC;QAEDpB,MAAM,CAAC5C,KAAK,CAACkD,OAAO,CAAC0B,GAAG,IAAI;UACxB,IAAIA,GAAG,CAACC,MAAM,KAAK,GAAG,EAAE;YACpB,MAAM,IAAIC,KAAK,CAAC,oBAAoB,CAAC;UACzC,CAAC,MAAM;YACHf,SAAS,CAACM,IAAI,CACV9F,kBAAkB,CAACP,cAAc,CAAC4G,GAAG,CAACG,YAAY,CAAC,EAAE3F,cAAc,EAAEyC,QAAQ,CACjF,CAAC;UACL;QACJ,CAAC,CAAC;MACN;MACA,OAAOkC,SAAS;IACpB;EACJ,CAAC;EAED,OAAOjC,kBAAkB;AAC7B;AAGA,OAAO,eAAekD,0BAA0BA,CAC5C3D,gBAAwD,EAC1D;EACEA,gBAAgB,CAAC5B,MAAM,CAACC,QAAQ,CAACuF,IAAI,CAAC,IAAI,CAAC;EAC3C5D,gBAAgB,CAAC5B,MAAM,CAACE,MAAM,CAACE,EAAE,CAACqF,QAAQ,CAAC,CAAC;EAC5C7D,gBAAgB,CAAC5B,MAAM,CAACE,MAAM,CAACC,IAAI,CAACsF,QAAQ,CAAC,CAAC;EAC9C7D,gBAAgB,CAAC5B,MAAM,CAACK,SAAS,CAACD,EAAE,CAACqF,QAAQ,CAAC,CAAC;EAC/C7D,gBAAgB,CAAC5B,MAAM,CAACK,SAAS,CAACF,IAAI,CAACsF,QAAQ,CAAC,CAAC;EACjD7D,gBAAgB,CAAC5B,MAAM,CAACM,iBAAiB,CAACmF,QAAQ,CAAC,CAAC;EACpD7D,gBAAgB,CAAC5B,MAAM,CAACC,QAAQ,CAACwF,QAAQ,CAAC,CAAC;EAC3C,MAAM7D,gBAAgB,CAACN,eAAe;AAC1C","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/replication-protocol/meta-instance.js b/dist/esm/replication-protocol/meta-instance.js deleted file mode 100644 index 4d1ed1fd99d..00000000000 --- a/dist/esm/replication-protocol/meta-instance.js +++ /dev/null @@ -1,115 +0,0 @@ -import { fillWithDefaultSettings, getComposedPrimaryKeyOfDocumentData, getLengthOfPrimaryKey } from "../rx-schema-helper.js"; -import { flatCloneDocWithMeta } from "../rx-storage-helper.js"; -import { getDefaultRevision, createRevision, now } from "../plugins/utils/index.js"; -export var META_INSTANCE_SCHEMA_TITLE = 'RxReplicationProtocolMetaData'; -export function getRxReplicationMetaInstanceSchema(replicatedDocumentsSchema, encrypted) { - var parentPrimaryKeyLength = getLengthOfPrimaryKey(replicatedDocumentsSchema); - var baseSchema = { - title: META_INSTANCE_SCHEMA_TITLE, - primaryKey: { - key: 
'id', - fields: ['itemId', 'isCheckpoint'], - separator: '|' - }, - type: 'object', - version: replicatedDocumentsSchema.version, - additionalProperties: false, - properties: { - id: { - type: 'string', - minLength: 1, - // add +1 for the '|' and +1 for the 'isCheckpoint' flag - maxLength: parentPrimaryKeyLength + 2 - }, - isCheckpoint: { - type: 'string', - enum: ['0', '1'], - minLength: 1, - maxLength: 1 - }, - itemId: { - type: 'string', - /** - * ensure that all values of RxStorageReplicationDirection ('DOWN' has 4 chars) fit into it - * because checkpoints use the itemId field for that. - */ - maxLength: parentPrimaryKeyLength > 4 ? parentPrimaryKeyLength : 4 - }, - checkpointData: { - type: 'object', - additionalProperties: true - }, - docData: { - type: 'object', - properties: replicatedDocumentsSchema.properties - }, - isResolvedConflict: { - type: 'string' - } - }, - keyCompression: replicatedDocumentsSchema.keyCompression, - required: ['id', 'isCheckpoint', 'itemId'] - }; - if (encrypted) { - baseSchema.encrypted = ['docData']; - } - var metaInstanceSchema = fillWithDefaultSettings(baseSchema); - return metaInstanceSchema; -} - -/** - * Returns the document states of what the fork instance - * assumes to be the latest state on the master instance. - */ -export function getAssumedMasterState(state, docIds) { - return state.input.metaInstance.findDocumentsById(docIds.map(docId => { - var useId = getComposedPrimaryKeyOfDocumentData(state.input.metaInstance.schema, { - itemId: docId, - isCheckpoint: '0' - }); - return useId; - }), true).then(metaDocs => { - var ret = {}; - Object.values(metaDocs).forEach(metaDoc => { - ret[metaDoc.itemId] = { - docData: metaDoc.docData, - metaDocument: metaDoc - }; - }); - return ret; - }); -} -export async function getMetaWriteRow(state, newMasterDocState, previous, isResolvedConflict) { - var docId = newMasterDocState[state.primaryPath]; - var newMeta = previous ? 
flatCloneDocWithMeta(previous) : { - id: '', - isCheckpoint: '0', - itemId: docId, - docData: newMasterDocState, - _attachments: {}, - _deleted: false, - _rev: getDefaultRevision(), - _meta: { - lwt: 0 - } - }; - newMeta.docData = newMasterDocState; - - /** - * Sending isResolvedConflict with the value undefined - * will throw a schema validation error because it must be either - * not set or have a string. - */ - if (isResolvedConflict) { - newMeta.isResolvedConflict = isResolvedConflict; - } - newMeta._meta.lwt = now(); - newMeta.id = getComposedPrimaryKeyOfDocumentData(state.input.metaInstance.schema, newMeta); - newMeta._rev = createRevision(await state.checkpointKey, previous); - var ret = { - previous, - document: newMeta - }; - return ret; -} -//# sourceMappingURL=meta-instance.js.map \ No newline at end of file diff --git a/dist/esm/replication-protocol/meta-instance.js.map b/dist/esm/replication-protocol/meta-instance.js.map deleted file mode 100644 index c5c9383bb99..00000000000 --- a/dist/esm/replication-protocol/meta-instance.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"meta-instance.js","names":["fillWithDefaultSettings","getComposedPrimaryKeyOfDocumentData","getLengthOfPrimaryKey","flatCloneDocWithMeta","getDefaultRevision","createRevision","now","META_INSTANCE_SCHEMA_TITLE","getRxReplicationMetaInstanceSchema","replicatedDocumentsSchema","encrypted","parentPrimaryKeyLength","baseSchema","title","primaryKey","key","fields","separator","type","version","additionalProperties","properties","id","minLength","maxLength","isCheckpoint","enum","itemId","checkpointData","docData","isResolvedConflict","keyCompression","required","metaInstanceSchema","getAssumedMasterState","state","docIds","input","metaInstance","findDocumentsById","map","docId","useId","schema","then","metaDocs","ret","Object","values","forEach","metaDoc","metaDocument","getMetaWriteRow","newMasterDocState","previous","primaryPath","newMeta","_attachments","_deleted","_rev","_meta","lwt","checkpointKey","document"],"sources":["../../../src/replication-protocol/meta-instance.ts"],"sourcesContent":["import {\n fillWithDefaultSettings,\n getComposedPrimaryKeyOfDocumentData,\n getLengthOfPrimaryKey\n} from '../rx-schema-helper.ts';\nimport { flatCloneDocWithMeta } from '../rx-storage-helper.ts';\nimport type {\n BulkWriteRow,\n ById,\n RxDocumentData,\n RxJsonSchema,\n RxStorageInstanceReplicationState,\n RxStorageReplicationMeta,\n WithDeleted\n} from '../types/index.d.ts';\nimport {\n getDefaultRevision,\n createRevision,\n now\n} from '../plugins/utils/index.ts';\n\n\nexport const META_INSTANCE_SCHEMA_TITLE = 'RxReplicationProtocolMetaData';\n\nexport function getRxReplicationMetaInstanceSchema(\n replicatedDocumentsSchema: RxJsonSchema>,\n encrypted: boolean\n): RxJsonSchema>> {\n const parentPrimaryKeyLength = getLengthOfPrimaryKey(replicatedDocumentsSchema);\n\n const baseSchema: RxJsonSchema> = {\n title: META_INSTANCE_SCHEMA_TITLE,\n primaryKey: {\n key: 'id',\n fields: [\n 'itemId',\n 'isCheckpoint'\n ],\n separator: '|'\n },\n type: 
'object',\n version: replicatedDocumentsSchema.version,\n additionalProperties: false,\n properties: {\n id: {\n type: 'string',\n minLength: 1,\n // add +1 for the '|' and +1 for the 'isCheckpoint' flag\n maxLength: parentPrimaryKeyLength + 2\n },\n isCheckpoint: {\n type: 'string',\n enum: [\n '0',\n '1'\n ],\n minLength: 1,\n maxLength: 1\n },\n itemId: {\n type: 'string',\n /**\n * ensure that all values of RxStorageReplicationDirection ('DOWN' has 4 chars) fit into it\n * because checkpoints use the itemId field for that.\n */\n maxLength: parentPrimaryKeyLength > 4 ? parentPrimaryKeyLength : 4\n },\n checkpointData: {\n type: 'object',\n additionalProperties: true\n },\n docData: {\n type: 'object',\n properties: replicatedDocumentsSchema.properties\n },\n isResolvedConflict: {\n type: 'string'\n }\n },\n keyCompression: replicatedDocumentsSchema.keyCompression,\n required: [\n 'id',\n 'isCheckpoint',\n 'itemId'\n ]\n };\n if (encrypted) {\n baseSchema.encrypted = ['docData'];\n }\n const metaInstanceSchema: RxJsonSchema>> = fillWithDefaultSettings(baseSchema);\n return metaInstanceSchema;\n}\n\n\n\n/**\n * Returns the document states of what the fork instance\n * assumes to be the latest state on the master instance.\n */\nexport function getAssumedMasterState(\n state: RxStorageInstanceReplicationState,\n docIds: string[]\n): Promise;\n metaDocument: RxDocumentData>;\n}>> {\n return state.input.metaInstance.findDocumentsById(\n docIds.map(docId => {\n const useId = getComposedPrimaryKeyOfDocumentData(\n state.input.metaInstance.schema,\n {\n itemId: docId,\n isCheckpoint: '0'\n }\n );\n return useId;\n }),\n true\n ).then(metaDocs => {\n const ret: {\n [docId: string]: {\n docData: RxDocumentData;\n metaDocument: RxDocumentData>;\n };\n } = {};\n Object\n .values(metaDocs)\n .forEach((metaDoc) => {\n ret[metaDoc.itemId] = {\n docData: metaDoc.docData,\n metaDocument: metaDoc\n };\n });\n\n return ret;\n });\n}\n\n\nexport async function getMetaWriteRow(\n 
state: RxStorageInstanceReplicationState,\n newMasterDocState: WithDeleted,\n previous?: RxDocumentData>,\n isResolvedConflict?: string\n): Promise>> {\n const docId: string = (newMasterDocState as any)[state.primaryPath];\n const newMeta: RxDocumentData> = previous ? flatCloneDocWithMeta(\n previous\n ) : {\n id: '',\n isCheckpoint: '0',\n itemId: docId,\n docData: newMasterDocState,\n _attachments: {},\n _deleted: false,\n _rev: getDefaultRevision(),\n _meta: {\n lwt: 0\n }\n };\n newMeta.docData = newMasterDocState;\n\n /**\n * Sending isResolvedConflict with the value undefined\n * will throw a schema validation error because it must be either\n * not set or have a string.\n */\n if (isResolvedConflict) {\n newMeta.isResolvedConflict = isResolvedConflict;\n }\n\n newMeta._meta.lwt = now();\n newMeta.id = getComposedPrimaryKeyOfDocumentData(\n state.input.metaInstance.schema,\n newMeta\n );\n newMeta._rev = createRevision(\n await state.checkpointKey,\n previous\n );\n\n const ret = {\n previous,\n document: newMeta\n };\n\n return 
ret;\n}\n"],"mappings":"AAAA,SACIA,uBAAuB,EACvBC,mCAAmC,EACnCC,qBAAqB,QAClB,wBAAwB;AAC/B,SAASC,oBAAoB,QAAQ,yBAAyB;AAU9D,SACIC,kBAAkB,EAClBC,cAAc,EACdC,GAAG,QACA,2BAA2B;AAGlC,OAAO,IAAMC,0BAA0B,GAAG,+BAA+B;AAEzE,OAAO,SAASC,kCAAkCA,CAC9CC,yBAAkE,EAClEC,SAAkB,EAC+D;EACjF,IAAMC,sBAAsB,GAAGT,qBAAqB,CAACO,yBAAyB,CAAC;EAE/E,IAAMG,UAA6E,GAAG;IAClFC,KAAK,EAAEN,0BAA0B;IACjCO,UAAU,EAAE;MACRC,GAAG,EAAE,IAAI;MACTC,MAAM,EAAE,CACJ,QAAQ,EACR,cAAc,CACjB;MACDC,SAAS,EAAE;IACf,CAAC;IACDC,IAAI,EAAE,QAAQ;IACdC,OAAO,EAAEV,yBAAyB,CAACU,OAAO;IAC1CC,oBAAoB,EAAE,KAAK;IAC3BC,UAAU,EAAE;MACRC,EAAE,EAAE;QACAJ,IAAI,EAAE,QAAQ;QACdK,SAAS,EAAE,CAAC;QACZ;QACAC,SAAS,EAAEb,sBAAsB,GAAG;MACxC,CAAC;MACDc,YAAY,EAAE;QACVP,IAAI,EAAE,QAAQ;QACdQ,IAAI,EAAE,CACF,GAAG,EACH,GAAG,CACN;QACDH,SAAS,EAAE,CAAC;QACZC,SAAS,EAAE;MACf,CAAC;MACDG,MAAM,EAAE;QACJT,IAAI,EAAE,QAAQ;QACd;AAChB;AACA;AACA;QACgBM,SAAS,EAAEb,sBAAsB,GAAG,CAAC,GAAGA,sBAAsB,GAAG;MACrE,CAAC;MACDiB,cAAc,EAAE;QACZV,IAAI,EAAE,QAAQ;QACdE,oBAAoB,EAAE;MAC1B,CAAC;MACDS,OAAO,EAAE;QACLX,IAAI,EAAE,QAAQ;QACdG,UAAU,EAAEZ,yBAAyB,CAACY;MAC1C,CAAC;MACDS,kBAAkB,EAAE;QAChBZ,IAAI,EAAE;MACV;IACJ,CAAC;IACDa,cAAc,EAAEtB,yBAAyB,CAACsB,cAAc;IACxDC,QAAQ,EAAE,CACN,IAAI,EACJ,cAAc,EACd,QAAQ;EAEhB,CAAC;EACD,IAAItB,SAAS,EAAE;IACXE,UAAU,CAACF,SAAS,GAAG,CAAC,SAAS,CAAC;EACtC;EACA,IAAMuB,kBAAqG,GAAGjC,uBAAuB,CAACY,UAAU,CAAC;EACjJ,OAAOqB,kBAAkB;AAC7B;;AAIA;AACA;AACA;AACA;AACA,OAAO,SAASC,qBAAqBA,CACjCC,KAAmD,EACnDC,MAAgB,EAIhB;EACA,OAAOD,KAAK,CAACE,KAAK,CAACC,YAAY,CAACC,iBAAiB,CAC7CH,MAAM,CAACI,GAAG,CAACC,KAAK,IAAI;IAChB,IAAMC,KAAK,GAAGzC,mCAAmC,CAC7CkC,KAAK,CAACE,KAAK,CAACC,YAAY,CAACK,MAAM,EAC/B;MACIhB,MAAM,EAAEc,KAAK;MACbhB,YAAY,EAAE;IAClB,CACJ,CAAC;IACD,OAAOiB,KAAK;EAChB,CAAC,CAAC,EACF,IACJ,CAAC,CAACE,IAAI,CAACC,QAAQ,IAAI;IACf,IAAMC,GAKL,GAAG,CAAC,CAAC;IACNC,MAAM,CACDC,MAAM,CAACH,QAAQ,CAAC,CAChBI,OAAO,CAAEC,OAAO,IAAK;MAClBJ,GAAG,CAACI,OAAO,CAACvB,MAAM,CAAC,GAAG;QAClBE,OAAO,EAAEqB,OAAO,CAACrB,OAAO;QACxBsB,YAAY,EAAED;MAClB,CAAC;IACL,CAAC,CAAC;IAEN,OAAOJ,GAAG;EACd,CAAC,CAAC;AACN;AAGA,OAAO,eA
AeM,eAAeA,CACjCjB,KAAmD,EACnDkB,iBAAyC,EACzCC,QAAmE,EACnExB,kBAA2B,EACoC;EAC/D,IAAMW,KAAa,GAAIY,iBAAiB,CAASlB,KAAK,CAACoB,WAAW,CAAC;EACnE,IAAMC,OAAiE,GAAGF,QAAQ,GAAGnD,oBAAoB,CACrGmD,QACJ,CAAC,GAAG;IACAhC,EAAE,EAAE,EAAE;IACNG,YAAY,EAAE,GAAG;IACjBE,MAAM,EAAEc,KAAK;IACbZ,OAAO,EAAEwB,iBAAiB;IAC1BI,YAAY,EAAE,CAAC,CAAC;IAChBC,QAAQ,EAAE,KAAK;IACfC,IAAI,EAAEvD,kBAAkB,CAAC,CAAC;IAC1BwD,KAAK,EAAE;MACHC,GAAG,EAAE;IACT;EACJ,CAAC;EACDL,OAAO,CAAC3B,OAAO,GAAGwB,iBAAiB;;EAEnC;AACJ;AACA;AACA;AACA;EACI,IAAIvB,kBAAkB,EAAE;IACpB0B,OAAO,CAAC1B,kBAAkB,GAAGA,kBAAkB;EACnD;EAEA0B,OAAO,CAACI,KAAK,CAACC,GAAG,GAAGvD,GAAG,CAAC,CAAC;EACzBkD,OAAO,CAAClC,EAAE,GAAGrB,mCAAmC,CAC5CkC,KAAK,CAACE,KAAK,CAACC,YAAY,CAACK,MAAM,EAC/Ba,OACJ,CAAC;EACDA,OAAO,CAACG,IAAI,GAAGtD,cAAc,CACzB,MAAM8B,KAAK,CAAC2B,aAAa,EACzBR,QACJ,CAAC;EAED,IAAMR,GAAG,GAAG;IACRQ,QAAQ;IACRS,QAAQ,EAAEP;EACd,CAAC;EAED,OAAOV,GAAG;AACd","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/replication-protocol/upstream.js b/dist/esm/replication-protocol/upstream.js deleted file mode 100644 index cabde145ebc..00000000000 --- a/dist/esm/replication-protocol/upstream.js +++ /dev/null @@ -1,326 +0,0 @@ -import { firstValueFrom, filter } from 'rxjs'; -import { getChangedDocumentsSince, stackCheckpoints } from "../rx-storage-helper.js"; -import { appendToArray, batchArray, clone, ensureNotFalsy, getHeightOfRevision, PROMISE_RESOLVE_FALSE } from "../plugins/utils/index.js"; -import { getLastCheckpointDoc, setCheckpoint } from "./checkpoint.js"; -import { resolveConflictError } from "./conflicts.js"; -import { stripAttachmentsDataFromMetaWriteRows, writeDocToDocState } from "./helper.js"; -import { getAssumedMasterState, getMetaWriteRow } from "./meta-instance.js"; -import { fillWriteDataForAttachmentsChange } from "../plugins/attachments/index.js"; - -/** - * Writes all document changes from the fork to the master. 
- * The upstream runs on two modes: - * - For initial replication, a checkpoint-iteration is used - * - For ongoing local writes, we just subscribe to the changeStream of the fork. - * In contrast to the master, the fork can be assumed to never loose connection, - * so we do not have to prepare for missed out events. - */ -export async function startReplicationUpstream(state) { - if (state.input.initialCheckpoint && state.input.initialCheckpoint.upstream) { - var checkpointDoc = await getLastCheckpointDoc(state, 'up'); - if (!checkpointDoc) { - await setCheckpoint(state, 'up', state.input.initialCheckpoint.upstream); - } - } - var replicationHandler = state.input.replicationHandler; - state.streamQueue.up = state.streamQueue.up.then(() => { - return upstreamInitialSync().then(() => { - processTasks(); - }); - }); - - // used to detect which tasks etc can in it at which order. - var timer = 0; - var initialSyncStartTime = -1; - var openTasks = []; - var persistenceQueue = PROMISE_RESOLVE_FALSE; - var nonPersistedFromMaster = { - docs: {} - }; - var sub = state.input.forkInstance.changeStream().subscribe(async eventBulk => { - // ignore writes that came from the downstream - if (eventBulk.context === (await state.downstreamBulkWriteFlag)) { - return; - } - state.stats.up.forkChangeStreamEmit = state.stats.up.forkChangeStreamEmit + 1; - openTasks.push({ - task: eventBulk, - time: timer++ - }); - if (!state.events.active.up.getValue()) { - state.events.active.up.next(true); - } - if (state.input.waitBeforePersist) { - return state.input.waitBeforePersist().then(() => processTasks()); - } else { - return processTasks(); - } - }); - firstValueFrom(state.events.canceled.pipe(filter(canceled => !!canceled))).then(() => sub.unsubscribe()); - async function upstreamInitialSync() { - state.stats.up.upstreamInitialSync = state.stats.up.upstreamInitialSync + 1; - if (state.events.canceled.getValue()) { - return; - } - state.checkpointQueue = state.checkpointQueue.then(() => 
getLastCheckpointDoc(state, 'up')); - var lastCheckpoint = await state.checkpointQueue; - var promises = new Set(); - var _loop = async function () { - initialSyncStartTime = timer++; - - /** - * Throttle the calls to - * forkInstance.getChangedDocumentsSince() so that - * if the pushing to the remote is slower compared to the - * pulling out of forkInstance, we do not block the UI too much - * and have a big memory spike with all forkInstance documents. - */ - if (promises.size > 3) { - await Promise.race(Array.from(promises)); - } - var upResult = await getChangedDocumentsSince(state.input.forkInstance, state.input.pushBatchSize, lastCheckpoint); - if (upResult.documents.length === 0) { - return 1; // break - } - lastCheckpoint = stackCheckpoints([lastCheckpoint, upResult.checkpoint]); - var promise = persistToMaster(upResult.documents, ensureNotFalsy(lastCheckpoint)); - promises.add(promise); - promise.catch().then(() => promises.delete(promise)); - }; - while (!state.events.canceled.getValue()) { - if (await _loop()) break; - } - - /** - * If we had conflicts during the initial sync, - * it means that we likely have new writes to the fork - * and so we have to run the initial sync again to upstream these new writes. - */ - var resolvedPromises = await Promise.all(promises); - var hadConflicts = resolvedPromises.find(r => !!r); - if (hadConflicts) { - await upstreamInitialSync(); - } else if (!state.firstSyncDone.up.getValue() && !state.events.canceled.getValue()) { - state.firstSyncDone.up.next(true); - } - } - - /** - * Takes all open tasks an processes them at once. 
- */ - function processTasks() { - if (state.events.canceled.getValue() || openTasks.length === 0) { - state.events.active.up.next(false); - return; - } - state.stats.up.processTasks = state.stats.up.processTasks + 1; - state.events.active.up.next(true); - state.streamQueue.up = state.streamQueue.up.then(() => { - /** - * Merge/filter all open tasks - */ - var docs = []; - var checkpoint = {}; - while (openTasks.length > 0) { - var taskWithTime = ensureNotFalsy(openTasks.shift()); - /** - * If the task came in before the last time the initial sync fetching - * has run, we can ignore the task because the initial sync already processed - * these documents. - */ - if (taskWithTime.time < initialSyncStartTime) { - continue; - } - appendToArray(docs, taskWithTime.task.events.map(r => { - return r.documentData; - })); - checkpoint = stackCheckpoints([checkpoint, taskWithTime.task.checkpoint]); - } - var promise = docs.length === 0 ? PROMISE_RESOLVE_FALSE : persistToMaster(docs, checkpoint); - return promise.then(() => { - if (openTasks.length === 0) { - state.events.active.up.next(false); - } else { - processTasks(); - } - }); - }); - } - - /** - * Returns true if had conflicts, - * false if not. 
- */ - function persistToMaster(docs, checkpoint) { - state.stats.up.persistToMaster = state.stats.up.persistToMaster + 1; - - /** - * Add the new docs to the non-persistent list - */ - docs.forEach(docData => { - var docId = docData[state.primaryPath]; - nonPersistedFromMaster.docs[docId] = docData; - }); - nonPersistedFromMaster.checkpoint = checkpoint; - persistenceQueue = persistenceQueue.then(async () => { - if (state.events.canceled.getValue()) { - return false; - } - var upDocsById = nonPersistedFromMaster.docs; - nonPersistedFromMaster.docs = {}; - var useCheckpoint = nonPersistedFromMaster.checkpoint; - var docIds = Object.keys(upDocsById); - if (docIds.length === 0) { - return false; - } - var assumedMasterState = await getAssumedMasterState(state, docIds); - var writeRowsToMaster = {}; - var writeRowsToMasterIds = []; - var writeRowsToMeta = {}; - var forkStateById = {}; - await Promise.all(docIds.map(async docId => { - var fullDocData = upDocsById[docId]; - forkStateById[docId] = fullDocData; - var docData = writeDocToDocState(fullDocData, state.hasAttachments, !!state.input.keepMeta); - var assumedMasterDoc = assumedMasterState[docId]; - - /** - * If the master state is equal to the - * fork state, we can assume that the document state is already - * replicated. - */ - if (assumedMasterDoc && - // if the isResolvedConflict is correct, we do not have to compare the documents. - assumedMasterDoc.metaDocument.isResolvedConflict !== fullDocData._rev && (await state.input.conflictHandler({ - realMasterState: assumedMasterDoc.docData, - newDocumentState: docData - }, 'upstream-check-if-equal')).isEqual || ( - /** - * If the master works with _rev fields, - * we use that to check if our current doc state - * is different from the assumedMasterDoc. 
- */ - - assumedMasterDoc && assumedMasterDoc.docData._rev && getHeightOfRevision(fullDocData._rev) === fullDocData._meta[state.input.identifier])) { - return; - } - writeRowsToMasterIds.push(docId); - writeRowsToMaster[docId] = { - assumedMasterState: assumedMasterDoc ? assumedMasterDoc.docData : undefined, - newDocumentState: docData - }; - writeRowsToMeta[docId] = await getMetaWriteRow(state, docData, assumedMasterDoc ? assumedMasterDoc.metaDocument : undefined); - })); - if (writeRowsToMasterIds.length === 0) { - return false; - } - var writeRowsArray = Object.values(writeRowsToMaster); - var conflictIds = new Set(); - var conflictsById = {}; - - /** - * To always respect the push.batchSize, - * we have to split the write rows into batches - * to ensure that replicationHandler.masterWrite() is never - * called with more documents than what the batchSize limits. - */ - var writeBatches = batchArray(writeRowsArray, state.input.pushBatchSize); - await Promise.all(writeBatches.map(async writeBatch => { - // enhance docs with attachments - if (state.hasAttachments) { - await Promise.all(writeBatch.map(async row => { - row.newDocumentState = await fillWriteDataForAttachmentsChange(state.primaryPath, state.input.forkInstance, clone(row.newDocumentState), row.assumedMasterState); - })); - } - var masterWriteResult = await replicationHandler.masterWrite(writeBatch); - masterWriteResult.forEach(conflictDoc => { - var id = conflictDoc[state.primaryPath]; - conflictIds.add(id); - conflictsById[id] = conflictDoc; - }); - })); - var useWriteRowsToMeta = []; - writeRowsToMasterIds.forEach(docId => { - if (!conflictIds.has(docId)) { - state.events.processed.up.next(writeRowsToMaster[docId]); - useWriteRowsToMeta.push(writeRowsToMeta[docId]); - } - }); - if (state.events.canceled.getValue()) { - return false; - } - if (useWriteRowsToMeta.length > 0) { - await state.input.metaInstance.bulkWrite(stripAttachmentsDataFromMetaWriteRows(state, useWriteRowsToMeta), 
'replication-up-write-meta'); - // TODO what happens when we have conflicts here? - } - - /** - * Resolve conflicts by writing a new document - * state to the fork instance and the 'real' master state - * to the meta instance. - * Non-409 errors will be detected by resolveConflictError() - */ - var hadConflictWrites = false; - if (conflictIds.size > 0) { - state.stats.up.persistToMasterHadConflicts = state.stats.up.persistToMasterHadConflicts + 1; - var conflictWriteFork = []; - var conflictWriteMeta = {}; - await Promise.all(Object.entries(conflictsById).map(([docId, realMasterState]) => { - var writeToMasterRow = writeRowsToMaster[docId]; - var input = { - newDocumentState: writeToMasterRow.newDocumentState, - assumedMasterState: writeToMasterRow.assumedMasterState, - realMasterState - }; - return resolveConflictError(state, input, forkStateById[docId]).then(async resolved => { - if (resolved) { - state.events.resolvedConflicts.next({ - input, - output: resolved.output - }); - conflictWriteFork.push({ - previous: forkStateById[docId], - document: resolved.resolvedDoc - }); - var assumedMasterDoc = assumedMasterState[docId]; - conflictWriteMeta[docId] = await getMetaWriteRow(state, ensureNotFalsy(realMasterState), assumedMasterDoc ? assumedMasterDoc.metaDocument : undefined, resolved.resolvedDoc._rev); - } - }); - })); - if (conflictWriteFork.length > 0) { - hadConflictWrites = true; - state.stats.up.persistToMasterConflictWrites = state.stats.up.persistToMasterConflictWrites + 1; - var forkWriteResult = await state.input.forkInstance.bulkWrite(conflictWriteFork, 'replication-up-write-conflict'); - /** - * Errors in the forkWriteResult must not be handled - * because they have been caused by a write to the forkInstance - * in between which will anyway trigger a new upstream cycle - * that will then resolved the conflict again. 
- */ - var useMetaWrites = []; - forkWriteResult.success.forEach(docData => { - var docId = docData[state.primaryPath]; - useMetaWrites.push(conflictWriteMeta[docId]); - }); - if (useMetaWrites.length > 0) { - await state.input.metaInstance.bulkWrite(stripAttachmentsDataFromMetaWriteRows(state, useMetaWrites), 'replication-up-write-conflict-meta'); - } - // TODO what to do with conflicts while writing to the metaInstance? - } - } - - /** - * For better performance we do not await checkpoint writes, - * but to ensure order on parallel checkpoint writes, - * we have to use a queue. - */ - setCheckpoint(state, 'up', useCheckpoint); - return hadConflictWrites; - }).catch(unhandledError => { - state.events.error.next(unhandledError); - return false; - }); - return persistenceQueue; - } -} -//# sourceMappingURL=upstream.js.map \ No newline at end of file diff --git a/dist/esm/replication-protocol/upstream.js.map b/dist/esm/replication-protocol/upstream.js.map deleted file mode 100644 index b05c67ee771..00000000000 --- a/dist/esm/replication-protocol/upstream.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"upstream.js","names":["firstValueFrom","filter","getChangedDocumentsSince","stackCheckpoints","appendToArray","batchArray","clone","ensureNotFalsy","getHeightOfRevision","PROMISE_RESOLVE_FALSE","getLastCheckpointDoc","setCheckpoint","resolveConflictError","stripAttachmentsDataFromMetaWriteRows","writeDocToDocState","getAssumedMasterState","getMetaWriteRow","fillWriteDataForAttachmentsChange","startReplicationUpstream","state","input","initialCheckpoint","upstream","checkpointDoc","replicationHandler","streamQueue","up","then","upstreamInitialSync","processTasks","timer","initialSyncStartTime","openTasks","persistenceQueue","nonPersistedFromMaster","docs","sub","forkInstance","changeStream","subscribe","eventBulk","context","downstreamBulkWriteFlag","stats","forkChangeStreamEmit","push","task","time","events","active","getValue","next","waitBeforePersist","canceled","pipe","unsubscribe","checkpointQueue","lastCheckpoint","promises","Set","_loop","size","Promise","race","Array","from","upResult","pushBatchSize","documents","length","checkpoint","promise","persistToMaster","add","catch","delete","resolvedPromises","all","hadConflicts","find","r","firstSyncDone","taskWithTime","shift","map","documentData","forEach","docData","docId","primaryPath","upDocsById","useCheckpoint","docIds","Object","keys","assumedMasterState","writeRowsToMaster","writeRowsToMasterIds","writeRowsToMeta","forkStateById","fullDocData","hasAttachments","keepMeta","assumedMasterDoc","metaDocument","isResolvedConflict","_rev","conflictHandler","realMasterState","newDocumentState","isEqual","_meta","identifier","undefined","writeRowsArray","values","conflictIds","conflictsById","writeBatches","writeBatch","row","masterWriteResult","masterWrite","conflictDoc","id","useWriteRowsToMeta","has","processed","metaInstance","bulkWrite","hadConflictWrites","persistToMasterHadConflicts","conflictWriteFork","conflictWriteMeta","entries","writeToMasterRow","resolved","resolvedConflicts","o
utput","previous","document","resolvedDoc","persistToMasterConflictWrites","forkWriteResult","useMetaWrites","success","unhandledError","error"],"sources":["../../../src/replication-protocol/upstream.ts"],"sourcesContent":["import { firstValueFrom, filter } from 'rxjs';\nimport {\n getChangedDocumentsSince,\n stackCheckpoints\n} from '../rx-storage-helper.ts';\nimport type {\n BulkWriteRow,\n BulkWriteRowById,\n ById,\n EventBulk,\n RxDocumentData,\n RxReplicationWriteToMasterRow,\n RxStorageChangeEvent,\n RxStorageInstanceReplicationState,\n RxStorageReplicationMeta,\n WithDeleted\n} from '../types/index.d.ts';\nimport {\n appendToArray,\n batchArray,\n clone,\n ensureNotFalsy,\n getHeightOfRevision,\n PROMISE_RESOLVE_FALSE\n} from '../plugins/utils/index.ts';\nimport {\n getLastCheckpointDoc,\n setCheckpoint\n} from './checkpoint.ts';\nimport {\n resolveConflictError\n} from './conflicts.ts';\nimport {\n stripAttachmentsDataFromMetaWriteRows,\n writeDocToDocState\n} from './helper.ts';\nimport {\n getAssumedMasterState,\n getMetaWriteRow\n} from './meta-instance.ts';\nimport { fillWriteDataForAttachmentsChange } from '../plugins/attachments/index.ts';\n\n/**\n * Writes all document changes from the fork to the master.\n * The upstream runs on two modes:\n * - For initial replication, a checkpoint-iteration is used\n * - For ongoing local writes, we just subscribe to the changeStream of the fork.\n * In contrast to the master, the fork can be assumed to never loose connection,\n * so we do not have to prepare for missed out events.\n */\nexport async function startReplicationUpstream(\n state: RxStorageInstanceReplicationState\n) {\n if (\n state.input.initialCheckpoint &&\n state.input.initialCheckpoint.upstream\n ) {\n const checkpointDoc = await getLastCheckpointDoc(state, 'up');\n if (!checkpointDoc) {\n await setCheckpoint(\n state,\n 'up',\n state.input.initialCheckpoint.upstream\n );\n }\n }\n\n const replicationHandler = state.input.replicationHandler;\n 
state.streamQueue.up = state.streamQueue.up.then(() => {\n return upstreamInitialSync().then(() => {\n processTasks();\n });\n });\n\n // used to detect which tasks etc can in it at which order.\n let timer = 0;\n let initialSyncStartTime = -1;\n\n type Task = EventBulk, any>;\n type TaskWithTime = {\n task: Task;\n time: number;\n };\n const openTasks: TaskWithTime[] = [];\n let persistenceQueue: Promise = PROMISE_RESOLVE_FALSE;\n const nonPersistedFromMaster: {\n checkpoint?: CheckpointType;\n docs: ById>;\n } = {\n docs: {}\n };\n\n const sub = state.input.forkInstance.changeStream()\n .subscribe(async (eventBulk) => {\n // ignore writes that came from the downstream\n if (eventBulk.context === await state.downstreamBulkWriteFlag) {\n return;\n }\n\n state.stats.up.forkChangeStreamEmit = state.stats.up.forkChangeStreamEmit + 1;\n openTasks.push({\n task: eventBulk,\n time: timer++\n });\n if (!state.events.active.up.getValue()) {\n state.events.active.up.next(true);\n }\n if (state.input.waitBeforePersist) {\n return state.input.waitBeforePersist()\n .then(() => processTasks());\n } else {\n return processTasks();\n }\n });\n firstValueFrom(\n state.events.canceled.pipe(\n filter(canceled => !!canceled)\n )\n ).then(() => sub.unsubscribe());\n\n\n async function upstreamInitialSync() {\n state.stats.up.upstreamInitialSync = state.stats.up.upstreamInitialSync + 1;\n if (state.events.canceled.getValue()) {\n return;\n }\n\n state.checkpointQueue = state.checkpointQueue.then(() => getLastCheckpointDoc(state, 'up'));\n let lastCheckpoint: CheckpointType = await state.checkpointQueue;\n\n const promises: Set> = new Set();\n\n while (!state.events.canceled.getValue()) {\n initialSyncStartTime = timer++;\n\n /**\n * Throttle the calls to\n * forkInstance.getChangedDocumentsSince() so that\n * if the pushing to the remote is slower compared to the\n * pulling out of forkInstance, we do not block the UI too much\n * and have a big memory spike with all forkInstance 
documents.\n */\n if (promises.size > 3) {\n await Promise.race(Array.from(promises));\n }\n\n const upResult = await getChangedDocumentsSince(\n state.input.forkInstance,\n state.input.pushBatchSize,\n lastCheckpoint\n );\n if (upResult.documents.length === 0) {\n break;\n }\n\n lastCheckpoint = stackCheckpoints([lastCheckpoint, upResult.checkpoint]);\n\n const promise = persistToMaster(\n upResult.documents,\n ensureNotFalsy(lastCheckpoint)\n );\n promises.add(promise);\n promise.catch().then(() => promises.delete(promise));\n }\n\n /**\n * If we had conflicts during the initial sync,\n * it means that we likely have new writes to the fork\n * and so we have to run the initial sync again to upstream these new writes.\n */\n const resolvedPromises = await Promise.all(promises);\n const hadConflicts = resolvedPromises.find(r => !!r);\n if (hadConflicts) {\n await upstreamInitialSync();\n } else if (\n !state.firstSyncDone.up.getValue() &&\n !state.events.canceled.getValue()\n ) {\n state.firstSyncDone.up.next(true);\n }\n }\n\n\n /**\n * Takes all open tasks an processes them at once.\n */\n function processTasks() {\n if (\n state.events.canceled.getValue() ||\n openTasks.length === 0\n ) {\n state.events.active.up.next(false);\n return;\n }\n state.stats.up.processTasks = state.stats.up.processTasks + 1;\n state.events.active.up.next(true);\n state.streamQueue.up = state.streamQueue.up.then(() => {\n /**\n * Merge/filter all open tasks\n */\n const docs: RxDocumentData[] = [];\n let checkpoint: CheckpointType = {} as any;\n while (openTasks.length > 0) {\n const taskWithTime = ensureNotFalsy(openTasks.shift());\n /**\n * If the task came in before the last time the initial sync fetching\n * has run, we can ignore the task because the initial sync already processed\n * these documents.\n */\n if (taskWithTime.time < initialSyncStartTime) {\n continue;\n }\n appendToArray(\n docs,\n taskWithTime.task.events.map(r => {\n return r.documentData as any;\n })\n );\n 
checkpoint = stackCheckpoints([checkpoint, taskWithTime.task.checkpoint]);\n }\n\n const promise = docs.length === 0 ? PROMISE_RESOLVE_FALSE : persistToMaster(\n docs,\n checkpoint\n );\n return promise.then(() => {\n if (openTasks.length === 0) {\n state.events.active.up.next(false);\n } else {\n processTasks();\n }\n });\n });\n }\n\n /**\n * Returns true if had conflicts,\n * false if not.\n */\n function persistToMaster(\n docs: RxDocumentData[],\n checkpoint: CheckpointType\n ): Promise {\n state.stats.up.persistToMaster = state.stats.up.persistToMaster + 1;\n\n /**\n * Add the new docs to the non-persistent list\n */\n docs.forEach(docData => {\n const docId: string = (docData as any)[state.primaryPath];\n nonPersistedFromMaster.docs[docId] = docData;\n });\n nonPersistedFromMaster.checkpoint = checkpoint;\n\n persistenceQueue = persistenceQueue.then(async () => {\n if (state.events.canceled.getValue()) {\n return false;\n }\n\n const upDocsById: ById> = nonPersistedFromMaster.docs;\n nonPersistedFromMaster.docs = {};\n const useCheckpoint = nonPersistedFromMaster.checkpoint;\n const docIds = Object.keys(upDocsById);\n if (docIds.length === 0) {\n return false;\n }\n\n const assumedMasterState = await getAssumedMasterState(\n state,\n docIds\n );\n\n const writeRowsToMaster: ById> = {};\n const writeRowsToMasterIds: string[] = [];\n const writeRowsToMeta: BulkWriteRowById> = {};\n const forkStateById: ById> = {};\n\n await Promise.all(\n docIds.map(async (docId) => {\n const fullDocData: RxDocumentData = upDocsById[docId];\n forkStateById[docId] = fullDocData;\n const docData: WithDeleted = writeDocToDocState(fullDocData, state.hasAttachments, !!state.input.keepMeta);\n const assumedMasterDoc = assumedMasterState[docId];\n\n /**\n * If the master state is equal to the\n * fork state, we can assume that the document state is already\n * replicated.\n */\n if (\n (\n assumedMasterDoc &&\n // if the isResolvedConflict is correct, we do not have to compare the 
documents.\n assumedMasterDoc.metaDocument.isResolvedConflict !== fullDocData._rev\n &&\n (await state.input.conflictHandler({\n realMasterState: assumedMasterDoc.docData,\n newDocumentState: docData\n }, 'upstream-check-if-equal')).isEqual\n )\n ||\n /**\n * If the master works with _rev fields,\n * we use that to check if our current doc state\n * is different from the assumedMasterDoc.\n */\n (\n assumedMasterDoc &&\n (assumedMasterDoc.docData as any)._rev &&\n getHeightOfRevision(fullDocData._rev) === fullDocData._meta[state.input.identifier]\n )\n ) {\n return;\n }\n\n writeRowsToMasterIds.push(docId);\n\n writeRowsToMaster[docId] = {\n assumedMasterState: assumedMasterDoc ? assumedMasterDoc.docData : undefined,\n newDocumentState: docData\n };\n writeRowsToMeta[docId] = await getMetaWriteRow(\n state,\n docData,\n assumedMasterDoc ? assumedMasterDoc.metaDocument : undefined\n );\n })\n );\n\n if (writeRowsToMasterIds.length === 0) {\n return false;\n }\n\n\n const writeRowsArray = Object.values(writeRowsToMaster);\n const conflictIds: Set = new Set();\n const conflictsById: ById> = {};\n\n /**\n * To always respect the push.batchSize,\n * we have to split the write rows into batches\n * to ensure that replicationHandler.masterWrite() is never\n * called with more documents than what the batchSize limits.\n */\n const writeBatches = batchArray(writeRowsArray, state.input.pushBatchSize);\n await Promise.all(\n writeBatches.map(async (writeBatch) => {\n\n // enhance docs with attachments\n if (state.hasAttachments) {\n await Promise.all(\n writeBatch.map(async (row) => {\n row.newDocumentState = await fillWriteDataForAttachmentsChange(\n state.primaryPath,\n state.input.forkInstance,\n clone(row.newDocumentState),\n row.assumedMasterState\n );\n })\n );\n }\n const masterWriteResult = await replicationHandler.masterWrite(writeBatch);\n masterWriteResult.forEach(conflictDoc => {\n const id = (conflictDoc as any)[state.primaryPath];\n conflictIds.add(id);\n 
conflictsById[id] = conflictDoc;\n });\n })\n );\n\n const useWriteRowsToMeta: BulkWriteRow>[] = [];\n\n writeRowsToMasterIds.forEach(docId => {\n if (!conflictIds.has(docId)) {\n state.events.processed.up.next(writeRowsToMaster[docId]);\n useWriteRowsToMeta.push(writeRowsToMeta[docId]);\n }\n });\n\n if (state.events.canceled.getValue()) {\n return false;\n }\n\n if (useWriteRowsToMeta.length > 0) {\n await state.input.metaInstance.bulkWrite(\n stripAttachmentsDataFromMetaWriteRows(state, useWriteRowsToMeta),\n 'replication-up-write-meta'\n );\n // TODO what happens when we have conflicts here?\n }\n\n /**\n * Resolve conflicts by writing a new document\n * state to the fork instance and the 'real' master state\n * to the meta instance.\n * Non-409 errors will be detected by resolveConflictError()\n */\n let hadConflictWrites = false;\n if (conflictIds.size > 0) {\n state.stats.up.persistToMasterHadConflicts = state.stats.up.persistToMasterHadConflicts + 1;\n const conflictWriteFork: BulkWriteRow[] = [];\n const conflictWriteMeta: BulkWriteRowById> = {};\n await Promise.all(\n Object\n .entries(conflictsById)\n .map(([docId, realMasterState]) => {\n const writeToMasterRow = writeRowsToMaster[docId];\n const input = {\n newDocumentState: writeToMasterRow.newDocumentState,\n assumedMasterState: writeToMasterRow.assumedMasterState,\n realMasterState\n };\n return resolveConflictError(\n state,\n input,\n forkStateById[docId]\n ).then(async (resolved) => {\n if (resolved) {\n state.events.resolvedConflicts.next({\n input,\n output: resolved.output\n });\n conflictWriteFork.push({\n previous: forkStateById[docId],\n document: resolved.resolvedDoc\n });\n const assumedMasterDoc = assumedMasterState[docId];\n conflictWriteMeta[docId] = await getMetaWriteRow(\n state,\n ensureNotFalsy(realMasterState),\n assumedMasterDoc ? 
assumedMasterDoc.metaDocument : undefined,\n resolved.resolvedDoc._rev\n );\n }\n });\n })\n );\n\n if (conflictWriteFork.length > 0) {\n hadConflictWrites = true;\n\n state.stats.up.persistToMasterConflictWrites = state.stats.up.persistToMasterConflictWrites + 1;\n const forkWriteResult = await state.input.forkInstance.bulkWrite(\n conflictWriteFork,\n 'replication-up-write-conflict'\n );\n /**\n * Errors in the forkWriteResult must not be handled\n * because they have been caused by a write to the forkInstance\n * in between which will anyway trigger a new upstream cycle\n * that will then resolved the conflict again.\n */\n const useMetaWrites: BulkWriteRow>[] = [];\n forkWriteResult.success\n .forEach(docData => {\n const docId = (docData as any)[state.primaryPath];\n useMetaWrites.push(\n conflictWriteMeta[docId]\n );\n });\n if (useMetaWrites.length > 0) {\n await state.input.metaInstance.bulkWrite(\n stripAttachmentsDataFromMetaWriteRows(state, useMetaWrites),\n 'replication-up-write-conflict-meta'\n );\n }\n // TODO what to do with conflicts while writing to the metaInstance?\n }\n }\n\n /**\n * For better performance we do not await checkpoint writes,\n * but to ensure order on parallel checkpoint writes,\n * we have to use a queue.\n */\n setCheckpoint(\n state,\n 'up',\n useCheckpoint\n );\n\n return hadConflictWrites;\n }).catch(unhandledError => {\n state.events.error.next(unhandledError);\n return false;\n });\n\n return persistenceQueue;\n 
}\n}\n"],"mappings":"AAAA,SAASA,cAAc,EAAEC,MAAM,QAAQ,MAAM;AAC7C,SACIC,wBAAwB,EACxBC,gBAAgB,QACb,yBAAyB;AAahC,SACIC,aAAa,EACbC,UAAU,EACVC,KAAK,EACLC,cAAc,EACdC,mBAAmB,EACnBC,qBAAqB,QAClB,2BAA2B;AAClC,SACIC,oBAAoB,EACpBC,aAAa,QACV,iBAAiB;AACxB,SACIC,oBAAoB,QACjB,gBAAgB;AACvB,SACIC,qCAAqC,EACrCC,kBAAkB,QACf,aAAa;AACpB,SACIC,qBAAqB,EACrBC,eAAe,QACZ,oBAAoB;AAC3B,SAASC,iCAAiC,QAAQ,iCAAiC;;AAEnF;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,eAAeC,wBAAwBA,CAC1CC,KAAmD,EACrD;EACE,IACIA,KAAK,CAACC,KAAK,CAACC,iBAAiB,IAC7BF,KAAK,CAACC,KAAK,CAACC,iBAAiB,CAACC,QAAQ,EACxC;IACE,IAAMC,aAAa,GAAG,MAAMb,oBAAoB,CAACS,KAAK,EAAE,IAAI,CAAC;IAC7D,IAAI,CAACI,aAAa,EAAE;MAChB,MAAMZ,aAAa,CACfQ,KAAK,EACL,IAAI,EACJA,KAAK,CAACC,KAAK,CAACC,iBAAiB,CAACC,QAClC,CAAC;IACL;EACJ;EAEA,IAAME,kBAAkB,GAAGL,KAAK,CAACC,KAAK,CAACI,kBAAkB;EACzDL,KAAK,CAACM,WAAW,CAACC,EAAE,GAAGP,KAAK,CAACM,WAAW,CAACC,EAAE,CAACC,IAAI,CAAC,MAAM;IACnD,OAAOC,mBAAmB,CAAC,CAAC,CAACD,IAAI,CAAC,MAAM;MACpCE,YAAY,CAAC,CAAC;IAClB,CAAC,CAAC;EACN,CAAC,CAAC;;EAEF;EACA,IAAIC,KAAK,GAAG,CAAC;EACb,IAAIC,oBAAoB,GAAG,CAAC,CAAC;EAO7B,IAAMC,SAAyB,GAAG,EAAE;EACpC,IAAIC,gBAAkC,GAAGxB,qBAAqB;EAC9D,IAAMyB,sBAGL,GAAG;IACAC,IAAI,EAAE,CAAC;EACX,CAAC;EAED,IAAMC,GAAG,GAAGjB,KAAK,CAACC,KAAK,CAACiB,YAAY,CAACC,YAAY,CAAC,CAAC,CAC9CC,SAAS,CAAC,MAAOC,SAAS,IAAK;IAC5B;IACA,IAAIA,SAAS,CAACC,OAAO,MAAK,MAAMtB,KAAK,CAACuB,uBAAuB,GAAE;MAC3D;IACJ;IAEAvB,KAAK,CAACwB,KAAK,CAACjB,EAAE,CAACkB,oBAAoB,GAAGzB,KAAK,CAACwB,KAAK,CAACjB,EAAE,CAACkB,oBAAoB,GAAG,CAAC;IAC7EZ,SAAS,CAACa,IAAI,CAAC;MACXC,IAAI,EAAEN,SAAS;MACfO,IAAI,EAAEjB,KAAK;IACf,CAAC,CAAC;IACF,IAAI,CAACX,KAAK,CAAC6B,MAAM,CAACC,MAAM,CAACvB,EAAE,CAACwB,QAAQ,CAAC,CAAC,EAAE;MACpC/B,KAAK,CAAC6B,MAAM,CAACC,MAAM,CAACvB,EAAE,CAACyB,IAAI,CAAC,IAAI,CAAC;IACrC;IACA,IAAIhC,KAAK,CAACC,KAAK,CAACgC,iBAAiB,EAAE;MAC/B,OAAOjC,KAAK,CAACC,KAAK,CAACgC,iBAAiB,CAAC,CAAC,CACjCzB,IAAI,CAAC,MAAME,YAAY,CAAC,CAAC,CAAC;IACnC,CAAC,MAAM;MACH,OAAOA,YAAY,CAAC,CAAC;IACzB;EACJ,CAAC,CAAC;EACN7B,cAAc,CACVmB,KAAK,CAAC6B,MAAM,CAACK,QAAQ,CAACC,IAAI,CACtBrD,MAAM,
CAACoD,QAAQ,IAAI,CAAC,CAACA,QAAQ,CACjC,CACJ,CAAC,CAAC1B,IAAI,CAAC,MAAMS,GAAG,CAACmB,WAAW,CAAC,CAAC,CAAC;EAG/B,eAAe3B,mBAAmBA,CAAA,EAAG;IACjCT,KAAK,CAACwB,KAAK,CAACjB,EAAE,CAACE,mBAAmB,GAAGT,KAAK,CAACwB,KAAK,CAACjB,EAAE,CAACE,mBAAmB,GAAG,CAAC;IAC3E,IAAIT,KAAK,CAAC6B,MAAM,CAACK,QAAQ,CAACH,QAAQ,CAAC,CAAC,EAAE;MAClC;IACJ;IAEA/B,KAAK,CAACqC,eAAe,GAAGrC,KAAK,CAACqC,eAAe,CAAC7B,IAAI,CAAC,MAAMjB,oBAAoB,CAACS,KAAK,EAAE,IAAI,CAAC,CAAC;IAC3F,IAAIsC,cAA8B,GAAG,MAAMtC,KAAK,CAACqC,eAAe;IAEhE,IAAME,QAA2B,GAAG,IAAIC,GAAG,CAAC,CAAC;IAAC,IAAAC,KAAA,kBAAAA,CAAA,EAEJ;MACtC7B,oBAAoB,GAAGD,KAAK,EAAE;;MAE9B;AACZ;AACA;AACA;AACA;AACA;AACA;MACY,IAAI4B,QAAQ,CAACG,IAAI,GAAG,CAAC,EAAE;QACnB,MAAMC,OAAO,CAACC,IAAI,CAACC,KAAK,CAACC,IAAI,CAACP,QAAQ,CAAC,CAAC;MAC5C;MAEA,IAAMQ,QAAQ,GAAG,MAAMhE,wBAAwB,CAC3CiB,KAAK,CAACC,KAAK,CAACiB,YAAY,EACxBlB,KAAK,CAACC,KAAK,CAAC+C,aAAa,EACzBV,cACJ,CAAC;MACD,IAAIS,QAAQ,CAACE,SAAS,CAACC,MAAM,KAAK,CAAC,EAAE;QAAA;MAErC;MAEAZ,cAAc,GAAGtD,gBAAgB,CAAC,CAACsD,cAAc,EAAES,QAAQ,CAACI,UAAU,CAAC,CAAC;MAExE,IAAMC,OAAO,GAAGC,eAAe,CAC3BN,QAAQ,CAACE,SAAS,EAClB7D,cAAc,CAACkD,cAAc,CACjC,CAAC;MACDC,QAAQ,CAACe,GAAG,CAACF,OAAO,CAAC;MACrBA,OAAO,CAACG,KAAK,CAAC,CAAC,CAAC/C,IAAI,CAAC,MAAM+B,QAAQ,CAACiB,MAAM,CAACJ,OAAO,CAAC,CAAC;IACxD,CAAC;IA/BD,OAAO,CAACpD,KAAK,CAAC6B,MAAM,CAACK,QAAQ,CAACH,QAAQ,CAAC,CAAC;MAAA,UAAAU,KAAA,IAoBhC;IAAM;;IAad;AACR;AACA;AACA;AACA;IACQ,IAAMgB,gBAAgB,GAAG,MAAMd,OAAO,CAACe,GAAG,CAACnB,QAAQ,CAAC;IACpD,IAAMoB,YAAY,GAAGF,gBAAgB,CAACG,IAAI,CAACC,CAAC,IAAI,CAAC,CAACA,CAAC,CAAC;IACpD,IAAIF,YAAY,EAAE;MACd,MAAMlD,mBAAmB,CAAC,CAAC;IAC/B,CAAC,MAAM,IACH,CAACT,KAAK,CAAC8D,aAAa,CAACvD,EAAE,CAACwB,QAAQ,CAAC,CAAC,IAClC,CAAC/B,KAAK,CAAC6B,MAAM,CAACK,QAAQ,CAACH,QAAQ,CAAC,CAAC,EACnC;MACE/B,KAAK,CAAC8D,aAAa,CAACvD,EAAE,CAACyB,IAAI,CAAC,IAAI,CAAC;IACrC;EACJ;;EAGA;AACJ;AACA;EACI,SAAStB,YAAYA,CAAA,EAAG;IACpB,IACIV,KAAK,CAAC6B,MAAM,CAACK,QAAQ,CAACH,QAAQ,CAAC,CAAC,IAChClB,SAAS,CAACqC,MAAM,KAAK,CAAC,EACxB;MACElD,KAAK,CAAC6B,MAAM,CAACC,MAAM,CAACvB,EAAE,CAACyB,IAAI,CAAC,KAAK,CAAC;MAClC;IACJ;
IACAhC,KAAK,CAACwB,KAAK,CAACjB,EAAE,CAACG,YAAY,GAAGV,KAAK,CAACwB,KAAK,CAACjB,EAAE,CAACG,YAAY,GAAG,CAAC;IAC7DV,KAAK,CAAC6B,MAAM,CAACC,MAAM,CAACvB,EAAE,CAACyB,IAAI,CAAC,IAAI,CAAC;IACjChC,KAAK,CAACM,WAAW,CAACC,EAAE,GAAGP,KAAK,CAACM,WAAW,CAACC,EAAE,CAACC,IAAI,CAAC,MAAM;MACnD;AACZ;AACA;MACY,IAAMQ,IAAiC,GAAG,EAAE;MAC5C,IAAImC,UAA0B,GAAG,CAAC,CAAQ;MAC1C,OAAOtC,SAAS,CAACqC,MAAM,GAAG,CAAC,EAAE;QACzB,IAAMa,YAAY,GAAG3E,cAAc,CAACyB,SAAS,CAACmD,KAAK,CAAC,CAAC,CAAC;QACtD;AAChB;AACA;AACA;AACA;QACgB,IAAID,YAAY,CAACnC,IAAI,GAAGhB,oBAAoB,EAAE;UAC1C;QACJ;QACA3B,aAAa,CACT+B,IAAI,EACJ+C,YAAY,CAACpC,IAAI,CAACE,MAAM,CAACoC,GAAG,CAACJ,CAAC,IAAI;UAC9B,OAAOA,CAAC,CAACK,YAAY;QACzB,CAAC,CACL,CAAC;QACDf,UAAU,GAAGnE,gBAAgB,CAAC,CAACmE,UAAU,EAAEY,YAAY,CAACpC,IAAI,CAACwB,UAAU,CAAC,CAAC;MAC7E;MAEA,IAAMC,OAAO,GAAGpC,IAAI,CAACkC,MAAM,KAAK,CAAC,GAAG5D,qBAAqB,GAAG+D,eAAe,CACvErC,IAAI,EACJmC,UACJ,CAAC;MACD,OAAOC,OAAO,CAAC5C,IAAI,CAAC,MAAM;QACtB,IAAIK,SAAS,CAACqC,MAAM,KAAK,CAAC,EAAE;UACxBlD,KAAK,CAAC6B,MAAM,CAACC,MAAM,CAACvB,EAAE,CAACyB,IAAI,CAAC,KAAK,CAAC;QACtC,CAAC,MAAM;UACHtB,YAAY,CAAC,CAAC;QAClB;MACJ,CAAC,CAAC;IACN,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA;EACI,SAAS2C,eAAeA,CACpBrC,IAAiC,EACjCmC,UAA0B,EACV;IAChBnD,KAAK,CAACwB,KAAK,CAACjB,EAAE,CAAC8C,eAAe,GAAGrD,KAAK,CAACwB,KAAK,CAACjB,EAAE,CAAC8C,eAAe,GAAG,CAAC;;IAEnE;AACR;AACA;IACQrC,IAAI,CAACmD,OAAO,CAACC,OAAO,IAAI;MACpB,IAAMC,KAAa,GAAID,OAAO,CAASpE,KAAK,CAACsE,WAAW,CAAC;MACzDvD,sBAAsB,CAACC,IAAI,CAACqD,KAAK,CAAC,GAAGD,OAAO;IAChD,CAAC,CAAC;IACFrD,sBAAsB,CAACoC,UAAU,GAAGA,UAAU;IAE9CrC,gBAAgB,GAAGA,gBAAgB,CAACN,IAAI,CAAC,YAAY;MACjD,IAAIR,KAAK,CAAC6B,MAAM,CAACK,QAAQ,CAACH,QAAQ,CAAC,CAAC,EAAE;QAClC,OAAO,KAAK;MAChB;MAEA,IAAMwC,UAA2C,GAAGxD,sBAAsB,CAACC,IAAI;MAC/ED,sBAAsB,CAACC,IAAI,GAAG,CAAC,CAAC;MAChC,IAAMwD,aAAa,GAAGzD,sBAAsB,CAACoC,UAAU;MACvD,IAAMsB,MAAM,GAAGC,MAAM,CAACC,IAAI,CAACJ,UAAU,CAAC;MACtC,IAAIE,MAAM,CAACvB,MAAM,KAAK,CAAC,EAAE;QACrB,OAAO,KAAK;MAChB;MAEA,IAAM0B,kBAAkB,GAAG,MAAMhF,qBAAqB,CAClDI,KAAK,EACLyE,MACJ,CAAC;MAED,IAAMI,iBAAiE,GAAG,CAAC,CAAC;M
AC5E,IAAMC,oBAA8B,GAAG,EAAE;MACzC,IAAMC,eAA2E,GAAG,CAAC,CAAC;MACtF,IAAMC,aAA8C,GAAG,CAAC,CAAC;MAEzD,MAAMrC,OAAO,CAACe,GAAG,CACbe,MAAM,CAACR,GAAG,CAAC,MAAOI,KAAK,IAAK;QACxB,IAAMY,WAAsC,GAAGV,UAAU,CAACF,KAAK,CAAC;QAChEW,aAAa,CAACX,KAAK,CAAC,GAAGY,WAAW;QAClC,IAAMb,OAA+B,GAAGzE,kBAAkB,CAACsF,WAAW,EAAEjF,KAAK,CAACkF,cAAc,EAAE,CAAC,CAAClF,KAAK,CAACC,KAAK,CAACkF,QAAQ,CAAC;QACrH,IAAMC,gBAAgB,GAAGR,kBAAkB,CAACP,KAAK,CAAC;;QAElD;AACpB;AACA;AACA;AACA;QACoB,IAEQe,gBAAgB;QAChB;QACAA,gBAAgB,CAACC,YAAY,CAACC,kBAAkB,KAAKL,WAAW,CAACM,IAAI,IAErE,CAAC,MAAMvF,KAAK,CAACC,KAAK,CAACuF,eAAe,CAAC;UAC/BC,eAAe,EAAEL,gBAAgB,CAAChB,OAAO;UACzCsB,gBAAgB,EAAEtB;QACtB,CAAC,EAAE,yBAAyB,CAAC,EAAEuB,OAAO;QAG1C;AACxB;AACA;AACA;AACA;;QAE4BP,gBAAgB,IACfA,gBAAgB,CAAChB,OAAO,CAASmB,IAAI,IACtClG,mBAAmB,CAAC4F,WAAW,CAACM,IAAI,CAAC,KAAKN,WAAW,CAACW,KAAK,CAAC5F,KAAK,CAACC,KAAK,CAAC4F,UAAU,CAAC,CACtF,EACH;UACE;QACJ;QAEAf,oBAAoB,CAACpD,IAAI,CAAC2C,KAAK,CAAC;QAEhCQ,iBAAiB,CAACR,KAAK,CAAC,GAAG;UACvBO,kBAAkB,EAAEQ,gBAAgB,GAAGA,gBAAgB,CAAChB,OAAO,GAAG0B,SAAS;UAC3EJ,gBAAgB,EAAEtB;QACtB,CAAC;QACDW,eAAe,CAACV,KAAK,CAAC,GAAG,MAAMxE,eAAe,CAC1CG,KAAK,EACLoE,OAAO,EACPgB,gBAAgB,GAAGA,gBAAgB,CAACC,YAAY,GAAGS,SACvD,CAAC;MACL,CAAC,CACL,CAAC;MAED,IAAIhB,oBAAoB,CAAC5B,MAAM,KAAK,CAAC,EAAE;QACnC,OAAO,KAAK;MAChB;MAGA,IAAM6C,cAAc,GAAGrB,MAAM,CAACsB,MAAM,CAACnB,iBAAiB,CAAC;MACvD,IAAMoB,WAAwB,GAAG,IAAIzD,GAAG,CAAC,CAAC;MAC1C,IAAM0D,aAA2C,GAAG,CAAC,CAAC;;MAEtD;AACZ;AACA;AACA;AACA;AACA;MACY,IAAMC,YAAY,GAAGjH,UAAU,CAAC6G,cAAc,EAAE/F,KAAK,CAACC,KAAK,CAAC+C,aAAa,CAAC;MAC1E,MAAML,OAAO,CAACe,GAAG,CACbyC,YAAY,CAAClC,GAAG,CAAC,MAAOmC,UAAU,IAAK;QAEnC;QACA,IAAIpG,KAAK,CAACkF,cAAc,EAAE;UACtB,MAAMvC,OAAO,CAACe,GAAG,CACb0C,UAAU,CAACnC,GAAG,CAAC,MAAOoC,GAAG,IAAK;YAC1BA,GAAG,CAACX,gBAAgB,GAAG,MAAM5F,iCAAiC,CAC1DE,KAAK,CAACsE,WAAW,EACjBtE,KAAK,CAACC,KAAK,CAACiB,YAAY,EACxB/B,KAAK,CAACkH,GAAG,CAACX,gBAAgB,CAAC,EAC3BW,GAAG,CAACzB,kBACR,CAAC;UACL,CAAC,CACL,CAAC;QACL;QACA,IAAM0B,iBAAiB,GAAG,MAAMjG,kBAAkB,CAACkG,WAAW,CAACH,UAAU,CAAC;QAC1EE,iBAAiB,CAACnC,OAA
O,CAACqC,WAAW,IAAI;UACrC,IAAMC,EAAE,GAAID,WAAW,CAASxG,KAAK,CAACsE,WAAW,CAAC;UAClD2B,WAAW,CAAC3C,GAAG,CAACmD,EAAE,CAAC;UACnBP,aAAa,CAACO,EAAE,CAAC,GAAGD,WAAW;QACnC,CAAC,CAAC;MACN,CAAC,CACL,CAAC;MAED,IAAME,kBAA4E,GAAG,EAAE;MAEvF5B,oBAAoB,CAACX,OAAO,CAACE,KAAK,IAAI;QAClC,IAAI,CAAC4B,WAAW,CAACU,GAAG,CAACtC,KAAK,CAAC,EAAE;UACzBrE,KAAK,CAAC6B,MAAM,CAAC+E,SAAS,CAACrG,EAAE,CAACyB,IAAI,CAAC6C,iBAAiB,CAACR,KAAK,CAAC,CAAC;UACxDqC,kBAAkB,CAAChF,IAAI,CAACqD,eAAe,CAACV,KAAK,CAAC,CAAC;QACnD;MACJ,CAAC,CAAC;MAEF,IAAIrE,KAAK,CAAC6B,MAAM,CAACK,QAAQ,CAACH,QAAQ,CAAC,CAAC,EAAE;QAClC,OAAO,KAAK;MAChB;MAEA,IAAI2E,kBAAkB,CAACxD,MAAM,GAAG,CAAC,EAAE;QAC/B,MAAMlD,KAAK,CAACC,KAAK,CAAC4G,YAAY,CAACC,SAAS,CACpCpH,qCAAqC,CAACM,KAAK,EAAE0G,kBAAkB,CAAC,EAChE,2BACJ,CAAC;QACD;MACJ;;MAEA;AACZ;AACA;AACA;AACA;AACA;MACY,IAAIK,iBAAiB,GAAG,KAAK;MAC7B,IAAId,WAAW,CAACvD,IAAI,GAAG,CAAC,EAAE;QACtB1C,KAAK,CAACwB,KAAK,CAACjB,EAAE,CAACyG,2BAA2B,GAAGhH,KAAK,CAACwB,KAAK,CAACjB,EAAE,CAACyG,2BAA2B,GAAG,CAAC;QAC3F,IAAMC,iBAA4C,GAAG,EAAE;QACvD,IAAMC,iBAA6E,GAAG,CAAC,CAAC;QACxF,MAAMvE,OAAO,CAACe,GAAG,CACbgB,MAAM,CACDyC,OAAO,CAACjB,aAAa,CAAC,CACtBjC,GAAG,CAAC,CAAC,CAACI,KAAK,EAAEoB,eAAe,CAAC,KAAK;UAC/B,IAAM2B,gBAAgB,GAAGvC,iBAAiB,CAACR,KAAK,CAAC;UACjD,IAAMpE,KAAK,GAAG;YACVyF,gBAAgB,EAAE0B,gBAAgB,CAAC1B,gBAAgB;YACnDd,kBAAkB,EAAEwC,gBAAgB,CAACxC,kBAAkB;YACvDa;UACJ,CAAC;UACD,OAAOhG,oBAAoB,CACvBO,KAAK,EACLC,KAAK,EACL+E,aAAa,CAACX,KAAK,CACvB,CAAC,CAAC7D,IAAI,CAAC,MAAO6G,QAAQ,IAAK;YACvB,IAAIA,QAAQ,EAAE;cACVrH,KAAK,CAAC6B,MAAM,CAACyF,iBAAiB,CAACtF,IAAI,CAAC;gBAChC/B,KAAK;gBACLsH,MAAM,EAAEF,QAAQ,CAACE;cACrB,CAAC,CAAC;cACFN,iBAAiB,CAACvF,IAAI,CAAC;gBACnB8F,QAAQ,EAAExC,aAAa,CAACX,KAAK,CAAC;gBAC9BoD,QAAQ,EAAEJ,QAAQ,CAACK;cACvB,CAAC,CAAC;cACF,IAAMtC,gBAAgB,GAAGR,kBAAkB,CAACP,KAAK,CAAC;cAClD6C,iBAAiB,CAAC7C,KAAK,CAAC,GAAG,MAAMxE,eAAe,CAC5CG,KAAK,EACLZ,cAAc,CAACqG,eAAe,CAAC,EAC/BL,gBAAgB,GAAGA,gBAAgB,CAACC,YAAY,GAAGS,SAAS,EAC5DuB,QAAQ,CAACK,WAAW,CAACnC,IACzB,CAAC;YACL;UACJ,CAAC,CAAC;QACN,CAAC,CACT,CAAC;QAED,IAAI0B,iBAAiB,CAAC/D,MAAM,GAAG
,CAAC,EAAE;UAC9B6D,iBAAiB,GAAG,IAAI;UAExB/G,KAAK,CAACwB,KAAK,CAACjB,EAAE,CAACoH,6BAA6B,GAAG3H,KAAK,CAACwB,KAAK,CAACjB,EAAE,CAACoH,6BAA6B,GAAG,CAAC;UAC/F,IAAMC,eAAe,GAAG,MAAM5H,KAAK,CAACC,KAAK,CAACiB,YAAY,CAAC4F,SAAS,CAC5DG,iBAAiB,EACjB,+BACJ,CAAC;UACD;AACpB;AACA;AACA;AACA;AACA;UACoB,IAAMY,aAAuE,GAAG,EAAE;UAClFD,eAAe,CAACE,OAAO,CAClB3D,OAAO,CAACC,OAAO,IAAI;YAChB,IAAMC,KAAK,GAAID,OAAO,CAASpE,KAAK,CAACsE,WAAW,CAAC;YACjDuD,aAAa,CAACnG,IAAI,CACdwF,iBAAiB,CAAC7C,KAAK,CAC3B,CAAC;UACL,CAAC,CAAC;UACN,IAAIwD,aAAa,CAAC3E,MAAM,GAAG,CAAC,EAAE;YAC1B,MAAMlD,KAAK,CAACC,KAAK,CAAC4G,YAAY,CAACC,SAAS,CACpCpH,qCAAqC,CAACM,KAAK,EAAE6H,aAAa,CAAC,EAC3D,oCACJ,CAAC;UACL;UACA;QACJ;MACJ;;MAEA;AACZ;AACA;AACA;AACA;MACYrI,aAAa,CACTQ,KAAK,EACL,IAAI,EACJwE,aACJ,CAAC;MAED,OAAOuC,iBAAiB;IAC5B,CAAC,CAAC,CAACxD,KAAK,CAACwE,cAAc,IAAI;MACvB/H,KAAK,CAAC6B,MAAM,CAACmG,KAAK,CAAChG,IAAI,CAAC+F,cAAc,CAAC;MACvC,OAAO,KAAK;IAChB,CAAC,CAAC;IAEF,OAAOjH,gBAAgB;EAC3B;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/rx-change-event.js b/dist/esm/rx-change-event.js deleted file mode 100644 index 78a98b7ae05..00000000000 --- a/dist/esm/rx-change-event.js +++ /dev/null @@ -1,81 +0,0 @@ -/** - * RxChangeEvents a emitted when something in the database changes - * they can be grabbed by the observables of database, collection and document - */ - -import { overwritable } from "./overwritable.js"; -import { appendToArray } from "./plugins/utils/index.js"; -export function getDocumentDataOfRxChangeEvent(rxChangeEvent) { - if (rxChangeEvent.documentData) { - return rxChangeEvent.documentData; - } else { - return rxChangeEvent.previousDocumentData; - } -} - -/** - * Might return null which means an - * already deleted document got modified but still is deleted. - * These kind of events are not relevant for the event-reduce algorithm - * and must be filtered out. 
- */ -export function rxChangeEventToEventReduceChangeEvent(rxChangeEvent) { - switch (rxChangeEvent.operation) { - case 'INSERT': - return { - operation: rxChangeEvent.operation, - id: rxChangeEvent.documentId, - doc: rxChangeEvent.documentData, - previous: null - }; - case 'UPDATE': - return { - operation: rxChangeEvent.operation, - id: rxChangeEvent.documentId, - doc: overwritable.deepFreezeWhenDevMode(rxChangeEvent.documentData), - previous: rxChangeEvent.previousDocumentData ? rxChangeEvent.previousDocumentData : 'UNKNOWN' - }; - case 'DELETE': - return { - operation: rxChangeEvent.operation, - id: rxChangeEvent.documentId, - doc: null, - previous: rxChangeEvent.previousDocumentData - }; - } -} - -/** - * Flattens the given events into a single array of events. - * Used mostly in tests. - */ -export function flattenEvents(input) { - var output = []; - if (Array.isArray(input)) { - input.forEach(inputItem => { - var add = flattenEvents(inputItem); - appendToArray(output, add); - }); - } else { - if (input.id && input.events) { - // is bulk - input.events.forEach(ev => output.push(ev)); - } else { - output.push(input); - } - } - var usedIds = new Set(); - var nonDuplicate = []; - function getEventId(ev) { - return [ev.documentId, ev.documentData ? ev.documentData._rev : '', ev.previousDocumentData ? 
ev.previousDocumentData._rev : ''].join('|'); - } - output.forEach(ev => { - var eventId = getEventId(ev); - if (!usedIds.has(eventId)) { - usedIds.add(eventId); - nonDuplicate.push(ev); - } - }); - return nonDuplicate; -} -//# sourceMappingURL=rx-change-event.js.map \ No newline at end of file diff --git a/dist/esm/rx-change-event.js.map b/dist/esm/rx-change-event.js.map deleted file mode 100644 index bd3244432d4..00000000000 --- a/dist/esm/rx-change-event.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-change-event.js","names":["overwritable","appendToArray","getDocumentDataOfRxChangeEvent","rxChangeEvent","documentData","previousDocumentData","rxChangeEventToEventReduceChangeEvent","operation","id","documentId","doc","previous","deepFreezeWhenDevMode","flattenEvents","input","output","Array","isArray","forEach","inputItem","add","events","ev","push","usedIds","Set","nonDuplicate","getEventId","_rev","join","eventId","has"],"sources":["../../src/rx-change-event.ts"],"sourcesContent":["/**\n * RxChangeEvents a emitted when something in the database changes\n * they can be grabbed by the observables of database, collection and document\n */\n\nimport type {\n ChangeEvent as EventReduceChangeEvent,\n} from 'event-reduce-js';\nimport { overwritable } from './overwritable.ts';\n\nimport type {\n EventBulk,\n RxChangeEvent,\n RxDocumentData\n} from './types/index.d.ts';\nimport { appendToArray } from './plugins/utils/index.ts';\n\nexport function getDocumentDataOfRxChangeEvent(\n rxChangeEvent: RxChangeEvent\n): RxDocumentData {\n if ((rxChangeEvent as any).documentData) {\n return (rxChangeEvent as any).documentData;\n } else {\n return (rxChangeEvent as any).previousDocumentData;\n }\n}\n\n/**\n * Might return null which means an\n * already deleted document got modified but still is deleted.\n * These kind of events are not relevant for the event-reduce algorithm\n * and must be filtered out.\n */\nexport function rxChangeEventToEventReduceChangeEvent(\n 
rxChangeEvent: RxChangeEvent\n): EventReduceChangeEvent | null {\n switch (rxChangeEvent.operation) {\n case 'INSERT':\n return {\n operation: rxChangeEvent.operation,\n id: rxChangeEvent.documentId,\n doc: rxChangeEvent.documentData as any,\n previous: null\n };\n case 'UPDATE':\n return {\n operation: rxChangeEvent.operation,\n id: rxChangeEvent.documentId,\n doc: overwritable.deepFreezeWhenDevMode(rxChangeEvent.documentData) as any,\n previous: rxChangeEvent.previousDocumentData ? rxChangeEvent.previousDocumentData as any : 'UNKNOWN'\n };\n case 'DELETE':\n return {\n operation: rxChangeEvent.operation,\n id: rxChangeEvent.documentId,\n doc: null,\n previous: rxChangeEvent.previousDocumentData as DocType\n };\n }\n}\n\n/**\n * Flattens the given events into a single array of events.\n * Used mostly in tests.\n */\nexport function flattenEvents(\n input: EventBulk | EventBulk[] | EventType | EventType[]\n): EventType[] {\n const output: EventType[] = [];\n if (Array.isArray(input)) {\n input.forEach(inputItem => {\n const add = flattenEvents(inputItem);\n appendToArray(output, add);\n });\n } else {\n if ((input as any).id && (input as any).events) {\n // is bulk\n (input as EventBulk)\n .events\n .forEach(ev => output.push(ev));\n } else {\n output.push(input as any);\n }\n }\n\n const usedIds = new Set();\n const nonDuplicate: EventType[] = [];\n\n function getEventId(ev: any): string {\n return [\n ev.documentId,\n ev.documentData ? ev.documentData._rev : '',\n ev.previousDocumentData ? 
ev.previousDocumentData._rev : ''\n ].join('|');\n }\n\n output.forEach(ev => {\n const eventId = getEventId(ev);\n if (!usedIds.has(eventId)) {\n usedIds.add(eventId);\n nonDuplicate.push(ev);\n }\n });\n\n return nonDuplicate;\n}\n"],"mappings":"AAAA;AACA;AACA;AACA;;AAKA,SAASA,YAAY,QAAQ,mBAAmB;AAOhD,SAASC,aAAa,QAAQ,0BAA0B;AAExD,OAAO,SAASC,8BAA8BA,CAC1CC,aAA+B,EACd;EACjB,IAAKA,aAAa,CAASC,YAAY,EAAE;IACrC,OAAQD,aAAa,CAASC,YAAY;EAC9C,CAAC,MAAM;IACH,OAAQD,aAAa,CAASE,oBAAoB;EACtD;AACJ;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASC,qCAAqCA,CACjDH,aAAqC,EACC;EACtC,QAAQA,aAAa,CAACI,SAAS;IAC3B,KAAK,QAAQ;MACT,OAAO;QACHA,SAAS,EAAEJ,aAAa,CAACI,SAAS;QAClCC,EAAE,EAAEL,aAAa,CAACM,UAAU;QAC5BC,GAAG,EAAEP,aAAa,CAACC,YAAmB;QACtCO,QAAQ,EAAE;MACd,CAAC;IACL,KAAK,QAAQ;MACT,OAAO;QACHJ,SAAS,EAAEJ,aAAa,CAACI,SAAS;QAClCC,EAAE,EAAEL,aAAa,CAACM,UAAU;QAC5BC,GAAG,EAAEV,YAAY,CAACY,qBAAqB,CAACT,aAAa,CAACC,YAAY,CAAQ;QAC1EO,QAAQ,EAAER,aAAa,CAACE,oBAAoB,GAAGF,aAAa,CAACE,oBAAoB,GAAU;MAC/F,CAAC;IACL,KAAK,QAAQ;MACT,OAAO;QACHE,SAAS,EAAEJ,aAAa,CAACI,SAAS;QAClCC,EAAE,EAAEL,aAAa,CAACM,UAAU;QAC5BC,GAAG,EAAE,IAAI;QACTC,QAAQ,EAAER,aAAa,CAACE;MAC5B,CAAC;EACT;AACJ;;AAEA;AACA;AACA;AACA;AACA,OAAO,SAASQ,aAAaA,CACzBC,KAAwF,EAC7E;EACX,IAAMC,MAAmB,GAAG,EAAE;EAC9B,IAAIC,KAAK,CAACC,OAAO,CAACH,KAAK,CAAC,EAAE;IACtBA,KAAK,CAACI,OAAO,CAACC,SAAS,IAAI;MACvB,IAAMC,GAAG,GAAGP,aAAa,CAACM,SAAS,CAAC;MACpClB,aAAa,CAACc,MAAM,EAAEK,GAAG,CAAC;IAC9B,CAAC,CAAC;EACN,CAAC,MAAM;IACH,IAAKN,KAAK,CAASN,EAAE,IAAKM,KAAK,CAASO,MAAM,EAAE;MAC5C;MACCP,KAAK,CACDO,MAAM,CACNH,OAAO,CAACI,EAAE,IAAIP,MAAM,CAACQ,IAAI,CAACD,EAAE,CAAC,CAAC;IACvC,CAAC,MAAM;MACHP,MAAM,CAACQ,IAAI,CAACT,KAAY,CAAC;IAC7B;EACJ;EAEA,IAAMU,OAAO,GAAG,IAAIC,GAAG,CAAS,CAAC;EACjC,IAAMC,YAAyB,GAAG,EAAE;EAEpC,SAASC,UAAUA,CAACL,EAAO,EAAU;IACjC,OAAO,CACHA,EAAE,CAACb,UAAU,EACba,EAAE,CAAClB,YAAY,GAAGkB,EAAE,CAAClB,YAAY,CAACwB,IAAI,GAAG,EAAE,EAC3CN,EAAE,CAACjB,oBAAoB,GAAGiB,EAAE,CAACjB,oBAAoB,CAACuB,IAAI,GAAG,EAAE,CAC9D,CAACC,IAAI,CAAC,GAAG,CAAC;EACf;EAEAd,MAAM,CAACG,OAAO,CAACI,EAAE,IAAI;IACjB
,IAAMQ,OAAO,GAAGH,UAAU,CAACL,EAAE,CAAC;IAC9B,IAAI,CAACE,OAAO,CAACO,GAAG,CAACD,OAAO,CAAC,EAAE;MACvBN,OAAO,CAACJ,GAAG,CAACU,OAAO,CAAC;MACpBJ,YAAY,CAACH,IAAI,CAACD,EAAE,CAAC;IACzB;EACJ,CAAC,CAAC;EAEF,OAAOI,YAAY;AACvB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/rx-collection-helper.js b/dist/esm/rx-collection-helper.js deleted file mode 100644 index 0467dd21fe1..00000000000 --- a/dist/esm/rx-collection-helper.js +++ /dev/null @@ -1,123 +0,0 @@ -import { createRevision, flatClone, getDefaultRevision, getDefaultRxDocumentMeta, now } from "./plugins/utils/index.js"; -import { fillObjectWithDefaults, fillPrimaryKey } from "./rx-schema-helper.js"; -import { runAsyncPluginHooks } from "./hooks.js"; -import { getAllCollectionDocuments } from "./rx-database-internal-store.js"; -import { flatCloneDocWithMeta } from "./rx-storage-helper.js"; -import { overwritable } from "./overwritable.js"; -import { newRxError } from "./rx-error.js"; - -/** - * fills in the default data. - * This also clones the data. 
- */ -export function fillObjectDataBeforeInsert(schema, data) { - data = flatClone(data); - data = fillObjectWithDefaults(schema, data); - data = fillPrimaryKey(schema.primaryPath, schema.jsonSchema, data); - data._meta = getDefaultRxDocumentMeta(); - if (!Object.prototype.hasOwnProperty.call(data, '_deleted')) { - data._deleted = false; - } - if (!Object.prototype.hasOwnProperty.call(data, '_attachments')) { - data._attachments = {}; - } - if (!Object.prototype.hasOwnProperty.call(data, '_rev')) { - data._rev = getDefaultRevision(); - } - return data; -} - -/** - * Creates the storage instances that are used internally in the collection - */ -export async function createRxCollectionStorageInstance(rxDatabase, storageInstanceCreationParams) { - storageInstanceCreationParams.multiInstance = rxDatabase.multiInstance; - var storageInstance = await rxDatabase.storage.createStorageInstance(storageInstanceCreationParams); - return storageInstance; -} - -/** - * Removes the main storage of the collection - * and all connected storages like the ones from the replication meta etc. - */ -export async function removeCollectionStorages(storage, databaseInternalStorage, databaseInstanceToken, databaseName, collectionName, password, -/** - * If no hash function is provided, - * we assume that the whole internal store is removed anyway - * so we do not have to delete the meta documents. 
- */ -hashFunction) { - var allCollectionMetaDocs = await getAllCollectionDocuments(databaseInternalStorage); - var relevantCollectionMetaDocs = allCollectionMetaDocs.filter(metaDoc => metaDoc.data.name === collectionName); - var removeStorages = []; - relevantCollectionMetaDocs.forEach(metaDoc => { - removeStorages.push({ - collectionName: metaDoc.data.name, - schema: metaDoc.data.schema, - isCollection: true - }); - metaDoc.data.connectedStorages.forEach(row => removeStorages.push({ - collectionName: row.collectionName, - isCollection: false, - schema: row.schema - })); - }); - - // ensure uniqueness - var alreadyAdded = new Set(); - removeStorages = removeStorages.filter(row => { - var key = row.collectionName + '||' + row.schema.version; - if (alreadyAdded.has(key)) { - return false; - } else { - alreadyAdded.add(key); - return true; - } - }); - - // remove all the storages - await Promise.all(removeStorages.map(async row => { - var storageInstance = await storage.createStorageInstance({ - collectionName: row.collectionName, - databaseInstanceToken, - databaseName, - multiInstance: false, - options: {}, - schema: row.schema, - password, - devMode: overwritable.isDevMode() - }); - await storageInstance.remove(); - if (row.isCollection) { - await runAsyncPluginHooks('postRemoveRxCollection', { - storage, - databaseName: databaseName, - collectionName - }); - } - })); - - // remove the meta documents - if (hashFunction) { - var writeRows = relevantCollectionMetaDocs.map(doc => { - var writeDoc = flatCloneDocWithMeta(doc); - writeDoc._deleted = true; - writeDoc._meta.lwt = now(); - writeDoc._rev = createRevision(databaseInstanceToken, doc); - return { - previous: doc, - document: writeDoc - }; - }); - await databaseInternalStorage.bulkWrite(writeRows, 'rx-database-remove-collection-all'); - } -} -export function ensureRxCollectionIsNotDestroyed(collection) { - if (collection.destroyed) { - throw newRxError('COL21', { - collection: collection.name, - version: 
collection.schema.version - }); - } -} -//# sourceMappingURL=rx-collection-helper.js.map \ No newline at end of file diff --git a/dist/esm/rx-collection-helper.js.map b/dist/esm/rx-collection-helper.js.map deleted file mode 100644 index 630a69db2fa..00000000000 --- a/dist/esm/rx-collection-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-collection-helper.js","names":["createRevision","flatClone","getDefaultRevision","getDefaultRxDocumentMeta","now","fillObjectWithDefaults","fillPrimaryKey","runAsyncPluginHooks","getAllCollectionDocuments","flatCloneDocWithMeta","overwritable","newRxError","fillObjectDataBeforeInsert","schema","data","primaryPath","jsonSchema","_meta","Object","prototype","hasOwnProperty","call","_deleted","_attachments","_rev","createRxCollectionStorageInstance","rxDatabase","storageInstanceCreationParams","multiInstance","storageInstance","storage","createStorageInstance","removeCollectionStorages","databaseInternalStorage","databaseInstanceToken","databaseName","collectionName","password","hashFunction","allCollectionMetaDocs","relevantCollectionMetaDocs","filter","metaDoc","name","removeStorages","forEach","push","isCollection","connectedStorages","row","alreadyAdded","Set","key","version","has","add","Promise","all","map","options","devMode","isDevMode","remove","writeRows","doc","writeDoc","lwt","previous","document","bulkWrite","ensureRxCollectionIsNotDestroyed","collection","destroyed"],"sources":["../../src/rx-collection-helper.ts"],"sourcesContent":["import type {\n HashFunction,\n InternalStoreDocType,\n RxCollection,\n RxDatabase,\n RxDocumentData,\n RxJsonSchema,\n RxStorage,\n RxStorageInstance,\n RxStorageInstanceCreationParams\n} from './types/index.d.ts';\nimport {\n createRevision,\n flatClone,\n getDefaultRevision,\n getDefaultRxDocumentMeta,\n now\n} from './plugins/utils/index.ts';\nimport {\n fillObjectWithDefaults,\n fillPrimaryKey\n} from './rx-schema-helper.ts';\nimport type { RxSchema } from 
'./rx-schema.ts';\nimport { runAsyncPluginHooks } from './hooks.ts';\nimport { getAllCollectionDocuments } from './rx-database-internal-store.ts';\nimport { flatCloneDocWithMeta } from './rx-storage-helper.ts';\nimport { overwritable } from './overwritable.ts';\nimport type { RxCollectionBase } from './rx-collection.ts';\nimport { newRxError } from './rx-error.ts';\n\n/**\n * fills in the default data.\n * This also clones the data.\n */\nexport function fillObjectDataBeforeInsert(\n schema: RxSchema,\n data: Partial> | any\n): RxDocumentData {\n data = flatClone(data);\n data = fillObjectWithDefaults(schema, data);\n data = fillPrimaryKey(\n schema.primaryPath,\n schema.jsonSchema,\n data\n );\n data._meta = getDefaultRxDocumentMeta();\n if (!Object.prototype.hasOwnProperty.call(data, '_deleted')) {\n data._deleted = false;\n }\n if (!Object.prototype.hasOwnProperty.call(data, '_attachments')) {\n data._attachments = {};\n }\n if (!Object.prototype.hasOwnProperty.call(data, '_rev')) {\n data._rev = getDefaultRevision();\n }\n return data;\n}\n\n/**\n * Creates the storage instances that are used internally in the collection\n */\nexport async function createRxCollectionStorageInstance(\n rxDatabase: RxDatabase<{}, Internals, InstanceCreationOptions>,\n storageInstanceCreationParams: RxStorageInstanceCreationParams\n): Promise> {\n storageInstanceCreationParams.multiInstance = rxDatabase.multiInstance;\n const storageInstance = await rxDatabase.storage.createStorageInstance(\n storageInstanceCreationParams\n );\n return storageInstance;\n}\n\n/**\n * Removes the main storage of the collection\n * and all connected storages like the ones from the replication meta etc.\n */\nexport async function removeCollectionStorages(\n storage: RxStorage,\n databaseInternalStorage: RxStorageInstance, any, any>,\n databaseInstanceToken: string,\n databaseName: string,\n collectionName: string,\n password?: string,\n /**\n * If no hash function is provided,\n * we assume that the 
whole internal store is removed anyway\n * so we do not have to delete the meta documents.\n */\n hashFunction?: HashFunction,\n) {\n const allCollectionMetaDocs = await getAllCollectionDocuments(\n databaseInternalStorage\n );\n const relevantCollectionMetaDocs = allCollectionMetaDocs\n .filter(metaDoc => metaDoc.data.name === collectionName);\n\n let removeStorages: {\n collectionName: string;\n schema: RxJsonSchema;\n isCollection: boolean;\n }[] = [];\n relevantCollectionMetaDocs.forEach(metaDoc => {\n removeStorages.push({\n collectionName: metaDoc.data.name,\n schema: metaDoc.data.schema,\n isCollection: true\n });\n metaDoc.data.connectedStorages.forEach(row => removeStorages.push({\n collectionName: row.collectionName,\n isCollection: false,\n schema: row.schema\n }));\n });\n\n // ensure uniqueness\n const alreadyAdded = new Set();\n removeStorages = removeStorages.filter(row => {\n const key = row.collectionName + '||' + row.schema.version;\n if (alreadyAdded.has(key)) {\n return false;\n } else {\n alreadyAdded.add(key);\n return true;\n }\n });\n\n // remove all the storages\n await Promise.all(\n removeStorages\n .map(async (row) => {\n const storageInstance = await storage.createStorageInstance({\n collectionName: row.collectionName,\n databaseInstanceToken,\n databaseName,\n multiInstance: false,\n options: {},\n schema: row.schema,\n password,\n devMode: overwritable.isDevMode()\n });\n await storageInstance.remove();\n if (row.isCollection) {\n await runAsyncPluginHooks('postRemoveRxCollection', {\n storage,\n databaseName: databaseName,\n collectionName\n });\n }\n })\n );\n\n // remove the meta documents\n if (hashFunction) {\n const writeRows = relevantCollectionMetaDocs.map(doc => {\n const writeDoc = flatCloneDocWithMeta(doc);\n writeDoc._deleted = true;\n writeDoc._meta.lwt = now();\n writeDoc._rev = createRevision(\n databaseInstanceToken,\n doc\n );\n return {\n previous: doc,\n document: writeDoc\n };\n });\n await 
databaseInternalStorage.bulkWrite(\n writeRows,\n 'rx-database-remove-collection-all'\n );\n }\n}\n\n\nexport function ensureRxCollectionIsNotDestroyed(\n collection: RxCollection | RxCollectionBase\n) {\n if (collection.destroyed) {\n throw newRxError(\n 'COL21',\n {\n collection: collection.name,\n version: collection.schema.version\n }\n );\n }\n}\n"],"mappings":"AAWA,SACIA,cAAc,EACdC,SAAS,EACTC,kBAAkB,EAClBC,wBAAwB,EACxBC,GAAG,QACA,0BAA0B;AACjC,SACIC,sBAAsB,EACtBC,cAAc,QACX,uBAAuB;AAE9B,SAASC,mBAAmB,QAAQ,YAAY;AAChD,SAASC,yBAAyB,QAAQ,iCAAiC;AAC3E,SAASC,oBAAoB,QAAQ,wBAAwB;AAC7D,SAASC,YAAY,QAAQ,mBAAmB;AAEhD,SAASC,UAAU,QAAQ,eAAe;;AAE1C;AACA;AACA;AACA;AACA,OAAO,SAASC,0BAA0BA,CACtCC,MAA2B,EAC3BC,IAA8C,EACrB;EACzBA,IAAI,GAAGb,SAAS,CAACa,IAAI,CAAC;EACtBA,IAAI,GAAGT,sBAAsB,CAACQ,MAAM,EAAEC,IAAI,CAAC;EAC3CA,IAAI,GAAGR,cAAc,CACjBO,MAAM,CAACE,WAAW,EAClBF,MAAM,CAACG,UAAU,EACjBF,IACJ,CAAC;EACDA,IAAI,CAACG,KAAK,GAAGd,wBAAwB,CAAC,CAAC;EACvC,IAAI,CAACe,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACP,IAAI,EAAE,UAAU,CAAC,EAAE;IACzDA,IAAI,CAACQ,QAAQ,GAAG,KAAK;EACzB;EACA,IAAI,CAACJ,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACP,IAAI,EAAE,cAAc,CAAC,EAAE;IAC7DA,IAAI,CAACS,YAAY,GAAG,CAAC,CAAC;EAC1B;EACA,IAAI,CAACL,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACP,IAAI,EAAE,MAAM,CAAC,EAAE;IACrDA,IAAI,CAACU,IAAI,GAAGtB,kBAAkB,CAAC,CAAC;EACpC;EACA,OAAOY,IAAI;AACf;;AAEA;AACA;AACA;AACA,OAAO,eAAeW,iCAAiCA,CACnDC,UAA8D,EAC9DC,6BAAuG,EACzB;EAC9EA,6BAA6B,CAACC,aAAa,GAAGF,UAAU,CAACE,aAAa;EACtE,IAAMC,eAAe,GAAG,MAAMH,UAAU,CAACI,OAAO,CAACC,qBAAqB,CAClEJ,6BACJ,CAAC;EACD,OAAOE,eAAe;AAC1B;;AAEA;AACA;AACA;AACA;AACA,OAAO,eAAeG,wBAAwBA,CAC1CF,OAA4B,EAC5BG,uBAA+E,EAC/EC,qBAA6B,EAC7BC,YAAoB,EACpBC,cAAsB,EACtBC,QAAiB;AACjB;AACJ;AACA;AACA;AACA;AACIC,YAA2B,EAC7B;EACE,IAAMC,qBAAqB,GAAG,MAAM/B,yBAAyB,CACzDyB,uBACJ,CAAC;EACD,IAAMO,0BAA0B,GAAGD,qBAAqB,CACnDE,MAAM,CAACC,OAAO,IAAIA,OAAO,CAAC5B,IAAI,CAAC6B,IAAI,KAAKP,cAAc,CAAC;EAE5D,IAAIQ,cAID,GAAG,EAAE;EACRJ,0BAA0B,CAACK,OAAO,CAACH,OAAO,IAAI;IAC1CE,cAAc,CAACE,IAAI,CAAC;MAChBV,c
AAc,EAAEM,OAAO,CAAC5B,IAAI,CAAC6B,IAAI;MACjC9B,MAAM,EAAE6B,OAAO,CAAC5B,IAAI,CAACD,MAAM;MAC3BkC,YAAY,EAAE;IAClB,CAAC,CAAC;IACFL,OAAO,CAAC5B,IAAI,CAACkC,iBAAiB,CAACH,OAAO,CAACI,GAAG,IAAIL,cAAc,CAACE,IAAI,CAAC;MAC9DV,cAAc,EAAEa,GAAG,CAACb,cAAc;MAClCW,YAAY,EAAE,KAAK;MACnBlC,MAAM,EAAEoC,GAAG,CAACpC;IAChB,CAAC,CAAC,CAAC;EACP,CAAC,CAAC;;EAEF;EACA,IAAMqC,YAAY,GAAG,IAAIC,GAAG,CAAS,CAAC;EACtCP,cAAc,GAAGA,cAAc,CAACH,MAAM,CAACQ,GAAG,IAAI;IAC1C,IAAMG,GAAG,GAAGH,GAAG,CAACb,cAAc,GAAG,IAAI,GAAGa,GAAG,CAACpC,MAAM,CAACwC,OAAO;IAC1D,IAAIH,YAAY,CAACI,GAAG,CAACF,GAAG,CAAC,EAAE;MACvB,OAAO,KAAK;IAChB,CAAC,MAAM;MACHF,YAAY,CAACK,GAAG,CAACH,GAAG,CAAC;MACrB,OAAO,IAAI;IACf;EACJ,CAAC,CAAC;;EAEF;EACA,MAAMI,OAAO,CAACC,GAAG,CACbb,cAAc,CACTc,GAAG,CAAC,MAAOT,GAAG,IAAK;IAChB,IAAMpB,eAAe,GAAG,MAAMC,OAAO,CAACC,qBAAqB,CAAM;MAC7DK,cAAc,EAAEa,GAAG,CAACb,cAAc;MAClCF,qBAAqB;MACrBC,YAAY;MACZP,aAAa,EAAE,KAAK;MACpB+B,OAAO,EAAE,CAAC,CAAC;MACX9C,MAAM,EAAEoC,GAAG,CAACpC,MAAM;MAClBwB,QAAQ;MACRuB,OAAO,EAAElD,YAAY,CAACmD,SAAS,CAAC;IACpC,CAAC,CAAC;IACF,MAAMhC,eAAe,CAACiC,MAAM,CAAC,CAAC;IAC9B,IAAIb,GAAG,CAACF,YAAY,EAAE;MAClB,MAAMxC,mBAAmB,CAAC,wBAAwB,EAAE;QAChDuB,OAAO;QACPK,YAAY,EAAEA,YAAY;QAC1BC;MACJ,CAAC,CAAC;IACN;EACJ,CAAC,CACT,CAAC;;EAED;EACA,IAAIE,YAAY,EAAE;IACd,IAAMyB,SAAS,GAAGvB,0BAA0B,CAACkB,GAAG,CAACM,GAAG,IAAI;MACpD,IAAMC,QAAQ,GAAGxD,oBAAoB,CAACuD,GAAG,CAAC;MAC1CC,QAAQ,CAAC3C,QAAQ,GAAG,IAAI;MACxB2C,QAAQ,CAAChD,KAAK,CAACiD,GAAG,GAAG9D,GAAG,CAAC,CAAC;MAC1B6D,QAAQ,CAACzC,IAAI,GAAGxB,cAAc,CAC1BkC,qBAAqB,EACrB8B,GACJ,CAAC;MACD,OAAO;QACHG,QAAQ,EAAEH,GAAG;QACbI,QAAQ,EAAEH;MACd,CAAC;IACL,CAAC,CAAC;IACF,MAAMhC,uBAAuB,CAACoC,SAAS,CACnCN,SAAS,EACT,mCACJ,CAAC;EACL;AACJ;AAGA,OAAO,SAASO,gCAAgCA,CAC5CC,UAAoE,EACtE;EACE,IAAIA,UAAU,CAACC,SAAS,EAAE;IACtB,MAAM7D,UAAU,CACZ,OAAO,EACP;MACI4D,UAAU,EAAEA,UAAU,CAAC5B,IAAI;MAC3BU,OAAO,EAAEkB,UAAU,CAAC1D,MAAM,CAACwC;IAC/B,CACJ,CAAC;EACL;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/rx-collection.js b/dist/esm/rx-collection.js deleted file mode 100644 index 
5dae74c3bbd..00000000000 --- a/dist/esm/rx-collection.js +++ /dev/null @@ -1,749 +0,0 @@ -import _createClass from "@babel/runtime/helpers/createClass"; -import { filter, map, mergeMap } from 'rxjs'; -import { ucfirst, flatClone, promiseSeries, pluginMissing, ensureNotFalsy, getFromMapOrThrow, PROMISE_RESOLVE_FALSE, PROMISE_RESOLVE_VOID } from "./plugins/utils/index.js"; -import { fillObjectDataBeforeInsert, createRxCollectionStorageInstance, removeCollectionStorages, ensureRxCollectionIsNotDestroyed } from "./rx-collection-helper.js"; -import { createRxQuery, _getDefaultQuery } from "./rx-query.js"; -import { newRxError, newRxTypeError } from "./rx-error.js"; -import { DocumentCache, mapDocumentsDataToCacheDocs } from "./doc-cache.js"; -import { createQueryCache, defaultCacheReplacementPolicy } from "./query-cache.js"; -import { createChangeEventBuffer } from "./change-event-buffer.js"; -import { runAsyncPluginHooks, runPluginHooks } from "./hooks.js"; -import { createNewRxDocument, getRxDocumentConstructor } from "./rx-document-prototype-merge.js"; -import { getWrappedStorageInstance, throwIfIsStorageWriteError } from "./rx-storage-helper.js"; -import { defaultConflictHandler } from "./replication-protocol/index.js"; -import { IncrementalWriteQueue } from "./incremental-write.js"; -import { beforeDocumentUpdateWrite } from "./rx-document.js"; -import { overwritable } from "./overwritable.js"; -var HOOKS_WHEN = ['pre', 'post']; -var HOOKS_KEYS = ['insert', 'save', 'remove', 'create']; -var hooksApplied = false; -export var RxCollectionBase = /*#__PURE__*/function () { - /** - * Stores all 'normal' documents - */ - - function RxCollectionBase(database, name, schema, internalStorageInstance, instanceCreationOptions = {}, migrationStrategies = {}, methods = {}, attachments = {}, options = {}, cacheReplacementPolicy = defaultCacheReplacementPolicy, statics = {}, conflictHandler = defaultConflictHandler) { - this.storageInstance = {}; - this.timeouts = new Set(); - 
this.incrementalWriteQueue = {}; - this._incrementalUpsertQueues = new Map(); - this.synced = false; - this.hooks = {}; - this._subs = []; - this._docCache = {}; - this._queryCache = createQueryCache(); - this.$ = {}; - this.checkpoint$ = {}; - this._changeEventBuffer = {}; - this.onDestroy = []; - this.destroyed = false; - this.onRemove = []; - this.database = database; - this.name = name; - this.schema = schema; - this.internalStorageInstance = internalStorageInstance; - this.instanceCreationOptions = instanceCreationOptions; - this.migrationStrategies = migrationStrategies; - this.methods = methods; - this.attachments = attachments; - this.options = options; - this.cacheReplacementPolicy = cacheReplacementPolicy; - this.statics = statics; - this.conflictHandler = conflictHandler; - _applyHookFunctions(this.asRxCollection); - } - var _proto = RxCollectionBase.prototype; - _proto.prepare = async function prepare() { - this.storageInstance = getWrappedStorageInstance(this.database, this.internalStorageInstance, this.schema.jsonSchema); - this.incrementalWriteQueue = new IncrementalWriteQueue(this.storageInstance, this.schema.primaryPath, (newData, oldData) => beforeDocumentUpdateWrite(this, newData, oldData), result => this._runHooks('post', 'save', result)); - var collectionEventBulks$ = this.database.eventBulks$.pipe(filter(changeEventBulk => changeEventBulk.collectionName === this.name)); - this.$ = collectionEventBulks$.pipe(mergeMap(changeEventBulk => changeEventBulk.events)); - this.checkpoint$ = collectionEventBulks$.pipe(map(changeEventBulk => changeEventBulk.checkpoint)); - this._changeEventBuffer = createChangeEventBuffer(this.asRxCollection); - var documentConstructor; - this._docCache = new DocumentCache(this.schema.primaryPath, this.$.pipe(filter(cE => !cE.isLocal)), docData => { - if (!documentConstructor) { - documentConstructor = getRxDocumentConstructor(this.asRxCollection); - } - return createNewRxDocument(this.asRxCollection, documentConstructor, 
docData); - }); - var listenToRemoveSub = this.database.internalStore.changeStream().pipe(filter(bulk => { - var key = this.name + '-' + this.schema.version; - var found = bulk.events.find(event => { - return event.documentData.context === 'collection' && event.documentData.key === key && event.operation === 'DELETE'; - }); - return !!found; - })).subscribe(async () => { - await this.destroy(); - await Promise.all(this.onRemove.map(fn => fn())); - }); - this._subs.push(listenToRemoveSub); - - /** - * TODO Instead of resolving the EventBulk array here and spit it into - * single events, we should fully work with event bulks internally - * to save performance. - */ - var databaseStorageToken = await this.database.storageToken; - var subDocs = this.storageInstance.changeStream().subscribe(eventBulk => { - var events = new Array(eventBulk.events.length); - var rawEvents = eventBulk.events; - var collectionName = this.name; - var deepFreezeWhenDevMode = overwritable.deepFreezeWhenDevMode; - for (var index = 0; index < rawEvents.length; index++) { - var event = rawEvents[index]; - events[index] = { - documentId: event.documentId, - collectionName, - isLocal: false, - operation: event.operation, - documentData: deepFreezeWhenDevMode(event.documentData), - previousDocumentData: deepFreezeWhenDevMode(event.previousDocumentData) - }; - } - var changeEventBulk = { - id: eventBulk.id, - internal: false, - collectionName: this.name, - storageToken: databaseStorageToken, - events, - databaseToken: this.database.token, - checkpoint: eventBulk.checkpoint, - context: eventBulk.context, - endTime: eventBulk.endTime, - startTime: eventBulk.startTime - }; - this.database.$emit(changeEventBulk); - }); - this._subs.push(subDocs); - - /** - * Resolve the conflict tasks - * of the RxStorageInstance - */ - this._subs.push(this.storageInstance.conflictResultionTasks().subscribe(task => { - this.conflictHandler(task.input, task.context).then(output => { - 
this.storageInstance.resolveConflictResultionTask({ - id: task.id, - output - }); - }); - })); - return PROMISE_RESOLVE_VOID; - } - - /** - * Manually call the cleanup function of the storage. - * @link https://rxdb.info/cleanup.html - */; - _proto.cleanup = function cleanup(_minimumDeletedTime) { - ensureRxCollectionIsNotDestroyed(this); - throw pluginMissing('cleanup'); - } - - // overwritten by migration-plugin - ; - _proto.migrationNeeded = function migrationNeeded() { - throw pluginMissing('migration-schema'); - }; - _proto.getMigrationState = function getMigrationState() { - throw pluginMissing('migration-schema'); - }; - _proto.startMigration = function startMigration(batchSize = 10) { - ensureRxCollectionIsNotDestroyed(this); - return this.getMigrationState().startMigration(batchSize); - }; - _proto.migratePromise = function migratePromise(batchSize = 10) { - return this.getMigrationState().migratePromise(batchSize); - }; - _proto.insert = async function insert(json) { - ensureRxCollectionIsNotDestroyed(this); - var writeResult = await this.bulkInsert([json]); - var isError = writeResult.error[0]; - throwIfIsStorageWriteError(this, json[this.schema.primaryPath], json, isError); - var insertResult = ensureNotFalsy(writeResult.success[0]); - return insertResult; - }; - _proto.bulkInsert = async function bulkInsert(docsData) { - ensureRxCollectionIsNotDestroyed(this); - /** - * Optimization shortcut, - * do nothing when called with an empty array - */ - if (docsData.length === 0) { - return { - success: [], - error: [] - }; - } - var primaryPath = this.schema.primaryPath; - - /** - * This code is a bit redundant for better performance. - * Instead of iterating multiple times, - * we directly transform the input to a write-row array. 
- */ - var insertRows; - if (this.hasHooks('pre', 'insert')) { - insertRows = await Promise.all(docsData.map(docData => { - var useDocData = fillObjectDataBeforeInsert(this.schema, docData); - return this._runHooks('pre', 'insert', useDocData).then(() => { - return { - document: useDocData - }; - }); - })); - } else { - insertRows = []; - for (var index = 0; index < docsData.length; index++) { - var docData = docsData[index]; - var useDocData = fillObjectDataBeforeInsert(this.schema, docData); - insertRows[index] = { - document: useDocData - }; - } - } - var results = await this.storageInstance.bulkWrite(insertRows, 'rx-collection-bulk-insert'); - - // create documents - var rxDocuments = mapDocumentsDataToCacheDocs(this._docCache, results.success); - if (this.hasHooks('post', 'insert')) { - var docsMap = new Map(); - insertRows.forEach(row => { - var doc = row.document; - docsMap.set(doc[primaryPath], doc); - }); - await Promise.all(rxDocuments.map(doc => { - return this._runHooks('post', 'insert', docsMap.get(doc.primary), doc); - })); - } - return { - success: rxDocuments, - error: results.error - }; - }; - _proto.bulkRemove = async function bulkRemove(ids) { - ensureRxCollectionIsNotDestroyed(this); - var primaryPath = this.schema.primaryPath; - /** - * Optimization shortcut, - * do nothing when called with an empty array - */ - if (ids.length === 0) { - return { - success: [], - error: [] - }; - } - var rxDocumentMap = await this.findByIds(ids).exec(); - var docsData = []; - var docsMap = new Map(); - Array.from(rxDocumentMap.values()).forEach(rxDocument => { - var data = rxDocument.toMutableJSON(true); - docsData.push(data); - docsMap.set(rxDocument.primary, data); - }); - await Promise.all(docsData.map(doc => { - var primary = doc[this.schema.primaryPath]; - return this._runHooks('pre', 'remove', doc, rxDocumentMap.get(primary)); - })); - var removeDocs = docsData.map(doc => { - var writeDoc = flatClone(doc); - writeDoc._deleted = true; - return { - 
previous: doc, - document: writeDoc - }; - }); - var results = await this.storageInstance.bulkWrite(removeDocs, 'rx-collection-bulk-remove'); - var successIds = results.success.map(d => d[primaryPath]); - - // run hooks - await Promise.all(successIds.map(id => { - return this._runHooks('post', 'remove', docsMap.get(id), rxDocumentMap.get(id)); - })); - var rxDocuments = successIds.map(id => getFromMapOrThrow(rxDocumentMap, id)); - return { - success: rxDocuments, - error: results.error - }; - } - - /** - * same as bulkInsert but overwrites existing document with same primary - */; - _proto.bulkUpsert = async function bulkUpsert(docsData) { - ensureRxCollectionIsNotDestroyed(this); - var insertData = []; - var useJsonByDocId = new Map(); - docsData.forEach(docData => { - var useJson = fillObjectDataBeforeInsert(this.schema, docData); - var primary = useJson[this.schema.primaryPath]; - if (!primary) { - throw newRxError('COL3', { - primaryPath: this.schema.primaryPath, - data: useJson, - schema: this.schema.jsonSchema - }); - } - useJsonByDocId.set(primary, useJson); - insertData.push(useJson); - }); - var insertResult = await this.bulkInsert(insertData); - var success = insertResult.success.slice(0); - var error = []; - - // update the ones that existed already - await Promise.all(insertResult.error.map(async err => { - if (err.status !== 409) { - error.push(err); - } else { - var id = err.documentId; - var writeData = getFromMapOrThrow(useJsonByDocId, id); - var docDataInDb = ensureNotFalsy(err.documentInDb); - var doc = this._docCache.getCachedRxDocuments([docDataInDb])[0]; - var newDoc = await doc.incrementalModify(() => writeData); - success.push(newDoc); - } - })); - return { - error, - success - }; - } - - /** - * same as insert but overwrites existing document with same primary - */; - _proto.upsert = async function upsert(json) { - ensureRxCollectionIsNotDestroyed(this); - var bulkResult = await this.bulkUpsert([json]); - 
throwIfIsStorageWriteError(this.asRxCollection, json[this.schema.primaryPath], json, bulkResult.error[0]); - return bulkResult.success[0]; - } - - /** - * upserts to a RxDocument, uses incrementalModify if document already exists - */; - _proto.incrementalUpsert = function incrementalUpsert(json) { - ensureRxCollectionIsNotDestroyed(this); - var useJson = fillObjectDataBeforeInsert(this.schema, json); - var primary = useJson[this.schema.primaryPath]; - if (!primary) { - throw newRxError('COL4', { - data: json - }); - } - - // ensure that it won't try 2 parallel runs - var queue = this._incrementalUpsertQueues.get(primary); - if (!queue) { - queue = PROMISE_RESOLVE_VOID; - } - queue = queue.then(() => _incrementalUpsertEnsureRxDocumentExists(this, primary, useJson)).then(wasInserted => { - if (!wasInserted.inserted) { - return _incrementalUpsertUpdate(wasInserted.doc, useJson); - } else { - return wasInserted.doc; - } - }); - this._incrementalUpsertQueues.set(primary, queue); - return queue; - }; - _proto.find = function find(queryObj) { - ensureRxCollectionIsNotDestroyed(this); - if (typeof queryObj === 'string') { - throw newRxError('COL5', { - queryObj - }); - } - if (!queryObj) { - queryObj = _getDefaultQuery(); - } - var query = createRxQuery('find', queryObj, this); - return query; - }; - _proto.findOne = function findOne(queryObj) { - ensureRxCollectionIsNotDestroyed(this); - - // TODO move this check to dev-mode plugin - if (typeof queryObj === 'number' || Array.isArray(queryObj)) { - throw newRxTypeError('COL6', { - queryObj - }); - } - var query; - if (typeof queryObj === 'string') { - query = createRxQuery('findOne', { - selector: { - [this.schema.primaryPath]: queryObj - }, - limit: 1 - }, this); - } else { - if (!queryObj) { - queryObj = _getDefaultQuery(); - } - - // cannot have limit on findOne queries because it will be overwritten - if (queryObj.limit) { - throw newRxError('QU6'); - } - queryObj = flatClone(queryObj); - queryObj.limit = 1; - query = 
createRxQuery('findOne', queryObj, this); - } - return query; - }; - _proto.count = function count(queryObj) { - ensureRxCollectionIsNotDestroyed(this); - if (!queryObj) { - queryObj = _getDefaultQuery(); - } - var query = createRxQuery('count', queryObj, this); - return query; - } - - /** - * find a list documents by their primary key - * has way better performance then running multiple findOne() or a find() with a complex $or-selected - */; - _proto.findByIds = function findByIds(ids) { - ensureRxCollectionIsNotDestroyed(this); - var mangoQuery = { - selector: { - [this.schema.primaryPath]: { - $in: ids.slice(0) - } - } - }; - var query = createRxQuery('findByIds', mangoQuery, this); - return query; - } - - /** - * Export collection to a JSON friendly format. - */; - _proto.exportJSON = function exportJSON() { - throw pluginMissing('json-dump'); - } - - /** - * Import the parsed JSON export into the collection. - * @param _exportedJSON The previously exported data from the `.exportJSON()` method. - */; - _proto.importJSON = function importJSON(_exportedJSON) { - throw pluginMissing('json-dump'); - }; - _proto.insertCRDT = function insertCRDT(_updateObj) { - throw pluginMissing('crdt'); - } - - /** - * HOOKS - */; - _proto.addHook = function addHook(when, key, fun, parallel = false) { - if (typeof fun !== 'function') { - throw newRxTypeError('COL7', { - key, - when - }); - } - if (!HOOKS_WHEN.includes(when)) { - throw newRxTypeError('COL8', { - key, - when - }); - } - if (!HOOKS_KEYS.includes(key)) { - throw newRxError('COL9', { - key - }); - } - if (when === 'post' && key === 'create' && parallel === true) { - throw newRxError('COL10', { - when, - key, - parallel - }); - } - - // bind this-scope to hook-function - var boundFun = fun.bind(this); - var runName = parallel ? 
'parallel' : 'series'; - this.hooks[key] = this.hooks[key] || {}; - this.hooks[key][when] = this.hooks[key][when] || { - series: [], - parallel: [] - }; - this.hooks[key][when][runName].push(boundFun); - }; - _proto.getHooks = function getHooks(when, key) { - if (!this.hooks[key] || !this.hooks[key][when]) { - return { - series: [], - parallel: [] - }; - } - return this.hooks[key][when]; - }; - _proto.hasHooks = function hasHooks(when, key) { - /** - * Performance shortcut - * so that we not have to build the empty object. - */ - if (!this.hooks[key] || !this.hooks[key][when]) { - return false; - } - var hooks = this.getHooks(when, key); - if (!hooks) { - return false; - } - return hooks.series.length > 0 || hooks.parallel.length > 0; - }; - _proto._runHooks = function _runHooks(when, key, data, instance) { - var hooks = this.getHooks(when, key); - if (!hooks) { - return PROMISE_RESOLVE_VOID; - } - - // run parallel: false - var tasks = hooks.series.map(hook => () => hook(data, instance)); - return promiseSeries(tasks) - // run parallel: true - .then(() => Promise.all(hooks.parallel.map(hook => hook(data, instance)))); - } - - /** - * does the same as ._runHooks() but with non-async-functions - */; - _proto._runHooksSync = function _runHooksSync(when, key, data, instance) { - if (!this.hasHooks(when, key)) { - return; - } - var hooks = this.getHooks(when, key); - if (!hooks) return; - hooks.series.forEach(hook => hook(data, instance)); - } - - /** - * Returns a promise that resolves after the given time. - * Ensures that is properly cleans up when the collection is destroyed - * so that no running timeouts prevent the exit of the JavaScript process. 
- */; - _proto.promiseWait = function promiseWait(time) { - var ret = new Promise(res => { - var timeout = setTimeout(() => { - this.timeouts.delete(timeout); - res(); - }, time); - this.timeouts.add(timeout); - }); - return ret; - }; - _proto.destroy = async function destroy() { - if (this.destroyed) { - return PROMISE_RESOLVE_FALSE; - } - await Promise.all(this.onDestroy.map(fn => fn())); - - /** - * Settings destroyed = true - * must be the first thing to do, - * so for example the replication can directly stop - * instead of sending requests to a closed storage. - */ - this.destroyed = true; - Array.from(this.timeouts).forEach(timeout => clearTimeout(timeout)); - if (this._changeEventBuffer) { - this._changeEventBuffer.destroy(); - } - /** - * First wait until the whole database is idle. - * This ensures that the storage does not get closed - * while some operation is running. - * It is important that we do not intercept a running call - * because it might lead to undefined behavior like when a doc is written - * but the change is not added to the changes collection. - */ - return this.database.requestIdlePromise().then(() => this.storageInstance.close()).then(() => { - /** - * Unsubscribing must be done AFTER the storageInstance.close() - * Because the conflict handling is part of the subscriptions and - * otherwise there might be open conflicts to be resolved which - * will then stuck and never resolve. 
- */ - this._subs.forEach(sub => sub.unsubscribe()); - delete this.database.collections[this.name]; - return runAsyncPluginHooks('postDestroyRxCollection', this).then(() => true); - }); - } - - /** - * remove all data of the collection - */; - _proto.remove = async function remove() { - await this.destroy(); - await Promise.all(this.onRemove.map(fn => fn())); - await removeCollectionStorages(this.database.storage, this.database.internalStore, this.database.token, this.database.name, this.name, this.database.password, this.database.hashFunction); - }; - return _createClass(RxCollectionBase, [{ - key: "insert$", - get: function () { - return this.$.pipe(filter(cE => cE.operation === 'INSERT')); - } - }, { - key: "update$", - get: function () { - return this.$.pipe(filter(cE => cE.operation === 'UPDATE')); - } - }, { - key: "remove$", - get: function () { - return this.$.pipe(filter(cE => cE.operation === 'DELETE')); - } - - // defaults - - /** - * When the collection is destroyed, - * these functions will be called an awaited. - * Used to automatically clean up stuff that - * belongs to this collection. 
- */ - }, { - key: "asRxCollection", - get: function () { - return this; - } - }]); -}(); - -/** - * adds the hook-functions to the collections prototype - * this runs only once - */ -function _applyHookFunctions(collection) { - if (hooksApplied) return; // already run - hooksApplied = true; - var colProto = Object.getPrototypeOf(collection); - HOOKS_KEYS.forEach(key => { - HOOKS_WHEN.map(when => { - var fnName = when + ucfirst(key); - colProto[fnName] = function (fun, parallel) { - return this.addHook(when, key, fun, parallel); - }; - }); - }); -} -function _incrementalUpsertUpdate(doc, json) { - return doc.incrementalModify(_innerDoc => { - return json; - }); -} - -/** - * ensures that the given document exists - * @return promise that resolves with new doc and flag if inserted - */ -function _incrementalUpsertEnsureRxDocumentExists(rxCollection, primary, json) { - /** - * Optimisation shortcut, - * first try to find the document in the doc-cache - */ - var docDataFromCache = rxCollection._docCache.getLatestDocumentDataIfExists(primary); - if (docDataFromCache) { - return Promise.resolve({ - doc: rxCollection._docCache.getCachedRxDocuments([docDataFromCache])[0], - inserted: false - }); - } - return rxCollection.findOne(primary).exec().then(doc => { - if (!doc) { - return rxCollection.insert(json).then(newDoc => ({ - doc: newDoc, - inserted: true - })); - } else { - return { - doc, - inserted: false - }; - } - }); -} - -/** - * creates and prepares a new collection - */ -export function createRxCollection({ - database, - name, - schema, - instanceCreationOptions = {}, - migrationStrategies = {}, - autoMigrate = true, - statics = {}, - methods = {}, - attachments = {}, - options = {}, - localDocuments = false, - cacheReplacementPolicy = defaultCacheReplacementPolicy, - conflictHandler = defaultConflictHandler -}) { - var storageInstanceCreationParams = { - databaseInstanceToken: database.token, - databaseName: database.name, - collectionName: name, - schema: 
schema.jsonSchema, - options: instanceCreationOptions, - multiInstance: database.multiInstance, - password: database.password, - devMode: overwritable.isDevMode() - }; - runPluginHooks('preCreateRxStorageInstance', storageInstanceCreationParams); - return createRxCollectionStorageInstance(database, storageInstanceCreationParams).then(storageInstance => { - var collection = new RxCollectionBase(database, name, schema, storageInstance, instanceCreationOptions, migrationStrategies, methods, attachments, options, cacheReplacementPolicy, statics, conflictHandler); - return collection.prepare().then(() => { - // ORM add statics - Object.entries(statics).forEach(([funName, fun]) => { - Object.defineProperty(collection, funName, { - get: () => fun.bind(collection) - }); - }); - var ret = PROMISE_RESOLVE_VOID; - if (autoMigrate && collection.schema.version !== 0) { - ret = collection.migratePromise(); - } - return ret; - }).then(() => { - runPluginHooks('createRxCollection', { - collection, - creator: { - name, - schema, - storageInstance, - instanceCreationOptions, - migrationStrategies, - methods, - attachments, - options, - cacheReplacementPolicy, - localDocuments, - statics - } - }); - return collection; - }) - /** - * If the collection creation fails, - * we yet have to close the storage instances. 
- */.catch(err => { - return storageInstance.close().then(() => Promise.reject(err)); - }); - }); -} -export function isRxCollection(obj) { - return obj instanceof RxCollectionBase; -} -//# sourceMappingURL=rx-collection.js.map \ No newline at end of file diff --git a/dist/esm/rx-collection.js.map b/dist/esm/rx-collection.js.map deleted file mode 100644 index 1cc9da5ebdf..00000000000 --- a/dist/esm/rx-collection.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-collection.js","names":["filter","map","mergeMap","ucfirst","flatClone","promiseSeries","pluginMissing","ensureNotFalsy","getFromMapOrThrow","PROMISE_RESOLVE_FALSE","PROMISE_RESOLVE_VOID","fillObjectDataBeforeInsert","createRxCollectionStorageInstance","removeCollectionStorages","ensureRxCollectionIsNotDestroyed","createRxQuery","_getDefaultQuery","newRxError","newRxTypeError","DocumentCache","mapDocumentsDataToCacheDocs","createQueryCache","defaultCacheReplacementPolicy","createChangeEventBuffer","runAsyncPluginHooks","runPluginHooks","createNewRxDocument","getRxDocumentConstructor","getWrappedStorageInstance","throwIfIsStorageWriteError","defaultConflictHandler","IncrementalWriteQueue","beforeDocumentUpdateWrite","overwritable","HOOKS_WHEN","HOOKS_KEYS","hooksApplied","RxCollectionBase","database","name","schema","internalStorageInstance","instanceCreationOptions","migrationStrategies","methods","attachments","options","cacheReplacementPolicy","statics","conflictHandler","storageInstance","timeouts","Set","incrementalWriteQueue","_incrementalUpsertQueues","Map","synced","hooks","_subs","_docCache","_queryCache","$","checkpoint$","_changeEventBuffer","onDestroy","destroyed","onRemove","_applyHookFunctions","asRxCollection","_proto","prototype","prepare","jsonSchema","primaryPath","newData","oldData","result","_runHooks","collectionEventBulks$","eventBulks$","pipe","changeEventBulk","collectionName","events","checkpoint","documentConstructor","cE","isLocal","docData","listenToRemoveSub","internalSto
re","changeStream","bulk","key","version","found","find","event","documentData","context","operation","subscribe","destroy","Promise","all","fn","push","databaseStorageToken","storageToken","subDocs","eventBulk","Array","length","rawEvents","deepFreezeWhenDevMode","index","documentId","previousDocumentData","id","internal","databaseToken","token","endTime","startTime","$emit","conflictResultionTasks","task","input","then","output","resolveConflictResultionTask","cleanup","_minimumDeletedTime","migrationNeeded","getMigrationState","startMigration","batchSize","migratePromise","insert","json","writeResult","bulkInsert","isError","error","insertResult","success","docsData","insertRows","hasHooks","useDocData","document","results","bulkWrite","rxDocuments","docsMap","forEach","row","doc","set","get","primary","bulkRemove","ids","rxDocumentMap","findByIds","exec","from","values","rxDocument","data","toMutableJSON","removeDocs","writeDoc","_deleted","previous","successIds","d","bulkUpsert","insertData","useJsonByDocId","useJson","slice","err","status","writeData","docDataInDb","documentInDb","getCachedRxDocuments","newDoc","incrementalModify","upsert","bulkResult","incrementalUpsert","queue","_incrementalUpsertEnsureRxDocumentExists","wasInserted","inserted","_incrementalUpsertUpdate","queryObj","query","findOne","isArray","selector","limit","count","mangoQuery","$in","exportJSON","importJSON","_exportedJSON","insertCRDT","_updateObj","addHook","when","fun","parallel","includes","boundFun","bind","runName","series","getHooks","instance","tasks","hook","_runHooksSync","promiseWait","time","ret","res","timeout","setTimeout","delete","add","clearTimeout","requestIdlePromise","close","sub","unsubscribe","collections","remove","storage","password","hashFunction","_createClass","collection","colProto","Object","getPrototypeOf","fnName","_innerDoc","rxCollection","docDataFromCache","getLatestDocumentDataIfExists","resolve","createRxCollection","autoMigrate","localDocuments","sto
rageInstanceCreationParams","databaseInstanceToken","databaseName","multiInstance","devMode","isDevMode","entries","funName","defineProperty","creator","catch","reject","isRxCollection","obj"],"sources":["../../src/rx-collection.ts"],"sourcesContent":["import {\n filter,\n map,\n mergeMap\n} from 'rxjs';\n\nimport {\n ucfirst,\n flatClone,\n promiseSeries,\n pluginMissing,\n ensureNotFalsy,\n getFromMapOrThrow,\n PROMISE_RESOLVE_FALSE,\n PROMISE_RESOLVE_VOID\n} from './plugins/utils/index.ts';\nimport {\n fillObjectDataBeforeInsert,\n createRxCollectionStorageInstance,\n removeCollectionStorages,\n ensureRxCollectionIsNotDestroyed\n} from './rx-collection-helper.ts';\nimport {\n createRxQuery,\n _getDefaultQuery\n} from './rx-query.ts';\nimport {\n newRxError,\n newRxTypeError\n} from './rx-error.ts';\nimport type {\n RxMigrationState\n} from './plugins/migration-schema/index.ts';\nimport {\n DocumentCache,\n mapDocumentsDataToCacheDocs\n} from './doc-cache.ts';\nimport {\n QueryCache,\n createQueryCache,\n defaultCacheReplacementPolicy\n} from './query-cache.ts';\nimport {\n ChangeEventBuffer,\n createChangeEventBuffer\n} from './change-event-buffer.ts';\nimport {\n runAsyncPluginHooks,\n runPluginHooks\n} from './hooks.ts';\n\nimport {\n Subscription,\n Observable\n} from 'rxjs';\n\nimport type {\n KeyFunctionMap,\n RxCollection,\n RxDatabase,\n RxQuery,\n RxDocument,\n RxDumpCollection,\n RxDumpCollectionAny,\n MangoQuery,\n MangoQueryNoLimit,\n RxCacheReplacementPolicy,\n RxStorageWriteError,\n RxDocumentData,\n RxStorageInstanceCreationParams,\n BulkWriteRow,\n RxChangeEvent,\n RxChangeEventInsert,\n RxChangeEventUpdate,\n RxChangeEventDelete,\n RxStorageInstance,\n CollectionsOfDatabase,\n RxChangeEventBulk,\n RxLocalDocumentData,\n RxDocumentBase,\n RxConflictHandler,\n MaybePromise,\n CRDTEntry,\n MangoQuerySelectorAndIndex,\n MigrationStrategies\n} from './types/index.d.ts';\n\nimport {\n RxSchema\n} from './rx-schema.ts';\n\nimport {\n 
createNewRxDocument,\n getRxDocumentConstructor\n} from './rx-document-prototype-merge.ts';\nimport {\n getWrappedStorageInstance,\n throwIfIsStorageWriteError,\n WrappedRxStorageInstance\n} from './rx-storage-helper.ts';\nimport { defaultConflictHandler } from './replication-protocol/index.ts';\nimport { IncrementalWriteQueue } from './incremental-write.ts';\nimport { beforeDocumentUpdateWrite } from './rx-document.ts';\nimport { overwritable } from './overwritable.ts';\n\nconst HOOKS_WHEN = ['pre', 'post'] as const;\ntype HookWhenType = typeof HOOKS_WHEN[number];\nconst HOOKS_KEYS = ['insert', 'save', 'remove', 'create'] as const;\ntype HookKeyType = typeof HOOKS_KEYS[number];\nlet hooksApplied = false;\n\nexport class RxCollectionBase<\n InstanceCreationOptions,\n RxDocumentType = { [prop: string]: any; },\n OrmMethods = {},\n StaticMethods = { [key: string]: any; },\n Reactivity = any\n> {\n\n\n /**\n * Stores all 'normal' documents\n */\n public storageInstance: WrappedRxStorageInstance = {} as any;\n public readonly timeouts: Set> = new Set();\n public incrementalWriteQueue: IncrementalWriteQueue = {} as any;\n\n constructor(\n public database: RxDatabase,\n public name: string,\n public schema: RxSchema,\n public internalStorageInstance: RxStorageInstance,\n public instanceCreationOptions: InstanceCreationOptions = {} as any,\n public migrationStrategies: MigrationStrategies = {},\n public methods: KeyFunctionMap = {},\n public attachments: KeyFunctionMap = {},\n public options: any = {},\n public cacheReplacementPolicy: RxCacheReplacementPolicy = defaultCacheReplacementPolicy,\n public statics: KeyFunctionMap = {},\n public conflictHandler: RxConflictHandler = defaultConflictHandler\n ) {\n _applyHookFunctions(this.asRxCollection);\n }\n\n get insert$(): Observable> {\n return this.$.pipe(\n filter(cE => cE.operation === 'INSERT')\n ) as any;\n }\n get update$(): Observable> {\n return this.$.pipe(\n filter(cE => cE.operation === 'UPDATE')\n ) as any;\n }\n 
get remove$(): Observable> {\n return this.$.pipe(\n filter(cE => cE.operation === 'DELETE')\n ) as any;\n }\n\n public _incrementalUpsertQueues: Map> = new Map();\n // defaults\n public synced: boolean = false;\n public hooks: {\n [key in HookKeyType]: {\n [when in HookWhenType]: {\n series: Function[];\n parallel: Function[];\n };\n }\n } = {} as any;\n public _subs: Subscription[] = [];\n\n public _docCache: DocumentCache = {} as any;\n\n public _queryCache: QueryCache = createQueryCache();\n public $: Observable> = {} as any;\n public checkpoint$: Observable = {} as any;\n public _changeEventBuffer: ChangeEventBuffer = {} as ChangeEventBuffer;\n\n\n\n /**\n * When the collection is destroyed,\n * these functions will be called an awaited.\n * Used to automatically clean up stuff that\n * belongs to this collection.\n */\n public onDestroy: (() => MaybePromise)[] = [];\n public destroyed = false;\n\n public onRemove: (() => MaybePromise)[] = [];\n\n public async prepare(): Promise {\n this.storageInstance = getWrappedStorageInstance(\n this.database,\n this.internalStorageInstance,\n this.schema.jsonSchema\n );\n this.incrementalWriteQueue = new IncrementalWriteQueue(\n this.storageInstance,\n this.schema.primaryPath,\n (newData, oldData) => beforeDocumentUpdateWrite(this as any, newData, oldData),\n result => this._runHooks('post', 'save', result)\n );\n\n const collectionEventBulks$ = this.database.eventBulks$.pipe(\n filter(changeEventBulk => changeEventBulk.collectionName === this.name),\n );\n this.$ = collectionEventBulks$.pipe(\n mergeMap(changeEventBulk => changeEventBulk.events),\n );\n this.checkpoint$ = collectionEventBulks$.pipe(\n map(changeEventBulk => changeEventBulk.checkpoint),\n );\n\n this._changeEventBuffer = createChangeEventBuffer(this.asRxCollection);\n let documentConstructor: any;\n this._docCache = new DocumentCache(\n this.schema.primaryPath,\n this.$.pipe(filter(cE => !cE.isLocal)),\n docData => {\n if (!documentConstructor) {\n 
documentConstructor = getRxDocumentConstructor(this.asRxCollection);\n }\n return createNewRxDocument(this.asRxCollection, documentConstructor, docData);\n }\n );\n\n\n const listenToRemoveSub = this.database.internalStore.changeStream().pipe(\n filter(bulk => {\n const key = this.name + '-' + this.schema.version;\n const found = bulk.events.find(event => {\n return (\n event.documentData.context === 'collection' &&\n event.documentData.key === key &&\n event.operation === 'DELETE'\n );\n });\n return !!found;\n })\n ).subscribe(async () => {\n await this.destroy();\n await Promise.all(this.onRemove.map(fn => fn()));\n });\n this._subs.push(listenToRemoveSub);\n\n\n /**\n * TODO Instead of resolving the EventBulk array here and spit it into\n * single events, we should fully work with event bulks internally\n * to save performance.\n */\n const databaseStorageToken = await this.database.storageToken;\n const subDocs = this.storageInstance.changeStream().subscribe(eventBulk => {\n const events = new Array(eventBulk.events.length);\n const rawEvents = eventBulk.events;\n const collectionName = this.name;\n const deepFreezeWhenDevMode = overwritable.deepFreezeWhenDevMode;\n for (let index = 0; index < rawEvents.length; index++) {\n const event = rawEvents[index];\n events[index] = {\n documentId: event.documentId,\n collectionName,\n isLocal: false,\n operation: event.operation,\n documentData: deepFreezeWhenDevMode(event.documentData) as any,\n previousDocumentData: deepFreezeWhenDevMode(event.previousDocumentData) as any\n };\n }\n const changeEventBulk: RxChangeEventBulk = {\n id: eventBulk.id,\n internal: false,\n collectionName: this.name,\n storageToken: databaseStorageToken,\n events,\n databaseToken: this.database.token,\n checkpoint: eventBulk.checkpoint,\n context: eventBulk.context,\n endTime: eventBulk.endTime,\n startTime: eventBulk.startTime\n };\n this.database.$emit(changeEventBulk);\n });\n this._subs.push(subDocs);\n\n /**\n * Resolve the conflict 
tasks\n * of the RxStorageInstance\n */\n this._subs.push(\n this.storageInstance\n .conflictResultionTasks()\n .subscribe(task => {\n this\n .conflictHandler(task.input, task.context)\n .then(output => {\n this.storageInstance.resolveConflictResultionTask({\n id: task.id,\n output\n });\n });\n })\n );\n\n return PROMISE_RESOLVE_VOID;\n }\n\n\n /**\n * Manually call the cleanup function of the storage.\n * @link https://rxdb.info/cleanup.html\n */\n cleanup(_minimumDeletedTime?: number): Promise {\n ensureRxCollectionIsNotDestroyed(this);\n throw pluginMissing('cleanup');\n }\n\n // overwritten by migration-plugin\n migrationNeeded(): Promise {\n throw pluginMissing('migration-schema');\n }\n getMigrationState(): RxMigrationState {\n throw pluginMissing('migration-schema');\n }\n startMigration(batchSize: number = 10): Promise {\n ensureRxCollectionIsNotDestroyed(this);\n return this.getMigrationState().startMigration(batchSize);\n }\n migratePromise(batchSize: number = 10): Promise {\n return this.getMigrationState().migratePromise(batchSize);\n }\n\n async insert(\n json: RxDocumentType | RxDocument\n ): Promise> {\n ensureRxCollectionIsNotDestroyed(this);\n const writeResult = await this.bulkInsert([json as any]);\n\n const isError = writeResult.error[0];\n throwIfIsStorageWriteError(this as any, (json as any)[this.schema.primaryPath] as any, json, isError);\n const insertResult = ensureNotFalsy(writeResult.success[0]);\n return insertResult;\n }\n\n async bulkInsert(\n docsData: RxDocumentType[]\n ): Promise<{\n success: RxDocument[];\n error: RxStorageWriteError[];\n }> {\n ensureRxCollectionIsNotDestroyed(this);\n /**\n * Optimization shortcut,\n * do nothing when called with an empty array\n */\n if (docsData.length === 0) {\n return {\n success: [],\n error: []\n };\n }\n\n const primaryPath = this.schema.primaryPath;\n\n\n /**\n * This code is a bit redundant for better performance.\n * Instead of iterating multiple times,\n * we directly transform the 
input to a write-row array.\n */\n let insertRows: BulkWriteRow[];\n if (this.hasHooks('pre', 'insert')) {\n insertRows = await Promise.all(\n docsData.map(docData => {\n const useDocData = fillObjectDataBeforeInsert(this.schema, docData);\n return this._runHooks('pre', 'insert', useDocData)\n .then(() => {\n return { document: useDocData };\n });\n })\n );\n } else {\n insertRows = [];\n for (let index = 0; index < docsData.length; index++) {\n const docData = docsData[index];\n const useDocData = fillObjectDataBeforeInsert(this.schema, docData);\n insertRows[index] = { document: useDocData };\n }\n }\n\n const results = await this.storageInstance.bulkWrite(\n insertRows,\n 'rx-collection-bulk-insert'\n );\n\n // create documents\n const rxDocuments = mapDocumentsDataToCacheDocs(this._docCache, results.success);\n\n if (this.hasHooks('post', 'insert')) {\n const docsMap: Map = new Map();\n insertRows.forEach(row => {\n const doc = row.document;\n docsMap.set((doc as any)[primaryPath] as any, doc);\n });\n await Promise.all(\n rxDocuments.map(doc => {\n return this._runHooks(\n 'post', 'insert',\n docsMap.get(doc.primary),\n doc\n );\n })\n );\n }\n\n return {\n success: rxDocuments,\n error: results.error\n };\n }\n\n async bulkRemove(\n ids: string[]\n ): Promise<{\n success: RxDocument[];\n error: RxStorageWriteError[];\n }> {\n ensureRxCollectionIsNotDestroyed(this);\n const primaryPath = this.schema.primaryPath;\n /**\n * Optimization shortcut,\n * do nothing when called with an empty array\n */\n if (ids.length === 0) {\n return {\n success: [],\n error: []\n };\n }\n\n const rxDocumentMap = await this.findByIds(ids).exec();\n const docsData: RxDocumentData[] = [];\n const docsMap: Map> = new Map();\n Array.from(rxDocumentMap.values()).forEach(rxDocument => {\n const data: RxDocumentData = rxDocument.toMutableJSON(true) as any;\n docsData.push(data);\n docsMap.set(rxDocument.primary, data);\n });\n\n await Promise.all(\n docsData.map(doc => {\n const primary 
= (doc as any)[this.schema.primaryPath];\n return this._runHooks('pre', 'remove', doc, rxDocumentMap.get(primary));\n })\n );\n const removeDocs: BulkWriteRow[] = docsData.map(doc => {\n const writeDoc = flatClone(doc);\n writeDoc._deleted = true;\n return {\n previous: doc,\n document: writeDoc\n };\n });\n const results = await this.storageInstance.bulkWrite(\n removeDocs,\n 'rx-collection-bulk-remove'\n );\n\n const successIds: string[] = results.success.map(d => d[primaryPath] as string);\n\n // run hooks\n await Promise.all(\n successIds.map(id => {\n return this._runHooks(\n 'post',\n 'remove',\n docsMap.get(id),\n rxDocumentMap.get(id)\n );\n })\n );\n\n const rxDocuments = successIds.map(id => getFromMapOrThrow(rxDocumentMap, id));\n\n return {\n success: rxDocuments,\n error: results.error\n };\n }\n\n /**\n * same as bulkInsert but overwrites existing document with same primary\n */\n async bulkUpsert(docsData: Partial[]): Promise<{\n success: RxDocument[];\n error: RxStorageWriteError[];\n }> {\n ensureRxCollectionIsNotDestroyed(this);\n const insertData: RxDocumentType[] = [];\n const useJsonByDocId: Map = new Map();\n docsData.forEach(docData => {\n const useJson = fillObjectDataBeforeInsert(this.schema, docData);\n const primary: string = useJson[this.schema.primaryPath] as any;\n if (!primary) {\n throw newRxError('COL3', {\n primaryPath: this.schema.primaryPath as string,\n data: useJson,\n schema: this.schema.jsonSchema\n });\n }\n useJsonByDocId.set(primary, useJson);\n insertData.push(useJson);\n });\n\n const insertResult = await this.bulkInsert(insertData);\n const success = insertResult.success.slice(0);\n const error: RxStorageWriteError[] = [];\n\n // update the ones that existed already\n await Promise.all(\n insertResult.error.map(async (err) => {\n if (err.status !== 409) {\n error.push(err);\n } else {\n const id = err.documentId;\n const writeData = getFromMapOrThrow(useJsonByDocId, id);\n const docDataInDb = 
ensureNotFalsy(err.documentInDb);\n const doc = this._docCache.getCachedRxDocuments([docDataInDb])[0];\n const newDoc = await doc.incrementalModify(() => writeData);\n success.push(newDoc);\n }\n })\n );\n return {\n error,\n success\n };\n }\n\n /**\n * same as insert but overwrites existing document with same primary\n */\n async upsert(json: Partial): Promise> {\n ensureRxCollectionIsNotDestroyed(this);\n const bulkResult = await this.bulkUpsert([json]);\n throwIfIsStorageWriteError(\n this.asRxCollection,\n (json as any)[this.schema.primaryPath],\n json as any,\n bulkResult.error[0]\n );\n return bulkResult.success[0];\n }\n\n /**\n * upserts to a RxDocument, uses incrementalModify if document already exists\n */\n incrementalUpsert(json: Partial): Promise> {\n ensureRxCollectionIsNotDestroyed(this);\n const useJson = fillObjectDataBeforeInsert(this.schema, json);\n const primary: string = useJson[this.schema.primaryPath] as any;\n if (!primary) {\n throw newRxError('COL4', {\n data: json\n });\n }\n\n // ensure that it won't try 2 parallel runs\n let queue = this._incrementalUpsertQueues.get(primary);\n if (!queue) {\n queue = PROMISE_RESOLVE_VOID;\n }\n queue = queue\n .then(() => _incrementalUpsertEnsureRxDocumentExists(this as any, primary as any, useJson))\n .then((wasInserted) => {\n if (!wasInserted.inserted) {\n return _incrementalUpsertUpdate(wasInserted.doc, useJson);\n } else {\n return wasInserted.doc;\n }\n });\n this._incrementalUpsertQueues.set(primary, queue);\n return queue;\n }\n\n find(queryObj?: MangoQuery): RxQuery<\n RxDocumentType,\n RxDocument[],\n OrmMethods,\n Reactivity\n > {\n ensureRxCollectionIsNotDestroyed(this);\n if (typeof queryObj === 'string') {\n throw newRxError('COL5', {\n queryObj\n });\n }\n\n if (!queryObj) {\n queryObj = _getDefaultQuery();\n }\n\n const query = createRxQuery('find', queryObj, this as any);\n return query as any;\n }\n\n findOne(\n queryObj?: MangoQueryNoLimit | string\n ): RxQuery<\n RxDocumentType,\n 
RxDocument | null,\n OrmMethods,\n Reactivity\n > {\n ensureRxCollectionIsNotDestroyed(this);\n\n // TODO move this check to dev-mode plugin\n if (\n typeof queryObj === 'number' ||\n Array.isArray(queryObj)\n ) {\n throw newRxTypeError('COL6', {\n queryObj\n });\n }\n\n let query;\n\n if (typeof queryObj === 'string') {\n query = createRxQuery('findOne', {\n selector: {\n [this.schema.primaryPath]: queryObj\n },\n limit: 1\n }, this as any);\n } else {\n if (!queryObj) {\n queryObj = _getDefaultQuery();\n }\n\n\n // cannot have limit on findOne queries because it will be overwritten\n if ((queryObj as MangoQuery).limit) {\n throw newRxError('QU6');\n }\n\n queryObj = flatClone(queryObj);\n (queryObj as any).limit = 1;\n query = createRxQuery('findOne', queryObj, this as any);\n }\n\n\n return query as any;\n }\n\n count(queryObj?: MangoQuerySelectorAndIndex): RxQuery<\n RxDocumentType,\n number,\n OrmMethods,\n Reactivity\n > {\n ensureRxCollectionIsNotDestroyed(this);\n if (!queryObj) {\n queryObj = _getDefaultQuery();\n }\n const query = createRxQuery('count', queryObj, this as any);\n return query as any;\n }\n\n /**\n * find a list documents by their primary key\n * has way better performance then running multiple findOne() or a find() with a complex $or-selected\n */\n findByIds(\n ids: string[]\n ): RxQuery<\n RxDocumentType,\n Map>,\n OrmMethods,\n Reactivity\n > {\n ensureRxCollectionIsNotDestroyed(this);\n const mangoQuery: MangoQuery = {\n selector: {\n [this.schema.primaryPath]: {\n $in: ids.slice(0)\n }\n } as any\n };\n const query = createRxQuery('findByIds', mangoQuery, this as any);\n return query as any;\n }\n\n /**\n * Export collection to a JSON friendly format.\n */\n exportJSON(): Promise>;\n exportJSON(): Promise>;\n exportJSON(): Promise {\n throw pluginMissing('json-dump');\n }\n\n /**\n * Import the parsed JSON export into the collection.\n * @param _exportedJSON The previously exported data from the `.exportJSON()` method.\n */\n 
importJSON(_exportedJSON: RxDumpCollectionAny): Promise {\n throw pluginMissing('json-dump');\n }\n\n insertCRDT(_updateObj: CRDTEntry | CRDTEntry[]): RxDocument {\n throw pluginMissing('crdt');\n }\n\n /**\n * HOOKS\n */\n addHook(when: HookWhenType, key: HookKeyType, fun: any, parallel = false) {\n if (typeof fun !== 'function') {\n throw newRxTypeError('COL7', {\n key,\n when\n });\n }\n\n if (!HOOKS_WHEN.includes(when)) {\n throw newRxTypeError('COL8', {\n key,\n when\n });\n }\n\n if (!HOOKS_KEYS.includes(key)) {\n throw newRxError('COL9', {\n key\n });\n }\n\n if (when === 'post' && key === 'create' && parallel === true) {\n throw newRxError('COL10', {\n when,\n key,\n parallel\n });\n }\n\n // bind this-scope to hook-function\n const boundFun = fun.bind(this);\n\n const runName = parallel ? 'parallel' : 'series';\n\n this.hooks[key] = this.hooks[key] || {};\n this.hooks[key][when] = this.hooks[key][when] || {\n series: [],\n parallel: []\n };\n this.hooks[key][when][runName].push(boundFun);\n }\n\n getHooks(when: HookWhenType, key: HookKeyType) {\n if (\n !this.hooks[key] ||\n !this.hooks[key][when]\n ) {\n return {\n series: [],\n parallel: []\n };\n }\n return this.hooks[key][when];\n }\n\n hasHooks(when: HookWhenType, key: HookKeyType) {\n /**\n * Performance shortcut\n * so that we not have to build the empty object.\n */\n if (\n !this.hooks[key] ||\n !this.hooks[key][when]\n ) {\n return false;\n }\n\n const hooks = this.getHooks(when, key);\n if (!hooks) {\n return false;\n }\n return hooks.series.length > 0 || hooks.parallel.length > 0;\n }\n\n _runHooks(when: HookWhenType, key: HookKeyType, data: any, instance?: any): Promise {\n const hooks = this.getHooks(when, key);\n\n if (!hooks) {\n return PROMISE_RESOLVE_VOID;\n }\n\n // run parallel: false\n const tasks = hooks.series.map((hook: any) => () => hook(data, instance));\n return promiseSeries(tasks)\n // run parallel: true\n .then(() => Promise.all(\n hooks.parallel\n .map((hook: any) => 
hook(data, instance))\n ));\n }\n\n /**\n * does the same as ._runHooks() but with non-async-functions\n */\n _runHooksSync(when: HookWhenType, key: HookKeyType, data: any, instance: any) {\n if (!this.hasHooks(when, key)) {\n return;\n }\n const hooks = this.getHooks(when, key);\n if (!hooks) return;\n hooks.series.forEach((hook: any) => hook(data, instance));\n }\n\n /**\n * Returns a promise that resolves after the given time.\n * Ensures that is properly cleans up when the collection is destroyed\n * so that no running timeouts prevent the exit of the JavaScript process.\n */\n promiseWait(time: number): Promise {\n const ret = new Promise(res => {\n const timeout = setTimeout(() => {\n this.timeouts.delete(timeout);\n res();\n }, time);\n this.timeouts.add(timeout);\n });\n return ret;\n }\n\n async destroy(): Promise {\n if (this.destroyed) {\n return PROMISE_RESOLVE_FALSE;\n }\n\n\n await Promise.all(this.onDestroy.map(fn => fn()));\n\n /**\n * Settings destroyed = true\n * must be the first thing to do,\n * so for example the replication can directly stop\n * instead of sending requests to a closed storage.\n */\n this.destroyed = true;\n\n\n Array.from(this.timeouts).forEach(timeout => clearTimeout(timeout));\n if (this._changeEventBuffer) {\n this._changeEventBuffer.destroy();\n }\n /**\n * First wait until the whole database is idle.\n * This ensures that the storage does not get closed\n * while some operation is running.\n * It is important that we do not intercept a running call\n * because it might lead to undefined behavior like when a doc is written\n * but the change is not added to the changes collection.\n */\n return this.database.requestIdlePromise()\n .then(() => this.storageInstance.close())\n .then(() => {\n /**\n * Unsubscribing must be done AFTER the storageInstance.close()\n * Because the conflict handling is part of the subscriptions and\n * otherwise there might be open conflicts to be resolved which\n * will then stuck and never 
resolve.\n */\n this._subs.forEach(sub => sub.unsubscribe());\n\n delete this.database.collections[this.name];\n return runAsyncPluginHooks('postDestroyRxCollection', this).then(() => true);\n });\n }\n\n /**\n * remove all data of the collection\n */\n async remove(): Promise {\n await this.destroy();\n await Promise.all(this.onRemove.map(fn => fn()));\n await removeCollectionStorages(\n this.database.storage,\n this.database.internalStore,\n this.database.token,\n this.database.name,\n this.name,\n this.database.password,\n this.database.hashFunction\n );\n }\n\n get asRxCollection(): RxCollection {\n return this as any;\n }\n}\n\n/**\n * adds the hook-functions to the collections prototype\n * this runs only once\n */\nfunction _applyHookFunctions(\n collection: RxCollection\n) {\n if (hooksApplied) return; // already run\n hooksApplied = true;\n const colProto = Object.getPrototypeOf(collection);\n HOOKS_KEYS.forEach(key => {\n HOOKS_WHEN.map(when => {\n const fnName = when + ucfirst(key);\n colProto[fnName] = function (fun: string, parallel: boolean) {\n return this.addHook(when, key, fun, parallel);\n };\n });\n });\n}\n\nfunction _incrementalUpsertUpdate(\n doc: RxDocumentBase,\n json: RxDocumentData\n): Promise> {\n return doc.incrementalModify((_innerDoc) => {\n return json;\n });\n}\n\n/**\n * ensures that the given document exists\n * @return promise that resolves with new doc and flag if inserted\n */\nfunction _incrementalUpsertEnsureRxDocumentExists(\n rxCollection: RxCollection,\n primary: string,\n json: any\n): Promise<\n {\n doc: RxDocument;\n inserted: boolean;\n }\n> {\n /**\n * Optimisation shortcut,\n * first try to find the document in the doc-cache\n */\n const docDataFromCache = rxCollection._docCache.getLatestDocumentDataIfExists(primary);\n if (docDataFromCache) {\n return Promise.resolve({\n doc: rxCollection._docCache.getCachedRxDocuments([docDataFromCache])[0],\n inserted: false\n });\n }\n return rxCollection.findOne(primary).exec()\n 
.then(doc => {\n if (!doc) {\n return rxCollection.insert(json).then(newDoc => ({\n doc: newDoc,\n inserted: true\n }));\n } else {\n return {\n doc,\n inserted: false\n };\n }\n });\n}\n\n/**\n * creates and prepares a new collection\n */\nexport function createRxCollection(\n {\n database,\n name,\n schema,\n instanceCreationOptions = {},\n migrationStrategies = {},\n autoMigrate = true,\n statics = {},\n methods = {},\n attachments = {},\n options = {},\n localDocuments = false,\n cacheReplacementPolicy = defaultCacheReplacementPolicy,\n conflictHandler = defaultConflictHandler\n }: any\n): Promise {\n const storageInstanceCreationParams: RxStorageInstanceCreationParams = {\n databaseInstanceToken: database.token,\n databaseName: database.name,\n collectionName: name,\n schema: schema.jsonSchema,\n options: instanceCreationOptions,\n multiInstance: database.multiInstance,\n password: database.password,\n devMode: overwritable.isDevMode()\n };\n\n runPluginHooks(\n 'preCreateRxStorageInstance',\n storageInstanceCreationParams\n );\n\n return createRxCollectionStorageInstance(\n database,\n storageInstanceCreationParams\n ).then(storageInstance => {\n const collection = new RxCollectionBase(\n database,\n name,\n schema,\n storageInstance,\n instanceCreationOptions,\n migrationStrategies,\n methods,\n attachments,\n options,\n cacheReplacementPolicy,\n statics,\n conflictHandler\n );\n\n return collection\n .prepare()\n .then(() => {\n // ORM add statics\n Object\n .entries(statics)\n .forEach(([funName, fun]) => {\n Object.defineProperty(collection, funName, {\n get: () => (fun as any).bind(collection)\n });\n });\n\n let ret = PROMISE_RESOLVE_VOID;\n if (autoMigrate && collection.schema.version !== 0) {\n ret = collection.migratePromise();\n }\n return ret;\n })\n .then(() => {\n runPluginHooks('createRxCollection', {\n collection,\n creator: {\n name,\n schema,\n storageInstance,\n instanceCreationOptions,\n migrationStrategies,\n methods,\n attachments,\n 
options,\n cacheReplacementPolicy,\n localDocuments,\n statics\n }\n });\n return collection as any;\n })\n /**\n * If the collection creation fails,\n * we yet have to close the storage instances.\n */\n .catch(err => {\n return storageInstance.close()\n .then(() => Promise.reject(err));\n });\n });\n}\n\nexport function isRxCollection(obj: any): boolean {\n return obj instanceof RxCollectionBase;\n}\n"],"mappings":";AAAA,SACIA,MAAM,EACNC,GAAG,EACHC,QAAQ,QACL,MAAM;AAEb,SACIC,OAAO,EACPC,SAAS,EACTC,aAAa,EACbC,aAAa,EACbC,cAAc,EACdC,iBAAiB,EACjBC,qBAAqB,EACrBC,oBAAoB,QACjB,0BAA0B;AACjC,SACIC,0BAA0B,EAC1BC,iCAAiC,EACjCC,wBAAwB,EACxBC,gCAAgC,QAC7B,2BAA2B;AAClC,SACIC,aAAa,EACbC,gBAAgB,QACb,eAAe;AACtB,SACIC,UAAU,EACVC,cAAc,QACX,eAAe;AAItB,SACIC,aAAa,EACbC,2BAA2B,QACxB,gBAAgB;AACvB,SAEIC,gBAAgB,EAChBC,6BAA6B,QAC1B,kBAAkB;AACzB,SAEIC,uBAAuB,QACpB,0BAA0B;AACjC,SACIC,mBAAmB,EACnBC,cAAc,QACX,YAAY;AA0CnB,SACIC,mBAAmB,EACnBC,wBAAwB,QACrB,kCAAkC;AACzC,SACIC,yBAAyB,EACzBC,0BAA0B,QAEvB,wBAAwB;AAC/B,SAASC,sBAAsB,QAAQ,iCAAiC;AACxE,SAASC,qBAAqB,QAAQ,wBAAwB;AAC9D,SAASC,yBAAyB,QAAQ,kBAAkB;AAC5D,SAASC,YAAY,QAAQ,mBAAmB;AAEhD,IAAMC,UAAU,GAAG,CAAC,KAAK,EAAE,MAAM,CAAU;AAE3C,IAAMC,UAAU,GAAG,CAAC,QAAQ,EAAE,MAAM,EAAE,QAAQ,EAAE,QAAQ,CAAU;AAElE,IAAIC,YAAY,GAAG,KAAK;AAExB,WAAaC,gBAAgB;EASzB;AACJ;AACA;;EAKI,SAAAA,iBACWC,QAAqF,EACrFC,IAAY,EACZC,MAAgC,EAChCC,uBAAwF,EACxFC,uBAAgD,GAAG,CAAC,CAAQ,EAC5DC,mBAAwC,GAAG,CAAC,CAAC,EAC7CC,OAAuB,GAAG,CAAC,CAAC,EAC5BC,WAA2B,GAAG,CAAC,CAAC,EAChCC,OAAY,GAAG,CAAC,CAAC,EACjBC,sBAAgD,GAAGzB,6BAA6B,EAChF0B,OAAuB,GAAG,CAAC,CAAC,EAC5BC,eAAkD,GAAGnB,sBAAsB,EACpF;IAAA,KAjBKoB,eAAe,GAA2E,CAAC,CAAC;IAAA,KACnFC,QAAQ,GAAuC,IAAIC,GAAG,CAAC,CAAC;IAAA,KACjEC,qBAAqB,GAA0C,CAAC,CAAC;IAAA,KAmCjEC,wBAAwB,GAA8B,IAAIC,GAAG,CAAC,CAAC;IAAA,KAE/DC,MAAM,GAAY,KAAK;IAAA,KACvBC,KAAK,GAOR,CAAC,CAAC;IAAA,KACCC,KAAK,GAAmB,EAAE;IAAA,KAE1BC,SAAS,GAA8C,CAAC,CAAC;IAAA,KAEzDC,WAAW,GAAevC,gBAAgB,CAAC,CAAC;IAAA,KAC5CwC,CAAC,GAA8C,CAAC,CAAC;IAAA,KACjDC,WAAW,GAAoB,CAAC,CAAC;IAAA,KACjCC,kBAAkB,GAAsC,CAAC,
CAAC;IAAA,KAU1DC,SAAS,GAAgC,EAAE;IAAA,KAC3CC,SAAS,GAAG,KAAK;IAAA,KAEjBC,QAAQ,GAAgC,EAAE;IAAA,KA/DtC5B,QAAqF,GAArFA,QAAqF;IAAA,KACrFC,IAAY,GAAZA,IAAY;IAAA,KACZC,MAAgC,GAAhCA,MAAgC;IAAA,KAChCC,uBAAwF,GAAxFA,uBAAwF;IAAA,KACxFC,uBAAgD,GAAhDA,uBAAgD;IAAA,KAChDC,mBAAwC,GAAxCA,mBAAwC;IAAA,KACxCC,OAAuB,GAAvBA,OAAuB;IAAA,KACvBC,WAA2B,GAA3BA,WAA2B;IAAA,KAC3BC,OAAY,GAAZA,OAAY;IAAA,KACZC,sBAAgD,GAAhDA,sBAAgD;IAAA,KAChDC,OAAuB,GAAvBA,OAAuB;IAAA,KACvBC,eAAkD,GAAlDA,eAAkD;IAEzDkB,mBAAmB,CAAC,IAAI,CAACC,cAAc,CAAC;EAC5C;EAAC,IAAAC,MAAA,GAAAhC,gBAAA,CAAAiC,SAAA;EAAAD,MAAA,CAmDYE,OAAO,GAApB,eAAAA,QAAA,EAAsC;IAClC,IAAI,CAACrB,eAAe,GAAGtB,yBAAyB,CAC5C,IAAI,CAACU,QAAQ,EACb,IAAI,CAACG,uBAAuB,EAC5B,IAAI,CAACD,MAAM,CAACgC,UAChB,CAAC;IACD,IAAI,CAACnB,qBAAqB,GAAG,IAAItB,qBAAqB,CAClD,IAAI,CAACmB,eAAe,EACpB,IAAI,CAACV,MAAM,CAACiC,WAAW,EACvB,CAACC,OAAO,EAAEC,OAAO,KAAK3C,yBAAyB,CAAC,IAAI,EAAS0C,OAAO,EAAEC,OAAO,CAAC,EAC9EC,MAAM,IAAI,IAAI,CAACC,SAAS,CAAC,MAAM,EAAE,MAAM,EAAED,MAAM,CACnD,CAAC;IAED,IAAME,qBAAqB,GAAG,IAAI,CAACxC,QAAQ,CAACyC,WAAW,CAACC,IAAI,CACxDhF,MAAM,CAACiF,eAAe,IAAIA,eAAe,CAACC,cAAc,KAAK,IAAI,CAAC3C,IAAI,CAC1E,CAAC;IACD,IAAI,CAACsB,CAAC,GAAGiB,qBAAqB,CAACE,IAAI,CAC/B9E,QAAQ,CAAC+E,eAAe,IAAIA,eAAe,CAACE,MAAM,CACtD,CAAC;IACD,IAAI,CAACrB,WAAW,GAAGgB,qBAAqB,CAACE,IAAI,CACzC/E,GAAG,CAACgF,eAAe,IAAIA,eAAe,CAACG,UAAU,CACrD,CAAC;IAED,IAAI,CAACrB,kBAAkB,GAAGxC,uBAAuB,CAAiB,IAAI,CAAC6C,cAAc,CAAC;IACtF,IAAIiB,mBAAwB;IAC5B,IAAI,CAAC1B,SAAS,GAAG,IAAIxC,aAAa,CAC9B,IAAI,CAACqB,MAAM,CAACiC,WAAW,EACvB,IAAI,CAACZ,CAAC,CAACmB,IAAI,CAAChF,MAAM,CAACsF,EAAE,IAAI,CAACA,EAAE,CAACC,OAAO,CAAC,CAAC,EACtCC,OAAO,IAAI;MACP,IAAI,CAACH,mBAAmB,EAAE;QACtBA,mBAAmB,GAAG1D,wBAAwB,CAAC,IAAI,CAACyC,cAAc,CAAC;MACvE;MACA,OAAO1C,mBAAmB,CAAC,IAAI,CAAC0C,cAAc,EAAEiB,mBAAmB,EAAEG,OAAO,CAAC;IACjF,CACJ,CAAC;IAGD,IAAMC,iBAAiB,GAAG,IAAI,CAACnD,QAAQ,CAACoD,aAAa,CAACC,YAAY,CAAC,CAAC,CAACX,IAAI,CACrEhF,MAAM,CAAC4F,IAAI,IAAI;MACX,IAAMC,GAAG,GAAG,IAAI,CAACtD,IAAI,GAAG,GAAG,GAAG,IAAI,CAACC,MAAM,CAACsD,OAAO;MACjD,IAAMC,KAAK,GAAGH,IAAI,CA
ACT,MAAM,CAACa,IAAI,CAACC,KAAK,IAAI;QACpC,OACIA,KAAK,CAACC,YAAY,CAACC,OAAO,KAAK,YAAY,IAC3CF,KAAK,CAACC,YAAY,CAACL,GAAG,KAAKA,GAAG,IAC9BI,KAAK,CAACG,SAAS,KAAK,QAAQ;MAEpC,CAAC,CAAC;MACF,OAAO,CAAC,CAACL,KAAK;IAClB,CAAC,CACL,CAAC,CAACM,SAAS,CAAC,YAAY;MACpB,MAAM,IAAI,CAACC,OAAO,CAAC,CAAC;MACpB,MAAMC,OAAO,CAACC,GAAG,CAAC,IAAI,CAACtC,QAAQ,CAACjE,GAAG,CAACwG,EAAE,IAAIA,EAAE,CAAC,CAAC,CAAC,CAAC;IACpD,CAAC,CAAC;IACF,IAAI,CAAC/C,KAAK,CAACgD,IAAI,CAACjB,iBAAiB,CAAC;;IAGlC;AACR;AACA;AACA;AACA;IACQ,IAAMkB,oBAAoB,GAAG,MAAM,IAAI,CAACrE,QAAQ,CAACsE,YAAY;IAC7D,IAAMC,OAAO,GAAG,IAAI,CAAC3D,eAAe,CAACyC,YAAY,CAAC,CAAC,CAACU,SAAS,CAACS,SAAS,IAAI;MACvE,IAAM3B,MAAM,GAAG,IAAI4B,KAAK,CAACD,SAAS,CAAC3B,MAAM,CAAC6B,MAAM,CAAC;MACjD,IAAMC,SAAS,GAAGH,SAAS,CAAC3B,MAAM;MAClC,IAAMD,cAAc,GAAG,IAAI,CAAC3C,IAAI;MAChC,IAAM2E,qBAAqB,GAAGjF,YAAY,CAACiF,qBAAqB;MAChE,KAAK,IAAIC,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGF,SAAS,CAACD,MAAM,EAAEG,KAAK,EAAE,EAAE;QACnD,IAAMlB,KAAK,GAAGgB,SAAS,CAACE,KAAK,CAAC;QAC9BhC,MAAM,CAACgC,KAAK,CAAC,GAAG;UACZC,UAAU,EAAEnB,KAAK,CAACmB,UAAU;UAC5BlC,cAAc;UACdK,OAAO,EAAE,KAAK;UACda,SAAS,EAAEH,KAAK,CAACG,SAAS;UAC1BF,YAAY,EAAEgB,qBAAqB,CAACjB,KAAK,CAACC,YAAY,CAAQ;UAC9DmB,oBAAoB,EAAEH,qBAAqB,CAACjB,KAAK,CAACoB,oBAAoB;QAC1E,CAAC;MACL;MACA,IAAMpC,eAAwE,GAAG;QAC7EqC,EAAE,EAAER,SAAS,CAACQ,EAAE;QAChBC,QAAQ,EAAE,KAAK;QACfrC,cAAc,EAAE,IAAI,CAAC3C,IAAI;QACzBqE,YAAY,EAAED,oBAAoB;QAClCxB,MAAM;QACNqC,aAAa,EAAE,IAAI,CAAClF,QAAQ,CAACmF,KAAK;QAClCrC,UAAU,EAAE0B,SAAS,CAAC1B,UAAU;QAChCe,OAAO,EAAEW,SAAS,CAACX,OAAO;QAC1BuB,OAAO,EAAEZ,SAAS,CAACY,OAAO;QAC1BC,SAAS,EAAEb,SAAS,CAACa;MACzB,CAAC;MACD,IAAI,CAACrF,QAAQ,CAACsF,KAAK,CAAC3C,eAAe,CAAC;IACxC,CAAC,CAAC;IACF,IAAI,CAACvB,KAAK,CAACgD,IAAI,CAACG,OAAO,CAAC;;IAExB;AACR;AACA;AACA;IACQ,IAAI,CAACnD,KAAK,CAACgD,IAAI,CACX,IAAI,CAACxD,eAAe,CACf2E,sBAAsB,CAAC,CAAC,CACxBxB,SAAS,CAACyB,IAAI,IAAI;MACf,IAAI,CACC7E,eAAe,CAAC6E,IAAI,CAACC,KAAK,EAAED,IAAI,CAAC3B,OAAO,CAAC,CACzC6B,IAAI,CAACC,MAAM,IAAI;QACZ,IAAI,CAAC/E,eAAe,CAACgF,4BAA4B,CAAC;UAC9CZ,EAAE,EAAEQ,IAAI,CAACR,EAAE;UACXW
;QACJ,CAAC,CAAC;MACN,CAAC,CAAC;IACV,CAAC,CACT,CAAC;IAED,OAAOvH,oBAAoB;EAC/B;;EAGA;AACJ;AACA;AACA,KAHI;EAAA2D,MAAA,CAIA8D,OAAO,GAAP,SAAAA,QAAQC,mBAA4B,EAAoB;IACpDtH,gCAAgC,CAAC,IAAI,CAAC;IACtC,MAAMR,aAAa,CAAC,SAAS,CAAC;EAClC;;EAEA;EAAA;EAAA+D,MAAA,CACAgE,eAAe,GAAf,SAAAA,gBAAA,EAAoC;IAChC,MAAM/H,aAAa,CAAC,kBAAkB,CAAC;EAC3C,CAAC;EAAA+D,MAAA,CACDiE,iBAAiB,GAAjB,SAAAA,kBAAA,EAAsC;IAClC,MAAMhI,aAAa,CAAC,kBAAkB,CAAC;EAC3C,CAAC;EAAA+D,MAAA,CACDkE,cAAc,GAAd,SAAAA,eAAeC,SAAiB,GAAG,EAAE,EAAiB;IAClD1H,gCAAgC,CAAC,IAAI,CAAC;IACtC,OAAO,IAAI,CAACwH,iBAAiB,CAAC,CAAC,CAACC,cAAc,CAACC,SAAS,CAAC;EAC7D,CAAC;EAAAnE,MAAA,CACDoE,cAAc,GAAd,SAAAA,eAAeD,SAAiB,GAAG,EAAE,EAAgB;IACjD,OAAO,IAAI,CAACF,iBAAiB,CAAC,CAAC,CAACG,cAAc,CAACD,SAAS,CAAC;EAC7D,CAAC;EAAAnE,MAAA,CAEKqE,MAAM,GAAZ,eAAAA,OACIC,IAAiC,EACc;IAC/C7H,gCAAgC,CAAC,IAAI,CAAC;IACtC,IAAM8H,WAAW,GAAG,MAAM,IAAI,CAACC,UAAU,CAAC,CAACF,IAAI,CAAQ,CAAC;IAExD,IAAMG,OAAO,GAAGF,WAAW,CAACG,KAAK,CAAC,CAAC,CAAC;IACpClH,0BAA0B,CAAC,IAAI,EAAU8G,IAAI,CAAS,IAAI,CAACnG,MAAM,CAACiC,WAAW,CAAC,EAASkE,IAAI,EAAEG,OAAO,CAAC;IACrG,IAAME,YAAY,GAAGzI,cAAc,CAACqI,WAAW,CAACK,OAAO,CAAC,CAAC,CAAC,CAAC;IAC3D,OAAOD,YAAY;EACvB,CAAC;EAAA3E,MAAA,CAEKwE,UAAU,GAAhB,eAAAA,WACIK,QAA0B,EAI3B;IACCpI,gCAAgC,CAAC,IAAI,CAAC;IACtC;AACR;AACA;AACA;IACQ,IAAIoI,QAAQ,CAAClC,MAAM,KAAK,CAAC,EAAE;MACvB,OAAO;QACHiC,OAAO,EAAE,EAAE;QACXF,KAAK,EAAE;MACX,CAAC;IACL;IAEA,IAAMtE,WAAW,GAAG,IAAI,CAACjC,MAAM,CAACiC,WAAW;;IAG3C;AACR;AACA;AACA;AACA;IACQ,IAAI0E,UAA0C;IAC9C,IAAI,IAAI,CAACC,QAAQ,CAAC,KAAK,EAAE,QAAQ,CAAC,EAAE;MAChCD,UAAU,GAAG,MAAM5C,OAAO,CAACC,GAAG,CAC1B0C,QAAQ,CAACjJ,GAAG,CAACuF,OAAO,IAAI;QACpB,IAAM6D,UAAU,GAAG1I,0BAA0B,CAAC,IAAI,CAAC6B,MAAM,EAAEgD,OAAO,CAAC;QACnE,OAAO,IAAI,CAACX,SAAS,CAAC,KAAK,EAAE,QAAQ,EAAEwE,UAAU,CAAC,CAC7CrB,IAAI,CAAC,MAAM;UACR,OAAO;YAAEsB,QAAQ,EAAED;UAAW,CAAC;QACnC,CAAC,CAAC;MACV,CAAC,CACL,CAAC;IACL,CAAC,MAAM;MACHF,UAAU,GAAG,EAAE;MACf,KAAK,IAAIhC,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAG+B,QAAQ,CAAClC,MAAM,EAAEG,KAAK,EAAE,EAAE;QAClD,IAAM3B,OAAO,GAAG0D,QAAQ,CAAC/B,KAAK,CAAC;QAC/B
,IAAMkC,UAAU,GAAG1I,0BAA0B,CAAC,IAAI,CAAC6B,MAAM,EAAEgD,OAAO,CAAC;QACnE2D,UAAU,CAAChC,KAAK,CAAC,GAAG;UAAEmC,QAAQ,EAAED;QAAW,CAAC;MAChD;IACJ;IAEA,IAAME,OAAO,GAAG,MAAM,IAAI,CAACrG,eAAe,CAACsG,SAAS,CAChDL,UAAU,EACV,2BACJ,CAAC;;IAED;IACA,IAAMM,WAAW,GAAGrI,2BAA2B,CAA6B,IAAI,CAACuC,SAAS,EAAE4F,OAAO,CAACN,OAAO,CAAC;IAE5G,IAAI,IAAI,CAACG,QAAQ,CAAC,MAAM,EAAE,QAAQ,CAAC,EAAE;MACjC,IAAMM,OAAoC,GAAG,IAAInG,GAAG,CAAC,CAAC;MACtD4F,UAAU,CAACQ,OAAO,CAACC,GAAG,IAAI;QACtB,IAAMC,GAAG,GAAGD,GAAG,CAACN,QAAQ;QACxBI,OAAO,CAACI,GAAG,CAAED,GAAG,CAASpF,WAAW,CAAC,EAASoF,GAAG,CAAC;MACtD,CAAC,CAAC;MACF,MAAMtD,OAAO,CAACC,GAAG,CACbiD,WAAW,CAACxJ,GAAG,CAAC4J,GAAG,IAAI;QACnB,OAAO,IAAI,CAAChF,SAAS,CACjB,MAAM,EAAE,QAAQ,EAChB6E,OAAO,CAACK,GAAG,CAACF,GAAG,CAACG,OAAO,CAAC,EACxBH,GACJ,CAAC;MACL,CAAC,CACL,CAAC;IACL;IAEA,OAAO;MACHZ,OAAO,EAAEQ,WAAW;MACpBV,KAAK,EAAEQ,OAAO,CAACR;IACnB,CAAC;EACL,CAAC;EAAA1E,MAAA,CAEK4F,UAAU,GAAhB,eAAAA,WACIC,GAAa,EAId;IACCpJ,gCAAgC,CAAC,IAAI,CAAC;IACtC,IAAM2D,WAAW,GAAG,IAAI,CAACjC,MAAM,CAACiC,WAAW;IAC3C;AACR;AACA;AACA;IACQ,IAAIyF,GAAG,CAAClD,MAAM,KAAK,CAAC,EAAE;MAClB,OAAO;QACHiC,OAAO,EAAE,EAAE;QACXF,KAAK,EAAE;MACX,CAAC;IACL;IAEA,IAAMoB,aAAa,GAAG,MAAM,IAAI,CAACC,SAAS,CAACF,GAAG,CAAC,CAACG,IAAI,CAAC,CAAC;IACtD,IAAMnB,QAA0C,GAAG,EAAE;IACrD,IAAMQ,OAAoD,GAAG,IAAInG,GAAG,CAAC,CAAC;IACtEwD,KAAK,CAACuD,IAAI,CAACH,aAAa,CAACI,MAAM,CAAC,CAAC,CAAC,CAACZ,OAAO,CAACa,UAAU,IAAI;MACrD,IAAMC,IAAoC,GAAGD,UAAU,CAACE,aAAa,CAAC,IAAI,CAAQ;MAClFxB,QAAQ,CAACxC,IAAI,CAAC+D,IAAI,CAAC;MACnBf,OAAO,CAACI,GAAG,CAACU,UAAU,CAACR,OAAO,EAAES,IAAI,CAAC;IACzC,CAAC,CAAC;IAEF,MAAMlE,OAAO,CAACC,GAAG,CACb0C,QAAQ,CAACjJ,GAAG,CAAC4J,GAAG,IAAI;MAChB,IAAMG,OAAO,GAAIH,GAAG,CAAS,IAAI,CAACrH,MAAM,CAACiC,WAAW,CAAC;MACrD,OAAO,IAAI,CAACI,SAAS,CAAC,KAAK,EAAE,QAAQ,EAAEgF,GAAG,EAAEM,aAAa,CAACJ,GAAG,CAACC,OAAO,CAAC,CAAC;IAC3E,CAAC,CACL,CAAC;IACD,IAAMW,UAA0C,GAAGzB,QAAQ,CAACjJ,GAAG,CAAC4J,GAAG,IAAI;MACnE,IAAMe,QAAQ,GAAGxK,SAAS,CAACyJ,GAAG,CAAC;MAC/Be,QAAQ,CAACC,QAAQ,GAAG,IAAI;MACxB,OAAO;QACHC,QAAQ,EAAEjB,GAAG;QACbP,QAAQ,EAAEsB;MACd,
CAAC;IACL,CAAC,CAAC;IACF,IAAMrB,OAAO,GAAG,MAAM,IAAI,CAACrG,eAAe,CAACsG,SAAS,CAChDmB,UAAU,EACV,2BACJ,CAAC;IAED,IAAMI,UAAoB,GAAGxB,OAAO,CAACN,OAAO,CAAChJ,GAAG,CAAC+K,CAAC,IAAIA,CAAC,CAACvG,WAAW,CAAW,CAAC;;IAE/E;IACA,MAAM8B,OAAO,CAACC,GAAG,CACbuE,UAAU,CAAC9K,GAAG,CAACqH,EAAE,IAAI;MACjB,OAAO,IAAI,CAACzC,SAAS,CACjB,MAAM,EACN,QAAQ,EACR6E,OAAO,CAACK,GAAG,CAACzC,EAAE,CAAC,EACf6C,aAAa,CAACJ,GAAG,CAACzC,EAAE,CACxB,CAAC;IACL,CAAC,CACL,CAAC;IAED,IAAMmC,WAAW,GAAGsB,UAAU,CAAC9K,GAAG,CAACqH,EAAE,IAAI9G,iBAAiB,CAAC2J,aAAa,EAAE7C,EAAE,CAAC,CAAC;IAE9E,OAAO;MACH2B,OAAO,EAAEQ,WAAW;MACpBV,KAAK,EAAEQ,OAAO,CAACR;IACnB,CAAC;EACL;;EAEA;AACJ;AACA,KAFI;EAAA1E,MAAA,CAGM4G,UAAU,GAAhB,eAAAA,WAAiB/B,QAAmC,EAGjD;IACCpI,gCAAgC,CAAC,IAAI,CAAC;IACtC,IAAMoK,UAA4B,GAAG,EAAE;IACvC,IAAMC,cAA2C,GAAG,IAAI5H,GAAG,CAAC,CAAC;IAC7D2F,QAAQ,CAACS,OAAO,CAACnE,OAAO,IAAI;MACxB,IAAM4F,OAAO,GAAGzK,0BAA0B,CAAC,IAAI,CAAC6B,MAAM,EAAEgD,OAAO,CAAC;MAChE,IAAMwE,OAAe,GAAGoB,OAAO,CAAC,IAAI,CAAC5I,MAAM,CAACiC,WAAW,CAAQ;MAC/D,IAAI,CAACuF,OAAO,EAAE;QACV,MAAM/I,UAAU,CAAC,MAAM,EAAE;UACrBwD,WAAW,EAAE,IAAI,CAACjC,MAAM,CAACiC,WAAqB;UAC9CgG,IAAI,EAAEW,OAAO;UACb5I,MAAM,EAAE,IAAI,CAACA,MAAM,CAACgC;QACxB,CAAC,CAAC;MACN;MACA2G,cAAc,CAACrB,GAAG,CAACE,OAAO,EAAEoB,OAAO,CAAC;MACpCF,UAAU,CAACxE,IAAI,CAAC0E,OAAO,CAAC;IAC5B,CAAC,CAAC;IAEF,IAAMpC,YAAY,GAAG,MAAM,IAAI,CAACH,UAAU,CAACqC,UAAU,CAAC;IACtD,IAAMjC,OAAO,GAAGD,YAAY,CAACC,OAAO,CAACoC,KAAK,CAAC,CAAC,CAAC;IAC7C,IAAMtC,KAA4C,GAAG,EAAE;;IAEvD;IACA,MAAMxC,OAAO,CAACC,GAAG,CACbwC,YAAY,CAACD,KAAK,CAAC9I,GAAG,CAAC,MAAOqL,GAAG,IAAK;MAClC,IAAIA,GAAG,CAACC,MAAM,KAAK,GAAG,EAAE;QACpBxC,KAAK,CAACrC,IAAI,CAAC4E,GAAG,CAAC;MACnB,CAAC,MAAM;QACH,IAAMhE,EAAE,GAAGgE,GAAG,CAAClE,UAAU;QACzB,IAAMoE,SAAS,GAAGhL,iBAAiB,CAAC2K,cAAc,EAAE7D,EAAE,CAAC;QACvD,IAAMmE,WAAW,GAAGlL,cAAc,CAAC+K,GAAG,CAACI,YAAY,CAAC;QACpD,IAAM7B,GAAG,GAAG,IAAI,CAAClG,SAAS,CAACgI,oBAAoB,CAAC,CAACF,WAAW,CAAC,CAAC,CAAC,CAAC,CAAC;QACjE,IAAMG,MAAM,GAAG,MAAM/B,GAAG,CAACgC,iBAAiB,CAAC,MAAML,SAAS,CAAC;QAC3DvC,OAAO,CAACvC,IAAI,CAACkF,MAAM,CAAC;MACxB;IAC
J,CAAC,CACL,CAAC;IACD,OAAO;MACH7C,KAAK;MACLE;IACJ,CAAC;EACL;;EAEA;AACJ;AACA,KAFI;EAAA5E,MAAA,CAGMyH,MAAM,GAAZ,eAAAA,OAAanD,IAA6B,EAAmD;IACzF7H,gCAAgC,CAAC,IAAI,CAAC;IACtC,IAAMiL,UAAU,GAAG,MAAM,IAAI,CAACd,UAAU,CAAC,CAACtC,IAAI,CAAC,CAAC;IAChD9G,0BAA0B,CACtB,IAAI,CAACuC,cAAc,EAClBuE,IAAI,CAAS,IAAI,CAACnG,MAAM,CAACiC,WAAW,CAAC,EACtCkE,IAAI,EACJoD,UAAU,CAAChD,KAAK,CAAC,CAAC,CACtB,CAAC;IACD,OAAOgD,UAAU,CAAC9C,OAAO,CAAC,CAAC,CAAC;EAChC;;EAEA;AACJ;AACA,KAFI;EAAA5E,MAAA,CAGA2H,iBAAiB,GAAjB,SAAAA,kBAAkBrD,IAA6B,EAAmD;IAC9F7H,gCAAgC,CAAC,IAAI,CAAC;IACtC,IAAMsK,OAAO,GAAGzK,0BAA0B,CAAC,IAAI,CAAC6B,MAAM,EAAEmG,IAAI,CAAC;IAC7D,IAAMqB,OAAe,GAAGoB,OAAO,CAAC,IAAI,CAAC5I,MAAM,CAACiC,WAAW,CAAQ;IAC/D,IAAI,CAACuF,OAAO,EAAE;MACV,MAAM/I,UAAU,CAAC,MAAM,EAAE;QACrBwJ,IAAI,EAAE9B;MACV,CAAC,CAAC;IACN;;IAEA;IACA,IAAIsD,KAAK,GAAG,IAAI,CAAC3I,wBAAwB,CAACyG,GAAG,CAACC,OAAO,CAAC;IACtD,IAAI,CAACiC,KAAK,EAAE;MACRA,KAAK,GAAGvL,oBAAoB;IAChC;IACAuL,KAAK,GAAGA,KAAK,CACRjE,IAAI,CAAC,MAAMkE,wCAAwC,CAAC,IAAI,EAASlC,OAAO,EAASoB,OAAO,CAAC,CAAC,CAC1FpD,IAAI,CAAEmE,WAAW,IAAK;MACnB,IAAI,CAACA,WAAW,CAACC,QAAQ,EAAE;QACvB,OAAOC,wBAAwB,CAACF,WAAW,CAACtC,GAAG,EAAEuB,OAAO,CAAC;MAC7D,CAAC,MAAM;QACH,OAAOe,WAAW,CAACtC,GAAG;MAC1B;IACJ,CAAC,CAAC;IACN,IAAI,CAACvG,wBAAwB,CAACwG,GAAG,CAACE,OAAO,EAAEiC,KAAK,CAAC;IACjD,OAAOA,KAAK;EAChB,CAAC;EAAA5H,MAAA,CAED2B,IAAI,GAAJ,SAAAA,KAAKsG,QAAqC,EAKxC;IACExL,gCAAgC,CAAC,IAAI,CAAC;IACtC,IAAI,OAAOwL,QAAQ,KAAK,QAAQ,EAAE;MAC9B,MAAMrL,UAAU,CAAC,MAAM,EAAE;QACrBqL;MACJ,CAAC,CAAC;IACN;IAEA,IAAI,CAACA,QAAQ,EAAE;MACXA,QAAQ,GAAGtL,gBAAgB,CAAC,CAAC;IACjC;IAEA,IAAMuL,KAAK,GAAGxL,aAAa,CAAC,MAAM,EAAEuL,QAAQ,EAAE,IAAW,CAAC;IAC1D,OAAOC,KAAK;EAChB,CAAC;EAAAlI,MAAA,CAEDmI,OAAO,GAAP,SAAAA,QACIF,QAAqD,EAMvD;IACExL,gCAAgC,CAAC,IAAI,CAAC;;IAEtC;IACA,IACI,OAAOwL,QAAQ,KAAK,QAAQ,IAC5BvF,KAAK,CAAC0F,OAAO,CAACH,QAAQ,CAAC,EACzB;MACE,MAAMpL,cAAc,CAAC,MAAM,EAAE;QACzBoL;MACJ,CAAC,CAAC;IACN;IAEA,IAAIC,KAAK;IAET,IAAI,OAAOD,QAAQ,KAAK,QAAQ,EAAE;MAC9BC,KAAK,GAAGxL,aAAa,CAAC,SAAS,EAAE;QAC7B2L,QAAQ,EAAE;UACN,CAAC,IAAI,CAACl
K,MAAM,CAACiC,WAAW,GAAG6H;QAC/B,CAAC;QACDK,KAAK,EAAE;MACX,CAAC,EAAE,IAAW,CAAC;IACnB,CAAC,MAAM;MACH,IAAI,CAACL,QAAQ,EAAE;QACXA,QAAQ,GAAGtL,gBAAgB,CAAC,CAAC;MACjC;;MAGA;MACA,IAAKsL,QAAQ,CAAgBK,KAAK,EAAE;QAChC,MAAM1L,UAAU,CAAC,KAAK,CAAC;MAC3B;MAEAqL,QAAQ,GAAGlM,SAAS,CAACkM,QAAQ,CAAC;MAC7BA,QAAQ,CAASK,KAAK,GAAG,CAAC;MAC3BJ,KAAK,GAAGxL,aAAa,CAAiB,SAAS,EAAEuL,QAAQ,EAAE,IAAW,CAAC;IAC3E;IAGA,OAAOC,KAAK;EAChB,CAAC;EAAAlI,MAAA,CAEDuI,KAAK,GAAL,SAAAA,MAAMN,QAAqD,EAKzD;IACExL,gCAAgC,CAAC,IAAI,CAAC;IACtC,IAAI,CAACwL,QAAQ,EAAE;MACXA,QAAQ,GAAGtL,gBAAgB,CAAC,CAAC;IACjC;IACA,IAAMuL,KAAK,GAAGxL,aAAa,CAAC,OAAO,EAAEuL,QAAQ,EAAE,IAAW,CAAC;IAC3D,OAAOC,KAAK;EAChB;;EAEA;AACJ;AACA;AACA,KAHI;EAAAlI,MAAA,CAIA+F,SAAS,GAAT,SAAAA,UACIF,GAAa,EAMf;IACEpJ,gCAAgC,CAAC,IAAI,CAAC;IACtC,IAAM+L,UAAsC,GAAG;MAC3CH,QAAQ,EAAE;QACN,CAAC,IAAI,CAAClK,MAAM,CAACiC,WAAW,GAAG;UACvBqI,GAAG,EAAE5C,GAAG,CAACmB,KAAK,CAAC,CAAC;QACpB;MACJ;IACJ,CAAC;IACD,IAAMkB,KAAK,GAAGxL,aAAa,CAAC,WAAW,EAAE8L,UAAU,EAAE,IAAW,CAAC;IACjE,OAAON,KAAK;EAChB;;EAEA;AACJ;AACA,KAFI;EAAAlI,MAAA,CAKA0I,UAAU,GAAV,SAAAA,WAAA,EAA2B;IACvB,MAAMzM,aAAa,CAAC,WAAW,CAAC;EACpC;;EAEA;AACJ;AACA;AACA,KAHI;EAAA+D,MAAA,CAIA2I,UAAU,GAAV,SAAAA,WAAWC,aAAkD,EAAiB;IAC1E,MAAM3M,aAAa,CAAC,WAAW,CAAC;EACpC,CAAC;EAAA+D,MAAA,CAED6I,UAAU,GAAV,SAAAA,WAAWC,UAA6C,EAA0C;IAC9F,MAAM7M,aAAa,CAAC,MAAM,CAAC;EAC/B;;EAEA;AACJ;AACA,KAFI;EAAA+D,MAAA,CAGA+I,OAAO,GAAP,SAAAA,QAAQC,IAAkB,EAAExH,GAAgB,EAAEyH,GAAQ,EAAEC,QAAQ,GAAG,KAAK,EAAE;IACtE,IAAI,OAAOD,GAAG,KAAK,UAAU,EAAE;MAC3B,MAAMpM,cAAc,CAAC,MAAM,EAAE;QACzB2E,GAAG;QACHwH;MACJ,CAAC,CAAC;IACN;IAEA,IAAI,CAACnL,UAAU,CAACsL,QAAQ,CAACH,IAAI,CAAC,EAAE;MAC5B,MAAMnM,cAAc,CAAC,MAAM,EAAE;QACzB2E,GAAG;QACHwH;MACJ,CAAC,CAAC;IACN;IAEA,IAAI,CAAClL,UAAU,CAACqL,QAAQ,CAAC3H,GAAG,CAAC,EAAE;MAC3B,MAAM5E,UAAU,CAAC,MAAM,EAAE;QACrB4E;MACJ,CAAC,CAAC;IACN;IAEA,IAAIwH,IAAI,KAAK,MAAM,IAAIxH,GAAG,KAAK,QAAQ,IAAI0H,QAAQ,KAAK,IAAI,EAAE;MAC1D,MAAMtM,UAAU,CAAC,OAAO,EAAE;QACtBoM,IAAI;QACJxH,GAAG;QACH0H;MACJ,CAAC,CAAC;IACN;;IAEA;IACA,IAAME,QAAQ,GAAGH,GAAG,CAACI,IAAI,CAA
C,IAAI,CAAC;IAE/B,IAAMC,OAAO,GAAGJ,QAAQ,GAAG,UAAU,GAAG,QAAQ;IAEhD,IAAI,CAAC9J,KAAK,CAACoC,GAAG,CAAC,GAAG,IAAI,CAACpC,KAAK,CAACoC,GAAG,CAAC,IAAI,CAAC,CAAC;IACvC,IAAI,CAACpC,KAAK,CAACoC,GAAG,CAAC,CAACwH,IAAI,CAAC,GAAG,IAAI,CAAC5J,KAAK,CAACoC,GAAG,CAAC,CAACwH,IAAI,CAAC,IAAI;MAC7CO,MAAM,EAAE,EAAE;MACVL,QAAQ,EAAE;IACd,CAAC;IACD,IAAI,CAAC9J,KAAK,CAACoC,GAAG,CAAC,CAACwH,IAAI,CAAC,CAACM,OAAO,CAAC,CAACjH,IAAI,CAAC+G,QAAQ,CAAC;EACjD,CAAC;EAAApJ,MAAA,CAEDwJ,QAAQ,GAAR,SAAAA,SAASR,IAAkB,EAAExH,GAAgB,EAAE;IAC3C,IACI,CAAC,IAAI,CAACpC,KAAK,CAACoC,GAAG,CAAC,IAChB,CAAC,IAAI,CAACpC,KAAK,CAACoC,GAAG,CAAC,CAACwH,IAAI,CAAC,EACxB;MACE,OAAO;QACHO,MAAM,EAAE,EAAE;QACVL,QAAQ,EAAE;MACd,CAAC;IACL;IACA,OAAO,IAAI,CAAC9J,KAAK,CAACoC,GAAG,CAAC,CAACwH,IAAI,CAAC;EAChC,CAAC;EAAAhJ,MAAA,CAED+E,QAAQ,GAAR,SAAAA,SAASiE,IAAkB,EAAExH,GAAgB,EAAE;IAC3C;AACR;AACA;AACA;IACQ,IACI,CAAC,IAAI,CAACpC,KAAK,CAACoC,GAAG,CAAC,IAChB,CAAC,IAAI,CAACpC,KAAK,CAACoC,GAAG,CAAC,CAACwH,IAAI,CAAC,EACxB;MACE,OAAO,KAAK;IAChB;IAEA,IAAM5J,KAAK,GAAG,IAAI,CAACoK,QAAQ,CAACR,IAAI,EAAExH,GAAG,CAAC;IACtC,IAAI,CAACpC,KAAK,EAAE;MACR,OAAO,KAAK;IAChB;IACA,OAAOA,KAAK,CAACmK,MAAM,CAAC5G,MAAM,GAAG,CAAC,IAAIvD,KAAK,CAAC8J,QAAQ,CAACvG,MAAM,GAAG,CAAC;EAC/D,CAAC;EAAA3C,MAAA,CAEDQ,SAAS,GAAT,SAAAA,UAAUwI,IAAkB,EAAExH,GAAgB,EAAE4E,IAAS,EAAEqD,QAAc,EAAgB;IACrF,IAAMrK,KAAK,GAAG,IAAI,CAACoK,QAAQ,CAACR,IAAI,EAAExH,GAAG,CAAC;IAEtC,IAAI,CAACpC,KAAK,EAAE;MACR,OAAO/C,oBAAoB;IAC/B;;IAEA;IACA,IAAMqN,KAAK,GAAGtK,KAAK,CAACmK,MAAM,CAAC3N,GAAG,CAAE+N,IAAS,IAAK,MAAMA,IAAI,CAACvD,IAAI,EAAEqD,QAAQ,CAAC,CAAC;IACzE,OAAOzN,aAAa,CAAC0N,KAAK;IACtB;IAAA,CACC/F,IAAI,CAAC,MAAMzB,OAAO,CAACC,GAAG,CACnB/C,KAAK,CAAC8J,QAAQ,CACTtN,GAAG,CAAE+N,IAAS,IAAKA,IAAI,CAACvD,IAAI,EAAEqD,QAAQ,CAAC,CAChD,CAAC,CAAC;EACV;;EAEA;AACJ;AACA,KAFI;EAAAzJ,MAAA,CAGA4J,aAAa,GAAb,SAAAA,cAAcZ,IAAkB,EAAExH,GAAgB,EAAE4E,IAAS,EAAEqD,QAAa,EAAE;IAC1E,IAAI,CAAC,IAAI,CAAC1E,QAAQ,CAACiE,IAAI,EAAExH,GAAG,CAAC,EAAE;MAC3B;IACJ;IACA,IAAMpC,KAAK,GAAG,IAAI,CAACoK,QAAQ,CAACR,IAAI,EAAExH,GAAG,CAAC;IACtC,IAAI,CAACpC,KAAK,EA
AE;IACZA,KAAK,CAACmK,MAAM,CAACjE,OAAO,CAAEqE,IAAS,IAAKA,IAAI,CAACvD,IAAI,EAAEqD,QAAQ,CAAC,CAAC;EAC7D;;EAEA;AACJ;AACA;AACA;AACA,KAJI;EAAAzJ,MAAA,CAKA6J,WAAW,GAAX,SAAAA,YAAYC,IAAY,EAAiB;IACrC,IAAMC,GAAG,GAAG,IAAI7H,OAAO,CAAO8H,GAAG,IAAI;MACjC,IAAMC,OAAO,GAAGC,UAAU,CAAC,MAAM;QAC7B,IAAI,CAACpL,QAAQ,CAACqL,MAAM,CAACF,OAAO,CAAC;QAC7BD,GAAG,CAAC,CAAC;MACT,CAAC,EAAEF,IAAI,CAAC;MACR,IAAI,CAAChL,QAAQ,CAACsL,GAAG,CAACH,OAAO,CAAC;IAC9B,CAAC,CAAC;IACF,OAAOF,GAAG;EACd,CAAC;EAAA/J,MAAA,CAEKiC,OAAO,GAAb,eAAAA,QAAA,EAAkC;IAC9B,IAAI,IAAI,CAACrC,SAAS,EAAE;MAChB,OAAOxD,qBAAqB;IAChC;IAGA,MAAM8F,OAAO,CAACC,GAAG,CAAC,IAAI,CAACxC,SAAS,CAAC/D,GAAG,CAACwG,EAAE,IAAIA,EAAE,CAAC,CAAC,CAAC,CAAC;;IAEjD;AACR;AACA;AACA;AACA;AACA;IACQ,IAAI,CAACxC,SAAS,GAAG,IAAI;IAGrB8C,KAAK,CAACuD,IAAI,CAAC,IAAI,CAACnH,QAAQ,CAAC,CAACwG,OAAO,CAAC2E,OAAO,IAAII,YAAY,CAACJ,OAAO,CAAC,CAAC;IACnE,IAAI,IAAI,CAACvK,kBAAkB,EAAE;MACzB,IAAI,CAACA,kBAAkB,CAACuC,OAAO,CAAC,CAAC;IACrC;IACA;AACR;AACA;AACA;AACA;AACA;AACA;AACA;IACQ,OAAO,IAAI,CAAChE,QAAQ,CAACqM,kBAAkB,CAAC,CAAC,CACpC3G,IAAI,CAAC,MAAM,IAAI,CAAC9E,eAAe,CAAC0L,KAAK,CAAC,CAAC,CAAC,CACxC5G,IAAI,CAAC,MAAM;MACR;AAChB;AACA;AACA;AACA;AACA;MACgB,IAAI,CAACtE,KAAK,CAACiG,OAAO,CAACkF,GAAG,IAAIA,GAAG,CAACC,WAAW,CAAC,CAAC,CAAC;MAE5C,OAAO,IAAI,CAACxM,QAAQ,CAACyM,WAAW,CAAC,IAAI,CAACxM,IAAI,CAAC;MAC3C,OAAOf,mBAAmB,CAAC,yBAAyB,EAAE,IAAI,CAAC,CAACwG,IAAI,CAAC,MAAM,IAAI,CAAC;IAChF,CAAC,CAAC;EACV;;EAEA;AACJ;AACA,KAFI;EAAA3D,MAAA,CAGM2K,MAAM,GAAZ,eAAAA,OAAA,EAA6B;IACzB,MAAM,IAAI,CAAC1I,OAAO,CAAC,CAAC;IACpB,MAAMC,OAAO,CAACC,GAAG,CAAC,IAAI,CAACtC,QAAQ,CAACjE,GAAG,CAACwG,EAAE,IAAIA,EAAE,CAAC,CAAC,CAAC,CAAC;IAChD,MAAM5F,wBAAwB,CAC1B,IAAI,CAACyB,QAAQ,CAAC2M,OAAO,EACrB,IAAI,CAAC3M,QAAQ,CAACoD,aAAa,EAC3B,IAAI,CAACpD,QAAQ,CAACmF,KAAK,EACnB,IAAI,CAACnF,QAAQ,CAACC,IAAI,EAClB,IAAI,CAACA,IAAI,EACT,IAAI,CAACD,QAAQ,CAAC4M,QAAQ,EACtB,IAAI,CAAC5M,QAAQ,CAAC6M,YAClB,CAAC;EACL,CAAC;EAAA,OAAAC,YAAA,CAAA/M,gBAAA;IAAAwD,GAAA;IAAAkE,GAAA,EAlvBD,SAAAA,CAAA,EAA+D;MAC3D,OAAO,IAAI,CAAClG,CAAC,CAACmB,IAAI,CACdhF,MAAM,C
AACsF,EAAE,IAAIA,EAAE,CAACc,SAAS,KAAK,QAAQ,CAC1C,CAAC;IACL;EAAC;IAAAP,GAAA;IAAAkE,GAAA,EACD,SAAAA,CAAA,EAA+D;MAC3D,OAAO,IAAI,CAAClG,CAAC,CAACmB,IAAI,CACdhF,MAAM,CAACsF,EAAE,IAAIA,EAAE,CAACc,SAAS,KAAK,QAAQ,CAC1C,CAAC;IACL;EAAC;IAAAP,GAAA;IAAAkE,GAAA,EACD,SAAAA,CAAA,EAA+D;MAC3D,OAAO,IAAI,CAAClG,CAAC,CAACmB,IAAI,CACdhF,MAAM,CAACsF,EAAE,IAAIA,EAAE,CAACc,SAAS,KAAK,QAAQ,CAC1C,CAAC;IACL;;IAGA;;IAqBA;AACJ;AACA;AACA;AACA;AACA;EALI;IAAAP,GAAA;IAAAkE,GAAA,EA8sBA,SAAAA,CAAA,EAA+F;MAC3F,OAAO,IAAI;IACf;EAAC;AAAA;;AAGL;AACA;AACA;AACA;AACA,SAAS5F,mBAAmBA,CACxBkL,UAAkC,EACpC;EACE,IAAIjN,YAAY,EAAE,OAAO,CAAC;EAC1BA,YAAY,GAAG,IAAI;EACnB,IAAMkN,QAAQ,GAAGC,MAAM,CAACC,cAAc,CAACH,UAAU,CAAC;EAClDlN,UAAU,CAACwH,OAAO,CAAC9D,GAAG,IAAI;IACtB3D,UAAU,CAACjC,GAAG,CAACoN,IAAI,IAAI;MACnB,IAAMoC,MAAM,GAAGpC,IAAI,GAAGlN,OAAO,CAAC0F,GAAG,CAAC;MAClCyJ,QAAQ,CAACG,MAAM,CAAC,GAAG,UAAUnC,GAAW,EAAEC,QAAiB,EAAE;QACzD,OAAO,IAAI,CAACH,OAAO,CAACC,IAAI,EAAExH,GAAG,EAAEyH,GAAG,EAAEC,QAAQ,CAAC;MACjD,CAAC;IACL,CAAC,CAAC;EACN,CAAC,CAAC;AACN;AAEA,SAASlB,wBAAwBA,CAC7BxC,GAA8B,EAC9BlB,IAA+B,EACG;EAClC,OAAOkB,GAAG,CAACgC,iBAAiB,CAAE6D,SAAS,IAAK;IACxC,OAAO/G,IAAI;EACf,CAAC,CAAC;AACN;;AAEA;AACA;AACA;AACA;AACA,SAASuD,wCAAwCA,CAC7CyD,YAAqC,EACrC3F,OAAe,EACfrB,IAAS,EAMX;EACE;AACJ;AACA;AACA;EACI,IAAMiH,gBAAgB,GAAGD,YAAY,CAAChM,SAAS,CAACkM,6BAA6B,CAAC7F,OAAO,CAAC;EACtF,IAAI4F,gBAAgB,EAAE;IAClB,OAAOrJ,OAAO,CAACuJ,OAAO,CAAC;MACnBjG,GAAG,EAAE8F,YAAY,CAAChM,SAAS,CAACgI,oBAAoB,CAAC,CAACiE,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAAC;MACvExD,QAAQ,EAAE;IACd,CAAC,CAAC;EACN;EACA,OAAOuD,YAAY,CAACnD,OAAO,CAACxC,OAAO,CAAC,CAACK,IAAI,CAAC,CAAC,CACtCrC,IAAI,CAAC6B,GAAG,IAAI;IACT,IAAI,CAACA,GAAG,EAAE;MACN,OAAO8F,YAAY,CAACjH,MAAM,CAACC,IAAI,CAAC,CAACX,IAAI,CAAC4D,MAAM,KAAK;QAC7C/B,GAAG,EAAE+B,MAAM;QACXQ,QAAQ,EAAE;MACd,CAAC,CAAC,CAAC;IACP,CAAC,MAAM;MACH,OAAO;QACHvC,GAAG;QACHuC,QAAQ,EAAE;MACd,CAAC;IACL;EACJ,CAAC,CAAC;AACV;;AAEA;AACA;AACA;AACA,OAAO,SAAS2D,kBAAkBA,CAC9B;EACIzN,QAAQ;EACRC,IAAI;EACJC,MAAM;EACNE,uBAAuB,GAAG,CAAC,CAAC;EAC5BC,mBAAmB,GAAG,CAAC,CAAC;
EACxBqN,WAAW,GAAG,IAAI;EAClBhN,OAAO,GAAG,CAAC,CAAC;EACZJ,OAAO,GAAG,CAAC,CAAC;EACZC,WAAW,GAAG,CAAC,CAAC;EAChBC,OAAO,GAAG,CAAC,CAAC;EACZmN,cAAc,GAAG,KAAK;EACtBlN,sBAAsB,GAAGzB,6BAA6B;EACtD2B,eAAe,GAAGnB;AACjB,CAAC,EACe;EACrB,IAAMoO,6BAAwE,GAAG;IAC7EC,qBAAqB,EAAE7N,QAAQ,CAACmF,KAAK;IACrC2I,YAAY,EAAE9N,QAAQ,CAACC,IAAI;IAC3B2C,cAAc,EAAE3C,IAAI;IACpBC,MAAM,EAAEA,MAAM,CAACgC,UAAU;IACzB1B,OAAO,EAAEJ,uBAAuB;IAChC2N,aAAa,EAAE/N,QAAQ,CAAC+N,aAAa;IACrCnB,QAAQ,EAAE5M,QAAQ,CAAC4M,QAAQ;IAC3BoB,OAAO,EAAErO,YAAY,CAACsO,SAAS,CAAC;EACpC,CAAC;EAED9O,cAAc,CACV,4BAA4B,EAC5ByO,6BACJ,CAAC;EAED,OAAOtP,iCAAiC,CACpC0B,QAAQ,EACR4N,6BACJ,CAAC,CAAClI,IAAI,CAAC9E,eAAe,IAAI;IACtB,IAAMmM,UAAU,GAAG,IAAIhN,gBAAgB,CACnCC,QAAQ,EACRC,IAAI,EACJC,MAAM,EACNU,eAAe,EACfR,uBAAuB,EACvBC,mBAAmB,EACnBC,OAAO,EACPC,WAAW,EACXC,OAAO,EACPC,sBAAsB,EACtBC,OAAO,EACPC,eACJ,CAAC;IAED,OAAOoM,UAAU,CACZ9K,OAAO,CAAC,CAAC,CACTyD,IAAI,CAAC,MAAM;MACR;MACAuH,MAAM,CACDiB,OAAO,CAACxN,OAAO,CAAC,CAChB2G,OAAO,CAAC,CAAC,CAAC8G,OAAO,EAAEnD,GAAG,CAAC,KAAK;QACzBiC,MAAM,CAACmB,cAAc,CAACrB,UAAU,EAAEoB,OAAO,EAAE;UACvC1G,GAAG,EAAEA,CAAA,KAAOuD,GAAG,CAASI,IAAI,CAAC2B,UAAU;QAC3C,CAAC,CAAC;MACN,CAAC,CAAC;MAEN,IAAIjB,GAAG,GAAG1N,oBAAoB;MAC9B,IAAIsP,WAAW,IAAIX,UAAU,CAAC7M,MAAM,CAACsD,OAAO,KAAK,CAAC,EAAE;QAChDsI,GAAG,GAAGiB,UAAU,CAAC5G,cAAc,CAAC,CAAC;MACrC;MACA,OAAO2F,GAAG;IACd,CAAC,CAAC,CACDpG,IAAI,CAAC,MAAM;MACRvG,cAAc,CAAC,oBAAoB,EAAE;QACjC4N,UAAU;QACVsB,OAAO,EAAE;UACLpO,IAAI;UACJC,MAAM;UACNU,eAAe;UACfR,uBAAuB;UACvBC,mBAAmB;UACnBC,OAAO;UACPC,WAAW;UACXC,OAAO;UACPC,sBAAsB;UACtBkN,cAAc;UACdjN;QACJ;MACJ,CAAC,CAAC;MACF,OAAOqM,UAAU;IACrB,CAAC;IACD;AACZ;AACA;AACA,OAHY,CAICuB,KAAK,CAACtF,GAAG,IAAI;MACV,OAAOpI,eAAe,CAAC0L,KAAK,CAAC,CAAC,CACzB5G,IAAI,CAAC,MAAMzB,OAAO,CAACsK,MAAM,CAACvF,GAAG,CAAC,CAAC;IACxC,CAAC,CAAC;EACV,CAAC,CAAC;AACN;AAEA,OAAO,SAASwF,cAAcA,CAACC,GAAQ,EAAW;EAC9C,OAAOA,GAAG,YAAY1O,gBAAgB;AAC1C","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/rx-database-internal-store.js b/dist/esm/rx-database-internal-store.js deleted 
file mode 100644 index a11da6c357c..00000000000 --- a/dist/esm/rx-database-internal-store.js +++ /dev/null @@ -1,264 +0,0 @@ -import { isBulkWriteConflictError, newRxError } from "./rx-error.js"; -import { fillWithDefaultSettings, getComposedPrimaryKeyOfDocumentData } from "./rx-schema-helper.js"; -import { getSingleDocument, writeSingle } from "./rx-storage-helper.js"; -import { clone, ensureNotFalsy, getDefaultRevision, getDefaultRxDocumentMeta, randomCouchString } from "./plugins/utils/index.js"; -import { prepareQuery } from "./rx-query.js"; -export var INTERNAL_CONTEXT_COLLECTION = 'collection'; -export var INTERNAL_CONTEXT_STORAGE_TOKEN = 'storage-token'; -export var INTERNAL_CONTEXT_MIGRATION_STATUS = 'rx-migration-status'; - -/** - * Do not change the title, - * we have to flag the internal schema so that - * some RxStorage implementations are able - * to detect if the created RxStorageInstance - * is from the internals or not, - * to do some optimizations in some cases. - */ -export var INTERNAL_STORE_SCHEMA_TITLE = 'RxInternalDocument'; -export var INTERNAL_STORE_SCHEMA = fillWithDefaultSettings({ - version: 0, - title: INTERNAL_STORE_SCHEMA_TITLE, - primaryKey: { - key: 'id', - fields: ['context', 'key'], - separator: '|' - }, - type: 'object', - properties: { - id: { - type: 'string', - maxLength: 200 - }, - key: { - type: 'string' - }, - context: { - type: 'string', - enum: [INTERNAL_CONTEXT_COLLECTION, INTERNAL_CONTEXT_STORAGE_TOKEN, INTERNAL_CONTEXT_MIGRATION_STATUS, 'OTHER'] - }, - data: { - type: 'object', - additionalProperties: true - } - }, - indexes: [], - required: ['key', 'context', 'data'], - additionalProperties: false, - /** - * If the sharding plugin is used, - * it must not shard on the internal RxStorageInstance - * because that one anyway has only a small amount of documents - * and also its creation is in the hot path of the initial page load, - * so we should spend less time creating multiple RxStorageInstances. 
- */ - sharding: { - shards: 1, - mode: 'collection' - } -}); -export function getPrimaryKeyOfInternalDocument(key, context) { - return getComposedPrimaryKeyOfDocumentData(INTERNAL_STORE_SCHEMA, { - key, - context - }); -} - -/** - * Returns all internal documents - * with context 'collection' - */ -export async function getAllCollectionDocuments(storageInstance) { - var getAllQueryPrepared = prepareQuery(storageInstance.schema, { - selector: { - context: INTERNAL_CONTEXT_COLLECTION, - _deleted: { - $eq: false - } - }, - sort: [{ - id: 'asc' - }], - skip: 0 - }); - var queryResult = await storageInstance.query(getAllQueryPrepared); - var allDocs = queryResult.documents; - return allDocs; -} - -/** - * to not confuse multiInstance-messages with other databases that have the same - * name and adapter, but do not share state with this one (for example in-memory-instances), - * we set a storage-token and use it in the broadcast-channel - */ -export var STORAGE_TOKEN_DOCUMENT_KEY = 'storageToken'; -export var STORAGE_TOKEN_DOCUMENT_ID = getPrimaryKeyOfInternalDocument(STORAGE_TOKEN_DOCUMENT_KEY, INTERNAL_CONTEXT_STORAGE_TOKEN); -export async function ensureStorageTokenDocumentExists(rxDatabase) { - /** - * To have less read-write cycles, - * we just try to insert a new document - * and only fetch the existing one if a conflict happened. - */ - var storageToken = randomCouchString(10); - var passwordHash = rxDatabase.password ? await rxDatabase.hashFunction(JSON.stringify(rxDatabase.password)) : undefined; - var docData = { - id: STORAGE_TOKEN_DOCUMENT_ID, - context: INTERNAL_CONTEXT_STORAGE_TOKEN, - key: STORAGE_TOKEN_DOCUMENT_KEY, - data: { - rxdbVersion: rxDatabase.rxdbVersion, - token: storageToken, - /** - * We add the instance token here - * to be able to detect if a given RxDatabase instance - * is the first instance that was ever created - * or if databases have existed earlier on that storage - * with the same database name. 
- */ - instanceToken: rxDatabase.token, - passwordHash - }, - _deleted: false, - _meta: getDefaultRxDocumentMeta(), - _rev: getDefaultRevision(), - _attachments: {} - }; - var writeResult = await rxDatabase.internalStore.bulkWrite([{ - document: docData - }], 'internal-add-storage-token'); - if (writeResult.success[0]) { - return writeResult.success[0]; - } - - /** - * If we get a 409 error, - * it means another instance already inserted the storage token. - * So we get that token from the database and return that one. - */ - var error = ensureNotFalsy(writeResult.error[0]); - if (error.isError && isBulkWriteConflictError(error)) { - var conflictError = error; - if (!isDatabaseStateVersionCompatibleWithDatabaseCode(conflictError.documentInDb.data.rxdbVersion, rxDatabase.rxdbVersion)) { - throw newRxError('DM5', { - args: { - database: rxDatabase.name, - databaseStateVersion: conflictError.documentInDb.data.rxdbVersion, - codeVersion: rxDatabase.rxdbVersion - } - }); - } - if (passwordHash && passwordHash !== conflictError.documentInDb.data.passwordHash) { - throw newRxError('DB1', { - passwordHash, - existingPasswordHash: conflictError.documentInDb.data.passwordHash - }); - } - var storageTokenDocInDb = conflictError.documentInDb; - return ensureNotFalsy(storageTokenDocInDb); - } - throw error; -} -export function isDatabaseStateVersionCompatibleWithDatabaseCode(databaseStateVersion, codeVersion) { - if (!databaseStateVersion) { - return false; - } - if (codeVersion.includes('beta') && codeVersion !== databaseStateVersion) { - return false; - } - var stateMajor = databaseStateVersion.split('.')[0]; - var codeMajor = codeVersion.split('.')[0]; - if (stateMajor !== codeMajor) { - return false; - } - return true; -} -export async function addConnectedStorageToCollection(collection, storageCollectionName, schema) { - if (collection.schema.version !== schema.version) { - throw newRxError('SNH', { - schema, - version: collection.schema.version, - name: collection.name, - 
collection, - args: { - storageCollectionName - } - }); - } - var collectionNameWithVersion = _collectionNamePrimary(collection.name, collection.schema.jsonSchema); - var collectionDocId = getPrimaryKeyOfInternalDocument(collectionNameWithVersion, INTERNAL_CONTEXT_COLLECTION); - while (true) { - var collectionDoc = await getSingleDocument(collection.database.internalStore, collectionDocId); - var saveData = clone(ensureNotFalsy(collectionDoc)); - - // do nothing if already in array - var alreadyThere = saveData.data.connectedStorages.find(row => row.collectionName === storageCollectionName && row.schema.version === schema.version); - if (alreadyThere) { - return; - } - - // otherwise add to array and save - saveData.data.connectedStorages.push({ - collectionName: storageCollectionName, - schema - }); - try { - await writeSingle(collection.database.internalStore, { - previous: ensureNotFalsy(collectionDoc), - document: saveData - }, 'add-connected-storage-to-collection'); - } catch (err) { - if (!isBulkWriteConflictError(err)) { - throw err; - } - // retry on conflict - } - } -} -export async function removeConnectedStorageFromCollection(collection, storageCollectionName, schema) { - if (collection.schema.version !== schema.version) { - throw newRxError('SNH', { - schema, - version: collection.schema.version, - name: collection.name, - collection, - args: { - storageCollectionName - } - }); - } - var collectionNameWithVersion = _collectionNamePrimary(collection.name, collection.schema.jsonSchema); - var collectionDocId = getPrimaryKeyOfInternalDocument(collectionNameWithVersion, INTERNAL_CONTEXT_COLLECTION); - while (true) { - var collectionDoc = await getSingleDocument(collection.database.internalStore, collectionDocId); - var saveData = clone(ensureNotFalsy(collectionDoc)); - - // do nothing if not there - var isThere = saveData.data.connectedStorages.find(row => row.collectionName === storageCollectionName && row.schema.version === schema.version); - if 
(!isThere) { - return; - } - - // otherwise remove from array and save - saveData.data.connectedStorages = saveData.data.connectedStorages.filter(item => item.collectionName !== storageCollectionName); - try { - await writeSingle(collection.database.internalStore, { - previous: ensureNotFalsy(collectionDoc), - document: saveData - }, 'remove-connected-storage-from-collection'); - } catch (err) { - if (!isBulkWriteConflictError(err)) { - throw err; - } - // retry on conflict - } - } -} - -/** - * returns the primary for a given collection-data - * used in the internal store of a RxDatabase - */ -export function _collectionNamePrimary(name, schema) { - return name + '-' + schema.version; -} -//# sourceMappingURL=rx-database-internal-store.js.map \ No newline at end of file diff --git a/dist/esm/rx-database-internal-store.js.map b/dist/esm/rx-database-internal-store.js.map deleted file mode 100644 index 950b6482daf..00000000000 --- a/dist/esm/rx-database-internal-store.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"rx-database-internal-store.js","names":["isBulkWriteConflictError","newRxError","fillWithDefaultSettings","getComposedPrimaryKeyOfDocumentData","getSingleDocument","writeSingle","clone","ensureNotFalsy","getDefaultRevision","getDefaultRxDocumentMeta","randomCouchString","prepareQuery","INTERNAL_CONTEXT_COLLECTION","INTERNAL_CONTEXT_STORAGE_TOKEN","INTERNAL_CONTEXT_MIGRATION_STATUS","INTERNAL_STORE_SCHEMA_TITLE","INTERNAL_STORE_SCHEMA","version","title","primaryKey","key","fields","separator","type","properties","id","maxLength","context","enum","data","additionalProperties","indexes","required","sharding","shards","mode","getPrimaryKeyOfInternalDocument","getAllCollectionDocuments","storageInstance","getAllQueryPrepared","schema","selector","_deleted","$eq","sort","skip","queryResult","query","allDocs","documents","STORAGE_TOKEN_DOCUMENT_KEY","STORAGE_TOKEN_DOCUMENT_ID","ensureStorageTokenDocumentExists","rxDatabase","storageToken","passwordHash","password","hashFunction","JSON","stringify","undefined","docData","rxdbVersion","token","instanceToken","_meta","_rev","_attachments","writeResult","internalStore","bulkWrite","document","success","error","isError","conflictError","isDatabaseStateVersionCompatibleWithDatabaseCode","documentInDb","args","database","name","databaseStateVersion","codeVersion","existingPasswordHash","storageTokenDocInDb","includes","stateMajor","split","codeMajor","addConnectedStorageToCollection","collection","storageCollectionName","collectionNameWithVersion","_collectionNamePrimary","jsonSchema","collectionDocId","collectionDoc","saveData","alreadyThere","connectedStorages","find","row","collectionName","push","previous","err","removeConnectedStorageFromCollection","isThere","filter","item"],"sources":["../../src/rx-database-internal-store.ts"],"sourcesContent":["import {\n isBulkWriteConflictError,\n newRxError\n} from './rx-error.ts';\nimport {\n fillWithDefaultSettings,\n getComposedPrimaryKeyOfDocumentData\n} from 
'./rx-schema-helper.ts';\nimport { getSingleDocument, writeSingle } from './rx-storage-helper.ts';\nimport type {\n CollectionsOfDatabase,\n InternalStoreCollectionDocType,\n InternalStoreDocType,\n InternalStoreStorageTokenDocType,\n RxCollection,\n RxDatabase,\n RxDocumentData,\n RxJsonSchema,\n RxStorageInstance,\n RxStorageWriteErrorConflict\n} from './types/index.d.ts';\nimport {\n clone,\n ensureNotFalsy,\n getDefaultRevision,\n getDefaultRxDocumentMeta,\n randomCouchString\n} from './plugins/utils/index.ts';\nimport { prepareQuery } from './rx-query.ts';\n\nexport const INTERNAL_CONTEXT_COLLECTION = 'collection';\nexport const INTERNAL_CONTEXT_STORAGE_TOKEN = 'storage-token';\nexport const INTERNAL_CONTEXT_MIGRATION_STATUS = 'rx-migration-status';\n\n/**\n * Do not change the title,\n * we have to flag the internal schema so that\n * some RxStorage implementations are able\n * to detect if the created RxStorageInstance\n * is from the internals or not,\n * to do some optimizations in some cases.\n */\nexport const INTERNAL_STORE_SCHEMA_TITLE = 'RxInternalDocument';\n\nexport const INTERNAL_STORE_SCHEMA: RxJsonSchema>> = fillWithDefaultSettings({\n version: 0,\n title: INTERNAL_STORE_SCHEMA_TITLE,\n primaryKey: {\n key: 'id',\n fields: [\n 'context',\n 'key'\n ],\n separator: '|'\n },\n type: 'object',\n properties: {\n id: {\n type: 'string',\n maxLength: 200\n },\n key: {\n type: 'string'\n },\n context: {\n type: 'string',\n enum: [\n INTERNAL_CONTEXT_COLLECTION,\n INTERNAL_CONTEXT_STORAGE_TOKEN,\n INTERNAL_CONTEXT_MIGRATION_STATUS,\n 'OTHER'\n ]\n },\n data: {\n type: 'object',\n additionalProperties: true\n }\n },\n indexes: [],\n required: [\n 'key',\n 'context',\n 'data'\n ],\n additionalProperties: false,\n /**\n * If the sharding plugin is used,\n * it must not shard on the internal RxStorageInstance\n * because that one anyway has only a small amount of documents\n * and also its creation is in the hot path of the initial page load,\n * so we should 
spend less time creating multiple RxStorageInstances.\n */\n sharding: {\n shards: 1,\n mode: 'collection'\n }\n});\n\n\nexport function getPrimaryKeyOfInternalDocument(\n key: string,\n context: string\n): string {\n return getComposedPrimaryKeyOfDocumentData(\n INTERNAL_STORE_SCHEMA,\n {\n key,\n context\n }\n );\n}\n\n/**\n * Returns all internal documents\n * with context 'collection'\n */\nexport async function getAllCollectionDocuments(\n storageInstance: RxStorageInstance, any, any>\n): Promise[]> {\n const getAllQueryPrepared = prepareQuery>(\n storageInstance.schema,\n {\n selector: {\n context: INTERNAL_CONTEXT_COLLECTION,\n _deleted: {\n $eq: false\n }\n },\n sort: [{ id: 'asc' }],\n skip: 0\n }\n );\n const queryResult = await storageInstance.query(getAllQueryPrepared);\n const allDocs = queryResult.documents;\n return allDocs;\n}\n\n/**\n * to not confuse multiInstance-messages with other databases that have the same\n * name and adapter, but do not share state with this one (for example in-memory-instances),\n * we set a storage-token and use it in the broadcast-channel\n */\nexport const STORAGE_TOKEN_DOCUMENT_KEY = 'storageToken';\n\nexport const STORAGE_TOKEN_DOCUMENT_ID = getPrimaryKeyOfInternalDocument(\n STORAGE_TOKEN_DOCUMENT_KEY,\n INTERNAL_CONTEXT_STORAGE_TOKEN\n);\n\nexport async function ensureStorageTokenDocumentExists(\n rxDatabase: RxDatabase\n): Promise> {\n\n /**\n * To have less read-write cycles,\n * we just try to insert a new document\n * and only fetch the existing one if a conflict happened.\n */\n const storageToken = randomCouchString(10);\n\n const passwordHash = rxDatabase.password ?\n await rxDatabase.hashFunction(JSON.stringify(rxDatabase.password)) :\n undefined;\n\n const docData: RxDocumentData = {\n id: STORAGE_TOKEN_DOCUMENT_ID,\n context: INTERNAL_CONTEXT_STORAGE_TOKEN,\n key: STORAGE_TOKEN_DOCUMENT_KEY,\n data: {\n rxdbVersion: rxDatabase.rxdbVersion,\n token: storageToken,\n /**\n * We add the instance token here\n 
* to be able to detect if a given RxDatabase instance\n * is the first instance that was ever created\n * or if databases have existed earlier on that storage\n * with the same database name.\n */\n instanceToken: rxDatabase.token,\n passwordHash\n },\n _deleted: false,\n _meta: getDefaultRxDocumentMeta(),\n _rev: getDefaultRevision(),\n _attachments: {}\n };\n\n const writeResult = await rxDatabase.internalStore.bulkWrite(\n [{ document: docData }],\n 'internal-add-storage-token'\n );\n if (writeResult.success[0]) {\n return writeResult.success[0];\n }\n\n /**\n * If we get a 409 error,\n * it means another instance already inserted the storage token.\n * So we get that token from the database and return that one.\n */\n const error = ensureNotFalsy(writeResult.error[0]);\n if (\n error.isError &&\n isBulkWriteConflictError(error)\n ) {\n const conflictError = (error as RxStorageWriteErrorConflict);\n\n if (\n !isDatabaseStateVersionCompatibleWithDatabaseCode(\n conflictError.documentInDb.data.rxdbVersion,\n rxDatabase.rxdbVersion\n )\n ) {\n throw newRxError('DM5', {\n args: {\n database: rxDatabase.name,\n databaseStateVersion: conflictError.documentInDb.data.rxdbVersion,\n codeVersion: rxDatabase.rxdbVersion\n }\n });\n }\n\n if (\n passwordHash &&\n passwordHash !== conflictError.documentInDb.data.passwordHash\n ) {\n throw newRxError('DB1', {\n passwordHash,\n existingPasswordHash: conflictError.documentInDb.data.passwordHash\n });\n }\n\n const storageTokenDocInDb = conflictError.documentInDb;\n return ensureNotFalsy(storageTokenDocInDb);\n }\n throw error;\n}\n\n\nexport function isDatabaseStateVersionCompatibleWithDatabaseCode(\n databaseStateVersion: string,\n codeVersion: string\n): boolean {\n if (!databaseStateVersion) {\n return false;\n }\n\n if (\n codeVersion.includes('beta') &&\n codeVersion !== databaseStateVersion\n ) {\n return false;\n }\n\n const stateMajor = databaseStateVersion.split('.')[0];\n const codeMajor = codeVersion.split('.')[0];\n 
if (stateMajor !== codeMajor) {\n return false;\n }\n return true;\n}\n\n\n\n\n\nexport async function addConnectedStorageToCollection(\n collection: RxCollection,\n storageCollectionName: string,\n schema: RxJsonSchema\n) {\n\n if (collection.schema.version !== schema.version) {\n throw newRxError('SNH', {\n schema,\n version: collection.schema.version,\n name: collection.name,\n collection,\n args: {\n storageCollectionName\n }\n });\n }\n\n const collectionNameWithVersion = _collectionNamePrimary(collection.name, collection.schema.jsonSchema);\n const collectionDocId = getPrimaryKeyOfInternalDocument(\n collectionNameWithVersion,\n INTERNAL_CONTEXT_COLLECTION\n );\n\n while (true) {\n const collectionDoc = await getSingleDocument(\n collection.database.internalStore,\n collectionDocId\n );\n const saveData: RxDocumentData = clone(ensureNotFalsy(collectionDoc));\n\n // do nothing if already in array\n const alreadyThere = saveData.data.connectedStorages\n .find(row => row.collectionName === storageCollectionName && row.schema.version === schema.version);\n if (alreadyThere) {\n return;\n }\n\n // otherwise add to array and save\n saveData.data.connectedStorages.push({\n collectionName: storageCollectionName,\n schema\n });\n try {\n await writeSingle(\n collection.database.internalStore,\n {\n previous: ensureNotFalsy(collectionDoc),\n document: saveData\n },\n 'add-connected-storage-to-collection'\n );\n } catch (err) {\n if (!isBulkWriteConflictError(err)) {\n throw err;\n }\n // retry on conflict\n }\n }\n}\n\nexport async function removeConnectedStorageFromCollection(\n collection: RxCollection,\n storageCollectionName: string,\n schema: RxJsonSchema\n) {\n if (collection.schema.version !== schema.version) {\n throw newRxError('SNH', {\n schema,\n version: collection.schema.version,\n name: collection.name,\n collection,\n args: {\n storageCollectionName\n }\n });\n }\n\n const collectionNameWithVersion = _collectionNamePrimary(collection.name, 
collection.schema.jsonSchema);\n const collectionDocId = getPrimaryKeyOfInternalDocument(\n collectionNameWithVersion,\n INTERNAL_CONTEXT_COLLECTION\n );\n\n while (true) {\n const collectionDoc = await getSingleDocument(\n collection.database.internalStore,\n collectionDocId\n );\n const saveData: RxDocumentData = clone(ensureNotFalsy(collectionDoc));\n\n // do nothing if not there\n const isThere = saveData.data.connectedStorages\n .find(row => row.collectionName === storageCollectionName && row.schema.version === schema.version);\n if (!isThere) {\n return;\n }\n\n // otherwise remove from array and save\n saveData.data.connectedStorages = saveData.data.connectedStorages.filter(item => item.collectionName !== storageCollectionName);\n try {\n await writeSingle(\n collection.database.internalStore,\n {\n previous: ensureNotFalsy(collectionDoc),\n document: saveData\n },\n 'remove-connected-storage-from-collection'\n );\n } catch (err) {\n if (!isBulkWriteConflictError(err)) {\n throw err;\n }\n // retry on conflict\n }\n }\n}\n\n\n\n/**\n * returns the primary for a given collection-data\n * used in the internal store of a RxDatabase\n */\nexport function _collectionNamePrimary(name: string, schema: RxJsonSchema) {\n return name + '-' + 
schema.version;\n}\n"],"mappings":"AAAA,SACIA,wBAAwB,EACxBC,UAAU,QACP,eAAe;AACtB,SACIC,uBAAuB,EACvBC,mCAAmC,QAChC,uBAAuB;AAC9B,SAASC,iBAAiB,EAAEC,WAAW,QAAQ,wBAAwB;AAavE,SACIC,KAAK,EACLC,cAAc,EACdC,kBAAkB,EAClBC,wBAAwB,EACxBC,iBAAiB,QACd,0BAA0B;AACjC,SAASC,YAAY,QAAQ,eAAe;AAE5C,OAAO,IAAMC,2BAA2B,GAAG,YAAY;AACvD,OAAO,IAAMC,8BAA8B,GAAG,eAAe;AAC7D,OAAO,IAAMC,iCAAiC,GAAG,qBAAqB;;AAEtE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,IAAMC,2BAA2B,GAAG,oBAAoB;AAE/D,OAAO,IAAMC,qBAA8E,GAAGd,uBAAuB,CAAC;EAClHe,OAAO,EAAE,CAAC;EACVC,KAAK,EAAEH,2BAA2B;EAClCI,UAAU,EAAE;IACRC,GAAG,EAAE,IAAI;IACTC,MAAM,EAAE,CACJ,SAAS,EACT,KAAK,CACR;IACDC,SAAS,EAAE;EACf,CAAC;EACDC,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRC,EAAE,EAAE;MACAF,IAAI,EAAE,QAAQ;MACdG,SAAS,EAAE;IACf,CAAC;IACDN,GAAG,EAAE;MACDG,IAAI,EAAE;IACV,CAAC;IACDI,OAAO,EAAE;MACLJ,IAAI,EAAE,QAAQ;MACdK,IAAI,EAAE,CACFhB,2BAA2B,EAC3BC,8BAA8B,EAC9BC,iCAAiC,EACjC,OAAO;IAEf,CAAC;IACDe,IAAI,EAAE;MACFN,IAAI,EAAE,QAAQ;MACdO,oBAAoB,EAAE;IAC1B;EACJ,CAAC;EACDC,OAAO,EAAE,EAAE;EACXC,QAAQ,EAAE,CACN,KAAK,EACL,SAAS,EACT,MAAM,CACT;EACDF,oBAAoB,EAAE,KAAK;EAC3B;AACJ;AACA;AACA;AACA;AACA;AACA;EACIG,QAAQ,EAAE;IACNC,MAAM,EAAE,CAAC;IACTC,IAAI,EAAE;EACV;AACJ,CAAC,CAAC;AAGF,OAAO,SAASC,+BAA+BA,CAC3ChB,GAAW,EACXO,OAAe,EACT;EACN,OAAOxB,mCAAmC,CACtCa,qBAAqB,EACrB;IACII,GAAG;IACHO;EACJ,CACJ,CAAC;AACL;;AAEA;AACA;AACA;AACA;AACA,OAAO,eAAeU,yBAAyBA,CAC3CC,eAAuE,EACd;EACzD,IAAMC,mBAAmB,GAAG5B,YAAY,CACpC2B,eAAe,CAACE,MAAM,EACtB;IACIC,QAAQ,EAAE;MACNd,OAAO,EAAEf,2BAA2B;MACpC8B,QAAQ,EAAE;QACNC,GAAG,EAAE;MACT;IACJ,CAAC;IACDC,IAAI,EAAE,CAAC;MAAEnB,EAAE,EAAE;IAAM,CAAC,CAAC;IACrBoB,IAAI,EAAE;EACV,CACJ,CAAC;EACD,IAAMC,WAAW,GAAG,MAAMR,eAAe,CAACS,KAAK,CAACR,mBAAmB,CAAC;EACpE,IAAMS,OAAO,GAAGF,WAAW,CAACG,SAAS;EACrC,OAAOD,OAAO;AAClB;;AAEA;AACA;AACA;AACA;AACA;AACA,OAAO,IAAME,0BAA0B,GAAG,cAAc;AAExD,OAAO,IAAMC,yBAAyB,GAAGf,+BAA+B,CACpEc,0BAA0B,EAC1BrC,8BACJ,CAAC;AAED,OAAO,eAAeuC,gCAAgCA,CAClDC,UAAmC,EACsB;EAEzD;AACJ;AACA;AACA;AACA;EACI,IAAMC,YAAY,GAAG5C,iBAAiB,CAAC,EAAE,CAAC;EAE1C,IAAM6C,YAAY,GAAGF,UAA
U,CAACG,QAAQ,GACpC,MAAMH,UAAU,CAACI,YAAY,CAACC,IAAI,CAACC,SAAS,CAACN,UAAU,CAACG,QAAQ,CAAC,CAAC,GAClEI,SAAS;EAEb,IAAMC,OAAyD,GAAG;IAC9DpC,EAAE,EAAE0B,yBAAyB;IAC7BxB,OAAO,EAAEd,8BAA8B;IACvCO,GAAG,EAAE8B,0BAA0B;IAC/BrB,IAAI,EAAE;MACFiC,WAAW,EAAET,UAAU,CAACS,WAAW;MACnCC,KAAK,EAAET,YAAY;MACnB;AACZ;AACA;AACA;AACA;AACA;AACA;MACYU,aAAa,EAAEX,UAAU,CAACU,KAAK;MAC/BR;IACJ,CAAC;IACDb,QAAQ,EAAE,KAAK;IACfuB,KAAK,EAAExD,wBAAwB,CAAC,CAAC;IACjCyD,IAAI,EAAE1D,kBAAkB,CAAC,CAAC;IAC1B2D,YAAY,EAAE,CAAC;EACnB,CAAC;EAED,IAAMC,WAAW,GAAG,MAAMf,UAAU,CAACgB,aAAa,CAACC,SAAS,CACxD,CAAC;IAAEC,QAAQ,EAAEV;EAAQ,CAAC,CAAC,EACvB,4BACJ,CAAC;EACD,IAAIO,WAAW,CAACI,OAAO,CAAC,CAAC,CAAC,EAAE;IACxB,OAAOJ,WAAW,CAACI,OAAO,CAAC,CAAC,CAAC;EACjC;;EAEA;AACJ;AACA;AACA;AACA;EACI,IAAMC,KAAK,GAAGlE,cAAc,CAAC6D,WAAW,CAACK,KAAK,CAAC,CAAC,CAAC,CAAC;EAClD,IACIA,KAAK,CAACC,OAAO,IACb1E,wBAAwB,CAACyE,KAAK,CAAC,EACjC;IACE,IAAME,aAAa,GAAIF,KAAuE;IAE9F,IACI,CAACG,gDAAgD,CAC7CD,aAAa,CAACE,YAAY,CAAChD,IAAI,CAACiC,WAAW,EAC3CT,UAAU,CAACS,WACf,CAAC,EACH;MACE,MAAM7D,UAAU,CAAC,KAAK,EAAE;QACpB6E,IAAI,EAAE;UACFC,QAAQ,EAAE1B,UAAU,CAAC2B,IAAI;UACzBC,oBAAoB,EAAEN,aAAa,CAACE,YAAY,CAAChD,IAAI,CAACiC,WAAW;UACjEoB,WAAW,EAAE7B,UAAU,CAACS;QAC5B;MACJ,CAAC,CAAC;IACN;IAEA,IACIP,YAAY,IACZA,YAAY,KAAKoB,aAAa,CAACE,YAAY,CAAChD,IAAI,CAAC0B,YAAY,EAC/D;MACE,MAAMtD,UAAU,CAAC,KAAK,EAAE;QACpBsD,YAAY;QACZ4B,oBAAoB,EAAER,aAAa,CAACE,YAAY,CAAChD,IAAI,CAAC0B;MAC1D,CAAC,CAAC;IACN;IAEA,IAAM6B,mBAAmB,GAAGT,aAAa,CAACE,YAAY;IACtD,OAAOtE,cAAc,CAAC6E,mBAAmB,CAAC;EAC9C;EACA,MAAMX,KAAK;AACf;AAGA,OAAO,SAASG,gDAAgDA,CAC5DK,oBAA4B,EAC5BC,WAAmB,EACZ;EACP,IAAI,CAACD,oBAAoB,EAAE;IACvB,OAAO,KAAK;EAChB;EAEA,IACIC,WAAW,CAACG,QAAQ,CAAC,MAAM,CAAC,IAC5BH,WAAW,KAAKD,oBAAoB,EACtC;IACE,OAAO,KAAK;EAChB;EAEA,IAAMK,UAAU,GAAGL,oBAAoB,CAACM,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;EACrD,IAAMC,SAAS,GAAGN,WAAW,CAACK,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;EAC3C,IAAID,UAAU,KAAKE,SAAS,EAAE;IAC1B,OAAO,KAAK;EAChB;EACA,OAAO,IAAI;AACf;AAMA,OAAO,eAAeC,+BAA+BA,CACjDC,UAA6B,EAC7BC,qBAA6B,EAC7BnD,MAAyB,EAC3B;EAEE,
IAAIkD,UAAU,CAAClD,MAAM,CAACvB,OAAO,KAAKuB,MAAM,CAACvB,OAAO,EAAE;IAC9C,MAAMhB,UAAU,CAAC,KAAK,EAAE;MACpBuC,MAAM;MACNvB,OAAO,EAAEyE,UAAU,CAAClD,MAAM,CAACvB,OAAO;MAClC+D,IAAI,EAAEU,UAAU,CAACV,IAAI;MACrBU,UAAU;MACVZ,IAAI,EAAE;QACFa;MACJ;IACJ,CAAC,CAAC;EACN;EAEA,IAAMC,yBAAyB,GAAGC,sBAAsB,CAACH,UAAU,CAACV,IAAI,EAAEU,UAAU,CAAClD,MAAM,CAACsD,UAAU,CAAC;EACvG,IAAMC,eAAe,GAAG3D,+BAA+B,CACnDwD,yBAAyB,EACzBhF,2BACJ,CAAC;EAED,OAAO,IAAI,EAAE;IACT,IAAMoF,aAAa,GAAG,MAAM5F,iBAAiB,CACzCsF,UAAU,CAACX,QAAQ,CAACV,aAAa,EACjC0B,eACJ,CAAC;IACD,IAAME,QAAwD,GAAG3F,KAAK,CAACC,cAAc,CAACyF,aAAa,CAAC,CAAC;;IAErG;IACA,IAAME,YAAY,GAAGD,QAAQ,CAACpE,IAAI,CAACsE,iBAAiB,CAC/CC,IAAI,CAACC,GAAG,IAAIA,GAAG,CAACC,cAAc,KAAKX,qBAAqB,IAAIU,GAAG,CAAC7D,MAAM,CAACvB,OAAO,KAAKuB,MAAM,CAACvB,OAAO,CAAC;IACvG,IAAIiF,YAAY,EAAE;MACd;IACJ;;IAEA;IACAD,QAAQ,CAACpE,IAAI,CAACsE,iBAAiB,CAACI,IAAI,CAAC;MACjCD,cAAc,EAAEX,qBAAqB;MACrCnD;IACJ,CAAC,CAAC;IACF,IAAI;MACA,MAAMnC,WAAW,CACbqF,UAAU,CAACX,QAAQ,CAACV,aAAa,EACjC;QACImC,QAAQ,EAAEjG,cAAc,CAACyF,aAAa,CAAC;QACvCzB,QAAQ,EAAE0B;MACd,CAAC,EACD,qCACJ,CAAC;IACL,CAAC,CAAC,OAAOQ,GAAG,EAAE;MACV,IAAI,CAACzG,wBAAwB,CAACyG,GAAG,CAAC,EAAE;QAChC,MAAMA,GAAG;MACb;MACA;IACJ;EACJ;AACJ;AAEA,OAAO,eAAeC,oCAAoCA,CACtDhB,UAA6B,EAC7BC,qBAA6B,EAC7BnD,MAAyB,EAC3B;EACE,IAAIkD,UAAU,CAAClD,MAAM,CAACvB,OAAO,KAAKuB,MAAM,CAACvB,OAAO,EAAE;IAC9C,MAAMhB,UAAU,CAAC,KAAK,EAAE;MACpBuC,MAAM;MACNvB,OAAO,EAAEyE,UAAU,CAAClD,MAAM,CAACvB,OAAO;MAClC+D,IAAI,EAAEU,UAAU,CAACV,IAAI;MACrBU,UAAU;MACVZ,IAAI,EAAE;QACFa;MACJ;IACJ,CAAC,CAAC;EACN;EAEA,IAAMC,yBAAyB,GAAGC,sBAAsB,CAACH,UAAU,CAACV,IAAI,EAAEU,UAAU,CAAClD,MAAM,CAACsD,UAAU,CAAC;EACvG,IAAMC,eAAe,GAAG3D,+BAA+B,CACnDwD,yBAAyB,EACzBhF,2BACJ,CAAC;EAED,OAAO,IAAI,EAAE;IACT,IAAMoF,aAAa,GAAG,MAAM5F,iBAAiB,CACzCsF,UAAU,CAACX,QAAQ,CAACV,aAAa,EACjC0B,eACJ,CAAC;IACD,IAAME,QAAwD,GAAG3F,KAAK,CAACC,cAAc,CAACyF,aAAa,CAAC,CAAC;;IAErG;IACA,IAAMW,OAAO,GAAGV,QAAQ,CAACpE,IAAI,CAACsE,iBAAiB,CAC1CC,IAAI,CAACC,GAAG,IAAIA,GAAG,CAACC,cAAc,KAAKX,qBAAqB,IAAIU,GAAG,CAAC7D,MAAM,CAACvB,OAAO,KAAKuB,MAAM
,CAACvB,OAAO,CAAC;IACvG,IAAI,CAAC0F,OAAO,EAAE;MACV;IACJ;;IAEA;IACAV,QAAQ,CAACpE,IAAI,CAACsE,iBAAiB,GAAGF,QAAQ,CAACpE,IAAI,CAACsE,iBAAiB,CAACS,MAAM,CAACC,IAAI,IAAIA,IAAI,CAACP,cAAc,KAAKX,qBAAqB,CAAC;IAC/H,IAAI;MACA,MAAMtF,WAAW,CACbqF,UAAU,CAACX,QAAQ,CAACV,aAAa,EACjC;QACImC,QAAQ,EAAEjG,cAAc,CAACyF,aAAa,CAAC;QACvCzB,QAAQ,EAAE0B;MACd,CAAC,EACD,0CACJ,CAAC;IACL,CAAC,CAAC,OAAOQ,GAAG,EAAE;MACV,IAAI,CAACzG,wBAAwB,CAACyG,GAAG,CAAC,EAAE;QAChC,MAAMA,GAAG;MACb;MACA;IACJ;EACJ;AACJ;;AAIA;AACA;AACA;AACA;AACA,OAAO,SAASZ,sBAAsBA,CAACb,IAAY,EAAExC,MAAyB,EAAE;EAC5E,OAAOwC,IAAI,GAAG,GAAG,GAAGxC,MAAM,CAACvB,OAAO;AACtC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/rx-database.js b/dist/esm/rx-database.js deleted file mode 100644 index ef99b7b34ae..00000000000 --- a/dist/esm/rx-database.js +++ /dev/null @@ -1,533 +0,0 @@ -import _createClass from "@babel/runtime/helpers/createClass"; -import { IdleQueue } from 'custom-idle-queue'; -import { pluginMissing, flatClone, PROMISE_RESOLVE_FALSE, randomCouchString, ensureNotFalsy, getDefaultRevision, getDefaultRxDocumentMeta, defaultHashSha256, RXDB_VERSION } from "./plugins/utils/index.js"; -import { newRxError } from "./rx-error.js"; -import { createRxSchema } from "./rx-schema.js"; -import { runPluginHooks, runAsyncPluginHooks } from "./hooks.js"; -import { Subject } from 'rxjs'; -import { mergeMap } from 'rxjs/operators'; -import { createRxCollection } from "./rx-collection.js"; -import { flatCloneDocWithMeta, getSingleDocument, getWrappedStorageInstance, INTERNAL_STORAGE_NAME } from "./rx-storage-helper.js"; -import { ObliviousSet } from 'oblivious-set'; -import { ensureStorageTokenDocumentExists, getAllCollectionDocuments, getPrimaryKeyOfInternalDocument, INTERNAL_CONTEXT_COLLECTION, INTERNAL_STORE_SCHEMA, _collectionNamePrimary } from "./rx-database-internal-store.js"; -import { removeCollectionStorages } from "./rx-collection-helper.js"; -import { overwritable } from "./overwritable.js"; -/** - * stores the used 
database names+storage names - * so we can throw when the same database is created more then once. - */ -var USED_DATABASE_NAMES = new Set(); -var DB_COUNT = 0; -export var RxDatabaseBase = /*#__PURE__*/function () { - /** - * Contains all known non-closed storage instances - * that belong to this database. - * Used in plugins and unit tests. - */ - - function RxDatabaseBase(name, - /** - * Uniquely identifies the instance - * of this RxDatabase. - */ - token, storage, instanceCreationOptions, password, multiInstance, eventReduce = false, options = {}, - /** - * Stores information documents about the collections of the database - */ - internalStore, hashFunction, cleanupPolicy, allowSlowCount, reactivity) { - this.idleQueue = new IdleQueue(); - this.rxdbVersion = RXDB_VERSION; - this.storageInstances = new Set(); - this._subs = []; - this.startupErrors = []; - this.onDestroy = []; - this.destroyed = false; - this.collections = {}; - this.states = {}; - this.eventBulks$ = new Subject(); - this.observable$ = this.eventBulks$.pipe(mergeMap(changeEventBulk => changeEventBulk.events)); - this.storageToken = PROMISE_RESOLVE_FALSE; - this.storageTokenDocument = PROMISE_RESOLVE_FALSE; - this.emittedEventBulkIds = new ObliviousSet(60 * 1000); - this.name = name; - this.token = token; - this.storage = storage; - this.instanceCreationOptions = instanceCreationOptions; - this.password = password; - this.multiInstance = multiInstance; - this.eventReduce = eventReduce; - this.options = options; - this.internalStore = internalStore; - this.hashFunction = hashFunction; - this.cleanupPolicy = cleanupPolicy; - this.allowSlowCount = allowSlowCount; - this.reactivity = reactivity; - DB_COUNT++; - - /** - * In the dev-mode, we create a pseudoInstance - * to get all properties of RxDatabase and ensure they do not - * conflict with the collection names etc. - * So only if it is not pseudoInstance, - * we have all values to prepare a real RxDatabase. 
- * - * TODO this is ugly, we should use a different way in the dev-mode - * so that all non-dev-mode code can be cleaner. - */ - if (this.name !== 'pseudoInstance') { - /** - * Wrap the internal store - * to ensure that calls to it also end up in - * calculation of the idle state and the hooks. - */ - this.internalStore = getWrappedStorageInstance(this.asRxDatabase, internalStore, INTERNAL_STORE_SCHEMA); - - /** - * Start writing the storage token. - * Do not await the creation because it would run - * in a critical path that increases startup time. - * - * Writing the token takes about 20 milliseconds - * even on a fast adapter, so this is worth it. - */ - this.storageTokenDocument = ensureStorageTokenDocumentExists(this.asRxDatabase).catch(err => this.startupErrors.push(err)); - this.storageToken = this.storageTokenDocument.then(doc => doc.data.token).catch(err => this.startupErrors.push(err)); - } - } - var _proto = RxDatabaseBase.prototype; - _proto.getReactivityFactory = function getReactivityFactory() { - if (!this.reactivity) { - throw newRxError('DB14', { - database: this.name - }); - } - return this.reactivity; - } - - /** - * Because having unhandled exceptions would fail, - * we have to store the async errors of the constructor here - * so we can throw them later. - */ - - /** - * When the database is destroyed, - * these functions will be called an awaited. - * Used to automatically clean up stuff that - * belongs to this collection. - */ - - /** - * Unique token that is stored with the data. - * Used to detect if the dataset has been deleted - * and if two RxDatabase instances work on the same dataset or not. - * - * Because reading and writing the storageToken runs in the hot path - * of database creation, we do not await the storageWrites but instead - * work with the promise when we need the value. - */ - - /** - * Stores the whole state of the internal storage token document. - * We need this in some plugins. 
- */ - - /** - * Contains the ids of all event bulks that have been emitted - * by the database. - * Used to detect duplicates that come in again via BroadcastChannel - * or other streams. - * TODO instead of having this here, we should add a test to ensure each RxStorage - * behaves equal and does never emit duplicate eventBulks. - */; - /** - * This is the main handle-point for all change events - * ChangeEvents created by this instance go: - * RxDocument -> RxCollection -> RxDatabase.$emit -> MultiInstance - * ChangeEvents created by other instances go: - * MultiInstance -> RxDatabase.$emit -> RxCollection -> RxDatabase - */ - _proto.$emit = function $emit(changeEventBulk) { - if (this.emittedEventBulkIds.has(changeEventBulk.id)) { - return; - } - this.emittedEventBulkIds.add(changeEventBulk.id); - - // emit into own stream - this.eventBulks$.next(changeEventBulk); - } - - /** - * removes the collection-doc from the internalStore - */; - _proto.removeCollectionDoc = async function removeCollectionDoc(name, schema) { - var doc = await getSingleDocument(this.internalStore, getPrimaryKeyOfInternalDocument(_collectionNamePrimary(name, schema), INTERNAL_CONTEXT_COLLECTION)); - if (!doc) { - throw newRxError('SNH', { - name, - schema - }); - } - var writeDoc = flatCloneDocWithMeta(doc); - writeDoc._deleted = true; - await this.internalStore.bulkWrite([{ - document: writeDoc, - previous: doc - }], 'rx-database-remove-collection'); - } - - /** - * creates multiple RxCollections at once - * to be much faster by saving db txs and doing stuff in bulk-operations - * This function is not called often, but mostly in the critical path at the initial page load - * So it must be as fast as possible. 
- */; - _proto.addCollections = async function addCollections(collectionCreators) { - var jsonSchemas = {}; - var schemas = {}; - var bulkPutDocs = []; - var useArgsByCollectionName = {}; - await Promise.all(Object.entries(collectionCreators).map(async ([name, args]) => { - var collectionName = name; - var rxJsonSchema = args.schema; - jsonSchemas[collectionName] = rxJsonSchema; - var schema = createRxSchema(rxJsonSchema, this.hashFunction); - schemas[collectionName] = schema; - - // collection already exists - if (this.collections[name]) { - throw newRxError('DB3', { - name - }); - } - var collectionNameWithVersion = _collectionNamePrimary(name, rxJsonSchema); - var collectionDocData = { - id: getPrimaryKeyOfInternalDocument(collectionNameWithVersion, INTERNAL_CONTEXT_COLLECTION), - key: collectionNameWithVersion, - context: INTERNAL_CONTEXT_COLLECTION, - data: { - name: collectionName, - schemaHash: await schema.hash, - schema: schema.jsonSchema, - version: schema.version, - connectedStorages: [] - }, - _deleted: false, - _meta: getDefaultRxDocumentMeta(), - _rev: getDefaultRevision(), - _attachments: {} - }; - bulkPutDocs.push({ - document: collectionDocData - }); - var useArgs = Object.assign({}, args, { - name: collectionName, - schema, - database: this - }); - - // run hooks - var hookData = flatClone(args); - hookData.database = this; - hookData.name = name; - runPluginHooks('preCreateRxCollection', hookData); - useArgs.conflictHandler = hookData.conflictHandler; - useArgsByCollectionName[collectionName] = useArgs; - })); - var putDocsResult = await this.internalStore.bulkWrite(bulkPutDocs, 'rx-database-add-collection'); - await ensureNoStartupErrors(this); - await Promise.all(putDocsResult.error.map(async error => { - if (error.status !== 409) { - throw newRxError('DB12', { - database: this.name, - writeError: error - }); - } - var docInDb = ensureNotFalsy(error.documentInDb); - var collectionName = docInDb.data.name; - var schema = schemas[collectionName]; 
- // collection already exists but has different schema - if (docInDb.data.schemaHash !== (await schema.hash)) { - throw newRxError('DB6', { - database: this.name, - collection: collectionName, - previousSchemaHash: docInDb.data.schemaHash, - schemaHash: await schema.hash, - previousSchema: docInDb.data.schema, - schema: ensureNotFalsy(jsonSchemas[collectionName]) - }); - } - })); - var ret = {}; - await Promise.all(Object.keys(collectionCreators).map(async collectionName => { - var useArgs = useArgsByCollectionName[collectionName]; - var collection = await createRxCollection(useArgs); - ret[collectionName] = collection; - - // set as getter to the database - this.collections[collectionName] = collection; - if (!this[collectionName]) { - Object.defineProperty(this, collectionName, { - get: () => this.collections[collectionName] - }); - } - })); - return ret; - } - - /** - * runs the given function between idleQueue-locking - */; - _proto.lockedRun = function lockedRun(fn) { - return this.idleQueue.wrapCall(fn); - }; - _proto.requestIdlePromise = function requestIdlePromise() { - return this.idleQueue.requestIdlePromise(); - } - - /** - * Export database to a JSON friendly format. - */; - _proto.exportJSON = function exportJSON(_collections) { - throw pluginMissing('json-dump'); - }; - _proto.addState = function addState(_name) { - throw pluginMissing('state'); - } - - /** - * Import the parsed JSON export into the collection. - * @param _exportedJSON The previously exported data from the `.exportJSON()` method. - * @note When an interface is loaded in this collection all base properties of the type are typed as `any` - * since data could be encrypted. 
- */; - _proto.importJSON = function importJSON(_exportedJSON) { - throw pluginMissing('json-dump'); - }; - _proto.backup = function backup(_options) { - throw pluginMissing('backup'); - }; - _proto.leaderElector = function leaderElector() { - throw pluginMissing('leader-election'); - }; - _proto.isLeader = function isLeader() { - throw pluginMissing('leader-election'); - } - /** - * returns a promise which resolves when the instance becomes leader - */; - _proto.waitForLeadership = function waitForLeadership() { - throw pluginMissing('leader-election'); - }; - _proto.migrationStates = function migrationStates() { - throw pluginMissing('migration-schema'); - } - - /** - * destroys the database-instance and all collections - */; - _proto.destroy = async function destroy() { - if (this.destroyed) { - return PROMISE_RESOLVE_FALSE; - } - - // settings destroyed = true must be the first thing to do. - this.destroyed = true; - await runAsyncPluginHooks('preDestroyRxDatabase', this); - /** - * Complete the event stream - * to stop all subscribers who forgot to unsubscribe. - */ - this.eventBulks$.complete(); - DB_COUNT--; - this._subs.map(sub => sub.unsubscribe()); - - /** - * Destroying the pseudo instance will throw - * because stuff is missing - * TODO we should not need the pseudo instance on runtime. - * we should generate the property list on build time. 
- */ - if (this.name === 'pseudoInstance') { - return PROMISE_RESOLVE_FALSE; - } - - /** - * First wait until the database is idle - */ - return this.requestIdlePromise().then(() => Promise.all(this.onDestroy.map(fn => fn()))) - // destroy all collections - .then(() => Promise.all(Object.keys(this.collections).map(key => this.collections[key]).map(col => col.destroy()))) - // destroy internal storage instances - .then(() => this.internalStore.close()) - // remove combination from USED_COMBINATIONS-map - .then(() => USED_DATABASE_NAMES.delete(this.storage.name + '|' + this.name)).then(() => true); - } - - /** - * deletes the database and its stored data. - * Returns the names of all removed collections. - */; - _proto.remove = function remove() { - return this.destroy().then(() => removeRxDatabase(this.name, this.storage, this.password)); - }; - return _createClass(RxDatabaseBase, [{ - key: "$", - get: function () { - return this.observable$; - } - }, { - key: "asRxDatabase", - get: function () { - return this; - } - }]); -}(); - -/** - * checks if an instance with same name and storage already exists - * @throws {RxError} if used - */ -function throwIfDatabaseNameUsed(name, storage) { - var key = storage.name + '|' + name; - if (!USED_DATABASE_NAMES.has(key)) { - return; - } else { - throw newRxError('DB8', { - name, - storage: storage.name, - link: 'https://rxdb.info/rx-database.html#ignoreduplicate' - }); - } -} - -/** - * Creates the storage instances that are used internally in the database - * to store schemas and other configuration stuff. 
- */ -export async function createRxDatabaseStorageInstance(databaseInstanceToken, storage, databaseName, options, multiInstance, password) { - var internalStore = await storage.createStorageInstance({ - databaseInstanceToken, - databaseName, - collectionName: INTERNAL_STORAGE_NAME, - schema: INTERNAL_STORE_SCHEMA, - options, - multiInstance, - password, - devMode: overwritable.isDevMode() - }); - return internalStore; -} -export function createRxDatabase({ - storage, - instanceCreationOptions, - name, - password, - multiInstance = true, - eventReduce = true, - ignoreDuplicate = false, - options = {}, - cleanupPolicy, - allowSlowCount = false, - localDocuments = false, - hashFunction = defaultHashSha256, - reactivity -}) { - runPluginHooks('preCreateRxDatabase', { - storage, - instanceCreationOptions, - name, - password, - multiInstance, - eventReduce, - ignoreDuplicate, - options, - localDocuments - }); - // check if combination already used - if (!ignoreDuplicate) { - throwIfDatabaseNameUsed(name, storage); - } - USED_DATABASE_NAMES.add(storage.name + '|' + name); - var databaseInstanceToken = randomCouchString(10); - return createRxDatabaseStorageInstance(databaseInstanceToken, storage, name, instanceCreationOptions, multiInstance, password) - /** - * Creating the internal store might fail - * if some RxStorage wrapper is used that does some checks - * and then throw. - * In that case we have to properly clean up the database. 
- */.catch(err => { - USED_DATABASE_NAMES.delete(storage.name + '|' + name); - throw err; - }).then(storageInstance => { - var rxDatabase = new RxDatabaseBase(name, databaseInstanceToken, storage, instanceCreationOptions, password, multiInstance, eventReduce, options, storageInstance, hashFunction, cleanupPolicy, allowSlowCount, reactivity); - return runAsyncPluginHooks('createRxDatabase', { - database: rxDatabase, - creator: { - storage, - instanceCreationOptions, - name, - password, - multiInstance, - eventReduce, - ignoreDuplicate, - options, - localDocuments - } - }).then(() => rxDatabase); - }); -} - -/** - * Removes the database and all its known data - * with all known collections and all internal meta data. - * - * Returns the names of the removed collections. - */ -export async function removeRxDatabase(databaseName, storage, password) { - var databaseInstanceToken = randomCouchString(10); - var dbInternalsStorageInstance = await createRxDatabaseStorageInstance(databaseInstanceToken, storage, databaseName, {}, false, password); - var collectionDocs = await getAllCollectionDocuments(dbInternalsStorageInstance); - var collectionNames = new Set(); - collectionDocs.forEach(doc => collectionNames.add(doc.data.name)); - var removedCollectionNames = Array.from(collectionNames); - await Promise.all(removedCollectionNames.map(collectionName => removeCollectionStorages(storage, dbInternalsStorageInstance, databaseInstanceToken, databaseName, collectionName, password))); - await runAsyncPluginHooks('postRemoveRxDatabase', { - databaseName, - storage - }); - await dbInternalsStorageInstance.remove(); - return removedCollectionNames; -} -export function isRxDatabase(obj) { - return obj instanceof RxDatabaseBase; -} -export function dbCount() { - return DB_COUNT; -} - -/** - * Returns true if the given RxDatabase was the first - * instance that was created on the storage with this name. 
- * - * Can be used for some optimizations because on the first instantiation, - * we can assume that no data was written before. - */ -export async function isRxDatabaseFirstTimeInstantiated(database) { - var tokenDoc = await database.storageTokenDocument; - return tokenDoc.data.instanceToken === database.token; -} - -/** - * For better performance some tasks run async - * and are awaited later. - * But we still have to ensure that there have been no errors - * on database creation. - */ -export async function ensureNoStartupErrors(rxDatabase) { - await rxDatabase.storageToken; - if (rxDatabase.startupErrors[0]) { - throw rxDatabase.startupErrors[0]; - } -} -//# sourceMappingURL=rx-database.js.map \ No newline at end of file diff --git a/dist/esm/rx-database.js.map b/dist/esm/rx-database.js.map deleted file mode 100644 index 29a486d5347..00000000000 --- a/dist/esm/rx-database.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-database.js","names":["IdleQueue","pluginMissing","flatClone","PROMISE_RESOLVE_FALSE","randomCouchString","ensureNotFalsy","getDefaultRevision","getDefaultRxDocumentMeta","defaultHashSha256","RXDB_VERSION","newRxError","createRxSchema","runPluginHooks","runAsyncPluginHooks","Subject","mergeMap","createRxCollection","flatCloneDocWithMeta","getSingleDocument","getWrappedStorageInstance","INTERNAL_STORAGE_NAME","ObliviousSet","ensureStorageTokenDocumentExists","getAllCollectionDocuments","getPrimaryKeyOfInternalDocument","INTERNAL_CONTEXT_COLLECTION","INTERNAL_STORE_SCHEMA","_collectionNamePrimary","removeCollectionStorages","overwritable","USED_DATABASE_NAMES","Set","DB_COUNT","RxDatabaseBase","name","token","storage","instanceCreationOptions","password","multiInstance","eventReduce","options","internalStore","hashFunction","cleanupPolicy","allowSlowCount","reactivity","idleQueue","rxdbVersion","storageInstances","_subs","startupErrors","onDestroy","destroyed","collections","states","eventBulks$","observable$","pipe","changeEventBulk","
events","storageToken","storageTokenDocument","emittedEventBulkIds","asRxDatabase","catch","err","push","then","doc","data","_proto","prototype","getReactivityFactory","database","$emit","has","id","add","next","removeCollectionDoc","schema","writeDoc","_deleted","bulkWrite","document","previous","addCollections","collectionCreators","jsonSchemas","schemas","bulkPutDocs","useArgsByCollectionName","Promise","all","Object","entries","map","args","collectionName","rxJsonSchema","collectionNameWithVersion","collectionDocData","key","context","schemaHash","hash","jsonSchema","version","connectedStorages","_meta","_rev","_attachments","useArgs","assign","hookData","conflictHandler","putDocsResult","ensureNoStartupErrors","error","status","writeError","docInDb","documentInDb","collection","previousSchemaHash","previousSchema","ret","keys","defineProperty","get","lockedRun","fn","wrapCall","requestIdlePromise","exportJSON","_collections","addState","_name","importJSON","_exportedJSON","backup","_options","leaderElector","isLeader","waitForLeadership","migrationStates","destroy","complete","sub","unsubscribe","col","close","delete","remove","removeRxDatabase","_createClass","throwIfDatabaseNameUsed","link","createRxDatabaseStorageInstance","databaseInstanceToken","databaseName","createStorageInstance","devMode","isDevMode","createRxDatabase","ignoreDuplicate","localDocuments","storageInstance","rxDatabase","creator","dbInternalsStorageInstance","collectionDocs","collectionNames","forEach","removedCollectionNames","Array","from","isRxDatabase","obj","dbCount","isRxDatabaseFirstTimeInstantiated","tokenDoc","instanceToken"],"sources":["../../src/rx-database.ts"],"sourcesContent":["import { IdleQueue } from 'custom-idle-queue';\nimport type {\n LeaderElector\n} from 'broadcast-channel';\nimport type {\n CollectionsOfDatabase,\n RxDatabase,\n RxCollectionCreator,\n RxJsonSchema,\n RxCollection,\n RxDumpDatabase,\n RxDumpDatabaseAny,\n BackupOptions,\n RxStorage,\n 
RxStorageInstance,\n BulkWriteRow,\n RxChangeEvent,\n RxDatabaseCreator,\n RxChangeEventBulk,\n RxDocumentData,\n RxCleanupPolicy,\n InternalStoreDocType,\n InternalStoreStorageTokenDocType,\n InternalStoreCollectionDocType,\n RxTypeError,\n RxError,\n HashFunction,\n MaybePromise,\n RxState\n} from './types/index.d.ts';\n\nimport {\n pluginMissing,\n flatClone,\n PROMISE_RESOLVE_FALSE,\n randomCouchString,\n ensureNotFalsy,\n getDefaultRevision,\n getDefaultRxDocumentMeta,\n defaultHashSha256,\n RXDB_VERSION\n} from './plugins/utils/index.ts';\nimport {\n newRxError\n} from './rx-error.ts';\nimport {\n createRxSchema,\n RxSchema\n} from './rx-schema.ts';\nimport {\n runPluginHooks,\n runAsyncPluginHooks\n} from './hooks.ts';\nimport {\n Subject,\n Subscription,\n Observable\n} from 'rxjs';\nimport {\n mergeMap\n} from 'rxjs/operators';\nimport {\n createRxCollection\n} from './rx-collection.ts';\nimport {\n flatCloneDocWithMeta,\n getSingleDocument,\n getWrappedStorageInstance,\n INTERNAL_STORAGE_NAME,\n WrappedRxStorageInstance\n} from './rx-storage-helper.ts';\nimport type { RxBackupState } from './plugins/backup/index.ts';\nimport { ObliviousSet } from 'oblivious-set';\nimport {\n ensureStorageTokenDocumentExists,\n getAllCollectionDocuments,\n getPrimaryKeyOfInternalDocument,\n INTERNAL_CONTEXT_COLLECTION,\n INTERNAL_STORE_SCHEMA,\n _collectionNamePrimary\n} from './rx-database-internal-store.ts';\nimport { removeCollectionStorages } from './rx-collection-helper.ts';\nimport { overwritable } from './overwritable.ts';\nimport type { RxMigrationState } from './plugins/migration-schema/index.ts';\nimport type { RxReactivityFactory } from './types/plugins/reactivity.d.ts';\n\n/**\n * stores the used database names+storage names\n * so we can throw when the same database is created more then once.\n */\nconst USED_DATABASE_NAMES: Set = new Set();\n\nlet DB_COUNT = 0;\n\nexport class RxDatabaseBase<\n Internals,\n InstanceCreationOptions,\n Collections = 
CollectionsOfDatabase,\n Reactivity = unknown\n> {\n\n public readonly idleQueue: IdleQueue = new IdleQueue();\n public readonly rxdbVersion = RXDB_VERSION;\n\n /**\n * Contains all known non-closed storage instances\n * that belong to this database.\n * Used in plugins and unit tests.\n */\n public readonly storageInstances = new Set>();\n\n constructor(\n public readonly name: string,\n /**\n * Uniquely identifies the instance\n * of this RxDatabase.\n */\n public readonly token: string,\n public readonly storage: RxStorage,\n public readonly instanceCreationOptions: InstanceCreationOptions,\n public readonly password: any,\n public readonly multiInstance: boolean,\n public readonly eventReduce: boolean = false,\n public options: any = {},\n /**\n * Stores information documents about the collections of the database\n */\n public readonly internalStore: RxStorageInstance,\n public readonly hashFunction: HashFunction,\n public readonly cleanupPolicy?: Partial,\n public readonly allowSlowCount?: boolean,\n public readonly reactivity?: RxReactivityFactory\n ) {\n DB_COUNT++;\n\n /**\n * In the dev-mode, we create a pseudoInstance\n * to get all properties of RxDatabase and ensure they do not\n * conflict with the collection names etc.\n * So only if it is not pseudoInstance,\n * we have all values to prepare a real RxDatabase.\n *\n * TODO this is ugly, we should use a different way in the dev-mode\n * so that all non-dev-mode code can be cleaner.\n */\n if (this.name !== 'pseudoInstance') {\n /**\n * Wrap the internal store\n * to ensure that calls to it also end up in\n * calculation of the idle state and the hooks.\n */\n this.internalStore = getWrappedStorageInstance(\n this.asRxDatabase,\n internalStore,\n INTERNAL_STORE_SCHEMA\n );\n\n /**\n * Start writing the storage token.\n * Do not await the creation because it would run\n * in a critical path that increases startup time.\n *\n * Writing the token takes about 20 milliseconds\n * even on a fast adapter, so 
this is worth it.\n */\n this.storageTokenDocument = ensureStorageTokenDocumentExists(this.asRxDatabase)\n .catch(err => this.startupErrors.push(err) as any);\n this.storageToken = this.storageTokenDocument\n .then(doc => doc.data.token)\n .catch(err => this.startupErrors.push(err) as any);\n }\n }\n\n get $(): Observable> {\n return this.observable$;\n }\n\n public getReactivityFactory(): RxReactivityFactory {\n if (!this.reactivity) {\n throw newRxError('DB14', { database: this.name });\n }\n return this.reactivity;\n }\n\n public _subs: Subscription[] = [];\n\n /**\n * Because having unhandled exceptions would fail,\n * we have to store the async errors of the constructor here\n * so we can throw them later.\n */\n public startupErrors: (RxError | RxTypeError)[] = [];\n\n /**\n * When the database is destroyed,\n * these functions will be called an awaited.\n * Used to automatically clean up stuff that\n * belongs to this collection.\n */\n public onDestroy: (() => MaybePromise)[] = [];\n public destroyed: boolean = false;\n public collections: Collections = {} as any;\n public states: { [name: string]: RxState; } = {};\n public readonly eventBulks$: Subject> = new Subject();\n private observable$: Observable> = this.eventBulks$\n .pipe(\n mergeMap(changeEventBulk => changeEventBulk.events)\n );\n\n /**\n * Unique token that is stored with the data.\n * Used to detect if the dataset has been deleted\n * and if two RxDatabase instances work on the same dataset or not.\n *\n * Because reading and writing the storageToken runs in the hot path\n * of database creation, we do not await the storageWrites but instead\n * work with the promise when we need the value.\n */\n public storageToken: Promise = PROMISE_RESOLVE_FALSE as any;\n /**\n * Stores the whole state of the internal storage token document.\n * We need this in some plugins.\n */\n public storageTokenDocument: Promise> = PROMISE_RESOLVE_FALSE as any;\n\n /**\n * Contains the ids of all event bulks that 
have been emitted\n * by the database.\n * Used to detect duplicates that come in again via BroadcastChannel\n * or other streams.\n * TODO instead of having this here, we should add a test to ensure each RxStorage\n * behaves equal and does never emit duplicate eventBulks.\n */\n public emittedEventBulkIds: ObliviousSet = new ObliviousSet(60 * 1000);\n\n /**\n * This is the main handle-point for all change events\n * ChangeEvents created by this instance go:\n * RxDocument -> RxCollection -> RxDatabase.$emit -> MultiInstance\n * ChangeEvents created by other instances go:\n * MultiInstance -> RxDatabase.$emit -> RxCollection -> RxDatabase\n */\n $emit(changeEventBulk: RxChangeEventBulk) {\n if (this.emittedEventBulkIds.has(changeEventBulk.id)) {\n return;\n }\n this.emittedEventBulkIds.add(changeEventBulk.id);\n\n // emit into own stream\n this.eventBulks$.next(changeEventBulk);\n }\n\n /**\n * removes the collection-doc from the internalStore\n */\n async removeCollectionDoc(name: string, schema: any): Promise {\n const doc = await getSingleDocument(\n this.internalStore,\n getPrimaryKeyOfInternalDocument(\n _collectionNamePrimary(name, schema),\n INTERNAL_CONTEXT_COLLECTION\n )\n );\n if (!doc) {\n throw newRxError('SNH', { name, schema });\n }\n const writeDoc = flatCloneDocWithMeta(doc);\n writeDoc._deleted = true;\n\n await this.internalStore.bulkWrite([{\n document: writeDoc,\n previous: doc\n }], 'rx-database-remove-collection');\n }\n\n /**\n * creates multiple RxCollections at once\n * to be much faster by saving db txs and doing stuff in bulk-operations\n * This function is not called often, but mostly in the critical path at the initial page load\n * So it must be as fast as possible.\n */\n async addCollections>(collectionCreators: {\n [key in keyof CreatedCollections]: RxCollectionCreator\n }): Promise<{ [key in keyof CreatedCollections]: RxCollection }> {\n const jsonSchemas: { [key in keyof CreatedCollections]: RxJsonSchema } = {} as any;\n const 
schemas: { [key in keyof CreatedCollections]: RxSchema } = {} as any;\n const bulkPutDocs: BulkWriteRow[] = [];\n const useArgsByCollectionName: any = {};\n\n await Promise.all(\n Object.entries(collectionCreators).map(async ([name, args]) => {\n const collectionName: keyof CreatedCollections = name as any;\n const rxJsonSchema = (args as RxCollectionCreator).schema;\n jsonSchemas[collectionName] = rxJsonSchema;\n const schema = createRxSchema(rxJsonSchema, this.hashFunction);\n schemas[collectionName] = schema;\n\n // collection already exists\n if ((this.collections as any)[name]) {\n throw newRxError('DB3', {\n name\n });\n }\n\n const collectionNameWithVersion = _collectionNamePrimary(name, rxJsonSchema);\n const collectionDocData: RxDocumentData = {\n id: getPrimaryKeyOfInternalDocument(\n collectionNameWithVersion,\n INTERNAL_CONTEXT_COLLECTION\n ),\n key: collectionNameWithVersion,\n context: INTERNAL_CONTEXT_COLLECTION,\n data: {\n name: collectionName as any,\n schemaHash: await schema.hash,\n schema: schema.jsonSchema,\n version: schema.version,\n connectedStorages: []\n },\n _deleted: false,\n _meta: getDefaultRxDocumentMeta(),\n _rev: getDefaultRevision(),\n _attachments: {}\n };\n bulkPutDocs.push({\n document: collectionDocData\n });\n\n const useArgs: any = Object.assign(\n {},\n args,\n {\n name: collectionName,\n schema,\n database: this\n }\n );\n\n // run hooks\n const hookData: RxCollectionCreator & { name: string; } = flatClone(args) as any;\n (hookData as any).database = this;\n hookData.name = name;\n runPluginHooks('preCreateRxCollection', hookData);\n useArgs.conflictHandler = hookData.conflictHandler;\n\n useArgsByCollectionName[collectionName] = useArgs;\n })\n );\n\n\n const putDocsResult = await this.internalStore.bulkWrite(\n bulkPutDocs,\n 'rx-database-add-collection'\n );\n\n await ensureNoStartupErrors(this);\n\n await Promise.all(\n putDocsResult.error.map(async (error) => {\n if (error.status !== 409) {\n throw newRxError('DB12', 
{\n database: this.name,\n writeError: error\n });\n }\n const docInDb: RxDocumentData = ensureNotFalsy(error.documentInDb);\n const collectionName = docInDb.data.name;\n const schema = (schemas as any)[collectionName];\n // collection already exists but has different schema\n if (docInDb.data.schemaHash !== await schema.hash) {\n throw newRxError('DB6', {\n database: this.name,\n collection: collectionName,\n previousSchemaHash: docInDb.data.schemaHash,\n schemaHash: await schema.hash,\n previousSchema: docInDb.data.schema,\n schema: ensureNotFalsy((jsonSchemas as any)[collectionName])\n });\n }\n })\n );\n\n const ret: { [key in keyof CreatedCollections]: RxCollection } = {} as any;\n await Promise.all(\n Object.keys(collectionCreators).map(async (collectionName) => {\n const useArgs = useArgsByCollectionName[collectionName];\n const collection = await createRxCollection(useArgs);\n (ret as any)[collectionName] = collection;\n\n // set as getter to the database\n (this.collections as any)[collectionName] = collection;\n if (!(this as any)[collectionName]) {\n Object.defineProperty(this, collectionName, {\n get: () => (this.collections as any)[collectionName]\n });\n }\n })\n );\n\n return ret;\n }\n\n /**\n * runs the given function between idleQueue-locking\n */\n lockedRun(fn: (...args: any[]) => T): T extends Promise ? 
T : Promise {\n return this.idleQueue.wrapCall(fn) as any;\n }\n\n requestIdlePromise() {\n return this.idleQueue.requestIdlePromise();\n }\n\n /**\n * Export database to a JSON friendly format.\n */\n exportJSON(_collections?: string[]): Promise>;\n exportJSON(_collections?: string[]): Promise>;\n exportJSON(_collections?: string[]): Promise {\n throw pluginMissing('json-dump');\n }\n\n addState(_name?: string): Promise> {\n throw pluginMissing('state');\n }\n\n /**\n * Import the parsed JSON export into the collection.\n * @param _exportedJSON The previously exported data from the `.exportJSON()` method.\n * @note When an interface is loaded in this collection all base properties of the type are typed as `any`\n * since data could be encrypted.\n */\n importJSON(_exportedJSON: RxDumpDatabaseAny): Promise {\n throw pluginMissing('json-dump');\n }\n\n backup(_options: BackupOptions): RxBackupState {\n throw pluginMissing('backup');\n }\n\n public leaderElector(): LeaderElector {\n throw pluginMissing('leader-election');\n }\n\n public isLeader(): boolean {\n throw pluginMissing('leader-election');\n }\n /**\n * returns a promise which resolves when the instance becomes leader\n */\n public waitForLeadership(): Promise {\n throw pluginMissing('leader-election');\n }\n\n public migrationStates(): Observable {\n throw pluginMissing('migration-schema');\n }\n\n /**\n * destroys the database-instance and all collections\n */\n public async destroy(): Promise {\n if (this.destroyed) {\n return PROMISE_RESOLVE_FALSE;\n }\n\n // settings destroyed = true must be the first thing to do.\n this.destroyed = true;\n\n await runAsyncPluginHooks('preDestroyRxDatabase', this);\n /**\n * Complete the event stream\n * to stop all subscribers who forgot to unsubscribe.\n */\n this.eventBulks$.complete();\n\n DB_COUNT--;\n this._subs.map(sub => sub.unsubscribe());\n\n /**\n * Destroying the pseudo instance will throw\n * because stuff is missing\n * TODO we should not need the pseudo 
instance on runtime.\n * we should generate the property list on build time.\n */\n if (this.name === 'pseudoInstance') {\n return PROMISE_RESOLVE_FALSE;\n }\n\n /**\n * First wait until the database is idle\n */\n return this.requestIdlePromise()\n .then(() => Promise.all(this.onDestroy.map(fn => fn())))\n // destroy all collections\n .then(() => Promise.all(\n Object.keys(this.collections as any)\n .map(key => (this.collections as any)[key])\n .map(col => col.destroy())\n ))\n // destroy internal storage instances\n .then(() => this.internalStore.close())\n // remove combination from USED_COMBINATIONS-map\n .then(() => USED_DATABASE_NAMES.delete(this.storage.name + '|' + this.name))\n .then(() => true);\n }\n\n /**\n * deletes the database and its stored data.\n * Returns the names of all removed collections.\n */\n remove(): Promise {\n return this\n .destroy()\n .then(() => removeRxDatabase(this.name, this.storage, this.password));\n }\n\n get asRxDatabase(): RxDatabase<\n {},\n Internals,\n InstanceCreationOptions,\n Reactivity\n > {\n return this as any;\n }\n}\n\n/**\n * checks if an instance with same name and storage already exists\n * @throws {RxError} if used\n */\nfunction throwIfDatabaseNameUsed(\n name: string,\n storage: RxStorage\n) {\n const key = storage.name + '|' + name;\n if (!USED_DATABASE_NAMES.has(key)) {\n return;\n } else {\n throw newRxError('DB8', {\n name,\n storage: storage.name,\n link: 'https://rxdb.info/rx-database.html#ignoreduplicate'\n });\n }\n}\n\n/**\n * Creates the storage instances that are used internally in the database\n * to store schemas and other configuration stuff.\n */\nexport async function createRxDatabaseStorageInstance(\n databaseInstanceToken: string,\n storage: RxStorage,\n databaseName: string,\n options: InstanceCreationOptions,\n multiInstance: boolean,\n password?: string\n): Promise> {\n const internalStore = await storage.createStorageInstance(\n {\n databaseInstanceToken,\n databaseName,\n 
collectionName: INTERNAL_STORAGE_NAME,\n schema: INTERNAL_STORE_SCHEMA,\n options,\n multiInstance,\n password,\n devMode: overwritable.isDevMode()\n }\n );\n return internalStore;\n}\n\nexport function createRxDatabase<\n Collections = { [key: string]: RxCollection; },\n Internals = any,\n InstanceCreationOptions = any,\n Reactivity = unknown\n>(\n {\n storage,\n instanceCreationOptions,\n name,\n password,\n multiInstance = true,\n eventReduce = true,\n ignoreDuplicate = false,\n options = {},\n cleanupPolicy,\n allowSlowCount = false,\n localDocuments = false,\n hashFunction = defaultHashSha256,\n reactivity\n }: RxDatabaseCreator\n): Promise<\n RxDatabase\n> {\n runPluginHooks('preCreateRxDatabase', {\n storage,\n instanceCreationOptions,\n name,\n password,\n multiInstance,\n eventReduce,\n ignoreDuplicate,\n options,\n localDocuments\n });\n // check if combination already used\n if (!ignoreDuplicate) {\n throwIfDatabaseNameUsed(name, storage);\n }\n USED_DATABASE_NAMES.add(storage.name + '|' + name);\n\n const databaseInstanceToken = randomCouchString(10);\n\n return createRxDatabaseStorageInstance<\n Internals,\n InstanceCreationOptions\n >(\n databaseInstanceToken,\n storage,\n name,\n instanceCreationOptions as any,\n multiInstance,\n password\n )\n /**\n * Creating the internal store might fail\n * if some RxStorage wrapper is used that does some checks\n * and then throw.\n * In that case we have to properly clean up the database.\n */\n .catch(err => {\n USED_DATABASE_NAMES.delete(storage.name + '|' + name);\n throw err;\n })\n .then(storageInstance => {\n const rxDatabase: RxDatabase = new RxDatabaseBase(\n name,\n databaseInstanceToken,\n storage,\n instanceCreationOptions,\n password,\n multiInstance,\n eventReduce,\n options,\n storageInstance,\n hashFunction,\n cleanupPolicy,\n allowSlowCount,\n reactivity\n ) as any;\n\n return runAsyncPluginHooks('createRxDatabase', {\n database: rxDatabase,\n creator: {\n storage,\n instanceCreationOptions,\n 
name,\n password,\n multiInstance,\n eventReduce,\n ignoreDuplicate,\n options,\n localDocuments\n }\n }).then(() => rxDatabase);\n });\n}\n\n/**\n * Removes the database and all its known data\n * with all known collections and all internal meta data.\n *\n * Returns the names of the removed collections.\n */\nexport async function removeRxDatabase(\n databaseName: string,\n storage: RxStorage,\n password?: string\n): Promise {\n const databaseInstanceToken = randomCouchString(10);\n const dbInternalsStorageInstance = await createRxDatabaseStorageInstance(\n databaseInstanceToken,\n storage,\n databaseName,\n {},\n false,\n password\n );\n const collectionDocs = await getAllCollectionDocuments(dbInternalsStorageInstance);\n const collectionNames = new Set();\n collectionDocs.forEach(doc => collectionNames.add(doc.data.name));\n const removedCollectionNames: string[] = Array.from(collectionNames);\n\n await Promise.all(\n removedCollectionNames.map(collectionName => removeCollectionStorages(\n storage,\n dbInternalsStorageInstance,\n databaseInstanceToken,\n databaseName,\n collectionName,\n password\n ))\n );\n\n await runAsyncPluginHooks('postRemoveRxDatabase', {\n databaseName,\n storage\n });\n\n await dbInternalsStorageInstance.remove();\n return removedCollectionNames;\n}\n\nexport function isRxDatabase(obj: any) {\n return obj instanceof RxDatabaseBase;\n}\n\nexport function dbCount(): number {\n return DB_COUNT;\n}\n\n\n/**\n * Returns true if the given RxDatabase was the first\n * instance that was created on the storage with this name.\n *\n * Can be used for some optimizations because on the first instantiation,\n * we can assume that no data was written before.\n */\nexport async function isRxDatabaseFirstTimeInstantiated(\n database: RxDatabase\n): Promise {\n const tokenDoc = await database.storageTokenDocument;\n return tokenDoc.data.instanceToken === database.token;\n}\n\n\n/**\n * For better performance some tasks run async\n * and are awaited 
later.\n * But we still have to ensure that there have been no errors\n * on database creation.\n */\nexport async function ensureNoStartupErrors(\n rxDatabase: RxDatabaseBase\n) {\n await rxDatabase.storageToken;\n if (rxDatabase.startupErrors[0]) {\n throw rxDatabase.startupErrors[0];\n }\n}\n"],"mappings":";AAAA,SAASA,SAAS,QAAQ,mBAAmB;AA+B7C,SACIC,aAAa,EACbC,SAAS,EACTC,qBAAqB,EACrBC,iBAAiB,EACjBC,cAAc,EACdC,kBAAkB,EAClBC,wBAAwB,EACxBC,iBAAiB,EACjBC,YAAY,QACT,0BAA0B;AACjC,SACIC,UAAU,QACP,eAAe;AACtB,SACIC,cAAc,QAEX,gBAAgB;AACvB,SACIC,cAAc,EACdC,mBAAmB,QAChB,YAAY;AACnB,SACIC,OAAO,QAGJ,MAAM;AACb,SACIC,QAAQ,QACL,gBAAgB;AACvB,SACIC,kBAAkB,QACf,oBAAoB;AAC3B,SACIC,oBAAoB,EACpBC,iBAAiB,EACjBC,yBAAyB,EACzBC,qBAAqB,QAElB,wBAAwB;AAE/B,SAASC,YAAY,QAAQ,eAAe;AAC5C,SACIC,gCAAgC,EAChCC,yBAAyB,EACzBC,+BAA+B,EAC/BC,2BAA2B,EAC3BC,qBAAqB,EACrBC,sBAAsB,QACnB,iCAAiC;AACxC,SAASC,wBAAwB,QAAQ,2BAA2B;AACpE,SAASC,YAAY,QAAQ,mBAAmB;AAIhD;AACA;AACA;AACA;AACA,IAAMC,mBAAgC,GAAG,IAAIC,GAAG,CAAC,CAAC;AAElD,IAAIC,QAAQ,GAAG,CAAC;AAEhB,WAAaC,cAAc;EAUvB;AACJ;AACA;AACA;AACA;;EAGI,SAAAA,eACoBC,IAAY;EAC5B;AACR;AACA;AACA;EACwBC,KAAa,EACbC,OAAsD,EACtDC,uBAAgD,EAChDC,QAAa,EACbC,aAAsB,EACtBC,WAAoB,GAAG,KAAK,EACrCC,OAAY,GAAG,CAAC,CAAC;EACxB;AACR;AACA;EACwBC,aAA0F,EAC1FC,YAA0B,EAC1BC,aAAwC,EACxCC,cAAwB,EACxBC,UAAqC,EACvD;IAAA,KA/BcC,SAAS,GAAc,IAAI/C,SAAS,CAAC,CAAC;IAAA,KACtCgD,WAAW,GAAGvC,YAAY;IAAA,KAO1BwC,gBAAgB,GAAG,IAAIlB,GAAG,CAAoE,CAAC;IAAA,KA2ExGmB,KAAK,GAAmB,EAAE;IAAA,KAO1BC,aAAa,GAA8B,EAAE;IAAA,KAQ7CC,SAAS,GAAgC,EAAE;IAAA,KAC3CC,SAAS,GAAY,KAAK;IAAA,KAC1BC,WAAW,GAAgB,CAAC,CAAC;IAAA,KAC7BC,MAAM,GAAkD,CAAC,CAAC;IAAA,KACjDC,WAAW,GAAoC,IAAI1C,OAAO,CAAC,CAAC;IAAA,KACpE2C,WAAW,GAAmC,IAAI,CAACD,WAAW,CACjEE,IAAI,CACD3C,QAAQ,CAAC4C,eAAe,IAAIA,eAAe,CAACC,MAAM,CACtD,CAAC;IAAA,KAWEC,YAAY,GAAoB1D,qBAAqB;IAAA,KAKrD2D,oBAAoB,GAA8D3D,qBAAqB;IAAA,KAUvG4D,mBAAmB,GAAyB,IAAI1C,YAAY,CAAC,EAAE,GAAG,IAAI,CAAC;IAAA,KAzH1Da,IAAY,GAAZA,IAAY;IAAA,KAKZC,KAAa,GAAbA,KAAa;IAAA,KACbC,OAAsD,GAAtDA,OAAsD;IAAA,KACtDC,uBAAgD,GAAhDA,uBAAgD
;IAAA,KAChDC,QAAa,GAAbA,QAAa;IAAA,KACbC,aAAsB,GAAtBA,aAAsB;IAAA,KACtBC,WAAoB,GAApBA,WAAoB;IAAA,KAC7BC,OAAY,GAAZA,OAAY;IAAA,KAIHC,aAA0F,GAA1FA,aAA0F;IAAA,KAC1FC,YAA0B,GAA1BA,YAA0B;IAAA,KAC1BC,aAAwC,GAAxCA,aAAwC;IAAA,KACxCC,cAAwB,GAAxBA,cAAwB;IAAA,KACxBC,UAAqC,GAArCA,UAAqC;IAErDd,QAAQ,EAAE;;IAEV;AACR;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;IACQ,IAAI,IAAI,CAACE,IAAI,KAAK,gBAAgB,EAAE;MAChC;AACZ;AACA;AACA;AACA;MACY,IAAI,CAACQ,aAAa,GAAGvB,yBAAyB,CAC1C,IAAI,CAAC6C,YAAY,EACjBtB,aAAa,EACbhB,qBACJ,CAAC;;MAED;AACZ;AACA;AACA;AACA;AACA;AACA;AACA;MACY,IAAI,CAACoC,oBAAoB,GAAGxC,gCAAgC,CAAC,IAAI,CAAC0C,YAAY,CAAC,CAC1EC,KAAK,CAACC,GAAG,IAAI,IAAI,CAACf,aAAa,CAACgB,IAAI,CAACD,GAAG,CAAQ,CAAC;MACtD,IAAI,CAACL,YAAY,GAAG,IAAI,CAACC,oBAAoB,CACxCM,IAAI,CAACC,GAAG,IAAIA,GAAG,CAACC,IAAI,CAACnC,KAAK,CAAC,CAC3B8B,KAAK,CAACC,GAAG,IAAI,IAAI,CAACf,aAAa,CAACgB,IAAI,CAACD,GAAG,CAAQ,CAAC;IAC1D;EACJ;EAAC,IAAAK,MAAA,GAAAtC,cAAA,CAAAuC,SAAA;EAAAD,MAAA,CAMME,oBAAoB,GAA3B,SAAAA,qBAAA,EAA+D;IAC3D,IAAI,CAAC,IAAI,CAAC3B,UAAU,EAAE;MAClB,MAAMpC,UAAU,CAAC,MAAM,EAAE;QAAEgE,QAAQ,EAAE,IAAI,CAACxC;MAAK,CAAC,CAAC;IACrD;IACA,OAAO,IAAI,CAACY,UAAU;EAC1B;;EAIA;AACJ;AACA;AACA;AACA;;EAGI;AACJ;AACA;AACA;AACA;AACA;;EAWI;AACJ;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;EAEI;AACJ;AACA;AACA;;EAGI;AACJ;AACA;AACA;AACA;AACA;AACA;AACA,KAPI;EAUA;AACJ;AACA;AACA;AACA;AACA;AACA;EANIyB,MAAA,CAOAI,KAAK,GAAL,SAAAA,MAAMhB,eAAuC,EAAE;IAC3C,IAAI,IAAI,CAACI,mBAAmB,CAACa,GAAG,CAACjB,eAAe,CAACkB,EAAE,CAAC,EAAE;MAClD;IACJ;IACA,IAAI,CAACd,mBAAmB,CAACe,GAAG,CAACnB,eAAe,CAACkB,EAAE,CAAC;;IAEhD;IACA,IAAI,CAACrB,WAAW,CAACuB,IAAI,CAACpB,eAAe,CAAC;EAC1C;;EAEA;AACJ;AACA,KAFI;EAAAY,MAAA,CAGMS,mBAAmB,GAAzB,eAAAA,oBAA0B9C,IAAY,EAAE+C,MAAW,EAAiB;IAChE,IAAMZ,GAAG,GAAG,MAAMnD,iBAAiB,CAC/B,IAAI,CAACwB,aAAa,EAClBlB,+BAA+B,CAC3BG,sBAAsB,CAACO,IAAI,EAAE+C,MAAM,CAAC,EACpCxD,2BACJ,CACJ,CAAC;IACD,IAAI,CAAC4C,GAAG,EAAE;MACN,MAAM3D,UAAU,CAAC,KAAK,EAAE;QAAEwB,IAAI;QAAE+C;MAAO,CAAC,CAAC;IAC7C;IACA,IAAMC,QAAQ,GAAGjE,oBAAoB,CAACoD,GAAG,CAAC;IAC1Ca,QAAQ,CAACC,QAAQ,GAAG,IAAI;IAExB
,MAAM,IAAI,CAACzC,aAAa,CAAC0C,SAAS,CAAC,CAAC;MAChCC,QAAQ,EAAEH,QAAQ;MAClBI,QAAQ,EAAEjB;IACd,CAAC,CAAC,EAAE,+BAA+B,CAAC;EACxC;;EAEA;AACJ;AACA;AACA;AACA;AACA,KALI;EAAAE,MAAA,CAMMgB,cAAc,GAApB,eAAAA,eAAgEC,kBAE/D,EAA6F;IAC1F,IAAMC,WAAqE,GAAG,CAAC,CAAQ;IACvF,IAAMC,OAA6D,GAAG,CAAC,CAAQ;IAC/E,IAAMC,WAA2D,GAAG,EAAE;IACtE,IAAMC,uBAA4B,GAAG,CAAC,CAAC;IAEvC,MAAMC,OAAO,CAACC,GAAG,CACbC,MAAM,CAACC,OAAO,CAACR,kBAAkB,CAAC,CAACS,GAAG,CAAC,OAAO,CAAC/D,IAAI,EAAEgE,IAAI,CAAC,KAAK;MAC3D,IAAMC,cAAwC,GAAGjE,IAAW;MAC5D,IAAMkE,YAAY,GAAIF,IAAI,CAA8BjB,MAAM;MAC9DQ,WAAW,CAACU,cAAc,CAAC,GAAGC,YAAY;MAC1C,IAAMnB,MAAM,GAAGtE,cAAc,CAACyF,YAAY,EAAE,IAAI,CAACzD,YAAY,CAAC;MAC9D+C,OAAO,CAACS,cAAc,CAAC,GAAGlB,MAAM;;MAEhC;MACA,IAAK,IAAI,CAAC3B,WAAW,CAASpB,IAAI,CAAC,EAAE;QACjC,MAAMxB,UAAU,CAAC,KAAK,EAAE;UACpBwB;QACJ,CAAC,CAAC;MACN;MAEA,IAAMmE,yBAAyB,GAAG1E,sBAAsB,CAACO,IAAI,EAAEkE,YAAY,CAAC;MAC5E,IAAME,iBAAiE,GAAG;QACtEzB,EAAE,EAAErD,+BAA+B,CAC/B6E,yBAAyB,EACzB5E,2BACJ,CAAC;QACD8E,GAAG,EAAEF,yBAAyB;QAC9BG,OAAO,EAAE/E,2BAA2B;QACpC6C,IAAI,EAAE;UACFpC,IAAI,EAAEiE,cAAqB;UAC3BM,UAAU,EAAE,MAAMxB,MAAM,CAACyB,IAAI;UAC7BzB,MAAM,EAAEA,MAAM,CAAC0B,UAAU;UACzBC,OAAO,EAAE3B,MAAM,CAAC2B,OAAO;UACvBC,iBAAiB,EAAE;QACvB,CAAC;QACD1B,QAAQ,EAAE,KAAK;QACf2B,KAAK,EAAEvG,wBAAwB,CAAC,CAAC;QACjCwG,IAAI,EAAEzG,kBAAkB,CAAC,CAAC;QAC1B0G,YAAY,EAAE,CAAC;MACnB,CAAC;MACDrB,WAAW,CAACxB,IAAI,CAAC;QACbkB,QAAQ,EAAEiB;MACd,CAAC,CAAC;MAEF,IAAMW,OAAY,GAAGlB,MAAM,CAACmB,MAAM,CAC9B,CAAC,CAAC,EACFhB,IAAI,EACJ;QACIhE,IAAI,EAAEiE,cAAc;QACpBlB,MAAM;QACNP,QAAQ,EAAE;MACd,CACJ,CAAC;;MAED;MACA,IAAMyC,QAAsD,GAAGjH,SAAS,CAACgG,IAAI,CAAQ;MACpFiB,QAAQ,CAASzC,QAAQ,GAAG,IAAI;MACjCyC,QAAQ,CAACjF,IAAI,GAAGA,IAAI;MACpBtB,cAAc,CAAC,uBAAuB,EAAEuG,QAAQ,CAAC;MACjDF,OAAO,CAACG,eAAe,GAAGD,QAAQ,CAACC,eAAe;MAElDxB,uBAAuB,CAACO,cAAc,CAAC,GAAGc,OAAO;IACrD,CAAC,CACL,CAAC;IAGD,IAAMI,aAAa,GAAG,MAAM,IAAI,CAAC3E,aAAa,CAAC0C,SAAS,CACpDO,WAAW,EACX,4BACJ,CAAC;IAED,MAAM2B,qBAAqB,CAAC,IAAI,CAAC;IAEjC,MAAMzB,OAAO,CAACC,GAAG,CACbuB,aAAa,CAACE,KAAK,CAACtB,GAAG,CAAC,MAAOsB,KAAK,IAAK;MA
CrC,IAAIA,KAAK,CAACC,MAAM,KAAK,GAAG,EAAE;QACtB,MAAM9G,UAAU,CAAC,MAAM,EAAE;UACrBgE,QAAQ,EAAE,IAAI,CAACxC,IAAI;UACnBuF,UAAU,EAAEF;QAChB,CAAC,CAAC;MACN;MACA,IAAMG,OAAuD,GAAGrH,cAAc,CAACkH,KAAK,CAACI,YAAY,CAAC;MAClG,IAAMxB,cAAc,GAAGuB,OAAO,CAACpD,IAAI,CAACpC,IAAI;MACxC,IAAM+C,MAAM,GAAIS,OAAO,CAASS,cAAc,CAAC;MAC/C;MACA,IAAIuB,OAAO,CAACpD,IAAI,CAACmC,UAAU,MAAK,MAAMxB,MAAM,CAACyB,IAAI,GAAE;QAC/C,MAAMhG,UAAU,CAAC,KAAK,EAAE;UACpBgE,QAAQ,EAAE,IAAI,CAACxC,IAAI;UACnB0F,UAAU,EAAEzB,cAAc;UAC1B0B,kBAAkB,EAAEH,OAAO,CAACpD,IAAI,CAACmC,UAAU;UAC3CA,UAAU,EAAE,MAAMxB,MAAM,CAACyB,IAAI;UAC7BoB,cAAc,EAAEJ,OAAO,CAACpD,IAAI,CAACW,MAAM;UACnCA,MAAM,EAAE5E,cAAc,CAAEoF,WAAW,CAASU,cAAc,CAAC;QAC/D,CAAC,CAAC;MACN;IACJ,CAAC,CACL,CAAC;IAED,IAAM4B,GAAqF,GAAG,CAAC,CAAQ;IACvG,MAAMlC,OAAO,CAACC,GAAG,CACbC,MAAM,CAACiC,IAAI,CAACxC,kBAAkB,CAAC,CAACS,GAAG,CAAC,MAAOE,cAAc,IAAK;MAC1D,IAAMc,OAAO,GAAGrB,uBAAuB,CAACO,cAAc,CAAC;MACvD,IAAMyB,UAAU,GAAG,MAAM5G,kBAAkB,CAACiG,OAAO,CAAC;MACnDc,GAAG,CAAS5B,cAAc,CAAC,GAAGyB,UAAU;;MAEzC;MACC,IAAI,CAACtE,WAAW,CAAS6C,cAAc,CAAC,GAAGyB,UAAU;MACtD,IAAI,CAAE,IAAI,CAASzB,cAAc,CAAC,EAAE;QAChCJ,MAAM,CAACkC,cAAc,CAAC,IAAI,EAAE9B,cAAc,EAAE;UACxC+B,GAAG,EAAEA,CAAA,KAAO,IAAI,CAAC5E,WAAW,CAAS6C,cAAc;QACvD,CAAC,CAAC;MACN;IACJ,CAAC,CACL,CAAC;IAED,OAAO4B,GAAG;EACd;;EAEA;AACJ;AACA,KAFI;EAAAxD,MAAA,CAGA4D,SAAS,GAAT,SAAAA,UAAaC,EAAyB,EAA2C;IAC7E,OAAO,IAAI,CAACrF,SAAS,CAACsF,QAAQ,CAACD,EAAE,CAAC;EACtC,CAAC;EAAA7D,MAAA,CAED+D,kBAAkB,GAAlB,SAAAA,mBAAA,EAAqB;IACjB,OAAO,IAAI,CAACvF,SAAS,CAACuF,kBAAkB,CAAC,CAAC;EAC9C;;EAEA;AACJ;AACA,KAFI;EAAA/D,MAAA,CAKAgE,UAAU,GAAV,SAAAA,WAAWC,YAAuB,EAAgB;IAC9C,MAAMvI,aAAa,CAAC,WAAW,CAAC;EACpC,CAAC;EAAAsE,MAAA,CAEDkE,QAAQ,GAAR,SAAAA,SAAkBC,KAAc,EAAmC;IAC/D,MAAMzI,aAAa,CAAC,OAAO,CAAC;EAChC;;EAEA;AACJ;AACA;AACA;AACA;AACA,KALI;EAAAsE,MAAA,CAMAoE,UAAU,GAAV,SAAAA,WAAWC,aAA6C,EAAiB;IACrE,MAAM3I,aAAa,CAAC,WAAW,CAAC;EACpC,CAAC;EAAAsE,MAAA,CAEDsE,MAAM,GAAN,SAAAA,OAAOC,QAAuB,EAAiB;IAC3C,MAAM7I,aAAa,CAAC,QAAQ,CAAC;EACjC,CAAC;EAAAsE,MAAA,CAEMwE,aAAa,GAApB,SAAAA,cAAA,EAAsC;IAC
lC,MAAM9I,aAAa,CAAC,iBAAiB,CAAC;EAC1C,CAAC;EAAAsE,MAAA,CAEMyE,QAAQ,GAAf,SAAAA,SAAA,EAA2B;IACvB,MAAM/I,aAAa,CAAC,iBAAiB,CAAC;EAC1C;EACA;AACJ;AACA,KAFI;EAAAsE,MAAA,CAGO0E,iBAAiB,GAAxB,SAAAA,kBAAA,EAA6C;IACzC,MAAMhJ,aAAa,CAAC,iBAAiB,CAAC;EAC1C,CAAC;EAAAsE,MAAA,CAEM2E,eAAe,GAAtB,SAAAA,gBAAA,EAAyD;IACrD,MAAMjJ,aAAa,CAAC,kBAAkB,CAAC;EAC3C;;EAEA;AACJ;AACA,KAFI;EAAAsE,MAAA,CAGa4E,OAAO,GAApB,eAAAA,QAAA,EAAyC;IACrC,IAAI,IAAI,CAAC9F,SAAS,EAAE;MAChB,OAAOlD,qBAAqB;IAChC;;IAEA;IACA,IAAI,CAACkD,SAAS,GAAG,IAAI;IAErB,MAAMxC,mBAAmB,CAAC,sBAAsB,EAAE,IAAI,CAAC;IACvD;AACR;AACA;AACA;IACQ,IAAI,CAAC2C,WAAW,CAAC4F,QAAQ,CAAC,CAAC;IAE3BpH,QAAQ,EAAE;IACV,IAAI,CAACkB,KAAK,CAAC+C,GAAG,CAACoD,GAAG,IAAIA,GAAG,CAACC,WAAW,CAAC,CAAC,CAAC;;IAExC;AACR;AACA;AACA;AACA;AACA;IACQ,IAAI,IAAI,CAACpH,IAAI,KAAK,gBAAgB,EAAE;MAChC,OAAO/B,qBAAqB;IAChC;;IAEA;AACR;AACA;IACQ,OAAO,IAAI,CAACmI,kBAAkB,CAAC,CAAC,CAC3BlE,IAAI,CAAC,MAAMyB,OAAO,CAACC,GAAG,CAAC,IAAI,CAAC1C,SAAS,CAAC6C,GAAG,CAACmC,EAAE,IAAIA,EAAE,CAAC,CAAC,CAAC,CAAC;IACvD;IAAA,CACChE,IAAI,CAAC,MAAMyB,OAAO,CAACC,GAAG,CACnBC,MAAM,CAACiC,IAAI,CAAC,IAAI,CAAC1E,WAAkB,CAAC,CAC/B2C,GAAG,CAACM,GAAG,IAAK,IAAI,CAACjD,WAAW,CAASiD,GAAG,CAAC,CAAC,CAC1CN,GAAG,CAACsD,GAAG,IAAIA,GAAG,CAACJ,OAAO,CAAC,CAAC,CACjC,CAAC;IACD;IAAA,CACC/E,IAAI,CAAC,MAAM,IAAI,CAAC1B,aAAa,CAAC8G,KAAK,CAAC,CAAC;IACtC;IAAA,CACCpF,IAAI,CAAC,MAAMtC,mBAAmB,CAAC2H,MAAM,CAAC,IAAI,CAACrH,OAAO,CAACF,IAAI,GAAG,GAAG,GAAG,IAAI,CAACA,IAAI,CAAC,CAAC,CAC3EkC,IAAI,CAAC,MAAM,IAAI,CAAC;EACzB;;EAEA;AACJ;AACA;AACA,KAHI;EAAAG,MAAA,CAIAmF,MAAM,GAAN,SAAAA,OAAA,EAA4B;IACxB,OAAO,IAAI,CACNP,OAAO,CAAC,CAAC,CACT/E,IAAI,CAAC,MAAMuF,gBAAgB,CAAC,IAAI,CAACzH,IAAI,EAAE,IAAI,CAACE,OAAO,EAAE,IAAI,CAACE,QAAQ,CAAC,CAAC;EAC7E,CAAC;EAAA,OAAAsH,YAAA,CAAA3H,cAAA;IAAAsE,GAAA;IAAA2B,GAAA,EAtVD,SAAAA,CAAA,EAAwC;MACpC,OAAO,IAAI,CAACzE,WAAW;IAC3B;EAAC;IAAA8C,GAAA;IAAA2B,GAAA,EAsVD,SAAAA,CAAA,EAKE;MACE,OAAO,IAAI;IACf;EAAC;AAAA;;AAGL;AACA;AACA;AACA;AACA,SAAS2B,uBAAuBA,CAC5B3H,IAAY,EACZE,OAA4B,EAC9B;EACE,IAAMmE,GAAG,GAAGnE,OAAO,CAACF,IAAI,GAAG,GAAG
,GAAGA,IAAI;EACrC,IAAI,CAACJ,mBAAmB,CAAC8C,GAAG,CAAC2B,GAAG,CAAC,EAAE;IAC/B;EACJ,CAAC,MAAM;IACH,MAAM7F,UAAU,CAAC,KAAK,EAAE;MACpBwB,IAAI;MACJE,OAAO,EAAEA,OAAO,CAACF,IAAI;MACrB4H,IAAI,EAAE;IACV,CAAC,CAAC;EACN;AACJ;;AAEA;AACA;AACA;AACA;AACA,OAAO,eAAeC,+BAA+BA,CACjDC,qBAA6B,EAC7B5H,OAAsD,EACtD6H,YAAoB,EACpBxH,OAAgC,EAChCF,aAAsB,EACtBD,QAAiB,EACmE;EACpF,IAAMI,aAAa,GAAG,MAAMN,OAAO,CAAC8H,qBAAqB,CACrD;IACIF,qBAAqB;IACrBC,YAAY;IACZ9D,cAAc,EAAE/E,qBAAqB;IACrC6D,MAAM,EAAEvD,qBAAqB;IAC7Be,OAAO;IACPF,aAAa;IACbD,QAAQ;IACR6H,OAAO,EAAEtI,YAAY,CAACuI,SAAS,CAAC;EACpC,CACJ,CAAC;EACD,OAAO1H,aAAa;AACxB;AAEA,OAAO,SAAS2H,gBAAgBA,CAM5B;EACIjI,OAAO;EACPC,uBAAuB;EACvBH,IAAI;EACJI,QAAQ;EACRC,aAAa,GAAG,IAAI;EACpBC,WAAW,GAAG,IAAI;EAClB8H,eAAe,GAAG,KAAK;EACvB7H,OAAO,GAAG,CAAC,CAAC;EACZG,aAAa;EACbC,cAAc,GAAG,KAAK;EACtB0H,cAAc,GAAG,KAAK;EACtB5H,YAAY,GAAGnC,iBAAiB;EAChCsC;AAC+D,CAAC,EAGtE;EACElC,cAAc,CAAC,qBAAqB,EAAE;IAClCwB,OAAO;IACPC,uBAAuB;IACvBH,IAAI;IACJI,QAAQ;IACRC,aAAa;IACbC,WAAW;IACX8H,eAAe;IACf7H,OAAO;IACP8H;EACJ,CAAC,CAAC;EACF;EACA,IAAI,CAACD,eAAe,EAAE;IAClBT,uBAAuB,CAAC3H,IAAI,EAAEE,OAAO,CAAC;EAC1C;EACAN,mBAAmB,CAACgD,GAAG,CAAC1C,OAAO,CAACF,IAAI,GAAG,GAAG,GAAGA,IAAI,CAAC;EAElD,IAAM8H,qBAAqB,GAAG5J,iBAAiB,CAAC,EAAE,CAAC;EAEnD,OAAO2J,+BAA+B,CAIlCC,qBAAqB,EACrB5H,OAAO,EACPF,IAAI,EACJG,uBAAuB,EACvBE,aAAa,EACbD,QACJ;EACI;AACR;AACA;AACA;AACA;AACA,KALQ,CAMC2B,KAAK,CAACC,GAAG,IAAI;IACVpC,mBAAmB,CAAC2H,MAAM,CAACrH,OAAO,CAACF,IAAI,GAAG,GAAG,GAAGA,IAAI,CAAC;IACrD,MAAMgC,GAAG;EACb,CAAC,CAAC,CACDE,IAAI,CAACoG,eAAe,IAAI;IACrB,IAAMC,UAAmC,GAAG,IAAIxI,cAAc,CAC1DC,IAAI,EACJ8H,qBAAqB,EACrB5H,OAAO,EACPC,uBAAuB,EACvBC,QAAQ,EACRC,aAAa,EACbC,WAAW,EACXC,OAAO,EACP+H,eAAe,EACf7H,YAAY,EACZC,aAAa,EACbC,cAAc,EACdC,UACJ,CAAQ;IAER,OAAOjC,mBAAmB,CAAC,kBAAkB,EAAE;MAC3C6D,QAAQ,EAAE+F,UAAU;MACpBC,OAAO,EAAE;QACLtI,OAAO;QACPC,uBAAuB;QACvBH,IAAI;QACJI,QAAQ;QACRC,aAAa;QACbC,WAAW;QACX8H,eAAe;QACf7H,OAAO;QACP8H;MACJ;IACJ,CAAC,CAAC,CAACnG,IAAI,CAAC,MAAMqG,UAAU,CAAC;EAC7B,CAAC,CAAC;AACV;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,
eAAed,gBAAgBA,CAClCM,YAAoB,EACpB7H,OAA4B,EAC5BE,QAAiB,EACA;EACjB,IAAM0H,qBAAqB,GAAG5J,iBAAiB,CAAC,EAAE,CAAC;EACnD,IAAMuK,0BAA0B,GAAG,MAAMZ,+BAA+B,CACpEC,qBAAqB,EACrB5H,OAAO,EACP6H,YAAY,EACZ,CAAC,CAAC,EACF,KAAK,EACL3H,QACJ,CAAC;EACD,IAAMsI,cAAc,GAAG,MAAMrJ,yBAAyB,CAACoJ,0BAA0B,CAAC;EAClF,IAAME,eAAe,GAAG,IAAI9I,GAAG,CAAS,CAAC;EACzC6I,cAAc,CAACE,OAAO,CAACzG,GAAG,IAAIwG,eAAe,CAAC/F,GAAG,CAACT,GAAG,CAACC,IAAI,CAACpC,IAAI,CAAC,CAAC;EACjE,IAAM6I,sBAAgC,GAAGC,KAAK,CAACC,IAAI,CAACJ,eAAe,CAAC;EAEpE,MAAMhF,OAAO,CAACC,GAAG,CACbiF,sBAAsB,CAAC9E,GAAG,CAACE,cAAc,IAAIvE,wBAAwB,CACjEQ,OAAO,EACPuI,0BAA0B,EAC1BX,qBAAqB,EACrBC,YAAY,EACZ9D,cAAc,EACd7D,QACJ,CAAC,CACL,CAAC;EAED,MAAMzB,mBAAmB,CAAC,sBAAsB,EAAE;IAC9CoJ,YAAY;IACZ7H;EACJ,CAAC,CAAC;EAEF,MAAMuI,0BAA0B,CAACjB,MAAM,CAAC,CAAC;EACzC,OAAOqB,sBAAsB;AACjC;AAEA,OAAO,SAASG,YAAYA,CAACC,GAAQ,EAAE;EACnC,OAAOA,GAAG,YAAYlJ,cAAc;AACxC;AAEA,OAAO,SAASmJ,OAAOA,CAAA,EAAW;EAC9B,OAAOpJ,QAAQ;AACnB;;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,eAAeqJ,iCAAiCA,CACnD3G,QAAoB,EACJ;EAChB,IAAM4G,QAAQ,GAAG,MAAM5G,QAAQ,CAACZ,oBAAoB;EACpD,OAAOwH,QAAQ,CAAChH,IAAI,CAACiH,aAAa,KAAK7G,QAAQ,CAACvC,KAAK;AACzD;;AAGA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,eAAemF,qBAAqBA,CACvCmD,UAA8C,EAChD;EACE,MAAMA,UAAU,CAAC5G,YAAY;EAC7B,IAAI4G,UAAU,CAACtH,aAAa,CAAC,CAAC,CAAC,EAAE;IAC7B,MAAMsH,UAAU,CAACtH,aAAa,CAAC,CAAC,CAAC;EACrC;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/rx-document-prototype-merge.js b/dist/esm/rx-document-prototype-merge.js deleted file mode 100644 index 1c77c571c44..00000000000 --- a/dist/esm/rx-document-prototype-merge.js +++ /dev/null @@ -1,78 +0,0 @@ -/** - * For the ORM capabilities, - * we have to merge the document prototype - * with the ORM functions and the data - * We do this iterating over the properties and - * adding them to a new object. 
- * In the future we should do this by chaining the __proto__ objects - */ - -import { createRxDocumentConstructor, basePrototype, createWithConstructor as createRxDocumentWithConstructor } from "./rx-document.js"; -import { runPluginHooks } from "./hooks.js"; -import { overwritable } from "./overwritable.js"; -import { getFromMapOrCreate } from "./plugins/utils/index.js"; -var constructorForCollection = new WeakMap(); -export function getDocumentPrototype(rxCollection) { - var schemaProto = rxCollection.schema.getDocumentPrototype(); - var ormProto = getDocumentOrmPrototype(rxCollection); - var baseProto = basePrototype; - var proto = {}; - [schemaProto, ormProto, baseProto].forEach(obj => { - var props = Object.getOwnPropertyNames(obj); - props.forEach(key => { - var desc = Object.getOwnPropertyDescriptor(obj, key); - /** - * When enumerable is true, it will show on console dir(instance) - * To not pollute the output, only getters and methods are enumerable - */ - var enumerable = true; - if (key.startsWith('_') || key.endsWith('_') || key.startsWith('$') || key.endsWith('$')) enumerable = false; - if (typeof desc.value === 'function') { - // when getting a function, we automatically do a .bind(this) - Object.defineProperty(proto, key, { - get() { - return desc.value.bind(this); - }, - enumerable, - configurable: false - }); - } else { - desc.enumerable = enumerable; - desc.configurable = false; - if (desc.writable) desc.writable = false; - Object.defineProperty(proto, key, desc); - } - }); - }); - return proto; -} -export function getRxDocumentConstructor(rxCollection) { - return getFromMapOrCreate(constructorForCollection, rxCollection, () => createRxDocumentConstructor(getDocumentPrototype(rxCollection))); -} - -/** - * Create a RxDocument-instance from the jsonData - * and the prototype merge. - * You should never call this method directly, - * instead you should get the document from collection._docCache.getCachedRxDocument(). 
- */ -export function createNewRxDocument(rxCollection, documentConstructor, docData) { - var doc = createRxDocumentWithConstructor(documentConstructor, rxCollection, overwritable.deepFreezeWhenDevMode(docData)); - rxCollection._runHooksSync('post', 'create', docData, doc); - runPluginHooks('postCreateRxDocument', doc); - return doc; -} - -/** - * returns the prototype-object - * that contains the orm-methods, - * used in the proto-merge - */ -export function getDocumentOrmPrototype(rxCollection) { - var proto = {}; - Object.entries(rxCollection.methods).forEach(([k, v]) => { - proto[k] = v; - }); - return proto; -} -//# sourceMappingURL=rx-document-prototype-merge.js.map \ No newline at end of file diff --git a/dist/esm/rx-document-prototype-merge.js.map b/dist/esm/rx-document-prototype-merge.js.map deleted file mode 100644 index e1cce56201c..00000000000 --- a/dist/esm/rx-document-prototype-merge.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-document-prototype-merge.js","names":["createRxDocumentConstructor","basePrototype","createWithConstructor","createRxDocumentWithConstructor","runPluginHooks","overwritable","getFromMapOrCreate","constructorForCollection","WeakMap","getDocumentPrototype","rxCollection","schemaProto","schema","ormProto","getDocumentOrmPrototype","baseProto","proto","forEach","obj","props","Object","getOwnPropertyNames","key","desc","getOwnPropertyDescriptor","enumerable","startsWith","endsWith","value","defineProperty","get","bind","configurable","writable","getRxDocumentConstructor","createNewRxDocument","documentConstructor","docData","doc","deepFreezeWhenDevMode","_runHooksSync","entries","methods","k","v"],"sources":["../../src/rx-document-prototype-merge.ts"],"sourcesContent":["/**\n * For the ORM capabilities,\n * we have to merge the document prototype\n * with the ORM functions and the data\n * We do this iterating over the properties and\n * adding them to a new object.\n * In the future we should do this by chaining the 
__proto__ objects\n */\n\nimport type {\n RxCollection,\n RxDocument,\n RxDocumentData\n} from './types/index.d.ts';\nimport {\n createRxDocumentConstructor,\n basePrototype,\n createWithConstructor as createRxDocumentWithConstructor\n} from './rx-document.ts';\nimport {\n runPluginHooks\n} from './hooks.ts';\nimport { overwritable } from './overwritable.ts';\nimport { getFromMapOrCreate } from './plugins/utils/index.ts';\n\nconst constructorForCollection = new WeakMap();\n\nexport function getDocumentPrototype(\n rxCollection: RxCollection\n): any {\n const schemaProto = rxCollection.schema.getDocumentPrototype();\n const ormProto = getDocumentOrmPrototype(rxCollection);\n const baseProto = basePrototype;\n const proto = {};\n [\n schemaProto,\n ormProto,\n baseProto\n ].forEach(obj => {\n const props = Object.getOwnPropertyNames(obj);\n props.forEach(key => {\n const desc: any = Object.getOwnPropertyDescriptor(obj, key);\n /**\n * When enumerable is true, it will show on console dir(instance)\n * To not pollute the output, only getters and methods are enumerable\n */\n let enumerable = true;\n if (\n key.startsWith('_') ||\n key.endsWith('_') ||\n key.startsWith('$') ||\n key.endsWith('$')\n ) enumerable = false;\n\n if (typeof desc.value === 'function') {\n // when getting a function, we automatically do a .bind(this)\n Object.defineProperty(proto, key, {\n get() {\n return desc.value.bind(this);\n },\n enumerable,\n configurable: false\n });\n\n } else {\n desc.enumerable = enumerable;\n desc.configurable = false;\n if (desc.writable)\n desc.writable = false;\n Object.defineProperty(proto, key, desc);\n }\n });\n });\n return proto;\n}\n\nexport function getRxDocumentConstructor(\n rxCollection: RxCollection\n) {\n return getFromMapOrCreate(\n constructorForCollection,\n rxCollection,\n () => createRxDocumentConstructor(\n getDocumentPrototype(rxCollection as any)\n )\n );\n}\n\n/**\n * Create a RxDocument-instance from the jsonData\n * and the prototype 
merge.\n * You should never call this method directly,\n * instead you should get the document from collection._docCache.getCachedRxDocument().\n */\nexport function createNewRxDocument(\n rxCollection: RxCollection,\n documentConstructor: any,\n docData: RxDocumentData\n): RxDocument {\n const doc = createRxDocumentWithConstructor(\n documentConstructor,\n rxCollection as any,\n overwritable.deepFreezeWhenDevMode(docData as any)\n );\n rxCollection._runHooksSync('post', 'create', docData, doc);\n runPluginHooks('postCreateRxDocument', doc);\n return doc as any;\n}\n\n\n/**\n * returns the prototype-object\n * that contains the orm-methods,\n * used in the proto-merge\n */\nexport function getDocumentOrmPrototype(rxCollection: RxCollection): any {\n const proto: any = {};\n Object\n .entries(rxCollection.methods)\n .forEach(([k, v]) => {\n proto[k] = v;\n });\n return proto;\n}\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAOA,SACIA,2BAA2B,EAC3BC,aAAa,EACbC,qBAAqB,IAAIC,+BAA+B,QACrD,kBAAkB;AACzB,SACIC,cAAc,QACX,YAAY;AACnB,SAASC,YAAY,QAAQ,mBAAmB;AAChD,SAASC,kBAAkB,QAAQ,0BAA0B;AAE7D,IAAMC,wBAAwB,GAAG,IAAIC,OAAO,CAAC,CAAC;AAE9C,OAAO,SAASC,oBAAoBA,CAChCC,YAA0B,EACvB;EACH,IAAMC,WAAW,GAAGD,YAAY,CAACE,MAAM,CAACH,oBAAoB,CAAC,CAAC;EAC9D,IAAMI,QAAQ,GAAGC,uBAAuB,CAACJ,YAAY,CAAC;EACtD,IAAMK,SAAS,GAAGd,aAAa;EAC/B,IAAMe,KAAK,GAAG,CAAC,CAAC;EAChB,CACIL,WAAW,EACXE,QAAQ,EACRE,SAAS,CACZ,CAACE,OAAO,CAACC,GAAG,IAAI;IACb,IAAMC,KAAK,GAAGC,MAAM,CAACC,mBAAmB,CAACH,GAAG,CAAC;IAC7CC,KAAK,CAACF,OAAO,CAACK,GAAG,IAAI;MACjB,IAAMC,IAAS,GAAGH,MAAM,CAACI,wBAAwB,CAACN,GAAG,EAAEI,GAAG,CAAC;MAC3D;AACZ;AACA;AACA;MACY,IAAIG,UAAU,GAAG,IAAI;MACrB,IACIH,GAAG,CAACI,UAAU,CAAC,GAAG,CAAC,IACnBJ,GAAG,CAACK,QAAQ,CAAC,GAAG,CAAC,IACjBL,GAAG,CAACI,UAAU,CAAC,GAAG,CAAC,IACnBJ,GAAG,CAACK,QAAQ,CAAC,GAAG,CAAC,EACnBF,UAAU,GAAG,KAAK;MAEpB,IAAI,OAAOF,IAAI,CAACK,KAAK,KAAK,UAAU,EAAE;QAClC;QACAR,MAAM,CAACS,cAAc,CAACb,KAAK,EAAEM,GAAG,EAAE;UAC9BQ,GAAGA,CAAA,EAAG;YACF,OAAOP,IAAI,CAACK,KAAK,CAACG,IAAI,CAAC,IAAI,CAAC;U
AChC,CAAC;UACDN,UAAU;UACVO,YAAY,EAAE;QAClB,CAAC,CAAC;MAEN,CAAC,MAAM;QACHT,IAAI,CAACE,UAAU,GAAGA,UAAU;QAC5BF,IAAI,CAACS,YAAY,GAAG,KAAK;QACzB,IAAIT,IAAI,CAACU,QAAQ,EACbV,IAAI,CAACU,QAAQ,GAAG,KAAK;QACzBb,MAAM,CAACS,cAAc,CAACb,KAAK,EAAEM,GAAG,EAAEC,IAAI,CAAC;MAC3C;IACJ,CAAC,CAAC;EACN,CAAC,CAAC;EACF,OAAOP,KAAK;AAChB;AAEA,OAAO,SAASkB,wBAAwBA,CACpCxB,YAA0C,EAC5C;EACE,OAAOJ,kBAAkB,CACrBC,wBAAwB,EACxBG,YAAY,EACZ,MAAMV,2BAA2B,CAC7BS,oBAAoB,CAACC,YAAmB,CAC5C,CACJ,CAAC;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASyB,mBAAmBA,CAC/BzB,YAA8D,EAC9D0B,mBAAwB,EACxBC,OAAkC,EACI;EACtC,IAAMC,GAAG,GAAGnC,+BAA+B,CACvCiC,mBAAmB,EACnB1B,YAAY,EACZL,YAAY,CAACkC,qBAAqB,CAACF,OAAc,CACrD,CAAC;EACD3B,YAAY,CAAC8B,aAAa,CAAC,MAAM,EAAE,QAAQ,EAAEH,OAAO,EAAEC,GAAG,CAAC;EAC1DlC,cAAc,CAAC,sBAAsB,EAAEkC,GAAG,CAAC;EAC3C,OAAOA,GAAG;AACd;;AAGA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASxB,uBAAuBA,CAACJ,YAA0B,EAAO;EACrE,IAAMM,KAAU,GAAG,CAAC,CAAC;EACrBI,MAAM,CACDqB,OAAO,CAAC/B,YAAY,CAACgC,OAAO,CAAC,CAC7BzB,OAAO,CAAC,CAAC,CAAC0B,CAAC,EAAEC,CAAC,CAAC,KAAK;IACjB5B,KAAK,CAAC2B,CAAC,CAAC,GAAGC,CAAC;EAChB,CAAC,CAAC;EACN,OAAO5B,KAAK;AAChB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/rx-document.js b/dist/esm/rx-document.js deleted file mode 100644 index 0e2bf406b82..00000000000 --- a/dist/esm/rx-document.js +++ /dev/null @@ -1,366 +0,0 @@ -import { distinctUntilChanged, filter, map, shareReplay, startWith } from 'rxjs/operators'; -import { clone, trimDots, pluginMissing, flatClone, PROMISE_RESOLVE_NULL, RXJS_SHARE_REPLAY_DEFAULTS, getProperty, getFromMapOrCreate } from "./plugins/utils/index.js"; -import { newRxError } from "./rx-error.js"; -import { runPluginHooks } from "./hooks.js"; -import { getDocumentDataOfRxChangeEvent } from "./rx-change-event.js"; -import { overwritable } from "./overwritable.js"; -import { getSchemaByObjectPath } from "./rx-schema-helper.js"; -import { throwIfIsStorageWriteError } from "./rx-storage-helper.js"; -import { modifierFromPublicToInternal } from 
"./incremental-write.js"; -export var basePrototype = { - get primaryPath() { - var _this = this; - if (!_this.isInstanceOfRxDocument) { - return undefined; - } - return _this.collection.schema.primaryPath; - }, - get primary() { - var _this = this; - if (!_this.isInstanceOfRxDocument) { - return undefined; - } - return _this._data[_this.primaryPath]; - }, - get revision() { - var _this = this; - if (!_this.isInstanceOfRxDocument) { - return undefined; - } - return _this._data._rev; - }, - get deleted$() { - var _this = this; - if (!_this.isInstanceOfRxDocument) { - return undefined; - } - return _this.$.pipe(map(d => d._data._deleted)); - }, - get deleted$$() { - var _this = this; - var reactivity = _this.collection.database.getReactivityFactory(); - return reactivity.fromObservable(_this.deleted$, _this.getLatest().deleted, _this.collection.database); - }, - get deleted() { - var _this = this; - if (!_this.isInstanceOfRxDocument) { - return undefined; - } - return _this._data._deleted; - }, - getLatest() { - var latestDocData = this.collection._docCache.getLatestDocumentData(this.primary); - return this.collection._docCache.getCachedRxDocument(latestDocData); - }, - /** - * returns the observable which emits the plain-data of this document - */ - get $() { - var _this = this; - return _this.collection.$.pipe(filter(changeEvent => !changeEvent.isLocal), filter(changeEvent => changeEvent.documentId === this.primary), map(changeEvent => getDocumentDataOfRxChangeEvent(changeEvent)), startWith(_this.collection._docCache.getLatestDocumentData(this.primary)), distinctUntilChanged((prev, curr) => prev._rev === curr._rev), map(docData => this.collection._docCache.getCachedRxDocument(docData)), shareReplay(RXJS_SHARE_REPLAY_DEFAULTS)); - }, - get $$() { - var _this = this; - var reactivity = _this.collection.database.getReactivityFactory(); - return reactivity.fromObservable(_this.$, _this.getLatest()._data, _this.collection.database); - }, - /** - * returns observable of 
the value of the given path - */ - get$(path) { - if (overwritable.isDevMode()) { - if (path.includes('.item.')) { - throw newRxError('DOC1', { - path - }); - } - if (path === this.primaryPath) { - throw newRxError('DOC2'); - } - - // final fields cannot be modified and so also not observed - if (this.collection.schema.finalFields.includes(path)) { - throw newRxError('DOC3', { - path - }); - } - var schemaObj = getSchemaByObjectPath(this.collection.schema.jsonSchema, path); - if (!schemaObj) { - throw newRxError('DOC4', { - path - }); - } - } - return this.$.pipe(map(data => getProperty(data, path)), distinctUntilChanged()); - }, - get$$(path) { - var obs = this.get$(path); - var reactivity = this.collection.database.getReactivityFactory(); - return reactivity.fromObservable(obs, this.getLatest().get(path), this.collection.database); - }, - /** - * populate the given path - */ - populate(path) { - var schemaObj = getSchemaByObjectPath(this.collection.schema.jsonSchema, path); - var value = this.get(path); - if (!value) { - return PROMISE_RESOLVE_NULL; - } - if (!schemaObj) { - throw newRxError('DOC5', { - path - }); - } - if (!schemaObj.ref) { - throw newRxError('DOC6', { - path, - schemaObj - }); - } - var refCollection = this.collection.database.collections[schemaObj.ref]; - if (!refCollection) { - throw newRxError('DOC7', { - ref: schemaObj.ref, - path, - schemaObj - }); - } - if (schemaObj.type === 'array') { - return refCollection.findByIds(value).exec().then(res => { - var valuesIterator = res.values(); - return Array.from(valuesIterator); - }); - } else { - return refCollection.findOne(value).exec(); - } - }, - /** - * get data by objectPath - * @hotPath Performance here is really important, - * run some tests before changing anything. 
- */ - get(objPath) { - return getFromMapOrCreate(this._propertyCache, objPath, () => { - var valueObj = getProperty(this._data, objPath); - - // direct return if array or non-object - if (typeof valueObj !== 'object' || valueObj === null || Array.isArray(valueObj)) { - return overwritable.deepFreezeWhenDevMode(valueObj); - } - var _this = this; - var proxy = new Proxy( - /** - * In dev-mode, the _data is deep-frozen - * so we have to flat clone here so that - * the proxy can work. - */ - flatClone(valueObj), { - get(target, property) { - if (typeof property !== 'string') { - return target[property]; - } - var lastChar = property.charAt(property.length - 1); - if (property.endsWith('$$')) { - var key = property.slice(0, -2); - return _this.get$$(trimDots(objPath + '.' + key)); - } else if (lastChar === '$') { - var _key = property.slice(0, -1); - return _this.get$(trimDots(objPath + '.' + _key)); - } else if (lastChar === '_') { - var _key2 = property.slice(0, -1); - return _this.populate(trimDots(objPath + '.' + _key2)); - } else { - return _this.get(trimDots(objPath + '.' 
+ property)); - } - } - }); - return proxy; - }); - }, - toJSON(withMetaFields = false) { - if (!withMetaFields) { - var data = flatClone(this._data); - delete data._rev; - delete data._attachments; - delete data._deleted; - delete data._meta; - return overwritable.deepFreezeWhenDevMode(data); - } else { - return overwritable.deepFreezeWhenDevMode(this._data); - } - }, - toMutableJSON(withMetaFields = false) { - return clone(this.toJSON(withMetaFields)); - }, - /** - * updates document - * @overwritten by plugin (optional) - * @param updateObj mongodb-like syntax - */ - update(_updateObj) { - throw pluginMissing('update'); - }, - incrementalUpdate(_updateObj) { - throw pluginMissing('update'); - }, - updateCRDT(_updateObj) { - throw pluginMissing('crdt'); - }, - putAttachment() { - throw pluginMissing('attachments'); - }, - getAttachment() { - throw pluginMissing('attachments'); - }, - allAttachments() { - throw pluginMissing('attachments'); - }, - get allAttachments$() { - throw pluginMissing('attachments'); - }, - async modify(mutationFunction, - // used by some plugins that wrap the method - _context) { - var oldData = this._data; - var newData = await modifierFromPublicToInternal(mutationFunction)(oldData); - return this._saveData(newData, oldData); - }, - /** - * runs an incremental update over the document - * @param function that takes the document-data and returns a new data-object - */ - incrementalModify(mutationFunction, - // used by some plugins that wrap the method - _context) { - return this.collection.incrementalWriteQueue.addWrite(this._data, modifierFromPublicToInternal(mutationFunction)).then(result => this.collection._docCache.getCachedRxDocument(result)); - }, - patch(patch) { - var oldData = this._data; - var newData = clone(oldData); - Object.entries(patch).forEach(([k, v]) => { - newData[k] = v; - }); - return this._saveData(newData, oldData); - }, - /** - * patches the given properties - */ - incrementalPatch(patch) { - return 
this.incrementalModify(docData => { - Object.entries(patch).forEach(([k, v]) => { - docData[k] = v; - }); - return docData; - }); - }, - /** - * saves the new document-data - * and handles the events - */ - async _saveData(newData, oldData) { - newData = flatClone(newData); - - // deleted documents cannot be changed - if (this._data._deleted) { - throw newRxError('DOC11', { - id: this.primary, - document: this - }); - } - await beforeDocumentUpdateWrite(this.collection, newData, oldData); - var writeResult = await this.collection.storageInstance.bulkWrite([{ - previous: oldData, - document: newData - }], 'rx-document-save-data'); - var isError = writeResult.error[0]; - throwIfIsStorageWriteError(this.collection, this.primary, newData, isError); - await this.collection._runHooks('post', 'save', newData, this); - return this.collection._docCache.getCachedRxDocument(writeResult.success[0]); - }, - /** - * Remove the document. - * Notice that there is no hard delete, - * instead deleted documents get flagged with _deleted=true. 
- */ - remove() { - var collection = this.collection; - if (this.deleted) { - return Promise.reject(newRxError('DOC13', { - document: this, - id: this.primary - })); - } - var deletedData = flatClone(this._data); - var removedDocData; - return collection._runHooks('pre', 'remove', deletedData, this).then(async () => { - deletedData._deleted = true; - var writeResult = await collection.storageInstance.bulkWrite([{ - previous: this._data, - document: deletedData - }], 'rx-document-remove'); - var isError = writeResult.error[0]; - throwIfIsStorageWriteError(collection, this.primary, deletedData, isError); - return writeResult.success[0]; - }).then(removed => { - removedDocData = removed; - return this.collection._runHooks('post', 'remove', deletedData, this); - }).then(() => { - return this.collection._docCache.getCachedRxDocument(removedDocData); - }); - }, - incrementalRemove() { - return this.incrementalModify(async docData => { - await this.collection._runHooks('pre', 'remove', docData, this); - docData._deleted = true; - return docData; - }).then(async newDoc => { - await this.collection._runHooks('post', 'remove', newDoc._data, newDoc); - return newDoc; - }); - }, - destroy() { - throw newRxError('DOC14'); - } -}; -export function createRxDocumentConstructor(proto = basePrototype) { - var constructor = function RxDocumentConstructor(collection, docData) { - this.collection = collection; - - // assume that this is always equal to the doc-data in the database - this._data = docData; - this._propertyCache = new Map(); - - /** - * because of the prototype-merge, - * we can not use the native instanceof operator - */ - this.isInstanceOfRxDocument = true; - }; - constructor.prototype = proto; - return constructor; -} -export function createWithConstructor(constructor, collection, jsonData) { - var doc = new constructor(collection, jsonData); - runPluginHooks('createRxDocument', doc); - return doc; -} -export function isRxDocument(obj) { - return typeof obj === 
'object' && obj !== null && 'isInstanceOfRxDocument' in obj; -} -export function beforeDocumentUpdateWrite(collection, newData, oldData) { - /** - * Meta values must always be merged - * instead of overwritten. - * This ensures that different plugins do not overwrite - * each others meta properties. - */ - newData._meta = Object.assign({}, oldData._meta, newData._meta); - - // ensure modifications are ok - if (overwritable.isDevMode()) { - collection.schema.validateChange(oldData, newData); - } - return collection._runHooks('pre', 'save', newData, oldData); -} -//# sourceMappingURL=rx-document.js.map \ No newline at end of file diff --git a/dist/esm/rx-document.js.map b/dist/esm/rx-document.js.map deleted file mode 100644 index 59738897396..00000000000 --- a/dist/esm/rx-document.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-document.js","names":["distinctUntilChanged","filter","map","shareReplay","startWith","clone","trimDots","pluginMissing","flatClone","PROMISE_RESOLVE_NULL","RXJS_SHARE_REPLAY_DEFAULTS","getProperty","getFromMapOrCreate","newRxError","runPluginHooks","getDocumentDataOfRxChangeEvent","overwritable","getSchemaByObjectPath","throwIfIsStorageWriteError","modifierFromPublicToInternal","basePrototype","primaryPath","_this","isInstanceOfRxDocument","undefined","collection","schema","primary","_data","revision","_rev","deleted$","$","pipe","d","_deleted","deleted$$","reactivity","database","getReactivityFactory","fromObservable","getLatest","deleted","latestDocData","_docCache","getLatestDocumentData","getCachedRxDocument","changeEvent","isLocal","documentId","prev","curr","docData","$$","get$","path","isDevMode","includes","finalFields","schemaObj","jsonSchema","data","get$$","obs","get","populate","value","ref","refCollection","collections","type","findByIds","exec","then","res","valuesIterator","values","Array","from","findOne","objPath","_propertyCache","valueObj","isArray","deepFreezeWhenDevMode","proxy","Proxy","target","property","las
tChar","charAt","length","endsWith","key","slice","toJSON","withMetaFields","_attachments","_meta","toMutableJSON","update","_updateObj","incrementalUpdate","updateCRDT","putAttachment","getAttachment","allAttachments","allAttachments$","modify","mutationFunction","_context","oldData","newData","_saveData","incrementalModify","incrementalWriteQueue","addWrite","result","patch","Object","entries","forEach","k","v","incrementalPatch","id","document","beforeDocumentUpdateWrite","writeResult","storageInstance","bulkWrite","previous","isError","error","_runHooks","success","remove","Promise","reject","deletedData","removedDocData","removed","incrementalRemove","newDoc","destroy","createRxDocumentConstructor","proto","constructor","RxDocumentConstructor","Map","prototype","createWithConstructor","jsonData","doc","isRxDocument","obj","assign","validateChange"],"sources":["../../src/rx-document.ts"],"sourcesContent":["import {\n Observable\n} from 'rxjs';\nimport {\n distinctUntilChanged,\n filter,\n map,\n shareReplay,\n startWith\n} from 'rxjs/operators';\nimport {\n clone,\n trimDots,\n pluginMissing,\n flatClone,\n PROMISE_RESOLVE_NULL,\n RXJS_SHARE_REPLAY_DEFAULTS,\n getProperty,\n getFromMapOrCreate\n} from './plugins/utils/index.ts';\nimport {\n newRxError\n} from './rx-error.ts';\nimport {\n runPluginHooks\n} from './hooks.ts';\n\nimport type {\n RxDocument,\n RxCollection,\n RxDocumentData,\n RxDocumentWriteData,\n UpdateQuery,\n CRDTEntry,\n ModifyFunction\n} from './types/index.d.ts';\nimport { getDocumentDataOfRxChangeEvent } from './rx-change-event.ts';\nimport { overwritable } from './overwritable.ts';\nimport { getSchemaByObjectPath } from './rx-schema-helper.ts';\nimport { throwIfIsStorageWriteError } from './rx-storage-helper.ts';\nimport { modifierFromPublicToInternal } from './incremental-write.ts';\n\nexport const basePrototype = {\n get primaryPath() {\n const _this: RxDocument = this as any;\n if (!_this.isInstanceOfRxDocument) {\n return undefined;\n 
}\n return _this.collection.schema.primaryPath;\n },\n get primary() {\n const _this: RxDocument = this as any;\n if (!_this.isInstanceOfRxDocument) {\n return undefined;\n }\n return (_this._data as any)[_this.primaryPath];\n },\n get revision() {\n const _this: RxDocument = this as any;\n if (!_this.isInstanceOfRxDocument) {\n return undefined;\n }\n return _this._data._rev;\n },\n get deleted$() {\n const _this: RxDocument = this as any;\n if (!_this.isInstanceOfRxDocument) {\n return undefined;\n }\n return _this.$.pipe(\n map((d: any) => d._data._deleted)\n );\n },\n get deleted$$() {\n const _this: RxDocument = this as any;\n const reactivity = _this.collection.database.getReactivityFactory();\n return reactivity.fromObservable(\n _this.deleted$,\n _this.getLatest().deleted,\n _this.collection.database\n );\n },\n get deleted() {\n const _this: RxDocument = this as any;\n if (!_this.isInstanceOfRxDocument) {\n return undefined;\n }\n return _this._data._deleted;\n },\n\n getLatest(this: RxDocument): RxDocument {\n const latestDocData = this.collection._docCache.getLatestDocumentData(this.primary);\n return this.collection._docCache.getCachedRxDocument(latestDocData);\n },\n\n /**\n * returns the observable which emits the plain-data of this document\n */\n get $(): Observable> {\n const _this: RxDocument<{}, {}, {}> = this as any;\n return _this.collection.$.pipe(\n filter(changeEvent => !changeEvent.isLocal),\n filter(changeEvent => changeEvent.documentId === this.primary),\n map(changeEvent => getDocumentDataOfRxChangeEvent(changeEvent)),\n startWith(_this.collection._docCache.getLatestDocumentData(this.primary)),\n distinctUntilChanged((prev, curr) => prev._rev === curr._rev),\n map(docData => (this as RxDocument).collection._docCache.getCachedRxDocument(docData)),\n shareReplay(RXJS_SHARE_REPLAY_DEFAULTS)\n );\n },\n get $$(): any {\n const _this: RxDocument = this as any;\n const reactivity = _this.collection.database.getReactivityFactory();\n return 
reactivity.fromObservable(\n _this.$,\n _this.getLatest()._data,\n _this.collection.database\n );\n },\n\n /**\n * returns observable of the value of the given path\n */\n get$(this: RxDocument, path: string): Observable {\n if (overwritable.isDevMode()) {\n if (path.includes('.item.')) {\n throw newRxError('DOC1', {\n path\n });\n }\n\n if (path === this.primaryPath) {\n throw newRxError('DOC2');\n }\n\n // final fields cannot be modified and so also not observed\n if (this.collection.schema.finalFields.includes(path)) {\n throw newRxError('DOC3', {\n path\n });\n }\n\n const schemaObj = getSchemaByObjectPath(\n this.collection.schema.jsonSchema,\n path\n );\n\n if (!schemaObj) {\n throw newRxError('DOC4', {\n path\n });\n }\n }\n\n return this.$\n .pipe(\n map(data => getProperty(data, path)),\n distinctUntilChanged()\n );\n },\n get$$(this: RxDocument, path: string) {\n const obs = this.get$(path);\n const reactivity = this.collection.database.getReactivityFactory();\n return reactivity.fromObservable(\n obs,\n this.getLatest().get(path),\n this.collection.database\n );\n },\n\n /**\n * populate the given path\n */\n populate(this: RxDocument, path: string): Promise {\n const schemaObj = getSchemaByObjectPath(\n this.collection.schema.jsonSchema,\n path\n );\n const value = this.get(path);\n if (!value) {\n return PROMISE_RESOLVE_NULL;\n }\n if (!schemaObj) {\n throw newRxError('DOC5', {\n path\n });\n }\n if (!schemaObj.ref) {\n throw newRxError('DOC6', {\n path,\n schemaObj\n });\n }\n\n const refCollection: RxCollection = this.collection.database.collections[schemaObj.ref];\n if (!refCollection) {\n throw newRxError('DOC7', {\n ref: schemaObj.ref,\n path,\n schemaObj\n });\n }\n\n if (schemaObj.type === 'array') {\n return refCollection.findByIds(value).exec().then(res => {\n const valuesIterator = res.values();\n return Array.from(valuesIterator) as any;\n });\n } else {\n return refCollection.findOne(value).exec();\n }\n },\n /**\n * get data by 
objectPath\n * @hotPath Performance here is really important,\n * run some tests before changing anything.\n */\n get(this: RxDocument, objPath: string): any | null {\n return getFromMapOrCreate(\n this._propertyCache,\n objPath,\n () => {\n const valueObj = getProperty(this._data, objPath);\n\n // direct return if array or non-object\n if (\n typeof valueObj !== 'object' ||\n valueObj === null ||\n Array.isArray(valueObj)\n ) {\n return overwritable.deepFreezeWhenDevMode(valueObj);\n }\n const _this = this;\n const proxy = new Proxy(\n /**\n * In dev-mode, the _data is deep-frozen\n * so we have to flat clone here so that\n * the proxy can work.\n */\n flatClone(valueObj),\n {\n get(target, property: any) {\n if (typeof property !== 'string') {\n return target[property];\n }\n const lastChar = property.charAt(property.length - 1);\n if (property.endsWith('$$')) {\n const key = property.slice(0, -2);\n return _this.get$$(trimDots(objPath + '.' + key));\n } else if (lastChar === '$') {\n const key = property.slice(0, -1);\n return _this.get$(trimDots(objPath + '.' + key));\n } else if (lastChar === '_') {\n const key = property.slice(0, -1);\n return _this.populate(trimDots(objPath + '.' + key));\n } else {\n return _this.get(trimDots(objPath + '.' 
+ property));\n }\n }\n });\n return proxy;\n }\n );\n\n },\n\n toJSON(this: RxDocument, withMetaFields = false) {\n if (!withMetaFields) {\n const data = flatClone(this._data);\n delete (data as any)._rev;\n delete (data as any)._attachments;\n delete (data as any)._deleted;\n delete (data as any)._meta;\n return overwritable.deepFreezeWhenDevMode(data);\n } else {\n return overwritable.deepFreezeWhenDevMode(this._data);\n }\n },\n toMutableJSON(this: RxDocument, withMetaFields = false) {\n return clone(this.toJSON(withMetaFields as any));\n },\n\n /**\n * updates document\n * @overwritten by plugin (optional)\n * @param updateObj mongodb-like syntax\n */\n update(_updateObj: UpdateQuery) {\n throw pluginMissing('update');\n },\n incrementalUpdate(_updateObj: UpdateQuery) {\n throw pluginMissing('update');\n },\n updateCRDT(_updateObj: CRDTEntry | CRDTEntry[]) {\n throw pluginMissing('crdt');\n },\n putAttachment() {\n throw pluginMissing('attachments');\n },\n getAttachment() {\n throw pluginMissing('attachments');\n },\n allAttachments() {\n throw pluginMissing('attachments');\n },\n get allAttachments$() {\n throw pluginMissing('attachments');\n },\n\n async modify(\n this: RxDocument,\n mutationFunction: ModifyFunction,\n // used by some plugins that wrap the method\n _context?: string\n ): Promise {\n const oldData = this._data;\n const newData: RxDocumentData = await modifierFromPublicToInternal(mutationFunction)(oldData) as any;\n return this._saveData(newData, oldData) as any;\n },\n\n /**\n * runs an incremental update over the document\n * @param function that takes the document-data and returns a new data-object\n */\n incrementalModify(\n this: RxDocument,\n mutationFunction: ModifyFunction,\n // used by some plugins that wrap the method\n _context?: string\n ): Promise {\n return this.collection.incrementalWriteQueue.addWrite(\n this._data,\n modifierFromPublicToInternal(mutationFunction)\n ).then(result => 
this.collection._docCache.getCachedRxDocument(result));\n },\n\n patch(\n this: RxDocument,\n patch: Partial\n ) {\n const oldData = this._data;\n const newData = clone(oldData);\n Object\n .entries(patch)\n .forEach(([k, v]) => {\n (newData as any)[k] = v;\n });\n return this._saveData(newData, oldData);\n },\n\n /**\n * patches the given properties\n */\n incrementalPatch(\n this: RxDocument,\n patch: Partial\n ): Promise> {\n return this.incrementalModify((docData) => {\n Object\n .entries(patch)\n .forEach(([k, v]) => {\n (docData as any)[k] = v;\n });\n return docData;\n });\n },\n\n /**\n * saves the new document-data\n * and handles the events\n */\n async _saveData(\n this: RxDocument,\n newData: RxDocumentWriteData,\n oldData: RxDocumentData\n ): Promise> {\n newData = flatClone(newData);\n\n // deleted documents cannot be changed\n if (this._data._deleted) {\n throw newRxError('DOC11', {\n id: this.primary,\n document: this\n });\n }\n await beforeDocumentUpdateWrite(this.collection, newData, oldData);\n const writeResult = await this.collection.storageInstance.bulkWrite([{\n previous: oldData,\n document: newData\n }], 'rx-document-save-data');\n\n const isError = writeResult.error[0];\n throwIfIsStorageWriteError(this.collection, this.primary, newData, isError);\n\n await this.collection._runHooks('post', 'save', newData, this);\n return this.collection._docCache.getCachedRxDocument(\n writeResult.success[0]\n );\n },\n\n /**\n * Remove the document.\n * Notice that there is no hard delete,\n * instead deleted documents get flagged with _deleted=true.\n */\n remove(this: RxDocument): Promise {\n const collection = this.collection;\n if (this.deleted) {\n return Promise.reject(newRxError('DOC13', {\n document: this,\n id: this.primary\n }));\n }\n\n const deletedData = flatClone(this._data);\n let removedDocData: RxDocumentData;\n return collection._runHooks('pre', 'remove', deletedData, this)\n .then(async () => {\n deletedData._deleted = true;\n const 
writeResult = await collection.storageInstance.bulkWrite([{\n previous: this._data,\n document: deletedData\n }], 'rx-document-remove');\n const isError = writeResult.error[0];\n throwIfIsStorageWriteError(collection, this.primary, deletedData, isError);\n return writeResult.success[0];\n })\n .then((removed) => {\n removedDocData = removed;\n return this.collection._runHooks('post', 'remove', deletedData, this);\n })\n .then(() => {\n return this.collection._docCache.getCachedRxDocument(removedDocData);\n });\n },\n incrementalRemove(this: RxDocument): Promise {\n return this.incrementalModify(async (docData) => {\n await this.collection._runHooks('pre', 'remove', docData, this);\n docData._deleted = true;\n return docData;\n }).then(async (newDoc) => {\n await this.collection._runHooks('post', 'remove', newDoc._data, newDoc);\n return newDoc;\n });\n },\n destroy() {\n throw newRxError('DOC14');\n }\n};\n\nexport function createRxDocumentConstructor(proto = basePrototype) {\n const constructor = function RxDocumentConstructor(\n this: RxDocument,\n collection: RxCollection,\n docData: RxDocumentData\n ) {\n this.collection = collection;\n\n // assume that this is always equal to the doc-data in the database\n this._data = docData;\n this._propertyCache = new Map();\n\n /**\n * because of the prototype-merge,\n * we can not use the native instanceof operator\n */\n this.isInstanceOfRxDocument = true;\n };\n constructor.prototype = proto;\n return constructor;\n}\n\nexport function createWithConstructor(\n constructor: any,\n collection: RxCollection,\n jsonData: RxDocumentData\n): RxDocument | null {\n const doc = new constructor(collection, jsonData);\n runPluginHooks('createRxDocument', doc);\n return doc;\n}\n\nexport function isRxDocument(obj: any): boolean {\n return typeof obj === 'object' && obj !== null && 'isInstanceOfRxDocument' in obj;\n}\n\n\nexport function beforeDocumentUpdateWrite(\n collection: RxCollection,\n newData: RxDocumentWriteData,\n 
oldData: RxDocumentData\n): Promise {\n /**\n * Meta values must always be merged\n * instead of overwritten.\n * This ensures that different plugins do not overwrite\n * each others meta properties.\n */\n newData._meta = Object.assign(\n {},\n oldData._meta,\n newData._meta\n );\n\n // ensure modifications are ok\n if (overwritable.isDevMode()) {\n collection.schema.validateChange(oldData, newData);\n }\n return collection._runHooks('pre', 'save', newData, oldData);\n}\n\n"],"mappings":"AAGA,SACIA,oBAAoB,EACpBC,MAAM,EACNC,GAAG,EACHC,WAAW,EACXC,SAAS,QACN,gBAAgB;AACvB,SACIC,KAAK,EACLC,QAAQ,EACRC,aAAa,EACbC,SAAS,EACTC,oBAAoB,EACpBC,0BAA0B,EAC1BC,WAAW,EACXC,kBAAkB,QACf,0BAA0B;AACjC,SACIC,UAAU,QACP,eAAe;AACtB,SACIC,cAAc,QACX,YAAY;AAWnB,SAASC,8BAA8B,QAAQ,sBAAsB;AACrE,SAASC,YAAY,QAAQ,mBAAmB;AAChD,SAASC,qBAAqB,QAAQ,uBAAuB;AAC7D,SAASC,0BAA0B,QAAQ,wBAAwB;AACnE,SAASC,4BAA4B,QAAQ,wBAAwB;AAErE,OAAO,IAAMC,aAAa,GAAG;EACzB,IAAIC,WAAWA,CAAA,EAAG;IACd,IAAMC,KAAiB,GAAG,IAAW;IACrC,IAAI,CAACA,KAAK,CAACC,sBAAsB,EAAE;MAC/B,OAAOC,SAAS;IACpB;IACA,OAAOF,KAAK,CAACG,UAAU,CAACC,MAAM,CAACL,WAAW;EAC9C,CAAC;EACD,IAAIM,OAAOA,CAAA,EAAG;IACV,IAAML,KAAiB,GAAG,IAAW;IACrC,IAAI,CAACA,KAAK,CAACC,sBAAsB,EAAE;MAC/B,OAAOC,SAAS;IACpB;IACA,OAAQF,KAAK,CAACM,KAAK,CAASN,KAAK,CAACD,WAAW,CAAC;EAClD,CAAC;EACD,IAAIQ,QAAQA,CAAA,EAAG;IACX,IAAMP,KAAiB,GAAG,IAAW;IACrC,IAAI,CAACA,KAAK,CAACC,sBAAsB,EAAE;MAC/B,OAAOC,SAAS;IACpB;IACA,OAAOF,KAAK,CAACM,KAAK,CAACE,IAAI;EAC3B,CAAC;EACD,IAAIC,QAAQA,CAAA,EAAG;IACX,IAAMT,KAAsB,GAAG,IAAW;IAC1C,IAAI,CAACA,KAAK,CAACC,sBAAsB,EAAE;MAC/B,OAAOC,SAAS;IACpB;IACA,OAAOF,KAAK,CAACU,CAAC,CAACC,IAAI,CACf/B,GAAG,CAAEgC,CAAM,IAAKA,CAAC,CAACN,KAAK,CAACO,QAAQ,CACpC,CAAC;EACL,CAAC;EACD,IAAIC,SAASA,CAAA,EAAG;IACZ,IAAMd,KAAiB,GAAG,IAAW;IACrC,IAAMe,UAAU,GAAGf,KAAK,CAACG,UAAU,CAACa,QAAQ,CAACC,oBAAoB,CAAC,CAAC;IACnE,OAAOF,UAAU,CAACG,cAAc,CAC5BlB,KAAK,CAACS,QAAQ,EACdT,KAAK,CAACmB,SAAS,CAAC,CAAC,CAACC,OAAO,EACzBpB,KAAK,CAACG,UAAU,CAACa,QACrB,CAAC;EACL,CAAC;EACD,IAAII,OAAOA,CAAA,EAAG;IACV,IAAMpB,KAAiB,GAAG,
IAAW;IACrC,IAAI,CAACA,KAAK,CAACC,sBAAsB,EAAE;MAC/B,OAAOC,SAAS;IACpB;IACA,OAAOF,KAAK,CAACM,KAAK,CAACO,QAAQ;EAC/B,CAAC;EAEDM,SAASA,CAAA,EAA+B;IACpC,IAAME,aAAa,GAAG,IAAI,CAAClB,UAAU,CAACmB,SAAS,CAACC,qBAAqB,CAAC,IAAI,CAAClB,OAAO,CAAC;IACnF,OAAO,IAAI,CAACF,UAAU,CAACmB,SAAS,CAACE,mBAAmB,CAACH,aAAa,CAAC;EACvE,CAAC;EAED;AACJ;AACA;EACI,IAAIX,CAACA,CAAA,EAAoC;IACrC,IAAMV,KAA6B,GAAG,IAAW;IACjD,OAAOA,KAAK,CAACG,UAAU,CAACO,CAAC,CAACC,IAAI,CAC1BhC,MAAM,CAAC8C,WAAW,IAAI,CAACA,WAAW,CAACC,OAAO,CAAC,EAC3C/C,MAAM,CAAC8C,WAAW,IAAIA,WAAW,CAACE,UAAU,KAAK,IAAI,CAACtB,OAAO,CAAC,EAC9DzB,GAAG,CAAC6C,WAAW,IAAIhC,8BAA8B,CAACgC,WAAW,CAAC,CAAC,EAC/D3C,SAAS,CAACkB,KAAK,CAACG,UAAU,CAACmB,SAAS,CAACC,qBAAqB,CAAC,IAAI,CAAClB,OAAO,CAAC,CAAC,EACzE3B,oBAAoB,CAAC,CAACkD,IAAI,EAAEC,IAAI,KAAKD,IAAI,CAACpB,IAAI,KAAKqB,IAAI,CAACrB,IAAI,CAAC,EAC7D5B,GAAG,CAACkD,OAAO,IAAK,IAAI,CAAqB3B,UAAU,CAACmB,SAAS,CAACE,mBAAmB,CAACM,OAAO,CAAC,CAAC,EAC3FjD,WAAW,CAACO,0BAA0B,CAC1C,CAAC;EACL,CAAC;EACD,IAAI2C,EAAEA,CAAA,EAAQ;IACV,IAAM/B,KAAiB,GAAG,IAAW;IACrC,IAAMe,UAAU,GAAGf,KAAK,CAACG,UAAU,CAACa,QAAQ,CAACC,oBAAoB,CAAC,CAAC;IACnE,OAAOF,UAAU,CAACG,cAAc,CAC5BlB,KAAK,CAACU,CAAC,EACPV,KAAK,CAACmB,SAAS,CAAC,CAAC,CAACb,KAAK,EACvBN,KAAK,CAACG,UAAU,CAACa,QACrB,CAAC;EACL,CAAC;EAED;AACJ;AACA;EACIgB,IAAIA,CAAmBC,IAAY,EAAmB;IAClD,IAAIvC,YAAY,CAACwC,SAAS,CAAC,CAAC,EAAE;MAC1B,IAAID,IAAI,CAACE,QAAQ,CAAC,QAAQ,CAAC,EAAE;QACzB,MAAM5C,UAAU,CAAC,MAAM,EAAE;UACrB0C;QACJ,CAAC,CAAC;MACN;MAEA,IAAIA,IAAI,KAAK,IAAI,CAAClC,WAAW,EAAE;QAC3B,MAAMR,UAAU,CAAC,MAAM,CAAC;MAC5B;;MAEA;MACA,IAAI,IAAI,CAACY,UAAU,CAACC,MAAM,CAACgC,WAAW,CAACD,QAAQ,CAACF,IAAI,CAAC,EAAE;QACnD,MAAM1C,UAAU,CAAC,MAAM,EAAE;UACrB0C;QACJ,CAAC,CAAC;MACN;MAEA,IAAMI,SAAS,GAAG1C,qBAAqB,CACnC,IAAI,CAACQ,UAAU,CAACC,MAAM,CAACkC,UAAU,EACjCL,IACJ,CAAC;MAED,IAAI,CAACI,SAAS,EAAE;QACZ,MAAM9C,UAAU,CAAC,MAAM,EAAE;UACrB0C;QACJ,CAAC,CAAC;MACN;IACJ;IAEA,OAAO,IAAI,CAACvB,CAAC,CACRC,IAAI,CACD/B,GAAG,CAAC2D,IAAI,IAAIlD,WAAW,CAACkD,IAAI,EAAEN,IAAI,CAAC,CAAC,EACpCvD,oBAAoB,CAAC,CACzB,CAAC;EACT,CAAC;EACD8D,KAAKA,
CAAmBP,IAAY,EAAE;IAClC,IAAMQ,GAAG,GAAG,IAAI,CAACT,IAAI,CAACC,IAAI,CAAC;IAC3B,IAAMlB,UAAU,GAAG,IAAI,CAACZ,UAAU,CAACa,QAAQ,CAACC,oBAAoB,CAAC,CAAC;IAClE,OAAOF,UAAU,CAACG,cAAc,CAC5BuB,GAAG,EACH,IAAI,CAACtB,SAAS,CAAC,CAAC,CAACuB,GAAG,CAACT,IAAI,CAAC,EAC1B,IAAI,CAAC9B,UAAU,CAACa,QACpB,CAAC;EACL,CAAC;EAED;AACJ;AACA;EACI2B,QAAQA,CAAmBV,IAAY,EAA8B;IACjE,IAAMI,SAAS,GAAG1C,qBAAqB,CACnC,IAAI,CAACQ,UAAU,CAACC,MAAM,CAACkC,UAAU,EACjCL,IACJ,CAAC;IACD,IAAMW,KAAK,GAAG,IAAI,CAACF,GAAG,CAACT,IAAI,CAAC;IAC5B,IAAI,CAACW,KAAK,EAAE;MACR,OAAOzD,oBAAoB;IAC/B;IACA,IAAI,CAACkD,SAAS,EAAE;MACZ,MAAM9C,UAAU,CAAC,MAAM,EAAE;QACrB0C;MACJ,CAAC,CAAC;IACN;IACA,IAAI,CAACI,SAAS,CAACQ,GAAG,EAAE;MAChB,MAAMtD,UAAU,CAAC,MAAM,EAAE;QACrB0C,IAAI;QACJI;MACJ,CAAC,CAAC;IACN;IAEA,IAAMS,aAA2B,GAAG,IAAI,CAAC3C,UAAU,CAACa,QAAQ,CAAC+B,WAAW,CAACV,SAAS,CAACQ,GAAG,CAAC;IACvF,IAAI,CAACC,aAAa,EAAE;MAChB,MAAMvD,UAAU,CAAC,MAAM,EAAE;QACrBsD,GAAG,EAAER,SAAS,CAACQ,GAAG;QAClBZ,IAAI;QACJI;MACJ,CAAC,CAAC;IACN;IAEA,IAAIA,SAAS,CAACW,IAAI,KAAK,OAAO,EAAE;MAC5B,OAAOF,aAAa,CAACG,SAAS,CAACL,KAAK,CAAC,CAACM,IAAI,CAAC,CAAC,CAACC,IAAI,CAACC,GAAG,IAAI;QACrD,IAAMC,cAAc,GAAGD,GAAG,CAACE,MAAM,CAAC,CAAC;QACnC,OAAOC,KAAK,CAACC,IAAI,CAACH,cAAc,CAAC;MACrC,CAAC,CAAC;IACN,CAAC,MAAM;MACH,OAAOP,aAAa,CAACW,OAAO,CAACb,KAAK,CAAC,CAACM,IAAI,CAAC,CAAC;IAC9C;EACJ,CAAC;EACD;AACJ;AACA;AACA;AACA;EACIR,GAAGA,CAAmBgB,OAAe,EAAc;IAC/C,OAAOpE,kBAAkB,CACrB,IAAI,CAACqE,cAAc,EACnBD,OAAO,EACP,MAAM;MACF,IAAME,QAAQ,GAAGvE,WAAW,CAAC,IAAI,CAACiB,KAAK,EAAEoD,OAAO,CAAC;;MAEjD;MACA,IACI,OAAOE,QAAQ,KAAK,QAAQ,IAC5BA,QAAQ,KAAK,IAAI,IACjBL,KAAK,CAACM,OAAO,CAACD,QAAQ,CAAC,EACzB;QACE,OAAOlE,YAAY,CAACoE,qBAAqB,CAACF,QAAQ,CAAC;MACvD;MACA,IAAM5D,KAAK,GAAG,IAAI;MAClB,IAAM+D,KAAK,GAAG,IAAIC,KAAK;MACnB;AACpB;AACA;AACA;AACA;MACoB9E,SAAS,CAAC0E,QAAQ,CAAC,EACnB;QACIlB,GAAGA,CAACuB,MAAM,EAAEC,QAAa,EAAE;UACvB,IAAI,OAAOA,QAAQ,KAAK,QAAQ,EAAE;YAC9B,OAAOD,MAAM,CAACC,QAAQ,CAAC;UAC3B;UACA,IAAMC,QAAQ,GAAGD,QAAQ,CAACE,MAAM,CAACF,QAAQ,CAACG,MAAM,GAAG,CAAC,CAAC;UACrD,IAAIH,QAAQ,CAACI,QAAQ,CAAC,IAAI,CAAC,E
AAE;YACzB,IAAMC,GAAG,GAAGL,QAAQ,CAACM,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;YACjC,OAAOxE,KAAK,CAACwC,KAAK,CAACxD,QAAQ,CAAC0E,OAAO,GAAG,GAAG,GAAGa,GAAG,CAAC,CAAC;UACrD,CAAC,MAAM,IAAIJ,QAAQ,KAAK,GAAG,EAAE;YACzB,IAAMI,IAAG,GAAGL,QAAQ,CAACM,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;YACjC,OAAOxE,KAAK,CAACgC,IAAI,CAAChD,QAAQ,CAAC0E,OAAO,GAAG,GAAG,GAAGa,IAAG,CAAC,CAAC;UACpD,CAAC,MAAM,IAAIJ,QAAQ,KAAK,GAAG,EAAE;YACzB,IAAMI,KAAG,GAAGL,QAAQ,CAACM,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;YACjC,OAAOxE,KAAK,CAAC2C,QAAQ,CAAC3D,QAAQ,CAAC0E,OAAO,GAAG,GAAG,GAAGa,KAAG,CAAC,CAAC;UACxD,CAAC,MAAM;YACH,OAAOvE,KAAK,CAAC0C,GAAG,CAAC1D,QAAQ,CAAC0E,OAAO,GAAG,GAAG,GAAGQ,QAAQ,CAAC,CAAC;UACxD;QACJ;MACJ,CAAC,CAAC;MACN,OAAOH,KAAK;IAChB,CACJ,CAAC;EAEL,CAAC;EAEDU,MAAMA,CAAmBC,cAAc,GAAG,KAAK,EAAE;IAC7C,IAAI,CAACA,cAAc,EAAE;MACjB,IAAMnC,IAAI,GAAGrD,SAAS,CAAC,IAAI,CAACoB,KAAK,CAAC;MAClC,OAAQiC,IAAI,CAAS/B,IAAI;MACzB,OAAQ+B,IAAI,CAASoC,YAAY;MACjC,OAAQpC,IAAI,CAAS1B,QAAQ;MAC7B,OAAQ0B,IAAI,CAASqC,KAAK;MAC1B,OAAOlF,YAAY,CAACoE,qBAAqB,CAACvB,IAAI,CAAC;IACnD,CAAC,MAAM;MACH,OAAO7C,YAAY,CAACoE,qBAAqB,CAAC,IAAI,CAACxD,KAAK,CAAC;IACzD;EACJ,CAAC;EACDuE,aAAaA,CAAmBH,cAAc,GAAG,KAAK,EAAE;IACpD,OAAO3F,KAAK,CAAC,IAAI,CAAC0F,MAAM,CAACC,cAAqB,CAAC,CAAC;EACpD,CAAC;EAED;AACJ;AACA;AACA;AACA;EACII,MAAMA,CAACC,UAA4B,EAAE;IACjC,MAAM9F,aAAa,CAAC,QAAQ,CAAC;EACjC,CAAC;EACD+F,iBAAiBA,CAACD,UAA4B,EAAE;IAC5C,MAAM9F,aAAa,CAAC,QAAQ,CAAC;EACjC,CAAC;EACDgG,UAAUA,CAACF,UAA6C,EAAE;IACtD,MAAM9F,aAAa,CAAC,MAAM,CAAC;EAC/B,CAAC;EACDiG,aAAaA,CAAA,EAAG;IACZ,MAAMjG,aAAa,CAAC,aAAa,CAAC;EACtC,CAAC;EACDkG,aAAaA,CAAA,EAAG;IACZ,MAAMlG,aAAa,CAAC,aAAa,CAAC;EACtC,CAAC;EACDmG,cAAcA,CAAA,EAAG;IACb,MAAMnG,aAAa,CAAC,aAAa,CAAC;EACtC,CAAC;EACD,IAAIoG,eAAeA,CAAA,EAAG;IAClB,MAAMpG,aAAa,CAAC,aAAa,CAAC;EACtC,CAAC;EAED,MAAMqG,MAAMA,CAERC,gBAA2C;EAC3C;EACAC,QAAiB,EACE;IACnB,IAAMC,OAAO,GAAG,IAAI,CAACnF,KAAK;IAC1B,IAAMoF,OAAkC,GAAG,MAAM7F,4BAA4B,CAAY0F,gBAAgB,CAAC,CAACE,OAAO,CAAQ;IAC1H,OAAO,IAAI,CAACE,SAAS,CAACD,OAAO,EAAED,OAAO,CAAC;EAC3C,CAAC;EAED;AACJ;AACA;AACA;EACIG,iBAAiBA,CAEb
L,gBAAqC;EACrC;EACAC,QAAiB,EACE;IACnB,OAAO,IAAI,CAACrF,UAAU,CAAC0F,qBAAqB,CAACC,QAAQ,CACjD,IAAI,CAACxF,KAAK,EACVT,4BAA4B,CAAC0F,gBAAgB,CACjD,CAAC,CAACpC,IAAI,CAAC4C,MAAM,IAAI,IAAI,CAAC5F,UAAU,CAACmB,SAAS,CAACE,mBAAmB,CAACuE,MAAM,CAAC,CAAC;EAC3E,CAAC;EAEDC,KAAKA,CAEDA,KAAyB,EAC3B;IACE,IAAMP,OAAO,GAAG,IAAI,CAACnF,KAAK;IAC1B,IAAMoF,OAAO,GAAG3G,KAAK,CAAC0G,OAAO,CAAC;IAC9BQ,MAAM,CACDC,OAAO,CAACF,KAAK,CAAC,CACdG,OAAO,CAAC,CAAC,CAACC,CAAC,EAAEC,CAAC,CAAC,KAAK;MAChBX,OAAO,CAASU,CAAC,CAAC,GAAGC,CAAC;IAC3B,CAAC,CAAC;IACN,OAAO,IAAI,CAACV,SAAS,CAACD,OAAO,EAAED,OAAO,CAAC;EAC3C,CAAC;EAED;AACJ;AACA;EACIa,gBAAgBA,CAEZN,KAA8B,EACK;IACnC,OAAO,IAAI,CAACJ,iBAAiB,CAAE9D,OAAO,IAAK;MACvCmE,MAAM,CACDC,OAAO,CAACF,KAAK,CAAC,CACdG,OAAO,CAAC,CAAC,CAACC,CAAC,EAAEC,CAAC,CAAC,KAAK;QAChBvE,OAAO,CAASsE,CAAC,CAAC,GAAGC,CAAC;MAC3B,CAAC,CAAC;MACN,OAAOvE,OAAO;IAClB,CAAC,CAAC;EACN,CAAC;EAED;AACJ;AACA;AACA;EACI,MAAM6D,SAASA,CAEXD,OAAuC,EACvCD,OAAkC,EACJ;IAC9BC,OAAO,GAAGxG,SAAS,CAACwG,OAAO,CAAC;;IAE5B;IACA,IAAI,IAAI,CAACpF,KAAK,CAACO,QAAQ,EAAE;MACrB,MAAMtB,UAAU,CAAC,OAAO,EAAE;QACtBgH,EAAE,EAAE,IAAI,CAAClG,OAAO;QAChBmG,QAAQ,EAAE;MACd,CAAC,CAAC;IACN;IACA,MAAMC,yBAAyB,CAAC,IAAI,CAACtG,UAAU,EAAEuF,OAAO,EAAED,OAAO,CAAC;IAClE,IAAMiB,WAAW,GAAG,MAAM,IAAI,CAACvG,UAAU,CAACwG,eAAe,CAACC,SAAS,CAAC,CAAC;MACjEC,QAAQ,EAAEpB,OAAO;MACjBe,QAAQ,EAAEd;IACd,CAAC,CAAC,EAAE,uBAAuB,CAAC;IAE5B,IAAMoB,OAAO,GAAGJ,WAAW,CAACK,KAAK,CAAC,CAAC,CAAC;IACpCnH,0BAA0B,CAAC,IAAI,CAACO,UAAU,EAAE,IAAI,CAACE,OAAO,EAAEqF,OAAO,EAAEoB,OAAO,CAAC;IAE3E,MAAM,IAAI,CAAC3G,UAAU,CAAC6G,SAAS,CAAC,MAAM,EAAE,MAAM,EAAEtB,OAAO,EAAE,IAAI,CAAC;IAC9D,OAAO,IAAI,CAACvF,UAAU,CAACmB,SAAS,CAACE,mBAAmB,CAChDkF,WAAW,CAACO,OAAO,CAAC,CAAC,CACzB,CAAC;EACL,CAAC;EAED;AACJ;AACA;AACA;AACA;EACIC,MAAMA,CAAA,EAAwC;IAC1C,IAAM/G,UAAU,GAAG,IAAI,CAACA,UAAU;IAClC,IAAI,IAAI,CAACiB,OAAO,EAAE;MACd,OAAO+F,OAAO,CAACC,MAAM,CAAC7H,UAAU,CAAC,OAAO,EAAE;QACtCiH,QAAQ,EAAE,IAAI;QACdD,EAAE,EAAE,IAAI,CAAClG;MACb,CAAC,CAAC,CAAC;IACP;IAEA,IAAMgH,WAAW,GAAGnI,SAAS,CAAC,IAAI,CAACoB,KAAK,CAAC;IACzC,IAAIg
H,cAAmC;IACvC,OAAOnH,UAAU,CAAC6G,SAAS,CAAC,KAAK,EAAE,QAAQ,EAAEK,WAAW,EAAE,IAAI,CAAC,CAC1DlE,IAAI,CAAC,YAAY;MACdkE,WAAW,CAACxG,QAAQ,GAAG,IAAI;MAC3B,IAAM6F,WAAW,GAAG,MAAMvG,UAAU,CAACwG,eAAe,CAACC,SAAS,CAAC,CAAC;QAC5DC,QAAQ,EAAE,IAAI,CAACvG,KAAK;QACpBkG,QAAQ,EAAEa;MACd,CAAC,CAAC,EAAE,oBAAoB,CAAC;MACzB,IAAMP,OAAO,GAAGJ,WAAW,CAACK,KAAK,CAAC,CAAC,CAAC;MACpCnH,0BAA0B,CAACO,UAAU,EAAE,IAAI,CAACE,OAAO,EAAEgH,WAAW,EAAEP,OAAO,CAAC;MAC1E,OAAOJ,WAAW,CAACO,OAAO,CAAC,CAAC,CAAC;IACjC,CAAC,CAAC,CACD9D,IAAI,CAAEoE,OAAO,IAAK;MACfD,cAAc,GAAGC,OAAO;MACxB,OAAO,IAAI,CAACpH,UAAU,CAAC6G,SAAS,CAAC,MAAM,EAAE,QAAQ,EAAEK,WAAW,EAAE,IAAI,CAAC;IACzE,CAAC,CAAC,CACDlE,IAAI,CAAC,MAAM;MACR,OAAO,IAAI,CAAChD,UAAU,CAACmB,SAAS,CAACE,mBAAmB,CAAC8F,cAAc,CAAC;IACxE,CAAC,CAAC;EACV,CAAC;EACDE,iBAAiBA,CAAA,EAAwC;IACrD,OAAO,IAAI,CAAC5B,iBAAiB,CAAC,MAAO9D,OAAO,IAAK;MAC7C,MAAM,IAAI,CAAC3B,UAAU,CAAC6G,SAAS,CAAC,KAAK,EAAE,QAAQ,EAAElF,OAAO,EAAE,IAAI,CAAC;MAC/DA,OAAO,CAACjB,QAAQ,GAAG,IAAI;MACvB,OAAOiB,OAAO;IAClB,CAAC,CAAC,CAACqB,IAAI,CAAC,MAAOsE,MAAM,IAAK;MACtB,MAAM,IAAI,CAACtH,UAAU,CAAC6G,SAAS,CAAC,MAAM,EAAE,QAAQ,EAAES,MAAM,CAACnH,KAAK,EAAEmH,MAAM,CAAC;MACvE,OAAOA,MAAM;IACjB,CAAC,CAAC;EACN,CAAC;EACDC,OAAOA,CAAA,EAAG;IACN,MAAMnI,UAAU,CAAC,OAAO,CAAC;EAC7B;AACJ,CAAC;AAED,OAAO,SAASoI,2BAA2BA,CAACC,KAAK,GAAG9H,aAAa,EAAE;EAC/D,IAAM+H,WAAW,GAAG,SAASC,qBAAqBA,CAE9C3H,UAAwB,EACxB2B,OAA4B,EAC9B;IACE,IAAI,CAAC3B,UAAU,GAAGA,UAAU;;IAE5B;IACA,IAAI,CAACG,KAAK,GAAGwB,OAAO;IACpB,IAAI,CAAC6B,cAAc,GAAG,IAAIoE,GAAG,CAAc,CAAC;;IAE5C;AACR;AACA;AACA;IACQ,IAAI,CAAC9H,sBAAsB,GAAG,IAAI;EACtC,CAAC;EACD4H,WAAW,CAACG,SAAS,GAAGJ,KAAK;EAC7B,OAAOC,WAAW;AACtB;AAEA,OAAO,SAASI,qBAAqBA,CACjCJ,WAAgB,EAChB1H,UAAmC,EACnC+H,QAAmC,EACP;EAC5B,IAAMC,GAAG,GAAG,IAAIN,WAAW,CAAC1H,UAAU,EAAE+H,QAAQ,CAAC;EACjD1I,cAAc,CAAC,kBAAkB,EAAE2I,GAAG,CAAC;EACvC,OAAOA,GAAG;AACd;AAEA,OAAO,SAASC,YAAYA,CAACC,GAAQ,EAAW;EAC5C,OAAO,OAAOA,GAAG,KAAK,QAAQ,IAAIA,GAAG,KAAK,IAAI,IAAI,wBAAwB,IAAIA,GAAG;AACrF;AAGA,OAAO,SAAS5B,yBAAyBA,CACrCtG,UAAmC,EACnCuF,OAAuC,EACvCD,OAAkC,EACtB;EACZ;AACJ;
AACA;AACA;AACA;AACA;EACIC,OAAO,CAACd,KAAK,GAAGqB,MAAM,CAACqC,MAAM,CACzB,CAAC,CAAC,EACF7C,OAAO,CAACb,KAAK,EACbc,OAAO,CAACd,KACZ,CAAC;;EAED;EACA,IAAIlF,YAAY,CAACwC,SAAS,CAAC,CAAC,EAAE;IAC1B/B,UAAU,CAACC,MAAM,CAACmI,cAAc,CAAC9C,OAAO,EAAEC,OAAO,CAAC;EACtD;EACA,OAAOvF,UAAU,CAAC6G,SAAS,CAAC,KAAK,EAAE,MAAM,EAAEtB,OAAO,EAAED,OAAO,CAAC;AAChE","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/rx-error.js b/dist/esm/rx-error.js deleted file mode 100644 index a2ac8ea78af..00000000000 --- a/dist/esm/rx-error.js +++ /dev/null @@ -1,133 +0,0 @@ -import _createClass from "@babel/runtime/helpers/createClass"; -import _inheritsLoose from "@babel/runtime/helpers/inheritsLoose"; -import _wrapNativeSuper from "@babel/runtime/helpers/wrapNativeSuper"; -/** - * here we use custom errors with the additional field 'parameters' - */ - -import { overwritable } from "./overwritable.js"; -/** - * transform an object of parameters to a presentable string - */ -function parametersToString(parameters) { - var ret = ''; - if (Object.keys(parameters).length === 0) return ret; - ret += 'Given parameters: {\n'; - ret += Object.keys(parameters).map(k => { - var paramStr = '[object Object]'; - try { - if (k === 'errors') { - paramStr = parameters[k].map(err => JSON.stringify(err, Object.getOwnPropertyNames(err))); - } else { - paramStr = JSON.stringify(parameters[k], function (_k, v) { - return v === undefined ? 
null : v; - }, 2); - } - } catch (e) {} - return k + ':' + paramStr; - }).join('\n'); - ret += '}'; - return ret; -} -function messageForError(message, code, parameters) { - return 'RxError (' + code + '):' + '\n' + message + '\n' + parametersToString(parameters); -} -export var RxError = /*#__PURE__*/function (_Error) { - // always true, use this to detect if its an rxdb-error - - function RxError(code, message, parameters = {}) { - var _this; - var mes = messageForError(message, code, parameters); - _this = _Error.call(this, mes) || this; - _this.code = code; - _this.message = mes; - _this.url = getErrorUrl(code); - _this.parameters = parameters; - _this.rxdb = true; // tag them as internal - return _this; - } - _inheritsLoose(RxError, _Error); - var _proto = RxError.prototype; - _proto.toString = function toString() { - return this.message; - }; - return _createClass(RxError, [{ - key: "name", - get: function () { - return 'RxError (' + this.code + ')'; - } - }, { - key: "typeError", - get: function () { - return false; - } - }]); -}( /*#__PURE__*/_wrapNativeSuper(Error)); -export var RxTypeError = /*#__PURE__*/function (_TypeError) { - // always true, use this to detect if its an rxdb-error - - function RxTypeError(code, message, parameters = {}) { - var _this2; - var mes = messageForError(message, code, parameters); - _this2 = _TypeError.call(this, mes) || this; - _this2.code = code; - _this2.message = mes; - _this2.url = getErrorUrl(code); - _this2.parameters = parameters; - _this2.rxdb = true; // tag them as internal - return _this2; - } - _inheritsLoose(RxTypeError, _TypeError); - var _proto2 = RxTypeError.prototype; - _proto2.toString = function toString() { - return this.message; - }; - return _createClass(RxTypeError, [{ - key: "name", - get: function () { - return 'RxTypeError (' + this.code + ')'; - } - }, { - key: "typeError", - get: function () { - return true; - } - }]); -}( /*#__PURE__*/_wrapNativeSuper(TypeError)); -export function 
getErrorUrl(code) { - return 'https://rxdb.info/errors.html?console=errors#' + code; -} -export function errorUrlHint(code) { - return '\n You can find out more about this error here: ' + getErrorUrl(code) + ' '; -} -export function newRxError(code, parameters) { - return new RxError(code, overwritable.tunnelErrorMessage(code) + errorUrlHint(code), parameters); -} -export function newRxTypeError(code, parameters) { - return new RxTypeError(code, overwritable.tunnelErrorMessage(code) + errorUrlHint(code), parameters); -} - -/** - * Returns the error if it is a 409 conflict, - * return false if it is another error. - */ -export function isBulkWriteConflictError(err) { - if (err && err.status === 409) { - return err; - } else { - return false; - } -} -var STORAGE_WRITE_ERROR_CODE_TO_MESSAGE = { - 409: 'document write conflict', - 422: 'schema validation error', - 510: 'attachment data missing' -}; -export function rxStorageWriteErrorToRxError(err) { - return newRxError('COL20', { - name: STORAGE_WRITE_ERROR_CODE_TO_MESSAGE[err.status], - document: err.documentId, - writeError: err - }); -} -//# sourceMappingURL=rx-error.js.map \ No newline at end of file diff --git a/dist/esm/rx-error.js.map b/dist/esm/rx-error.js.map deleted file mode 100644 index c840de637f2..00000000000 --- a/dist/esm/rx-error.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"rx-error.js","names":["overwritable","parametersToString","parameters","ret","Object","keys","length","map","k","paramStr","err","JSON","stringify","getOwnPropertyNames","_k","v","undefined","e","join","messageForError","message","code","RxError","_Error","_this","mes","call","url","getErrorUrl","rxdb","_inheritsLoose","_proto","prototype","toString","_createClass","key","get","_wrapNativeSuper","Error","RxTypeError","_TypeError","_this2","_proto2","TypeError","errorUrlHint","newRxError","tunnelErrorMessage","newRxTypeError","isBulkWriteConflictError","status","STORAGE_WRITE_ERROR_CODE_TO_MESSAGE","rxStorageWriteErrorToRxError","name","document","documentId","writeError"],"sources":["../../src/rx-error.ts"],"sourcesContent":["/**\n * here we use custom errors with the additional field 'parameters'\n */\n\nimport { overwritable } from './overwritable.ts';\nimport type {\n RxErrorParameters,\n RxErrorKey,\n RxStorageWriteError,\n RxStorageWriteErrorConflict\n} from './types/index.d.ts';\n\n/**\n * transform an object of parameters to a presentable string\n */\nfunction parametersToString(parameters: any): string {\n let ret = '';\n if (Object.keys(parameters).length === 0)\n return ret;\n ret += 'Given parameters: {\\n';\n ret += Object.keys(parameters)\n .map(k => {\n let paramStr = '[object Object]';\n try {\n if (k === 'errors') {\n paramStr = parameters[k].map((err: any) => JSON.stringify(err, Object.getOwnPropertyNames(err)));\n } else {\n paramStr = JSON.stringify(parameters[k], function (_k, v) {\n return v === undefined ? 
null : v;\n }, 2);\n }\n } catch (e) { }\n return k + ':' + paramStr;\n })\n .join('\\n');\n ret += '}';\n return ret;\n}\n\nfunction messageForError(\n message: string,\n code: string,\n parameters: any\n): string {\n return 'RxError (' + code + '):' + '\\n' +\n message + '\\n' +\n parametersToString(parameters);\n}\n\nexport class RxError extends Error {\n public code: RxErrorKey;\n public message: string;\n public url: string;\n public parameters: RxErrorParameters;\n // always true, use this to detect if its an rxdb-error\n public rxdb: true;\n constructor(\n code: RxErrorKey,\n message: string,\n parameters: RxErrorParameters = {}\n ) {\n const mes = messageForError(message, code, parameters);\n super(mes);\n this.code = code;\n this.message = mes;\n this.url = getErrorUrl(code);\n this.parameters = parameters;\n this.rxdb = true; // tag them as internal\n }\n get name(): string {\n return 'RxError (' + this.code + ')';\n }\n toString(): string {\n return this.message;\n }\n get typeError(): boolean {\n return false;\n }\n}\n\nexport class RxTypeError extends TypeError {\n public code: RxErrorKey;\n public message: string;\n public url: string;\n public parameters: RxErrorParameters;\n // always true, use this to detect if its an rxdb-error\n public rxdb: true;\n constructor(\n code: RxErrorKey,\n message: string,\n parameters: RxErrorParameters = {}\n ) {\n const mes = messageForError(message, code, parameters);\n super(mes);\n this.code = code;\n this.message = mes;\n this.url = getErrorUrl(code);\n this.parameters = parameters;\n this.rxdb = true; // tag them as internal\n }\n get name(): string {\n return 'RxTypeError (' + this.code + ')';\n }\n toString(): string {\n return this.message;\n }\n get typeError(): boolean {\n return true;\n }\n}\n\n\nexport function getErrorUrl(code: RxErrorKey) {\n return 'https://rxdb.info/errors.html?console=errors#' + code;\n}\n\nexport function errorUrlHint(code: RxErrorKey) {\n return '\\n You can find out more about 
this error here: ' + getErrorUrl(code) + ' ';\n}\n\nexport function newRxError(\n code: RxErrorKey,\n parameters?: RxErrorParameters\n): RxError {\n return new RxError(\n code,\n overwritable.tunnelErrorMessage(code) + errorUrlHint(code),\n parameters\n );\n}\n\nexport function newRxTypeError(\n code: RxErrorKey,\n parameters?: RxErrorParameters\n): RxTypeError {\n return new RxTypeError(\n code,\n overwritable.tunnelErrorMessage(code) + errorUrlHint(code),\n parameters\n );\n}\n\n\n/**\n * Returns the error if it is a 409 conflict,\n * return false if it is another error.\n */\nexport function isBulkWriteConflictError(\n err?: RxStorageWriteError | any\n): RxStorageWriteErrorConflict | false {\n if (\n err &&\n err.status === 409\n ) {\n return err;\n } else {\n return false;\n }\n}\n\n\nconst STORAGE_WRITE_ERROR_CODE_TO_MESSAGE: { [k: number]: string; } = {\n 409: 'document write conflict',\n 422: 'schema validation error',\n 510: 'attachment data missing'\n};\n\nexport function rxStorageWriteErrorToRxError(err: RxStorageWriteError): RxError {\n return newRxError('COL20', {\n name: STORAGE_WRITE_ERROR_CODE_TO_MESSAGE[err.status],\n document: err.documentId,\n writeError: err\n 
});\n}\n"],"mappings":";;;AAAA;AACA;AACA;;AAEA,SAASA,YAAY,QAAQ,mBAAmB;AAQhD;AACA;AACA;AACA,SAASC,kBAAkBA,CAACC,UAAe,EAAU;EACjD,IAAIC,GAAG,GAAG,EAAE;EACZ,IAAIC,MAAM,CAACC,IAAI,CAACH,UAAU,CAAC,CAACI,MAAM,KAAK,CAAC,EACpC,OAAOH,GAAG;EACdA,GAAG,IAAI,uBAAuB;EAC9BA,GAAG,IAAIC,MAAM,CAACC,IAAI,CAACH,UAAU,CAAC,CACzBK,GAAG,CAACC,CAAC,IAAI;IACN,IAAIC,QAAQ,GAAG,iBAAiB;IAChC,IAAI;MACA,IAAID,CAAC,KAAK,QAAQ,EAAE;QAChBC,QAAQ,GAAGP,UAAU,CAACM,CAAC,CAAC,CAACD,GAAG,CAAEG,GAAQ,IAAKC,IAAI,CAACC,SAAS,CAACF,GAAG,EAAEN,MAAM,CAACS,mBAAmB,CAACH,GAAG,CAAC,CAAC,CAAC;MACpG,CAAC,MAAM;QACHD,QAAQ,GAAGE,IAAI,CAACC,SAAS,CAACV,UAAU,CAACM,CAAC,CAAC,EAAE,UAAUM,EAAE,EAAEC,CAAC,EAAE;UACtD,OAAOA,CAAC,KAAKC,SAAS,GAAG,IAAI,GAAGD,CAAC;QACrC,CAAC,EAAE,CAAC,CAAC;MACT;IACJ,CAAC,CAAC,OAAOE,CAAC,EAAE,CAAE;IACd,OAAOT,CAAC,GAAG,GAAG,GAAGC,QAAQ;EAC7B,CAAC,CAAC,CACDS,IAAI,CAAC,IAAI,CAAC;EACff,GAAG,IAAI,GAAG;EACV,OAAOA,GAAG;AACd;AAEA,SAASgB,eAAeA,CACpBC,OAAe,EACfC,IAAY,EACZnB,UAAe,EACT;EACN,OAAO,WAAW,GAAGmB,IAAI,GAAG,IAAI,GAAG,IAAI,GACnCD,OAAO,GAAG,IAAI,GACdnB,kBAAkB,CAACC,UAAU,CAAC;AACtC;AAEA,WAAaoB,OAAO,0BAAAC,MAAA;EAKhB;;EAEA,SAAAD,QACID,IAAgB,EAChBD,OAAe,EACflB,UAA6B,GAAG,CAAC,CAAC,EACpC;IAAA,IAAAsB,KAAA;IACE,IAAMC,GAAG,GAAGN,eAAe,CAACC,OAAO,EAAEC,IAAI,EAAEnB,UAAU,CAAC;IACtDsB,KAAA,GAAAD,MAAA,CAAAG,IAAA,OAAMD,GAAG,CAAC;IACVD,KAAA,CAAKH,IAAI,GAAGA,IAAI;IAChBG,KAAA,CAAKJ,OAAO,GAAGK,GAAG;IAClBD,KAAA,CAAKG,GAAG,GAAGC,WAAW,CAACP,IAAI,CAAC;IAC5BG,KAAA,CAAKtB,UAAU,GAAGA,UAAU;IAC5BsB,KAAA,CAAKK,IAAI,GAAG,IAAI,CAAC,CAAC;IAAA,OAAAL,KAAA;EACtB;EAACM,cAAA,CAAAR,OAAA,EAAAC,MAAA;EAAA,IAAAQ,MAAA,GAAAT,OAAA,CAAAU,SAAA;EAAAD,MAAA,CAIDE,QAAQ,GAAR,SAAAA,SAAA,EAAmB;IACf,OAAO,IAAI,CAACb,OAAO;EACvB,CAAC;EAAA,OAAAc,YAAA,CAAAZ,OAAA;IAAAa,GAAA;IAAAC,GAAA,EALD,SAAAA,CAAA,EAAmB;MACf,OAAO,WAAW,GAAG,IAAI,CAACf,IAAI,GAAG,GAAG;IACxC;EAAC;IAAAc,GAAA;IAAAC,GAAA,EAID,SAAAA,CAAA,EAAyB;MACrB,OAAO,KAAK;IAChB;EAAC;AAAA,gBAAAC,gBAAA,CA5BwBC,KAAK;AA+BlC,WAAaC,WAAW,0BAAAC,UAAA;EAKpB;;EAEA,SAAAD,YACIlB,IAAgB,EAChBD,OAAe,EACflB,UAA6B,GAAG,CAAC,CAAC,EACpC;IA
AA,IAAAuC,MAAA;IACE,IAAMhB,GAAG,GAAGN,eAAe,CAACC,OAAO,EAAEC,IAAI,EAAEnB,UAAU,CAAC;IACtDuC,MAAA,GAAAD,UAAA,CAAAd,IAAA,OAAMD,GAAG,CAAC;IACVgB,MAAA,CAAKpB,IAAI,GAAGA,IAAI;IAChBoB,MAAA,CAAKrB,OAAO,GAAGK,GAAG;IAClBgB,MAAA,CAAKd,GAAG,GAAGC,WAAW,CAACP,IAAI,CAAC;IAC5BoB,MAAA,CAAKvC,UAAU,GAAGA,UAAU;IAC5BuC,MAAA,CAAKZ,IAAI,GAAG,IAAI,CAAC,CAAC;IAAA,OAAAY,MAAA;EACtB;EAACX,cAAA,CAAAS,WAAA,EAAAC,UAAA;EAAA,IAAAE,OAAA,GAAAH,WAAA,CAAAP,SAAA;EAAAU,OAAA,CAIDT,QAAQ,GAAR,SAAAA,SAAA,EAAmB;IACf,OAAO,IAAI,CAACb,OAAO;EACvB,CAAC;EAAA,OAAAc,YAAA,CAAAK,WAAA;IAAAJ,GAAA;IAAAC,GAAA,EALD,SAAAA,CAAA,EAAmB;MACf,OAAO,eAAe,GAAG,IAAI,CAACf,IAAI,GAAG,GAAG;IAC5C;EAAC;IAAAc,GAAA;IAAAC,GAAA,EAID,SAAAA,CAAA,EAAyB;MACrB,OAAO,IAAI;IACf;EAAC;AAAA,gBAAAC,gBAAA,CA5B4BM,SAAS;AAgC1C,OAAO,SAASf,WAAWA,CAACP,IAAgB,EAAE;EAC1C,OAAO,+CAA+C,GAAGA,IAAI;AACjE;AAEA,OAAO,SAASuB,YAAYA,CAACvB,IAAgB,EAAE;EAC3C,OAAO,kDAAkD,GAAGO,WAAW,CAACP,IAAI,CAAC,GAAG,GAAG;AACvF;AAEA,OAAO,SAASwB,UAAUA,CACtBxB,IAAgB,EAChBnB,UAA8B,EACvB;EACP,OAAO,IAAIoB,OAAO,CACdD,IAAI,EACJrB,YAAY,CAAC8C,kBAAkB,CAACzB,IAAI,CAAC,GAAGuB,YAAY,CAACvB,IAAI,CAAC,EAC1DnB,UACJ,CAAC;AACL;AAEA,OAAO,SAAS6C,cAAcA,CAC1B1B,IAAgB,EAChBnB,UAA8B,EACnB;EACX,OAAO,IAAIqC,WAAW,CAClBlB,IAAI,EACJrB,YAAY,CAAC8C,kBAAkB,CAACzB,IAAI,CAAC,GAAGuB,YAAY,CAACvB,IAAI,CAAC,EAC1DnB,UACJ,CAAC;AACL;;AAGA;AACA;AACA;AACA;AACA,OAAO,SAAS8C,wBAAwBA,CACpCtC,GAA0C,EACI;EAC9C,IACIA,GAAG,IACHA,GAAG,CAACuC,MAAM,KAAK,GAAG,EACpB;IACE,OAAOvC,GAAG;EACd,CAAC,MAAM;IACH,OAAO,KAAK;EAChB;AACJ;AAGA,IAAMwC,mCAA6D,GAAG;EAClE,GAAG,EAAE,yBAAyB;EAC9B,GAAG,EAAE,yBAAyB;EAC9B,GAAG,EAAE;AACT,CAAC;AAED,OAAO,SAASC,4BAA4BA,CAACzC,GAA6B,EAAW;EACjF,OAAOmC,UAAU,CAAC,OAAO,EAAE;IACvBO,IAAI,EAAEF,mCAAmC,CAACxC,GAAG,CAACuC,MAAM,CAAC;IACrDI,QAAQ,EAAE3C,GAAG,CAAC4C,UAAU;IACxBC,UAAU,EAAE7C;EAChB,CAAC,CAAC;AACN","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/rx-query-helper.js b/dist/esm/rx-query-helper.js deleted file mode 100644 index 9eeb6694c7d..00000000000 --- a/dist/esm/rx-query-helper.js +++ /dev/null @@ -1,206 
+0,0 @@ -import { LOGICAL_OPERATORS } from "./query-planner.js"; -import { getPrimaryFieldOfPrimaryKey } from "./rx-schema-helper.js"; -import { clone, firstPropertyNameOfObject, toArray, isMaybeReadonlyArray, flatClone, objectPathMonad } from "./plugins/utils/index.js"; -import { compare as mingoSortComparator } from 'mingo/util'; -import { newRxError } from "./rx-error.js"; -import { getMingoQuery } from "./rx-query-mingo.js"; - -/** - * Normalize the query to ensure we have all fields set - * and queries that represent the same query logic are detected as equal by the caching. - */ -export function normalizeMangoQuery(schema, mangoQuery) { - var primaryKey = getPrimaryFieldOfPrimaryKey(schema.primaryKey); - mangoQuery = flatClone(mangoQuery); - var normalizedMangoQuery = clone(mangoQuery); - if (typeof normalizedMangoQuery.skip !== 'number') { - normalizedMangoQuery.skip = 0; - } - if (!normalizedMangoQuery.selector) { - normalizedMangoQuery.selector = {}; - } else { - normalizedMangoQuery.selector = normalizedMangoQuery.selector; - /** - * In mango query, it is possible to have an - * equals comparison by directly assigning a value - * to a property, without the '$eq' operator. - * Like: - * selector: { - * foo: 'bar' - * } - * For normalization, we have to normalize this - * so our checks can perform properly. - * - * - * TODO this must work recursive with nested queries that - * contain multiple selectors via $and or $or etc. - */ - Object.entries(normalizedMangoQuery.selector).forEach(([field, matcher]) => { - if (typeof matcher !== 'object' || matcher === null) { - normalizedMangoQuery.selector[field] = { - $eq: matcher - }; - } - }); - } - - /** - * Ensure that if an index is specified, - * the primaryKey is inside of it. 
- */ - if (normalizedMangoQuery.index) { - var indexAr = toArray(normalizedMangoQuery.index); - if (!indexAr.includes(primaryKey)) { - indexAr.push(primaryKey); - } - normalizedMangoQuery.index = indexAr; - } - - /** - * To ensure a deterministic sorting, - * we have to ensure the primary key is always part - * of the sort query. - * Primary sorting is added as last sort parameter, - * similar to how we add the primary key to indexes that do not have it. - * - */ - if (!normalizedMangoQuery.sort) { - /** - * If no sort is given at all, - * we can assume that the user does not care about sort order at al. - * - * we cannot just use the primary key as sort parameter - * because it would likely cause the query to run over the primary key index - * which has a bad performance in most cases. - */ - if (normalizedMangoQuery.index) { - normalizedMangoQuery.sort = normalizedMangoQuery.index.map(field => { - return { - [field]: 'asc' - }; - }); - } else { - /** - * Find the index that best matches the fields with the logical operators - */ - if (schema.indexes) { - var fieldsWithLogicalOperator = new Set(); - Object.entries(normalizedMangoQuery.selector).forEach(([field, matcher]) => { - var hasLogical = false; - if (typeof matcher === 'object' && matcher !== null) { - hasLogical = !!Object.keys(matcher).find(operator => LOGICAL_OPERATORS.has(operator)); - } else { - hasLogical = true; - } - if (hasLogical) { - fieldsWithLogicalOperator.add(field); - } - }); - var currentFieldsAmount = -1; - var currentBestIndexForSort; - schema.indexes.forEach(index => { - var useIndex = isMaybeReadonlyArray(index) ? 
index : [index]; - var firstWrongIndex = useIndex.findIndex(indexField => !fieldsWithLogicalOperator.has(indexField)); - if (firstWrongIndex > 0 && firstWrongIndex > currentFieldsAmount) { - currentFieldsAmount = firstWrongIndex; - currentBestIndexForSort = useIndex; - } - }); - if (currentBestIndexForSort) { - normalizedMangoQuery.sort = currentBestIndexForSort.map(field => { - return { - [field]: 'asc' - }; - }); - } - } - - /** - * Fall back to the primary key as sort order - * if no better one has been found - */ - if (!normalizedMangoQuery.sort) { - normalizedMangoQuery.sort = [{ - [primaryKey]: 'asc' - }]; - } - } - } else { - var isPrimaryInSort = normalizedMangoQuery.sort.find(p => firstPropertyNameOfObject(p) === primaryKey); - if (!isPrimaryInSort) { - normalizedMangoQuery.sort = normalizedMangoQuery.sort.slice(0); - normalizedMangoQuery.sort.push({ - [primaryKey]: 'asc' - }); - } - } - return normalizedMangoQuery; -} - -/** - * Returns the sort-comparator, - * which is able to sort documents in the same way - * a query over the db would do. - */ -export function getSortComparator(schema, query) { - if (!query.sort) { - throw newRxError('SNH', { - query - }); - } - var sortParts = []; - query.sort.forEach(sortBlock => { - var key = Object.keys(sortBlock)[0]; - var direction = Object.values(sortBlock)[0]; - sortParts.push({ - key, - direction, - getValueFn: objectPathMonad(key) - }); - }); - var fun = (a, b) => { - for (var i = 0; i < sortParts.length; ++i) { - var sortPart = sortParts[i]; - var valueA = sortPart.getValueFn(a); - var valueB = sortPart.getValueFn(b); - if (valueA !== valueB) { - var ret = sortPart.direction === 'asc' ? mingoSortComparator(valueA, valueB) : mingoSortComparator(valueB, valueA); - return ret; - } - } - }; - return fun; -} - -/** - * Returns a function - * that can be used to check if a document - * matches the query. 
- */ -export function getQueryMatcher(_schema, query) { - if (!query.sort) { - throw newRxError('SNH', { - query - }); - } - var mingoQuery = getMingoQuery(query.selector); - var fun = doc => { - return mingoQuery.test(doc); - }; - return fun; -} -export async function runQueryUpdateFunction(rxQuery, fn) { - var docs = await rxQuery.exec(); - if (!docs) { - // only findOne() queries can return null - return null; - } - if (Array.isArray(docs)) { - return Promise.all(docs.map(doc => fn(doc))); - } else { - // via findOne() - var result = await fn(docs); - return result; - } -} -//# sourceMappingURL=rx-query-helper.js.map \ No newline at end of file diff --git a/dist/esm/rx-query-helper.js.map b/dist/esm/rx-query-helper.js.map deleted file mode 100644 index 0f8ad98b22d..00000000000 --- a/dist/esm/rx-query-helper.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-query-helper.js","names":["LOGICAL_OPERATORS","getPrimaryFieldOfPrimaryKey","clone","firstPropertyNameOfObject","toArray","isMaybeReadonlyArray","flatClone","objectPathMonad","compare","mingoSortComparator","newRxError","getMingoQuery","normalizeMangoQuery","schema","mangoQuery","primaryKey","normalizedMangoQuery","skip","selector","Object","entries","forEach","field","matcher","$eq","index","indexAr","includes","push","sort","map","indexes","fieldsWithLogicalOperator","Set","hasLogical","keys","find","operator","has","add","currentFieldsAmount","currentBestIndexForSort","useIndex","firstWrongIndex","findIndex","indexField","isPrimaryInSort","p","slice","getSortComparator","query","sortParts","sortBlock","key","direction","values","getValueFn","fun","a","b","i","length","sortPart","valueA","valueB","ret","getQueryMatcher","_schema","mingoQuery","doc","test","runQueryUpdateFunction","rxQuery","fn","docs","exec","Array","isArray","Promise","all","result"],"sources":["../../src/rx-query-helper.ts"],"sourcesContent":["import { LOGICAL_OPERATORS } from './query-planner.ts';\nimport { 
getPrimaryFieldOfPrimaryKey } from './rx-schema-helper.ts';\nimport type {\n DeepReadonly,\n DeterministicSortComparator,\n FilledMangoQuery,\n MangoQuery,\n MangoQuerySortDirection,\n QueryMatcher,\n RxDocument,\n RxDocumentData,\n RxJsonSchema,\n RxQuery\n} from './types/index.d.ts';\nimport {\n clone,\n firstPropertyNameOfObject,\n toArray,\n isMaybeReadonlyArray,\n flatClone,\n objectPathMonad,\n ObjectPathMonadFunction\n} from './plugins/utils/index.ts';\nimport {\n compare as mingoSortComparator\n} from 'mingo/util';\nimport { newRxError } from './rx-error.ts';\nimport { getMingoQuery } from './rx-query-mingo.ts';\n\n/**\n * Normalize the query to ensure we have all fields set\n * and queries that represent the same query logic are detected as equal by the caching.\n */\nexport function normalizeMangoQuery(\n schema: RxJsonSchema>,\n mangoQuery: MangoQuery\n): FilledMangoQuery {\n const primaryKey: string = getPrimaryFieldOfPrimaryKey(schema.primaryKey);\n mangoQuery = flatClone(mangoQuery);\n\n const normalizedMangoQuery: FilledMangoQuery = clone(mangoQuery) as any;\n if (typeof normalizedMangoQuery.skip !== 'number') {\n normalizedMangoQuery.skip = 0;\n }\n\n if (!normalizedMangoQuery.selector) {\n normalizedMangoQuery.selector = {};\n } else {\n normalizedMangoQuery.selector = normalizedMangoQuery.selector;\n /**\n * In mango query, it is possible to have an\n * equals comparison by directly assigning a value\n * to a property, without the '$eq' operator.\n * Like:\n * selector: {\n * foo: 'bar'\n * }\n * For normalization, we have to normalize this\n * so our checks can perform properly.\n *\n *\n * TODO this must work recursive with nested queries that\n * contain multiple selectors via $and or $or etc.\n */\n Object\n .entries(normalizedMangoQuery.selector)\n .forEach(([field, matcher]) => {\n if (typeof matcher !== 'object' || matcher === null) {\n (normalizedMangoQuery as any).selector[field] = {\n $eq: matcher\n };\n }\n });\n }\n\n /**\n * Ensure 
that if an index is specified,\n * the primaryKey is inside of it.\n */\n if (normalizedMangoQuery.index) {\n const indexAr = toArray(normalizedMangoQuery.index);\n if (!indexAr.includes(primaryKey)) {\n indexAr.push(primaryKey);\n }\n normalizedMangoQuery.index = indexAr;\n }\n\n /**\n * To ensure a deterministic sorting,\n * we have to ensure the primary key is always part\n * of the sort query.\n * Primary sorting is added as last sort parameter,\n * similar to how we add the primary key to indexes that do not have it.\n *\n */\n if (!normalizedMangoQuery.sort) {\n /**\n * If no sort is given at all,\n * we can assume that the user does not care about sort order at al.\n *\n * we cannot just use the primary key as sort parameter\n * because it would likely cause the query to run over the primary key index\n * which has a bad performance in most cases.\n */\n if (normalizedMangoQuery.index) {\n normalizedMangoQuery.sort = normalizedMangoQuery.index.map((field: string) => {\n return { [field as any]: 'asc' } as any;\n });\n } else {\n /**\n * Find the index that best matches the fields with the logical operators\n */\n if (schema.indexes) {\n const fieldsWithLogicalOperator: Set = new Set();\n Object.entries(normalizedMangoQuery.selector).forEach(([field, matcher]) => {\n let hasLogical = false;\n if (typeof matcher === 'object' && matcher !== null) {\n hasLogical = !!Object.keys(matcher).find(operator => LOGICAL_OPERATORS.has(operator));\n } else {\n hasLogical = true;\n }\n if (hasLogical) {\n fieldsWithLogicalOperator.add(field);\n }\n });\n\n\n let currentFieldsAmount = -1;\n let currentBestIndexForSort: string[] | readonly string[] | undefined;\n schema.indexes.forEach(index => {\n const useIndex = isMaybeReadonlyArray(index) ? 
index : [index];\n const firstWrongIndex = useIndex.findIndex(indexField => !fieldsWithLogicalOperator.has(indexField));\n if (\n firstWrongIndex > 0 &&\n firstWrongIndex > currentFieldsAmount\n ) {\n currentFieldsAmount = firstWrongIndex;\n currentBestIndexForSort = useIndex;\n }\n });\n if (currentBestIndexForSort) {\n normalizedMangoQuery.sort = currentBestIndexForSort.map((field: string) => {\n return { [field as any]: 'asc' } as any;\n });\n }\n\n }\n\n /**\n * Fall back to the primary key as sort order\n * if no better one has been found\n */\n if (!normalizedMangoQuery.sort) {\n normalizedMangoQuery.sort = [{ [primaryKey]: 'asc' }] as any;\n }\n }\n } else {\n const isPrimaryInSort = normalizedMangoQuery.sort\n .find(p => firstPropertyNameOfObject(p) === primaryKey);\n if (!isPrimaryInSort) {\n normalizedMangoQuery.sort = normalizedMangoQuery.sort.slice(0);\n normalizedMangoQuery.sort.push({ [primaryKey]: 'asc' } as any);\n }\n }\n\n return normalizedMangoQuery;\n}\n\n/**\n * Returns the sort-comparator,\n * which is able to sort documents in the same way\n * a query over the db would do.\n */\nexport function getSortComparator(\n schema: RxJsonSchema>,\n query: FilledMangoQuery\n): DeterministicSortComparator {\n if (!query.sort) {\n throw newRxError('SNH', { query });\n }\n const sortParts: {\n key: string;\n direction: MangoQuerySortDirection;\n getValueFn: ObjectPathMonadFunction;\n }[] = [];\n query.sort.forEach(sortBlock => {\n const key = Object.keys(sortBlock)[0];\n const direction = Object.values(sortBlock)[0];\n sortParts.push({\n key,\n direction,\n getValueFn: objectPathMonad(key)\n });\n });\n const fun: DeterministicSortComparator = (a: RxDocType, b: RxDocType) => {\n for (let i = 0; i < sortParts.length; ++i) {\n const sortPart = sortParts[i];\n const valueA = sortPart.getValueFn(a);\n const valueB = sortPart.getValueFn(b);\n if (valueA !== valueB) {\n const ret = sortPart.direction === 'asc' ? 
mingoSortComparator(valueA, valueB) : mingoSortComparator(valueB, valueA);\n return ret as any;\n }\n }\n };\n\n return fun;\n}\n\n\n/**\n * Returns a function\n * that can be used to check if a document\n * matches the query.\n */\nexport function getQueryMatcher(\n _schema: RxJsonSchema | RxJsonSchema>,\n query: FilledMangoQuery\n): QueryMatcher> {\n if (!query.sort) {\n throw newRxError('SNH', { query });\n }\n\n const mingoQuery = getMingoQuery(query.selector as any);\n const fun: QueryMatcher> = (doc: RxDocumentData | DeepReadonly>) => {\n return mingoQuery.test(doc);\n };\n return fun;\n}\n\n\nexport async function runQueryUpdateFunction(\n rxQuery: RxQuery,\n fn: (doc: RxDocument) => Promise>\n): Promise {\n const docs = await rxQuery.exec();\n if (!docs) {\n // only findOne() queries can return null\n return null as any;\n }\n if (Array.isArray(docs)) {\n return Promise.all(\n docs.map(doc => fn(doc))\n ) as any;\n } else {\n // via findOne()\n const result = await fn(docs as any);\n return result as any;\n 
}\n}\n"],"mappings":"AAAA,SAASA,iBAAiB,QAAQ,oBAAoB;AACtD,SAASC,2BAA2B,QAAQ,uBAAuB;AAanE,SACIC,KAAK,EACLC,yBAAyB,EACzBC,OAAO,EACPC,oBAAoB,EACpBC,SAAS,EACTC,eAAe,QAEZ,0BAA0B;AACjC,SACIC,OAAO,IAAIC,mBAAmB,QAC3B,YAAY;AACnB,SAASC,UAAU,QAAQ,eAAe;AAC1C,SAASC,aAAa,QAAQ,qBAAqB;;AAEnD;AACA;AACA;AACA;AACA,OAAO,SAASC,mBAAmBA,CAC/BC,MAA+C,EAC/CC,UAAiC,EACN;EAC3B,IAAMC,UAAkB,GAAGd,2BAA2B,CAACY,MAAM,CAACE,UAAU,CAAC;EACzED,UAAU,GAAGR,SAAS,CAACQ,UAAU,CAAC;EAElC,IAAME,oBAAiD,GAAGd,KAAK,CAACY,UAAU,CAAQ;EAClF,IAAI,OAAOE,oBAAoB,CAACC,IAAI,KAAK,QAAQ,EAAE;IAC/CD,oBAAoB,CAACC,IAAI,GAAG,CAAC;EACjC;EAEA,IAAI,CAACD,oBAAoB,CAACE,QAAQ,EAAE;IAChCF,oBAAoB,CAACE,QAAQ,GAAG,CAAC,CAAC;EACtC,CAAC,MAAM;IACHF,oBAAoB,CAACE,QAAQ,GAAGF,oBAAoB,CAACE,QAAQ;IAC7D;AACR;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;IACQC,MAAM,CACDC,OAAO,CAACJ,oBAAoB,CAACE,QAAQ,CAAC,CACtCG,OAAO,CAAC,CAAC,CAACC,KAAK,EAAEC,OAAO,CAAC,KAAK;MAC3B,IAAI,OAAOA,OAAO,KAAK,QAAQ,IAAIA,OAAO,KAAK,IAAI,EAAE;QAChDP,oBAAoB,CAASE,QAAQ,CAACI,KAAK,CAAC,GAAG;UAC5CE,GAAG,EAAED;QACT,CAAC;MACL;IACJ,CAAC,CAAC;EACV;;EAEA;AACJ;AACA;AACA;EACI,IAAIP,oBAAoB,CAACS,KAAK,EAAE;IAC5B,IAAMC,OAAO,GAAGtB,OAAO,CAACY,oBAAoB,CAACS,KAAK,CAAC;IACnD,IAAI,CAACC,OAAO,CAACC,QAAQ,CAACZ,UAAU,CAAC,EAAE;MAC/BW,OAAO,CAACE,IAAI,CAACb,UAAU,CAAC;IAC5B;IACAC,oBAAoB,CAACS,KAAK,GAAGC,OAAO;EACxC;;EAEA;AACJ;AACA;AACA;AACA;AACA;AACA;AACA;EACI,IAAI,CAACV,oBAAoB,CAACa,IAAI,EAAE;IAC5B;AACR;AACA;AACA;AACA;AACA;AACA;AACA;IACQ,IAAIb,oBAAoB,CAACS,KAAK,EAAE;MAC5BT,oBAAoB,CAACa,IAAI,GAAGb,oBAAoB,CAACS,KAAK,CAACK,GAAG,CAAER,KAAa,IAAK;QAC1E,OAAO;UAAE,CAACA,KAAK,GAAU;QAAM,CAAC;MACpC,CAAC,CAAC;IACN,CAAC,MAAM;MACH;AACZ;AACA;MACY,IAAIT,MAAM,CAACkB,OAAO,EAAE;QAChB,IAAMC,yBAAsC,GAAG,IAAIC,GAAG,CAAC,CAAC;QACxDd,MAAM,CAACC,OAAO,CAACJ,oBAAoB,CAACE,QAAQ,CAAC,CAACG,OAAO,CAAC,CAAC,CAACC,KAAK,EAAEC,OAAO,CAAC,KAAK;UACxE,IAAIW,UAAU,GAAG,KAAK;UACtB,IAAI,OAAOX,OAAO,KAAK,QAAQ,IAAIA,OAAO,KAAK,IAAI,EAAE;YACjDW,UAAU,GAAG,CAAC,CAACf,MAAM,CAACgB,IAAI,CAACZ,OAAO,CAAC,CAACa,IAAI,CAACC,QAAQ,IAAIrC,iBAAiB,CA
ACsC,GAAG,CAACD,QAAQ,CAAC,CAAC;UACzF,CAAC,MAAM;YACHH,UAAU,GAAG,IAAI;UACrB;UACA,IAAIA,UAAU,EAAE;YACZF,yBAAyB,CAACO,GAAG,CAACjB,KAAK,CAAC;UACxC;QACJ,CAAC,CAAC;QAGF,IAAIkB,mBAAmB,GAAG,CAAC,CAAC;QAC5B,IAAIC,uBAAiE;QACrE5B,MAAM,CAACkB,OAAO,CAACV,OAAO,CAACI,KAAK,IAAI;UAC5B,IAAMiB,QAAQ,GAAGrC,oBAAoB,CAACoB,KAAK,CAAC,GAAGA,KAAK,GAAG,CAACA,KAAK,CAAC;UAC9D,IAAMkB,eAAe,GAAGD,QAAQ,CAACE,SAAS,CAACC,UAAU,IAAI,CAACb,yBAAyB,CAACM,GAAG,CAACO,UAAU,CAAC,CAAC;UACpG,IACIF,eAAe,GAAG,CAAC,IACnBA,eAAe,GAAGH,mBAAmB,EACvC;YACEA,mBAAmB,GAAGG,eAAe;YACrCF,uBAAuB,GAAGC,QAAQ;UACtC;QACJ,CAAC,CAAC;QACF,IAAID,uBAAuB,EAAE;UACzBzB,oBAAoB,CAACa,IAAI,GAAGY,uBAAuB,CAACX,GAAG,CAAER,KAAa,IAAK;YACvE,OAAO;cAAE,CAACA,KAAK,GAAU;YAAM,CAAC;UACpC,CAAC,CAAC;QACN;MAEJ;;MAEA;AACZ;AACA;AACA;MACY,IAAI,CAACN,oBAAoB,CAACa,IAAI,EAAE;QAC5Bb,oBAAoB,CAACa,IAAI,GAAG,CAAC;UAAE,CAACd,UAAU,GAAG;QAAM,CAAC,CAAQ;MAChE;IACJ;EACJ,CAAC,MAAM;IACH,IAAM+B,eAAe,GAAG9B,oBAAoB,CAACa,IAAI,CAC5CO,IAAI,CAACW,CAAC,IAAI5C,yBAAyB,CAAC4C,CAAC,CAAC,KAAKhC,UAAU,CAAC;IAC3D,IAAI,CAAC+B,eAAe,EAAE;MAClB9B,oBAAoB,CAACa,IAAI,GAAGb,oBAAoB,CAACa,IAAI,CAACmB,KAAK,CAAC,CAAC,CAAC;MAC9DhC,oBAAoB,CAACa,IAAI,CAACD,IAAI,CAAC;QAAE,CAACb,UAAU,GAAG;MAAM,CAAQ,CAAC;IAClE;EACJ;EAEA,OAAOC,oBAAoB;AAC/B;;AAEA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASiC,iBAAiBA,CAC7BpC,MAA+C,EAC/CqC,KAAkC,EACI;EACtC,IAAI,CAACA,KAAK,CAACrB,IAAI,EAAE;IACb,MAAMnB,UAAU,CAAC,KAAK,EAAE;MAAEwC;IAAM,CAAC,CAAC;EACtC;EACA,IAAMC,SAIH,GAAG,EAAE;EACRD,KAAK,CAACrB,IAAI,CAACR,OAAO,CAAC+B,SAAS,IAAI;IAC5B,IAAMC,GAAG,GAAGlC,MAAM,CAACgB,IAAI,CAACiB,SAAS,CAAC,CAAC,CAAC,CAAC;IACrC,IAAME,SAAS,GAAGnC,MAAM,CAACoC,MAAM,CAACH,SAAS,CAAC,CAAC,CAAC,CAAC;IAC7CD,SAAS,CAACvB,IAAI,CAAC;MACXyB,GAAG;MACHC,SAAS;MACTE,UAAU,EAAEjD,eAAe,CAAC8C,GAAG;IACnC,CAAC,CAAC;EACN,CAAC,CAAC;EACF,IAAMI,GAA2C,GAAGA,CAACC,CAAY,EAAEC,CAAY,KAAK;IAChF,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGT,SAAS,CAACU,MAAM,EAAE,EAAED,CAAC,EAAE;MACvC,IAAME,QAAQ,GAAGX,SAAS,CAACS,CAAC,CAAC;MAC7B,IAAMG,MAAM,GAAGD,QAAQ,CAACN,UAAU,CAACE,CAAC,CAAC;MACrC,IAAMM,MAAM,GAAGF,QA
AQ,CAACN,UAAU,CAACG,CAAC,CAAC;MACrC,IAAII,MAAM,KAAKC,MAAM,EAAE;QACnB,IAAMC,GAAG,GAAGH,QAAQ,CAACR,SAAS,KAAK,KAAK,GAAG7C,mBAAmB,CAACsD,MAAM,EAAEC,MAAM,CAAC,GAAGvD,mBAAmB,CAACuD,MAAM,EAAED,MAAM,CAAC;QACpH,OAAOE,GAAG;MACd;IACJ;EACJ,CAAC;EAED,OAAOR,GAAG;AACd;;AAGA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASS,eAAeA,CAC3BC,OAA0E,EAC1EjB,KAAkC,EACK;EACvC,IAAI,CAACA,KAAK,CAACrB,IAAI,EAAE;IACb,MAAMnB,UAAU,CAAC,KAAK,EAAE;MAAEwC;IAAM,CAAC,CAAC;EACtC;EAEA,IAAMkB,UAAU,GAAGzD,aAAa,CAACuC,KAAK,CAAChC,QAAe,CAAC;EACvD,IAAMuC,GAA4C,GAAIY,GAAwE,IAAK;IAC/H,OAAOD,UAAU,CAACE,IAAI,CAACD,GAAG,CAAC;EAC/B,CAAC;EACD,OAAOZ,GAAG;AACd;AAGA,OAAO,eAAec,sBAAsBA,CACxCC,OAA0C,EAC1CC,EAAkE,EAC5C;EACtB,IAAMC,IAAI,GAAG,MAAMF,OAAO,CAACG,IAAI,CAAC,CAAC;EACjC,IAAI,CAACD,IAAI,EAAE;IACP;IACA,OAAO,IAAI;EACf;EACA,IAAIE,KAAK,CAACC,OAAO,CAACH,IAAI,CAAC,EAAE;IACrB,OAAOI,OAAO,CAACC,GAAG,CACdL,IAAI,CAAC5C,GAAG,CAACuC,GAAG,IAAII,EAAE,CAACJ,GAAG,CAAC,CAC3B,CAAC;EACL,CAAC,MAAM;IACH;IACA,IAAMW,MAAM,GAAG,MAAMP,EAAE,CAACC,IAAW,CAAC;IACpC,OAAOM,MAAM;EACjB;AACJ","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/rx-query-mingo.js b/dist/esm/rx-query-mingo.js deleted file mode 100644 index 43e27c91bc5..00000000000 --- a/dist/esm/rx-query-mingo.js +++ /dev/null @@ -1,48 +0,0 @@ -import { useOperators, OperatorType } from 'mingo/core'; -import { Query } from 'mingo/query'; -import { $project, $sort } from 'mingo/operators/pipeline'; -import { $and, $not, $or, $nor } from 'mingo/operators/query/logical'; -import { $eq, $ne, $gt, $gte, $lt, $lte, $nin, $in } from 'mingo/operators/query/comparison'; -import { $regex, $mod } from 'mingo/operators/query/evaluation'; -import { $elemMatch, $size } from 'mingo/operators/query/array'; -import { $exists, $type } from 'mingo/operators/query/element'; -var mingoInitDone = false; - -/** - * The MongoDB query library is huge and we do not need all the operators. 
- * If you add an operator here, make sure that you properly add a test in - * the file /test/unit/rx-storage-query-correctness.test.ts - * - * @link https://github.com/kofrasa/mingo#es6 - */ -export function getMingoQuery(selector) { - if (!mingoInitDone) { - useOperators(OperatorType.PIPELINE, { - $sort, - $project - }); - useOperators(OperatorType.QUERY, { - $and, - $eq, - $elemMatch, - $exists, - $gt, - $gte, - $in, - $lt, - $lte, - $ne, - $nin, - $mod, - $nor, - $not, - $or, - $regex, - $size, - $type - }); - mingoInitDone = true; - } - return new Query(selector); -} -//# sourceMappingURL=rx-query-mingo.js.map \ No newline at end of file diff --git a/dist/esm/rx-query-mingo.js.map b/dist/esm/rx-query-mingo.js.map deleted file mode 100644 index 701ad86c8ce..00000000000 --- a/dist/esm/rx-query-mingo.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-query-mingo.js","names":["useOperators","OperatorType","Query","$project","$sort","$and","$not","$or","$nor","$eq","$ne","$gt","$gte","$lt","$lte","$nin","$in","$regex","$mod","$elemMatch","$size","$exists","$type","mingoInitDone","getMingoQuery","selector","PIPELINE","QUERY"],"sources":["../../src/rx-query-mingo.ts"],"sourcesContent":["import { useOperators, OperatorType } from 'mingo/core';\nimport { Query } from 'mingo/query';\nimport type { MangoQuerySelector } from './types/index.d.ts';\nimport {\n $project,\n $sort\n} from 'mingo/operators/pipeline';\nimport {\n $and,\n $not,\n $or,\n $nor\n} from 'mingo/operators/query/logical';\nimport {\n $eq,\n $ne,\n $gt,\n $gte,\n $lt,\n $lte,\n $nin,\n $in\n} from 'mingo/operators/query/comparison';\nimport {\n $regex,\n $mod\n} from 'mingo/operators/query/evaluation';\nimport {\n $elemMatch,\n $size\n} from 'mingo/operators/query/array';\nimport {\n $exists,\n $type\n} from 'mingo/operators/query/element';\n\nlet mingoInitDone = false;\n\n\n/**\n * The MongoDB query library is huge and we do not need all the operators.\n * If you add an operator here, make sure 
that you properly add a test in\n * the file /test/unit/rx-storage-query-correctness.test.ts\n *\n * @link https://github.com/kofrasa/mingo#es6\n */\nexport function getMingoQuery(\n selector?: MangoQuerySelector\n) {\n if (!mingoInitDone) {\n useOperators(OperatorType.PIPELINE, {\n $sort,\n $project\n } as any);\n useOperators(OperatorType.QUERY, {\n $and,\n $eq,\n $elemMatch,\n $exists,\n $gt,\n $gte,\n $in,\n $lt,\n $lte,\n $ne,\n $nin,\n $mod,\n $nor,\n $not,\n $or,\n $regex,\n $size,\n $type,\n } as any);\n mingoInitDone = true;\n }\n return new Query(selector as any);\n}\n"],"mappings":"AAAA,SAASA,YAAY,EAAEC,YAAY,QAAQ,YAAY;AACvD,SAASC,KAAK,QAAQ,aAAa;AAEnC,SACIC,QAAQ,EACRC,KAAK,QACF,0BAA0B;AACjC,SACIC,IAAI,EACJC,IAAI,EACJC,GAAG,EACHC,IAAI,QACD,+BAA+B;AACtC,SACIC,GAAG,EACHC,GAAG,EACHC,GAAG,EACHC,IAAI,EACJC,GAAG,EACHC,IAAI,EACJC,IAAI,EACJC,GAAG,QACA,kCAAkC;AACzC,SACIC,MAAM,EACNC,IAAI,QACD,kCAAkC;AACzC,SACIC,UAAU,EACVC,KAAK,QACF,6BAA6B;AACpC,SACIC,OAAO,EACPC,KAAK,QACF,+BAA+B;AAEtC,IAAIC,aAAa,GAAG,KAAK;;AAGzB;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASC,aAAaA,CACzBC,QAAwC,EAC1C;EACE,IAAI,CAACF,aAAa,EAAE;IAChBvB,YAAY,CAACC,YAAY,CAACyB,QAAQ,EAAE;MAChCtB,KAAK;MACLD;IACJ,CAAQ,CAAC;IACTH,YAAY,CAACC,YAAY,CAAC0B,KAAK,EAAE;MAC7BtB,IAAI;MACJI,GAAG;MACHU,UAAU;MACVE,OAAO;MACPV,GAAG;MACHC,IAAI;MACJI,GAAG;MACHH,GAAG;MACHC,IAAI;MACJJ,GAAG;MACHK,IAAI;MACJG,IAAI;MACJV,IAAI;MACJF,IAAI;MACJC,GAAG;MACHU,MAAM;MACNG,KAAK;MACLE;IACJ,CAAQ,CAAC;IACTC,aAAa,GAAG,IAAI;EACxB;EACA,OAAO,IAAIrB,KAAK,CAACuB,QAAe,CAAC;AACrC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/rx-query-single-result.js b/dist/esm/rx-query-single-result.js deleted file mode 100644 index f349830219d..00000000000 --- a/dist/esm/rx-query-single-result.js +++ /dev/null @@ -1,64 +0,0 @@ -import _createClass from "@babel/runtime/helpers/createClass"; -import { mapDocumentsDataToCacheDocs } from "./doc-cache.js"; -import { now, overwriteGetterForCaching } from "./plugins/utils/index.js"; -/** - * RxDB 
needs the query results in multiple formats. - * Sometimes as a Map or an array with only the documentData. - * For better performance we work with this class - * that initializes stuff lazily so that - * we can directly work with the query results after RxQuery.exec() - */ -export var RxQuerySingleResult = /*#__PURE__*/function () { - /** - * Time at which the current _result state was created. - * Used to determine if the result set has changed since X - * so that we do not emit the same result multiple times on subscription. - */ - - function RxQuerySingleResult(collection, - // only used internally, do not use outside, use this.docsData instead - docsDataFromStorageInstance, - // can be overwritten for count-queries - count) { - this.time = now(); - this.collection = collection; - this.count = count; - this.documents = mapDocumentsDataToCacheDocs(this.collection._docCache, docsDataFromStorageInstance); - } - - /** - * Instead of using the newResultData in the result cache, - * we directly use the objects that are stored in the RxDocument - * to ensure we do not store the same data twice and fill up the memory. - * @overwrites itself with the actual value - */ - return _createClass(RxQuerySingleResult, [{ - key: "docsData", - get: function () { - return overwriteGetterForCaching(this, 'docsData', this.documents.map(d => d._data)); - } - - // A key->document map, used in the event reduce optimization. 
- }, { - key: "docsDataMap", - get: function () { - var map = new Map(); - this.documents.forEach(d => { - map.set(d.primary, d._data); - }); - return overwriteGetterForCaching(this, 'docsDataMap', map); - } - }, { - key: "docsMap", - get: function () { - var map = new Map(); - var documents = this.documents; - for (var i = 0; i < documents.length; i++) { - var doc = documents[i]; - map.set(doc.primary, doc); - } - return overwriteGetterForCaching(this, 'docsMap', map); - } - }]); -}(); -//# sourceMappingURL=rx-query-single-result.js.map \ No newline at end of file diff --git a/dist/esm/rx-query-single-result.js.map b/dist/esm/rx-query-single-result.js.map deleted file mode 100644 index 71adc454124..00000000000 --- a/dist/esm/rx-query-single-result.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-query-single-result.js","names":["mapDocumentsDataToCacheDocs","now","overwriteGetterForCaching","RxQuerySingleResult","collection","docsDataFromStorageInstance","count","time","documents","_docCache","_createClass","key","get","map","d","_data","Map","forEach","set","primary","i","length","doc"],"sources":["../../src/rx-query-single-result.ts"],"sourcesContent":["import { mapDocumentsDataToCacheDocs } from './doc-cache.ts';\nimport { now, overwriteGetterForCaching } from './plugins/utils/index.ts';\nimport type {\n RxCollection,\n RxDocument,\n RxDocumentData\n} from './types';\n\n/**\n * RxDB needs the query results in multiple formats.\n * Sometimes as a Map or an array with only the documentData.\n * For better performance we work with this class\n * that initializes stuff lazily so that\n * we can directly work with the query results after RxQuery.exec()\n */\nexport class RxQuerySingleResult{\n /**\n * Time at which the current _result state was created.\n * Used to determine if the result set has changed since X\n * so that we do not emit the same result multiple times on subscription.\n */\n public readonly time = now();\n public readonly documents: 
RxDocument[];\n constructor(\n public readonly collection: RxCollection,\n // only used internally, do not use outside, use this.docsData instead\n docsDataFromStorageInstance: RxDocumentData[],\n // can be overwritten for count-queries\n public readonly count: number,\n ) {\n this.documents = mapDocumentsDataToCacheDocs(this.collection._docCache, docsDataFromStorageInstance);\n }\n\n\n /**\n * Instead of using the newResultData in the result cache,\n * we directly use the objects that are stored in the RxDocument\n * to ensure we do not store the same data twice and fill up the memory.\n * @overwrites itself with the actual value\n */\n get docsData(): RxDocumentData[] {\n return overwriteGetterForCaching(\n this,\n 'docsData',\n this.documents.map(d => d._data)\n );\n }\n\n\n // A key->document map, used in the event reduce optimization.\n get docsDataMap(): Map> {\n const map = new Map>();\n this.documents.forEach(d => {\n map.set(d.primary, d._data);\n });\n return overwriteGetterForCaching(\n this,\n 'docsDataMap',\n map\n );\n }\n\n get docsMap(): Map> {\n const map = new Map>();\n const documents = this.documents;\n for (let i = 0; i < documents.length; i++) {\n const doc = documents[i];\n map.set(doc.primary, doc);\n }\n return overwriteGetterForCaching(\n this,\n 'docsMap',\n map\n );\n 
}\n}\n"],"mappings":";AAAA,SAASA,2BAA2B,QAAQ,gBAAgB;AAC5D,SAASC,GAAG,EAAEC,yBAAyB,QAAQ,0BAA0B;AAOzE;AACA;AACA;AACA;AACA;AACA;AACA;AACA,WAAaC,mBAAmB;EAC5B;AACJ;AACA;AACA;AACA;;EAGI,SAAAA,oBACoBC,UAAmC;EACnD;EACAC,2BAAwD;EACxD;EACgBC,KAAa,EAC/B;IAAA,KARcC,IAAI,GAAGN,GAAG,CAAC,CAAC;IAAA,KAGRG,UAAmC,GAAnCA,UAAmC;IAAA,KAInCE,KAAa,GAAbA,KAAa;IAE7B,IAAI,CAACE,SAAS,GAAGR,2BAA2B,CAAiB,IAAI,CAACI,UAAU,CAACK,SAAS,EAAEJ,2BAA2B,CAAC;EACxH;;EAGA;AACJ;AACA;AACA;AACA;AACA;EALI,OAAAK,YAAA,CAAAP,mBAAA;IAAAQ,GAAA;IAAAC,GAAA,EAMA,SAAAA,CAAA,EAA4C;MACxC,OAAOV,yBAAyB,CAC5B,IAAI,EACJ,UAAU,EACV,IAAI,CAACM,SAAS,CAACK,GAAG,CAACC,CAAC,IAAIA,CAAC,CAACC,KAAK,CACnC,CAAC;IACL;;IAGA;EAAA;IAAAJ,GAAA;IAAAC,GAAA,EACA,SAAAA,CAAA,EAA0D;MACtD,IAAMC,GAAG,GAAG,IAAIG,GAAG,CAAoC,CAAC;MACxD,IAAI,CAACR,SAAS,CAACS,OAAO,CAACH,CAAC,IAAI;QACxBD,GAAG,CAACK,GAAG,CAACJ,CAAC,CAACK,OAAO,EAAEL,CAAC,CAACC,KAAK,CAAC;MAC/B,CAAC,CAAC;MACF,OAAOb,yBAAyB,CAC5B,IAAI,EACJ,aAAa,EACbW,GACJ,CAAC;IACL;EAAC;IAAAF,GAAA;IAAAC,GAAA,EAED,SAAAA,CAAA,EAAkD;MAC9C,IAAMC,GAAG,GAAG,IAAIG,GAAG,CAAgC,CAAC;MACpD,IAAMR,SAAS,GAAG,IAAI,CAACA,SAAS;MAChC,KAAK,IAAIY,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGZ,SAAS,CAACa,MAAM,EAAED,CAAC,EAAE,EAAE;QACvC,IAAME,GAAG,GAAGd,SAAS,CAACY,CAAC,CAAC;QACxBP,GAAG,CAACK,GAAG,CAACI,GAAG,CAACH,OAAO,EAAEG,GAAG,CAAC;MAC7B;MACA,OAAOpB,yBAAyB,CAC5B,IAAI,EACJ,SAAS,EACTW,GACJ,CAAC;IACL;EAAC;AAAA","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/rx-query.js b/dist/esm/rx-query.js deleted file mode 100644 index 322e2643b87..00000000000 --- a/dist/esm/rx-query.js +++ /dev/null @@ -1,620 +0,0 @@ -import _createClass from "@babel/runtime/helpers/createClass"; -import { BehaviorSubject, firstValueFrom, merge } from 'rxjs'; -import { mergeMap, filter, map, startWith, distinctUntilChanged, shareReplay } from 'rxjs/operators'; -import { sortObject, pluginMissing, overwriteGetterForCaching, now, PROMISE_RESOLVE_FALSE, RXJS_SHARE_REPLAY_DEFAULTS, ensureNotFalsy, areRxDocumentArraysEqual, appendToArray } from 
"./plugins/utils/index.js"; -import { newRxError } from "./rx-error.js"; -import { runPluginHooks } from "./hooks.js"; -import { calculateNewResults } from "./event-reduce.js"; -import { triggerCacheReplacement } from "./query-cache.js"; -import { getQueryMatcher, normalizeMangoQuery, runQueryUpdateFunction } from "./rx-query-helper.js"; -import { RxQuerySingleResult } from "./rx-query-single-result.js"; -import { getQueryPlan } from "./query-planner.js"; -var _queryCount = 0; -var newQueryID = function () { - return ++_queryCount; -}; -export var RxQueryBase = /*#__PURE__*/function () { - /** - * Some stats then are used for debugging and cache replacement policies - */ - - // used in the query-cache to determine if the RxQuery can be cleaned up. - - // used to count the subscribers to the query - - /** - * Contains the current result state - * or null if query has not run yet. - */ - - function RxQueryBase(op, mangoQuery, collection, - // used by some plugins - other = {}) { - this.id = newQueryID(); - this._execOverDatabaseCount = 0; - this._creationTime = now(); - this._lastEnsureEqual = 0; - this.uncached = false; - this.refCount$ = new BehaviorSubject(null); - this._result = null; - this._latestChangeEvent = -1; - this._lastExecStart = 0; - this._lastExecEnd = 0; - this._ensureEqualQueue = PROMISE_RESOLVE_FALSE; - this.op = op; - this.mangoQuery = mangoQuery; - this.collection = collection; - this.other = other; - if (!mangoQuery) { - this.mangoQuery = _getDefaultQuery(); - } - this.isFindOneByIdQuery = isFindOneByIdQuery(this.collection.schema.primaryPath, mangoQuery); - } - var _proto = RxQueryBase.prototype; - /** - * Returns an observable that emits the results - * This should behave like an rxjs-BehaviorSubject which means: - * - Emit the current result-set on subscribe - * - Emit the new result-set when an RxChangeEvent comes in - * - Do not emit anything before the first result-set was created (no null) - */ - /** - * set the new result-data as 
result-docs of the query - * @param newResultData json-docs that were received from the storage - */ - _proto._setResultData = function _setResultData(newResultData) { - if (typeof newResultData === 'number') { - this._result = new RxQuerySingleResult(this.collection, [], newResultData); - return; - } else if (newResultData instanceof Map) { - newResultData = Array.from(newResultData.values()); - } - var newQueryResult = new RxQuerySingleResult(this.collection, newResultData, newResultData.length); - this._result = newQueryResult; - } - - /** - * executes the query on the database - * @return results-array with document-data - */; - _proto._execOverDatabase = async function _execOverDatabase() { - this._execOverDatabaseCount = this._execOverDatabaseCount + 1; - this._lastExecStart = now(); - if (this.op === 'count') { - var preparedQuery = this.getPreparedQuery(); - var result = await this.collection.storageInstance.count(preparedQuery); - if (result.mode === 'slow' && !this.collection.database.allowSlowCount) { - throw newRxError('QU14', { - collection: this.collection, - queryObj: this.mangoQuery - }); - } else { - return result.count; - } - } - if (this.op === 'findByIds') { - var ids = ensureNotFalsy(this.mangoQuery.selector)[this.collection.schema.primaryPath].$in; - var ret = new Map(); - var mustBeQueried = []; - // first try to fill from docCache - ids.forEach(id => { - var docData = this.collection._docCache.getLatestDocumentDataIfExists(id); - if (docData) { - if (!docData._deleted) { - var doc = this.collection._docCache.getCachedRxDocument(docData); - ret.set(id, doc); - } - } else { - mustBeQueried.push(id); - } - }); - // everything which was not in docCache must be fetched from the storage - if (mustBeQueried.length > 0) { - var docs = await this.collection.storageInstance.findDocumentsById(mustBeQueried, false); - docs.forEach(docData => { - var doc = this.collection._docCache.getCachedRxDocument(docData); - ret.set(doc.primary, doc); - }); - } - 
return ret; - } - var docsPromise = queryCollection(this); - return docsPromise.then(docs => { - this._lastExecEnd = now(); - return docs; - }); - } - - /** - * Execute the query - * To have an easier implementations, - * just subscribe and use the first result - */; - _proto.exec = function exec(throwIfMissing) { - if (throwIfMissing && this.op !== 'findOne') { - throw newRxError('QU9', { - collection: this.collection.name, - query: this.mangoQuery, - op: this.op - }); - } - - /** - * run _ensureEqual() here, - * this will make sure that errors in the query which throw inside of the RxStorage, - * will be thrown at this execution context and not in the background. - */ - return _ensureEqual(this).then(() => firstValueFrom(this.$)).then(result => { - if (!result && throwIfMissing) { - throw newRxError('QU10', { - collection: this.collection.name, - query: this.mangoQuery, - op: this.op - }); - } else { - return result; - } - }); - } - - /** - * cached call to get the queryMatcher - * @overwrites itself with the actual value - */; - /** - * returns a string that is used for equal-comparisons - * @overwrites itself with the actual value - */ - _proto.toString = function toString() { - var stringObj = sortObject({ - op: this.op, - query: this.mangoQuery, - other: this.other - }, true); - var value = JSON.stringify(stringObj); - this.toString = () => value; - return value; - } - - /** - * returns the prepared query - * which can be send to the storage instance to query for documents. - * @overwrites itself with the actual value. - */; - _proto.getPreparedQuery = function getPreparedQuery() { - var hookInput = { - rxQuery: this, - // can be mutated by the hooks so we have to deep clone first. 
- mangoQuery: normalizeMangoQuery(this.collection.schema.jsonSchema, this.mangoQuery) - }; - hookInput.mangoQuery.selector._deleted = { - $eq: false - }; - if (hookInput.mangoQuery.index) { - hookInput.mangoQuery.index.unshift('_deleted'); - } - runPluginHooks('prePrepareQuery', hookInput); - var value = prepareQuery(this.collection.schema.jsonSchema, hookInput.mangoQuery); - this.getPreparedQuery = () => value; - return value; - } - - /** - * returns true if the document matches the query, - * does not use the 'skip' and 'limit' - */; - _proto.doesDocumentDataMatch = function doesDocumentDataMatch(docData) { - // if doc is deleted, it cannot match - if (docData._deleted) { - return false; - } - return this.queryMatcher(docData); - } - - /** - * deletes all found documents - * @return promise with deleted documents - */; - _proto.remove = function remove() { - return this.exec().then(docs => { - if (Array.isArray(docs)) { - // TODO use a bulk operation instead of running .remove() on each document - return Promise.all(docs.map(doc => doc.remove())); - } else { - return docs.remove(); - } - }); - }; - _proto.incrementalRemove = function incrementalRemove() { - return runQueryUpdateFunction(this.asRxQuery, doc => doc.incrementalRemove()); - } - - /** - * helper function to transform RxQueryBase to RxQuery type - */; - /** - * updates all found documents - * @overwritten by plugin (optional) - */ - _proto.update = function update(_updateObj) { - throw pluginMissing('update'); - }; - _proto.patch = function patch(_patch) { - return runQueryUpdateFunction(this.asRxQuery, doc => doc.patch(_patch)); - }; - _proto.incrementalPatch = function incrementalPatch(patch) { - return runQueryUpdateFunction(this.asRxQuery, doc => doc.incrementalPatch(patch)); - }; - _proto.modify = function modify(mutationFunction) { - return runQueryUpdateFunction(this.asRxQuery, doc => doc.modify(mutationFunction)); - }; - _proto.incrementalModify = function incrementalModify(mutationFunction) { 
- return runQueryUpdateFunction(this.asRxQuery, doc => doc.incrementalModify(mutationFunction)); - } - - // we only set some methods of query-builder here - // because the others depend on these ones - ; - _proto.where = function where(_queryObj) { - throw pluginMissing('query-builder'); - }; - _proto.sort = function sort(_params) { - throw pluginMissing('query-builder'); - }; - _proto.skip = function skip(_amount) { - throw pluginMissing('query-builder'); - }; - _proto.limit = function limit(_amount) { - throw pluginMissing('query-builder'); - }; - return _createClass(RxQueryBase, [{ - key: "$", - get: function () { - if (!this._$) { - var results$ = this.collection.$.pipe( - /** - * Performance shortcut. - * Changes to local documents are not relevant for the query. - */ - filter(changeEvent => !changeEvent.isLocal), - /** - * Start once to ensure the querying also starts - * when there where no changes. - */ - startWith(null), - // ensure query results are up to date. - mergeMap(() => _ensureEqual(this)), - // use the current result set, written by _ensureEqual(). - map(() => this._result), - // do not run stuff above for each new subscriber, only once. - shareReplay(RXJS_SHARE_REPLAY_DEFAULTS), - // do not proceed if result set has not changed. - distinctUntilChanged((prev, curr) => { - if (prev && prev.time === ensureNotFalsy(curr).time) { - return true; - } else { - return false; - } - }), filter(result => !!result), - /** - * Map the result set to a single RxDocument or an array, - * depending on query type - */ - map(result => { - var useResult = ensureNotFalsy(result); - if (this.op === 'count') { - return useResult.count; - } else if (this.op === 'findOne') { - // findOne()-queries emit RxDocument or null - return useResult.documents.length === 0 ? 
null : useResult.documents[0]; - } else if (this.op === 'findByIds') { - return useResult.docsMap; - } else { - // find()-queries emit RxDocument[] - // Flat copy the array so it won't matter if the user modifies it. - return useResult.documents.slice(0); - } - })); - this._$ = merge(results$, - /** - * Also add the refCount$ to the query observable - * to allow us to count the amount of subscribers. - */ - this.refCount$.pipe(filter(() => false))); - } - return this._$; - } - }, { - key: "$$", - get: function () { - var reactivity = this.collection.database.getReactivityFactory(); - return reactivity.fromObservable(this.$, undefined, this.collection.database); - } - - // stores the changeEvent-number of the last handled change-event - - // time stamps on when the last full exec over the database has run - // used to properly handle events that happen while the find-query is running - - /** - * ensures that the exec-runs - * are not run in parallel - */ - }, { - key: "queryMatcher", - get: function () { - var schema = this.collection.schema.jsonSchema; - var normalizedQuery = normalizeMangoQuery(this.collection.schema.jsonSchema, this.mangoQuery); - return overwriteGetterForCaching(this, 'queryMatcher', getQueryMatcher(schema, normalizedQuery)); - } - }, { - key: "asRxQuery", - get: function () { - return this; - } - }]); -}(); -export function _getDefaultQuery() { - return { - selector: {} - }; -} - -/** - * run this query through the QueryCache - */ -export function tunnelQueryCache(rxQuery) { - return rxQuery.collection._queryCache.getByQuery(rxQuery); -} -export function createRxQuery(op, queryObj, collection, other) { - runPluginHooks('preCreateRxQuery', { - op, - queryObj, - collection, - other - }); - var ret = new RxQueryBase(op, queryObj, collection, other); - - // ensure when created with same params, only one is created - ret = tunnelQueryCache(ret); - triggerCacheReplacement(collection); - return ret; -} - -/** - * Check if the current results-state is 
in sync with the database - * which means that no write event happened since the last run. - * @return false if not which means it should re-execute - */ -function _isResultsInSync(rxQuery) { - var currentLatestEventNumber = rxQuery.asRxQuery.collection._changeEventBuffer.counter; - if (rxQuery._latestChangeEvent >= currentLatestEventNumber) { - return true; - } else { - return false; - } -} - -/** - * wraps __ensureEqual() - * to ensure it does not run in parallel - * @return true if has changed, false if not - */ -function _ensureEqual(rxQuery) { - // Optimisation shortcut - if (rxQuery.collection.database.destroyed || _isResultsInSync(rxQuery)) { - return PROMISE_RESOLVE_FALSE; - } - rxQuery._ensureEqualQueue = rxQuery._ensureEqualQueue.then(() => __ensureEqual(rxQuery)); - return rxQuery._ensureEqualQueue; -} - -/** - * ensures that the results of this query is equal to the results which a query over the database would give - * @return true if results have changed - */ -function __ensureEqual(rxQuery) { - rxQuery._lastEnsureEqual = now(); - - /** - * Optimisation shortcuts - */ - if ( - // db is closed - rxQuery.collection.database.destroyed || - // nothing happened since last run - _isResultsInSync(rxQuery)) { - return PROMISE_RESOLVE_FALSE; - } - var ret = false; - var mustReExec = false; // if this becomes true, a whole execution over the database is made - if (rxQuery._latestChangeEvent === -1) { - // have not executed yet -> must run - mustReExec = true; - } - - /** - * try to use EventReduce to calculate the new results - */ - if (!mustReExec) { - var missedChangeEvents = rxQuery.asRxQuery.collection._changeEventBuffer.getFrom(rxQuery._latestChangeEvent + 1); - if (missedChangeEvents === null) { - // changeEventBuffer is of bounds -> we must re-execute over the database - mustReExec = true; - } else { - rxQuery._latestChangeEvent = rxQuery.asRxQuery.collection._changeEventBuffer.counter; - var runChangeEvents = 
rxQuery.asRxQuery.collection._changeEventBuffer.reduceByLastOfDoc(missedChangeEvents); - if (rxQuery.op === 'count') { - // 'count' query - var previousCount = ensureNotFalsy(rxQuery._result).count; - var newCount = previousCount; - runChangeEvents.forEach(cE => { - var didMatchBefore = cE.previousDocumentData && rxQuery.doesDocumentDataMatch(cE.previousDocumentData); - var doesMatchNow = rxQuery.doesDocumentDataMatch(cE.documentData); - if (!didMatchBefore && doesMatchNow) { - newCount++; - } - if (didMatchBefore && !doesMatchNow) { - newCount--; - } - }); - if (newCount !== previousCount) { - ret = true; // true because results changed - rxQuery._setResultData(newCount); - } - } else { - // 'find' or 'findOne' query - var eventReduceResult = calculateNewResults(rxQuery, runChangeEvents); - if (eventReduceResult.runFullQueryAgain) { - // could not calculate the new results, execute must be done - mustReExec = true; - } else if (eventReduceResult.changed) { - // we got the new results, we do not have to re-execute, mustReExec stays false - ret = true; // true because results changed - rxQuery._setResultData(eventReduceResult.newResults); - } - } - } - } - - // oh no we have to re-execute the whole query over the database - if (mustReExec) { - return rxQuery._execOverDatabase().then(newResultData => { - /** - * The RxStorage is defined to always first emit events and then return - * on bulkWrite() calls. So here we have to use the counter AFTER the execOverDatabase() - * has been run, not the one from before. - */ - rxQuery._latestChangeEvent = rxQuery.collection._changeEventBuffer.counter; - - // A count query needs a different has-changed check. 
- if (typeof newResultData === 'number') { - if (!rxQuery._result || newResultData !== rxQuery._result.count) { - ret = true; - rxQuery._setResultData(newResultData); - } - return ret; - } - if (!rxQuery._result || !areRxDocumentArraysEqual(rxQuery.collection.schema.primaryPath, newResultData, rxQuery._result.docsData)) { - ret = true; // true because results changed - rxQuery._setResultData(newResultData); - } - return ret; - }); - } - return Promise.resolve(ret); // true if results have changed -} - -/** - * @returns a format of the query that can be used with the storage - * when calling RxStorageInstance().query() - */ -export function prepareQuery(schema, mutateableQuery) { - if (!mutateableQuery.sort) { - throw newRxError('SNH', { - query: mutateableQuery - }); - } - - /** - * Store the query plan together with the - * prepared query to save performance. - */ - var queryPlan = getQueryPlan(schema, mutateableQuery); - return { - query: mutateableQuery, - queryPlan - }; -} - -/** - * Runs the query over the storage instance - * of the collection. - * Does some optimizations to ensure findById is used - * when specific queries are used. - */ -export async function queryCollection(rxQuery) { - var docs = []; - var collection = rxQuery.collection; - - /** - * Optimizations shortcut. 
- * If query is find-one-document-by-id, - * then we do not have to use the slow query() method - * but instead can use findDocumentsById() - */ - if (rxQuery.isFindOneByIdQuery) { - if (Array.isArray(rxQuery.isFindOneByIdQuery)) { - var docIds = rxQuery.isFindOneByIdQuery; - docIds = docIds.filter(docId => { - // first try to fill from docCache - var docData = rxQuery.collection._docCache.getLatestDocumentDataIfExists(docId); - if (docData) { - if (!docData._deleted) { - docs.push(docData); - } - return false; - } else { - return true; - } - }); - // otherwise get from storage - if (docIds.length > 0) { - var docsFromStorage = await collection.storageInstance.findDocumentsById(docIds, false); - appendToArray(docs, docsFromStorage); - } - } else { - var docId = rxQuery.isFindOneByIdQuery; - - // first try to fill from docCache - var docData = rxQuery.collection._docCache.getLatestDocumentDataIfExists(docId); - if (!docData) { - // otherwise get from storage - var fromStorageList = await collection.storageInstance.findDocumentsById([docId], false); - if (fromStorageList[0]) { - docData = fromStorageList[0]; - } - } - if (docData && !docData._deleted) { - docs.push(docData); - } - } - } else { - var preparedQuery = rxQuery.getPreparedQuery(); - var queryResult = await collection.storageInstance.query(preparedQuery); - docs = queryResult.documents; - } - return docs; -} - -/** - * Returns true if the given query - * selects exactly one document by its id. - * Used to optimize performance because these kind of - * queries do not have to run over an index and can use get-by-id instead. - * Returns false if no query of that kind. - * Returns the document id otherwise. 
- */ -export function isFindOneByIdQuery(primaryPath, query) { - // must have exactly one operator which must be $eq || $in - if (!query.skip && query.selector && Object.keys(query.selector).length === 1 && query.selector[primaryPath]) { - var value = query.selector[primaryPath]; - if (typeof value === 'string') { - return value; - } else if (Object.keys(value).length === 1 && typeof value.$eq === 'string') { - return value.$eq; - } - - // same with $in string arrays - if (Object.keys(value).length === 1 && Array.isArray(value.$eq) && - // must only contain strings - !value.$eq.find(r => typeof r !== 'string')) { - return value.$eq; - } - } - return false; -} -export function isRxQuery(obj) { - return obj instanceof RxQueryBase; -} -//# sourceMappingURL=rx-query.js.map \ No newline at end of file diff --git a/dist/esm/rx-query.js.map b/dist/esm/rx-query.js.map deleted file mode 100644 index 4f42389b4c4..00000000000 --- a/dist/esm/rx-query.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-query.js","names":["BehaviorSubject","firstValueFrom","merge","mergeMap","filter","map","startWith","distinctUntilChanged","shareReplay","sortObject","pluginMissing","overwriteGetterForCaching","now","PROMISE_RESOLVE_FALSE","RXJS_SHARE_REPLAY_DEFAULTS","ensureNotFalsy","areRxDocumentArraysEqual","appendToArray","newRxError","runPluginHooks","calculateNewResults","triggerCacheReplacement","getQueryMatcher","normalizeMangoQuery","runQueryUpdateFunction","RxQuerySingleResult","getQueryPlan","_queryCount","newQueryID","RxQueryBase","op","mangoQuery","collection","other","id","_execOverDatabaseCount","_creationTime","_lastEnsureEqual","uncached","refCount$","_result","_latestChangeEvent","_lastExecStart","_lastExecEnd","_ensureEqualQueue","_getDefaultQuery","isFindOneByIdQuery","schema","primaryPath","_proto","prototype","_setResultData","newResultData","Map","Array","from","values","newQueryResult","length","_execOverDatabase","preparedQuery","getPreparedQuery","result","stora
geInstance","count","mode","database","allowSlowCount","queryObj","ids","selector","$in","ret","mustBeQueried","forEach","docData","_docCache","getLatestDocumentDataIfExists","_deleted","doc","getCachedRxDocument","set","push","docs","findDocumentsById","primary","docsPromise","queryCollection","then","exec","throwIfMissing","name","query","_ensureEqual","$","toString","stringObj","value","JSON","stringify","hookInput","rxQuery","jsonSchema","$eq","index","unshift","prepareQuery","doesDocumentDataMatch","queryMatcher","remove","isArray","Promise","all","incrementalRemove","asRxQuery","update","_updateObj","patch","incrementalPatch","modify","mutationFunction","incrementalModify","where","_queryObj","sort","_params","skip","_amount","limit","_createClass","key","get","_$","results$","pipe","changeEvent","isLocal","prev","curr","time","useResult","documents","docsMap","slice","reactivity","getReactivityFactory","fromObservable","undefined","normalizedQuery","tunnelQueryCache","_queryCache","getByQuery","createRxQuery","_isResultsInSync","currentLatestEventNumber","_changeEventBuffer","counter","destroyed","__ensureEqual","mustReExec","missedChangeEvents","getFrom","runChangeEvents","reduceByLastOfDoc","previousCount","newCount","cE","didMatchBefore","previousDocumentData","doesMatchNow","documentData","eventReduceResult","runFullQueryAgain","changed","newResults","docsData","resolve","mutateableQuery","queryPlan","docIds","docId","docsFromStorage","fromStorageList","queryResult","Object","keys","find","r","isRxQuery","obj"],"sources":["../../src/rx-query.ts"],"sourcesContent":["import {\n BehaviorSubject,\n firstValueFrom,\n Observable,\n merge\n} from 'rxjs';\nimport {\n mergeMap,\n filter,\n map,\n startWith,\n distinctUntilChanged,\n shareReplay\n} from 'rxjs/operators';\nimport {\n sortObject,\n pluginMissing,\n overwriteGetterForCaching,\n now,\n PROMISE_RESOLVE_FALSE,\n RXJS_SHARE_REPLAY_DEFAULTS,\n ensureNotFalsy,\n areRxDocumentArraysEqual,\n appendToArray\n} 
from './plugins/utils/index.ts';\nimport {\n newRxError\n} from './rx-error.ts';\nimport {\n runPluginHooks\n} from './hooks.ts';\nimport type {\n RxCollection,\n RxDocument,\n RxQueryOP,\n RxQuery,\n MangoQuery,\n MangoQuerySortPart,\n MangoQuerySelector,\n PreparedQuery,\n RxChangeEvent,\n RxDocumentWriteData,\n RxDocumentData,\n QueryMatcher,\n RxJsonSchema,\n FilledMangoQuery,\n ModifyFunction\n} from './types/index.d.ts';\nimport { calculateNewResults } from './event-reduce.ts';\nimport { triggerCacheReplacement } from './query-cache.ts';\nimport { getQueryMatcher, normalizeMangoQuery, runQueryUpdateFunction } from './rx-query-helper.ts';\nimport { RxQuerySingleResult } from './rx-query-single-result.ts';\nimport { getQueryPlan } from './query-planner.ts';\n\nlet _queryCount = 0;\nconst newQueryID = function (): number {\n return ++_queryCount;\n};\n\nexport class RxQueryBase<\n RxDocType,\n RxQueryResult,\n OrmMethods = {},\n Reactivity = unknown,\n> {\n\n public id: number = newQueryID();\n\n /**\n * Some stats then are used for debugging and cache replacement policies\n */\n public _execOverDatabaseCount: number = 0;\n public _creationTime = now();\n\n // used in the query-cache to determine if the RxQuery can be cleaned up.\n public _lastEnsureEqual = 0;\n\n public uncached = false;\n\n // used to count the subscribers to the query\n public refCount$ = new BehaviorSubject(null);\n\n public isFindOneByIdQuery: false | string | string[];\n\n\n /**\n * Contains the current result state\n * or null if query has not run yet.\n */\n public _result: RxQuerySingleResult | null = null;\n\n\n constructor(\n public op: RxQueryOP,\n public mangoQuery: Readonly>,\n public collection: RxCollection,\n // used by some plugins\n public other: any = {}\n ) {\n if (!mangoQuery) {\n this.mangoQuery = _getDefaultQuery();\n }\n\n this.isFindOneByIdQuery = isFindOneByIdQuery(\n this.collection.schema.primaryPath as string,\n mangoQuery\n );\n }\n get $(): BehaviorSubject {\n if 
(!this._$) {\n\n const results$ = this.collection.$.pipe(\n /**\n * Performance shortcut.\n * Changes to local documents are not relevant for the query.\n */\n filter(changeEvent => !changeEvent.isLocal),\n /**\n * Start once to ensure the querying also starts\n * when there where no changes.\n */\n startWith(null),\n // ensure query results are up to date.\n mergeMap(() => _ensureEqual(this as any)),\n // use the current result set, written by _ensureEqual().\n map(() => this._result),\n // do not run stuff above for each new subscriber, only once.\n shareReplay(RXJS_SHARE_REPLAY_DEFAULTS),\n // do not proceed if result set has not changed.\n distinctUntilChanged((prev, curr) => {\n if (prev && prev.time === ensureNotFalsy(curr).time) {\n return true;\n } else {\n return false;\n }\n }),\n filter(result => !!result),\n /**\n * Map the result set to a single RxDocument or an array,\n * depending on query type\n */\n map((result) => {\n const useResult = ensureNotFalsy(result);\n if (this.op === 'count') {\n return useResult.count;\n } else if (this.op === 'findOne') {\n // findOne()-queries emit RxDocument or null\n return useResult.documents.length === 0 ? 
null : useResult.documents[0];\n } else if (this.op === 'findByIds') {\n return useResult.docsMap;\n } else {\n // find()-queries emit RxDocument[]\n // Flat copy the array so it won't matter if the user modifies it.\n return useResult.documents.slice(0);\n }\n })\n );\n\n this._$ = merge(\n results$,\n /**\n * Also add the refCount$ to the query observable\n * to allow us to count the amount of subscribers.\n */\n this.refCount$.pipe(\n filter(() => false)\n )\n );\n }\n return this._$ as any;\n }\n\n get $$(): Reactivity {\n const reactivity = this.collection.database.getReactivityFactory();\n return reactivity.fromObservable(\n this.$,\n undefined,\n this.collection.database\n );\n }\n\n // stores the changeEvent-number of the last handled change-event\n public _latestChangeEvent: -1 | number = -1;\n\n // time stamps on when the last full exec over the database has run\n // used to properly handle events that happen while the find-query is running\n public _lastExecStart: number = 0;\n public _lastExecEnd: number = 0;\n\n /**\n * ensures that the exec-runs\n * are not run in parallel\n */\n public _ensureEqualQueue: Promise = PROMISE_RESOLVE_FALSE;\n\n /**\n * Returns an observable that emits the results\n * This should behave like an rxjs-BehaviorSubject which means:\n * - Emit the current result-set on subscribe\n * - Emit the new result-set when an RxChangeEvent comes in\n * - Do not emit anything before the first result-set was created (no null)\n */\n public _$?: Observable;\n\n /**\n * set the new result-data as result-docs of the query\n * @param newResultData json-docs that were received from the storage\n */\n _setResultData(newResultData: RxDocumentData[] | number | Map>): void {\n if (typeof newResultData === 'number') {\n this._result = new RxQuerySingleResult(\n this.collection,\n [],\n newResultData\n );\n return;\n } else if (newResultData instanceof Map) {\n newResultData = Array.from((newResultData as Map>).values());\n }\n\n const 
newQueryResult = new RxQuerySingleResult(\n this.collection,\n newResultData,\n newResultData.length\n );\n this._result = newQueryResult;\n }\n\n /**\n * executes the query on the database\n * @return results-array with document-data\n */\n async _execOverDatabase(): Promise[] | number> {\n this._execOverDatabaseCount = this._execOverDatabaseCount + 1;\n this._lastExecStart = now();\n\n\n if (this.op === 'count') {\n const preparedQuery = this.getPreparedQuery();\n const result = await this.collection.storageInstance.count(preparedQuery);\n if (result.mode === 'slow' && !this.collection.database.allowSlowCount) {\n throw newRxError('QU14', {\n collection: this.collection,\n queryObj: this.mangoQuery\n });\n } else {\n return result.count;\n }\n }\n\n if (this.op === 'findByIds') {\n const ids: string[] = ensureNotFalsy(this.mangoQuery.selector as any)[this.collection.schema.primaryPath].$in;\n const ret = new Map>();\n const mustBeQueried: string[] = [];\n // first try to fill from docCache\n ids.forEach(id => {\n const docData = this.collection._docCache.getLatestDocumentDataIfExists(id);\n if (docData) {\n if (!docData._deleted) {\n const doc = this.collection._docCache.getCachedRxDocument(docData);\n ret.set(id, doc);\n }\n } else {\n mustBeQueried.push(id);\n }\n });\n // everything which was not in docCache must be fetched from the storage\n if (mustBeQueried.length > 0) {\n const docs = await this.collection.storageInstance.findDocumentsById(mustBeQueried, false);\n docs.forEach(docData => {\n const doc = this.collection._docCache.getCachedRxDocument(docData);\n ret.set(doc.primary, doc);\n });\n }\n return ret as any;\n }\n\n\n const docsPromise = queryCollection(this as any);\n return docsPromise.then(docs => {\n this._lastExecEnd = now();\n return docs;\n });\n }\n\n /**\n * Execute the query\n * To have an easier implementations,\n * just subscribe and use the first result\n */\n public exec(throwIfMissing: true): Promise>;\n public exec(): Promise;\n 
public exec(throwIfMissing?: boolean): Promise {\n if (throwIfMissing && this.op !== 'findOne') {\n throw newRxError('QU9', {\n collection: this.collection.name,\n query: this.mangoQuery,\n op: this.op\n });\n }\n\n\n /**\n * run _ensureEqual() here,\n * this will make sure that errors in the query which throw inside of the RxStorage,\n * will be thrown at this execution context and not in the background.\n */\n return _ensureEqual(this as any)\n .then(() => firstValueFrom(this.$))\n .then(result => {\n if (!result && throwIfMissing) {\n throw newRxError('QU10', {\n collection: this.collection.name,\n query: this.mangoQuery,\n op: this.op\n });\n } else {\n return result;\n }\n });\n }\n\n\n\n /**\n * cached call to get the queryMatcher\n * @overwrites itself with the actual value\n */\n get queryMatcher(): QueryMatcher> {\n const schema = this.collection.schema.jsonSchema;\n const normalizedQuery = normalizeMangoQuery(\n this.collection.schema.jsonSchema,\n this.mangoQuery\n );\n return overwriteGetterForCaching(\n this,\n 'queryMatcher',\n getQueryMatcher(\n schema,\n normalizedQuery\n ) as any\n );\n }\n\n /**\n * returns a string that is used for equal-comparisons\n * @overwrites itself with the actual value\n */\n toString(): string {\n const stringObj = sortObject({\n op: this.op,\n query: this.mangoQuery,\n other: this.other\n }, true);\n const value = JSON.stringify(stringObj);\n this.toString = () => value;\n return value;\n }\n\n /**\n * returns the prepared query\n * which can be send to the storage instance to query for documents.\n * @overwrites itself with the actual value.\n */\n getPreparedQuery(): PreparedQuery {\n const hookInput = {\n rxQuery: this,\n // can be mutated by the hooks so we have to deep clone first.\n mangoQuery: normalizeMangoQuery(\n this.collection.schema.jsonSchema,\n this.mangoQuery\n )\n };\n (hookInput.mangoQuery.selector as any)._deleted = { $eq: false };\n if (hookInput.mangoQuery.index) {\n 
hookInput.mangoQuery.index.unshift('_deleted');\n }\n runPluginHooks('prePrepareQuery', hookInput);\n\n const value = prepareQuery(\n this.collection.schema.jsonSchema,\n hookInput.mangoQuery as any\n );\n\n this.getPreparedQuery = () => value;\n return value;\n }\n\n /**\n * returns true if the document matches the query,\n * does not use the 'skip' and 'limit'\n */\n doesDocumentDataMatch(docData: RxDocType | any): boolean {\n // if doc is deleted, it cannot match\n if (docData._deleted) {\n return false;\n }\n\n return this.queryMatcher(docData);\n }\n\n /**\n * deletes all found documents\n * @return promise with deleted documents\n */\n remove(): Promise {\n return this\n .exec()\n .then(docs => {\n if (Array.isArray(docs)) {\n // TODO use a bulk operation instead of running .remove() on each document\n return Promise.all(docs.map(doc => doc.remove()));\n } else {\n return (docs as any).remove();\n }\n });\n }\n incrementalRemove(): Promise {\n return runQueryUpdateFunction(\n this.asRxQuery,\n (doc) => doc.incrementalRemove(),\n );\n }\n\n\n /**\n * helper function to transform RxQueryBase to RxQuery type\n */\n get asRxQuery(): RxQuery {\n return this as any;\n }\n\n /**\n * updates all found documents\n * @overwritten by plugin (optional)\n */\n update(_updateObj: any): Promise {\n throw pluginMissing('update');\n }\n\n patch(patch: Partial): Promise {\n return runQueryUpdateFunction(\n this.asRxQuery,\n (doc) => doc.patch(patch),\n );\n }\n incrementalPatch(patch: Partial): Promise {\n return runQueryUpdateFunction(\n this.asRxQuery,\n (doc) => doc.incrementalPatch(patch),\n );\n }\n modify(mutationFunction: ModifyFunction): Promise {\n return runQueryUpdateFunction(\n this.asRxQuery,\n (doc) => doc.modify(mutationFunction),\n );\n }\n incrementalModify(mutationFunction: ModifyFunction): Promise {\n return runQueryUpdateFunction(\n this.asRxQuery,\n (doc) => doc.incrementalModify(mutationFunction),\n );\n }\n\n\n // we only set some methods of 
query-builder here\n // because the others depend on these ones\n where(_queryObj: MangoQuerySelector | keyof RxDocType | string): RxQuery {\n throw pluginMissing('query-builder');\n }\n sort(_params: string | MangoQuerySortPart): RxQuery {\n throw pluginMissing('query-builder');\n }\n skip(_amount: number | null): RxQuery {\n throw pluginMissing('query-builder');\n }\n limit(_amount: number | null): RxQuery {\n throw pluginMissing('query-builder');\n }\n}\n\nexport function _getDefaultQuery(): MangoQuery {\n return {\n selector: {}\n };\n}\n\n/**\n * run this query through the QueryCache\n */\nexport function tunnelQueryCache(\n rxQuery: RxQueryBase\n): RxQuery {\n return rxQuery.collection._queryCache.getByQuery(rxQuery as any);\n}\n\nexport function createRxQuery(\n op: RxQueryOP,\n queryObj: MangoQuery,\n collection: RxCollection,\n other?: any\n) {\n runPluginHooks('preCreateRxQuery', {\n op,\n queryObj,\n collection,\n other\n });\n\n let ret = new RxQueryBase(op, queryObj, collection, other);\n\n // ensure when created with same params, only one is created\n ret = tunnelQueryCache(ret);\n triggerCacheReplacement(collection);\n\n return ret;\n}\n\n/**\n * Check if the current results-state is in sync with the database\n * which means that no write event happened since the last run.\n * @return false if not which means it should re-execute\n */\nfunction _isResultsInSync(rxQuery: RxQueryBase): boolean {\n const currentLatestEventNumber = rxQuery.asRxQuery.collection._changeEventBuffer.counter;\n if (rxQuery._latestChangeEvent >= currentLatestEventNumber) {\n return true;\n } else {\n return false;\n }\n}\n\n\n/**\n * wraps __ensureEqual()\n * to ensure it does not run in parallel\n * @return true if has changed, false if not\n */\nfunction _ensureEqual(rxQuery: RxQueryBase): Promise {\n // Optimisation shortcut\n if (\n rxQuery.collection.database.destroyed ||\n _isResultsInSync(rxQuery)\n ) {\n return PROMISE_RESOLVE_FALSE;\n }\n\n rxQuery._ensureEqualQueue = 
rxQuery._ensureEqualQueue\n .then(() => __ensureEqual(rxQuery));\n return rxQuery._ensureEqualQueue;\n}\n\n/**\n * ensures that the results of this query is equal to the results which a query over the database would give\n * @return true if results have changed\n */\nfunction __ensureEqual(rxQuery: RxQueryBase): Promise {\n rxQuery._lastEnsureEqual = now();\n\n /**\n * Optimisation shortcuts\n */\n if (\n // db is closed\n rxQuery.collection.database.destroyed ||\n // nothing happened since last run\n _isResultsInSync(rxQuery)\n ) {\n return PROMISE_RESOLVE_FALSE;\n }\n\n let ret = false;\n let mustReExec = false; // if this becomes true, a whole execution over the database is made\n if (rxQuery._latestChangeEvent === -1) {\n // have not executed yet -> must run\n mustReExec = true;\n }\n\n /**\n * try to use EventReduce to calculate the new results\n */\n if (!mustReExec) {\n const missedChangeEvents = rxQuery.asRxQuery.collection._changeEventBuffer.getFrom(rxQuery._latestChangeEvent + 1);\n if (missedChangeEvents === null) {\n // changeEventBuffer is of bounds -> we must re-execute over the database\n mustReExec = true;\n } else {\n rxQuery._latestChangeEvent = rxQuery.asRxQuery.collection._changeEventBuffer.counter;\n\n const runChangeEvents: RxChangeEvent[] = rxQuery.asRxQuery.collection\n ._changeEventBuffer\n .reduceByLastOfDoc(missedChangeEvents);\n\n if (rxQuery.op === 'count') {\n // 'count' query\n const previousCount = ensureNotFalsy(rxQuery._result).count;\n let newCount = previousCount;\n runChangeEvents.forEach(cE => {\n const didMatchBefore = cE.previousDocumentData && rxQuery.doesDocumentDataMatch(cE.previousDocumentData);\n const doesMatchNow = rxQuery.doesDocumentDataMatch(cE.documentData);\n\n if (!didMatchBefore && doesMatchNow) {\n newCount++;\n }\n if (didMatchBefore && !doesMatchNow) {\n newCount--;\n }\n });\n if (newCount !== previousCount) {\n ret = true; // true because results changed\n rxQuery._setResultData(newCount as any);\n }\n } 
else {\n // 'find' or 'findOne' query\n const eventReduceResult = calculateNewResults(\n rxQuery as any,\n runChangeEvents\n );\n if (eventReduceResult.runFullQueryAgain) {\n // could not calculate the new results, execute must be done\n mustReExec = true;\n } else if (eventReduceResult.changed) {\n // we got the new results, we do not have to re-execute, mustReExec stays false\n ret = true; // true because results changed\n rxQuery._setResultData(eventReduceResult.newResults as any);\n }\n }\n }\n }\n\n // oh no we have to re-execute the whole query over the database\n if (mustReExec) {\n return rxQuery._execOverDatabase()\n .then(newResultData => {\n\n /**\n * The RxStorage is defined to always first emit events and then return\n * on bulkWrite() calls. So here we have to use the counter AFTER the execOverDatabase()\n * has been run, not the one from before.\n */\n rxQuery._latestChangeEvent = rxQuery.collection._changeEventBuffer.counter;\n\n // A count query needs a different has-changed check.\n if (typeof newResultData === 'number') {\n if (\n !rxQuery._result ||\n newResultData !== rxQuery._result.count\n ) {\n ret = true;\n rxQuery._setResultData(newResultData as any);\n }\n return ret;\n }\n if (\n !rxQuery._result ||\n !areRxDocumentArraysEqual(\n rxQuery.collection.schema.primaryPath,\n newResultData,\n rxQuery._result.docsData\n )\n ) {\n ret = true; // true because results changed\n rxQuery._setResultData(newResultData as any);\n }\n return ret;\n });\n }\n return Promise.resolve(ret); // true if results have changed\n}\n\n/**\n * @returns a format of the query that can be used with the storage\n * when calling RxStorageInstance().query()\n */\nexport function prepareQuery(\n schema: RxJsonSchema>,\n mutateableQuery: FilledMangoQuery\n): PreparedQuery {\n if (!mutateableQuery.sort) {\n throw newRxError('SNH', {\n query: mutateableQuery\n });\n }\n\n /**\n * Store the query plan together with the\n * prepared query to save performance.\n */\n const 
queryPlan = getQueryPlan(\n schema,\n mutateableQuery\n );\n\n return {\n query: mutateableQuery,\n queryPlan\n };\n}\n\n/**\n * Runs the query over the storage instance\n * of the collection.\n * Does some optimizations to ensure findById is used\n * when specific queries are used.\n */\nexport async function queryCollection(\n rxQuery: RxQuery | RxQueryBase\n): Promise[]> {\n let docs: RxDocumentData[] = [];\n const collection = rxQuery.collection;\n\n /**\n * Optimizations shortcut.\n * If query is find-one-document-by-id,\n * then we do not have to use the slow query() method\n * but instead can use findDocumentsById()\n */\n if (rxQuery.isFindOneByIdQuery) {\n if (Array.isArray(rxQuery.isFindOneByIdQuery)) {\n let docIds = rxQuery.isFindOneByIdQuery;\n docIds = docIds.filter(docId => {\n // first try to fill from docCache\n const docData = rxQuery.collection._docCache.getLatestDocumentDataIfExists(docId);\n if (docData) {\n if (!docData._deleted) {\n docs.push(docData);\n }\n return false;\n } else {\n return true;\n }\n });\n // otherwise get from storage\n if (docIds.length > 0) {\n const docsFromStorage = await collection.storageInstance.findDocumentsById(docIds, false);\n appendToArray(docs, docsFromStorage);\n }\n } else {\n const docId = rxQuery.isFindOneByIdQuery;\n\n // first try to fill from docCache\n let docData = rxQuery.collection._docCache.getLatestDocumentDataIfExists(docId);\n if (!docData) {\n // otherwise get from storage\n const fromStorageList = await collection.storageInstance.findDocumentsById([docId], false);\n if (fromStorageList[0]) {\n docData = fromStorageList[0];\n }\n }\n if (docData && !docData._deleted) {\n docs.push(docData);\n }\n }\n } else {\n const preparedQuery = rxQuery.getPreparedQuery();\n const queryResult = await collection.storageInstance.query(preparedQuery);\n docs = queryResult.documents;\n }\n return docs;\n\n}\n\n/**\n * Returns true if the given query\n * selects exactly one document by its id.\n * Used to 
optimize performance because these kind of\n * queries do not have to run over an index and can use get-by-id instead.\n * Returns false if no query of that kind.\n * Returns the document id otherwise.\n */\nexport function isFindOneByIdQuery(\n primaryPath: string,\n query: MangoQuery\n): false | string | string[] {\n // must have exactly one operator which must be $eq || $in\n if (\n !query.skip &&\n query.selector &&\n Object.keys(query.selector).length === 1 &&\n query.selector[primaryPath]\n ) {\n const value: any = query.selector[primaryPath];\n if (typeof value === 'string') {\n return value;\n } else if (\n Object.keys(value).length === 1 &&\n typeof value.$eq === 'string'\n ) {\n return value.$eq;\n }\n\n // same with $in string arrays\n if (\n Object.keys(value).length === 1 &&\n Array.isArray(value.$eq) &&\n // must only contain strings\n !(value.$eq as any[]).find(r => typeof r !== 'string')\n ) {\n return value.$eq;\n }\n }\n return false;\n}\n\n\n\nexport function isRxQuery(obj: any): boolean {\n return obj instanceof 
RxQueryBase;\n}\n"],"mappings":";AAAA,SACIA,eAAe,EACfC,cAAc,EAEdC,KAAK,QACF,MAAM;AACb,SACIC,QAAQ,EACRC,MAAM,EACNC,GAAG,EACHC,SAAS,EACTC,oBAAoB,EACpBC,WAAW,QACR,gBAAgB;AACvB,SACIC,UAAU,EACVC,aAAa,EACbC,yBAAyB,EACzBC,GAAG,EACHC,qBAAqB,EACrBC,0BAA0B,EAC1BC,cAAc,EACdC,wBAAwB,EACxBC,aAAa,QACV,0BAA0B;AACjC,SACIC,UAAU,QACP,eAAe;AACtB,SACIC,cAAc,QACX,YAAY;AAkBnB,SAASC,mBAAmB,QAAQ,mBAAmB;AACvD,SAASC,uBAAuB,QAAQ,kBAAkB;AAC1D,SAASC,eAAe,EAAEC,mBAAmB,EAAEC,sBAAsB,QAAQ,sBAAsB;AACnG,SAASC,mBAAmB,QAAQ,6BAA6B;AACjE,SAASC,YAAY,QAAQ,oBAAoB;AAEjD,IAAIC,WAAW,GAAG,CAAC;AACnB,IAAMC,UAAU,GAAG,SAAAA,CAAA,EAAoB;EACnC,OAAO,EAAED,WAAW;AACxB,CAAC;AAED,WAAaE,WAAW;EASpB;AACJ;AACA;;EAII;;EAKA;;EAMA;AACJ;AACA;AACA;;EAII,SAAAA,YACWC,EAAa,EACbC,UAA2C,EAC3CC,UAAmC;EAC1C;EACOC,KAAU,GAAG,CAAC,CAAC,EACxB;IAAA,KAhCKC,EAAE,GAAWN,UAAU,CAAC,CAAC;IAAA,KAKzBO,sBAAsB,GAAW,CAAC;IAAA,KAClCC,aAAa,GAAGxB,GAAG,CAAC,CAAC;IAAA,KAGrByB,gBAAgB,GAAG,CAAC;IAAA,KAEpBC,QAAQ,GAAG,KAAK;IAAA,KAGhBC,SAAS,GAAG,IAAIvC,eAAe,CAAC,IAAI,CAAC;IAAA,KASrCwC,OAAO,GAA0C,IAAI;IAAA,KA6FrDC,kBAAkB,GAAgB,CAAC,CAAC;IAAA,KAIpCC,cAAc,GAAW,CAAC;IAAA,KAC1BC,YAAY,GAAW,CAAC;IAAA,KAMxBC,iBAAiB,GAAqB/B,qBAAqB;IAAA,KApGvDiB,EAAa,GAAbA,EAAa;IAAA,KACbC,UAA2C,GAA3CA,UAA2C;IAAA,KAC3CC,UAAmC,GAAnCA,UAAmC;IAAA,KAEnCC,KAAU,GAAVA,KAAU;IAEjB,IAAI,CAACF,UAAU,EAAE;MACb,IAAI,CAACA,UAAU,GAAGc,gBAAgB,CAAC,CAAC;IACxC;IAEA,IAAI,CAACC,kBAAkB,GAAGA,kBAAkB,CACxC,IAAI,CAACd,UAAU,CAACe,MAAM,CAACC,WAAW,EAClCjB,UACJ,CAAC;EACL;EAAC,IAAAkB,MAAA,GAAApB,WAAA,CAAAqB,SAAA;EAwFD;AACJ;AACA;AACA;AACA;AACA;AACA;EAGI;AACJ;AACA;AACA;EAHID,MAAA,CAIAE,cAAc,GAAd,SAAAA,eAAeC,aAA4F,EAAQ;IAC/G,IAAI,OAAOA,aAAa,KAAK,QAAQ,EAAE;MACnC,IAAI,CAACZ,OAAO,GAAG,IAAIf,mBAAmB,CAClC,IAAI,CAACO,UAAU,EACf,EAAE,EACFoB,aACJ,CAAC;MACD;IACJ,CAAC,MAAM,IAAIA,aAAa,YAAYC,GAAG,EAAE;MACrCD,aAAa,GAAGE,KAAK,CAACC,IAAI,CAAEH,aAAa,CAA4CI,MAAM,CAAC,CAAC,CAAC;IAClG;IAEA,IAAMC,cAAc,GAAG,IAAIhC,mBAAmB,CAC1C,IAAI,CAACO,UAAU,EACfoB,aAAa,EACbA,aAAa,CAACM,MAClB,CAAC;IACD,IAAI,CAAClB,OAAO,GAAGiB,cAAc;EACjC;;EAEA;AACJ;AACA;AACA,KA
HI;EAAAR,MAAA,CAIMU,iBAAiB,GAAvB,eAAAA,kBAAA,EAAyE;IACrE,IAAI,CAACxB,sBAAsB,GAAG,IAAI,CAACA,sBAAsB,GAAG,CAAC;IAC7D,IAAI,CAACO,cAAc,GAAG9B,GAAG,CAAC,CAAC;IAG3B,IAAI,IAAI,CAACkB,EAAE,KAAK,OAAO,EAAE;MACrB,IAAM8B,aAAa,GAAG,IAAI,CAACC,gBAAgB,CAAC,CAAC;MAC7C,IAAMC,MAAM,GAAG,MAAM,IAAI,CAAC9B,UAAU,CAAC+B,eAAe,CAACC,KAAK,CAACJ,aAAa,CAAC;MACzE,IAAIE,MAAM,CAACG,IAAI,KAAK,MAAM,IAAI,CAAC,IAAI,CAACjC,UAAU,CAACkC,QAAQ,CAACC,cAAc,EAAE;QACpE,MAAMjD,UAAU,CAAC,MAAM,EAAE;UACrBc,UAAU,EAAE,IAAI,CAACA,UAAU;UAC3BoC,QAAQ,EAAE,IAAI,CAACrC;QACnB,CAAC,CAAC;MACN,CAAC,MAAM;QACH,OAAO+B,MAAM,CAACE,KAAK;MACvB;IACJ;IAEA,IAAI,IAAI,CAAClC,EAAE,KAAK,WAAW,EAAE;MACzB,IAAMuC,GAAa,GAAGtD,cAAc,CAAC,IAAI,CAACgB,UAAU,CAACuC,QAAe,CAAC,CAAC,IAAI,CAACtC,UAAU,CAACe,MAAM,CAACC,WAAW,CAAC,CAACuB,GAAG;MAC7G,IAAMC,GAAG,GAAG,IAAInB,GAAG,CAAgC,CAAC;MACpD,IAAMoB,aAAuB,GAAG,EAAE;MAClC;MACAJ,GAAG,CAACK,OAAO,CAACxC,EAAE,IAAI;QACd,IAAMyC,OAAO,GAAG,IAAI,CAAC3C,UAAU,CAAC4C,SAAS,CAACC,6BAA6B,CAAC3C,EAAE,CAAC;QAC3E,IAAIyC,OAAO,EAAE;UACT,IAAI,CAACA,OAAO,CAACG,QAAQ,EAAE;YACnB,IAAMC,GAAG,GAAG,IAAI,CAAC/C,UAAU,CAAC4C,SAAS,CAACI,mBAAmB,CAACL,OAAO,CAAC;YAClEH,GAAG,CAACS,GAAG,CAAC/C,EAAE,EAAE6C,GAAG,CAAC;UACpB;QACJ,CAAC,MAAM;UACHN,aAAa,CAACS,IAAI,CAAChD,EAAE,CAAC;QAC1B;MACJ,CAAC,CAAC;MACF;MACA,IAAIuC,aAAa,CAACf,MAAM,GAAG,CAAC,EAAE;QAC1B,IAAMyB,IAAI,GAAG,MAAM,IAAI,CAACnD,UAAU,CAAC+B,eAAe,CAACqB,iBAAiB,CAACX,aAAa,EAAE,KAAK,CAAC;QAC1FU,IAAI,CAACT,OAAO,CAACC,OAAO,IAAI;UACpB,IAAMI,GAAG,GAAG,IAAI,CAAC/C,UAAU,CAAC4C,SAAS,CAACI,mBAAmB,CAACL,OAAO,CAAC;UAClEH,GAAG,CAACS,GAAG,CAACF,GAAG,CAACM,OAAO,EAAEN,GAAG,CAAC;QAC7B,CAAC,CAAC;MACN;MACA,OAAOP,GAAG;IACd;IAGA,IAAMc,WAAW,GAAGC,eAAe,CAAY,IAAW,CAAC;IAC3D,OAAOD,WAAW,CAACE,IAAI,CAACL,IAAI,IAAI;MAC5B,IAAI,CAACxC,YAAY,GAAG/B,GAAG,CAAC,CAAC;MACzB,OAAOuE,IAAI;IACf,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA;AACA,KAJI;EAAAlC,MAAA,CAOOwC,IAAI,GAAX,SAAAA,KAAYC,cAAwB,EAAgB;IAChD,IAAIA,cAAc,IAAI,IAAI,CAAC5D,EAAE,KAAK,SAAS,EAAE;MACzC,MAAMZ,UAAU,CAAC,KAAK,EAAE;QACpBc,UAAU,EAAE,IAAI,CAACA,UAAU,CAAC2D,IAAI;QAChCC,KAAK,EAA
E,IAAI,CAAC7D,UAAU;QACtBD,EAAE,EAAE,IAAI,CAACA;MACb,CAAC,CAAC;IACN;;IAGA;AACR;AACA;AACA;AACA;IACQ,OAAO+D,YAAY,CAAC,IAAW,CAAC,CAC3BL,IAAI,CAAC,MAAMvF,cAAc,CAAC,IAAI,CAAC6F,CAAC,CAAC,CAAC,CAClCN,IAAI,CAAC1B,MAAM,IAAI;MACZ,IAAI,CAACA,MAAM,IAAI4B,cAAc,EAAE;QAC3B,MAAMxE,UAAU,CAAC,MAAM,EAAE;UACrBc,UAAU,EAAE,IAAI,CAACA,UAAU,CAAC2D,IAAI;UAChCC,KAAK,EAAE,IAAI,CAAC7D,UAAU;UACtBD,EAAE,EAAE,IAAI,CAACA;QACb,CAAC,CAAC;MACN,CAAC,MAAM;QACH,OAAOgC,MAAM;MACjB;IACJ,CAAC,CAAC;EACV;;EAIA;AACJ;AACA;AACA,KAHI;EAoBA;AACJ;AACA;AACA;EAHIb,MAAA,CAIA8C,QAAQ,GAAR,SAAAA,SAAA,EAAmB;IACf,IAAMC,SAAS,GAAGvF,UAAU,CAAC;MACzBqB,EAAE,EAAE,IAAI,CAACA,EAAE;MACX8D,KAAK,EAAE,IAAI,CAAC7D,UAAU;MACtBE,KAAK,EAAE,IAAI,CAACA;IAChB,CAAC,EAAE,IAAI,CAAC;IACR,IAAMgE,KAAK,GAAGC,IAAI,CAACC,SAAS,CAACH,SAAS,CAAC;IACvC,IAAI,CAACD,QAAQ,GAAG,MAAME,KAAK;IAC3B,OAAOA,KAAK;EAChB;;EAEA;AACJ;AACA;AACA;AACA,KAJI;EAAAhD,MAAA,CAKAY,gBAAgB,GAAhB,SAAAA,iBAAA,EAA6C;IACzC,IAAMuC,SAAS,GAAG;MACdC,OAAO,EAAE,IAAI;MACb;MACAtE,UAAU,EAAER,mBAAmB,CAC3B,IAAI,CAACS,UAAU,CAACe,MAAM,CAACuD,UAAU,EACjC,IAAI,CAACvE,UACT;IACJ,CAAC;IACAqE,SAAS,CAACrE,UAAU,CAACuC,QAAQ,CAASQ,QAAQ,GAAG;MAAEyB,GAAG,EAAE;IAAM,CAAC;IAChE,IAAIH,SAAS,CAACrE,UAAU,CAACyE,KAAK,EAAE;MAC5BJ,SAAS,CAACrE,UAAU,CAACyE,KAAK,CAACC,OAAO,CAAC,UAAU,CAAC;IAClD;IACAtF,cAAc,CAAC,iBAAiB,EAAEiF,SAAS,CAAC;IAE5C,IAAMH,KAAK,GAAGS,YAAY,CACtB,IAAI,CAAC1E,UAAU,CAACe,MAAM,CAACuD,UAAU,EACjCF,SAAS,CAACrE,UACd,CAAC;IAED,IAAI,CAAC8B,gBAAgB,GAAG,MAAMoC,KAAK;IACnC,OAAOA,KAAK;EAChB;;EAEA;AACJ;AACA;AACA,KAHI;EAAAhD,MAAA,CAIA0D,qBAAqB,GAArB,SAAAA,sBAAsBhC,OAAwB,EAAW;IACrD;IACA,IAAIA,OAAO,CAACG,QAAQ,EAAE;MAClB,OAAO,KAAK;IAChB;IAEA,OAAO,IAAI,CAAC8B,YAAY,CAACjC,OAAO,CAAC;EACrC;;EAEA;AACJ;AACA;AACA,KAHI;EAAA1B,MAAA,CAIA4D,MAAM,GAAN,SAAAA,OAAA,EAAiC;IAC7B,OAAO,IAAI,CACNpB,IAAI,CAAC,CAAC,CACND,IAAI,CAACL,IAAI,IAAI;MACV,IAAI7B,KAAK,CAACwD,OAAO,CAAC3B,IAAI,CAAC,EAAE;QACrB;QACA,OAAO4B,OAAO,CAACC,GAAG,CAAC7B,IAAI,CAAC9E,GAAG,CAAC0E,GAAG,IAAIA,GAAG,CAAC8B,MAAM,CAAC,CAAC,CAAC,CAAC;MACrD,CAAC,MAAM;QACH,OAAQ1B,IAAI,CAAS0B,MAA
M,CAAC,CAAC;MACjC;IACJ,CAAC,CAAC;EACV,CAAC;EAAA5D,MAAA,CACDgE,iBAAiB,GAAjB,SAAAA,kBAAA,EAA4C;IACxC,OAAOzF,sBAAsB,CACzB,IAAI,CAAC0F,SAAS,EACbnC,GAAG,IAAKA,GAAG,CAACkC,iBAAiB,CAAC,CACnC,CAAC;EACL;;EAGA;AACJ;AACA,KAFI;EAOA;AACJ;AACA;AACA;EAHIhE,MAAA,CAIAkE,MAAM,GAAN,SAAAA,OAAOC,UAAe,EAA0B;IAC5C,MAAM1G,aAAa,CAAC,QAAQ,CAAC;EACjC,CAAC;EAAAuC,MAAA,CAEDoE,KAAK,GAAL,SAAAA,MAAMA,MAAyB,EAA0B;IACrD,OAAO7F,sBAAsB,CACzB,IAAI,CAAC0F,SAAS,EACbnC,GAAG,IAAKA,GAAG,CAACsC,KAAK,CAACA,MAAK,CAC5B,CAAC;EACL,CAAC;EAAApE,MAAA,CACDqE,gBAAgB,GAAhB,SAAAA,iBAAiBD,KAAyB,EAA0B;IAChE,OAAO7F,sBAAsB,CACzB,IAAI,CAAC0F,SAAS,EACbnC,GAAG,IAAKA,GAAG,CAACuC,gBAAgB,CAACD,KAAK,CACvC,CAAC;EACL,CAAC;EAAApE,MAAA,CACDsE,MAAM,GAAN,SAAAA,OAAOC,gBAA2C,EAA0B;IACxE,OAAOhG,sBAAsB,CACzB,IAAI,CAAC0F,SAAS,EACbnC,GAAG,IAAKA,GAAG,CAACwC,MAAM,CAACC,gBAAgB,CACxC,CAAC;EACL,CAAC;EAAAvE,MAAA,CACDwE,iBAAiB,GAAjB,SAAAA,kBAAkBD,gBAA2C,EAA0B;IACnF,OAAOhG,sBAAsB,CACzB,IAAI,CAAC0F,SAAS,EACbnC,GAAG,IAAKA,GAAG,CAAC0C,iBAAiB,CAACD,gBAAgB,CACnD,CAAC;EACL;;EAGA;EACA;EAAA;EAAAvE,MAAA,CACAyE,KAAK,GAAL,SAAAA,MAAMC,SAAmE,EAAqC;IAC1G,MAAMjH,aAAa,CAAC,eAAe,CAAC;EACxC,CAAC;EAAAuC,MAAA,CACD2E,IAAI,GAAJ,SAAAA,KAAKC,OAA+C,EAAqC;IACrF,MAAMnH,aAAa,CAAC,eAAe,CAAC;EACxC,CAAC;EAAAuC,MAAA,CACD6E,IAAI,GAAJ,SAAAA,KAAKC,OAAsB,EAAqC;IAC5D,MAAMrH,aAAa,CAAC,eAAe,CAAC;EACxC,CAAC;EAAAuC,MAAA,CACD+E,KAAK,GAAL,SAAAA,MAAMD,OAAsB,EAAqC;IAC7D,MAAMrH,aAAa,CAAC,eAAe,CAAC;EACxC,CAAC;EAAA,OAAAuH,YAAA,CAAApG,WAAA;IAAAqG,GAAA;IAAAC,GAAA,EAnXD,SAAAA,CAAA,EAAwC;MACpC,IAAI,CAAC,IAAI,CAACC,EAAE,EAAE;QAEV,IAAMC,QAAQ,GAAG,IAAI,CAACrG,UAAU,CAAC8D,CAAC,CAACwC,IAAI;QACnC;AAChB;AACA;AACA;QACgBlI,MAAM,CAACmI,WAAW,IAAI,CAACA,WAAW,CAACC,OAAO,CAAC;QAC3C;AAChB;AACA;AACA;QACgBlI,SAAS,CAAC,IAAI,CAAC;QACf;QACAH,QAAQ,CAAC,MAAM0F,YAAY,CAAC,IAAW,CAAC,CAAC;QACzC;QACAxF,GAAG,CAAC,MAAM,IAAI,CAACmC,OAAO,CAAC;QACvB;QACAhC,WAAW,CAACM,0BAA0B,CAAC;QACvC;QACAP,oBAAoB,CAAC,CAACkI,IAAI,EAAEC,IAAI,KAAK;UACjC,IAAID,IAAI,IAAIA,IAAI,CAACE,IAAI,KAAK5H,cAAc,CAAC2H,IAAI,CAAC,CAACC,IAAI,EAAE;YACjD,OAAO,IAAI;UACf
,CAAC,MAAM;YACH,OAAO,KAAK;UAChB;QACJ,CAAC,CAAC,EACFvI,MAAM,CAAC0D,MAAM,IAAI,CAAC,CAACA,MAAM,CAAC;QAC1B;AAChB;AACA;AACA;QACgBzD,GAAG,CAAEyD,MAAM,IAAK;UACZ,IAAM8E,SAAS,GAAG7H,cAAc,CAAC+C,MAAM,CAAC;UACxC,IAAI,IAAI,CAAChC,EAAE,KAAK,OAAO,EAAE;YACrB,OAAO8G,SAAS,CAAC5E,KAAK;UAC1B,CAAC,MAAM,IAAI,IAAI,CAAClC,EAAE,KAAK,SAAS,EAAE;YAC9B;YACA,OAAO8G,SAAS,CAACC,SAAS,CAACnF,MAAM,KAAK,CAAC,GAAG,IAAI,GAAGkF,SAAS,CAACC,SAAS,CAAC,CAAC,CAAC;UAC3E,CAAC,MAAM,IAAI,IAAI,CAAC/G,EAAE,KAAK,WAAW,EAAE;YAChC,OAAO8G,SAAS,CAACE,OAAO;UAC5B,CAAC,MAAM;YACH;YACA;YACA,OAAOF,SAAS,CAACC,SAAS,CAACE,KAAK,CAAC,CAAC,CAAC;UACvC;QACJ,CAAC,CACL,CAAC;QAED,IAAI,CAACX,EAAE,GAAGlI,KAAK,CACXmI,QAAQ;QACR;AAChB;AACA;AACA;QACgB,IAAI,CAAC9F,SAAS,CAAC+F,IAAI,CACflI,MAAM,CAAC,MAAM,KAAK,CACtB,CACJ,CAAC;MACL;MACA,OAAO,IAAI,CAACgI,EAAE;IAClB;EAAC;IAAAF,GAAA;IAAAC,GAAA,EAED,SAAAA,CAAA,EAAqB;MACjB,IAAMa,UAAU,GAAG,IAAI,CAAChH,UAAU,CAACkC,QAAQ,CAAC+E,oBAAoB,CAAC,CAAC;MAClE,OAAOD,UAAU,CAACE,cAAc,CAC5B,IAAI,CAACpD,CAAC,EACNqD,SAAS,EACT,IAAI,CAACnH,UAAU,CAACkC,QACpB,CAAC;IACL;;IAEA;;IAGA;IACA;;IAIA;AACJ;AACA;AACA;EAHI;IAAAgE,GAAA;IAAAC,GAAA,EA2IA,SAAAA,CAAA,EAAiE;MAC7D,IAAMpF,MAAM,GAAG,IAAI,CAACf,UAAU,CAACe,MAAM,CAACuD,UAAU;MAChD,IAAM8C,eAAe,GAAG7H,mBAAmB,CACvC,IAAI,CAACS,UAAU,CAACe,MAAM,CAACuD,UAAU,EACjC,IAAI,CAACvE,UACT,CAAC;MACD,OAAOpB,yBAAyB,CAC5B,IAAI,EACJ,cAAc,EACdW,eAAe,CACXyB,MAAM,EACNqG,eACJ,CACJ,CAAC;IACL;EAAC;IAAAlB,GAAA;IAAAC,GAAA,EAsFD,SAAAA,CAAA,EAAmD;MAC/C,OAAO,IAAI;IACf;EAAC;AAAA;AAoDL,OAAO,SAAStF,gBAAgBA,CAAA,EAAqC;EACjE,OAAO;IACHyB,QAAQ,EAAE,CAAC;EACf,CAAC;AACL;;AAEA;AACA;AACA;AACA,OAAO,SAAS+E,gBAAgBA,CAC5BhD,OAAmD,EACb;EACtC,OAAOA,OAAO,CAACrE,UAAU,CAACsH,WAAW,CAACC,UAAU,CAAClD,OAAc,CAAC;AACpE;AAEA,OAAO,SAASmD,aAAaA,CACzB1H,EAAa,EACbsC,QAA+B,EAC/BpC,UAAmC,EACnCC,KAAW,EACb;EACEd,cAAc,CAAC,kBAAkB,EAAE;IAC/BW,EAAE;IACFsC,QAAQ;IACRpC,UAAU;IACVC;EACJ,CAAC,CAAC;EAEF,IAAIuC,GAAG,GAAG,IAAI3C,WAAW,CAAiBC,EAAE,EAAEsC,QAAQ,EAAEpC,UAAU,EAAEC,KAAK,CAAC;;EAE1E;EACAuC,GAAG,GAAG6E,gBAAgB,CAAC7E,GAAG,CAAC;EAC3BnD,uBAAuB,CAACW,UAAU
,CAAC;EAEnC,OAAOwC,GAAG;AACd;;AAEA;AACA;AACA;AACA;AACA;AACA,SAASiF,gBAAgBA,CAACpD,OAA8B,EAAW;EAC/D,IAAMqD,wBAAwB,GAAGrD,OAAO,CAACa,SAAS,CAAClF,UAAU,CAAC2H,kBAAkB,CAACC,OAAO;EACxF,IAAIvD,OAAO,CAAC5D,kBAAkB,IAAIiH,wBAAwB,EAAE;IACxD,OAAO,IAAI;EACf,CAAC,MAAM;IACH,OAAO,KAAK;EAChB;AACJ;;AAGA;AACA;AACA;AACA;AACA;AACA,SAAS7D,YAAYA,CAACQ,OAA8B,EAAoB;EACpE;EACA,IACIA,OAAO,CAACrE,UAAU,CAACkC,QAAQ,CAAC2F,SAAS,IACrCJ,gBAAgB,CAACpD,OAAO,CAAC,EAC3B;IACE,OAAOxF,qBAAqB;EAChC;EAEAwF,OAAO,CAACzD,iBAAiB,GAAGyD,OAAO,CAACzD,iBAAiB,CAChD4C,IAAI,CAAC,MAAMsE,aAAa,CAACzD,OAAO,CAAC,CAAC;EACvC,OAAOA,OAAO,CAACzD,iBAAiB;AACpC;;AAEA;AACA;AACA;AACA;AACA,SAASkH,aAAaA,CAAYzD,OAAoC,EAAoB;EACtFA,OAAO,CAAChE,gBAAgB,GAAGzB,GAAG,CAAC,CAAC;;EAEhC;AACJ;AACA;EACI;EACI;EACAyF,OAAO,CAACrE,UAAU,CAACkC,QAAQ,CAAC2F,SAAS;EACrC;EACAJ,gBAAgB,CAACpD,OAAO,CAAC,EAC3B;IACE,OAAOxF,qBAAqB;EAChC;EAEA,IAAI2D,GAAG,GAAG,KAAK;EACf,IAAIuF,UAAU,GAAG,KAAK,CAAC,CAAC;EACxB,IAAI1D,OAAO,CAAC5D,kBAAkB,KAAK,CAAC,CAAC,EAAE;IACnC;IACAsH,UAAU,GAAG,IAAI;EACrB;;EAEA;AACJ;AACA;EACI,IAAI,CAACA,UAAU,EAAE;IACb,IAAMC,kBAAkB,GAAG3D,OAAO,CAACa,SAAS,CAAClF,UAAU,CAAC2H,kBAAkB,CAACM,OAAO,CAAC5D,OAAO,CAAC5D,kBAAkB,GAAG,CAAC,CAAC;IAClH,IAAIuH,kBAAkB,KAAK,IAAI,EAAE;MAC7B;MACAD,UAAU,GAAG,IAAI;IACrB,CAAC,MAAM;MACH1D,OAAO,CAAC5D,kBAAkB,GAAG4D,OAAO,CAACa,SAAS,CAAClF,UAAU,CAAC2H,kBAAkB,CAACC,OAAO;MAEpF,IAAMM,eAA2C,GAAG7D,OAAO,CAACa,SAAS,CAAClF,UAAU,CAC3E2H,kBAAkB,CAClBQ,iBAAiB,CAACH,kBAAkB,CAAC;MAE1C,IAAI3D,OAAO,CAACvE,EAAE,KAAK,OAAO,EAAE;QACxB;QACA,IAAMsI,aAAa,GAAGrJ,cAAc,CAACsF,OAAO,CAAC7D,OAAO,CAAC,CAACwB,KAAK;QAC3D,IAAIqG,QAAQ,GAAGD,aAAa;QAC5BF,eAAe,CAACxF,OAAO,CAAC4F,EAAE,IAAI;UAC1B,IAAMC,cAAc,GAAGD,EAAE,CAACE,oBAAoB,IAAInE,OAAO,CAACM,qBAAqB,CAAC2D,EAAE,CAACE,oBAAoB,CAAC;UACxG,IAAMC,YAAY,GAAGpE,OAAO,CAACM,qBAAqB,CAAC2D,EAAE,CAACI,YAAY,CAAC;UAEnE,IAAI,CAACH,cAAc,IAAIE,YAAY,EAAE;YACjCJ,QAAQ,EAAE;UACd;UACA,IAAIE,cAAc,IAAI,CAACE,YAAY,EAAE;YACjCJ,QAAQ,EAAE;UACd;QACJ,CAAC,CAAC;QACF,IAAIA,QAAQ,KAAKD,aAAa,EAAE;UAC5B5F,GAAG,GAAG,IAAI,CAAC,CAAC;UACZ6B,OAAO,CAACl
D,cAAc,CAACkH,QAAe,CAAC;QAC3C;MACJ,CAAC,MAAM;QACH;QACA,IAAMM,iBAAiB,GAAGvJ,mBAAmB,CACzCiF,OAAO,EACP6D,eACJ,CAAC;QACD,IAAIS,iBAAiB,CAACC,iBAAiB,EAAE;UACrC;UACAb,UAAU,GAAG,IAAI;QACrB,CAAC,MAAM,IAAIY,iBAAiB,CAACE,OAAO,EAAE;UAClC;UACArG,GAAG,GAAG,IAAI,CAAC,CAAC;UACZ6B,OAAO,CAAClD,cAAc,CAACwH,iBAAiB,CAACG,UAAiB,CAAC;QAC/D;MACJ;IACJ;EACJ;;EAEA;EACA,IAAIf,UAAU,EAAE;IACZ,OAAO1D,OAAO,CAAC1C,iBAAiB,CAAC,CAAC,CAC7B6B,IAAI,CAACpC,aAAa,IAAI;MAEnB;AAChB;AACA;AACA;AACA;MACgBiD,OAAO,CAAC5D,kBAAkB,GAAG4D,OAAO,CAACrE,UAAU,CAAC2H,kBAAkB,CAACC,OAAO;;MAE1E;MACA,IAAI,OAAOxG,aAAa,KAAK,QAAQ,EAAE;QACnC,IACI,CAACiD,OAAO,CAAC7D,OAAO,IAChBY,aAAa,KAAKiD,OAAO,CAAC7D,OAAO,CAACwB,KAAK,EACzC;UACEQ,GAAG,GAAG,IAAI;UACV6B,OAAO,CAAClD,cAAc,CAACC,aAAoB,CAAC;QAChD;QACA,OAAOoB,GAAG;MACd;MACA,IACI,CAAC6B,OAAO,CAAC7D,OAAO,IAChB,CAACxB,wBAAwB,CACrBqF,OAAO,CAACrE,UAAU,CAACe,MAAM,CAACC,WAAW,EACrCI,aAAa,EACbiD,OAAO,CAAC7D,OAAO,CAACuI,QACpB,CAAC,EACH;QACEvG,GAAG,GAAG,IAAI,CAAC,CAAC;QACZ6B,OAAO,CAAClD,cAAc,CAACC,aAAoB,CAAC;MAChD;MACA,OAAOoB,GAAG;IACd,CAAC,CAAC;EACV;EACA,OAAOuC,OAAO,CAACiE,OAAO,CAACxG,GAAG,CAAC,CAAC,CAAC;AACjC;;AAEA;AACA;AACA;AACA;AACA,OAAO,SAASkC,YAAYA,CACxB3D,MAA+C,EAC/CkI,eAA4C,EACpB;EACxB,IAAI,CAACA,eAAe,CAACrD,IAAI,EAAE;IACvB,MAAM1G,UAAU,CAAC,KAAK,EAAE;MACpB0E,KAAK,EAAEqF;IACX,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA;EACI,IAAMC,SAAS,GAAGxJ,YAAY,CAC1BqB,MAAM,EACNkI,eACJ,CAAC;EAED,OAAO;IACHrF,KAAK,EAAEqF,eAAe;IACtBC;EACJ,CAAC;AACL;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,eAAe3F,eAAeA,CACjCc,OAAyD,EACrB;EACpC,IAAIlB,IAAiC,GAAG,EAAE;EAC1C,IAAMnD,UAAU,GAAGqE,OAAO,CAACrE,UAAU;;EAErC;AACJ;AACA;AACA;AACA;AACA;EACI,IAAIqE,OAAO,CAACvD,kBAAkB,EAAE;IAC5B,IAAIQ,KAAK,CAACwD,OAAO,CAACT,OAAO,CAACvD,kBAAkB,CAAC,EAAE;MAC3C,IAAIqI,MAAM,GAAG9E,OAAO,CAACvD,kBAAkB;MACvCqI,MAAM,GAAGA,MAAM,CAAC/K,MAAM,CAACgL,KAAK,IAAI;QAC5B;QACA,IAAMzG,OAAO,GAAG0B,OAAO,CAACrE,UAAU,CAAC4C,SAAS,CAACC,6BAA6B,CAACuG,KAAK,CAAC;QACjF,IAAIzG,OAAO,EAAE;UACT,IAAI,CAACA,OAAO,CAACG,QAAQ,EAAE;YACnBK,IAAI,CAACD,IAAI,CAACP,OAAO,CAAC;UACtB;UACA,OAAO,KA
AK;QAChB,CAAC,MAAM;UACH,OAAO,IAAI;QACf;MACJ,CAAC,CAAC;MACF;MACA,IAAIwG,MAAM,CAACzH,MAAM,GAAG,CAAC,EAAE;QACnB,IAAM2H,eAAe,GAAG,MAAMrJ,UAAU,CAAC+B,eAAe,CAACqB,iBAAiB,CAAC+F,MAAM,EAAE,KAAK,CAAC;QACzFlK,aAAa,CAACkE,IAAI,EAAEkG,eAAe,CAAC;MACxC;IACJ,CAAC,MAAM;MACH,IAAMD,KAAK,GAAG/E,OAAO,CAACvD,kBAAkB;;MAExC;MACA,IAAI6B,OAAO,GAAG0B,OAAO,CAACrE,UAAU,CAAC4C,SAAS,CAACC,6BAA6B,CAACuG,KAAK,CAAC;MAC/E,IAAI,CAACzG,OAAO,EAAE;QACV;QACA,IAAM2G,eAAe,GAAG,MAAMtJ,UAAU,CAAC+B,eAAe,CAACqB,iBAAiB,CAAC,CAACgG,KAAK,CAAC,EAAE,KAAK,CAAC;QAC1F,IAAIE,eAAe,CAAC,CAAC,CAAC,EAAE;UACpB3G,OAAO,GAAG2G,eAAe,CAAC,CAAC,CAAC;QAChC;MACJ;MACA,IAAI3G,OAAO,IAAI,CAACA,OAAO,CAACG,QAAQ,EAAE;QAC9BK,IAAI,CAACD,IAAI,CAACP,OAAO,CAAC;MACtB;IACJ;EACJ,CAAC,MAAM;IACH,IAAMf,aAAa,GAAGyC,OAAO,CAACxC,gBAAgB,CAAC,CAAC;IAChD,IAAM0H,WAAW,GAAG,MAAMvJ,UAAU,CAAC+B,eAAe,CAAC6B,KAAK,CAAChC,aAAa,CAAC;IACzEuB,IAAI,GAAGoG,WAAW,CAAC1C,SAAS;EAChC;EACA,OAAO1D,IAAI;AAEf;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASrC,kBAAkBA,CAC9BE,WAAmB,EACnB4C,KAAsB,EACG;EACzB;EACA,IACI,CAACA,KAAK,CAACkC,IAAI,IACXlC,KAAK,CAACtB,QAAQ,IACdkH,MAAM,CAACC,IAAI,CAAC7F,KAAK,CAACtB,QAAQ,CAAC,CAACZ,MAAM,KAAK,CAAC,IACxCkC,KAAK,CAACtB,QAAQ,CAACtB,WAAW,CAAC,EAC7B;IACE,IAAMiD,KAAU,GAAGL,KAAK,CAACtB,QAAQ,CAACtB,WAAW,CAAC;IAC9C,IAAI,OAAOiD,KAAK,KAAK,QAAQ,EAAE;MAC3B,OAAOA,KAAK;IAChB,CAAC,MAAM,IACHuF,MAAM,CAACC,IAAI,CAACxF,KAAK,CAAC,CAACvC,MAAM,KAAK,CAAC,IAC/B,OAAOuC,KAAK,CAACM,GAAG,KAAK,QAAQ,EAC/B;MACE,OAAON,KAAK,CAACM,GAAG;IACpB;;IAEA;IACA,IACIiF,MAAM,CAACC,IAAI,CAACxF,KAAK,CAAC,CAACvC,MAAM,KAAK,CAAC,IAC/BJ,KAAK,CAACwD,OAAO,CAACb,KAAK,CAACM,GAAG,CAAC;IACxB;IACA,CAAEN,KAAK,CAACM,GAAG,CAAWmF,IAAI,CAACC,CAAC,IAAI,OAAOA,CAAC,KAAK,QAAQ,CAAC,EACxD;MACE,OAAO1F,KAAK,CAACM,GAAG;IACpB;EACJ;EACA,OAAO,KAAK;AAChB;AAIA,OAAO,SAASqF,SAASA,CAACC,GAAQ,EAAW;EACzC,OAAOA,GAAG,YAAYhK,WAAW;AACrC","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/rx-schema-helper.js b/dist/esm/rx-schema-helper.js deleted file mode 100644 index 1d4b5dbbb72..00000000000 --- 
a/dist/esm/rx-schema-helper.js +++ /dev/null @@ -1,292 +0,0 @@ -import { newRxError } from "./rx-error.js"; -import { appendToArray, ensureNotFalsy, flatClone, getProperty, isMaybeReadonlyArray, REGEX_ALL_DOTS, RX_META_LWT_MINIMUM, sortObject, trimDots } from "./plugins/utils/index.js"; -/** - * Helper function to create a valid RxJsonSchema - * with a given version. - */ -export function getPseudoSchemaForVersion(version, primaryKey) { - var pseudoSchema = fillWithDefaultSettings({ - version, - type: 'object', - primaryKey: primaryKey, - properties: { - [primaryKey]: { - type: 'string', - maxLength: 100 - } - }, - indexes: [[primaryKey]], - required: [primaryKey] - }); - return pseudoSchema; -} - -/** - * Returns the sub-schema for a given path - */ -export function getSchemaByObjectPath(rxJsonSchema, path) { - var usePath = path; - usePath = usePath.replace(REGEX_ALL_DOTS, '.properties.'); - usePath = 'properties.' + usePath; - usePath = trimDots(usePath); - var ret = getProperty(rxJsonSchema, usePath); - return ret; -} -export function fillPrimaryKey(primaryPath, jsonSchema, documentData) { - // optimization shortcut. 
- if (typeof jsonSchema.primaryKey === 'string') { - return documentData; - } - var newPrimary = getComposedPrimaryKeyOfDocumentData(jsonSchema, documentData); - var existingPrimary = documentData[primaryPath]; - if (existingPrimary && existingPrimary !== newPrimary) { - throw newRxError('DOC19', { - args: { - documentData, - existingPrimary, - newPrimary - }, - schema: jsonSchema - }); - } - documentData[primaryPath] = newPrimary; - return documentData; -} -export function getPrimaryFieldOfPrimaryKey(primaryKey) { - if (typeof primaryKey === 'string') { - return primaryKey; - } else { - return primaryKey.key; - } -} -export function getLengthOfPrimaryKey(schema) { - var primaryPath = getPrimaryFieldOfPrimaryKey(schema.primaryKey); - var schemaPart = getSchemaByObjectPath(schema, primaryPath); - return ensureNotFalsy(schemaPart.maxLength); -} - -/** - * Returns the composed primaryKey of a document by its data. - */ -export function getComposedPrimaryKeyOfDocumentData(jsonSchema, documentData) { - if (typeof jsonSchema.primaryKey === 'string') { - return documentData[jsonSchema.primaryKey]; - } - var compositePrimary = jsonSchema.primaryKey; - return compositePrimary.fields.map(field => { - var value = getProperty(documentData, field); - if (typeof value === 'undefined') { - throw newRxError('DOC18', { - args: { - field, - documentData - } - }); - } - return value; - }).join(compositePrimary.separator); -} - -/** - * Normalize the RxJsonSchema. - * We need this to ensure everything is set up properly - * and we have the same hash on schemas that represent the same value but - * have different json. - * - * - Orders the schemas attributes by alphabetical order - * - Adds the primaryKey to all indexes that do not contain the primaryKey - * - We need this for deterministic sort order on all queries, which is required for event-reduce to work. 
- * - * @return RxJsonSchema - ordered and filled - */ -export function normalizeRxJsonSchema(jsonSchema) { - var normalizedSchema = sortObject(jsonSchema, true); - return normalizedSchema; -} - -/** - * If the schema does not specify any index, - * we add this index so we at least can run RxQuery() - * and only select non-deleted fields. - */ -export function getDefaultIndex(primaryPath) { - return ['_deleted', primaryPath]; -} - -/** - * fills the schema-json with default-settings - * @return cloned schemaObj - */ -export function fillWithDefaultSettings(schemaObj) { - schemaObj = flatClone(schemaObj); - var primaryPath = getPrimaryFieldOfPrimaryKey(schemaObj.primaryKey); - schemaObj.properties = flatClone(schemaObj.properties); - - // additionalProperties is always false - schemaObj.additionalProperties = false; - - // fill with key-compression-state () - if (!Object.prototype.hasOwnProperty.call(schemaObj, 'keyCompression')) { - schemaObj.keyCompression = false; - } - - // indexes must be array - schemaObj.indexes = schemaObj.indexes ? schemaObj.indexes.slice(0) : []; - - // required must be array - schemaObj.required = schemaObj.required ? schemaObj.required.slice(0) : []; - - // encrypted must be array - schemaObj.encrypted = schemaObj.encrypted ? schemaObj.encrypted.slice(0) : []; - - // add _rev - schemaObj.properties._rev = { - type: 'string', - minLength: 1 - }; - - // add attachments - schemaObj.properties._attachments = { - type: 'object' - }; - - // add deleted flag - schemaObj.properties._deleted = { - type: 'boolean' - }; - - // add meta property - schemaObj.properties._meta = RX_META_SCHEMA; - - /** - * meta fields are all required - */ - schemaObj.required = schemaObj.required ? 
schemaObj.required.slice(0) : []; - schemaObj.required.push('_deleted'); - schemaObj.required.push('_rev'); - schemaObj.required.push('_meta'); - schemaObj.required.push('_attachments'); - - // final fields are always required - var finalFields = getFinalFields(schemaObj); - appendToArray(schemaObj.required, finalFields); - schemaObj.required = schemaObj.required.filter(field => !field.includes('.')).filter((elem, pos, arr) => arr.indexOf(elem) === pos); // unique; - - // version is 0 by default - schemaObj.version = schemaObj.version || 0; - var useIndexes = schemaObj.indexes.map(index => { - var arIndex = isMaybeReadonlyArray(index) ? index.slice(0) : [index]; - /** - * Append primary key to indexes that do not contain the primaryKey. - * All indexes must have the primaryKey to ensure a deterministic sort order. - */ - if (!arIndex.includes(primaryPath)) { - arIndex.push(primaryPath); - } - - // add _deleted flag to all indexes so we can query only non-deleted fields - // in RxDB itself - if (arIndex[0] !== '_deleted') { - arIndex.unshift('_deleted'); - } - return arIndex; - }); - if (useIndexes.length === 0) { - useIndexes.push(getDefaultIndex(primaryPath)); - } - - // we need this index for the getChangedDocumentsSince() method - useIndexes.push(['_meta.lwt', primaryPath]); - - // also add the internalIndexes - if (schemaObj.internalIndexes) { - schemaObj.internalIndexes.map(idx => { - useIndexes.push(idx); - }); - } - - // make indexes unique - var hasIndex = new Set(); - useIndexes.filter(index => { - var indexStr = index.join(','); - if (hasIndex.has(indexStr)) { - return false; - } else { - hasIndex.add(indexStr); - return true; - } - }); - schemaObj.indexes = useIndexes; - return schemaObj; -} -export var RX_META_SCHEMA = { - type: 'object', - properties: { - /** - * The last-write time. - * Unix time in milliseconds. - */ - lwt: { - type: 'number', - /** - * We use 1 as minimum so that the value is never falsy. 
- */ - minimum: RX_META_LWT_MINIMUM, - maximum: 1000000000000000, - multipleOf: 0.01 - } - }, - /** - * Additional properties are allowed - * and can be used by plugins to set various flags. - */ - additionalProperties: true, - required: ['lwt'] -}; - -/** - * returns the final-fields of the schema - * @return field-names of the final-fields - */ -export function getFinalFields(jsonSchema) { - var ret = Object.keys(jsonSchema.properties).filter(key => jsonSchema.properties[key].final); - - // primary is also final - var primaryPath = getPrimaryFieldOfPrimaryKey(jsonSchema.primaryKey); - ret.push(primaryPath); - - // fields of composite primary are final - if (typeof jsonSchema.primaryKey !== 'string') { - jsonSchema.primaryKey.fields.forEach(field => ret.push(field)); - } - return ret; -} - -/** - * fills all unset fields with default-values if set - * @hotPath - */ -export function fillObjectWithDefaults(rxSchema, obj) { - var defaultKeys = Object.keys(rxSchema.defaultValues); - for (var i = 0; i < defaultKeys.length; ++i) { - var key = defaultKeys[i]; - if (!Object.prototype.hasOwnProperty.call(obj, key) || typeof obj[key] === 'undefined') { - obj[key] = rxSchema.defaultValues[key]; - } - } - return obj; -} -export var DEFAULT_CHECKPOINT_SCHEMA = { - type: 'object', - properties: { - id: { - type: 'string' - }, - lwt: { - type: 'number' - } - }, - required: ['id', 'lwt'], - additionalProperties: false -}; -//# sourceMappingURL=rx-schema-helper.js.map \ No newline at end of file diff --git a/dist/esm/rx-schema-helper.js.map b/dist/esm/rx-schema-helper.js.map deleted file mode 100644 index 9e06f31e02c..00000000000 --- a/dist/esm/rx-schema-helper.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"rx-schema-helper.js","names":["newRxError","appendToArray","ensureNotFalsy","flatClone","getProperty","isMaybeReadonlyArray","REGEX_ALL_DOTS","RX_META_LWT_MINIMUM","sortObject","trimDots","getPseudoSchemaForVersion","version","primaryKey","pseudoSchema","fillWithDefaultSettings","type","properties","maxLength","indexes","required","getSchemaByObjectPath","rxJsonSchema","path","usePath","replace","ret","fillPrimaryKey","primaryPath","jsonSchema","documentData","newPrimary","getComposedPrimaryKeyOfDocumentData","existingPrimary","args","schema","getPrimaryFieldOfPrimaryKey","key","getLengthOfPrimaryKey","schemaPart","compositePrimary","fields","map","field","value","join","separator","normalizeRxJsonSchema","normalizedSchema","getDefaultIndex","schemaObj","additionalProperties","Object","prototype","hasOwnProperty","call","keyCompression","slice","encrypted","_rev","minLength","_attachments","_deleted","_meta","RX_META_SCHEMA","push","finalFields","getFinalFields","filter","includes","elem","pos","arr","indexOf","useIndexes","index","arIndex","unshift","length","internalIndexes","idx","hasIndex","Set","indexStr","has","add","lwt","minimum","maximum","multipleOf","keys","final","forEach","fillObjectWithDefaults","rxSchema","obj","defaultKeys","defaultValues","i","DEFAULT_CHECKPOINT_SCHEMA","id"],"sources":["../../src/rx-schema-helper.ts"],"sourcesContent":["import { newRxError } from './rx-error.ts';\nimport type {\n CompositePrimaryKey,\n DeepReadonly,\n JsonSchema,\n PrimaryKey,\n RxDocumentData,\n RxJsonSchema,\n RxStorageDefaultCheckpoint,\n StringKeys\n} from './types/index.d.ts';\nimport {\n appendToArray,\n ensureNotFalsy,\n flatClone,\n getProperty,\n isMaybeReadonlyArray,\n REGEX_ALL_DOTS,\n RX_META_LWT_MINIMUM,\n sortObject,\n trimDots\n} from './plugins/utils/index.ts';\nimport type { RxSchema } from './rx-schema.ts';\n\n/**\n * Helper function to create a valid RxJsonSchema\n * with a given version.\n */\nexport function 
getPseudoSchemaForVersion(\n version: number,\n primaryKey: StringKeys\n): RxJsonSchema> {\n const pseudoSchema: RxJsonSchema> = fillWithDefaultSettings({\n version,\n type: 'object',\n primaryKey: primaryKey as any,\n properties: {\n [primaryKey]: {\n type: 'string',\n maxLength: 100\n }\n } as any,\n indexes: [\n [primaryKey]\n ],\n required: [primaryKey]\n });\n return pseudoSchema;\n}\n\n/**\n * Returns the sub-schema for a given path\n */\nexport function getSchemaByObjectPath(\n rxJsonSchema: RxJsonSchema,\n path: keyof T | string\n): JsonSchema {\n let usePath: string = path as string;\n usePath = usePath.replace(REGEX_ALL_DOTS, '.properties.');\n usePath = 'properties.' + usePath;\n usePath = trimDots(usePath);\n\n const ret = getProperty(rxJsonSchema, usePath);\n return ret;\n}\n\nexport function fillPrimaryKey(\n primaryPath: keyof T,\n jsonSchema: RxJsonSchema,\n documentData: RxDocumentData\n): RxDocumentData {\n // optimization shortcut.\n if (typeof jsonSchema.primaryKey === 'string') {\n return documentData;\n }\n\n const newPrimary = getComposedPrimaryKeyOfDocumentData(\n jsonSchema,\n documentData\n );\n const existingPrimary: string | undefined = documentData[primaryPath] as any;\n if (\n existingPrimary &&\n existingPrimary !== newPrimary\n ) {\n throw newRxError(\n 'DOC19',\n {\n args: {\n documentData,\n existingPrimary,\n newPrimary,\n },\n schema: jsonSchema\n });\n }\n\n (documentData as any)[primaryPath] = newPrimary;\n return documentData;\n}\n\nexport function getPrimaryFieldOfPrimaryKey(\n primaryKey: PrimaryKey\n): StringKeys {\n if (typeof primaryKey === 'string') {\n return primaryKey as any;\n } else {\n return (primaryKey as CompositePrimaryKey).key;\n }\n}\n\nexport function getLengthOfPrimaryKey(\n schema: RxJsonSchema>\n): number {\n const primaryPath = getPrimaryFieldOfPrimaryKey(schema.primaryKey);\n const schemaPart = getSchemaByObjectPath(schema, primaryPath);\n return ensureNotFalsy(schemaPart.maxLength);\n}\n\n/**\n * 
Returns the composed primaryKey of a document by its data.\n */\nexport function getComposedPrimaryKeyOfDocumentData(\n jsonSchema: RxJsonSchema | RxJsonSchema>,\n documentData: Partial\n): string {\n if (typeof jsonSchema.primaryKey === 'string') {\n return (documentData as any)[jsonSchema.primaryKey];\n }\n\n const compositePrimary: CompositePrimaryKey = jsonSchema.primaryKey as any;\n return compositePrimary.fields.map(field => {\n const value = getProperty(documentData as any, field as string);\n if (typeof value === 'undefined') {\n throw newRxError('DOC18', { args: { field, documentData } });\n }\n return value;\n }).join(compositePrimary.separator);\n}\n\n\n/**\n * Normalize the RxJsonSchema.\n * We need this to ensure everything is set up properly\n * and we have the same hash on schemas that represent the same value but\n * have different json.\n *\n * - Orders the schemas attributes by alphabetical order\n * - Adds the primaryKey to all indexes that do not contain the primaryKey\n * - We need this for deterministic sort order on all queries, which is required for event-reduce to work.\n *\n * @return RxJsonSchema - ordered and filled\n */\nexport function normalizeRxJsonSchema(jsonSchema: RxJsonSchema): RxJsonSchema {\n const normalizedSchema: RxJsonSchema = sortObject(jsonSchema, true);\n return normalizedSchema;\n}\n\n/**\n * If the schema does not specify any index,\n * we add this index so we at least can run RxQuery()\n * and only select non-deleted fields.\n */\nexport function getDefaultIndex(primaryPath: string) {\n return ['_deleted', primaryPath];\n}\n\n/**\n * fills the schema-json with default-settings\n * @return cloned schemaObj\n */\nexport function fillWithDefaultSettings(\n schemaObj: RxJsonSchema\n): RxJsonSchema> {\n schemaObj = flatClone(schemaObj);\n const primaryPath: string = getPrimaryFieldOfPrimaryKey(schemaObj.primaryKey);\n schemaObj.properties = flatClone(schemaObj.properties);\n\n // additionalProperties is always false\n 
schemaObj.additionalProperties = false;\n\n // fill with key-compression-state ()\n if (!Object.prototype.hasOwnProperty.call(schemaObj, 'keyCompression')) {\n schemaObj.keyCompression = false;\n }\n\n // indexes must be array\n schemaObj.indexes = schemaObj.indexes ? schemaObj.indexes.slice(0) : [];\n\n // required must be array\n schemaObj.required = schemaObj.required ? schemaObj.required.slice(0) : [];\n\n // encrypted must be array\n schemaObj.encrypted = schemaObj.encrypted ? schemaObj.encrypted.slice(0) : [];\n\n // add _rev\n (schemaObj.properties as any)._rev = {\n type: 'string',\n minLength: 1\n };\n\n // add attachments\n (schemaObj.properties as any)._attachments = {\n type: 'object'\n };\n\n // add deleted flag\n (schemaObj.properties as any)._deleted = {\n type: 'boolean'\n };\n\n // add meta property\n (schemaObj.properties as any)._meta = RX_META_SCHEMA;\n\n /**\n * meta fields are all required\n */\n schemaObj.required = schemaObj.required ? schemaObj.required.slice(0) : [];\n (schemaObj.required as string[]).push('_deleted');\n (schemaObj.required as string[]).push('_rev');\n (schemaObj.required as string[]).push('_meta');\n (schemaObj.required as string[]).push('_attachments');\n\n // final fields are always required\n const finalFields = getFinalFields(schemaObj);\n appendToArray(schemaObj.required as any, finalFields);\n schemaObj.required = schemaObj.required\n .filter((field: string) => !field.includes('.'))\n .filter((elem: any, pos: any, arr: any) => arr.indexOf(elem) === pos); // unique;\n\n // version is 0 by default\n schemaObj.version = schemaObj.version || 0;\n\n const useIndexes: string[][] = schemaObj.indexes.map(index => {\n const arIndex = isMaybeReadonlyArray(index) ? 
index.slice(0) : [index];\n /**\n * Append primary key to indexes that do not contain the primaryKey.\n * All indexes must have the primaryKey to ensure a deterministic sort order.\n */\n if (!arIndex.includes(primaryPath)) {\n arIndex.push(primaryPath);\n }\n\n // add _deleted flag to all indexes so we can query only non-deleted fields\n // in RxDB itself\n if (arIndex[0] !== '_deleted') {\n arIndex.unshift('_deleted');\n }\n\n return arIndex;\n });\n\n if (useIndexes.length === 0) {\n useIndexes.push(getDefaultIndex(primaryPath));\n }\n\n // we need this index for the getChangedDocumentsSince() method\n useIndexes.push(['_meta.lwt', primaryPath]);\n\n // also add the internalIndexes\n if (schemaObj.internalIndexes) {\n schemaObj.internalIndexes.map(idx => {\n useIndexes.push(idx);\n });\n }\n\n // make indexes unique\n const hasIndex = new Set();\n useIndexes.filter(index => {\n const indexStr = index.join(',');\n if (hasIndex.has(indexStr)) {\n return false;\n } else {\n hasIndex.add(indexStr);\n return true;\n }\n });\n\n schemaObj.indexes = useIndexes;\n\n return schemaObj as any;\n}\n\n\nexport const RX_META_SCHEMA: JsonSchema = {\n type: 'object',\n properties: {\n /**\n * The last-write time.\n * Unix time in milliseconds.\n */\n lwt: {\n type: 'number',\n /**\n * We use 1 as minimum so that the value is never falsy.\n */\n minimum: RX_META_LWT_MINIMUM,\n maximum: 1000000000000000,\n multipleOf: 0.01\n }\n },\n /**\n * Additional properties are allowed\n * and can be used by plugins to set various flags.\n */\n additionalProperties: true as any,\n required: [\n 'lwt'\n ]\n};\n\n\n/**\n * returns the final-fields of the schema\n * @return field-names of the final-fields\n */\nexport function getFinalFields(\n jsonSchema: RxJsonSchema\n): string[] {\n const ret = Object.keys(jsonSchema.properties)\n .filter(key => (jsonSchema as any).properties[key].final);\n\n // primary is also final\n const primaryPath = 
getPrimaryFieldOfPrimaryKey(jsonSchema.primaryKey);\n ret.push(primaryPath);\n\n // fields of composite primary are final\n if (typeof jsonSchema.primaryKey !== 'string') {\n (jsonSchema.primaryKey as CompositePrimaryKey).fields\n .forEach(field => ret.push(field as string));\n }\n\n return ret;\n}\n\n/**\n * fills all unset fields with default-values if set\n * @hotPath\n */\nexport function fillObjectWithDefaults(rxSchema: RxSchema, obj: any): any {\n const defaultKeys = Object.keys(rxSchema.defaultValues);\n for (let i = 0; i < defaultKeys.length; ++i) {\n const key = defaultKeys[i];\n if (!Object.prototype.hasOwnProperty.call(obj, key) || typeof obj[key] === 'undefined') {\n obj[key] = rxSchema.defaultValues[key];\n }\n }\n return obj;\n}\n\nexport const DEFAULT_CHECKPOINT_SCHEMA: DeepReadonly> = {\n type: 'object',\n properties: {\n id: {\n type: 'string'\n },\n lwt: {\n type: 'number'\n }\n },\n required: [\n 'id',\n 'lwt'\n ],\n additionalProperties: false\n} as const;\n"],"mappings":"AAAA,SAASA,UAAU,QAAQ,eAAe;AAW1C,SACIC,aAAa,EACbC,cAAc,EACdC,SAAS,EACTC,WAAW,EACXC,oBAAoB,EACpBC,cAAc,EACdC,mBAAmB,EACnBC,UAAU,EACVC,QAAQ,QACL,0BAA0B;AAGjC;AACA;AACA;AACA;AACA,OAAO,SAASC,yBAAyBA,CACrCC,OAAe,EACfC,UAAyB,EACM;EAC/B,IAAMC,YAA6C,GAAGC,uBAAuB,CAAC;IAC1EH,OAAO;IACPI,IAAI,EAAE,QAAQ;IACdH,UAAU,EAAEA,UAAiB;IAC7BI,UAAU,EAAE;MACR,CAACJ,UAAU,GAAG;QACVG,IAAI,EAAE,QAAQ;QACdE,SAAS,EAAE;MACf;IACJ,CAAQ;IACRC,OAAO,EAAE,CACL,CAACN,UAAU,CAAC,CACf;IACDO,QAAQ,EAAE,CAACP,UAAU;EACzB,CAAC,CAAC;EACF,OAAOC,YAAY;AACvB;;AAEA;AACA;AACA;AACA,OAAO,SAASO,qBAAqBA,CACjCC,YAA6B,EAC7BC,IAAsB,EACZ;EACV,IAAIC,OAAe,GAAGD,IAAc;EACpCC,OAAO,GAAGA,OAAO,CAACC,OAAO,CAAClB,cAAc,EAAE,cAAc,CAAC;EACzDiB,OAAO,GAAG,aAAa,GAAGA,OAAO;EACjCA,OAAO,GAAGd,QAAQ,CAACc,OAAO,CAAC;EAE3B,IAAME,GAAG,GAAGrB,WAAW,CAACiB,YAAY,EAAEE,OAAO,CAAC;EAC9C,OAAOE,GAAG;AACd;AAEA,OAAO,SAASC,cAAcA,CAC1BC,WAAoB,EACpBC,UAA2B,EAC3BC,YAA+B,EACd;EACjB;EACA,IAAI,OAAOD,UAAU,CAAChB,UAAU,KAAK,QAAQ,EAAE;IAC3C,OAAOiB,YAAY;EACvB;EAEA,IAAMC,UAAU,GAAGC,mCAA
mC,CAClDH,UAAU,EACVC,YACJ,CAAC;EACD,IAAMG,eAAmC,GAAGH,YAAY,CAACF,WAAW,CAAQ;EAC5E,IACIK,eAAe,IACfA,eAAe,KAAKF,UAAU,EAChC;IACE,MAAM9B,UAAU,CACZ,OAAO,EACP;MACIiC,IAAI,EAAE;QACFJ,YAAY;QACZG,eAAe;QACfF;MACJ,CAAC;MACDI,MAAM,EAAEN;IACZ,CAAC,CAAC;EACV;EAECC,YAAY,CAASF,WAAW,CAAC,GAAGG,UAAU;EAC/C,OAAOD,YAAY;AACvB;AAEA,OAAO,SAASM,2BAA2BA,CACvCvB,UAAiC,EACZ;EACrB,IAAI,OAAOA,UAAU,KAAK,QAAQ,EAAE;IAChC,OAAOA,UAAU;EACrB,CAAC,MAAM;IACH,OAAQA,UAAU,CAAoCwB,GAAG;EAC7D;AACJ;AAEA,OAAO,SAASC,qBAAqBA,CACjCH,MAA+C,EACzC;EACN,IAAMP,WAAW,GAAGQ,2BAA2B,CAACD,MAAM,CAACtB,UAAU,CAAC;EAClE,IAAM0B,UAAU,GAAGlB,qBAAqB,CAACc,MAAM,EAAEP,WAAW,CAAC;EAC7D,OAAOzB,cAAc,CAACoC,UAAU,CAACrB,SAAS,CAAC;AAC/C;;AAEA;AACA;AACA;AACA,OAAO,SAASc,mCAAmCA,CAC/CH,UAA6E,EAC7EC,YAAgC,EAC1B;EACN,IAAI,OAAOD,UAAU,CAAChB,UAAU,KAAK,QAAQ,EAAE;IAC3C,OAAQiB,YAAY,CAASD,UAAU,CAAChB,UAAU,CAAC;EACvD;EAEA,IAAM2B,gBAAgD,GAAGX,UAAU,CAAChB,UAAiB;EACrF,OAAO2B,gBAAgB,CAACC,MAAM,CAACC,GAAG,CAACC,KAAK,IAAI;IACxC,IAAMC,KAAK,GAAGvC,WAAW,CAACyB,YAAY,EAASa,KAAe,CAAC;IAC/D,IAAI,OAAOC,KAAK,KAAK,WAAW,EAAE;MAC9B,MAAM3C,UAAU,CAAC,OAAO,EAAE;QAAEiC,IAAI,EAAE;UAAES,KAAK;UAAEb;QAAa;MAAE,CAAC,CAAC;IAChE;IACA,OAAOc,KAAK;EAChB,CAAC,CAAC,CAACC,IAAI,CAACL,gBAAgB,CAACM,SAAS,CAAC;AACvC;;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASC,qBAAqBA,CAAIlB,UAA2B,EAAmB;EACnF,IAAMmB,gBAAiC,GAAGvC,UAAU,CAACoB,UAAU,EAAE,IAAI,CAAC;EACtE,OAAOmB,gBAAgB;AAC3B;;AAEA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASC,eAAeA,CAACrB,WAAmB,EAAE;EACjD,OAAO,CAAC,UAAU,EAAEA,WAAW,CAAC;AACpC;;AAEA;AACA;AACA;AACA;AACA,OAAO,SAASb,uBAAuBA,CACnCmC,SAA0B,EACK;EAC/BA,SAAS,GAAG9C,SAAS,CAAC8C,SAAS,CAAC;EAChC,IAAMtB,WAAmB,GAAGQ,2BAA2B,CAACc,SAAS,CAACrC,UAAU,CAAC;EAC7EqC,SAAS,CAACjC,UAAU,GAAGb,SAAS,CAAC8C,SAAS,CAACjC,UAAU,CAAC;;EAEtD;EACAiC,SAAS,CAACC,oBAAoB,GAAG,KAAK;;EAEtC;EACA,IAAI,CAACC,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACL,SAAS,EAAE,gBAAgB,CAAC,EAAE;IACpEA,SAAS,CAACM,cAAc,GAAG,KAAK;EACpC;;EAEA;EACAN,SAAS,CAAC/B,OAAO,GAAG+B,SAAS,CAAC/B,OAAO,GAAG+B,SAAS,CAAC/B,OAAO,CAACsC,KAAK,
CAAC,CAAC,CAAC,GAAG,EAAE;;EAEvE;EACAP,SAAS,CAAC9B,QAAQ,GAAG8B,SAAS,CAAC9B,QAAQ,GAAG8B,SAAS,CAAC9B,QAAQ,CAACqC,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE;;EAE1E;EACAP,SAAS,CAACQ,SAAS,GAAGR,SAAS,CAACQ,SAAS,GAAGR,SAAS,CAACQ,SAAS,CAACD,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE;;EAE7E;EACCP,SAAS,CAACjC,UAAU,CAAS0C,IAAI,GAAG;IACjC3C,IAAI,EAAE,QAAQ;IACd4C,SAAS,EAAE;EACf,CAAC;;EAED;EACCV,SAAS,CAACjC,UAAU,CAAS4C,YAAY,GAAG;IACzC7C,IAAI,EAAE;EACV,CAAC;;EAED;EACCkC,SAAS,CAACjC,UAAU,CAAS6C,QAAQ,GAAG;IACrC9C,IAAI,EAAE;EACV,CAAC;;EAED;EACCkC,SAAS,CAACjC,UAAU,CAAS8C,KAAK,GAAGC,cAAc;;EAEpD;AACJ;AACA;EACId,SAAS,CAAC9B,QAAQ,GAAG8B,SAAS,CAAC9B,QAAQ,GAAG8B,SAAS,CAAC9B,QAAQ,CAACqC,KAAK,CAAC,CAAC,CAAC,GAAG,EAAE;EACzEP,SAAS,CAAC9B,QAAQ,CAAc6C,IAAI,CAAC,UAAU,CAAC;EAChDf,SAAS,CAAC9B,QAAQ,CAAc6C,IAAI,CAAC,MAAM,CAAC;EAC5Cf,SAAS,CAAC9B,QAAQ,CAAc6C,IAAI,CAAC,OAAO,CAAC;EAC7Cf,SAAS,CAAC9B,QAAQ,CAAc6C,IAAI,CAAC,cAAc,CAAC;;EAErD;EACA,IAAMC,WAAW,GAAGC,cAAc,CAACjB,SAAS,CAAC;EAC7ChD,aAAa,CAACgD,SAAS,CAAC9B,QAAQ,EAAS8C,WAAW,CAAC;EACrDhB,SAAS,CAAC9B,QAAQ,GAAG8B,SAAS,CAAC9B,QAAQ,CAClCgD,MAAM,CAAEzB,KAAa,IAAK,CAACA,KAAK,CAAC0B,QAAQ,CAAC,GAAG,CAAC,CAAC,CAC/CD,MAAM,CAAC,CAACE,IAAS,EAAEC,GAAQ,EAAEC,GAAQ,KAAKA,GAAG,CAACC,OAAO,CAACH,IAAI,CAAC,KAAKC,GAAG,CAAC,CAAC,CAAC;;EAE3E;EACArB,SAAS,CAACtC,OAAO,GAAGsC,SAAS,CAACtC,OAAO,IAAI,CAAC;EAE1C,IAAM8D,UAAsB,GAAGxB,SAAS,CAAC/B,OAAO,CAACuB,GAAG,CAACiC,KAAK,IAAI;IAC1D,IAAMC,OAAO,GAAGtE,oBAAoB,CAACqE,KAAK,CAAC,GAAGA,KAAK,CAAClB,KAAK,CAAC,CAAC,CAAC,GAAG,CAACkB,KAAK,CAAC;IACtE;AACR;AACA;AACA;IACQ,IAAI,CAACC,OAAO,CAACP,QAAQ,CAACzC,WAAW,CAAC,EAAE;MAChCgD,OAAO,CAACX,IAAI,CAACrC,WAAW,CAAC;IAC7B;;IAEA;IACA;IACA,IAAIgD,OAAO,CAAC,CAAC,CAAC,KAAK,UAAU,EAAE;MAC3BA,OAAO,CAACC,OAAO,CAAC,UAAU,CAAC;IAC/B;IAEA,OAAOD,OAAO;EAClB,CAAC,CAAC;EAEF,IAAIF,UAAU,CAACI,MAAM,KAAK,CAAC,EAAE;IACzBJ,UAAU,CAACT,IAAI,CAAChB,eAAe,CAACrB,WAAW,CAAC,CAAC;EACjD;;EAEA;EACA8C,UAAU,CAACT,IAAI,CAAC,CAAC,WAAW,EAAErC,WAAW,CAAC,CAAC;;EAE3C;EACA,IAAIsB,SAAS,CAAC6B,eAAe,EAAE;IAC3B7B,SAAS,CAAC6B,eAAe,CAACrC,GAAG,CAACsC,GAAG,IAAI;MACjCN,UAAU,CAA
CT,IAAI,CAACe,GAAG,CAAC;IACxB,CAAC,CAAC;EACN;;EAEA;EACA,IAAMC,QAAQ,GAAG,IAAIC,GAAG,CAAS,CAAC;EAClCR,UAAU,CAACN,MAAM,CAACO,KAAK,IAAI;IACvB,IAAMQ,QAAQ,GAAGR,KAAK,CAAC9B,IAAI,CAAC,GAAG,CAAC;IAChC,IAAIoC,QAAQ,CAACG,GAAG,CAACD,QAAQ,CAAC,EAAE;MACxB,OAAO,KAAK;IAChB,CAAC,MAAM;MACHF,QAAQ,CAACI,GAAG,CAACF,QAAQ,CAAC;MACtB,OAAO,IAAI;IACf;EACJ,CAAC,CAAC;EAEFjC,SAAS,CAAC/B,OAAO,GAAGuD,UAAU;EAE9B,OAAOxB,SAAS;AACpB;AAGA,OAAO,IAAMc,cAA0B,GAAG;EACtChD,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACR;AACR;AACA;AACA;IACQqE,GAAG,EAAE;MACDtE,IAAI,EAAE,QAAQ;MACd;AACZ;AACA;MACYuE,OAAO,EAAE/E,mBAAmB;MAC5BgF,OAAO,EAAE,gBAAgB;MACzBC,UAAU,EAAE;IAChB;EACJ,CAAC;EACD;AACJ;AACA;AACA;EACItC,oBAAoB,EAAE,IAAW;EACjC/B,QAAQ,EAAE,CACN,KAAK;AAEb,CAAC;;AAGD;AACA;AACA;AACA;AACA,OAAO,SAAS+C,cAAcA,CAC1BtC,UAA2B,EACnB;EACR,IAAMH,GAAG,GAAG0B,MAAM,CAACsC,IAAI,CAAC7D,UAAU,CAACZ,UAAU,CAAC,CACzCmD,MAAM,CAAC/B,GAAG,IAAKR,UAAU,CAASZ,UAAU,CAACoB,GAAG,CAAC,CAACsD,KAAK,CAAC;;EAE7D;EACA,IAAM/D,WAAW,GAAGQ,2BAA2B,CAACP,UAAU,CAAChB,UAAU,CAAC;EACtEa,GAAG,CAACuC,IAAI,CAACrC,WAAW,CAAC;;EAErB;EACA,IAAI,OAAOC,UAAU,CAAChB,UAAU,KAAK,QAAQ,EAAE;IAC1CgB,UAAU,CAAChB,UAAU,CAA4B4B,MAAM,CACnDmD,OAAO,CAACjD,KAAK,IAAIjB,GAAG,CAACuC,IAAI,CAACtB,KAAe,CAAC,CAAC;EACpD;EAEA,OAAOjB,GAAG;AACd;;AAEA;AACA;AACA;AACA;AACA,OAAO,SAASmE,sBAAsBA,CAACC,QAAuB,EAAEC,GAAQ,EAAO;EAC3E,IAAMC,WAAW,GAAG5C,MAAM,CAACsC,IAAI,CAACI,QAAQ,CAACG,aAAa,CAAC;EACvD,KAAK,IAAIC,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGF,WAAW,CAAClB,MAAM,EAAE,EAAEoB,CAAC,EAAE;IACzC,IAAM7D,GAAG,GAAG2D,WAAW,CAACE,CAAC,CAAC;IAC1B,IAAI,CAAC9C,MAAM,CAACC,SAAS,CAACC,cAAc,CAACC,IAAI,CAACwC,GAAG,EAAE1D,GAAG,CAAC,IAAI,OAAO0D,GAAG,CAAC1D,GAAG,CAAC,KAAK,WAAW,EAAE;MACpF0D,GAAG,CAAC1D,GAAG,CAAC,GAAGyD,QAAQ,CAACG,aAAa,CAAC5D,GAAG,CAAC;IAC1C;EACJ;EACA,OAAO0D,GAAG;AACd;AAEA,OAAO,IAAMI,yBAA+E,GAAG;EAC3FnF,IAAI,EAAE,QAAQ;EACdC,UAAU,EAAE;IACRmF,EAAE,EAAE;MACApF,IAAI,EAAE;IACV,CAAC;IACDsE,GAAG,EAAE;MACDtE,IAAI,EAAE;IACV;EACJ,CAAC;EACDI,QAAQ,EAAE,CACN,IAAI,EACJ,KAAK,CACR;EACD+B,oBAAoB,EAAE;AAC1B,CAAU","ignoreList":[]} \ No newline at 
end of file diff --git a/dist/esm/rx-schema.js b/dist/esm/rx-schema.js deleted file mode 100644 index 2628d923875..00000000000 --- a/dist/esm/rx-schema.js +++ /dev/null @@ -1,157 +0,0 @@ -import _createClass from "@babel/runtime/helpers/createClass"; -import { overwriteGetterForCaching, isMaybeReadonlyArray, deepEqual } from "./plugins/utils/index.js"; -import { newRxError } from "./rx-error.js"; -import { runPluginHooks } from "./hooks.js"; -import { fillWithDefaultSettings, getComposedPrimaryKeyOfDocumentData, getFinalFields, getPrimaryFieldOfPrimaryKey, getSchemaByObjectPath, normalizeRxJsonSchema } from "./rx-schema-helper.js"; -import { overwritable } from "./overwritable.js"; -export var RxSchema = /*#__PURE__*/function () { - function RxSchema(jsonSchema, hashFunction) { - this.jsonSchema = jsonSchema; - this.hashFunction = hashFunction; - this.indexes = getIndexes(this.jsonSchema); - - // primary is always required - this.primaryPath = getPrimaryFieldOfPrimaryKey(this.jsonSchema.primaryKey); - this.finalFields = getFinalFields(this.jsonSchema); - } - var _proto = RxSchema.prototype; - /** - * checks if a given change on a document is allowed - * Ensures that: - * - final fields are not modified - * @throws {Error} if not valid - */ - _proto.validateChange = function validateChange(dataBefore, dataAfter) { - this.finalFields.forEach(fieldName => { - if (!deepEqual(dataBefore[fieldName], dataAfter[fieldName])) { - throw newRxError('DOC9', { - dataBefore, - dataAfter, - fieldName, - schema: this.jsonSchema - }); - } - }); - } - - /** - * creates the schema-based document-prototype, - * see RxCollection.getDocumentPrototype() - */; - _proto.getDocumentPrototype = function getDocumentPrototype() { - var proto = {}; - - /** - * On the top level, we know all keys - * and therefore do not have to create a new Proxy object - * for each document. Instead we define the getter in the prototype once. 
- */ - var pathProperties = getSchemaByObjectPath(this.jsonSchema, ''); - Object.keys(pathProperties).forEach(key => { - var fullPath = key; - - // getter - value - proto.__defineGetter__(key, function () { - if (!this.get || typeof this.get !== 'function') { - /** - * When an object gets added to the state of a vuejs-component, - * it happens that this getter is called with another scope. - * To prevent errors, we have to return undefined in this case - */ - return undefined; - } - var ret = this.get(fullPath); - return ret; - }); - // getter - observable$ - Object.defineProperty(proto, key + '$', { - get: function () { - return this.get$(fullPath); - }, - enumerable: false, - configurable: false - }); - // getter - reactivity$$ - Object.defineProperty(proto, key + '$$', { - get: function () { - return this.get$$(fullPath); - }, - enumerable: false, - configurable: false - }); - // getter - populate_ - Object.defineProperty(proto, key + '_', { - get: function () { - return this.populate(fullPath); - }, - enumerable: false, - configurable: false - }); - }); - overwriteGetterForCaching(this, 'getDocumentPrototype', () => proto); - return proto; - }; - _proto.getPrimaryOfDocumentData = function getPrimaryOfDocumentData(documentData) { - return getComposedPrimaryKeyOfDocumentData(this.jsonSchema, documentData); - }; - return _createClass(RxSchema, [{ - key: "version", - get: function () { - return this.jsonSchema.version; - } - }, { - key: "defaultValues", - get: function () { - var values = {}; - Object.entries(this.jsonSchema.properties).filter(([, v]) => Object.prototype.hasOwnProperty.call(v, 'default')).forEach(([k, v]) => values[k] = v.default); - return overwriteGetterForCaching(this, 'defaultValues', values); - } - - /** - * @overrides itself on the first call - * - * TODO this should be a pure function that - * caches the hash in a WeakMap. 
- */ - }, { - key: "hash", - get: function () { - return overwriteGetterForCaching(this, 'hash', this.hashFunction(JSON.stringify(this.jsonSchema))); - } - }]); -}(); -export function getIndexes(jsonSchema) { - return (jsonSchema.indexes || []).map(index => isMaybeReadonlyArray(index) ? index : [index]); -} - -/** - * array with previous version-numbers - */ -export function getPreviousVersions(schema) { - var version = schema.version ? schema.version : 0; - var c = 0; - return new Array(version).fill(0).map(() => c++); -} -export function createRxSchema(jsonSchema, hashFunction, runPreCreateHooks = true) { - if (runPreCreateHooks) { - runPluginHooks('preCreateRxSchema', jsonSchema); - } - var useJsonSchema = fillWithDefaultSettings(jsonSchema); - useJsonSchema = normalizeRxJsonSchema(useJsonSchema); - overwritable.deepFreezeWhenDevMode(useJsonSchema); - var schema = new RxSchema(useJsonSchema, hashFunction); - runPluginHooks('createRxSchema', schema); - return schema; -} -export function isRxSchema(obj) { - return obj instanceof RxSchema; -} - -/** - * Used as helper function the generate the document type out of the schema via typescript. 
- * @link https://github.com/pubkey/rxdb/discussions/3467 - */ -export function toTypedRxJsonSchema(schema) { - return schema; -} -//# sourceMappingURL=rx-schema.js.map \ No newline at end of file diff --git a/dist/esm/rx-schema.js.map b/dist/esm/rx-schema.js.map deleted file mode 100644 index 9188bc2802f..00000000000 --- a/dist/esm/rx-schema.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-schema.js","names":["overwriteGetterForCaching","isMaybeReadonlyArray","deepEqual","newRxError","runPluginHooks","fillWithDefaultSettings","getComposedPrimaryKeyOfDocumentData","getFinalFields","getPrimaryFieldOfPrimaryKey","getSchemaByObjectPath","normalizeRxJsonSchema","overwritable","RxSchema","jsonSchema","hashFunction","indexes","getIndexes","primaryPath","primaryKey","finalFields","_proto","prototype","validateChange","dataBefore","dataAfter","forEach","fieldName","schema","getDocumentPrototype","proto","pathProperties","Object","keys","key","fullPath","__defineGetter__","get","undefined","ret","defineProperty","get$","enumerable","configurable","get$$","populate","getPrimaryOfDocumentData","documentData","_createClass","version","values","entries","properties","filter","v","hasOwnProperty","call","k","default","JSON","stringify","map","index","getPreviousVersions","c","Array","fill","createRxSchema","runPreCreateHooks","useJsonSchema","deepFreezeWhenDevMode","isRxSchema","obj","toTypedRxJsonSchema"],"sources":["../../src/rx-schema.ts"],"sourcesContent":["import {\n overwriteGetterForCaching,\n isMaybeReadonlyArray,\n deepEqual\n} from './plugins/utils/index.ts';\nimport {\n newRxError,\n} from './rx-error.ts';\nimport {\n runPluginHooks\n} from './hooks.ts';\n\nimport type {\n DeepMutable,\n DeepReadonly,\n HashFunction,\n MaybeReadonly,\n RxDocument,\n RxDocumentData,\n RxJsonSchema,\n StringKeys\n} from './types/index.d.ts';\nimport {\n fillWithDefaultSettings,\n getComposedPrimaryKeyOfDocumentData,\n getFinalFields,\n getPrimaryFieldOfPrimaryKey,\n 
getSchemaByObjectPath,\n normalizeRxJsonSchema\n} from './rx-schema-helper.ts';\nimport { overwritable } from './overwritable.ts';\n\nexport class RxSchema {\n public indexes: MaybeReadonly[];\n public readonly primaryPath: StringKeys>;\n public finalFields: string[];\n\n constructor(\n public readonly jsonSchema: RxJsonSchema>,\n public readonly hashFunction: HashFunction\n ) {\n this.indexes = getIndexes(this.jsonSchema);\n\n // primary is always required\n this.primaryPath = getPrimaryFieldOfPrimaryKey(this.jsonSchema.primaryKey);\n\n this.finalFields = getFinalFields(this.jsonSchema);\n }\n\n public get version(): number {\n return this.jsonSchema.version;\n }\n\n public get defaultValues(): { [P in keyof RxDocType]: RxDocType[P] } {\n const values = {} as { [P in keyof RxDocType]: RxDocType[P] };\n Object\n .entries(this.jsonSchema.properties)\n .filter(([, v]) => Object.prototype.hasOwnProperty.call(v, 'default'))\n .forEach(([k, v]) => (values as any)[k] = (v as any).default);\n return overwriteGetterForCaching(\n this,\n 'defaultValues',\n values\n );\n }\n\n /**\n * @overrides itself on the first call\n *\n * TODO this should be a pure function that\n * caches the hash in a WeakMap.\n */\n public get hash(): Promise {\n return overwriteGetterForCaching(\n this,\n 'hash',\n this.hashFunction(JSON.stringify(this.jsonSchema))\n );\n }\n\n /**\n * checks if a given change on a document is allowed\n * Ensures that:\n * - final fields are not modified\n * @throws {Error} if not valid\n */\n validateChange(dataBefore: any, dataAfter: any): void {\n this.finalFields.forEach(fieldName => {\n if (!deepEqual(dataBefore[fieldName], dataAfter[fieldName])) {\n throw newRxError('DOC9', {\n dataBefore,\n dataAfter,\n fieldName,\n schema: this.jsonSchema\n });\n }\n });\n }\n\n /**\n * creates the schema-based document-prototype,\n * see RxCollection.getDocumentPrototype()\n */\n public getDocumentPrototype(): any {\n const proto: any = {};\n\n /**\n * On the top level, we 
know all keys\n * and therefore do not have to create a new Proxy object\n * for each document. Instead we define the getter in the prototype once.\n */\n const pathProperties = getSchemaByObjectPath(\n this.jsonSchema,\n ''\n );\n Object.keys(pathProperties)\n .forEach(key => {\n const fullPath = key;\n\n // getter - value\n proto.__defineGetter__(\n key,\n function (this: RxDocument) {\n if (!this.get || typeof this.get !== 'function') {\n /**\n * When an object gets added to the state of a vuejs-component,\n * it happens that this getter is called with another scope.\n * To prevent errors, we have to return undefined in this case\n */\n return undefined;\n }\n const ret = this.get(fullPath);\n return ret;\n }\n );\n // getter - observable$\n Object.defineProperty(proto, key + '$', {\n get: function () {\n return this.get$(fullPath);\n },\n enumerable: false,\n configurable: false\n });\n // getter - reactivity$$\n Object.defineProperty(proto, key + '$$', {\n get: function () {\n return this.get$$(fullPath);\n },\n enumerable: false,\n configurable: false\n });\n // getter - populate_\n Object.defineProperty(proto, key + '_', {\n get: function () {\n return this.populate(fullPath);\n },\n enumerable: false,\n configurable: false\n });\n });\n\n overwriteGetterForCaching(\n this,\n 'getDocumentPrototype',\n () => proto\n );\n return proto;\n }\n\n\n getPrimaryOfDocumentData(\n documentData: Partial\n ): string {\n return getComposedPrimaryKeyOfDocumentData(\n this.jsonSchema,\n documentData\n );\n }\n}\n\nexport function getIndexes(\n jsonSchema: RxJsonSchema\n): MaybeReadonly[] {\n return (jsonSchema.indexes || []).map(index => isMaybeReadonlyArray(index) ? index : [index]);\n}\n\n/**\n * array with previous version-numbers\n */\nexport function getPreviousVersions(schema: RxJsonSchema): number[] {\n const version = schema.version ? 
schema.version : 0;\n let c = 0;\n return new Array(version)\n .fill(0)\n .map(() => c++);\n}\n\nexport function createRxSchema(\n jsonSchema: RxJsonSchema,\n hashFunction: HashFunction,\n runPreCreateHooks = true\n): RxSchema {\n if (runPreCreateHooks) {\n runPluginHooks('preCreateRxSchema', jsonSchema);\n }\n\n let useJsonSchema = fillWithDefaultSettings(jsonSchema);\n useJsonSchema = normalizeRxJsonSchema(useJsonSchema);\n overwritable.deepFreezeWhenDevMode(useJsonSchema);\n\n const schema = new RxSchema(useJsonSchema, hashFunction);\n runPluginHooks('createRxSchema', schema);\n return schema;\n}\n\nexport function isRxSchema(obj: any): boolean {\n return obj instanceof RxSchema;\n}\n\n/**\n * Used as helper function the generate the document type out of the schema via typescript.\n * @link https://github.com/pubkey/rxdb/discussions/3467\n */\nexport function toTypedRxJsonSchema>>(schema: T): DeepMutable {\n return schema as any;\n}\n"],"mappings":";AAAA,SACIA,yBAAyB,EACzBC,oBAAoB,EACpBC,SAAS,QACN,0BAA0B;AACjC,SACIC,UAAU,QACP,eAAe;AACtB,SACIC,cAAc,QACX,YAAY;AAYnB,SACIC,uBAAuB,EACvBC,mCAAmC,EACnCC,cAAc,EACdC,2BAA2B,EAC3BC,qBAAqB,EACrBC,qBAAqB,QAClB,uBAAuB;AAC9B,SAASC,YAAY,QAAQ,mBAAmB;AAEhD,WAAaC,QAAQ;EAKjB,SAAAA,SACoBC,UAAmD,EACnDC,YAA0B,EAC5C;IAAA,KAFkBD,UAAmD,GAAnDA,UAAmD;IAAA,KACnDC,YAA0B,GAA1BA,YAA0B;IAE1C,IAAI,CAACC,OAAO,GAAGC,UAAU,CAAC,IAAI,CAACH,UAAU,CAAC;;IAE1C;IACA,IAAI,CAACI,WAAW,GAAGT,2BAA2B,CAAC,IAAI,CAACK,UAAU,CAACK,UAAU,CAAC;IAE1E,IAAI,CAACC,WAAW,GAAGZ,cAAc,CAAC,IAAI,CAACM,UAAU,CAAC;EACtD;EAAC,IAAAO,MAAA,GAAAR,QAAA,CAAAS,SAAA;EAiCD;AACJ;AACA;AACA;AACA;AACA;EALID,MAAA,CAMAE,cAAc,GAAd,SAAAA,eAAeC,UAAe,EAAEC,SAAc,EAAQ;IAClD,IAAI,CAACL,WAAW,CAACM,OAAO,CAACC,SAAS,IAAI;MAClC,IAAI,CAACxB,SAAS,CAACqB,UAAU,CAACG,SAAS,CAAC,EAAEF,SAAS,CAACE,SAAS,CAAC,CAAC,EAAE;QACzD,MAAMvB,UAAU,CAAC,MAAM,EAAE;UACrBoB,UAAU;UACVC,SAAS;UACTE,SAAS;UACTC,MAAM,EAAE,IAAI,CAACd;QACjB,CAAC,CAAC;MACN;IACJ,CAAC,CAAC;EACN;;EAEA;AACJ;AACA;AACA,KAHI;EAAAO,MAAA,CAIOQ,oBAAoB,GAA3B,SAAAA,qBAAA,
EAAmC;IAC/B,IAAMC,KAAU,GAAG,CAAC,CAAC;;IAErB;AACR;AACA;AACA;AACA;IACQ,IAAMC,cAAc,GAAGrB,qBAAqB,CACxC,IAAI,CAACI,UAAU,EACf,EACJ,CAAC;IACDkB,MAAM,CAACC,IAAI,CAACF,cAAc,CAAC,CACtBL,OAAO,CAACQ,GAAG,IAAI;MACZ,IAAMC,QAAQ,GAAGD,GAAG;;MAEpB;MACAJ,KAAK,CAACM,gBAAgB,CAClBF,GAAG,EACH,YAA4B;QACxB,IAAI,CAAC,IAAI,CAACG,GAAG,IAAI,OAAO,IAAI,CAACA,GAAG,KAAK,UAAU,EAAE;UAC7C;AAC5B;AACA;AACA;AACA;UAC4B,OAAOC,SAAS;QACpB;QACA,IAAMC,GAAG,GAAG,IAAI,CAACF,GAAG,CAACF,QAAQ,CAAC;QAC9B,OAAOI,GAAG;MACd,CACJ,CAAC;MACD;MACAP,MAAM,CAACQ,cAAc,CAACV,KAAK,EAAEI,GAAG,GAAG,GAAG,EAAE;QACpCG,GAAG,EAAE,SAAAA,CAAA,EAAY;UACb,OAAO,IAAI,CAACI,IAAI,CAACN,QAAQ,CAAC;QAC9B,CAAC;QACDO,UAAU,EAAE,KAAK;QACjBC,YAAY,EAAE;MAClB,CAAC,CAAC;MACF;MACAX,MAAM,CAACQ,cAAc,CAACV,KAAK,EAAEI,GAAG,GAAG,IAAI,EAAE;QACrCG,GAAG,EAAE,SAAAA,CAAA,EAAY;UACb,OAAO,IAAI,CAACO,KAAK,CAACT,QAAQ,CAAC;QAC/B,CAAC;QACDO,UAAU,EAAE,KAAK;QACjBC,YAAY,EAAE;MAClB,CAAC,CAAC;MACF;MACAX,MAAM,CAACQ,cAAc,CAACV,KAAK,EAAEI,GAAG,GAAG,GAAG,EAAE;QACpCG,GAAG,EAAE,SAAAA,CAAA,EAAY;UACb,OAAO,IAAI,CAACQ,QAAQ,CAACV,QAAQ,CAAC;QAClC,CAAC;QACDO,UAAU,EAAE,KAAK;QACjBC,YAAY,EAAE;MAClB,CAAC,CAAC;IACN,CAAC,CAAC;IAEN1C,yBAAyB,CACrB,IAAI,EACJ,sBAAsB,EACtB,MAAM6B,KACV,CAAC;IACD,OAAOA,KAAK;EAChB,CAAC;EAAAT,MAAA,CAGDyB,wBAAwB,GAAxB,SAAAA,yBACIC,YAAgC,EAC1B;IACN,OAAOxC,mCAAmC,CACtC,IAAI,CAACO,UAAU,EACfiC,YACJ,CAAC;EACL,CAAC;EAAA,OAAAC,YAAA,CAAAnC,QAAA;IAAAqB,GAAA;IAAAG,GAAA,EAhID,SAAAA,CAAA,EAA6B;MACzB,OAAO,IAAI,CAACvB,UAAU,CAACmC,OAAO;IAClC;EAAC;IAAAf,GAAA;IAAAG,GAAA,EAED,SAAAA,CAAA,EAAqE;MACjE,IAAMa,MAAM,GAAG,CAAC,CAA6C;MAC7DlB,MAAM,CACDmB,OAAO,CAAC,IAAI,CAACrC,UAAU,CAACsC,UAAU,CAAC,CACnCC,MAAM,CAAC,CAAC,GAAGC,CAAC,CAAC,KAAKtB,MAAM,CAACV,SAAS,CAACiC,cAAc,CAACC,IAAI,CAACF,CAAC,EAAE,SAAS,CAAC,CAAC,CACrE5B,OAAO,CAAC,CAAC,CAAC+B,CAAC,EAAEH,CAAC,CAAC,KAAMJ,MAAM,CAASO,CAAC,CAAC,GAAIH,CAAC,CAASI,OAAO,CAAC;MACjE,OAAOzD,yBAAyB,CAC5B,IAAI,EACJ,eAAe,EACfiD,MACJ,CAAC;IACL;;IAEA;AACJ;AACA;AACA;AACA;AACA;EALI;IAAAhB,GAAA;IAAAG,GAAA,EAMA,SAAAA,CAAA,EAAmC;MAC/B,OAAOpC,yBAAyB,CAC5B,IAAI,EACJ,MAAM,EAC
N,IAAI,CAACc,YAAY,CAAC4C,IAAI,CAACC,SAAS,CAAC,IAAI,CAAC9C,UAAU,CAAC,CACrD,CAAC;IACL;EAAC;AAAA;AAsGL,OAAO,SAASG,UAAUA,CACtBH,UAAmC,EACV;EACzB,OAAO,CAACA,UAAU,CAACE,OAAO,IAAI,EAAE,EAAE6C,GAAG,CAACC,KAAK,IAAI5D,oBAAoB,CAAC4D,KAAK,CAAC,GAAGA,KAAK,GAAG,CAACA,KAAK,CAAC,CAAC;AACjG;;AAEA;AACA;AACA;AACA,OAAO,SAASC,mBAAmBA,CAACnC,MAAyB,EAAY;EACrE,IAAMqB,OAAO,GAAGrB,MAAM,CAACqB,OAAO,GAAGrB,MAAM,CAACqB,OAAO,GAAG,CAAC;EACnD,IAAIe,CAAC,GAAG,CAAC;EACT,OAAO,IAAIC,KAAK,CAAChB,OAAO,CAAC,CACpBiB,IAAI,CAAC,CAAC,CAAC,CACPL,GAAG,CAAC,MAAMG,CAAC,EAAE,CAAC;AACvB;AAEA,OAAO,SAASG,cAAcA,CAC1BrD,UAA2B,EAC3BC,YAA0B,EAC1BqD,iBAAiB,GAAG,IAAI,EACb;EACX,IAAIA,iBAAiB,EAAE;IACnB/D,cAAc,CAAC,mBAAmB,EAAES,UAAU,CAAC;EACnD;EAEA,IAAIuD,aAAa,GAAG/D,uBAAuB,CAACQ,UAAU,CAAC;EACvDuD,aAAa,GAAG1D,qBAAqB,CAAC0D,aAAa,CAAC;EACpDzD,YAAY,CAAC0D,qBAAqB,CAACD,aAAa,CAAC;EAEjD,IAAMzC,MAAM,GAAG,IAAIf,QAAQ,CAACwD,aAAa,EAAEtD,YAAY,CAAC;EACxDV,cAAc,CAAC,gBAAgB,EAAEuB,MAAM,CAAC;EACxC,OAAOA,MAAM;AACjB;AAEA,OAAO,SAAS2C,UAAUA,CAACC,GAAQ,EAAW;EAC1C,OAAOA,GAAG,YAAY3D,QAAQ;AAClC;;AAEA;AACA;AACA;AACA;AACA,OAAO,SAAS4D,mBAAmBA,CAA4C7C,MAAS,EAAkB;EACtG,OAAOA,MAAM;AACjB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/rx-storage-helper.js b/dist/esm/rx-storage-helper.js deleted file mode 100644 index 938589436da..00000000000 --- a/dist/esm/rx-storage-helper.js +++ /dev/null @@ -1,722 +0,0 @@ -/** - * Helper functions for accessing the RxStorage instances. 
- */ - -import { overwritable } from "./overwritable.js"; -import { newRxError } from "./rx-error.js"; -import { getPrimaryFieldOfPrimaryKey } from "./rx-schema-helper.js"; -import { PROMISE_RESOLVE_TRUE, RXDB_VERSION, RX_META_LWT_MINIMUM, appendToArray, createRevision, ensureNotFalsy, flatClone, getDefaultRevision, getDefaultRxDocumentMeta, lastOfArray, now, promiseWait, randomCouchString } from "./plugins/utils/index.js"; -import { filter, map, startWith, switchMap } from 'rxjs'; -import { prepareQuery } from "./rx-query.js"; -import { normalizeMangoQuery } from "./rx-query-helper.js"; -import { runPluginHooks } from "./hooks.js"; -export var INTERNAL_STORAGE_NAME = '_rxdb_internal'; -export var RX_DATABASE_LOCAL_DOCS_STORAGE_NAME = 'rxdatabase_storage_local'; -export async function getSingleDocument(storageInstance, documentId) { - var results = await storageInstance.findDocumentsById([documentId], false); - var doc = results[0]; - if (doc) { - return doc; - } else { - return undefined; - } -} - -/** - * Writes a single document, - * throws RxStorageBulkWriteError on failure - */ -export async function writeSingle(instance, writeRow, context) { - var writeResult = await instance.bulkWrite([writeRow], context); - if (writeResult.error.length > 0) { - var error = writeResult.error[0]; - throw error; - } else { - var ret = writeResult.success[0]; - return ret; - } -} - -/** - * Observe the plain document data of a single document. - * Do not forget to unsubscribe. - */ -export function observeSingle(storageInstance, documentId) { - var firstFindPromise = getSingleDocument(storageInstance, documentId); - var ret = storageInstance.changeStream().pipe(map(evBulk => evBulk.events.find(ev => ev.documentId === documentId)), filter(ev => !!ev), map(ev => Promise.resolve(ensureNotFalsy(ev).documentData)), startWith(firstFindPromise), switchMap(v => v), filter(v => !!v)); - return ret; -} - -/** - * Checkpoints must be stackable over another. 
- * This is required form some RxStorage implementations - * like the sharding plugin, where a checkpoint only represents - * the document state from some, but not all shards. - */ -export function stackCheckpoints(checkpoints) { - return Object.assign({}, ...checkpoints); -} -export function throwIfIsStorageWriteError(collection, documentId, writeData, error) { - if (error) { - if (error.status === 409) { - throw newRxError('CONFLICT', { - collection: collection.name, - id: documentId, - writeError: error, - data: writeData - }); - } else if (error.status === 422) { - throw newRxError('VD2', { - collection: collection.name, - id: documentId, - writeError: error, - data: writeData - }); - } else { - throw error; - } - } -} - -/** - * Analyzes a list of BulkWriteRows and determines - * which documents must be inserted, updated or deleted - * and which events must be emitted and which documents cause a conflict - * and must not be written. - * Used as helper inside of some RxStorage implementations. - * @hotPath The performance of this function is critical - */ -export function categorizeBulkWriteRows(storageInstance, primaryPath, -/** - * Current state of the documents - * inside of the storage. Used to determine - * which writes cause conflicts. - * This must be a Map for better performance. - */ -docsInDb, -/** - * The write rows that are passed to - * RxStorageInstance().bulkWrite(). - */ -bulkWriteRows, context, -/** - * Used by some storages for better performance. - * For example when get-by-id and insert/update can run in parallel. 
- */ -onInsert, onUpdate) { - var hasAttachments = !!storageInstance.schema.attachments; - var bulkInsertDocs = []; - var bulkUpdateDocs = []; - var errors = []; - var eventBulkId = randomCouchString(10); - var eventBulk = { - id: eventBulkId, - events: [], - checkpoint: null, - context, - startTime: now(), - endTime: 0 - }; - var eventBulkEvents = eventBulk.events; - var attachmentsAdd = []; - var attachmentsRemove = []; - var attachmentsUpdate = []; - var hasDocsInDb = docsInDb.size > 0; - var newestRow; - - /** - * @performance is really important in this loop! - */ - var rowAmount = bulkWriteRows.length; - var _loop = function () { - var writeRow = bulkWriteRows[rowId]; - - // use these variables to have less property accesses - var document = writeRow.document; - var previous = writeRow.previous; - var docId = document[primaryPath]; - var documentDeleted = document._deleted; - var previousDeleted = previous && previous._deleted; - var documentInDb = undefined; - if (hasDocsInDb) { - documentInDb = docsInDb.get(docId); - } - var attachmentError; - if (!documentInDb) { - /** - * It is possible to insert already deleted documents, - * this can happen on replication. - */ - var insertedIsDeleted = documentDeleted ? 
true : false; - if (hasAttachments) { - Object.entries(document._attachments).forEach(([attachmentId, attachmentData]) => { - if (!attachmentData.data) { - attachmentError = { - documentId: docId, - isError: true, - status: 510, - writeRow, - attachmentId - }; - errors.push(attachmentError); - } else { - attachmentsAdd.push({ - documentId: docId, - attachmentId, - attachmentData: attachmentData, - digest: attachmentData.digest - }); - } - }); - } - if (!attachmentError) { - if (hasAttachments) { - bulkInsertDocs.push(stripAttachmentsDataFromRow(writeRow)); - if (onInsert) { - onInsert(document); - } - } else { - bulkInsertDocs.push(writeRow); - if (onInsert) { - onInsert(document); - } - } - newestRow = writeRow; - } - if (!insertedIsDeleted) { - var event = { - documentId: docId, - operation: 'INSERT', - documentData: hasAttachments ? stripAttachmentsDataFromDocument(document) : document, - previousDocumentData: hasAttachments && previous ? stripAttachmentsDataFromDocument(previous) : previous - }; - eventBulkEvents.push(event); - } - } else { - // update existing document - var revInDb = documentInDb._rev; - - /** - * Check for conflict - */ - if (!previous || !!previous && revInDb !== previous._rev) { - // is conflict error - var err = { - isError: true, - status: 409, - documentId: docId, - writeRow: writeRow, - documentInDb - }; - errors.push(err); - return 1; // continue - } - - // handle attachments data - - var updatedRow = hasAttachments ? stripAttachmentsDataFromRow(writeRow) : writeRow; - if (hasAttachments) { - if (documentDeleted) { - /** - * Deleted documents must have cleared all their attachments. 
- */ - if (previous) { - Object.keys(previous._attachments).forEach(attachmentId => { - attachmentsRemove.push({ - documentId: docId, - attachmentId, - digest: ensureNotFalsy(previous)._attachments[attachmentId].digest - }); - }); - } - } else { - // first check for errors - Object.entries(document._attachments).find(([attachmentId, attachmentData]) => { - var previousAttachmentData = previous ? previous._attachments[attachmentId] : undefined; - if (!previousAttachmentData && !attachmentData.data) { - attachmentError = { - documentId: docId, - documentInDb: documentInDb, - isError: true, - status: 510, - writeRow, - attachmentId - }; - } - return true; - }); - if (!attachmentError) { - Object.entries(document._attachments).forEach(([attachmentId, attachmentData]) => { - var previousAttachmentData = previous ? previous._attachments[attachmentId] : undefined; - if (!previousAttachmentData) { - attachmentsAdd.push({ - documentId: docId, - attachmentId, - attachmentData: attachmentData, - digest: attachmentData.digest - }); - } else { - var newDigest = updatedRow.document._attachments[attachmentId].digest; - if (attachmentData.data && - /** - * Performance shortcut, - * do not update the attachment data if it did not change. - */ - previousAttachmentData.digest !== newDigest) { - attachmentsUpdate.push({ - documentId: docId, - attachmentId, - attachmentData: attachmentData, - digest: attachmentData.digest - }); - } - } - }); - } - } - } - if (attachmentError) { - errors.push(attachmentError); - } else { - if (hasAttachments) { - bulkUpdateDocs.push(stripAttachmentsDataFromRow(updatedRow)); - if (onUpdate) { - onUpdate(document); - } - } else { - bulkUpdateDocs.push(updatedRow); - if (onUpdate) { - onUpdate(document); - } - } - newestRow = updatedRow; - } - var eventDocumentData = null; - var previousEventDocumentData = null; - var operation = null; - if (previousDeleted && !documentDeleted) { - operation = 'INSERT'; - eventDocumentData = hasAttachments ? 
stripAttachmentsDataFromDocument(document) : document; - } else if (previous && !previousDeleted && !documentDeleted) { - operation = 'UPDATE'; - eventDocumentData = hasAttachments ? stripAttachmentsDataFromDocument(document) : document; - previousEventDocumentData = previous; - } else if (documentDeleted) { - operation = 'DELETE'; - eventDocumentData = ensureNotFalsy(document); - previousEventDocumentData = previous; - } else { - throw newRxError('SNH', { - args: { - writeRow - } - }); - } - var _event = { - documentId: docId, - documentData: eventDocumentData, - previousDocumentData: previousEventDocumentData, - operation: operation - }; - eventBulkEvents.push(_event); - } - }; - for (var rowId = 0; rowId < rowAmount; rowId++) { - if (_loop()) continue; - } - return { - bulkInsertDocs, - bulkUpdateDocs, - newestRow, - errors, - eventBulk, - attachmentsAdd, - attachmentsRemove, - attachmentsUpdate - }; -} -export function stripAttachmentsDataFromRow(writeRow) { - return { - previous: writeRow.previous, - document: stripAttachmentsDataFromDocument(writeRow.document) - }; -} -export function getAttachmentSize(attachmentBase64String) { - return atob(attachmentBase64String).length; -} - -/** - * Used in custom RxStorage implementations. - */ -export function attachmentWriteDataToNormalData(writeData) { - var data = writeData.data; - if (!data) { - return writeData; - } - var ret = { - length: getAttachmentSize(data), - digest: writeData.digest, - type: writeData.type - }; - return ret; -} -export function stripAttachmentsDataFromDocument(doc) { - if (!doc._attachments || Object.keys(doc._attachments).length === 0) { - return doc; - } - var useDoc = flatClone(doc); - useDoc._attachments = {}; - Object.entries(doc._attachments).forEach(([attachmentId, attachmentData]) => { - useDoc._attachments[attachmentId] = attachmentWriteDataToNormalData(attachmentData); - }); - return useDoc; -} - -/** - * Flat clone the document data - * and also the _meta field. 
- * Used many times when we want to change the meta - * during replication etc. - */ -export function flatCloneDocWithMeta(doc) { - return Object.assign({}, doc, { - _meta: flatClone(doc._meta) - }); -} -/** - * Wraps the normal storageInstance of a RxCollection - * to ensure that all access is properly using the hooks - * and other data transformations and also ensure that database.lockedRun() - * is used properly. - */ -export function getWrappedStorageInstance(database, storageInstance, -/** - * The original RxJsonSchema - * before it was mutated by hooks. - */ -rxJsonSchema) { - overwritable.deepFreezeWhenDevMode(rxJsonSchema); - var ret = { - originalStorageInstance: storageInstance, - schema: storageInstance.schema, - internals: storageInstance.internals, - collectionName: storageInstance.collectionName, - databaseName: storageInstance.databaseName, - options: storageInstance.options, - bulkWrite(rows, context) { - var databaseToken = database.token; - var toStorageWriteRows = new Array(rows.length); - /** - * Use the same timestamp for all docs of this rows-set. - * This improves performance because calling Date.now() inside of the now() function - * is too costly. - */ - var time = now(); - for (var index = 0; index < rows.length; index++) { - var writeRow = rows[index]; - var document = flatCloneDocWithMeta(writeRow.document); - document._meta.lwt = time; - - /** - * Yes we really want to set the revision here. - * If you make a plugin that relies on having its own revision - * stored into the storage, use this.originalStorageInstance.bulkWrite() instead. 
- */ - var previous = writeRow.previous; - document._rev = createRevision(databaseToken, previous); - toStorageWriteRows[index] = { - document, - previous - }; - } - runPluginHooks('preStorageWrite', { - storageInstance: this.originalStorageInstance, - rows: toStorageWriteRows - }); - return database.lockedRun(() => storageInstance.bulkWrite(toStorageWriteRows, context)) - /** - * The RxStorageInstance MUST NOT allow to insert already _deleted documents, - * without sending the previous document version. - * But for better developer experience, RxDB does allow to re-insert deleted documents. - * We do this by automatically fixing the conflict errors for that case - * by running another bulkWrite() and merging the results. - * @link https://github.com/pubkey/rxdb/pull/3839 - */.then(writeResult => { - var useWriteResult = { - error: [], - success: writeResult.success.slice(0) - }; - var reInsertErrors = writeResult.error.length === 0 ? [] : writeResult.error.filter(error => { - if (error.status === 409 && !error.writeRow.previous && !error.writeRow.document._deleted && ensureNotFalsy(error.documentInDb)._deleted) { - return true; - } - useWriteResult.error.push(error); - return false; - }); - if (reInsertErrors.length > 0) { - var reInserts = reInsertErrors.map(error => { - return { - previous: error.documentInDb, - document: Object.assign({}, error.writeRow.document, { - _rev: createRevision(database.token, error.documentInDb) - }) - }; - }); - return database.lockedRun(() => storageInstance.bulkWrite(reInserts, context)).then(subResult => { - appendToArray(useWriteResult.error, subResult.error); - appendToArray(useWriteResult.success, subResult.success); - return useWriteResult; - }); - } - return writeResult; - }); - }, - query(preparedQuery) { - return database.lockedRun(() => storageInstance.query(preparedQuery)); - }, - count(preparedQuery) { - return database.lockedRun(() => storageInstance.count(preparedQuery)); - }, - findDocumentsById(ids, deleted) { - 
return database.lockedRun(() => storageInstance.findDocumentsById(ids, deleted)); - }, - getAttachmentData(documentId, attachmentId, digest) { - return database.lockedRun(() => storageInstance.getAttachmentData(documentId, attachmentId, digest)); - }, - getChangedDocumentsSince: !storageInstance.getChangedDocumentsSince ? undefined : (limit, checkpoint) => { - return database.lockedRun(() => storageInstance.getChangedDocumentsSince(ensureNotFalsy(limit), checkpoint)); - }, - cleanup(minDeletedTime) { - return database.lockedRun(() => storageInstance.cleanup(minDeletedTime)); - }, - remove() { - database.storageInstances.delete(ret); - return database.lockedRun(() => storageInstance.remove()); - }, - close() { - database.storageInstances.delete(ret); - return database.lockedRun(() => storageInstance.close()); - }, - changeStream() { - return storageInstance.changeStream(); - }, - conflictResultionTasks() { - return storageInstance.conflictResultionTasks(); - }, - resolveConflictResultionTask(taskSolution) { - if (taskSolution.output.isEqual) { - return storageInstance.resolveConflictResultionTask(taskSolution); - } - var doc = Object.assign({}, taskSolution.output.documentData, { - _meta: getDefaultRxDocumentMeta(), - _rev: getDefaultRevision(), - _attachments: {} - }); - var documentData = flatClone(doc); - delete documentData._meta; - delete documentData._rev; - delete documentData._attachments; - return storageInstance.resolveConflictResultionTask({ - id: taskSolution.id, - output: { - isEqual: false, - documentData - } - }); - } - }; - database.storageInstances.add(ret); - return ret; -} - -/** - * Each RxStorage implementation should - * run this method at the first step of createStorageInstance() - * to ensure that the configuration is correct. 
- */ -export function ensureRxStorageInstanceParamsAreCorrect(params) { - if (params.schema.keyCompression) { - throw newRxError('UT5', { - args: { - params - } - }); - } - if (hasEncryption(params.schema)) { - throw newRxError('UT6', { - args: { - params - } - }); - } - if (params.schema.attachments && params.schema.attachments.compression) { - throw newRxError('UT7', { - args: { - params - } - }); - } -} -export function hasEncryption(jsonSchema) { - if (!!jsonSchema.encrypted && jsonSchema.encrypted.length > 0 || jsonSchema.attachments && jsonSchema.attachments.encrypted) { - return true; - } else { - return false; - } -} -export function getChangedDocumentsSinceQuery(storageInstance, limit, checkpoint) { - var primaryPath = getPrimaryFieldOfPrimaryKey(storageInstance.schema.primaryKey); - var sinceLwt = checkpoint ? checkpoint.lwt : RX_META_LWT_MINIMUM; - var sinceId = checkpoint ? checkpoint.id : ''; - return normalizeMangoQuery(storageInstance.schema, { - selector: { - $or: [{ - '_meta.lwt': { - $gt: sinceLwt - } - }, { - '_meta.lwt': { - $eq: sinceLwt - }, - [primaryPath]: { - $gt: checkpoint ? sinceId : '' - } - }], - // add this hint for better index usage - '_meta.lwt': { - $gte: sinceLwt - } - }, - sort: [{ - '_meta.lwt': 'asc' - }, { - [primaryPath]: 'asc' - }], - skip: 0, - limit - /** - * DO NOT SET A SPECIFIC INDEX HERE! - * The query might be modified by some plugin - * before sending it to the storage. - * We can be sure that in the end the query planner - * will find the best index. 
- */ - // index: ['_meta.lwt', primaryPath] - }); -} -export async function getChangedDocumentsSince(storageInstance, limit, checkpoint) { - if (storageInstance.getChangedDocumentsSince) { - return storageInstance.getChangedDocumentsSince(limit, checkpoint); - } - var primaryPath = getPrimaryFieldOfPrimaryKey(storageInstance.schema.primaryKey); - var query = prepareQuery(storageInstance.schema, getChangedDocumentsSinceQuery(storageInstance, limit, checkpoint)); - var result = await storageInstance.query(query); - var documents = result.documents; - var lastDoc = lastOfArray(documents); - return { - documents: documents, - checkpoint: lastDoc ? { - id: lastDoc[primaryPath], - lwt: lastDoc._meta.lwt - } : checkpoint ? checkpoint : { - id: '', - lwt: 0 - } - }; -} - -/** - * Wraps the storage and simluates - * delays. Mostly used in tests. - */ -export function randomDelayStorage(input) { - /** - * Ensure writes to a delay storage - * are still correctly run in order. - */ - var randomDelayStorageWriteQueue = PROMISE_RESOLVE_TRUE; - var retStorage = { - name: 'random-delay-' + input.storage.name, - rxdbVersion: RXDB_VERSION, - async createStorageInstance(params) { - await promiseWait(input.delayTimeBefore()); - var storageInstance = await input.storage.createStorageInstance(params); - await promiseWait(input.delayTimeAfter()); - return { - databaseName: storageInstance.databaseName, - internals: storageInstance.internals, - options: storageInstance.options, - schema: storageInstance.schema, - collectionName: storageInstance.collectionName, - bulkWrite(a, b) { - randomDelayStorageWriteQueue = randomDelayStorageWriteQueue.then(async () => { - await promiseWait(input.delayTimeBefore()); - var response = await storageInstance.bulkWrite(a, b); - await promiseWait(input.delayTimeAfter()); - return response; - }); - var ret = randomDelayStorageWriteQueue; - return ret; - }, - async findDocumentsById(a, b) { - await promiseWait(input.delayTimeBefore()); - var ret = await 
storageInstance.findDocumentsById(a, b); - await promiseWait(input.delayTimeAfter()); - return ret; - }, - async query(a) { - await promiseWait(input.delayTimeBefore()); - var ret = await storageInstance.query(a); - return ret; - }, - async count(a) { - await promiseWait(input.delayTimeBefore()); - var ret = await storageInstance.count(a); - await promiseWait(input.delayTimeAfter()); - return ret; - }, - async getAttachmentData(a, b, c) { - await promiseWait(input.delayTimeBefore()); - var ret = await storageInstance.getAttachmentData(a, b, c); - await promiseWait(input.delayTimeAfter()); - return ret; - }, - getChangedDocumentsSince: !storageInstance.getChangedDocumentsSince ? undefined : async (a, b) => { - await promiseWait(input.delayTimeBefore()); - var ret = await ensureNotFalsy(storageInstance.getChangedDocumentsSince)(a, b); - await promiseWait(input.delayTimeAfter()); - return ret; - }, - changeStream() { - return storageInstance.changeStream(); - }, - conflictResultionTasks() { - return storageInstance.conflictResultionTasks(); - }, - resolveConflictResultionTask(a) { - return storageInstance.resolveConflictResultionTask(a); - }, - async cleanup(a) { - await promiseWait(input.delayTimeBefore()); - var ret = await storageInstance.cleanup(a); - await promiseWait(input.delayTimeAfter()); - return ret; - }, - async close() { - await promiseWait(input.delayTimeBefore()); - var ret = await storageInstance.close(); - await promiseWait(input.delayTimeAfter()); - return ret; - }, - async remove() { - await promiseWait(input.delayTimeBefore()); - var ret = await storageInstance.remove(); - await promiseWait(input.delayTimeAfter()); - return ret; - } - }; - } - }; - return retStorage; -} -//# sourceMappingURL=rx-storage-helper.js.map \ No newline at end of file diff --git a/dist/esm/rx-storage-helper.js.map b/dist/esm/rx-storage-helper.js.map deleted file mode 100644 index 3dd4d50bcab..00000000000 --- a/dist/esm/rx-storage-helper.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"rx-storage-helper.js","names":["overwritable","newRxError","getPrimaryFieldOfPrimaryKey","PROMISE_RESOLVE_TRUE","RXDB_VERSION","RX_META_LWT_MINIMUM","appendToArray","createRevision","ensureNotFalsy","flatClone","getDefaultRevision","getDefaultRxDocumentMeta","lastOfArray","now","promiseWait","randomCouchString","filter","map","startWith","switchMap","prepareQuery","normalizeMangoQuery","runPluginHooks","INTERNAL_STORAGE_NAME","RX_DATABASE_LOCAL_DOCS_STORAGE_NAME","getSingleDocument","storageInstance","documentId","results","findDocumentsById","doc","undefined","writeSingle","instance","writeRow","context","writeResult","bulkWrite","error","length","ret","success","observeSingle","firstFindPromise","changeStream","pipe","evBulk","events","find","ev","Promise","resolve","documentData","v","stackCheckpoints","checkpoints","Object","assign","throwIfIsStorageWriteError","collection","writeData","status","name","id","writeError","data","categorizeBulkWriteRows","primaryPath","docsInDb","bulkWriteRows","onInsert","onUpdate","hasAttachments","schema","attachments","bulkInsertDocs","bulkUpdateDocs","errors","eventBulkId","eventBulk","checkpoint","startTime","endTime","eventBulkEvents","attachmentsAdd","attachmentsRemove","attachmentsUpdate","hasDocsInDb","size","newestRow","rowAmount","_loop","rowId","document","previous","docId","documentDeleted","_deleted","previousDeleted","documentInDb","get","attachmentError","insertedIsDeleted","entries","_attachments","forEach","attachmentId","attachmentData","isError","push","digest","stripAttachmentsDataFromRow","event","operation","stripAttachmentsDataFromDocument","previousDocumentData","revInDb","_rev","err","updatedRow","keys","previousAttachmentData","newDigest","eventDocumentData","previousEventDocumentData","args","getAttachmentSize","attachmentBase64String","atob","attachmentWriteDataToNormalData","type","useDoc","flatCloneDocWithMeta","_meta","getWrappedStorageInstance","database","rxJsonSchema","deepF
reezeWhenDevMode","originalStorageInstance","internals","collectionName","databaseName","options","rows","databaseToken","token","toStorageWriteRows","Array","time","index","lwt","lockedRun","then","useWriteResult","slice","reInsertErrors","reInserts","subResult","query","preparedQuery","count","ids","deleted","getAttachmentData","getChangedDocumentsSince","limit","cleanup","minDeletedTime","remove","storageInstances","delete","close","conflictResultionTasks","resolveConflictResultionTask","taskSolution","output","isEqual","add","ensureRxStorageInstanceParamsAreCorrect","params","keyCompression","hasEncryption","compression","jsonSchema","encrypted","getChangedDocumentsSinceQuery","primaryKey","sinceLwt","sinceId","selector","$or","$gt","$eq","$gte","sort","skip","result","documents","lastDoc","randomDelayStorage","input","randomDelayStorageWriteQueue","retStorage","storage","rxdbVersion","createStorageInstance","delayTimeBefore","delayTimeAfter","a","b","response","c"],"sources":["../../src/rx-storage-helper.ts"],"sourcesContent":["/**\n * Helper functions for accessing the RxStorage instances.\n */\n\nimport { overwritable } from './overwritable.ts';\nimport { newRxError } from './rx-error.ts';\nimport {\n getPrimaryFieldOfPrimaryKey\n} from './rx-schema-helper.ts';\nimport type {\n BulkWriteRow,\n BulkWriteRowProcessed,\n CategorizeBulkWriteRowsOutput,\n EventBulk,\n RxAttachmentData,\n RxAttachmentWriteData,\n RxCollection,\n RxDatabase,\n RxDocumentData,\n RxDocumentWriteData,\n RxJsonSchema,\n RxStorageWriteError,\n RxStorageChangeEvent,\n RxStorageInstance,\n RxStorageInstanceCreationParams,\n StringKeys,\n RxStorageWriteErrorConflict,\n RxStorageWriteErrorAttachment,\n RxStorage,\n RxStorageDefaultCheckpoint,\n FilledMangoQuery\n} from './types/index.d.ts';\nimport {\n PROMISE_RESOLVE_TRUE,\n RXDB_VERSION,\n RX_META_LWT_MINIMUM,\n appendToArray,\n createRevision,\n ensureNotFalsy,\n flatClone,\n getDefaultRevision,\n getDefaultRxDocumentMeta,\n 
lastOfArray,\n now,\n promiseWait,\n randomCouchString\n} from './plugins/utils/index.ts';\nimport { Observable, filter, map, startWith, switchMap } from 'rxjs';\nimport { prepareQuery } from './rx-query.ts';\nimport { normalizeMangoQuery } from './rx-query-helper.ts';\nimport { runPluginHooks } from './hooks.ts';\n\nexport const INTERNAL_STORAGE_NAME = '_rxdb_internal';\nexport const RX_DATABASE_LOCAL_DOCS_STORAGE_NAME = 'rxdatabase_storage_local';\n\nexport async function getSingleDocument(\n storageInstance: RxStorageInstance,\n documentId: string\n): Promise | undefined> {\n const results = await storageInstance.findDocumentsById([documentId], false);\n const doc = results[0];\n if (doc) {\n return doc;\n } else {\n return undefined;\n }\n}\n\n/**\n * Writes a single document,\n * throws RxStorageBulkWriteError on failure\n */\nexport async function writeSingle(\n instance: RxStorageInstance,\n writeRow: BulkWriteRow,\n context: string\n): Promise> {\n const writeResult = await instance.bulkWrite(\n [writeRow],\n context\n );\n if (writeResult.error.length > 0) {\n const error = writeResult.error[0];\n throw error;\n } else {\n const ret = writeResult.success[0];\n return ret;\n }\n}\n\n/**\n * Observe the plain document data of a single document.\n * Do not forget to unsubscribe.\n */\nexport function observeSingle(\n storageInstance: RxStorageInstance,\n documentId: string\n): Observable> {\n const firstFindPromise = getSingleDocument(storageInstance, documentId);\n const ret = storageInstance\n .changeStream()\n .pipe(\n map(evBulk => evBulk.events.find(ev => ev.documentId === documentId)),\n filter(ev => !!ev),\n map(ev => Promise.resolve(ensureNotFalsy(ev).documentData)),\n startWith(firstFindPromise),\n switchMap(v => v),\n filter(v => !!v)\n ) as any;\n return ret;\n}\n\n/**\n * Checkpoints must be stackable over another.\n * This is required form some RxStorage implementations\n * like the sharding plugin, where a checkpoint only represents\n * the 
document state from some, but not all shards.\n */\nexport function stackCheckpoints(\n checkpoints: CheckpointType[]\n): CheckpointType {\n return Object.assign(\n {},\n ...checkpoints\n );\n}\n\nexport function throwIfIsStorageWriteError(\n collection: RxCollection,\n documentId: string,\n writeData: RxDocumentWriteData | RxDocType,\n error: RxStorageWriteError | undefined\n) {\n if (error) {\n if (error.status === 409) {\n throw newRxError('CONFLICT', {\n collection: collection.name,\n id: documentId,\n writeError: error,\n data: writeData\n });\n } else if (error.status === 422) {\n throw newRxError('VD2', {\n collection: collection.name,\n id: documentId,\n writeError: error,\n data: writeData\n });\n } else {\n throw error;\n }\n }\n}\n\n\n/**\n * Analyzes a list of BulkWriteRows and determines\n * which documents must be inserted, updated or deleted\n * and which events must be emitted and which documents cause a conflict\n * and must not be written.\n * Used as helper inside of some RxStorage implementations.\n * @hotPath The performance of this function is critical\n */\nexport function categorizeBulkWriteRows(\n storageInstance: RxStorageInstance,\n primaryPath: StringKeys,\n /**\n * Current state of the documents\n * inside of the storage. 
Used to determine\n * which writes cause conflicts.\n * This must be a Map for better performance.\n */\n docsInDb: Map[StringKeys] | string, RxDocumentData>,\n /**\n * The write rows that are passed to\n * RxStorageInstance().bulkWrite().\n */\n bulkWriteRows: BulkWriteRow[],\n context: string,\n /**\n * Used by some storages for better performance.\n * For example when get-by-id and insert/update can run in parallel.\n */\n onInsert?: (docData: RxDocumentData) => void,\n onUpdate?: (docData: RxDocumentData) => void\n): CategorizeBulkWriteRowsOutput {\n const hasAttachments = !!storageInstance.schema.attachments;\n const bulkInsertDocs: BulkWriteRowProcessed[] = [];\n const bulkUpdateDocs: BulkWriteRowProcessed[] = [];\n const errors: RxStorageWriteError[] = [];\n const eventBulkId = randomCouchString(10);\n const eventBulk: EventBulk>, any> = {\n id: eventBulkId,\n events: [],\n checkpoint: null,\n context,\n startTime: now(),\n endTime: 0\n };\n const eventBulkEvents = eventBulk.events;\n\n const attachmentsAdd: {\n documentId: string;\n attachmentId: string;\n attachmentData: RxAttachmentWriteData;\n digest: string;\n }[] = [];\n const attachmentsRemove: {\n documentId: string;\n attachmentId: string;\n digest: string;\n }[] = [];\n const attachmentsUpdate: {\n documentId: string;\n attachmentId: string;\n attachmentData: RxAttachmentWriteData;\n digest: string;\n }[] = [];\n\n const hasDocsInDb = docsInDb.size > 0;\n let newestRow: BulkWriteRowProcessed | undefined;\n\n /**\n * @performance is really important in this loop!\n */\n const rowAmount = bulkWriteRows.length;\n for (let rowId = 0; rowId < rowAmount; rowId++) {\n const writeRow = bulkWriteRows[rowId];\n\n // use these variables to have less property accesses\n const document = writeRow.document;\n const previous = writeRow.previous;\n const docId = document[primaryPath] as string;\n const documentDeleted = document._deleted;\n const previousDeleted = previous && previous._deleted;\n\n let 
documentInDb: RxDocumentData | undefined = undefined as any;\n if (hasDocsInDb) {\n documentInDb = docsInDb.get(docId);\n }\n let attachmentError: RxStorageWriteErrorAttachment | undefined;\n\n if (!documentInDb) {\n /**\n * It is possible to insert already deleted documents,\n * this can happen on replication.\n */\n const insertedIsDeleted = documentDeleted ? true : false;\n if (hasAttachments) {\n Object\n .entries(document._attachments)\n .forEach(([attachmentId, attachmentData]) => {\n if (\n !(attachmentData as RxAttachmentWriteData).data\n ) {\n attachmentError = {\n documentId: docId,\n isError: true,\n status: 510,\n writeRow,\n attachmentId\n };\n errors.push(attachmentError);\n } else {\n attachmentsAdd.push({\n documentId: docId,\n attachmentId,\n attachmentData: attachmentData as any,\n digest: attachmentData.digest\n });\n }\n });\n }\n if (!attachmentError) {\n if (hasAttachments) {\n bulkInsertDocs.push(stripAttachmentsDataFromRow(writeRow));\n if (onInsert) {\n onInsert(document);\n }\n } else {\n bulkInsertDocs.push(writeRow as any);\n if (onInsert) {\n onInsert(document);\n }\n }\n\n newestRow = writeRow as any;\n }\n\n if (!insertedIsDeleted) {\n const event = {\n documentId: docId,\n operation: 'INSERT' as const,\n documentData: hasAttachments ? stripAttachmentsDataFromDocument(document) : document as any,\n previousDocumentData: hasAttachments && previous ? stripAttachmentsDataFromDocument(previous) : previous as any\n };\n eventBulkEvents.push(event);\n }\n } else {\n // update existing document\n const revInDb: string = documentInDb._rev;\n\n /**\n * Check for conflict\n */\n if (\n (\n !previous\n ) ||\n (\n !!previous &&\n revInDb !== previous._rev\n )\n ) {\n // is conflict error\n const err: RxStorageWriteError = {\n isError: true,\n status: 409,\n documentId: docId,\n writeRow: writeRow,\n documentInDb\n };\n errors.push(err);\n continue;\n }\n\n // handle attachments data\n\n const updatedRow: BulkWriteRowProcessed = hasAttachments ? 
stripAttachmentsDataFromRow(writeRow) : writeRow as any;\n if (hasAttachments) {\n if (documentDeleted) {\n /**\n * Deleted documents must have cleared all their attachments.\n */\n if (previous) {\n Object\n .keys(previous._attachments)\n .forEach(attachmentId => {\n attachmentsRemove.push({\n documentId: docId,\n attachmentId,\n digest: ensureNotFalsy(previous)._attachments[attachmentId].digest\n });\n });\n }\n } else {\n // first check for errors\n Object\n .entries(document._attachments)\n .find(([attachmentId, attachmentData]) => {\n const previousAttachmentData = previous ? previous._attachments[attachmentId] : undefined;\n if (\n !previousAttachmentData &&\n !(attachmentData as RxAttachmentWriteData).data\n ) {\n attachmentError = {\n documentId: docId,\n documentInDb: documentInDb as any,\n isError: true,\n status: 510,\n writeRow,\n attachmentId\n };\n }\n return true;\n });\n if (!attachmentError) {\n Object\n .entries(document._attachments)\n .forEach(([attachmentId, attachmentData]) => {\n const previousAttachmentData = previous ? 
previous._attachments[attachmentId] : undefined;\n if (!previousAttachmentData) {\n attachmentsAdd.push({\n documentId: docId,\n attachmentId,\n attachmentData: attachmentData as any,\n digest: attachmentData.digest\n });\n } else {\n const newDigest = updatedRow.document._attachments[attachmentId].digest;\n if (\n (attachmentData as RxAttachmentWriteData).data &&\n /**\n * Performance shortcut,\n * do not update the attachment data if it did not change.\n */\n previousAttachmentData.digest !== newDigest\n ) {\n attachmentsUpdate.push({\n documentId: docId,\n attachmentId,\n attachmentData: attachmentData as RxAttachmentWriteData,\n digest: attachmentData.digest\n });\n }\n }\n });\n }\n }\n }\n\n if (attachmentError) {\n errors.push(attachmentError);\n } else {\n if (hasAttachments) {\n bulkUpdateDocs.push(stripAttachmentsDataFromRow(updatedRow));\n if (onUpdate) {\n onUpdate(document);\n }\n } else {\n bulkUpdateDocs.push(updatedRow);\n if (onUpdate) {\n onUpdate(document);\n }\n }\n newestRow = updatedRow as any;\n }\n\n let eventDocumentData: RxDocumentData | undefined = null as any;\n let previousEventDocumentData: RxDocumentData | undefined = null as any;\n let operation: 'INSERT' | 'UPDATE' | 'DELETE' = null as any;\n\n if (previousDeleted && !documentDeleted) {\n operation = 'INSERT';\n eventDocumentData = hasAttachments ? stripAttachmentsDataFromDocument(document) : document as any;\n } else if (previous && !previousDeleted && !documentDeleted) {\n operation = 'UPDATE';\n eventDocumentData = hasAttachments ? 
stripAttachmentsDataFromDocument(document) : document as any;\n previousEventDocumentData = previous;\n } else if (documentDeleted) {\n operation = 'DELETE';\n eventDocumentData = ensureNotFalsy(document) as any;\n previousEventDocumentData = previous;\n } else {\n throw newRxError('SNH', { args: { writeRow } });\n }\n\n const event = {\n documentId: docId,\n documentData: eventDocumentData as RxDocumentData,\n previousDocumentData: previousEventDocumentData,\n operation: operation\n };\n eventBulkEvents.push(event);\n }\n }\n\n return {\n bulkInsertDocs,\n bulkUpdateDocs,\n newestRow,\n errors,\n eventBulk,\n attachmentsAdd,\n attachmentsRemove,\n attachmentsUpdate\n };\n}\n\nexport function stripAttachmentsDataFromRow(writeRow: BulkWriteRow): BulkWriteRowProcessed {\n return {\n previous: writeRow.previous,\n document: stripAttachmentsDataFromDocument(writeRow.document)\n };\n}\n\nexport function getAttachmentSize(\n attachmentBase64String: string\n): number {\n return atob(attachmentBase64String).length;\n}\n\n/**\n * Used in custom RxStorage implementations.\n */\nexport function attachmentWriteDataToNormalData(writeData: RxAttachmentData | RxAttachmentWriteData): RxAttachmentData {\n const data = (writeData as RxAttachmentWriteData).data;\n if (!data) {\n return writeData as any;\n }\n const ret: RxAttachmentData = {\n length: getAttachmentSize(data),\n digest: writeData.digest,\n type: writeData.type\n };\n return ret;\n}\n\nexport function stripAttachmentsDataFromDocument(doc: RxDocumentWriteData): RxDocumentData {\n if (!doc._attachments || Object.keys(doc._attachments).length === 0) {\n return doc;\n }\n\n const useDoc: RxDocumentData = flatClone(doc) as any;\n useDoc._attachments = {};\n Object\n .entries(doc._attachments)\n .forEach(([attachmentId, attachmentData]) => {\n useDoc._attachments[attachmentId] = attachmentWriteDataToNormalData(attachmentData);\n });\n return useDoc;\n}\n\n/**\n * Flat clone the document data\n * and also the _meta field.\n * 
Used many times when we want to change the meta\n * during replication etc.\n */\nexport function flatCloneDocWithMeta(\n doc: RxDocumentData\n): RxDocumentData {\n return Object.assign(\n {},\n doc,\n {\n _meta: flatClone(doc._meta)\n }\n );\n}\n\nexport type WrappedRxStorageInstance = RxStorageInstance & {\n originalStorageInstance: RxStorageInstance;\n};\n\n/**\n * Wraps the normal storageInstance of a RxCollection\n * to ensure that all access is properly using the hooks\n * and other data transformations and also ensure that database.lockedRun()\n * is used properly.\n */\nexport function getWrappedStorageInstance<\n RxDocType,\n Internals,\n InstanceCreationOptions,\n CheckpointType\n>(\n database: RxDatabase<{}, Internals, InstanceCreationOptions, any>,\n storageInstance: RxStorageInstance,\n /**\n * The original RxJsonSchema\n * before it was mutated by hooks.\n */\n rxJsonSchema: RxJsonSchema>\n): WrappedRxStorageInstance {\n overwritable.deepFreezeWhenDevMode(rxJsonSchema);\n\n const ret: WrappedRxStorageInstance = {\n originalStorageInstance: storageInstance,\n schema: storageInstance.schema,\n internals: storageInstance.internals,\n collectionName: storageInstance.collectionName,\n databaseName: storageInstance.databaseName,\n options: storageInstance.options,\n bulkWrite(\n rows: BulkWriteRow[],\n context: string\n ) {\n const databaseToken = database.token;\n const toStorageWriteRows: BulkWriteRow[] = new Array(rows.length);\n /**\n * Use the same timestamp for all docs of this rows-set.\n * This improves performance because calling Date.now() inside of the now() function\n * is too costly.\n */\n const time = now();\n for (let index = 0; index < rows.length; index++) {\n const writeRow = rows[index];\n const document = flatCloneDocWithMeta(writeRow.document);\n document._meta.lwt = time;\n\n /**\n * Yes we really want to set the revision here.\n * If you make a plugin that relies on having its own revision\n * stored into the storage, use 
this.originalStorageInstance.bulkWrite() instead.\n */\n const previous = writeRow.previous;\n document._rev = createRevision(\n databaseToken,\n previous\n );\n toStorageWriteRows[index] = {\n document,\n previous\n };\n }\n\n runPluginHooks('preStorageWrite', {\n storageInstance: this.originalStorageInstance,\n rows: toStorageWriteRows\n });\n\n return database.lockedRun(\n () => storageInstance.bulkWrite(\n toStorageWriteRows,\n context\n )\n )\n /**\n * The RxStorageInstance MUST NOT allow to insert already _deleted documents,\n * without sending the previous document version.\n * But for better developer experience, RxDB does allow to re-insert deleted documents.\n * We do this by automatically fixing the conflict errors for that case\n * by running another bulkWrite() and merging the results.\n * @link https://github.com/pubkey/rxdb/pull/3839\n */\n .then(writeResult => {\n const useWriteResult: typeof writeResult = {\n error: [],\n success: writeResult.success.slice(0)\n };\n const reInsertErrors: RxStorageWriteErrorConflict[] = writeResult.error.length === 0\n ? 
[]\n : writeResult.error\n .filter((error) => {\n if (\n error.status === 409 &&\n !error.writeRow.previous &&\n !error.writeRow.document._deleted &&\n ensureNotFalsy(error.documentInDb)._deleted\n ) {\n return true;\n }\n useWriteResult.error.push(error);\n return false;\n }) as any;\n if (reInsertErrors.length > 0) {\n const reInserts: BulkWriteRow[] = reInsertErrors\n .map((error) => {\n return {\n previous: error.documentInDb,\n document: Object.assign(\n {},\n error.writeRow.document,\n {\n _rev: createRevision(\n database.token,\n error.documentInDb\n )\n }\n )\n };\n });\n\n return database.lockedRun(\n () => storageInstance.bulkWrite(\n reInserts,\n context\n )\n ).then(subResult => {\n appendToArray(useWriteResult.error, subResult.error);\n appendToArray(useWriteResult.success, subResult.success);\n return useWriteResult;\n });\n }\n return writeResult;\n });\n },\n query(preparedQuery) {\n return database.lockedRun(\n () => storageInstance.query(preparedQuery)\n );\n },\n count(preparedQuery) {\n return database.lockedRun(\n () => storageInstance.count(preparedQuery)\n );\n },\n findDocumentsById(ids, deleted) {\n return database.lockedRun(\n () => storageInstance.findDocumentsById(ids, deleted)\n );\n },\n getAttachmentData(\n documentId: string,\n attachmentId: string,\n digest: string\n ) {\n return database.lockedRun(\n () => storageInstance.getAttachmentData(documentId, attachmentId, digest)\n );\n },\n getChangedDocumentsSince: !storageInstance.getChangedDocumentsSince ? 
undefined : (limit: number, checkpoint?: any) => {\n return database.lockedRun(\n () => ((storageInstance as any).getChangedDocumentsSince)(ensureNotFalsy(limit), checkpoint)\n );\n },\n cleanup(minDeletedTime: number) {\n return database.lockedRun(\n () => storageInstance.cleanup(minDeletedTime)\n );\n },\n remove() {\n database.storageInstances.delete(ret);\n return database.lockedRun(\n () => storageInstance.remove()\n );\n },\n close() {\n database.storageInstances.delete(ret);\n return database.lockedRun(\n () => storageInstance.close()\n );\n },\n changeStream() {\n return storageInstance.changeStream();\n },\n conflictResultionTasks() {\n return storageInstance.conflictResultionTasks();\n },\n resolveConflictResultionTask(taskSolution) {\n if (taskSolution.output.isEqual) {\n return storageInstance.resolveConflictResultionTask(taskSolution);\n }\n\n const doc = Object.assign(\n {},\n taskSolution.output.documentData,\n {\n _meta: getDefaultRxDocumentMeta(),\n _rev: getDefaultRevision(),\n _attachments: {}\n }\n );\n\n const documentData = flatClone(doc);\n delete (documentData as any)._meta;\n delete (documentData as any)._rev;\n delete (documentData as any)._attachments;\n\n return storageInstance.resolveConflictResultionTask({\n id: taskSolution.id,\n output: {\n isEqual: false,\n documentData\n }\n });\n }\n };\n\n database.storageInstances.add(ret);\n return ret;\n}\n\n/**\n * Each RxStorage implementation should\n * run this method at the first step of createStorageInstance()\n * to ensure that the configuration is correct.\n */\nexport function ensureRxStorageInstanceParamsAreCorrect(\n params: RxStorageInstanceCreationParams\n) {\n if (params.schema.keyCompression) {\n throw newRxError('UT5', { args: { params } });\n }\n if (hasEncryption(params.schema)) {\n throw newRxError('UT6', { args: { params } });\n }\n if (\n params.schema.attachments &&\n params.schema.attachments.compression\n ) {\n throw newRxError('UT7', { args: { params } });\n 
}\n}\n\nexport function hasEncryption(jsonSchema: RxJsonSchema): boolean {\n if (\n (!!jsonSchema.encrypted && jsonSchema.encrypted.length > 0) ||\n (jsonSchema.attachments && jsonSchema.attachments.encrypted)\n ) {\n return true;\n } else {\n return false;\n }\n}\n\nexport function getChangedDocumentsSinceQuery(\n storageInstance: RxStorageInstance,\n limit: number,\n checkpoint?: CheckpointType\n): FilledMangoQuery {\n const primaryPath = getPrimaryFieldOfPrimaryKey(storageInstance.schema.primaryKey);\n const sinceLwt = checkpoint ? (checkpoint as unknown as RxStorageDefaultCheckpoint).lwt : RX_META_LWT_MINIMUM;\n const sinceId = checkpoint ? (checkpoint as unknown as RxStorageDefaultCheckpoint).id : '';\n return normalizeMangoQuery(storageInstance.schema, {\n selector: {\n $or: [\n {\n '_meta.lwt': {\n $gt: sinceLwt\n }\n },\n {\n '_meta.lwt': {\n $eq: sinceLwt\n },\n [primaryPath]: {\n $gt: checkpoint ? sinceId : ''\n }\n }\n ],\n // add this hint for better index usage\n '_meta.lwt': {\n $gte: sinceLwt\n }\n } as any,\n sort: [\n { '_meta.lwt': 'asc' },\n { [primaryPath]: 'asc' }\n ] as any,\n skip: 0,\n limit,\n /**\n * DO NOT SET A SPECIFIC INDEX HERE!\n * The query might be modified by some plugin\n * before sending it to the storage.\n * We can be sure that in the end the query planner\n * will find the best index.\n */\n // index: ['_meta.lwt', primaryPath]\n });\n}\n\nexport async function getChangedDocumentsSince(\n storageInstance: RxStorageInstance,\n limit: number,\n checkpoint?: CheckpointType\n): Promise<{\n documents: RxDocumentData[];\n /**\n * The checkpoint contains data so that another\n * call to getChangedDocumentsSince() will continue\n * from exactly the last document that was returned before.\n */\n checkpoint: CheckpointType;\n}> {\n if (storageInstance.getChangedDocumentsSince) {\n return storageInstance.getChangedDocumentsSince(limit, checkpoint);\n }\n\n const primaryPath = 
getPrimaryFieldOfPrimaryKey(storageInstance.schema.primaryKey);\n const query = prepareQuery>(\n storageInstance.schema,\n getChangedDocumentsSinceQuery(\n storageInstance,\n limit,\n checkpoint\n )\n );\n\n const result = await storageInstance.query(query);\n const documents = result.documents;\n const lastDoc = lastOfArray(documents);\n\n return {\n documents: documents,\n checkpoint: lastDoc ? {\n id: (lastDoc as any)[primaryPath],\n lwt: lastDoc._meta.lwt\n } as any : checkpoint ? checkpoint : {\n id: '',\n lwt: 0\n }\n };\n}\n\n\n/**\n * Wraps the storage and simluates\n * delays. Mostly used in tests.\n */\nexport function randomDelayStorage(\n input: {\n storage: RxStorage;\n delayTimeBefore: () => number;\n delayTimeAfter: () => number;\n }\n): RxStorage {\n /**\n * Ensure writes to a delay storage\n * are still correctly run in order.\n */\n let randomDelayStorageWriteQueue: Promise = PROMISE_RESOLVE_TRUE;\n\n const retStorage: RxStorage = {\n name: 'random-delay-' + input.storage.name,\n rxdbVersion: RXDB_VERSION,\n async createStorageInstance(params) {\n await promiseWait(input.delayTimeBefore());\n const storageInstance = await input.storage.createStorageInstance(params);\n await promiseWait(input.delayTimeAfter());\n\n return {\n databaseName: storageInstance.databaseName,\n internals: storageInstance.internals,\n options: storageInstance.options,\n schema: storageInstance.schema,\n collectionName: storageInstance.collectionName,\n bulkWrite(a, b) {\n randomDelayStorageWriteQueue = randomDelayStorageWriteQueue.then(async () => {\n await promiseWait(input.delayTimeBefore());\n const response = await storageInstance.bulkWrite(a, b);\n await promiseWait(input.delayTimeAfter());\n return response;\n });\n const ret = randomDelayStorageWriteQueue;\n return ret;\n },\n async findDocumentsById(a, b) {\n await promiseWait(input.delayTimeBefore());\n const ret = await storageInstance.findDocumentsById(a, b);\n await promiseWait(input.delayTimeAfter());\n return 
ret;\n },\n async query(a) {\n await promiseWait(input.delayTimeBefore());\n const ret = await storageInstance.query(a);\n return ret;\n },\n async count(a) {\n await promiseWait(input.delayTimeBefore());\n const ret = await storageInstance.count(a);\n await promiseWait(input.delayTimeAfter());\n return ret;\n\n },\n async getAttachmentData(a, b, c) {\n await promiseWait(input.delayTimeBefore());\n const ret = await storageInstance.getAttachmentData(a, b, c);\n await promiseWait(input.delayTimeAfter());\n return ret;\n\n },\n getChangedDocumentsSince: !storageInstance.getChangedDocumentsSince ? undefined : async (a, b) => {\n await promiseWait(input.delayTimeBefore());\n const ret = await ensureNotFalsy(storageInstance.getChangedDocumentsSince)(a, b);\n await promiseWait(input.delayTimeAfter());\n return ret;\n\n },\n changeStream() {\n return storageInstance.changeStream();\n },\n conflictResultionTasks() {\n return storageInstance.conflictResultionTasks();\n },\n resolveConflictResultionTask(a) {\n return storageInstance.resolveConflictResultionTask(a);\n },\n async cleanup(a) {\n await promiseWait(input.delayTimeBefore());\n const ret = await storageInstance.cleanup(a);\n await promiseWait(input.delayTimeAfter());\n return ret;\n\n },\n async close() {\n await promiseWait(input.delayTimeBefore());\n const ret = await storageInstance.close();\n await promiseWait(input.delayTimeAfter());\n return ret;\n\n },\n async remove() {\n await promiseWait(input.delayTimeBefore());\n const ret = await storageInstance.remove();\n await promiseWait(input.delayTimeAfter());\n return ret;\n },\n };\n\n\n }\n };\n return 
retStorage;\n}\n"],"mappings":"AAAA;AACA;AACA;;AAEA,SAASA,YAAY,QAAQ,mBAAmB;AAChD,SAASC,UAAU,QAAQ,eAAe;AAC1C,SACIC,2BAA2B,QACxB,uBAAuB;AAwB9B,SACIC,oBAAoB,EACpBC,YAAY,EACZC,mBAAmB,EACnBC,aAAa,EACbC,cAAc,EACdC,cAAc,EACdC,SAAS,EACTC,kBAAkB,EAClBC,wBAAwB,EACxBC,WAAW,EACXC,GAAG,EACHC,WAAW,EACXC,iBAAiB,QACd,0BAA0B;AACjC,SAAqBC,MAAM,EAAEC,GAAG,EAAEC,SAAS,EAAEC,SAAS,QAAQ,MAAM;AACpE,SAASC,YAAY,QAAQ,eAAe;AAC5C,SAASC,mBAAmB,QAAQ,sBAAsB;AAC1D,SAASC,cAAc,QAAQ,YAAY;AAE3C,OAAO,IAAMC,qBAAqB,GAAG,gBAAgB;AACrD,OAAO,IAAMC,mCAAmC,GAAG,0BAA0B;AAE7E,OAAO,eAAeC,iBAAiBA,CACnCC,eAAuD,EACvDC,UAAkB,EAC4B;EAC9C,IAAMC,OAAO,GAAG,MAAMF,eAAe,CAACG,iBAAiB,CAAC,CAACF,UAAU,CAAC,EAAE,KAAK,CAAC;EAC5E,IAAMG,GAAG,GAAGF,OAAO,CAAC,CAAC,CAAC;EACtB,IAAIE,GAAG,EAAE;IACL,OAAOA,GAAG;EACd,CAAC,MAAM;IACH,OAAOC,SAAS;EACpB;AACJ;;AAEA;AACA;AACA;AACA;AACA,OAAO,eAAeC,WAAWA,CAC7BC,QAAgD,EAChDC,QAAiC,EACjCC,OAAe,EACmB;EAClC,IAAMC,WAAW,GAAG,MAAMH,QAAQ,CAACI,SAAS,CACxC,CAACH,QAAQ,CAAC,EACVC,OACJ,CAAC;EACD,IAAIC,WAAW,CAACE,KAAK,CAACC,MAAM,GAAG,CAAC,EAAE;IAC9B,IAAMD,KAAK,GAAGF,WAAW,CAACE,KAAK,CAAC,CAAC,CAAC;IAClC,MAAMA,KAAK;EACf,CAAC,MAAM;IACH,IAAME,GAAG,GAAGJ,WAAW,CAACK,OAAO,CAAC,CAAC,CAAC;IAClC,OAAOD,GAAG;EACd;AACJ;;AAEA;AACA;AACA;AACA;AACA,OAAO,SAASE,aAAaA,CACzBhB,eAAuD,EACvDC,UAAkB,EACmB;EACrC,IAAMgB,gBAAgB,GAAGlB,iBAAiB,CAACC,eAAe,EAAEC,UAAU,CAAC;EACvE,IAAMa,GAAG,GAAGd,eAAe,CACtBkB,YAAY,CAAC,CAAC,CACdC,IAAI,CACD5B,GAAG,CAAC6B,MAAM,IAAIA,MAAM,CAACC,MAAM,CAACC,IAAI,CAACC,EAAE,IAAIA,EAAE,CAACtB,UAAU,KAAKA,UAAU,CAAC,CAAC,EACrEX,MAAM,CAACiC,EAAE,IAAI,CAAC,CAACA,EAAE,CAAC,EAClBhC,GAAG,CAACgC,EAAE,IAAIC,OAAO,CAACC,OAAO,CAAC3C,cAAc,CAACyC,EAAE,CAAC,CAACG,YAAY,CAAC,CAAC,EAC3DlC,SAAS,CAACyB,gBAAgB,CAAC,EAC3BxB,SAAS,CAACkC,CAAC,IAAIA,CAAC,CAAC,EACjBrC,MAAM,CAACqC,CAAC,IAAI,CAAC,CAACA,CAAC,CACnB,CAAQ;EACZ,OAAOb,GAAG;AACd;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASc,gBAAgBA,CAC5BC,WAA6B,EACf;EACd,OAAOC,MAAM,CAACC,MAAM,CAChB,CAAC,CAAC,EACF,GAAGF,WACP,CAAC;AACL;AAEA,OAAO,SAASG,0BAA0BA,CACtCC,UAA6C,EAC7ChC,UAAkB,EAClBiC,SAAqD,EACrDt
B,KAAiD,EACnD;EACE,IAAIA,KAAK,EAAE;IACP,IAAIA,KAAK,CAACuB,MAAM,KAAK,GAAG,EAAE;MACtB,MAAM5D,UAAU,CAAC,UAAU,EAAE;QACzB0D,UAAU,EAAEA,UAAU,CAACG,IAAI;QAC3BC,EAAE,EAAEpC,UAAU;QACdqC,UAAU,EAAE1B,KAAK;QACjB2B,IAAI,EAAEL;MACV,CAAC,CAAC;IACN,CAAC,MAAM,IAAItB,KAAK,CAACuB,MAAM,KAAK,GAAG,EAAE;MAC7B,MAAM5D,UAAU,CAAC,KAAK,EAAE;QACpB0D,UAAU,EAAEA,UAAU,CAACG,IAAI;QAC3BC,EAAE,EAAEpC,UAAU;QACdqC,UAAU,EAAE1B,KAAK;QACjB2B,IAAI,EAAEL;MACV,CAAC,CAAC;IACN,CAAC,MAAM;MACH,MAAMtB,KAAK;IACf;EACJ;AACJ;;AAGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAAS4B,uBAAuBA,CACnCxC,eAAiD,EACjDyC,WAAkC;AAClC;AACJ;AACA;AACA;AACA;AACA;AACIC,QAAmG;AACnG;AACJ;AACA;AACA;AACIC,aAAwC,EACxClC,OAAe;AACf;AACJ;AACA;AACA;AACImC,QAAuD,EACvDC,QAAuD,EACf;EACxC,IAAMC,cAAc,GAAG,CAAC,CAAC9C,eAAe,CAAC+C,MAAM,CAACC,WAAW;EAC3D,IAAMC,cAAkD,GAAG,EAAE;EAC7D,IAAMC,cAAkD,GAAG,EAAE;EAC7D,IAAMC,MAAwC,GAAG,EAAE;EACnD,IAAMC,WAAW,GAAG/D,iBAAiB,CAAC,EAAE,CAAC;EACzC,IAAMgE,SAA0E,GAAG;IAC/EhB,EAAE,EAAEe,WAAW;IACf/B,MAAM,EAAE,EAAE;IACViC,UAAU,EAAE,IAAI;IAChB7C,OAAO;IACP8C,SAAS,EAAEpE,GAAG,CAAC,CAAC;IAChBqE,OAAO,EAAE;EACb,CAAC;EACD,IAAMC,eAAe,GAAGJ,SAAS,CAAChC,MAAM;EAExC,IAAMqC,cAKH,GAAG,EAAE;EACR,IAAMC,iBAIH,GAAG,EAAE;EACR,IAAMC,iBAKH,GAAG,EAAE;EAER,IAAMC,WAAW,GAAGnB,QAAQ,CAACoB,IAAI,GAAG,CAAC;EACrC,IAAIC,SAAuD;;EAE3D;AACJ;AACA;EACI,IAAMC,SAAS,GAAGrB,aAAa,CAAC9B,MAAM;EAAC,IAAAoD,KAAA,YAAAA,CAAA,EACS;IAC5C,IAAMzD,QAAQ,GAAGmC,aAAa,CAACuB,KAAK,CAAC;;IAErC;IACA,IAAMC,QAAQ,GAAG3D,QAAQ,CAAC2D,QAAQ;IAClC,IAAMC,QAAQ,GAAG5D,QAAQ,CAAC4D,QAAQ;IAClC,IAAMC,KAAK,GAAGF,QAAQ,CAAC1B,WAAW,CAAW;IAC7C,IAAM6B,eAAe,GAAGH,QAAQ,CAACI,QAAQ;IACzC,IAAMC,eAAe,GAAGJ,QAAQ,IAAIA,QAAQ,CAACG,QAAQ;IAErD,IAAIE,YAAmD,GAAGpE,SAAgB;IAC1E,IAAIwD,WAAW,EAAE;MACbY,YAAY,GAAG/B,QAAQ,CAACgC,GAAG,CAACL,KAAK,CAAC;IACtC;IACA,IAAIM,eAAqE;IAEzE,IAAI,CAACF,YAAY,EAAE;MACf;AACZ;AACA;AACA;MACY,IAAMG,iBAAiB,GAAGN,eAAe,GAAG,IAAI,GAAG,KAAK;MACxD,IAAIxB,cAAc,EAAE;QAChBhB,MAAM,CACD+C,OAAO,CAACV,QAAQ,CAACW,YAAY,CAAC,CAC9BC,OAAO,CAAC,CAAC,CAACC,YAAY,EAAEC,cAAc,CAAC,KAAK;UACzC,IACI,CAAEA,cAAc,CAA2
B1C,IAAI,EACjD;YACEoC,eAAe,GAAG;cACd1E,UAAU,EAAEoE,KAAK;cACjBa,OAAO,EAAE,IAAI;cACb/C,MAAM,EAAE,GAAG;cACX3B,QAAQ;cACRwE;YACJ,CAAC;YACD7B,MAAM,CAACgC,IAAI,CAACR,eAAe,CAAC;UAChC,CAAC,MAAM;YACHjB,cAAc,CAACyB,IAAI,CAAC;cAChBlF,UAAU,EAAEoE,KAAK;cACjBW,YAAY;cACZC,cAAc,EAAEA,cAAqB;cACrCG,MAAM,EAAEH,cAAc,CAACG;YAC3B,CAAC,CAAC;UACN;QACJ,CAAC,CAAC;MACV;MACA,IAAI,CAACT,eAAe,EAAE;QAClB,IAAI7B,cAAc,EAAE;UAChBG,cAAc,CAACkC,IAAI,CAACE,2BAA2B,CAAC7E,QAAQ,CAAC,CAAC;UAC1D,IAAIoC,QAAQ,EAAE;YACVA,QAAQ,CAACuB,QAAQ,CAAC;UACtB;QACJ,CAAC,MAAM;UACHlB,cAAc,CAACkC,IAAI,CAAC3E,QAAe,CAAC;UACpC,IAAIoC,QAAQ,EAAE;YACVA,QAAQ,CAACuB,QAAQ,CAAC;UACtB;QACJ;QAEAJ,SAAS,GAAGvD,QAAe;MAC/B;MAEA,IAAI,CAACoE,iBAAiB,EAAE;QACpB,IAAMU,KAAK,GAAG;UACVrF,UAAU,EAAEoE,KAAK;UACjBkB,SAAS,EAAE,QAAiB;UAC5B7D,YAAY,EAAEoB,cAAc,GAAG0C,gCAAgC,CAACrB,QAAQ,CAAC,GAAGA,QAAe;UAC3FsB,oBAAoB,EAAE3C,cAAc,IAAIsB,QAAQ,GAAGoB,gCAAgC,CAACpB,QAAQ,CAAC,GAAGA;QACpG,CAAC;QACDX,eAAe,CAAC0B,IAAI,CAACG,KAAK,CAAC;MAC/B;IACJ,CAAC,MAAM;MACH;MACA,IAAMI,OAAe,GAAGjB,YAAY,CAACkB,IAAI;;MAEzC;AACZ;AACA;MACY,IAEQ,CAACvB,QAAQ,IAGT,CAAC,CAACA,QAAQ,IACVsB,OAAO,KAAKtB,QAAQ,CAACuB,IACxB,EACH;QACE;QACA,IAAMC,GAAmC,GAAG;UACxCV,OAAO,EAAE,IAAI;UACb/C,MAAM,EAAE,GAAG;UACXlC,UAAU,EAAEoE,KAAK;UACjB7D,QAAQ,EAAEA,QAAQ;UAClBiE;QACJ,CAAC;QACDtB,MAAM,CAACgC,IAAI,CAACS,GAAG,CAAC;QAAC;MAErB;;MAEA;;MAEA,IAAMC,UAA4C,GAAG/C,cAAc,GAAGuC,2BAA2B,CAAC7E,QAAQ,CAAC,GAAGA,QAAe;MAC7H,IAAIsC,cAAc,EAAE;QAChB,IAAIwB,eAAe,EAAE;UACjB;AACpB;AACA;UACoB,IAAIF,QAAQ,EAAE;YACVtC,MAAM,CACDgE,IAAI,CAAC1B,QAAQ,CAACU,YAAY,CAAC,CAC3BC,OAAO,CAACC,YAAY,IAAI;cACrBrB,iBAAiB,CAACwB,IAAI,CAAC;gBACnBlF,UAAU,EAAEoE,KAAK;gBACjBW,YAAY;gBACZI,MAAM,EAAEtG,cAAc,CAACsF,QAAQ,CAAC,CAACU,YAAY,CAACE,YAAY,CAAC,CAACI;cAChE,CAAC,CAAC;YACN,CAAC,CAAC;UACV;QACJ,CAAC,MAAM;UACH;UACAtD,MAAM,CACD+C,OAAO,CAACV,QAAQ,CAACW,YAAY,CAAC,CAC9BxD,IAAI,CAAC,CAAC,CAAC0D,YAAY,EAAEC,cAAc,CAAC,KAAK;YACtC,IAAMc,sBAAsB,GAAG3B,QAAQ,GAAGA,QAAQ,CAACU,YAAY,CAACE,YAAY,CAAC,GAAG3E,SAAS;YACzF,IACI,CAAC0F,sBAAsB,IACvB,CAAEd,cAAc,CAA2B1C,IAAI,EACjD;cAC
EoC,eAAe,GAAG;gBACd1E,UAAU,EAAEoE,KAAK;gBACjBI,YAAY,EAAEA,YAAmB;gBACjCS,OAAO,EAAE,IAAI;gBACb/C,MAAM,EAAE,GAAG;gBACX3B,QAAQ;gBACRwE;cACJ,CAAC;YACL;YACA,OAAO,IAAI;UACf,CAAC,CAAC;UACN,IAAI,CAACL,eAAe,EAAE;YAClB7C,MAAM,CACD+C,OAAO,CAACV,QAAQ,CAACW,YAAY,CAAC,CAC9BC,OAAO,CAAC,CAAC,CAACC,YAAY,EAAEC,cAAc,CAAC,KAAK;cACzC,IAAMc,sBAAsB,GAAG3B,QAAQ,GAAGA,QAAQ,CAACU,YAAY,CAACE,YAAY,CAAC,GAAG3E,SAAS;cACzF,IAAI,CAAC0F,sBAAsB,EAAE;gBACzBrC,cAAc,CAACyB,IAAI,CAAC;kBAChBlF,UAAU,EAAEoE,KAAK;kBACjBW,YAAY;kBACZC,cAAc,EAAEA,cAAqB;kBACrCG,MAAM,EAAEH,cAAc,CAACG;gBAC3B,CAAC,CAAC;cACN,CAAC,MAAM;gBACH,IAAMY,SAAS,GAAGH,UAAU,CAAC1B,QAAQ,CAACW,YAAY,CAACE,YAAY,CAAC,CAACI,MAAM;gBACvE,IACKH,cAAc,CAA2B1C,IAAI;gBAC9C;AACxC;AACA;AACA;gBACwCwD,sBAAsB,CAACX,MAAM,KAAKY,SAAS,EAC7C;kBACEpC,iBAAiB,CAACuB,IAAI,CAAC;oBACnBlF,UAAU,EAAEoE,KAAK;oBACjBW,YAAY;oBACZC,cAAc,EAAEA,cAAuC;oBACvDG,MAAM,EAAEH,cAAc,CAACG;kBAC3B,CAAC,CAAC;gBACN;cACJ;YACJ,CAAC,CAAC;UACV;QACJ;MACJ;MAEA,IAAIT,eAAe,EAAE;QACjBxB,MAAM,CAACgC,IAAI,CAACR,eAAe,CAAC;MAChC,CAAC,MAAM;QACH,IAAI7B,cAAc,EAAE;UAChBI,cAAc,CAACiC,IAAI,CAACE,2BAA2B,CAACQ,UAAU,CAAC,CAAC;UAC5D,IAAIhD,QAAQ,EAAE;YACVA,QAAQ,CAACsB,QAAQ,CAAC;UACtB;QACJ,CAAC,MAAM;UACHjB,cAAc,CAACiC,IAAI,CAACU,UAAU,CAAC;UAC/B,IAAIhD,QAAQ,EAAE;YACVA,QAAQ,CAACsB,QAAQ,CAAC;UACtB;QACJ;QACAJ,SAAS,GAAG8B,UAAiB;MACjC;MAEA,IAAII,iBAAwD,GAAG,IAAW;MAC1E,IAAIC,yBAAgE,GAAG,IAAW;MAClF,IAAIX,SAAyC,GAAG,IAAW;MAE3D,IAAIf,eAAe,IAAI,CAACF,eAAe,EAAE;QACrCiB,SAAS,GAAG,QAAQ;QACpBU,iBAAiB,GAAGnD,cAAc,GAAG0C,gCAAgC,CAACrB,QAAQ,CAAC,GAAGA,QAAe;MACrG,CAAC,MAAM,IAAIC,QAAQ,IAAI,CAACI,eAAe,IAAI,CAACF,eAAe,EAAE;QACzDiB,SAAS,GAAG,QAAQ;QACpBU,iBAAiB,GAAGnD,cAAc,GAAG0C,gCAAgC,CAACrB,QAAQ,CAAC,GAAGA,QAAe;QACjG+B,yBAAyB,GAAG9B,QAAQ;MACxC,CAAC,MAAM,IAAIE,eAAe,EAAE;QACxBiB,SAAS,GAAG,QAAQ;QACpBU,iBAAiB,GAAGnH,cAAc,CAACqF,QAAQ,CAAQ;QACnD+B,yBAAyB,GAAG9B,QAAQ;MACxC,CAAC,MAAM;QACH,MAAM7F,UAAU,CAAC,KAAK,EAAE;UAAE4H,IAAI,EAAE;YAAE3F;UAAS;QAAE,CAAC,CAAC;MACnD;MAEA,IAAM8E,MAAK,GAAG;QACVrF,UAAU,EAAEoE,KAAK;QACjB3C,YAAY,EAAEuE,iBAA8C;QAC5DR,oBAAo
B,EAAES,yBAAyB;QAC/CX,SAAS,EAAEA;MACf,CAAC;MACD9B,eAAe,CAAC0B,IAAI,CAACG,MAAK,CAAC;IAC/B;EACJ,CAAC;EA3ND,KAAK,IAAIpB,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGF,SAAS,EAAEE,KAAK,EAAE;IAAA,IAAAD,KAAA,IAiGlC;EAAS;EA4HrB,OAAO;IACHhB,cAAc;IACdC,cAAc;IACda,SAAS;IACTZ,MAAM;IACNE,SAAS;IACTK,cAAc;IACdC,iBAAiB;IACjBC;EACJ,CAAC;AACL;AAEA,OAAO,SAASyB,2BAA2BA,CAAY7E,QAAiC,EAAoC;EACxH,OAAO;IACH4D,QAAQ,EAAE5D,QAAQ,CAAC4D,QAAQ;IAC3BD,QAAQ,EAAEqB,gCAAgC,CAAChF,QAAQ,CAAC2D,QAAQ;EAChE,CAAC;AACL;AAEA,OAAO,SAASiC,iBAAiBA,CAC7BC,sBAA8B,EACxB;EACN,OAAOC,IAAI,CAACD,sBAAsB,CAAC,CAACxF,MAAM;AAC9C;;AAEA;AACA;AACA;AACA,OAAO,SAAS0F,+BAA+BA,CAACrE,SAAmD,EAAoB;EACnH,IAAMK,IAAI,GAAIL,SAAS,CAA2BK,IAAI;EACtD,IAAI,CAACA,IAAI,EAAE;IACP,OAAOL,SAAS;EACpB;EACA,IAAMpB,GAAqB,GAAG;IAC1BD,MAAM,EAAEuF,iBAAiB,CAAC7D,IAAI,CAAC;IAC/B6C,MAAM,EAAElD,SAAS,CAACkD,MAAM;IACxBoB,IAAI,EAAEtE,SAAS,CAACsE;EACpB,CAAC;EACD,OAAO1F,GAAG;AACd;AAEA,OAAO,SAAS0E,gCAAgCA,CAAYpF,GAAmC,EAA6B;EACxH,IAAI,CAACA,GAAG,CAAC0E,YAAY,IAAIhD,MAAM,CAACgE,IAAI,CAAC1F,GAAG,CAAC0E,YAAY,CAAC,CAACjE,MAAM,KAAK,CAAC,EAAE;IACjE,OAAOT,GAAG;EACd;EAEA,IAAMqG,MAAiC,GAAG1H,SAAS,CAACqB,GAAG,CAAQ;EAC/DqG,MAAM,CAAC3B,YAAY,GAAG,CAAC,CAAC;EACxBhD,MAAM,CACD+C,OAAO,CAACzE,GAAG,CAAC0E,YAAY,CAAC,CACzBC,OAAO,CAAC,CAAC,CAACC,YAAY,EAAEC,cAAc,CAAC,KAAK;IACzCwB,MAAM,CAAC3B,YAAY,CAACE,YAAY,CAAC,GAAGuB,+BAA+B,CAACtB,cAAc,CAAC;EACvF,CAAC,CAAC;EACN,OAAOwB,MAAM;AACjB;;AAEA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASC,oBAAoBA,CAChCtG,GAA8B,EACL;EACzB,OAAO0B,MAAM,CAACC,MAAM,CAChB,CAAC,CAAC,EACF3B,GAAG,EACH;IACIuG,KAAK,EAAE5H,SAAS,CAACqB,GAAG,CAACuG,KAAK;EAC9B,CACJ,CAAC;AACL;AAMA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,SAASC,yBAAyBA,CAMrCC,QAAiE,EACjE7G,eAAiG;AACjG;AACJ;AACA;AACA;AACI8G,YAAqD,EACkB;EACvExI,YAAY,CAACyI,qBAAqB,CAACD,YAAY,CAAC;EAEhD,IAAMhG,GAA4E,GAAG;IACjFkG,uBAAuB,EAAEhH,eAAe;IACxC+C,MAAM,EAAE/C,eAAe,CAAC+C,MAAM;IAC9BkE,SAAS,EAAEjH,eAAe,CAACiH,SAAS;IACpCC,cAAc,EAAElH,eAAe,CAACkH,cAAc;IAC9CC,YAAY,EAAEnH,eAAe,CAACmH,YAAY;IAC1CC,OAAO,EAAEpH,eAAe,CAACoH,OAAO;IAChCzG,SAASA,CACL0G,IAA+B,EA
C/B5G,OAAe,EACjB;MACE,IAAM6G,aAAa,GAAGT,QAAQ,CAACU,KAAK;MACpC,IAAMC,kBAA6C,GAAG,IAAIC,KAAK,CAACJ,IAAI,CAACxG,MAAM,CAAC;MAC5E;AACZ;AACA;AACA;AACA;MACY,IAAM6G,IAAI,GAAGvI,GAAG,CAAC,CAAC;MAClB,KAAK,IAAIwI,KAAK,GAAG,CAAC,EAAEA,KAAK,GAAGN,IAAI,CAACxG,MAAM,EAAE8G,KAAK,EAAE,EAAE;QAC9C,IAAMnH,QAAQ,GAAG6G,IAAI,CAACM,KAAK,CAAC;QAC5B,IAAMxD,QAAQ,GAAGuC,oBAAoB,CAAClG,QAAQ,CAAC2D,QAAQ,CAAC;QACxDA,QAAQ,CAACwC,KAAK,CAACiB,GAAG,GAAGF,IAAI;;QAEzB;AAChB;AACA;AACA;AACA;QACgB,IAAMtD,QAAQ,GAAG5D,QAAQ,CAAC4D,QAAQ;QAClCD,QAAQ,CAACwB,IAAI,GAAG9G,cAAc,CAC1ByI,aAAa,EACblD,QACJ,CAAC;QACDoD,kBAAkB,CAACG,KAAK,CAAC,GAAG;UACxBxD,QAAQ;UACRC;QACJ,CAAC;MACL;MAEAxE,cAAc,CAAC,iBAAiB,EAAE;QAC9BI,eAAe,EAAE,IAAI,CAACgH,uBAAuB;QAC7CK,IAAI,EAAEG;MACV,CAAC,CAAC;MAEF,OAAOX,QAAQ,CAACgB,SAAS,CACrB,MAAM7H,eAAe,CAACW,SAAS,CAC3B6G,kBAAkB,EAClB/G,OACJ,CACJ;MACI;AAChB;AACA;AACA;AACA;AACA;AACA;AACA,SAPgB,CAQCqH,IAAI,CAACpH,WAAW,IAAI;QACjB,IAAMqH,cAAkC,GAAG;UACvCnH,KAAK,EAAE,EAAE;UACTG,OAAO,EAAEL,WAAW,CAACK,OAAO,CAACiH,KAAK,CAAC,CAAC;QACxC,CAAC;QACD,IAAMC,cAAwD,GAAGvH,WAAW,CAACE,KAAK,CAACC,MAAM,KAAK,CAAC,GACzF,EAAE,GACFH,WAAW,CAACE,KAAK,CACdtB,MAAM,CAAEsB,KAAK,IAAK;UACf,IACIA,KAAK,CAACuB,MAAM,KAAK,GAAG,IACpB,CAACvB,KAAK,CAACJ,QAAQ,CAAC4D,QAAQ,IACxB,CAACxD,KAAK,CAACJ,QAAQ,CAAC2D,QAAQ,CAACI,QAAQ,IACjCzF,cAAc,CAAC8B,KAAK,CAAC6D,YAAY,CAAC,CAACF,QAAQ,EAC7C;YACE,OAAO,IAAI;UACf;UACAwD,cAAc,CAACnH,KAAK,CAACuE,IAAI,CAACvE,KAAK,CAAC;UAChC,OAAO,KAAK;QAChB,CAAC,CAAQ;QACjB,IAAIqH,cAAc,CAACpH,MAAM,GAAG,CAAC,EAAE;UAC3B,IAAMqH,SAAoC,GAAGD,cAAc,CACtD1I,GAAG,CAAEqB,KAAK,IAAK;YACZ,OAAO;cACHwD,QAAQ,EAAExD,KAAK,CAAC6D,YAAY;cAC5BN,QAAQ,EAAErC,MAAM,CAACC,MAAM,CACnB,CAAC,CAAC,EACFnB,KAAK,CAACJ,QAAQ,CAAC2D,QAAQ,EACvB;gBACIwB,IAAI,EAAE9G,cAAc,CAChBgI,QAAQ,CAACU,KAAK,EACd3G,KAAK,CAAC6D,YACV;cACJ,CACJ;YACJ,CAAC;UACL,CAAC,CAAC;UAEN,OAAOoC,QAAQ,CAACgB,SAAS,CACrB,MAAM7H,eAAe,CAACW,SAAS,CAC3BuH,SAAS,EACTzH,OACJ,CACJ,CAAC,CAACqH,IAAI,CAACK,SAAS,IAAI;YAChBvJ,aAAa,CAACmJ,cAAc,CAACnH,KAAK,EAAEuH,SAAS,CAACvH,KAAK,CAAC;YACpDhC,aAAa,CAACmJ,cAAc,CAAChH,OAA
O,EAAEoH,SAAS,CAACpH,OAAO,CAAC;YACxD,OAAOgH,cAAc;UACzB,CAAC,CAAC;QACN;QACA,OAAOrH,WAAW;MACtB,CAAC,CAAC;IACV,CAAC;IACD0H,KAAKA,CAACC,aAAa,EAAE;MACjB,OAAOxB,QAAQ,CAACgB,SAAS,CACrB,MAAM7H,eAAe,CAACoI,KAAK,CAACC,aAAa,CAC7C,CAAC;IACL,CAAC;IACDC,KAAKA,CAACD,aAAa,EAAE;MACjB,OAAOxB,QAAQ,CAACgB,SAAS,CACrB,MAAM7H,eAAe,CAACsI,KAAK,CAACD,aAAa,CAC7C,CAAC;IACL,CAAC;IACDlI,iBAAiBA,CAACoI,GAAG,EAAEC,OAAO,EAAE;MAC5B,OAAO3B,QAAQ,CAACgB,SAAS,CACrB,MAAM7H,eAAe,CAACG,iBAAiB,CAACoI,GAAG,EAAEC,OAAO,CACxD,CAAC;IACL,CAAC;IACDC,iBAAiBA,CACbxI,UAAkB,EAClB+E,YAAoB,EACpBI,MAAc,EAChB;MACE,OAAOyB,QAAQ,CAACgB,SAAS,CACrB,MAAM7H,eAAe,CAACyI,iBAAiB,CAACxI,UAAU,EAAE+E,YAAY,EAAEI,MAAM,CAC5E,CAAC;IACL,CAAC;IACDsD,wBAAwB,EAAE,CAAC1I,eAAe,CAAC0I,wBAAwB,GAAGrI,SAAS,GAAG,CAACsI,KAAa,EAAErF,UAAgB,KAAK;MACnH,OAAOuD,QAAQ,CAACgB,SAAS,CACrB,MAAQ7H,eAAe,CAAS0I,wBAAwB,CAAE5J,cAAc,CAAC6J,KAAK,CAAC,EAAErF,UAAU,CAC/F,CAAC;IACL,CAAC;IACDsF,OAAOA,CAACC,cAAsB,EAAE;MAC5B,OAAOhC,QAAQ,CAACgB,SAAS,CACrB,MAAM7H,eAAe,CAAC4I,OAAO,CAACC,cAAc,CAChD,CAAC;IACL,CAAC;IACDC,MAAMA,CAAA,EAAG;MACLjC,QAAQ,CAACkC,gBAAgB,CAACC,MAAM,CAAClI,GAAG,CAAC;MACrC,OAAO+F,QAAQ,CAACgB,SAAS,CACrB,MAAM7H,eAAe,CAAC8I,MAAM,CAAC,CACjC,CAAC;IACL,CAAC;IACDG,KAAKA,CAAA,EAAG;MACJpC,QAAQ,CAACkC,gBAAgB,CAACC,MAAM,CAAClI,GAAG,CAAC;MACrC,OAAO+F,QAAQ,CAACgB,SAAS,CACrB,MAAM7H,eAAe,CAACiJ,KAAK,CAAC,CAChC,CAAC;IACL,CAAC;IACD/H,YAAYA,CAAA,EAAG;MACX,OAAOlB,eAAe,CAACkB,YAAY,CAAC,CAAC;IACzC,CAAC;IACDgI,sBAAsBA,CAAA,EAAG;MACrB,OAAOlJ,eAAe,CAACkJ,sBAAsB,CAAC,CAAC;IACnD,CAAC;IACDC,4BAA4BA,CAACC,YAAY,EAAE;MACvC,IAAIA,YAAY,CAACC,MAAM,CAACC,OAAO,EAAE;QAC7B,OAAOtJ,eAAe,CAACmJ,4BAA4B,CAACC,YAAY,CAAC;MACrE;MAEA,IAAMhJ,GAAG,GAAG0B,MAAM,CAACC,MAAM,CACrB,CAAC,CAAC,EACFqH,YAAY,CAACC,MAAM,CAAC3H,YAAY,EAChC;QACIiF,KAAK,EAAE1H,wBAAwB,CAAC,CAAC;QACjC0G,IAAI,EAAE3G,kBAAkB,CAAC,CAAC;QAC1B8F,YAAY,EAAE,CAAC;MACnB,CACJ,CAAC;MAED,IAAMpD,YAAY,GAAG3C,SAAS,CAACqB,GAAG,CAAC;MACnC,OAAQsB,YAAY,CAASiF,KAAK;MAClC,OAAQjF,YAAY,CAASiE,IAAI;MACjC,OAAQjE,YAAY,CAASoD,YAAY;MAEzC,OAAO9E,eAAe,CAACmJ,4BAA4B,CAAC
;QAChD9G,EAAE,EAAE+G,YAAY,CAAC/G,EAAE;QACnBgH,MAAM,EAAE;UACJC,OAAO,EAAE,KAAK;UACd5H;QACJ;MACJ,CAAC,CAAC;IACN;EACJ,CAAC;EAEDmF,QAAQ,CAACkC,gBAAgB,CAACQ,GAAG,CAACzI,GAAG,CAAC;EAClC,OAAOA,GAAG;AACd;;AAEA;AACA;AACA;AACA;AACA;AACA,OAAO,SAAS0I,uCAAuCA,CACnDC,MAAiD,EACnD;EACE,IAAIA,MAAM,CAAC1G,MAAM,CAAC2G,cAAc,EAAE;IAC9B,MAAMnL,UAAU,CAAC,KAAK,EAAE;MAAE4H,IAAI,EAAE;QAAEsD;MAAO;IAAE,CAAC,CAAC;EACjD;EACA,IAAIE,aAAa,CAACF,MAAM,CAAC1G,MAAM,CAAC,EAAE;IAC9B,MAAMxE,UAAU,CAAC,KAAK,EAAE;MAAE4H,IAAI,EAAE;QAAEsD;MAAO;IAAE,CAAC,CAAC;EACjD;EACA,IACIA,MAAM,CAAC1G,MAAM,CAACC,WAAW,IACzByG,MAAM,CAAC1G,MAAM,CAACC,WAAW,CAAC4G,WAAW,EACvC;IACE,MAAMrL,UAAU,CAAC,KAAK,EAAE;MAAE4H,IAAI,EAAE;QAAEsD;MAAO;IAAE,CAAC,CAAC;EACjD;AACJ;AAEA,OAAO,SAASE,aAAaA,CAACE,UAA6B,EAAW;EAClE,IACK,CAAC,CAACA,UAAU,CAACC,SAAS,IAAID,UAAU,CAACC,SAAS,CAACjJ,MAAM,GAAG,CAAC,IACzDgJ,UAAU,CAAC7G,WAAW,IAAI6G,UAAU,CAAC7G,WAAW,CAAC8G,SAAU,EAC9D;IACE,OAAO,IAAI;EACf,CAAC,MAAM;IACH,OAAO,KAAK;EAChB;AACJ;AAEA,OAAO,SAASC,6BAA6BA,CACzC/J,eAAuE,EACvE2I,KAAa,EACbrF,UAA2B,EACA;EAC3B,IAAMb,WAAW,GAAGjE,2BAA2B,CAACwB,eAAe,CAAC+C,MAAM,CAACiH,UAAU,CAAC;EAClF,IAAMC,QAAQ,GAAG3G,UAAU,GAAIA,UAAU,CAA2CsE,GAAG,GAAGjJ,mBAAmB;EAC7G,IAAMuL,OAAO,GAAG5G,UAAU,GAAIA,UAAU,CAA2CjB,EAAE,GAAG,EAAE;EAC1F,OAAO1C,mBAAmB,CAACK,eAAe,CAAC+C,MAAM,EAAE;IAC/CoH,QAAQ,EAAE;MACNC,GAAG,EAAE,CACD;QACI,WAAW,EAAE;UACTC,GAAG,EAAEJ;QACT;MACJ,CAAC,EACD;QACI,WAAW,EAAE;UACTK,GAAG,EAAEL;QACT,CAAC;QACD,CAACxH,WAAW,GAAG;UACX4H,GAAG,EAAE/G,UAAU,GAAG4G,OAAO,GAAG;QAChC;MACJ,CAAC,CACJ;MACD;MACA,WAAW,EAAE;QACTK,IAAI,EAAEN;MACV;IACJ,CAAQ;IACRO,IAAI,EAAE,CACF;MAAE,WAAW,EAAE;IAAM,CAAC,EACtB;MAAE,CAAC/H,WAAW,GAAG;IAAM,CAAC,CACpB;IACRgI,IAAI,EAAE,CAAC;IACP9B;IACA;AACR;AACA;AACA;AACA;AACA;AACA;IACQ;EACJ,CAAC,CAAC;AACN;AAEA,OAAO,eAAeD,wBAAwBA,CAC1C1I,eAAuE,EACvE2I,KAAa,EACbrF,UAA2B,EAS5B;EACC,IAAItD,eAAe,CAAC0I,wBAAwB,EAAE;IAC1C,OAAO1I,eAAe,CAAC0I,wBAAwB,CAACC,KAAK,EAAErF,UAAU,CAAC;EACtE;EAEA,IAAMb,WAAW,GAAGjE,2BAA2B,CAACwB,eAAe,CAAC+C,MAAM,CAACiH,UAAU,CAAC;EAClF,IAAM5B,KAAK,GAAG1I,YAAY,CACtBM,eAA
e,CAAC+C,MAAM,EACtBgH,6BAA6B,CACzB/J,eAAe,EACf2I,KAAK,EACLrF,UACJ,CACJ,CAAC;EAED,IAAMoH,MAAM,GAAG,MAAM1K,eAAe,CAACoI,KAAK,CAACA,KAAK,CAAC;EACjD,IAAMuC,SAAS,GAAGD,MAAM,CAACC,SAAS;EAClC,IAAMC,OAAO,GAAG1L,WAAW,CAACyL,SAAS,CAAC;EAEtC,OAAO;IACHA,SAAS,EAAEA,SAAS;IACpBrH,UAAU,EAAEsH,OAAO,GAAG;MAClBvI,EAAE,EAAGuI,OAAO,CAASnI,WAAW,CAAC;MACjCmF,GAAG,EAAEgD,OAAO,CAACjE,KAAK,CAACiB;IACvB,CAAC,GAAUtE,UAAU,GAAGA,UAAU,GAAG;MACjCjB,EAAE,EAAE,EAAE;MACNuF,GAAG,EAAE;IACT;EACJ,CAAC;AACL;;AAGA;AACA;AACA;AACA;AACA,OAAO,SAASiD,kBAAkBA,CAC9BC,KAIC,EAC4C;EAC7C;AACJ;AACA;AACA;EACI,IAAIC,4BAA0C,GAAGtM,oBAAoB;EAErE,IAAMuM,UAAyD,GAAG;IAC9D5I,IAAI,EAAE,eAAe,GAAG0I,KAAK,CAACG,OAAO,CAAC7I,IAAI;IAC1C8I,WAAW,EAAExM,YAAY;IACzB,MAAMyM,qBAAqBA,CAAC1B,MAAM,EAAE;MAChC,MAAMrK,WAAW,CAAC0L,KAAK,CAACM,eAAe,CAAC,CAAC,CAAC;MAC1C,IAAMpL,eAAe,GAAG,MAAM8K,KAAK,CAACG,OAAO,CAACE,qBAAqB,CAAC1B,MAAM,CAAC;MACzE,MAAMrK,WAAW,CAAC0L,KAAK,CAACO,cAAc,CAAC,CAAC,CAAC;MAEzC,OAAO;QACHlE,YAAY,EAAEnH,eAAe,CAACmH,YAAY;QAC1CF,SAAS,EAAEjH,eAAe,CAACiH,SAAS;QACpCG,OAAO,EAAEpH,eAAe,CAACoH,OAAO;QAChCrE,MAAM,EAAE/C,eAAe,CAAC+C,MAAM;QAC9BmE,cAAc,EAAElH,eAAe,CAACkH,cAAc;QAC9CvG,SAASA,CAAC2K,CAAC,EAAEC,CAAC,EAAE;UACZR,4BAA4B,GAAGA,4BAA4B,CAACjD,IAAI,CAAC,YAAY;YACzE,MAAM1I,WAAW,CAAC0L,KAAK,CAACM,eAAe,CAAC,CAAC,CAAC;YAC1C,IAAMI,QAAQ,GAAG,MAAMxL,eAAe,CAACW,SAAS,CAAC2K,CAAC,EAAEC,CAAC,CAAC;YACtD,MAAMnM,WAAW,CAAC0L,KAAK,CAACO,cAAc,CAAC,CAAC,CAAC;YACzC,OAAOG,QAAQ;UACnB,CAAC,CAAC;UACF,IAAM1K,GAAG,GAAGiK,4BAA4B;UACxC,OAAOjK,GAAG;QACd,CAAC;QACD,MAAMX,iBAAiBA,CAACmL,CAAC,EAAEC,CAAC,EAAE;UAC1B,MAAMnM,WAAW,CAAC0L,KAAK,CAACM,eAAe,CAAC,CAAC,CAAC;UAC1C,IAAMtK,GAAG,GAAG,MAAMd,eAAe,CAACG,iBAAiB,CAACmL,CAAC,EAAEC,CAAC,CAAC;UACzD,MAAMnM,WAAW,CAAC0L,KAAK,CAACO,cAAc,CAAC,CAAC,CAAC;UACzC,OAAOvK,GAAG;QACd,CAAC;QACD,MAAMsH,KAAKA,CAACkD,CAAC,EAAE;UACX,MAAMlM,WAAW,CAAC0L,KAAK,CAACM,eAAe,CAAC,CAAC,CAAC;UAC1C,IAAMtK,GAAG,GAAG,MAAMd,eAAe,CAACoI,KAAK,CAACkD,CAAC,CAAC;UAC1C,OAAOxK,GAAG;QACd,CAAC;QACD,MAAMwH,KAAKA,CAACgD,CAAC,EAAE;UACX,MAAMlM,WAAW,CAAC0L,KAAK,CAACM,eAAe,
CAAC,CAAC,CAAC;UAC1C,IAAMtK,GAAG,GAAG,MAAMd,eAAe,CAACsI,KAAK,CAACgD,CAAC,CAAC;UAC1C,MAAMlM,WAAW,CAAC0L,KAAK,CAACO,cAAc,CAAC,CAAC,CAAC;UACzC,OAAOvK,GAAG;QAEd,CAAC;QACD,MAAM2H,iBAAiBA,CAAC6C,CAAC,EAAEC,CAAC,EAAEE,CAAC,EAAE;UAC7B,MAAMrM,WAAW,CAAC0L,KAAK,CAACM,eAAe,CAAC,CAAC,CAAC;UAC1C,IAAMtK,GAAG,GAAG,MAAMd,eAAe,CAACyI,iBAAiB,CAAC6C,CAAC,EAAEC,CAAC,EAAEE,CAAC,CAAC;UAC5D,MAAMrM,WAAW,CAAC0L,KAAK,CAACO,cAAc,CAAC,CAAC,CAAC;UACzC,OAAOvK,GAAG;QAEd,CAAC;QACD4H,wBAAwB,EAAE,CAAC1I,eAAe,CAAC0I,wBAAwB,GAAGrI,SAAS,GAAG,OAAOiL,CAAC,EAAEC,CAAC,KAAK;UAC9F,MAAMnM,WAAW,CAAC0L,KAAK,CAACM,eAAe,CAAC,CAAC,CAAC;UAC1C,IAAMtK,GAAG,GAAG,MAAMhC,cAAc,CAACkB,eAAe,CAAC0I,wBAAwB,CAAC,CAAC4C,CAAC,EAAEC,CAAC,CAAC;UAChF,MAAMnM,WAAW,CAAC0L,KAAK,CAACO,cAAc,CAAC,CAAC,CAAC;UACzC,OAAOvK,GAAG;QAEd,CAAC;QACDI,YAAYA,CAAA,EAAG;UACX,OAAOlB,eAAe,CAACkB,YAAY,CAAC,CAAC;QACzC,CAAC;QACDgI,sBAAsBA,CAAA,EAAG;UACrB,OAAOlJ,eAAe,CAACkJ,sBAAsB,CAAC,CAAC;QACnD,CAAC;QACDC,4BAA4BA,CAACmC,CAAC,EAAE;UAC5B,OAAOtL,eAAe,CAACmJ,4BAA4B,CAACmC,CAAC,CAAC;QAC1D,CAAC;QACD,MAAM1C,OAAOA,CAAC0C,CAAC,EAAE;UACb,MAAMlM,WAAW,CAAC0L,KAAK,CAACM,eAAe,CAAC,CAAC,CAAC;UAC1C,IAAMtK,GAAG,GAAG,MAAMd,eAAe,CAAC4I,OAAO,CAAC0C,CAAC,CAAC;UAC5C,MAAMlM,WAAW,CAAC0L,KAAK,CAACO,cAAc,CAAC,CAAC,CAAC;UACzC,OAAOvK,GAAG;QAEd,CAAC;QACD,MAAMmI,KAAKA,CAAA,EAAG;UACV,MAAM7J,WAAW,CAAC0L,KAAK,CAACM,eAAe,CAAC,CAAC,CAAC;UAC1C,IAAMtK,GAAG,GAAG,MAAMd,eAAe,CAACiJ,KAAK,CAAC,CAAC;UACzC,MAAM7J,WAAW,CAAC0L,KAAK,CAACO,cAAc,CAAC,CAAC,CAAC;UACzC,OAAOvK,GAAG;QAEd,CAAC;QACD,MAAMgI,MAAMA,CAAA,EAAG;UACX,MAAM1J,WAAW,CAAC0L,KAAK,CAACM,eAAe,CAAC,CAAC,CAAC;UAC1C,IAAMtK,GAAG,GAAG,MAAMd,eAAe,CAAC8I,MAAM,CAAC,CAAC;UAC1C,MAAM1J,WAAW,CAAC0L,KAAK,CAACO,cAAc,CAAC,CAAC,CAAC;UACzC,OAAOvK,GAAG;QACd;MACJ,CAAC;IAGL;EACJ,CAAC;EACD,OAAOkK,UAAU;AACrB","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/rx-storage-multiinstance.js b/dist/esm/rx-storage-multiinstance.js deleted file mode 100644 index 4598f80c5b5..00000000000 --- a/dist/esm/rx-storage-multiinstance.js +++ /dev/null @@ -1,117 
+0,0 @@ -/** - * When a persistent RxStorage is used in more the one JavaScript process, - * the even stream of the changestream() function must be broadcasted to the other - * RxStorageInstances of the same databaseName+collectionName. - * - * In the past this was done by RxDB but it makes more sense to do this - * at the RxStorage level so that the broadcasting etc can all happen inside of a WebWorker - * and not on the main thread. - * Also it makes it less complex to stack up different RxStorages onto each other - * like what we do with the in-memory plugin. - * - * This is intended to be used inside of createStorageInstance() of a storage. - * Do not use this if the storage anyway broadcasts the events like when using MongoDB - * or in the future W3C might introduce a way to listen to IndexedDB changes. - */ - -import { Subject } from 'rxjs'; -import { mergeWith } from 'rxjs/operators'; -import { BroadcastChannel } from 'broadcast-channel'; - -/** - * The broadcast-channel is reused by the databaseInstanceToken. - * This is required so that it is easy to simulate multi-tab usage - * in the test where different instances of the same RxDatabase must - * have different broadcast channels. - * But also it ensures that for each RxDatabase we only create a single - * broadcast channel that can even be reused in the leader election plugin. - * - * TODO at the end of the unit tests, - * we should ensure that all channels are closed and cleaned up. - * Otherwise we have forgot something. 
- */ -export var BROADCAST_CHANNEL_BY_TOKEN = new Map(); -export function getBroadcastChannelReference(storageName, databaseInstanceToken, databaseName, refObject) { - var state = BROADCAST_CHANNEL_BY_TOKEN.get(databaseInstanceToken); - if (!state) { - state = { - /** - * We have to use the databaseName instead of the databaseInstanceToken - * in the BroadcastChannel name because different instances must end with the same - * channel name to be able to broadcast messages between each other. - */ - bc: new BroadcastChannel(['RxDB:', storageName, databaseName].join('|')), - refs: new Set() - }; - BROADCAST_CHANNEL_BY_TOKEN.set(databaseInstanceToken, state); - } - state.refs.add(refObject); - return state.bc; -} -export function removeBroadcastChannelReference(databaseInstanceToken, refObject) { - var state = BROADCAST_CHANNEL_BY_TOKEN.get(databaseInstanceToken); - if (!state) { - return; - } - state.refs.delete(refObject); - if (state.refs.size === 0) { - BROADCAST_CHANNEL_BY_TOKEN.delete(databaseInstanceToken); - return state.bc.close(); - } -} -export function addRxStorageMultiInstanceSupport(storageName, instanceCreationParams, instance, -/** - * If provided, that channel will be used - * instead of an own one. - */ -providedBroadcastChannel) { - if (!instanceCreationParams.multiInstance) { - return; - } - var broadcastChannel = providedBroadcastChannel ? 
providedBroadcastChannel : getBroadcastChannelReference(storageName, instanceCreationParams.databaseInstanceToken, instance.databaseName, instance); - var changesFromOtherInstances$ = new Subject(); - var eventListener = msg => { - if (msg.storageName === storageName && msg.databaseName === instanceCreationParams.databaseName && msg.collectionName === instanceCreationParams.collectionName && msg.version === instanceCreationParams.schema.version) { - changesFromOtherInstances$.next(msg.eventBulk); - } - }; - broadcastChannel.addEventListener('message', eventListener); - var oldChangestream$ = instance.changeStream(); - var closed = false; - var sub = oldChangestream$.subscribe(eventBulk => { - if (closed) { - return; - } - broadcastChannel.postMessage({ - storageName: storageName, - databaseName: instanceCreationParams.databaseName, - collectionName: instanceCreationParams.collectionName, - version: instanceCreationParams.schema.version, - eventBulk - }); - }); - instance.changeStream = function () { - return changesFromOtherInstances$.asObservable().pipe(mergeWith(oldChangestream$)); - }; - var oldClose = instance.close.bind(instance); - instance.close = async function () { - closed = true; - sub.unsubscribe(); - broadcastChannel.removeEventListener('message', eventListener); - if (!providedBroadcastChannel) { - await removeBroadcastChannelReference(instanceCreationParams.databaseInstanceToken, instance); - } - return oldClose(); - }; - var oldRemove = instance.remove.bind(instance); - instance.remove = async function () { - closed = true; - sub.unsubscribe(); - broadcastChannel.removeEventListener('message', eventListener); - if (!providedBroadcastChannel) { - await removeBroadcastChannelReference(instanceCreationParams.databaseInstanceToken, instance); - } - return oldRemove(); - }; -} -//# sourceMappingURL=rx-storage-multiinstance.js.map \ No newline at end of file diff --git a/dist/esm/rx-storage-multiinstance.js.map b/dist/esm/rx-storage-multiinstance.js.map 
deleted file mode 100644 index 3f3d1391c3d..00000000000 --- a/dist/esm/rx-storage-multiinstance.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-storage-multiinstance.js","names":["Subject","mergeWith","BroadcastChannel","BROADCAST_CHANNEL_BY_TOKEN","Map","getBroadcastChannelReference","storageName","databaseInstanceToken","databaseName","refObject","state","get","bc","join","refs","Set","set","add","removeBroadcastChannelReference","delete","size","close","addRxStorageMultiInstanceSupport","instanceCreationParams","instance","providedBroadcastChannel","multiInstance","broadcastChannel","changesFromOtherInstances$","eventListener","msg","collectionName","version","schema","next","eventBulk","addEventListener","oldChangestream$","changeStream","closed","sub","subscribe","postMessage","asObservable","pipe","oldClose","bind","unsubscribe","removeEventListener","oldRemove","remove"],"sources":["../../src/rx-storage-multiinstance.ts"],"sourcesContent":["/**\n * When a persistent RxStorage is used in more the one JavaScript process,\n * the even stream of the changestream() function must be broadcasted to the other\n * RxStorageInstances of the same databaseName+collectionName.\n *\n * In the past this was done by RxDB but it makes more sense to do this\n * at the RxStorage level so that the broadcasting etc can all happen inside of a WebWorker\n * and not on the main thread.\n * Also it makes it less complex to stack up different RxStorages onto each other\n * like what we do with the in-memory plugin.\n *\n * This is intended to be used inside of createStorageInstance() of a storage.\n * Do not use this if the storage anyway broadcasts the events like when using MongoDB\n * or in the future W3C might introduce a way to listen to IndexedDB changes.\n */\n\nimport { Observable, Subject } from 'rxjs';\nimport { mergeWith } from 'rxjs/operators';\nimport type {\n EventBulk,\n RxStorageChangeEvent,\n RxStorageInstance,\n RxStorageInstanceCreationParams\n} from 
'./types/index.d.ts';\n\nimport {\n BroadcastChannel\n} from 'broadcast-channel';\n\n/**\n * The broadcast-channel is reused by the databaseInstanceToken.\n * This is required so that it is easy to simulate multi-tab usage\n * in the test where different instances of the same RxDatabase must\n * have different broadcast channels.\n * But also it ensures that for each RxDatabase we only create a single\n * broadcast channel that can even be reused in the leader election plugin.\n *\n * TODO at the end of the unit tests,\n * we should ensure that all channels are closed and cleaned up.\n * Otherwise we have forgot something.\n */\nexport const BROADCAST_CHANNEL_BY_TOKEN: Map;\n /**\n * Contains all context objects that currently use the channel.\n * If this becomes empty, we can close the channel\n */\n refs: Set;\n}> = new Map();\n\n\nexport type RxStorageMultiInstanceBroadcastType = {\n storageName: string;\n collectionName: string;\n /**\n * collection.schema.version\n */\n version: number;\n databaseName: string;\n eventBulk: EventBulk;\n};\n\nexport function getBroadcastChannelReference(\n storageName: string,\n databaseInstanceToken: string,\n databaseName: string,\n refObject: any\n): BroadcastChannel {\n let state = BROADCAST_CHANNEL_BY_TOKEN.get(databaseInstanceToken);\n if (!state) {\n state = {\n /**\n * We have to use the databaseName instead of the databaseInstanceToken\n * in the BroadcastChannel name because different instances must end with the same\n * channel name to be able to broadcast messages between each other.\n */\n bc: new BroadcastChannel(['RxDB:', storageName, databaseName].join('|')),\n refs: new Set()\n };\n BROADCAST_CHANNEL_BY_TOKEN.set(databaseInstanceToken, state);\n }\n state.refs.add(refObject);\n return state.bc;\n}\n\nexport function removeBroadcastChannelReference(\n databaseInstanceToken: string,\n refObject: any\n) {\n const state = BROADCAST_CHANNEL_BY_TOKEN.get(databaseInstanceToken);\n if (!state) {\n return;\n }\n 
state.refs.delete(refObject);\n if (state.refs.size === 0) {\n BROADCAST_CHANNEL_BY_TOKEN.delete(databaseInstanceToken);\n return state.bc.close();\n }\n}\n\n\nexport function addRxStorageMultiInstanceSupport(\n storageName: string,\n instanceCreationParams: RxStorageInstanceCreationParams,\n instance: RxStorageInstance,\n /**\n * If provided, that channel will be used\n * instead of an own one.\n */\n providedBroadcastChannel?: BroadcastChannel\n) {\n if (!instanceCreationParams.multiInstance) {\n return;\n }\n\n type Emit = EventBulk, any>;\n\n const broadcastChannel = providedBroadcastChannel ?\n providedBroadcastChannel :\n getBroadcastChannelReference(\n storageName,\n instanceCreationParams.databaseInstanceToken,\n instance.databaseName,\n instance\n );\n\n const changesFromOtherInstances$: Subject = new Subject();\n\n\n const eventListener = (msg: RxStorageMultiInstanceBroadcastType) => {\n if (\n msg.storageName === storageName &&\n msg.databaseName === instanceCreationParams.databaseName &&\n msg.collectionName === instanceCreationParams.collectionName &&\n msg.version === instanceCreationParams.schema.version\n ) {\n changesFromOtherInstances$.next(msg.eventBulk);\n }\n };\n\n broadcastChannel.addEventListener('message', eventListener);\n\n const oldChangestream$ = instance.changeStream();\n\n let closed = false;\n const sub = oldChangestream$.subscribe(eventBulk => {\n if (closed) {\n return;\n }\n broadcastChannel.postMessage({\n storageName: storageName,\n databaseName: instanceCreationParams.databaseName,\n collectionName: instanceCreationParams.collectionName,\n version: instanceCreationParams.schema.version,\n eventBulk\n });\n });\n\n instance.changeStream = function (): Observable {\n return changesFromOtherInstances$.asObservable().pipe(\n mergeWith(oldChangestream$)\n );\n };\n\n const oldClose = instance.close.bind(instance);\n instance.close = async function () {\n closed = true;\n sub.unsubscribe();\n 
broadcastChannel.removeEventListener('message', eventListener);\n if (!providedBroadcastChannel) {\n await removeBroadcastChannelReference(\n instanceCreationParams.databaseInstanceToken,\n instance\n );\n }\n return oldClose();\n };\n\n const oldRemove = instance.remove.bind(instance);\n instance.remove = async function () {\n closed = true;\n sub.unsubscribe();\n broadcastChannel.removeEventListener('message', eventListener);\n if (!providedBroadcastChannel) {\n await removeBroadcastChannelReference(\n instanceCreationParams.databaseInstanceToken,\n instance\n );\n }\n return oldRemove();\n };\n}\n"],"mappings":"AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA,SAAqBA,OAAO,QAAQ,MAAM;AAC1C,SAASC,SAAS,QAAQ,gBAAgB;AAQ1C,SACIC,gBAAgB,QACb,mBAAmB;;AAE1B;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,OAAO,IAAMC,0BAOX,GAAG,IAAIC,GAAG,CAAC,CAAC;AAcd,OAAO,SAASC,4BAA4BA,CACxCC,WAAmB,EACnBC,qBAA6B,EAC7BC,YAAoB,EACpBC,SAAc,EACuC;EACrD,IAAIC,KAAK,GAAGP,0BAA0B,CAACQ,GAAG,CAACJ,qBAAqB,CAAC;EACjE,IAAI,CAACG,KAAK,EAAE;IACRA,KAAK,GAAG;MACJ;AACZ;AACA;AACA;AACA;MACYE,EAAE,EAAE,IAAIV,gBAAgB,CAAC,CAAC,OAAO,EAAEI,WAAW,EAAEE,YAAY,CAAC,CAACK,IAAI,CAAC,GAAG,CAAC,CAAC;MACxEC,IAAI,EAAE,IAAIC,GAAG,CAAM;IACvB,CAAC;IACDZ,0BAA0B,CAACa,GAAG,CAACT,qBAAqB,EAAEG,KAAK,CAAC;EAChE;EACAA,KAAK,CAACI,IAAI,CAACG,GAAG,CAACR,SAAS,CAAC;EACzB,OAAOC,KAAK,CAACE,EAAE;AACnB;AAEA,OAAO,SAASM,+BAA+BA,CAC3CX,qBAA6B,EAC7BE,SAAc,EAChB;EACE,IAAMC,KAAK,GAAGP,0BAA0B,CAACQ,GAAG,CAACJ,qBAAqB,CAAC;EACnE,IAAI,CAACG,KAAK,EAAE;IACR;EACJ;EACAA,KAAK,CAACI,IAAI,CAACK,MAAM,CAACV,SAAS,CAAC;EAC5B,IAAIC,KAAK,CAACI,IAAI,CAACM,IAAI,KAAK,CAAC,EAAE;IACvBjB,0BAA0B,CAACgB,MAAM,CAACZ,qBAAqB,CAAC;IACxD,OAAOG,KAAK,CAACE,EAAE,CAACS,KAAK,CAAC,CAAC;EAC3B;AACJ;AAGA,OAAO,SAASC,gCAAgCA,CAC5ChB,WAAmB,EACnBiB,sBAAuE,EACvEC,QAAgD;AAChD;AACJ;AACA;AACA;AACIC,wBAAgD,EAClD;EACE,IAAI,CAACF,sBAAsB,CAACG,aAAa,EAAE;IACvC;EACJ;EAIA,IAAMC,gBAAgB,GAAGF,wBAAwB,GAC7CA,wBAAwB,GACxBpB,4BAA4B,CACxBC,WAAW,EACXiB,sBAAsB,CAAChB,
qBAAqB,EAC5CiB,QAAQ,CAAChB,YAAY,EACrBgB,QACJ,CAAC;EAEL,IAAMI,0BAAyC,GAAG,IAAI5B,OAAO,CAAC,CAAC;EAG/D,IAAM6B,aAAa,GAAIC,GAAwC,IAAK;IAChE,IACIA,GAAG,CAACxB,WAAW,KAAKA,WAAW,IAC/BwB,GAAG,CAACtB,YAAY,KAAKe,sBAAsB,CAACf,YAAY,IACxDsB,GAAG,CAACC,cAAc,KAAKR,sBAAsB,CAACQ,cAAc,IAC5DD,GAAG,CAACE,OAAO,KAAKT,sBAAsB,CAACU,MAAM,CAACD,OAAO,EACvD;MACEJ,0BAA0B,CAACM,IAAI,CAACJ,GAAG,CAACK,SAAS,CAAC;IAClD;EACJ,CAAC;EAEDR,gBAAgB,CAACS,gBAAgB,CAAC,SAAS,EAAEP,aAAa,CAAC;EAE3D,IAAMQ,gBAAgB,GAAGb,QAAQ,CAACc,YAAY,CAAC,CAAC;EAEhD,IAAIC,MAAM,GAAG,KAAK;EAClB,IAAMC,GAAG,GAAGH,gBAAgB,CAACI,SAAS,CAACN,SAAS,IAAI;IAChD,IAAII,MAAM,EAAE;MACR;IACJ;IACAZ,gBAAgB,CAACe,WAAW,CAAC;MACzBpC,WAAW,EAAEA,WAAW;MACxBE,YAAY,EAAEe,sBAAsB,CAACf,YAAY;MACjDuB,cAAc,EAAER,sBAAsB,CAACQ,cAAc;MACrDC,OAAO,EAAET,sBAAsB,CAACU,MAAM,CAACD,OAAO;MAC9CG;IACJ,CAAC,CAAC;EACN,CAAC,CAAC;EAEFX,QAAQ,CAACc,YAAY,GAAG,YAA8B;IAClD,OAAOV,0BAA0B,CAACe,YAAY,CAAC,CAAC,CAACC,IAAI,CACjD3C,SAAS,CAACoC,gBAAgB,CAC9B,CAAC;EACL,CAAC;EAED,IAAMQ,QAAQ,GAAGrB,QAAQ,CAACH,KAAK,CAACyB,IAAI,CAACtB,QAAQ,CAAC;EAC9CA,QAAQ,CAACH,KAAK,GAAG,kBAAkB;IAC/BkB,MAAM,GAAG,IAAI;IACbC,GAAG,CAACO,WAAW,CAAC,CAAC;IACjBpB,gBAAgB,CAACqB,mBAAmB,CAAC,SAAS,EAAEnB,aAAa,CAAC;IAC9D,IAAI,CAACJ,wBAAwB,EAAE;MAC3B,MAAMP,+BAA+B,CACjCK,sBAAsB,CAAChB,qBAAqB,EAC5CiB,QACJ,CAAC;IACL;IACA,OAAOqB,QAAQ,CAAC,CAAC;EACrB,CAAC;EAED,IAAMI,SAAS,GAAGzB,QAAQ,CAAC0B,MAAM,CAACJ,IAAI,CAACtB,QAAQ,CAAC;EAChDA,QAAQ,CAAC0B,MAAM,GAAG,kBAAkB;IAChCX,MAAM,GAAG,IAAI;IACbC,GAAG,CAACO,WAAW,CAAC,CAAC;IACjBpB,gBAAgB,CAACqB,mBAAmB,CAAC,SAAS,EAAEnB,aAAa,CAAC;IAC9D,IAAI,CAACJ,wBAAwB,EAAE;MAC3B,MAAMP,+BAA+B,CACjCK,sBAAsB,CAAChB,qBAAqB,EAC5CiB,QACJ,CAAC;IACL;IACA,OAAOyB,SAAS,CAAC,CAAC;EACtB,CAAC;AACL","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/conflict-handling.d.js b/dist/esm/types/conflict-handling.d.js deleted file mode 100644 index c3179137c06..00000000000 --- a/dist/esm/types/conflict-handling.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# 
sourceMappingURL=conflict-handling.d.js.map \ No newline at end of file diff --git a/dist/esm/types/conflict-handling.d.js.map b/dist/esm/types/conflict-handling.d.js.map deleted file mode 100644 index e9584db0a01..00000000000 --- a/dist/esm/types/conflict-handling.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"conflict-handling.d.js","names":[],"sources":["../../../src/types/conflict-handling.d.ts"],"sourcesContent":["import type { WithDeleted } from './rx-storage.d.ts';\n\n/**\n * Notice that the conflict handler input/output\n * does not work on RxDocumentData, but only on WithDeleted.\n * This is because the _meta attributes are meant for the local storing of document data, they not replicated\n * and therefore cannot be used to resolve conflicts.\n */\nexport type RxConflictHandlerInput = {\n assumedMasterState?: WithDeleted;\n realMasterState: WithDeleted;\n newDocumentState: WithDeleted;\n};\n\n/**\n * The conflict handler either returns:\n * - The resolved new document state\n * - A flag to identify the given 'realMasterState' and 'newDocumentState'\n * as being exactly equal, so no conflict has to be resolved.\n */\nexport type RxConflictHandlerOutput = {\n isEqual: false;\n documentData: WithDeleted;\n} | {\n isEqual: true;\n};\n\nexport type RxConflictHandler = (\n i: RxConflictHandlerInput,\n context: string\n) => Promise>;\n\nexport type RxConflictResultionTask = {\n /**\n * Unique id for that single task.\n */\n id: string;\n /**\n * Tasks must have a context\n * which makes it easy to filter/identify them again\n * with plugins or other hacky stuff.\n */\n context: string;\n input: RxConflictHandlerInput;\n};\n\n\nexport type RxConflictResultionTaskSolution = {\n /**\n * Id of the RxConflictResultionTask\n */\n id: string;\n output: RxConflictHandlerOutput;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/couchdb.d.js b/dist/esm/types/couchdb.d.js deleted file mode 100644 index 
1c15e5e0a5e..00000000000 --- a/dist/esm/types/couchdb.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=couchdb.d.js.map \ No newline at end of file diff --git a/dist/esm/types/couchdb.d.js.map b/dist/esm/types/couchdb.d.js.map deleted file mode 100644 index 6f8ed63d830..00000000000 --- a/dist/esm/types/couchdb.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"couchdb.d.js","names":[],"sources":["../../../src/types/couchdb.d.ts"],"sourcesContent":["import type {\n MangoQuery,\n MangoQuerySelector,\n MangoQuerySortPart\n} from './rx-query.d.ts';\nimport type { BulkWriteRow } from './rx-storage.d.ts';\n\n/**\n * This file contains types that are CouchDB specific\n */\n\nexport interface CouchReplicationOptions {\n live?: boolean;\n retry?: boolean;\n filter?: Function;\n doc_ids?: string[];\n query_params?: any;\n view?: any;\n since?: number | 'now';\n heartbeat?: number;\n timeout?: number;\n batch_size?: number;\n batches_limit?: number;\n back_off_function?: Function;\n checkpoint?: false | 'source' | 'target';\n include_docs?: boolean;\n limit?: number;\n}\n\nexport interface CouchChangesOptionsBase {\n include_docs?: boolean;\n conflicts?: boolean;\n attachments?: boolean;\n binary?: boolean;\n descending?: boolean;\n since?: any;\n limit?: number;\n timeout?: any;\n heartbeat?: number | boolean;\n filter?: any;\n doc_ids?: string | string[];\n query_param?: any;\n view?: any;\n return_docs?: boolean;\n batch_size?: number;\n style?: string;\n}\n\nexport interface CouchChangesOptionsLive extends CouchChangesOptionsBase {\n live: true;\n}\n\nexport interface CouchChangesOptionsNonLive extends CouchChangesOptionsBase {\n live: false;\n}\ninterface CouchChangesOnChangeEvent {\n on: (eventName: string, handler: Function) => void;\n off: (eventName: string, handler: Function) => void;\n cancel(): void;\n}\n\nexport type CouchWriteError = {\n /**\n * status code from couchdb\n * 409 for 'conflict'\n */\n status: number;\n 
error: true;\n /**\n * primary key value of the errored document\n */\n id: string;\n};\n\n/**\n * possible couch-settings\n * @link https://couchdb.com/api.html#create_database\n */\nexport interface CouchSettings {\n auto_compaction?: boolean;\n revs_limit?: number;\n ajax?: any;\n fetch?: any;\n auth?: any;\n skip_setup?: boolean;\n storage?: any;\n size?: number;\n location?: string;\n iosDatabaseLocation?: string;\n}\n\n/**\n * options for couch.allDocs()\n * @link https://couchdb.com/api.html#batch_fetch\n */\nexport type CouchAllDocsOptions = {\n include_docs?: boolean;\n conflicts?: boolean;\n attachments?: boolean;\n binary?: boolean;\n startkey?: string;\n endkey?: string;\n inclusive_end?: boolean;\n limit?: number;\n skip?: number;\n descending?: boolean;\n key?: string;\n keys?: string[];\n update_seq?: string;\n\n // undocument but needed\n revs?: boolean;\n deleted?: 'ok';\n};\n\nexport type CouchSyncHandlerEvents = 'change' | 'paused' | 'active' | 'error' | 'complete';\nexport type CouchSyncHandler = {\n on(ev: CouchSyncHandlerEvents, fn: (el: any) => void): void;\n off(ev: CouchSyncHandlerEvents, fn: any): void;\n cancel(): void;\n};\n\nexport type CouchChangeRow = {\n id: string;\n seq: number;\n deleted?: true;\n changes: {\n rev: 'string';\n }[];\n /**\n * only if include_docs === true\n */\n doc?: CouchChangeDoc;\n};\n\nexport type CouchAttachmentMeta = {\n digest: string;\n content_type: string;\n length: number;\n stub: boolean;\n\n /**\n * 'revpos indicates the generation number (numeric prefix in the revID) at which the attachment was last altered'\n * @link https://github.com/couchbase/couchbase-lite-ios/issues/1200#issuecomment-206444554\n */\n revpos: number;\n};\n\nexport type CouchAttachmentWithData = CouchAttachmentMeta & {\n /**\n * Base64 string with the data\n * or directly a buffer\n */\n data: Blob;\n type: string;\n /**\n * If set, must be false\n * because we have the full data and not only a stub.\n */\n stub?: 
false;\n};\n\nexport type CouchChangeDoc = {\n _id: string;\n _rev: string;\n /**\n * True if the document is deleted.\n */\n _deleted?: boolean;\n _attachments: {\n [attachmentId: string]: CouchAttachmentMeta;\n };\n};\n\nexport type WithAttachments = Data & {\n /**\n * Intentional optional,\n * if the document has no attachments,\n * we do NOT have an empty object.\n */\n _attachments?: {\n [attachmentId: string]: CouchAttachmentMeta;\n };\n};\nexport type WithAttachmentsData = Data & {\n /**\n * Intentional optional,\n * if the document has no attachments,\n * we do NOT have an empty object.\n */\n _attachments?: {\n [attachmentId: string]: CouchAttachmentWithData;\n };\n};\n\n\nexport type WithCouchMeta = Data & {\n _rev: string;\n _attachments?: {\n [attachmentId: string]: CouchAttachmentMeta;\n };\n _deleted?: boolean;\n};\n\nexport type CouchdbChangesResult = {\n results: CouchChangeRow[];\n last_seq: number;\n};\n\ndeclare type Debug = {\n enable(what: string): void;\n disable(): void;\n};\n\nexport type CouchDbSorting = (string | string[] | { [k: string]: 'asc' | 'desc' | 1 | -1; })[];\n\n// this is not equal to the standard MangoQuery\n// because of different sorting\nexport type CouchdbQuery = MangoQuery & {\n sort?: CouchDbSorting;\n};\n\nexport type CouchBulkDocResultRow = {\n ok: boolean;\n id: string;\n rev: string;\n\n error?: 'conflict';\n reason?: string;\n};\n\nexport type CouchCheckpoint = {\n sequence: number;\n};\n\nexport type CouchBulkDocOptions = {\n new_edits?: boolean;\n\n // custom options for RxDB\n isDeeper?: boolean;\n custom?: {\n primaryPath: string;\n writeRowById: Map>;\n insertDocsById: Map;\n previousDocsInDb: Map;\n context: string;\n };\n};\n\nexport type CouchMangoQuery = MangoQuery & {\n index: undefined;\n use_index?: string;\n};\n\nexport type ExplainedCouchQuery = {\n dbname: string;\n index: {\n ddoc: string | null;\n name: string; // 'idx-rxdb-index-age,_id'\n type: 'json';\n def: {\n fields: MangoQuerySortPart[];\n 
};\n };\n selector: MangoQuerySelector;\n range: {\n start_key: any[];\n end_key: any[];\n };\n opts: {\n use_index: string[];\n bookmark: string;\n sort: MangoQuerySortPart[];\n conflicts: boolean;\n r: any[];\n };\n skip: number;\n};\n\nexport type CouchAllDocsResponse = {\n offset: number;\n rows: {\n id: string;\n doc: any;\n key: string;\n value: {\n rev: string;\n deleted?: boolean;\n };\n error?: 'not_found' | string;\n }[];\n total_rows: number;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/index.d.js b/dist/esm/types/index.d.js deleted file mode 100644 index 0f1f33b2de2..00000000000 --- a/dist/esm/types/index.d.js +++ /dev/null @@ -1,2 +0,0 @@ - -//# sourceMappingURL=index.d.js.map \ No newline at end of file diff --git a/dist/esm/types/index.d.js.map b/dist/esm/types/index.d.js.map deleted file mode 100644 index ac00bcdc177..00000000000 --- a/dist/esm/types/index.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.d.js","names":[],"sources":["../../../src/types/index.d.ts"],"sourcesContent":["export type * from './couchdb.d.ts';\nexport type * from './rx-attachment.d.ts';\nexport type * from './rx-collection.d.ts';\nexport type * from './rx-database.d.ts';\nexport type * from './rx-database-internal-store.d.ts';\nexport type * from './rx-document.d.ts';\nexport type * from './rx-error.d.ts';\nexport type * from './rx-plugin.d.ts';\nexport type * from './rx-query.d.ts';\nexport type * from './rx-schema.d.ts';\nexport type * from './rx-storage.d.ts';\nexport type * from './rx-storage.interface.d.ts';\nexport type * from './replication-protocol.d.ts';\nexport type * from './conflict-handling.d.ts';\nexport type * from './rx-change-event.d.ts';\nexport type * from './query-planner.d.ts';\nexport type * from './util.d.ts';\n\n// plugins\nexport type * from './plugins/replication.d.ts';\nexport type * from './plugins/replication-graphql.d.ts';\nexport type * from './plugins/replication.d.ts';\nexport 
type * from './plugins/local-documents.d.ts';\nexport type * from './plugins/migration.d.ts';\nexport type * from './plugins/backup.d.ts';\nexport type * from './plugins/cleanup.d.ts';\nexport type * from './plugins/lokijs.d.ts';\nexport type * from './plugins/dexie.d.ts';\nexport type * from './plugins/reactivity.d.ts';\nexport type * from './plugins/update.d.ts';\nexport type * from './plugins/crdt.d.ts';\nexport type * from './plugins/state.d.ts';\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/modules/index.d.js b/dist/esm/types/modules/index.d.js deleted file mode 100644 index 0f1f33b2de2..00000000000 --- a/dist/esm/types/modules/index.d.js +++ /dev/null @@ -1,2 +0,0 @@ - -//# sourceMappingURL=index.d.js.map \ No newline at end of file diff --git a/dist/esm/types/modules/index.d.js.map b/dist/esm/types/modules/index.d.js.map deleted file mode 100644 index 320b12e6029..00000000000 --- a/dist/esm/types/modules/index.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"index.d.js","names":[],"sources":["../../../../src/types/modules/index.d.ts"],"sourcesContent":[""],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/modules/lokijs.d.js b/dist/esm/types/modules/lokijs.d.js deleted file mode 100644 index 33bd5f0813c..00000000000 --- a/dist/esm/types/modules/lokijs.d.js +++ /dev/null @@ -1,2 +0,0 @@ - -//# sourceMappingURL=lokijs.d.js.map \ No newline at end of file diff --git a/dist/esm/types/modules/lokijs.d.js.map b/dist/esm/types/modules/lokijs.d.js.map deleted file mode 100644 index e598510d6fd..00000000000 --- a/dist/esm/types/modules/lokijs.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"lokijs.d.js","names":[],"sources":["../../../../src/types/modules/lokijs.d.ts"],"sourcesContent":["declare module 'lokijs';\ndeclare module 'lokijs/src/loki-fs-structured-adapter.js';\ndeclare module 'lokijs/src/incremental-indexeddb-adapter.js';\n"],"mappings":"","ignoreList":[]} 
\ No newline at end of file diff --git a/dist/esm/types/modules/mocha.parallel.d.js b/dist/esm/types/modules/mocha.parallel.d.js deleted file mode 100644 index 458ee742a80..00000000000 --- a/dist/esm/types/modules/mocha.parallel.d.js +++ /dev/null @@ -1,2 +0,0 @@ - -//# sourceMappingURL=mocha.parallel.d.js.map \ No newline at end of file diff --git a/dist/esm/types/modules/mocha.parallel.d.js.map b/dist/esm/types/modules/mocha.parallel.d.js.map deleted file mode 100644 index ea5b59352bf..00000000000 --- a/dist/esm/types/modules/mocha.parallel.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"mocha.parallel.d.js","names":[],"sources":["../../../../src/types/modules/mocha.parallel.d.ts"],"sourcesContent":["declare module 'mocha.parallel';\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/plugins/backup.d.js b/dist/esm/types/plugins/backup.d.js deleted file mode 100644 index 6e072730062..00000000000 --- a/dist/esm/types/plugins/backup.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=backup.d.js.map \ No newline at end of file diff --git a/dist/esm/types/plugins/backup.d.js.map b/dist/esm/types/plugins/backup.d.js.map deleted file mode 100644 index 5e851955db4..00000000000 --- a/dist/esm/types/plugins/backup.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"backup.d.js","names":[],"sources":["../../../../src/types/plugins/backup.d.ts"],"sourcesContent":["export type BackupOptions = {\n live: boolean;\n directory: string;\n /**\n * If true,\n * attachments will also be saved\n */\n attachments?: boolean;\n /**\n * How many documents can be processed in one batch\n * [default=10]\n */\n batchSize?: number;\n /**\n * If not set, all collections will be backed up.\n */\n collections?: string[];\n};\n\nexport type BackupMetaFileContent = {\n createdAt: number;\n updatedAt: number;\n collectionStates: {\n [collectionName: string]: {\n checkpoint?: any;\n };\n };\n};\n\nexport type 
RxBackupWriteEvent = {\n collectionName: string;\n documentId: string;\n files: string[];\n deleted: boolean;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/plugins/cleanup.d.js b/dist/esm/types/plugins/cleanup.d.js deleted file mode 100644 index 43c635805b3..00000000000 --- a/dist/esm/types/plugins/cleanup.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=cleanup.d.js.map \ No newline at end of file diff --git a/dist/esm/types/plugins/cleanup.d.js.map b/dist/esm/types/plugins/cleanup.d.js.map deleted file mode 100644 index 075c7b55ff3..00000000000 --- a/dist/esm/types/plugins/cleanup.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"cleanup.d.js","names":[],"sources":["../../../../src/types/plugins/cleanup.d.ts"],"sourcesContent":["export type RxCleanupPolicy = {\n /**\n * The minimum time in milliseconds\n * of how long a document must have been deleted\n * until it is purged by the cleanup.\n * This should be higher then the time you expect\n * your user to be offline for.\n * If this is too low, deleted documents might not\n * replicate their deletion state.\n */\n minimumDeletedTime: number;\n /**\n * The minimum amount of that that the RxCollection must have existed.\n * This ensures that at the initial page load, more important\n * tasks are not slowed down because a cleanup process is running.\n */\n minimumCollectionAge: number;\n /**\n * After the initial cleanup is done,\n * a new cleanup is started after [runEach] milliseconds\n */\n runEach: number;\n /**\n * If set to true,\n * RxDB will await all running replications\n * to not have a replication cycle running.\n * This ensures we do not remove deleted documents\n * when they might not have already been replicated.\n */\n awaitReplicationsInSync: boolean;\n /**\n * If true, it will only start the cleanup\n * when the current instance is also the leader.\n * This ensures that when RxDB is used in multiInstance 
mode,\n * only one instance will start the cleanup.\n */\n waitForLeadership: boolean;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/plugins/crdt.d.js b/dist/esm/types/plugins/crdt.d.js deleted file mode 100644 index a8517ff2ec7..00000000000 --- a/dist/esm/types/plugins/crdt.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=crdt.d.js.map \ No newline at end of file diff --git a/dist/esm/types/plugins/crdt.d.js.map b/dist/esm/types/plugins/crdt.d.js.map deleted file mode 100644 index 4a539147445..00000000000 --- a/dist/esm/types/plugins/crdt.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"crdt.d.js","names":[],"sources":["../../../../src/types/plugins/crdt.d.ts"],"sourcesContent":["import type { MangoQuerySelector } from '../rx-query.d.ts';\nimport type { StringKeys } from '../util.d.ts';\nimport type { UpdateQuery } from './update.d.ts';\n\n\nexport type CRDTEntry = {\n selector?: MangoQuerySelector;\n ifMatch?: UpdateQuery;\n ifNotMatch?: UpdateQuery;\n};\n\n/**\n * Options for the crdt plugin.\n * We set these in the schema because changing them\n * is not possible on the fly because it would\n * destroy the document state in an unpredictable way.\n */\nexport type CRDTSchemaOptions = {\n /**\n * Determines which field of the document must be used\n * to store the crdt operations.\n * The given field must exist with the content of \"CRDT_FIELD_SCHEMA\" in the\n * properties part of your schema.\n */\n field: StringKeys | string;\n\n /**\n * After BOTH of the limits\n * maxOperations/maxTTL is reached,\n * the document will clean up the stored operations\n * and merged them together to ensure\n * that not too many operations are stored which could slow down the\n * database operations.\n */\n // TODO not implemented yet, make a pull request if you need that.\n // maxOperations: number;\n // maxTTL: number;\n};\n\n\nexport type CRDTOperation = {\n body: CRDTEntry[];\n /**\n * 
A string to uniquely represent the creator\n * of this operation.\n * Mostly you would use the RxDatabase().storageToken().\n */\n creator: string;\n\n /**\n * Unix time in milliseconds\n * that determines when the operation was created.\n * Used to properly clean up old operations.\n */\n time: number;\n};\n\n\nexport type CRDTDocumentField = {\n /**\n * An array with arrays of CRDT operations.\n * The index of the top level array is equal\n * to the revision height where the operations\n * belong to.\n * Sorted by revision height ascending.\n * If we have a conflict and we need a rebuild,\n * the operations will be run in the revision height\n * sort order to make everything deterministic.\n */\n operations: CRDTOperation[][];\n\n /**\n * A hash to uniquely define the whole operations state.\n */\n hash: string;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/plugins/dexie.d.js b/dist/esm/types/plugins/dexie.d.js deleted file mode 100644 index 4cfb0d03693..00000000000 --- a/dist/esm/types/plugins/dexie.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=dexie.d.js.map \ No newline at end of file diff --git a/dist/esm/types/plugins/dexie.d.js.map b/dist/esm/types/plugins/dexie.d.js.map deleted file mode 100644 index 14bfc0ee657..00000000000 --- a/dist/esm/types/plugins/dexie.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"dexie.d.js","names":[],"sources":["../../../../src/types/plugins/dexie.d.ts"],"sourcesContent":["import type {\n Dexie,\n DexieOptions,\n Table as DexieTable\n} from 'dexie';\n\nexport type DexieSettings = DexieOptions;\n\n/**\n * The internals is a Promise that resolves\n * when the database has fully opened\n * and Dexie.on.ready was called\n * @link https://dexie.org/docs/Dexie/Dexie.on.ready\n *\n */\nexport type DexieStorageInternals = Promise<{\n dexieDb: Dexie;\n /**\n * Contains all normal documents. 
Deleted ones and non-deleted ones.\n */\n dexieTable: DexieTable;\n // contains the attachments data\n dexieAttachmentsTable: DexieTable;\n\n // these must be transformed because indexeddb does not allow boolean indexing\n booleanIndexes: string[];\n}>;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/plugins/local-documents.d.js b/dist/esm/types/plugins/local-documents.d.js deleted file mode 100644 index b5db4b1183e..00000000000 --- a/dist/esm/types/plugins/local-documents.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=local-documents.d.js.map \ No newline at end of file diff --git a/dist/esm/types/plugins/local-documents.d.js.map b/dist/esm/types/plugins/local-documents.d.js.map deleted file mode 100644 index 1dafe553ff7..00000000000 --- a/dist/esm/types/plugins/local-documents.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"local-documents.d.js","names":[],"sources":["../../../../src/types/plugins/local-documents.d.ts"],"sourcesContent":["import type { Observable } from 'rxjs';\nimport type { DocumentCache } from '../../doc-cache.d.ts';\nimport type { IncrementalWriteQueue } from '../../incremental-write.d.ts';\nimport type { RxCollection } from '../rx-collection.d.ts';\nimport type { RxDatabase } from '../rx-database.d.ts';\nimport type { RxDocumentBase } from '../rx-document.d.ts';\nimport type { RxStorageInstance } from '../rx-storage.interface.d.ts';\nimport type { Override } from '../util.d.ts';\n\nexport type LocalDocumentParent = RxDatabase | RxCollection;\nexport type LocalDocumentState = {\n database: RxDatabase;\n parent: LocalDocumentParent;\n storageInstance: RxStorageInstance;\n docCache: DocumentCache;\n incrementalWriteQueue: IncrementalWriteQueue;\n};\nexport type RxLocalDocumentData<\n Data = {\n // local documents are schemaless and contain any data\n [key: string]: any;\n }\n> = {\n id: string;\n data: Data;\n};\n\ndeclare type LocalDocumentModifyFunction = 
(\n doc: Data,\n rxLocalDocument: RxLocalDocument\n) => Data | Promise;\n\n\nexport declare type RxLocalDocument = Override<\n RxDocumentBase, {}, Reactivity>,\n {\n readonly parent: Parent;\n isLocal(): true;\n\n /**\n * Because local documents store their relevant data inside of the 'data' property,\n * the incremental mutation methods are changed a bit to only allow to change parts of the data property.\n */\n incrementalModify(mutationFunction: LocalDocumentModifyFunction): Promise>;\n incrementalPatch(patch: Partial): Promise>;\n\n $: Observable>;\n }\n>;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/plugins/lokijs.d.js b/dist/esm/types/plugins/lokijs.d.js deleted file mode 100644 index 5d83b72b5b3..00000000000 --- a/dist/esm/types/plugins/lokijs.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=lokijs.d.js.map \ No newline at end of file diff --git a/dist/esm/types/plugins/lokijs.d.js.map b/dist/esm/types/plugins/lokijs.d.js.map deleted file mode 100644 index 0279bb5299d..00000000000 --- a/dist/esm/types/plugins/lokijs.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"lokijs.d.js","names":[],"sources":["../../../../src/types/plugins/lokijs.d.ts"],"sourcesContent":["import type { LeaderElector } from 'broadcast-channel';\nimport type { AddReturn } from 'unload';\nimport type { LokiSaveQueue } from '../../plugins/storage-lokijs/loki-save-queue.ts';\n\nexport type LokiDatabaseSettings = any;\n\nexport type LokiCollectionSettings = Partial;\n\nexport type LokiSettings = {\n database?: LokiDatabaseSettings;\n collection?: LokiCollectionSettings;\n};\n\nexport type LokiStorageInternals = {\n leaderElector?: LeaderElector;\n localState?: Promise;\n};\n\nexport type LokiRemoteRequestBroadcastMessage = {\n response: false;\n type: string;\n databaseName: string;\n collectionName: string;\n operation: string;\n params: any[];\n requestId: string;\n};\n\nexport type 
LokiRemoteResponseBroadcastMessage = {\n response: true;\n type: string;\n databaseName: string;\n collectionName: string;\n requestId: string;\n result: any | any[];\n // if true, the result property will contain an error state\n isError: boolean;\n};\n\nexport type LokiDatabaseState = {\n database: any;\n databaseSettings: LokiDatabaseSettings;\n saveQueue: LokiSaveQueue;\n\n // all known collections of the database\n collections: {\n [collectionName: string]: any;\n };\n\n /**\n * Registered unload handlers\n * so we can remove them on close.\n */\n unloads: AddReturn[];\n};\n\nexport type LokiLocalDatabaseState = {\n databaseState: LokiDatabaseState;\n collection: any;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/plugins/migration.d.js b/dist/esm/types/plugins/migration.d.js deleted file mode 100644 index 6f04103ef00..00000000000 --- a/dist/esm/types/plugins/migration.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=migration.d.js.map \ No newline at end of file diff --git a/dist/esm/types/plugins/migration.d.js.map b/dist/esm/types/plugins/migration.d.js.map deleted file mode 100644 index b7e6d0d567d..00000000000 --- a/dist/esm/types/plugins/migration.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"migration.d.js","names":[],"sources":["../../../../src/types/plugins/migration.d.ts"],"sourcesContent":["import type {\n WithAttachments\n} from '../couchdb.d.ts';\nimport type { RxCollection } from '../rx-collection.d.ts';\nimport type { MaybePromise } from '../util.d.ts';\n\nexport type MigrationStrategy = (\n oldDocumentData: WithAttachments,\n collection: RxCollection\n) => MaybePromise | null>;\n\nexport type MigrationStrategies = {\n [toVersion: number]: MigrationStrategy;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/plugins/reactivity.d.js b/dist/esm/types/plugins/reactivity.d.js deleted file mode 100644 index 
3c50a37fb7f..00000000000 --- a/dist/esm/types/plugins/reactivity.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=reactivity.d.js.map \ No newline at end of file diff --git a/dist/esm/types/plugins/reactivity.d.js.map b/dist/esm/types/plugins/reactivity.d.js.map deleted file mode 100644 index 59ade115252..00000000000 --- a/dist/esm/types/plugins/reactivity.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"reactivity.d.js","names":[],"sources":["../../../../src/types/plugins/reactivity.d.ts"],"sourcesContent":["import type { Observable } from 'rxjs';\nimport type { RxDatabase } from '../rx-database';\n\nexport interface RxReactivityFactory {\n fromObservable(\n obs: Observable,\n initialValue: InitData,\n rxDatabase: RxDatabase\n ): Reactivity; // TODO must use generic data like Reactivity\n}\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/plugins/replication-graphql.d.js b/dist/esm/types/plugins/replication-graphql.d.js deleted file mode 100644 index 63bd570ab32..00000000000 --- a/dist/esm/types/plugins/replication-graphql.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=replication-graphql.d.js.map \ No newline at end of file diff --git a/dist/esm/types/plugins/replication-graphql.d.js.map b/dist/esm/types/plugins/replication-graphql.d.js.map deleted file mode 100644 index e4cdcd0c707..00000000000 --- a/dist/esm/types/plugins/replication-graphql.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"replication-graphql.d.js","names":[],"sources":["../../../../src/types/plugins/replication-graphql.d.ts"],"sourcesContent":["import { RxReplicationWriteToMasterRow } from '../replication-protocol.ts';\nimport { ById, MaybePromise } from '../util.ts';\nimport {\n ReplicationOptions,\n ReplicationPullHandlerResult,\n ReplicationPullOptions,\n ReplicationPushHandlerResult,\n ReplicationPushOptions\n} from './replication.ts';\n\nexport interface 
RxGraphQLReplicationQueryBuilderResponseObject {\n query: string;\n operationName?: string;\n variables: any;\n}\n\nexport type RxGraphQLReplicationClientState = {\n headers: ById;\n credentials: RequestCredentials | undefined;\n};\n\nexport type RxGraphQLReplicationQueryBuilderResponse =\n RxGraphQLReplicationQueryBuilderResponseObject |\n Promise;\nexport type RxGraphQLReplicationPushQueryBuilder = (\n // typed 'any' because the data might be modified by the push.modifier.\n rows: RxReplicationWriteToMasterRow[]\n) => RxGraphQLReplicationQueryBuilderResponse;\n\n\nexport type RxGraphQLReplicationPullQueryBuilder = (\n latestPulledCheckpoint: CheckpointType | undefined,\n limit: number\n) => RxGraphQLReplicationQueryBuilderResponse;\nexport type GraphQLSyncPullOptions = Omit<\n ReplicationPullOptions,\n 'handler' | 'stream$'\n> & {\n queryBuilder: RxGraphQLReplicationPullQueryBuilder;\n streamQueryBuilder?: RxGraphQLReplicationPullStreamQueryBuilder;\n dataPath?: string;\n responseModifier?: RxGraphQLPullResponseModifier;\n includeWsHeaders?: boolean;\n};\n\nexport type RxGraphQLPullResponseModifier = (\n // the exact response that was returned from the server\n plainResponse: ReplicationPullHandlerResult | any,\n // either 'handler' if it came from the pull.handler, or 'stream' if it came from the pull.stream\n origin: 'handler' | 'stream',\n requestCheckpoint?: CheckpointType\n) => MaybePromise>;\n\nexport type RxGraphQLPushResponseModifier = (\n // the exact response that was returned from the server\n plainResponse: ReplicationPushHandlerResult | any,\n) => MaybePromise>;\n\nexport type RxGraphQLReplicationPullStreamQueryBuilder = (headers: { [k: string]: string; }) => RxGraphQLReplicationQueryBuilderResponse;\n\nexport type GraphQLSyncPushOptions = Omit<\n ReplicationPushOptions,\n 'handler'\n> & {\n queryBuilder: RxGraphQLReplicationPushQueryBuilder;\n dataPath?: string;\n responseModifier?: RxGraphQLPushResponseModifier;\n};\n\nexport type GraphQLServerUrl 
= {\n http?: string;\n ws?: string;\n};\n\nexport type SyncOptionsGraphQL = Omit<\n ReplicationOptions,\n 'pull' | 'push'\n> & {\n url: GraphQLServerUrl;\n fetch?: WindowOrWorkerGlobalScope['fetch'];\n headers?: { [k: string]: string; }; // send with all requests to the endpoint\n credentials?: RequestCredentials;\n pull?: GraphQLSyncPullOptions;\n push?: GraphQLSyncPushOptions;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/plugins/replication.d.js b/dist/esm/types/plugins/replication.d.js deleted file mode 100644 index 8fd290b116f..00000000000 --- a/dist/esm/types/plugins/replication.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=replication.d.js.map \ No newline at end of file diff --git a/dist/esm/types/plugins/replication.d.js.map b/dist/esm/types/plugins/replication.d.js.map deleted file mode 100644 index ab7122cbc8f..00000000000 --- a/dist/esm/types/plugins/replication.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"replication.d.js","names":[],"sources":["../../../../src/types/plugins/replication.d.ts"],"sourcesContent":["import { Observable } from 'rxjs';\nimport type {\n InternalStoreDocType,\n MaybePromise,\n RxCollection,\n RxDocumentData,\n RxReplicationPullStreamItem,\n RxReplicationWriteToMasterRow,\n WithDeleted\n} from '../../types/index.d.ts';\n\n\nexport type InternalStoreReplicationPushDocType = InternalStoreDocType<{\n checkpoint: any;\n}>;\nexport type InternalStoreReplicationPullDocType = InternalStoreDocType<{\n lastPulledDoc: RxDocumentData;\n}>;\n\nexport type ReplicationPullHandlerResult = {\n checkpoint: CheckpointType | null;\n documents: WithDeleted[];\n};\n\nexport type ReplicationPushHandlerResult = RxDocType[];\n\nexport type ReplicationPullHandler = (\n lastPulledCheckpoint: CheckpointType | undefined,\n batchSize: number\n) => Promise>;\nexport type ReplicationPullOptions = {\n /**\n * A handler that pulls the new remote changes\n * from 
the remote actor.\n */\n handler: ReplicationPullHandler;\n\n\n /**\n * An observable that streams all document changes\n * that are happening on the backend.\n * Emits an document bulk together with the latest checkpoint of these documents.\n * Also can emit a 'RESYNC' event when the client was offline and is online again.\n *\n * Not required for non-live replication.\n */\n stream$?: Observable>;\n\n /**\n * Amount of documents that the remote will send in one request.\n * If the response contains less then [batchSize] documents,\n * RxDB will assume there are no more changes on the backend\n * that are not replicated.\n * [default=100]\n */\n batchSize?: number;\n\n /**\n * A modifier that runs on all documents that are pulled,\n * before they are used by RxDB.\n * - the ones from the pull handler\n * - the ones from the pull stream\n */\n modifier?: (docData: any) => MaybePromise>;\n\n /**\n * If set, the push replication\n * will start from the given checkpoint.\n */\n initialCheckpoint?: any;\n};\n\n/**\n * Gets the new write rows.\n * Returns the current master state of all conflicting writes,\n * so that they can be resolved on the client.\n */\nexport type ReplicationPushHandler = (\n docs: RxReplicationWriteToMasterRow[]\n) => Promise[]>;\nexport type ReplicationPushOptions = {\n /**\n * A handler that sends the new local changes\n * to the remote actor.\n * On error, all documents are send again at later time.\n */\n handler: ReplicationPushHandler;\n\n\n /**\n * A modifier that runs on all pushed documents before\n * they are send into the push handler.\n */\n modifier?: (docData: WithDeleted) => MaybePromise;\n\n /**\n * How many local changes to process at once.\n */\n batchSize?: number;\n\n /**\n * If set, the push replication\n * will start from the given checkpoint.\n */\n initialCheckpoint?: any;\n};\n\n\nexport type ReplicationOptions = {\n /**\n * An id for the replication to identify it\n * and so that RxDB is able to resume the replication 
on app reload.\n * If you replicate with a remote server, it is recommended to put the\n * server url into the replicationIdentifier.\n * Like 'my-rest-replication-to-https://example.com/api/sync'\n */\n replicationIdentifier: string;\n collection: RxCollection;\n /**\n * Define a custom property that is used\n * to flag a document as being deleted.\n * @default '_deleted'\n */\n deletedField?: '_deleted' | string;\n pull?: ReplicationPullOptions;\n push?: ReplicationPushOptions;\n /**\n * By default it will do an ongoing realtime replication.\n * By settings live: false the replication will run once until the local state\n * is in sync with the remote state, then it will cancel itself.\n * @default true\n */\n live?: boolean;\n /**\n * Time in milliseconds after when a failed backend request\n * has to be retried.\n * This time will be skipped if a offline->online switch is detected\n * via `navigator.onLine`\n * @default 5000\n */\n retryTime?: number;\n /**\n * When multiInstance is `true`, like when you use RxDB in multiple browser tabs,\n * the replication should always run in only one of the open browser tabs.\n * If waitForLeadership is `true`, it will wait until the current instance is leader.\n * If waitForLeadership is `false`, it will start replicating, even if it is not leader.\n * @default true\n */\n waitForLeadership?: boolean;\n /**\n * If this is set to `false`,\n * the replication will not start automatically\n * but will wait for `replicationState.start()` being called.\n * @default true\n */\n autoStart?: boolean;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/plugins/state.d.js b/dist/esm/types/plugins/state.d.js deleted file mode 100644 index 2f2520deaf5..00000000000 --- a/dist/esm/types/plugins/state.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=state.d.js.map \ No newline at end of file diff --git a/dist/esm/types/plugins/state.d.js.map 
b/dist/esm/types/plugins/state.d.js.map deleted file mode 100644 index 52fb331215b..00000000000 --- a/dist/esm/types/plugins/state.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"state.d.js","names":[],"sources":["../../../../src/types/plugins/state.d.ts"],"sourcesContent":["import type { RxStateBase } from '../../plugins/state/rx-state';\nimport type { ExtendObservables, ExtendReactivity } from '../rx-document';\n\nexport type RxState = RxStateBase & T & ExtendObservables> & ExtendReactivity, Reactivity>;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/plugins/update.d.js b/dist/esm/types/plugins/update.d.js deleted file mode 100644 index 644ad9b5cfb..00000000000 --- a/dist/esm/types/plugins/update.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=update.d.js.map \ No newline at end of file diff --git a/dist/esm/types/plugins/update.d.js.map b/dist/esm/types/plugins/update.d.js.map deleted file mode 100644 index 3551135b2cc..00000000000 --- a/dist/esm/types/plugins/update.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"update.d.js","names":[],"sources":["../../../../src/types/plugins/update.d.ts"],"sourcesContent":["import type { AnyKeys, AnyObject } from '../util.d.ts';\n\n// import type {\n// UpdateExpression\n// } from 'mingo/updater';\n\n/**\n * We use an own type here, copied from mongoose\n * @link https://github.com/Automattic/mongoose/blob/eb292d2c4cc98ee315f118d6199a83938f06d901/types/index.d.ts#L466\n * TODO when mingo implements a schema-based type for UpdateExpression, we can use that one.\n */\nexport type UpdateQuery = {\n $min?: AnyKeys & AnyObject;\n $max?: AnyKeys & AnyObject;\n $inc?: AnyKeys & AnyObject;\n $set?: AnyKeys & AnyObject;\n $unset?: AnyKeys & AnyObject;\n $push?: AnyKeys & AnyObject;\n $addToSet?: AnyKeys & AnyObject;\n $pop?: AnyKeys & AnyObject;\n $pullAll?: AnyKeys & AnyObject;\n $rename?: Record;\n};\n"],"mappings":"","ignoreList":[]} 
\ No newline at end of file diff --git a/dist/esm/types/query-planner.d.js b/dist/esm/types/query-planner.d.js deleted file mode 100644 index 5e45db52fa5..00000000000 --- a/dist/esm/types/query-planner.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=query-planner.d.js.map \ No newline at end of file diff --git a/dist/esm/types/query-planner.d.js.map b/dist/esm/types/query-planner.d.js.map deleted file mode 100644 index 7dd18f0ecfe..00000000000 --- a/dist/esm/types/query-planner.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"query-planner.d.js","names":[],"sources":["../../../src/types/query-planner.d.ts"],"sourcesContent":["export type RxQueryPlanKey = string | number | undefined;\n\nexport type RxQueryPlanerOpts = {\n startKey: RxQueryPlanKey;\n endKey: RxQueryPlanKey;\n /**\n * True if the first matching document\n * must also be included into the result set.\n */\n inclusiveStart: boolean;\n /**\n * True if the last matching document\n * must also be included into the result set.\n */\n inclusiveEnd: boolean;\n};\n\nexport type RxQueryPlan = {\n index: string[];\n /**\n * If the index does not match the sort params,\n * we have to resort the query results manually\n * after fetching them from the index.\n */\n sortSatisfiedByIndex: boolean;\n\n /**\n * If the whole selector matching is satisfied\n * by the index, we do not have to run a does-document-data-match-query\n * stuff.\n */\n selectorSatisfiedByIndex: boolean;\n\n /**\n * TODO add a flag that determines\n * if we have to run the selector matching on all results\n * or if the used index anyway matches ALL operators.\n */\n\n startKeys: RxQueryPlanKey[];\n endKeys: RxQueryPlanKey[];\n /**\n * True if the first matching document\n * must also be included into the result set.\n */\n inclusiveStart: boolean;\n /**\n * True if the last matching document\n * must also be included into the result set.\n */\n inclusiveEnd: 
boolean;\n\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/replication-protocol.d.js b/dist/esm/types/replication-protocol.d.js deleted file mode 100644 index 15c8d746f47..00000000000 --- a/dist/esm/types/replication-protocol.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=replication-protocol.d.js.map \ No newline at end of file diff --git a/dist/esm/types/replication-protocol.d.js.map b/dist/esm/types/replication-protocol.d.js.map deleted file mode 100644 index 431820ecb37..00000000000 --- a/dist/esm/types/replication-protocol.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"replication-protocol.d.js","names":[],"sources":["../../../src/types/replication-protocol.d.ts"],"sourcesContent":["import { BehaviorSubject, Observable, Subject } from 'rxjs';\nimport type {\n RxConflictHandler,\n RxConflictHandlerInput,\n RxConflictHandlerOutput\n} from './conflict-handling.d.ts';\nimport type { RxError, RxTypeError } from './rx-error.d.ts';\nimport type {\n BulkWriteRow,\n RxDocumentData,\n WithDeleted,\n WithDeletedAndAttachments\n} from './rx-storage.d.ts';\nimport type {\n RxStorageInstance\n} from './rx-storage.interface.d.ts';\nimport type { HashFunction } from './util.d.ts';\n\nexport type RxStorageReplicationMeta = {\n\n /**\n * Combined primary key consisting\n * of: [replicationId, itemId, isCheckpoint]\n * so that the same RxStorageInstance\n * can be used for multiple replication states.\n */\n id: string;\n\n /**\n * Either the document primaryKey\n * or the id of the replication checkpoint.\n */\n itemId: string;\n\n /**\n * True if the doc data is about a checkpoint,\n * False if it is about a document state from the master.\n * Stored as a string so it can be used\n * in the combined primary key 'id'\n */\n isCheckpoint: '0' | '1';\n checkpointData?: CheckpointType;\n\n /**\n * the document state of the master\n * only set if not checkpoint.\n */\n docData?: RxDocType | 
RxDocumentData | any;\n /**\n * If the current assumed master was written while\n * resolving a conflict, this field contains\n * the revision of the conflict-solution that\n * is stored in the forkInstance.\n */\n isResolvedConflict?: string;\n};\n\nexport type RxReplicationWriteToMasterRow = {\n assumedMasterState?: WithDeletedAndAttachments;\n newDocumentState: WithDeletedAndAttachments;\n};\n\n\nexport type DocumentsWithCheckpoint = {\n documents: WithDeletedAndAttachments[];\n checkpoint: CheckpointType;\n};\n\n\nexport type RxReplicationPullStreamItem = DocumentsWithCheckpoint |\n /**\n * Emit this when the masterChangeStream$ might have missed out\n * some events because the fork lost the connection to the master.\n * Like when the user went offline and reconnects.\n */\n 'RESYNC';\n\n/**\n * The replication handler contains all logic\n * that is required by the replication protocol\n * to interact with the master instance.\n * This is an abstraction so that we can use different\n * handlers for GraphQL, REST or any other transportation layer.\n * Even a RxStorageInstance can be wrapped in a way to represent a replication handler.\n *\n * The RxStorage instance of the master branch that is\n * replicated with the fork branch.\n * The replication algorithm is made to make\n * as less writes on the master as possible.\n * The master instance is always 'the truth' which\n * does never contain conflicting document states.\n * All conflicts are handled on the fork branch\n * before being replicated to the master.\n */\nexport type RxReplicationHandler = {\n masterChangeStream$: Observable>;\n masterChangesSince(\n checkpoint: MasterCheckpointType,\n batchSize: number\n ): Promise>;\n /**\n * Writes the fork changes to the master.\n * Only returns the conflicts if there are any.\n * (otherwise returns an empty array.)\n */\n masterWrite(\n rows: RxReplicationWriteToMasterRow[]\n ): Promise[]>;\n};\n\nexport type RxStorageInstanceReplicationInput = {\n /**\n * A 
string that uniquely identifies\n * the replication.\n * Ensures that checkpoint are not\n * mixed with other replications.\n */\n identifier: string;\n pullBatchSize: number;\n pushBatchSize: number;\n replicationHandler: RxReplicationHandler;\n conflictHandler: RxConflictHandler;\n\n // can be set to also replicate the _meta field of the document.\n keepMeta?: boolean;\n\n /**\n * The fork is the one that contains the forked chain of document writes.\n * All conflicts are solved on the fork and only resolved correct document data\n * is written back to the parent.\n */\n forkInstance: RxStorageInstance;\n\n /**\n * The replication needs to store some meta data\n * for documents to know which state is at the master\n * and how/if it diverges from the fork.\n * In the past this was stored in the _meta field of\n * the forkInstance documents but that was not a good design decision\n * because it required additional writes on the forkInstance\n * to know which documents have been upstream replicated\n * to not cause conflicts.\n * Using the metaInstance instead leads to better overall performance\n * because RxDB will not re-emit query results or document state\n * when replication meta data is written.\n *\n * In addition to per-document meta data,\n * the replication checkpoints are also stored in this instance.\n *\n */\n metaInstance: RxStorageInstance, any, any>;\n\n /**\n * When a write happens to the fork,\n * normally the replication will directly try to persist.\n *\n * For many use cases, it is better to await the next event loop tick\n * or to wait until the RxDatabase is idle or requestIdleCallback() calls\n * to ensure the CPU is idle.\n * This can improve performance because the persistence will not affect UI\n * renders.\n *\n * But: The longer you wait here, the higher is the risk of losing fork\n * writes when the replication is destroyed unexpected.\n */\n waitBeforePersist?: () => Promise;\n\n hashFunction: HashFunction;\n\n initialCheckpoint?: {\n 
upstream?: any;\n downstream?: any;\n };\n};\n\nexport type RxStorageInstanceReplicationState = {\n // store the primaryPath here for better reuse and performance.\n primaryPath: string;\n hasAttachments: boolean;\n input: RxStorageInstanceReplicationInput;\n\n events: {\n /**\n * Streams all document writes that have successfully\n * been written in one direction.\n */\n processed: {\n up: Subject>;\n down: Subject>;\n };\n resolvedConflicts: Subject<{\n input: RxConflictHandlerInput;\n output: RxConflictHandlerOutput;\n }>;\n /**\n * Contains the cancel state.\n * Emit true here to cancel the replication.\n */\n canceled: BehaviorSubject;\n /**\n * Contains true if the replication is doing something\n * at this point in time.\n * If this is false, it means that the replication\n * is idle AND in sync.\n */\n active: {\n [direction in RxStorageReplicationDirection]: BehaviorSubject;\n };\n /**\n * All errors that would otherwise be unhandled,\n * get emitted here.\n */\n error: Subject;\n };\n\n\n /**\n * Contains counters that can be used in tests\n * or to debug problems.\n */\n stats: {\n down: {\n addNewTask: number;\n downstreamResyncOnce: number;\n downstreamProcessChanges: number;\n masterChangeStreamEmit: number;\n persistFromMaster: number;\n };\n up: {\n upstreamInitialSync: number;\n forkChangeStreamEmit: number;\n processTasks: number;\n persistToMaster: number;\n persistToMasterHadConflicts: number;\n persistToMasterConflictWrites: number;\n };\n };\n\n /**\n * Used in checkpoints and ._meta fields\n * to ensure we do not mix up meta data of\n * different replications.\n * We have to use the promise because the key is hashed which runs async.\n */\n checkpointKey: Promise;\n\n /**\n * Storage.bulkWrites() that are initialized from the\n * downstream, get this flag as context-param\n * so that the emitted event bulk can be identified\n * to be sourced from the downstream and it will not try\n * to upstream these documents again.\n */\n 
downstreamBulkWriteFlag: Promise;\n\n /**\n * Tracks if the streams have been in sync\n * for at least one time.\n */\n firstSyncDone: {\n [direction in RxStorageReplicationDirection]: BehaviorSubject;\n };\n\n /**\n * Can be used to detect if the replication is doing something\n * or if it is in an idle state.\n */\n streamQueue: {\n [direction in RxStorageReplicationDirection]: Promise;\n };\n\n checkpointQueue: Promise;\n\n /**\n * For better performance we store the last known checkpoint\n * document so that we can likely do checkpoint storing without\n * conflicts.\n */\n lastCheckpointDoc: {\n [direction in RxStorageReplicationDirection]?: RxDocumentData>;\n };\n};\n\nexport type RxStorageReplicationDirection = 'up' | 'down';\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/rx-attachment.d.js b/dist/esm/types/rx-attachment.d.js deleted file mode 100644 index 3e8ce690c72..00000000000 --- a/dist/esm/types/rx-attachment.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=rx-attachment.d.js.map \ No newline at end of file diff --git a/dist/esm/types/rx-attachment.d.js.map b/dist/esm/types/rx-attachment.d.js.map deleted file mode 100644 index a1e235e40c0..00000000000 --- a/dist/esm/types/rx-attachment.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-attachment.d.js","names":[],"sources":["../../../src/types/rx-attachment.d.ts"],"sourcesContent":["import type {\n RxDocument\n} from './rx-document.d.ts';\n\ndeclare type Buffer = any;\n\nexport type RxAttachmentCreator = {\n id: string;\n /**\n * Content type like 'plain/text'\n */\n type: string;\n /**\n * The data of the attachment.\n */\n data: Blob;\n};\n\nexport declare class RxAttachment {\n readonly doc: RxDocument;\n readonly id: string;\n readonly type: string;\n readonly length: number;\n readonly digest: string;\n readonly rev: string;\n\n remove(): Promise;\n getData(): Promise;\n getStringData(): 
Promise;\n}\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/rx-change-event.d.js b/dist/esm/types/rx-change-event.d.js deleted file mode 100644 index ecfe821a68c..00000000000 --- a/dist/esm/types/rx-change-event.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=rx-change-event.d.js.map \ No newline at end of file diff --git a/dist/esm/types/rx-change-event.d.js.map b/dist/esm/types/rx-change-event.d.js.map deleted file mode 100644 index 43dfd1a2e63..00000000000 --- a/dist/esm/types/rx-change-event.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-change-event.d.js","names":[],"sources":["../../../src/types/rx-change-event.d.ts"],"sourcesContent":["import type {\n EventBulk,\n RxDocumentData\n} from './rx-storage.d.ts';\n\n\nexport type RxChangeEventBase = {\n operation: 'INSERT' | 'UPDATE' | 'DELETE';\n\n readonly documentId: string;\n\n // optional, does not exist on changes to localdocs of the database\n readonly collectionName?: string;\n\n // true if the event is about a local document, false if not.\n readonly isLocal: boolean;\n\n documentData: RxDocumentData;\n};\n\nexport type RxChangeEventInsert = RxChangeEventBase & {\n operation: 'INSERT';\n previousDocumentData: undefined;\n};\n\nexport type RxChangeEventUpdate = RxChangeEventBase & {\n operation: 'UPDATE';\n previousDocumentData: RxDocumentData;\n};\n\nexport type RxChangeEventDelete = RxChangeEventBase & {\n operation: 'DELETE';\n previousDocumentData: RxDocumentData;\n};\n\nexport type RxChangeEvent = RxChangeEventInsert | RxChangeEventUpdate | RxChangeEventDelete;\n\n/**\n * Internally, all events are processed via bulks\n * to save performance when sending them over a transport layer\n * or de-duplicating them.\n */\nexport type RxChangeEventBulk = EventBulk, any> & {\n // optional, not given for changes to local documents of a RxDatabase.\n collectionName?: string;\n /**\n * Token of the database instance that 
created the events.\n * Used to determine if the events came from another instance over the BroadcastChannel.\n */\n databaseToken: string;\n /**\n * The storageToken of the RxDatabase that created the events.\n * Used to ensure we do not process events of other RxDatabases.\n */\n storageToken: string;\n /**\n * If true, the events belong to some internal stuff like from plugins.\n * Internal events are not emitted to the outside over the .$ Observables.\n */\n internal: boolean;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/rx-collection.d.js b/dist/esm/types/rx-collection.d.js deleted file mode 100644 index d5b4bf986dd..00000000000 --- a/dist/esm/types/rx-collection.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=rx-collection.d.js.map \ No newline at end of file diff --git a/dist/esm/types/rx-collection.d.js.map b/dist/esm/types/rx-collection.d.js.map deleted file mode 100644 index 87c790d0de9..00000000000 --- a/dist/esm/types/rx-collection.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-collection.d.js","names":[],"sources":["../../../src/types/rx-collection.d.ts"],"sourcesContent":["import type {\n RxJsonSchema,\n RxDocument,\n MigrationStrategies,\n RxConflictHandler\n} from './index.d.ts';\nimport type {\n RxCollectionBase\n} from '../rx-collection.d.ts';\nimport type { QueryCache } from '../query-cache.d.ts';\nimport type { RxLocalDocumentMutation } from './rx-database.d.ts';\n\nexport interface KeyFunctionMap {\n [key: string]: Function;\n}\nexport interface NumberFunctionMap {\n [key: number]: Function;\n}\n\n\n/**\n * Params to create a new collection.\n * Notice the name of the collection is set one level higher\n * when calling addCollections()\n */\nexport type RxCollectionCreator = {\n schema: RxJsonSchema;\n instanceCreationOptions?: any;\n migrationStrategies?: MigrationStrategies;\n autoMigrate?: boolean;\n statics?: KeyFunctionMap;\n methods?: 
KeyFunctionMap;\n attachments?: KeyFunctionMap;\n options?: any;\n /**\n * Set this to true if you want to store local documents\n * in the RxCollection instance.\n */\n localDocuments?: boolean;\n cacheReplacementPolicy?: RxCacheReplacementPolicy;\n\n /**\n * Depending on which plugins or storage is used,\n * the RxCollection might need a way to resolve conflicts\n * which is done by this conflict handler.\n * If no conflict handler is provided, a master-always-wins handler\n * will be used as default\n */\n conflictHandler?: RxConflictHandler;\n};\n\nexport type RxCacheReplacementPolicy = (collection: RxCollection, queryCache: QueryCache) => void;\n\nexport type RxCollectionHookCallback<\n RxDocumentType,\n OrmMethods,\n Reactivity\n> = (\n data: RxDocumentType,\n instance: RxDocument\n) => void | Promise | any;\nexport type RxCollectionHookNoInstance = (data: RxDocumentType) => void | Promise | any;\nexport type RxCollectionHookCallbackNonAsync = (\n data: RxDocumentType,\n instance: RxDocument\n) => void | any;\nexport type RxCollectionHookNoInstanceCallback<\n RxDocumentType,\n OrmMethods,\n Reactivity\n> = (\n data: RxDocumentType,\n instance: RxCollection\n) => Promise | void | any;\n\nexport type RxCollection<\n RxDocumentType = any,\n OrmMethods = {},\n StaticMethods = {},\n InstanceCreationOptions = {},\n Reactivity = unknown\n> = StaticMethods &\n RxCollectionBase &\n RxCollectionGenerated;\n\nexport interface RxCollectionGenerated extends RxLocalDocumentMutation> {\n\n // HOOKS\n preInsert(fun: RxCollectionHookNoInstanceCallback, parallel: boolean): void;\n preSave(fun: RxCollectionHookCallback, parallel: boolean): void;\n preRemove(fun: RxCollectionHookCallback, parallel: boolean): void;\n postInsert(fun: RxCollectionHookCallback, parallel: boolean): void;\n postSave(fun: RxCollectionHookCallback, parallel: boolean): void;\n postRemove(fun: RxCollectionHookCallback, parallel: boolean): void;\n postCreate(fun: RxCollectionHookCallbackNonAsync): 
void;\n\n // only inMemory-collections\n awaitPersistence(): Promise;\n}\n\n/**\n * Properties are possibly encrypted so type them as any. TODO this is no longer needed.\n */\nexport type RxDumpCollectionAsAny = { [P in keyof T]: any };\n\ninterface RxDumpCollectionBase {\n name: string;\n passwordHash?: string;\n schemaHash: string;\n}\nexport interface RxDumpCollection extends RxDumpCollectionBase {\n docs: RxDocumentType[];\n}\n/**\n * All base properties are typed as any because they can be encrypted.\n */\nexport interface RxDumpCollectionAny extends RxDumpCollectionBase {\n docs: RxDumpCollectionAsAny[];\n}\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/rx-database-internal-store.d.js b/dist/esm/types/rx-database-internal-store.d.js deleted file mode 100644 index 5dd8a67161d..00000000000 --- a/dist/esm/types/rx-database-internal-store.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=rx-database-internal-store.d.js.map \ No newline at end of file diff --git a/dist/esm/types/rx-database-internal-store.d.js.map b/dist/esm/types/rx-database-internal-store.d.js.map deleted file mode 100644 index e78441ef795..00000000000 --- a/dist/esm/types/rx-database-internal-store.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-database-internal-store.d.js","names":[],"sources":["../../../src/types/rx-database-internal-store.d.ts"],"sourcesContent":["import type {\n RxMigrationStatus\n} from '../plugins/migration-schema/index.ts';\nimport type { RxJsonSchema } from './rx-schema.d.ts';\n\nexport type InternalStoreDocType = {\n id: string;\n key: string;\n context: string;\n data: Data;\n};\n\n/**\n * Stores information about the collections.\n * The collection.name is the 'key' value.\n */\nexport type InternalStoreStorageTokenDocType = InternalStoreDocType<{\n rxdbVersion: string;\n token: string;\n instanceToken: string;\n passwordHash?: string;\n}>;\n\n/**\n * Stores information about 
the collections.\n * The collection.name is the 'key' value.\n */\nexport type InternalStoreCollectionDocType = InternalStoreDocType<{\n /**\n * Plain name of the collection\n */\n name: string;\n schema: RxJsonSchema;\n schemaHash: string;\n version: number;\n\n /**\n * Storages that are connected to this collection\n * so that when the collection is removed,\n * these storages must also be removed.\n * For example the replication meta storage\n * must be reset when the collection is removed.\n */\n connectedStorages: {\n collectionName: string;\n schema: RxJsonSchema;\n }[];\n\n /**\n * Contains the migration status\n * only if a migration has been started.\n */\n migrationStatus?: RxMigrationStatus;\n}>;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/rx-database.d.js b/dist/esm/types/rx-database.d.js deleted file mode 100644 index a66189b52df..00000000000 --- a/dist/esm/types/rx-database.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=rx-database.d.js.map \ No newline at end of file diff --git a/dist/esm/types/rx-database.d.js.map b/dist/esm/types/rx-database.d.js.map deleted file mode 100644 index 75c0ffa8179..00000000000 --- a/dist/esm/types/rx-database.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-database.d.js","names":[],"sources":["../../../src/types/rx-database.d.ts"],"sourcesContent":["import type {\n RxCollection,\n RxDumpCollection,\n RxDumpCollectionAsAny\n} from './rx-collection.d.ts';\nimport type {\n RxDatabaseBase\n} from '../rx-database.d.ts';\nimport { Observable } from 'rxjs';\nimport type { RxStorage } from './rx-storage.interface.d.ts';\nimport type { RxLocalDocument } from './plugins/local-documents.d.ts';\nimport type { RxCleanupPolicy } from './plugins/cleanup.d.ts';\nimport type { ById, HashFunction } from './util.d.ts';\nimport type { RxReactivityFactory } from './plugins/reactivity.d.ts';\n\nexport interface RxDatabaseCreator {\n storage: 
RxStorage;\n instanceCreationOptions?: InstanceCreationOptions;\n name: string;\n password?: string | any;\n multiInstance?: boolean;\n eventReduce?: boolean;\n ignoreDuplicate?: boolean;\n options?: any;\n cleanupPolicy?: Partial;\n /**\n * Set this to true if you want to store local documents\n * in the RxDatabase instance.\n */\n localDocuments?: boolean;\n\n /**\n * Hash method used to hash strings and json-stringified objects.\n * This hash does not have to be cryptographically secure,\n * but it is very important that is does have not create\n * collisions.\n * Default is the sha256 from the ohash library\n * @link https://www.npmjs.com/package/ohash\n */\n hashFunction?: HashFunction;\n\n /**\n * By default, count() queries in 'slow' mode are not allowed.\n */\n allowSlowCount?: boolean;\n\n /**\n * Can be used to add a custom reactivity Factory\n * that is used on all getters and values that end with the double $$.\n * For example you can use the signals api of your framework and vuejs ref()\n */\n reactivity?: RxReactivityFactory;\n}\n\nexport type CollectionsOfDatabase = ById;\nexport type RxDatabase<\n Collections = CollectionsOfDatabase,\n Internals = any,\n InstanceCreationOptions = any,\n Reactivity = any\n> = RxDatabaseBase<\n Internals,\n InstanceCreationOptions,\n Collections,\n Reactivity\n> & Collections & RxDatabaseGenerated;\n\n\nexport interface RxLocalDocumentMutation {\n insertLocal(id: string, data: LocalDocType): Promise<\n RxLocalDocument\n >;\n upsertLocal(id: string, data: LocalDocType): Promise<\n RxLocalDocument\n >;\n getLocal(id: string): Promise<\n RxLocalDocument | null\n >;\n getLocal$(id: string): Observable<\n RxLocalDocument | null\n >;\n}\n\nexport interface RxDatabaseGenerated extends RxLocalDocumentMutation> { }\n\n/**\n * Extract the **DocumentType** of a collection.\n */\ntype ExtractDTcol

= P extends RxCollection ? T : { [prop: string]: any; };\n\ninterface RxDumpDatabaseBase {\n instanceToken: string;\n name: string;\n passwordHash: string | null;\n}\nexport interface RxDumpDatabase extends RxDumpDatabaseBase {\n collections: RxDumpCollection>[];\n}\n/**\n * All base properties are typed as any because they can be encrypted.\n */\nexport interface RxDumpDatabaseAny extends RxDumpDatabaseBase {\n collections: RxDumpCollection>>[];\n}\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/rx-document.d.js b/dist/esm/types/rx-document.d.js deleted file mode 100644 index 614c0e3902c..00000000000 --- a/dist/esm/types/rx-document.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=rx-document.d.js.map \ No newline at end of file diff --git a/dist/esm/types/rx-document.d.js.map b/dist/esm/types/rx-document.d.js.map deleted file mode 100644 index eb6baf2addd..00000000000 --- a/dist/esm/types/rx-document.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-document.d.js","names":[],"sources":["../../../src/types/rx-document.d.ts"],"sourcesContent":["import {\n Observable\n} from 'rxjs';\n\nimport type {\n RxCollection,\n} from './rx-collection.d.ts';\nimport type {\n RxAttachment,\n RxAttachmentCreator\n} from './rx-attachment.d.ts';\nimport type { RxDocumentData, WithDeleted } from './rx-storage.d.ts';\nimport type { RxChangeEvent } from './rx-change-event.d.ts';\nimport type { DeepReadonly, MaybePromise, PlainJsonValue } from './util.d.ts';\nimport type { UpdateQuery } from './plugins/update.d.ts';\nimport type { CRDTEntry } from './plugins/crdt.d.ts';\n\n\n\nexport type RxDocument = RxDocumentBase<\n RxDocumentType,\n OrmMethods,\n Reactivity\n> & RxDocumentType & OrmMethods & ExtendObservables & ExtendReactivity;\n\n\n/**\n * Extend the base properties by the property$ fields\n * so it knows that RxDocument.age also has RxDocument.age$ which is\n * an observable.\n * TODO how to 
do this for the nested fields?\n */\ntype ExtendObservables = {\n [P in keyof RxDocumentType as `${string & P}$`]: Observable;\n};\n\ntype ExtendReactivity = {\n [P in keyof RxDocumentType as `${string & P}$$`]: Reactivity;\n};\n\n/**\n * The public facing modify update function.\n * It only gets the document parts as input, that\n * are mutateable by the user.\n */\nexport type ModifyFunction = (\n doc: WithDeleted\n) => MaybePromise> | MaybePromise;\n\n/**\n * Meta data that is attached to each document by RxDB.\n */\nexport type RxDocumentMeta = {\n /**\n * Last write time.\n * Unix epoch in milliseconds.\n */\n lwt: number;\n\n /**\n * Any other value can be attached to the _meta data.\n * Mostly done by plugins to mark documents.\n */\n [k: string]: PlainJsonValue;\n};\n\nexport declare interface RxDocumentBase {\n isInstanceOfRxDocument: true;\n collection: RxCollection;\n readonly deleted: boolean;\n\n readonly $: Observable>;\n readonly $$: Reactivity;\n readonly deleted$: Observable;\n readonly deleted$$: Reactivity;\n\n readonly primary: string;\n readonly allAttachments$: Observable[]>;\n\n // internal things\n _data: RxDocumentData;\n primaryPath: string;\n revision: string;\n /**\n * Used to de-duplicate the enriched property objects\n * of the document.\n */\n _propertyCache: Map;\n $emit(cE: RxChangeEvent): void;\n _saveData(newData: any, oldData: any): Promise>;\n // /internal things\n\n // Returns the latest state of the document\n getLatest(): RxDocument;\n\n\n get$(path: string): Observable;\n get$$(path: string): Reactivity;\n get(objPath: string): DeepReadonly;\n populate(objPath: string): Promise | any | null>;\n\n /**\n * mutate the document with a function\n */\n modify(mutationFunction: ModifyFunction, context?: string): Promise>;\n incrementalModify(mutationFunction: ModifyFunction, context?: string): Promise>;\n\n /**\n * patches the given properties\n */\n patch(patch: Partial): Promise>;\n incrementalPatch(patch: Partial): Promise>;\n\n 
update(updateObj: UpdateQuery): Promise>;\n incrementalUpdate(updateObj: UpdateQuery): Promise>;\n\n updateCRDT(updateObj: CRDTEntry | CRDTEntry[]): Promise>;\n\n remove(): Promise>;\n incrementalRemove(): Promise>;\n\n // only for temporary documents\n set(objPath: string, value: any): RxDocument;\n save(): Promise;\n\n // attachments\n putAttachment(\n creator: RxAttachmentCreator\n ): Promise>;\n getAttachment(id: string): RxAttachment | null;\n allAttachments(): RxAttachment[];\n\n toJSON(withRevAndAttachments: true): DeepReadonly>;\n toJSON(withRevAndAttachments?: false): DeepReadonly;\n\n toMutableJSON(withRevAndAttachments: true): RxDocumentData;\n toMutableJSON(withRevAndAttachments?: false): RxDocType;\n\n destroy(): void;\n}\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/rx-error.d.js b/dist/esm/types/rx-error.d.js deleted file mode 100644 index d8398b6148a..00000000000 --- a/dist/esm/types/rx-error.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=rx-error.d.js.map \ No newline at end of file diff --git a/dist/esm/types/rx-error.d.js.map b/dist/esm/types/rx-error.d.js.map deleted file mode 100644 index e3af896cace..00000000000 --- a/dist/esm/types/rx-error.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-error.d.js","names":[],"sources":["../../../src/types/rx-error.d.ts"],"sourcesContent":["import type { RxJsonSchema } from './rx-schema.d.ts';\nimport {\n RxSchema\n} from '../rx-schema.ts';\nimport type { RxPlugin } from './rx-plugin.d.ts';\nimport { ERROR_MESSAGES } from '../plugins/dev-mode/error-messages.ts';\nimport type { RxReplicationWriteToMasterRow } from './replication-protocol.d.ts';\nimport type { BulkWriteRow, RxDocumentData } from './rx-storage.d.ts';\n\ntype KeyOf = Extract;\nexport type RxErrorKey = KeyOf;\n\nexport type {\n RxError,\n RxTypeError\n} from '../rx-error.ts';\n\n/**\n * this lists all possible parameters\n */\nexport interface 
RxErrorParameters {\n readonly error?: PlainJsonError;\n readonly errors?: PlainJsonError[];\n readonly writeError?: RxStorageWriteError;\n readonly schemaPath?: string;\n readonly objPath?: string;\n readonly rootPath?: string;\n readonly childpath?: string;\n readonly obj?: any;\n readonly document?: any;\n readonly schema?: Readonly | RxSchema>;\n readonly schemaObj?: any;\n readonly pluginKey?: string;\n readonly originalDoc?: Readonly;\n readonly finalDoc?: Readonly;\n readonly regex?: string;\n readonly fieldName?: string;\n readonly id?: string;\n readonly data?: any;\n readonly missingCollections?: string[];\n readonly primaryPath?: string;\n readonly primary?: string;\n readonly primaryKey?: string;\n readonly have?: any;\n readonly should?: any;\n readonly name?: string;\n readonly adapter?: any;\n readonly link?: string;\n readonly path?: string;\n readonly value?: any;\n readonly givenName?: string;\n readonly fromVersion?: number;\n readonly toVersion?: number;\n readonly version?: number;\n readonly args?: any;\n readonly opts?: any;\n readonly dataBefore?: any;\n readonly dataAfter?: any;\n readonly pull?: boolean;\n readonly push?: boolean;\n readonly url?: string;\n readonly key?: string;\n readonly queryObj?: any;\n readonly query?: any;\n readonly op?: string;\n readonly skip?: any;\n readonly limit?: any;\n readonly passwordHash?: string;\n readonly existingPasswordHash?: string;\n readonly password?: string | any;\n readonly minPassLength?: number;\n readonly own?: any;\n readonly source?: any;\n readonly method?: any;\n readonly field?: string;\n readonly ref?: string;\n readonly funName?: string;\n readonly functionName?: string;\n readonly schemaHash?: string;\n readonly previousSchema?: Readonly>;\n readonly previousSchemaHash?: string;\n readonly type?: string;\n readonly when?: string;\n readonly parallel?: boolean;\n readonly collection?: any;\n readonly database?: any;\n readonly storage?: string;\n readonly indexes?: Array | 
Readonly>;\n readonly index?: string | string[] | readonly string[];\n readonly plugin?: RxPlugin | any;\n readonly plugins?: Set;\n\n // used in the replication plugin\n\n /**\n * The checkpoint of the response from the last successful\n * pull by the client.\n * Null if there was no pull operation before\n * so that there is no last pulled checkpoint.\n */\n readonly checkpoint?: any;\n /**\n * The documents that failed to be pushed.\n * Typed as 'any' because they might be modified by the push modifier.\n */\n readonly pushRows?: RxReplicationWriteToMasterRow[];\n readonly direction?: 'pull' | 'push';\n\n}\n\n/**\n * Error-Items which are created by the jsonschema-validator\n */\nexport type RxValidationError = {\n readonly field: string;\n readonly message: string;\n};\n\n/**\n * Use to have a transferable error object\n * in plain json instead of a JavaScript Error instance.\n */\nexport type PlainJsonError = {\n name: string;\n message: string;\n rxdb?: true;\n code?: RxErrorKey;\n url?: string;\n extensions?: Record;\n parameters?: RxErrorParameters;\n stack?: string;\n};\n\n\n\n\n\n/**\n * Error that can happen per document when\n * RxStorage.bulkWrite() is called\n */\nexport type RxStorageWriteErrorBase = {\n\n status: number\n | 409 // conflict\n | 422 // schema validation error\n | 510 // attachment data missing\n ;\n\n /**\n * set this property to make it easy\n * to detect if the object is a RxStorageBulkWriteError\n */\n isError: true;\n\n // primary key of the document\n documentId: string;\n\n // the original document data that should have been written.\n writeRow: BulkWriteRow;\n};\n\nexport type RxStorageWriteErrorConflict = RxStorageWriteErrorBase & {\n status: 409;\n /**\n * A conflict error state must contain the\n * document state in the database.\n * This ensures that we can continue resolving a conflict\n * without having to pull the document out of the db first.\n * Is not set if the error happens on an insert.\n */\n documentInDb: 
RxDocumentData;\n};\n\nexport type RxStorageWriteErrorValidation = RxStorageWriteErrorBase & {\n status: 422;\n /**\n * Other properties that give\n * information about the error,\n * for example a schema validation error\n * might contain the exact error from the validator here.\n * Must be plain JSON!\n */\n validationErrors: RxValidationError[];\n};\n\nexport type RxStorageWriteErrorAttachment = RxStorageWriteErrorBase & {\n status: 510;\n attachmentId: string;\n documentInDb?: RxDocumentData;\n};\n\n\nexport type RxStorageWriteError =\n RxStorageWriteErrorConflict |\n RxStorageWriteErrorValidation |\n RxStorageWriteErrorAttachment;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/rx-plugin.d.js b/dist/esm/types/rx-plugin.d.js deleted file mode 100644 index 8ab98e85fa8..00000000000 --- a/dist/esm/types/rx-plugin.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=rx-plugin.d.js.map \ No newline at end of file diff --git a/dist/esm/types/rx-plugin.d.js.map b/dist/esm/types/rx-plugin.d.js.map deleted file mode 100644 index 1625b7a7238..00000000000 --- a/dist/esm/types/rx-plugin.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-plugin.d.js","names":[],"sources":["../../../src/types/rx-plugin.d.ts"],"sourcesContent":["import type {\n RxQuery,\n RxQueryOP,\n MangoQuery\n} from './rx-query.d.ts';\nimport type {\n RxCollection,\n RxCollectionCreator\n} from './rx-collection.d.ts';\nimport type {\n RxStorageInstanceCreationParams\n} from './rx-storage.d.ts';\nimport type {\n DeepReadonly,\n FilledMangoQuery,\n RxDatabase,\n RxDatabaseCreator,\n RxDocument,\n RxStorage,\n RxReplicationWriteToMasterRow,\n WithDeleted,\n RxState,\n BulkWriteRow,\n RxStorageInstance\n} from './index.d.ts';\nimport type { RxSchema } from '../rx-schema.d.ts';\n\nexport type RxPluginPreCreateRxQueryArgs = {\n op: RxQueryOP;\n queryObj: MangoQuery;\n collection: RxCollection;\n};\n\nexport type 
RxPluginPreAddRxPluginArgs = {\n // the plugin that is getting added\n plugin: RxPlugin | any;\n // previous added plugins\n plugins: Set;\n};\n\nexport type RxPluginPrePrepareQueryArgs = {\n rxQuery: RxQuery;\n mangoQuery: FilledMangoQuery;\n};\n\n\n/**\n * Depending on which plugins are used together,\n * it is important that the plugin is able to define if\n * the hooks must be added as first or as last array item.\n * For example the encryption plugin must run encryption\n * before the key-compression changes the fieldnames.\n */\nexport type RxPluginHooks = {\n /**\n * Hook function that is added as first.\n */\n before?: (i: Input) => void;\n /**\n * Hook function that is added as last.\n */\n after?: (i: Input) => void;\n};\n\nexport interface RxPlugin {\n /**\n * A string to uniquely identifies the plugin.\n * Can be used to throw when different versions of the same plugin are used.\n * And also other checks.\n * Use kebab-case.\n */\n readonly name: string;\n\n /**\n * set this to true so RxDB\n * knows that this object in a rxdb plugin\n */\n readonly rxdb: true;\n\n /**\n * Init function where dependent plugins could be added.\n * (optional)\n */\n init?(): any;\n\n prototypes?: {\n RxSchema?: (proto: RxSchema) => void;\n RxDocument?: (proto: RxDocument) => void;\n RxQuery?: (proto: RxQuery) => void;\n RxCollection?: (proto: RxCollection) => void;\n RxDatabase?: (proto: RxDatabase) => void;\n };\n overwritable?: {\n isDevMode?: () => boolean;\n deepFreezeWhenDevMode?: (obj: T) => DeepReadonly;\n validatePassword?: Function;\n checkAdapter?: Function;\n tunnelErrorMessage?: Function;\n };\n hooks?: {\n preAddRxPlugin?: RxPluginHooks;\n preCreateRxDatabase?: RxPluginHooks;\n createRxDatabase?: RxPluginHooks<{\n database: RxDatabase;\n creator: RxDatabaseCreator;\n }>;\n preDestroyRxDatabase?: RxPluginHooks;\n postRemoveRxDatabase?: RxPluginHooks<{\n databaseName: string;\n storage: RxStorage;\n }>;\n createRxCollection?: RxPluginHooks<{\n collection: 
RxCollection;\n creator: RxCollectionCreator;\n }>;\n createRxState?: RxPluginHooks<{\n collection: RxCollection;\n state: RxState;\n }>;\n preCreateRxCollection?: RxPluginHooks & {\n name: string;\n database: RxDatabase;\n }>;\n postDestroyRxCollection?: RxPluginHooks;\n postRemoveRxCollection?: RxPluginHooks<{\n storage: RxStorage;\n databaseName: string;\n collectionName: string;\n }>;\n preCreateRxSchema?: RxPluginHooks;\n createRxSchema?: RxPluginHooks;\n preCreateRxQuery?: RxPluginHooks;\n prePrepareQuery?: RxPluginHooks;\n createRxQuery?: RxPluginHooks;\n createRxDocument?: RxPluginHooks;\n postCreateRxDocument?: RxPluginHooks;\n preCreateRxStorageInstance?: RxPluginHooks>;\n /**\n * Runs before a write to the storage instance of a RxCollection or RxDatabase.\n */\n preStorageWrite?: RxPluginHooks<{\n storageInstance: RxStorageInstance;\n rows: BulkWriteRow[];\n }>;\n preMigrateDocument?: RxPluginHooks;\n postMigrateDocument?: RxPluginHooks;\n preReplicationMasterWrite?: RxPluginHooks<{\n rows: RxReplicationWriteToMasterRow[];\n collection: RxCollection;\n }>;\n preReplicationMasterWriteDocumentsHandle?: RxPluginHooks<{\n result: WithDeleted[];\n collection: RxCollection;\n }>;\n };\n}\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/rx-query.d.js b/dist/esm/types/rx-query.d.js deleted file mode 100644 index 1a4c9cff22b..00000000000 --- a/dist/esm/types/rx-query.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=rx-query.d.js.map \ No newline at end of file diff --git a/dist/esm/types/rx-query.d.js.map b/dist/esm/types/rx-query.d.js.map deleted file mode 100644 index 39567183259..00000000000 --- a/dist/esm/types/rx-query.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-query.d.js","names":[],"sources":["../../../src/types/rx-query.d.ts"],"sourcesContent":["import type {\n RxQueryBase\n} from '../rx-query.d.ts';\nimport type { Paths, StringKeys } from './util.d.ts';\n\n/**\n 
* Typed Mango Query Selector\n * @link https://github.com/mongodb/node-mongodb-native/blob/26bce4a8debb65df5a42dc8599e886c9c83de10d/src/mongo_types.ts\n * @link https://stackoverflow.com/a/58436959/3443137\n */\n\n\nexport type PropertyType = string extends Property\n ? unknown\n : Property extends keyof Type\n ? Type[Property]\n : Property extends `${number}`\n ? Type extends ReadonlyArray\n ? ArrayType\n : unknown\n : Property extends `${infer Key}.${infer Rest}`\n ? Key extends `${number}`\n ? Type extends ReadonlyArray\n ? PropertyType\n : unknown\n : Key extends keyof Type\n ? Type[Key] extends Map\n ? MapType\n : PropertyType\n : unknown\n : unknown;\n\n\nexport type MangoQueryRegexOptions = 'i' | 'g' | 'm' | 'gi' | 'ig' | 'igm' | string;\n\n/*\n * The MongoDB query library is huge and we do not need all the operators.\n * If you add an operator here, make sure that you properly add a test in\n * the file /test/unit/rx-storage-query-correctness.test.ts\n *\n * @link https://github.com/kofrasa/mingo#es6\n */\nexport interface MangoQueryOperators {\n $eq?: PathValueType;\n $gt?: PathValueType;\n $gte?: PathValueType;\n $lt?: PathValueType;\n $lte?: PathValueType;\n $ne?: PathValueType;\n $in?: PathValueType[];\n $nin?: PathValueType[];\n $regex?: string;\n $options?: MangoQueryRegexOptions;\n $exists?: boolean;\n $type?: 'null' | 'boolean' | 'number' | 'string' | 'array' | 'object';\n $mod?: number;\n $not?: PathValueType;\n $size?: number;\n $elemMatch?: MangoQuerySelector;\n}\n\nexport type MangoQuerySelector = Partial<{\n [Property in Paths]: MangoQueryOperators | PropertyType;\n}> & {\n $and?: MangoQuerySelector[];\n $or?: MangoQuerySelector[];\n $nor?: MangoQuerySelector[];\n};\n\n/**\n * Discussion was at:\n * @link https://github.com/pubkey/rxdb/issues/1972\n */\nexport type MangoQuerySortDirection = 'asc' | 'desc';\nexport type MangoQuerySortPart = {\n [k in StringKeys | string]: MangoQuerySortDirection;\n};\n\nexport type MangoQuerySelectorAndIndex = 
{\n /**\n * Selector is optional,\n * if not given, the query matches all documents\n * that are not _deleted=true.\n */\n selector?: MangoQuerySelector;\n /**\n * By default, the RxStorage implementation\n * decides which index to use when running the query.\n *\n * For better performance, a different index might be defined\n * by setting it in the query.\n * How this improves performance and if the defined index is used,\n * depends on the RxStorage implementation.\n */\n index?: string | string[];\n};\n\nexport type MangoQueryNoLimit = MangoQuerySelectorAndIndex & {\n /**\n * Sorting of the results.\n * If no sort is set, RxDB will sort by the primary key.\n * Also if sort is set, RxDB will add primaryKey sorting\n * if the primaryKey was not in the sort parameters before.\n * This ensures that there is a deterministic sorting of the\n * results, not mather at which order the documents have been\n * inserted into the storage.\n */\n sort?: MangoQuerySortPart[];\n};\n\nexport type MangoQuery = MangoQueryNoLimit & {\n skip?: number;\n limit?: number;\n};\n\nexport type RxQueryOP = 'find' | 'findOne' | 'count' | 'findByIds';\n\nexport declare class RxQuery<\n RxDocumentType = any,\n RxQueryResult = any,\n OrmMethods = {},\n Reactivity = unknown\n> extends RxQueryBase {\n equals(queryObj: any): RxQuery;\n eq(queryObj: any): RxQuery;\n or(queryObj: keyof RxDocumentType | string | any[]): RxQuery;\n nor(queryObj: keyof RxDocumentType | string | any[]): RxQuery;\n and(queryObj: keyof RxDocumentType | string | any[]): RxQuery;\n gt(queryObj: any): RxQuery;\n gte(queryObj: any): RxQuery;\n lt(queryObj: any): RxQuery;\n lte(queryObj: any): RxQuery;\n ne(queryObj: any): RxQuery;\n in(queryObj: any[]): RxQuery;\n nin(queryObj: any[]): RxQuery;\n all(queryObj: any): RxQuery;\n regex(queryObj: string | {\n $regex: string;\n $options: MangoQueryRegexOptions;\n }): RxQuery;\n exists(queryObj: any): RxQuery;\n elemMatch(queryObj: any): RxQuery;\n mod(p1: any, p2: any, p3: any): 
RxQuery;\n}\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/rx-schema.d.js b/dist/esm/types/rx-schema.d.js deleted file mode 100644 index 4e9357b22af..00000000000 --- a/dist/esm/types/rx-schema.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=rx-schema.d.js.map \ No newline at end of file diff --git a/dist/esm/types/rx-schema.d.js.map b/dist/esm/types/rx-schema.d.js.map deleted file mode 100644 index 6012722753e..00000000000 --- a/dist/esm/types/rx-schema.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-schema.d.js","names":[],"sources":["../../../src/types/rx-schema.d.ts"],"sourcesContent":["import { AsTyped } from 'as-typed';\nimport type { CRDTSchemaOptions } from './plugins/crdt.d.ts';\nimport type { StringKeys } from './util.d.ts';\n\n/**\n * @link https://github.com/types/lib-json-schema/blob/master/v4/index.d.ts\n */\nexport type JsonSchemaTypes = 'array' | 'boolean' | 'integer' | 'number' | 'null' | 'object' | 'string' | (string & {});\n\nexport type CompositePrimaryKey = {\n /**\n * The top level field of the document that will be used\n * to store the composite key as string.\n */\n key: StringKeys;\n\n /**\n * The fields of the composite key,\n * the fields must be required and final\n * and have the type number, int, or string.\n */\n fields: (StringKeys | string)[] | readonly (StringKeys | string)[];\n /**\n * The separator which is used to concat the\n * primary fields values.\n * Choose a character as separator that is known\n * to never appear inside of the primary fields values.\n * I recommend to use the pipe char '|'.\n */\n separator: string;\n};\n\nexport type PrimaryKey = StringKeys | CompositePrimaryKey;\n\nexport type JsonSchema = {\n allOf?: JsonSchema[] | readonly JsonSchema[];\n anyOf?: JsonSchema[] | readonly JsonSchema[];\n oneOf?: JsonSchema[] | readonly JsonSchema[];\n additionalItems?: boolean | JsonSchema;\n additionalProperties?: boolean | 
JsonSchema;\n type?: JsonSchemaTypes | JsonSchemaTypes[] | readonly JsonSchemaTypes[];\n description?: string;\n dependencies?: {\n [key: string]: JsonSchema | string[] | readonly string[];\n };\n exclusiveMinimum?: boolean;\n exclusiveMaximum?: boolean;\n items?: JsonSchema | JsonSchema[] | readonly JsonSchema[];\n multipleOf?: number;\n maxProperties?: number;\n maximum?: number;\n minimum?: number;\n maxLength?: number;\n minLength?: number;\n maxItems?: number;\n minItems?: number;\n minProperties?: number;\n pattern?: string;\n patternProperties?: {\n [key: string]: JsonSchema;\n };\n properties?: {\n [key in StringKeys]: JsonSchema;\n };\n required?: string[] | readonly string[];\n uniqueItems?: boolean;\n enum?: any[] | readonly any[];\n not?: JsonSchema;\n definitions?: {\n [key: string]: JsonSchema;\n };\n format?: 'date-time' | 'email' | 'hostname' | 'ipv4' | 'ipv6' | 'uri' | string;\n example?: any;\n\n // RxDB-specific\n ref?: string;\n final?: boolean;\n};\n\nexport interface TopLevelProperty extends JsonSchema {\n default?: any;\n}\n\n/**\n * @link https://developer.mozilla.org/en-US/docs/Web/API/Compression_Streams_API\n */\nexport type CompressionMode = 'deflate' | 'gzip';\n\nexport type RxJsonSchema<\n /**\n * The doctype must be given, and '=any' cannot be used,\n * otherwise the keyof of primaryKey\n * would be optional when the type of the document is not known.\n */\n RxDocType\n> = {\n title?: string;\n description?: string;\n version: number;\n\n /**\n * The primary key of the documents.\n * Must be in the top level of the properties of the schema\n * and that property must have the type 'string'\n */\n primaryKey: PrimaryKey;\n\n /**\n * TODO this looks like a typescript-bug\n * we have to allows all string because the 'object'-literal is not recognized\n * retry this in later typescript-versions\n */\n type: 'object' | string;\n properties: { [key in StringKeys]: TopLevelProperty };\n\n /**\n * On the top level the required-array must be 
set\n * because we always have to set the primary key to required.\n *\n * TODO required should be made non-optional on the top level\n */\n required?: StringKeys[] | readonly StringKeys[];\n\n\n /**\n * Indexes that will be used for the queries.\n * RxDB will internally prepend the _deleted field to the index\n * because queries do NOT return documents with _deleted=true.\n */\n indexes?: (string | string[])[] | (string | readonly string[])[] | readonly (string | string[])[] | readonly (string | readonly string[])[];\n\n /**\n * Internally used indexes that do not get _deleted prepended\n * by RxDB. Use these to speed up queries that are run manually on the storage\n * or to speed up requests when you use the RxDB server.\n * These could also be utilised when you build a plugin that\n * has to query documents without respecting the _deleted value.\n */\n internalIndexes?: string[][] | readonly string[][];\n\n\n encrypted?: string[] | readonly string[];\n keyCompression?: boolean;\n /**\n * if not set, rxdb will set 'false' as default\n * Having additionalProperties: true is not allowed on the root level to ensure\n * that property names do not clash with properties of the RxDocument class\n * or ORM methods.\n */\n additionalProperties?: false;\n attachments?: {\n encrypted?: boolean;\n /**\n * @link https://developer.mozilla.org/en-US/docs/Web/API/Compression_Streams_API\n */\n compression?: CompressionMode;\n };\n /**\n * Options for the sharding plugin of rxdb-premium.\n * We set these on the schema because changing the shard amount or mode\n * will require a migration.\n * @link https://rxdb.info/rx-storage-sharding.html\n */\n sharding?: {\n /**\n * Amount of shards.\n * This value cannot be changed after you have stored data,\n * if you change it anyway, you will loose the existing data.\n */\n shards: number;\n /**\n * Either shard by collection or by database.\n * For most use cases (IndexedDB based storages), sharding by collection is the way to go\n * 
because it has a faster initial load time.\n */\n mode: 'database' | 'collection';\n };\n crdt?: CRDTSchemaOptions;\n};\n\n/**\n * Used to aggregate the document type from the schema.\n * @link https://github.com/pubkey/rxdb/discussions/3467\n */\nexport type ExtractDocumentTypeFromTypedRxJsonSchema = AsTyped;\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/rx-storage.d.js b/dist/esm/types/rx-storage.d.js deleted file mode 100644 index 7b893f6b18e..00000000000 --- a/dist/esm/types/rx-storage.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=rx-storage.d.js.map \ No newline at end of file diff --git a/dist/esm/types/rx-storage.d.js.map b/dist/esm/types/rx-storage.d.js.map deleted file mode 100644 index 713c1434c2d..00000000000 --- a/dist/esm/types/rx-storage.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-storage.d.js","names":[],"sources":["../../../src/types/rx-storage.d.ts"],"sourcesContent":["import type { ChangeEvent } from 'event-reduce-js';\nimport type { RxChangeEvent } from './rx-change-event.d.ts';\nimport type { RxDocumentMeta } from './rx-document.d.ts';\nimport type { RxStorageWriteError } from './rx-error.d.ts';\nimport type { RxJsonSchema } from './rx-schema.d.ts';\nimport type { Override } from './util.d.ts';\n\n/**\n * The document data how it comes out of the storage instance.\n * Contains all meta data like revision, attachments and deleted-flag.\n */\nexport type RxDocumentData = T & {\n\n /**\n * As other NoSQL databases,\n * RxDB also assumes that no data is finally deleted.\n * Instead the documents are stored with _deleted: true\n * which means they will not be returned at queries.\n */\n _deleted: boolean;\n\n /**\n * The attachments meta data is stored besides to document.\n */\n _attachments: {\n [attachmentId: string]: RxAttachmentData;\n };\n\n /**\n * Contains a revision which is concatenated with a [height: number]-[identifier: string]\n * like: 
'1-3hl4kj3l4kgj34g34glk'.\n * The revision is used to detect write conflicts and have a document history.\n * Revisions behave similar to couchdb revisions:\n * @link https://docs.couchdb.org/en/stable/replication/conflicts.html#revision-tree\n\n * When writing a document, you must send the correct revision in the previous-field\n * to make sure that you do not cause a write conflict.\n * The revision of the 'new' document-field must be created, for example via util.createRevision().\n * Any revision that matches the [height]-[hash] format can be used.\n */\n _rev: string;\n _meta: RxDocumentMeta;\n};\n\nexport type RxDocumentDataById = {\n [documentId: string]: RxDocumentData;\n};\n\n/**\n * The document data how it is send to the\n * storage instance to save it.\n */\n// We & T here instead of in RxDocumentData to preserver indexability by keyof T which the Override breaks\nexport type RxDocumentWriteData = T & Override, {\n _attachments: {\n /**\n * To create a new attachment, set the write data\n * To delete an attachment, leave it out on the _attachments property.\n * To change an attachment, set the new write data.\n * To not touch an attachment, just send the stub again\n * which came out of the storage instance.\n */\n [attachmentId: string]: RxAttachmentData | RxAttachmentWriteData;\n };\n}>;\n\nexport type WithDeleted = DocType & {\n _deleted: boolean;\n};\nexport type WithDeletedAndAttachments = DocType & {\n _deleted: boolean;\n\n /**\n * Here the _attachments might exist\n * or might not, depending one the use case.\n */\n _attachments?: {\n [attachmentId: string]: RxAttachmentData | RxAttachmentWriteData;\n };\n};\n\n/**\n * Send to the bulkWrite() method of a storage instance.\n */\nexport type BulkWriteRow = {\n /**\n * The current document state in the storage engine,\n * assumed by the application.\n * Undefined if the document is a new insert.\n * Notice that we send the full document data as 'previous', not just the revision.\n * The reason is 
that to get the previous revision you anyway have to get the full\n * previous document and so it is easier to just send it all to the storage instance.\n * This will later allow us to use something different then the _rev key for conflict detection\n * when we implement other storage instances.\n */\n previous?: RxDocumentData;\n /**\n * The new document data to be stored in the storage instance.\n */\n document: RxDocumentWriteData;\n};\nexport type BulkWriteRowById = {\n [documentId: string]: BulkWriteRow;\n};\n\n/**\n * After the RxStorage has processed all rows,\n * we have this to work with afterwards.\n */\nexport type BulkWriteRowProcessed = BulkWriteRow & {\n document: RxDocumentData;\n};\n\n\nexport type RxAttachmentData = {\n /**\n * Size of the attachments data\n */\n length: number;\n /**\n * Content type like 'plain/text'\n */\n type: string;\n /**\n * The hash of the attachments content.\n * It is calculated by RxDB, and send to the storage.\n * The only guarantee is that the digest will change when the attachments data changes.\n * @link https://github.com/pouchdb/pouchdb/issues/3156#issuecomment-66831010\n * @link https://github.com/pubkey/rxdb/pull/4107\n */\n digest: string;\n};\n\n/**\n * Data which is needed for new attachments\n * that are send from RxDB to the RxStorage implementation.\n */\nexport type RxAttachmentWriteData = RxAttachmentData & {\n /**\n * The data of the attachment. 
As string in base64 format.\n * In the past we used Blob internally but it created many\n * problems because of then we need the full data (for encryption/compression)\n * so we anyway have to get the string value out of the Blob.\n *\n * Also using Blob has no performance benefit because in some RxStorage implementations,\n * it just keeps the transaction open for longer because the Blob\n * has be be read.\n */\n data: string;\n};\n\n\n/**\n * The returned data from RxStorageInstance.bulkWrite()\n * For better performance, we do NOT use an indexed object,\n * but only plain arrays. Because most of the time\n * RxDB anyway only need the array data and we can save performance\n * by not indexing the results.\n */\nexport type RxStorageBulkWriteResponse = {\n /**\n * contains all succeeded writes.\n */\n success: RxDocumentData[];\n /**\n * contains all errored writes.\n */\n error: RxStorageWriteError[];\n};\n\n/**\n * We return a complex object instead of a single array\n * so we are able to add additional fields in the future.\n */\nexport type RxStorageQueryResult = {\n // the found documents, sort order is important.\n documents: RxDocumentData[];\n};\n\nexport type RxStorageCountResult = {\n count: number;\n /**\n * Returns the mode which was used by the storage\n * to count the documents.\n * If this returns 'slow', RxDB will throw by default\n * if 'allowSlowCount' is not set.\n */\n mode: 'fast' | 'slow';\n};\n\nexport type RxStorageInstanceCreationParams = {\n\n /**\n * A string to uniquely identify the instance of the JavaScript object\n * of the RxDatabase where this RxStorageInstance belongs to.\n * In most cases you would use RxDatabase.token here.\n *\n * This is used so that we can add caching or reuse stuff that belongs to the same RxDatabase.\n * For example the BroadcastChannel that is used for event propagation between multiple browser tabs\n * is cached by this token.\n *\n * In theory we could just use the databaseName for that. 
But to make it easier in unit tests\n * to simulate cross-tab usage, we cannot assume that the databaseName is unique in a single\n * JavaScript process. Therefore we use the instance token instead.\n */\n databaseInstanceToken: string;\n\n\n databaseName: string;\n collectionName: string;\n schema: RxJsonSchema>;\n options: InstanceCreationOptions;\n /**\n * If multiInstance is true, there can be more\n * then one instance of the database, for example\n * when multiple browser tabs exist or more then one Node.js\n * process relies on the same storage.\n */\n multiInstance: boolean;\n password?: string | any;\n\n /**\n * Some storages can do additional checks\n * that are performance expensive\n * and should only be done in dev-mode.\n */\n devMode: boolean;\n};\n\nexport type ChangeStreamOptions = {\n\n /**\n * Sequence number of the first event to start with.\n * If you want to get all ongoing events,\n * first get the latest sequence number and input it here.\n *\n * Optional on changeStream,\n * will start from the newest sequence.\n */\n startSequence?: number;\n /**\n * limits the amount of results\n */\n limit?: number;\n};\n\n/**\n * In the past we handles each RxChangeEvent by its own.\n * But it has been shown that this take way more performance then needed,\n * especially when the events get transferred over a data layer\n * like with WebWorkers or the BroadcastChannel.\n * So we now process events as bulks internally.\n */\nexport type EventBulk = {\n /**\n * Unique id of the bulk,\n * used to detect duplicate bulks\n * that have already been processed.\n */\n id: string;\n events: EventType[];\n\n /**\n * Required for replication.\n * Passing this checkpoint into getChangedDocumentsSince()\n * must return all items that have been modified AFTER this write event.\n */\n checkpoint: CheckpointType;\n\n /**\n * The context that was given at the call to bulkWrite()\n * that caused this EventBulk.\n */\n context: string;\n\n /**\n * Unix timestamp in 
milliseconds of when the operation was triggered\n * and when it was finished.\n * This is optional because we do not have this time\n * for events that come from the internal storage instance changestream.\n * TODO do we even need this values?\n */\n startTime: number;\n endTime: number;\n};\n\nexport type ChangeStreamEvent = ChangeEvent> & {\n /**\n * An integer that is increasing\n * and unique per event.\n * Can be used to sort events or get information\n * about how many events there are.\n */\n sequence: number;\n /**\n * The value of the primary key\n * of the changed document\n */\n id: string;\n};\n\nexport type RxStorageChangeEvent = Omit, 'isLocal' | 'collectionName'>;\n\n/**\n * An example for how a RxStorage checkpoint can look like.\n * NOTICE: Not all implementations use this type.\n */\nexport type RxStorageDefaultCheckpoint = {\n id: string;\n lwt: number;\n};\n\n\n\n\nexport type CategorizeBulkWriteRowsOutput = {\n\n // TODO only needs the document, not the row.\n bulkInsertDocs: BulkWriteRowProcessed[];\n bulkUpdateDocs: BulkWriteRowProcessed[];\n\n errors: RxStorageWriteError[];\n eventBulk: EventBulk>, any>;\n attachmentsAdd: {\n documentId: string;\n attachmentId: string;\n attachmentData: RxAttachmentWriteData;\n digest: string;\n }[];\n attachmentsRemove: {\n documentId: string;\n attachmentId: string;\n digest: string;\n }[];\n attachmentsUpdate: {\n documentId: string;\n attachmentId: string;\n attachmentData: RxAttachmentWriteData;\n digest: string;\n }[];\n /**\n * Contains the non-error document row that\n * has the newest _meta.lwt time.\n * Empty if no successful write exists.\n */\n newestRow?: BulkWriteRowProcessed;\n};\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/rx-storage.interface.d.js b/dist/esm/types/rx-storage.interface.d.js deleted file mode 100644 index 5f06942fc41..00000000000 --- a/dist/esm/types/rx-storage.interface.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; 
-//# sourceMappingURL=rx-storage.interface.d.js.map \ No newline at end of file diff --git a/dist/esm/types/rx-storage.interface.d.js.map b/dist/esm/types/rx-storage.interface.d.js.map deleted file mode 100644 index 47e6cd1d634..00000000000 --- a/dist/esm/types/rx-storage.interface.d.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"rx-storage.interface.d.js","names":[],"sources":["../../../src/types/rx-storage.interface.d.ts"],"sourcesContent":["import type {\n BulkWriteRow,\n EventBulk,\n RxDocumentData,\n RxStorageBulkWriteResponse,\n RxStorageChangeEvent,\n RxStorageCountResult,\n RxStorageInstanceCreationParams,\n RxStorageQueryResult\n} from './rx-storage.ts';\nimport type {\n MangoQuerySelector,\n MangoQuerySortPart,\n RxConflictResultionTask,\n RxConflictResultionTaskSolution,\n RxJsonSchema,\n RxQueryPlan\n} from './index.d.ts';\nimport type {\n Observable\n} from 'rxjs';\n\n/**\n * RxStorage\n * This is an interface that abstracts the storage engine.\n * This allows us to use RxDB with different storage engines.\n *\n * @link https://rxdb.info/rx-storage.html\n * @link https://github.com/pubkey/rxdb/issues/1636\n */\n\n\n/**\n * A RxStorage is a module that acts\n * as a factory that can create multiple RxStorageInstance\n * objects.\n *\n * All data inputs and outputs of a StorageInstance must be plain json objects.\n * Do not use Map, Set or anything else that cannot be JSON.stringify-ed.\n * This will ensure that the storage can exchange data\n * when it is a WebWorker or a WASM process or data is send via BroadcastChannel.\n */\nexport interface RxStorage {\n /**\n * name of the storage engine\n * used to detect if plugins do not work so we can throw proper errors.\n */\n readonly name: string;\n\n /**\n * RxDB version is part of the storage\n * so we can have fallbacks and stuff when\n * multiple storages with different version are in use\n * like in the storage migration plugin.\n */\n readonly rxdbVersion: string;\n\n /**\n * Creates a 
storage instance\n * that can contain the NoSQL documents of a collection.\n */\n createStorageInstance(\n params: RxStorageInstanceCreationParams\n ): Promise>;\n}\n\n\n/**\n * User provided mango queries will be filled up by RxDB via normalizeMangoQuery()\n * so we do not have to do many if-field-exist tests in the internals.\n */\nexport type FilledMangoQuery = {\n /**\n * The selector is required here.\n */\n selector: MangoQuerySelector>;\n\n /**\n * In contrast to the user-provided MangoQuery,\n * the sorting is required here because\n * RxDB has to ensure that the primary key is always\n * part of the sort params.\n */\n sort: MangoQuerySortPart>[];\n\n /**\n * In the normalized mango query,\n * the index must always be a string[],\n * never just a string.\n * This makes it easier to use the query because\n * we do not have to do an array check.\n */\n index?: string[];\n\n /**\n * Skip must be set which defaults to 0\n */\n skip: number;\n\n limit?: number;\n};\n\n\n/**\n * Before sending a query to the storageInstance.query()\n * we run it through the query planner and do some normalization\n * stuff. Notice that the queryPlan is a hint for the storage and\n * it is not required to use it when running queries. 
Some storages\n * might use their own query planning instead.\n */\nexport type PreparedQuery = {\n // original query from the input\n query: FilledMangoQuery;\n queryPlan: RxQueryPlan;\n};\n\nexport interface RxStorageInstance<\n /**\n * The type of the documents that can be stored in this instance.\n * All documents in an instance must comply to the same schema.\n * Also all documents are RxDocumentData with the meta properties like\n * _deleted or _rev etc.\n */\n RxDocType,\n Internals,\n InstanceCreationOptions,\n CheckpointType = any\n> {\n readonly databaseName: string;\n /**\n * Returns the internal data that is used by the storage engine.\n */\n readonly internals: Readonly;\n readonly options: Readonly;\n /**\n * The schema that defines the documents that are stored in this instance.\n * Notice that the schema must be enhanced with the meta properties like\n * _meta, _rev and _deleted etc. which are added by fillWithDefaultSettings()\n */\n readonly schema: Readonly>>;\n readonly collectionName: string;\n\n /**\n * (Optional) reference to the underlying persistent storage instance.\n * If set, things like replication will run on that storageInstance instead of the parent.\n * This is mostly used in things like the memory-synced storage where we want to\n * run replications and migrations on the persistent storage instead of the in-memory storage.\n *\n * Having this is the least hacky option. The only other option would be to toggle all calls to the\n * storageInstance by checking the givent context-string. 
But this would make it impossible\n * to run a replication on the parentStorage itself.\n */\n readonly underlyingPersistentStorage?: RxStorageInstance;\n\n /**\n * Writes multiple documents to the storage instance.\n * The write for each single document is atomic, there\n * is no transaction around all documents.\n * The written documents must be the newest revision of that documents data.\n * If the previous document is not the current newest revision, a conflict error\n * must be returned.\n * It must be possible that some document writes succeed\n * and others error. We need this to have a similar behavior as most NoSQL databases.\n */\n bulkWrite(\n documentWrites: BulkWriteRow[],\n /**\n * Context will be used in all\n * changeStream()-events that are emitted as a result\n * of that bulkWrite() operation.\n * Used in plugins so that we can detect that event X\n * comes from operation Y.\n */\n context: string\n ): Promise>;\n\n /**\n * Get Multiple documents by their primary value.\n * This must also return deleted documents.\n */\n findDocumentsById(\n /**\n * List of primary values\n * of the documents to find.\n */\n ids: string[],\n /**\n * If set to true, deleted documents will also be returned.\n */\n withDeleted: boolean\n\n ): Promise<\n /**\n * For better performance, we return an array\n * instead of an indexed object because most consumers\n * of this anyway have to fill a Map() instance or\n * even do only need the list at all.\n */\n RxDocumentData[]\n >;\n\n /**\n * Runs a NoSQL 'mango' query over the storage\n * and returns the found documents data.\n * Having all storage instances behave similar\n * is likely the most difficult thing when creating a new\n * rx-storage implementation.\n */\n query(\n preparedQuery: PreparedQuery\n ): Promise>;\n\n /**\n * Returns the amount of non-deleted documents\n * that match the given query.\n * Sort, skip and limit of the query must be ignored!\n */\n count(\n preparedQuery: PreparedQuery\n ): 
Promise;\n\n /**\n * Returns the plain data of a single attachment.\n */\n getAttachmentData(\n documentId: string,\n attachmentId: string,\n digest: string\n ): Promise;\n\n /**\n * Returns the current (not the old!) data of all documents that have been changed AFTER the given checkpoint.\n * If the returned array does not reach the limit, it can be assumed that the \"end\" is reached, when paginating over the changes.\n * Also returns a new checkpoint for each document which can be used to continue with the pagination from that change on.\n * Must never return the same document multiple times in the same call operation.\n * This is used by RxDB to known what has changed since X so these docs can be handled by the backup or the replication\n * plugin.\n *\n * Important: This method is optional. If not defined,\n * RxDB will manually run a query and use the last returned document\n * for checkpointing. In the future we might even remove this method completely\n * and let RxDB do the work instead of the RxStorage.\n */\n getChangedDocumentsSince?(\n limit: number,\n /**\n * The checkpoint from with to start\n * when the events are sorted in time.\n * If we want to start from the beginning,\n * undefined is used as a checkpoint.\n */\n checkpoint?: CheckpointType\n ): Promise<{\n documents: RxDocumentData[];\n /**\n * The checkpoint contains data so that another\n * call to getChangedDocumentsSince() will continue\n * from exactly the last document that was returned before.\n */\n checkpoint: CheckpointType;\n }>;\n\n /**\n * Returns an ongoing stream\n * of all changes that happen to the\n * storage instance.\n * Do not forget to unsubscribe.\n *\n * If the RxStorage support multi-instance,\n * and the storage is persistent,\n * then the emitted changes of one RxStorageInstance\n * must be also emitted to other instances with the same databaseName+collectionName.\n * See ./rx-storage-multiinstance.ts\n */\n changeStream(): Observable, CheckpointType>>;\n\n /**\n * 
Runs a cleanup that removes all tompstones\n * of documents that have _deleted set to true\n * to free up disc space.\n *\n * Returns true if all cleanable documents have been removed.\n * Returns false if there are more documents to be cleaned up,\n * but not all have been purged because that would block the storage for too long.\n */\n cleanup(\n /**\n * The minimum time in milliseconds\n * of how long a document must have been deleted\n * until it is purged by the cleanup.\n */\n minimumDeletedTime: number\n ): Promise<\n /**\n * True if all docs cleaned up,\n * false if there are more docs to clean up\n */\n boolean\n >;\n\n /**\n * Closes the storage instance so it cannot be used\n * anymore and should clear all memory.\n * The returned promise must resolve when everything is cleaned up.\n */\n close(): Promise;\n\n /**\n * Remove the database and\n * deletes all of its data.\n */\n remove(): Promise;\n\n /**\n * Instead of passing the conflict-resolver function\n * into the storage, we have to work with an observable that emits tasks\n * and a resolver that takes resolved tasks.\n * This is needed because the RxStorageInstance might run inside of a Worker\n * other JavaScript process, so we cannot pass plain code.\n */\n conflictResultionTasks(): Observable>;\n resolveConflictResultionTask(taskSolution: RxConflictResultionTaskSolution): Promise;\n}\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/esm/types/util.d.js b/dist/esm/types/util.d.js deleted file mode 100644 index 4e14938a97f..00000000000 --- a/dist/esm/types/util.d.js +++ /dev/null @@ -1,3 +0,0 @@ -export {}; -export {}; -//# sourceMappingURL=util.d.js.map \ No newline at end of file diff --git a/dist/esm/types/util.d.js.map b/dist/esm/types/util.d.js.map deleted file mode 100644 index 9d1b26e0974..00000000000 --- a/dist/esm/types/util.d.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"util.d.js","names":[],"sources":["../../../src/types/util.d.ts"],"sourcesContent":["import type { RxStorage } from './rx-storage.interface';\n\nexport type MaybePromise = Promise | T;\n\n\nexport type PlainJsonValue = string | number | boolean | PlainSimpleJsonObject | PlainSimpleJsonObject[] | PlainJsonValue[];\nexport type PlainSimpleJsonObject = {\n [k: string]: PlainJsonValue | PlainJsonValue[];\n};\n\n/**\n * @link https://stackoverflow.com/a/49670389/3443137\n */\ntype DeepReadonly =\n T extends (infer R)[] ? DeepReadonlyArray :\n T extends Function ? T :\n T extends object ? DeepReadonlyObject :\n T;\n\ninterface DeepReadonlyArray extends ReadonlyArray> { }\n\ntype DeepReadonlyObject = {\n readonly [P in keyof T]: DeepReadonly;\n};\n\nexport type MaybeReadonly = T | Readonly;\n\n\n/**\n * Opposite of DeepReadonly,\n * makes everything mutable again.\n */\ntype DeepMutable = (\n T extends object\n ? {\n -readonly [K in keyof T]: (\n T[K] extends object\n ? DeepMutable\n : T[K]\n )\n }\n : never\n);\n\n/**\n * Can be used like 'keyof'\n * but only represents the string keys, not the Symbols or numbers.\n * @link https://stackoverflow.com/a/51808262/3443137\n */\nexport type StringKeys = Extract;\n\nexport type AnyKeys = { [P in keyof T]?: T[P] | any };\nexport interface AnyObject {\n [k: string]: any;\n}\n\n/**\n * @link https://dev.to/vborodulin/ts-how-to-override-properties-with-type-intersection-554l\n */\nexport type Override = Omit & T2;\n\n\n\nexport type ById = {\n [id: string]: T;\n};\n\n/**\n * Must be async to support async hashing like from the WebCrypto API.\n */\nexport type HashFunction = (input: string) => Promise;\n\nexport declare type QueryMatcher = (doc: DocType | DeepReadonly) => boolean;\n\n/**\n * To have a deterministic sorting, we cannot return 0,\n * we only return 1 or -1.\n * This ensures that we always end with the same output array, no mather of the\n * pre-sorting of the input array.\n */\nexport declare type 
DeterministicSortComparator = (a: DocType, b: DocType) => 1 | -1;\n\n/**\n * To test a storage, we need these\n * configuration values.\n */\nexport type RxTestStorage = {\n // can be used to setup async stuff\n readonly init?: () => any;\n // TODO remove name here, it can be read out already via getStorage().name\n readonly name: string;\n readonly getStorage: () => RxStorage;\n /**\n * Returns a storage that is used in performance tests.\n * For example in a browser it should return the storage with an IndexedDB based adapter,\n * while in node.js it must use the filesystem.\n */\n readonly getPerformanceStorage: () => {\n storage: RxStorage;\n /**\n * A description that describes the storage and setting.\n * For example 'dexie-native'.\n */\n description: string;\n };\n /**\n * True if the storage is able to\n * keep data after an instance is closed and opened again.\n */\n readonly hasPersistence: boolean;\n readonly hasMultiInstance: boolean;\n readonly hasAttachments: boolean;\n\n /**\n * Some storages likes the memory-synced storage,\n * are not able to provide a replication while guaranteeing\n * data integrity.\n */\n readonly hasReplication: boolean;\n\n /**\n * To make it possible to test alternative encryption plugins,\n * you can specify hasEncryption to signal\n * the test runner that the given storage already contains an\n * encryption plugin that should be used to test encryption tests.\n * Otherwise the encryption-crypto-js plugin will be tested.\n *\n * hasEncryption must contain a function that is able\n * to create a new password.\n */\n readonly hasEncryption?: () => Promise;\n};\n\n\n/**\n * The paths as strings-type of nested object\n * @link https://stackoverflow.com/a/58436959/3443137\n */\ntype Join = K extends string | number ?\n P extends string | number ?\n `${K}${'' extends P ? '' : '.'}${P}`\n : never : never;\n\nexport type Paths = [D] extends [never] ? 
never : T extends object ?\n { [K in keyof T]-?: K extends string | number ?\n `${K}` | (Paths extends infer R ? Join : never)\n : never\n }[keyof T] : '';\n\nexport type Leaves = [D] extends [never] ? never : T extends object ?\n { [K in keyof T]-?: Join> }[keyof T] : '';\ntype Prev = [never, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10,\n 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, ...0[]];\n"],"mappings":"","ignoreList":[]} \ No newline at end of file diff --git a/dist/types/change-event-buffer.d.ts b/dist/types/change-event-buffer.d.ts deleted file mode 100644 index 0a85f84137a..00000000000 --- a/dist/types/change-event-buffer.d.ts +++ /dev/null @@ -1,34 +0,0 @@ -import type { RxChangeEvent, RxCollection } from './types/index.d.ts'; -export declare class ChangeEventBuffer { - collection: RxCollection; - private subs; - limit: number; - counter: number; - private eventCounterMap; - /** - * array with changeEvents - * starts with oldest known event, ends with newest - */ - buffer: RxChangeEvent[]; - constructor(collection: RxCollection); - _handleChangeEvent(changeEvent: RxChangeEvent): void; - /** - * gets the array-index for the given pointer - * @return arrayIndex which can be used to iterate from there. If null, pointer is out of lower bound - */ - getArrayIndexByPointer(pointer: number): number | null; - /** - * get all changeEvents which came in later than the pointer-event - * @return array with change-events. 
If null, pointer out of bounds - */ - getFrom(pointer: number): RxChangeEvent[] | null; - runFrom(pointer: number, fn: Function): void; - /** - * no matter how many operations are done on one document, - * only the last operation has to be checked to calculate the new state - * this function reduces the events to the last ChangeEvent of each doc - */ - reduceByLastOfDoc(changeEvents: RxChangeEvent[]): RxChangeEvent[]; - destroy(): void; -} -export declare function createChangeEventBuffer(collection: RxCollection): ChangeEventBuffer; diff --git a/dist/types/custom-index.d.ts b/dist/types/custom-index.d.ts deleted file mode 100644 index 2cf78031427..00000000000 --- a/dist/types/custom-index.d.ts +++ /dev/null @@ -1,58 +0,0 @@ -/** - * For some RxStorage implementations, - * we need to use our custom crafted indexes - * so we can easily iterate over them. And sort plain arrays of document data. - * - * We really often have to craft an index string for a given document. - * Performance of everything in this file is very important - * which is why the code sometimes looks strange. - * Run performance tests before and after you touch anything here! - */ -import type { JsonSchema, RxDocumentData, RxJsonSchema } from './types/index.ts'; -import { ObjectPathMonadFunction } from './plugins/utils/index.ts'; -/** - * Prepare all relevant information - * outside of the returned function - * from getIndexableStringMonad() - * to save performance when the returned - * function is called many times. - */ -type IndexMetaField = { - fieldName: string; - schemaPart: JsonSchema; - parsedLengths?: ParsedLengths; - getValue: ObjectPathMonadFunction; - getIndexStringPart: (docData: RxDocumentData) => string; -}; -export declare function getIndexMeta(schema: RxJsonSchema>, index: string[]): IndexMetaField[]; -/** - * Crafts an indexable string that can be used - * to check if a document would be sorted below or above - * another documents, dependent on the index values. 
- * @monad for better performance - * - * IMPORTANT: Performance is really important here - * which is why we code so 'strange'. - * Always run performance tests when you want to - * change something in this method. - */ -export declare function getIndexableStringMonad(schema: RxJsonSchema>, index: string[]): (docData: RxDocumentData) => string; -declare type ParsedLengths = { - minimum: number; - maximum: number; - nonDecimals: number; - decimals: number; - roundedMinimum: number; -}; -export declare function getStringLengthOfIndexNumber(schemaPart: JsonSchema): ParsedLengths; -export declare function getIndexStringLength(schema: RxJsonSchema>, index: string[]): number; -export declare function getPrimaryKeyFromIndexableString(indexableString: string, primaryKeyLength: number): string; -export declare function getNumberIndexString(parsedLengths: ParsedLengths, fieldValue: number): string; -export declare function getStartIndexStringFromLowerBound(schema: RxJsonSchema, index: string[], lowerBound: (string | boolean | number | null | undefined)[]): string; -export declare function getStartIndexStringFromUpperBound(schema: RxJsonSchema, index: string[], upperBound: (string | boolean | number | null | undefined)[]): string; -/** - * Used in storages where it is not possible - * to define inclusiveEnd/inclusiveStart - */ -export declare function changeIndexableStringByOneQuantum(str: string, direction: 1 | -1): string; -export {}; diff --git a/dist/types/doc-cache.d.ts b/dist/types/doc-cache.d.ts deleted file mode 100644 index cd199df4748..00000000000 --- a/dist/types/doc-cache.d.ts +++ /dev/null @@ -1,85 +0,0 @@ -import type { RxChangeEvent, RxDocument, RxDocumentData } from './types/index.d.ts'; -import { Observable } from 'rxjs'; -/** - * Because we have to create many cache items, - * we use an array instead of an object with properties - * for better performance and less memory usage. 
- * @link https://stackoverflow.com/questions/17295056/array-vs-object-efficiency-in-javascript - */ -declare type CacheItem = [ - /** - * Store the different document states of time - * based on their revision height. - * We store WeakRefs so that we can later clean up - * document states that are no longer needed. - */ - Map>>, - /** - * Store the latest known document state. - * As long as any state of the document is in the cache, - * we observe the changestream and update the latestDoc accordingly. - * This makes it easier to optimize performance on other parts - * because for each known document we can always get the current state - * in the storage. - * Also it makes it possible to call RxDocument.latest() in a non-async way - * to retrieve the latest document state or to observe$ some property. - * - * To not prevent the whole cacheItem from being garbage collected, - * we store only the document data here, but not the RxDocument. - */ - RxDocumentData -]; -/** - * @link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/FinalizationRegistry - */ -declare type FinalizationRegistryValue = { - docId: string; - revisionHeight: number; -}; -/** - * The DocumentCache stores RxDocument objects - * by their primary key and revision. - * This is useful on client side applications where - * it is not known how much memory can be used, so - * we de-duplicate RxDocument states to save memory. - * To not fill up the memory with old document states, the DocumentCache - * only contains weak references to the RxDocuments themself. - * @link https://caniuse.com/?search=weakref - */ -export declare class DocumentCache { - readonly primaryPath: string; - readonly changes$: Observable>; - /** - * A method that can create a RxDocument by the given document data. 
- */ - documentCreator: (docData: RxDocumentData) => RxDocument; - cacheItemByDocId: Map>; - /** - * Some JavaScript runtimes like QuickJS, - * so not have a FinalizationRegistry or WeakRef. - * Therefore we need a workaround which might waste a lot of memory, - * but at least works. - */ - readonly registry?: FinalizationRegistry; - constructor(primaryPath: string, changes$: Observable>, - /** - * A method that can create a RxDocument by the given document data. - */ - documentCreator: (docData: RxDocumentData) => RxDocument); - /** - * Get the RxDocument from the cache - * and create a new one if not exits before. - * @overwrites itself with the actual function - * because this is @performance relevant. - * It is called on each document row for each write and read. - */ - get getCachedRxDocuments(): (docsData: RxDocumentData[]) => RxDocument[]; - get getCachedRxDocument(): (docData: RxDocumentData) => RxDocument; - /** - * Throws if not exists - */ - getLatestDocumentData(docId: string): RxDocumentData; - getLatestDocumentDataIfExists(docId: string): RxDocumentData | undefined; -} -export declare function mapDocumentsDataToCacheDocs(docCache: DocumentCache, docsData: RxDocumentData[]): RxDocument[]; -export {}; diff --git a/dist/types/event-reduce.d.ts b/dist/types/event-reduce.d.ts deleted file mode 100644 index 7a2dfa87caf..00000000000 --- a/dist/types/event-reduce.d.ts +++ /dev/null @@ -1,15 +0,0 @@ -import { QueryParams } from 'event-reduce-js'; -import type { RxQuery, MangoQuery, RxChangeEvent, StringKeys, RxDocumentData } from './types/index.d.ts'; -export type EventReduceResultNeg = { - runFullQueryAgain: true; -}; -export type EventReduceResultPos = { - runFullQueryAgain: false; - changed: boolean; - newResults: RxDocumentType[]; -}; -export type EventReduceResult = EventReduceResultNeg | EventReduceResultPos; -export declare function getSortFieldsOfQuery(primaryKey: StringKeys>, query: MangoQuery): (string | StringKeys)[]; -export declare const 
RXQUERY_QUERY_PARAMS_CACHE: WeakMap>; -export declare function getQueryParams(rxQuery: RxQuery): QueryParams; -export declare function calculateNewResults(rxQuery: RxQuery, rxChangeEvents: RxChangeEvent[]): EventReduceResult; diff --git a/dist/types/hooks.d.ts b/dist/types/hooks.d.ts deleted file mode 100644 index 4014c283684..00000000000 --- a/dist/types/hooks.d.ts +++ /dev/null @@ -1,17 +0,0 @@ -/** - * hook-functions that can be extended by the plugin - */ -export declare const HOOKS: { - [k: string]: any[]; -}; -export declare function runPluginHooks(hookKey: string, obj: any): void; -/** - * TODO - * we should not run the hooks in parallel - * this makes stuff unpredictable. - */ -export declare function runAsyncPluginHooks(hookKey: string, obj: any): Promise; -/** - * used in tests to remove hooks - */ -export declare function _clearHook(type: string, fun: Function): void; diff --git a/dist/types/incremental-write.d.ts b/dist/types/incremental-write.d.ts deleted file mode 100644 index 2dafb5af70d..00000000000 --- a/dist/types/incremental-write.d.ts +++ /dev/null @@ -1,29 +0,0 @@ -import type { ModifyFunction, MaybePromise, RxDocumentData, RxDocumentWriteData, RxError, RxStorageInstance, StringKeys } from './types/index.d.ts'; -export type IncrementalWriteModifier = (doc: RxDocumentData) => MaybePromise> | MaybePromise>; -type IncrementalWriteQueueItem = { - lastKnownDocumentState: RxDocumentData; - modifier: IncrementalWriteModifier; - resolve: (d: RxDocumentData) => void; - reject: (error: RxError) => void; -}; -/** - * The incremental write queue - * batches up all incremental writes to a collection - * so that performance can be improved by: - * - Running only one write even when there are multiple modifications to the same document. - * - Run all writes ins a single bulkWrite() call even when there are writes to many documents. 
- */ -export declare class IncrementalWriteQueue { - readonly storageInstance: RxStorageInstance; - readonly primaryPath: StringKeys>; - readonly preWrite: (newData: RxDocumentData, oldData: RxDocumentData) => MaybePromise; - readonly postWrite: (docData: RxDocumentData) => void; - queueByDocId: Map[]>; - isRunning: boolean; - constructor(storageInstance: RxStorageInstance, primaryPath: StringKeys>, preWrite: (newData: RxDocumentData, oldData: RxDocumentData) => MaybePromise, postWrite: (docData: RxDocumentData) => void); - addWrite(lastKnownDocumentState: RxDocumentData, modifier: IncrementalWriteModifier): Promise>; - triggerRun(): Promise; -} -export declare function modifierFromPublicToInternal(publicModifier: ModifyFunction): IncrementalWriteModifier; -export declare function findNewestOfDocumentStates(docs: RxDocumentData[]): RxDocumentData; -export {}; diff --git a/dist/types/index.d.ts b/dist/types/index.d.ts deleted file mode 100644 index 5eacc48f16f..00000000000 --- a/dist/types/index.d.ts +++ /dev/null @@ -1,30 +0,0 @@ -// @ts-nocheck - /** - * this is the main entry-point - * for when the you call "import from 'rxdb'". 
- */ -export * from './plugin.d.ts'; -export * from './rx-database.d.ts'; -export * from './rx-error.d.ts'; -export * from './rx-database-internal-store.d.ts'; -export * from './overwritable.d.ts'; -export * from './rx-collection.d.ts'; -export * from './rx-collection-helper.d.ts'; -export * from './rx-document.d.ts'; -export * from './rx-change-event.d.ts'; -export * from './rx-document-prototype-merge.d.ts'; -export * from './rx-query.d.ts'; -export * from './rx-query-single-result.d.ts'; -export * from './rx-query-helper.d.ts'; -export * from './rx-schema.d.ts'; -export * from './rx-schema-helper.d.ts'; -export * from './rx-storage-helper.d.ts'; -export * from './replication-protocol/index.d.ts'; -export * from './rx-storage-multiinstance.d.ts'; -export * from './custom-index.d.ts'; -export * from './query-planner.d.ts'; -export * from './plugin-helpers.d.ts'; -export * from './plugins/utils/index.d.ts'; -export * from './hooks.d.ts'; -export * from './query-cache.d.ts'; -export type * from './types/index.d.ts'; diff --git a/dist/types/overwritable.d.ts b/dist/types/overwritable.d.ts deleted file mode 100644 index 4f5abe596f1..00000000000 --- a/dist/types/overwritable.d.ts +++ /dev/null @@ -1,26 +0,0 @@ -/** - * functions that can or should be overwritten by plugins - * IMPORTANT: Do not import any big stuff from RxDB here! - * An 'overwritable' can be used inside WebWorkers for RxStorage only, - * and we do not want to have the full RxDB lib bundled in them. - */ -import type { DeepReadonly } from './types/util.d.ts'; -export declare const overwritable: { - /** - * if this method is overwritten with one - * that returns true, we do additional checks - * which help the developer but have bad performance - */ - isDevMode(): boolean; - /** - * Deep freezes and object when in dev-mode. - * Deep-Freezing has the same performance as deep-cloning, so we only do that in dev-mode. 
- * Also, we can ensure the readonly state via typescript - * @link https://developer.mozilla.org/de/docs/Web/JavaScript/Reference/Global_Objects/Object/freeze - */ - deepFreezeWhenDevMode(obj: T): DeepReadonly; - /** - * overwritten to map error-codes to text-messages - */ - tunnelErrorMessage(message: string): string; -}; diff --git a/dist/types/plugin-helpers.d.ts b/dist/types/plugin-helpers.d.ts deleted file mode 100644 index 24f58bc1006..00000000000 --- a/dist/types/plugin-helpers.d.ts +++ /dev/null @@ -1,30 +0,0 @@ -import { WrappedRxStorageInstance } from './rx-storage-helper.ts'; -import type { RxDocumentData, RxDocumentWriteData, RxJsonSchema, RxStorage, RxStorageInstance, RxValidationError, MaybePromise } from './types/index.d.ts'; -type WrappedStorageFunction = (args: { - storage: RxStorage; -}) => RxStorage; -/** - * Returns the validation errors. - * If document is fully valid, returns an empty array. - */ -type ValidatorFunction = (docData: RxDocumentData) => RxValidationError[]; -/** - * This factory is used in the validation plugins - * so that we can reuse the basic storage wrapping code. - */ -export declare function wrappedValidateStorageFactory( -/** - * Returns a method that can be used to validate - * documents and throws when the document is not valid. - */ -getValidator: (schema: RxJsonSchema) => ValidatorFunction, -/** - * A string to identify the validation library. - */ -validatorKey: string): WrappedStorageFunction; -/** - * Used in plugins to easily modify all in- and outgoing - * data of that storage instance. 
- */ -export declare function wrapRxStorageInstance(originalSchema: RxJsonSchema>, instance: RxStorageInstance, modifyToStorage: (docData: RxDocumentWriteData) => MaybePromise>, modifyFromStorage: (docData: RxDocumentData) => MaybePromise>, modifyAttachmentFromStorage?: (attachmentData: string) => MaybePromise): WrappedRxStorageInstance; -export {}; diff --git a/dist/types/plugin.d.ts b/dist/types/plugin.d.ts deleted file mode 100644 index d803a6b9277..00000000000 --- a/dist/types/plugin.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -import type { RxPlugin } from './types/index.d.ts'; -/** - * Add a plugin to the RxDB library. - * Plugins are added globally and cannot be removed. - */ -export declare function addRxPlugin(plugin: RxPlugin): void; diff --git a/dist/types/plugins/attachments-compression/index.d.ts b/dist/types/plugins/attachments-compression/index.d.ts deleted file mode 100644 index 0903c18e0a1..00000000000 --- a/dist/types/plugins/attachments-compression/index.d.ts +++ /dev/null @@ -1,16 +0,0 @@ -import type { RxStorage, CompressionMode } from '../../types/index.d.ts'; -/** - * @link https://github.com/WICG/compression/blob/main/explainer.md - */ -export declare function compressBase64(mode: CompressionMode, base64String: string): Promise; -export declare function decompressBase64(mode: CompressionMode, base64String: string): Promise; -/** - * A RxStorage wrapper that compresses attachment data on writes - * and decompresses the data on reads. 
- * - * This is using the CompressionStream API, - * @link https://caniuse.com/?search=compressionstream - */ -export declare function wrappedAttachmentsCompressionStorage(args: { - storage: RxStorage; -}): RxStorage; diff --git a/dist/types/plugins/attachments/attachments-utils.d.ts b/dist/types/plugins/attachments/attachments-utils.d.ts deleted file mode 100644 index f31fa1db383..00000000000 --- a/dist/types/plugins/attachments/attachments-utils.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -import type { RxStorageInstance, WithDeletedAndAttachments } from '../../types/index.d.ts'; -export declare function ensureSchemaSupportsAttachments(doc: any): void; -export declare function assignMethodsToAttachment(attachment: any): void; -/** - * Fill up the missing attachment.data of the newDocument - * so that the new document can be send to somewhere else - * which could then receive all required attachments data - * that it did not have before. - */ -export declare function fillWriteDataForAttachmentsChange(primaryPath: string, storageInstance: RxStorageInstance, newDocument: WithDeletedAndAttachments, originalDocument?: WithDeletedAndAttachments): Promise>; diff --git a/dist/types/plugins/attachments/index.d.ts b/dist/types/plugins/attachments/index.d.ts deleted file mode 100644 index 258e521e09a..00000000000 --- a/dist/types/plugins/attachments/index.d.ts +++ /dev/null @@ -1,36 +0,0 @@ -import type { RxDocument, RxPlugin, RxAttachmentData, RxDocumentData, RxAttachmentCreator } from '../../types/index.ts'; -/** - * an RxAttachment is basically just the attachment-stub - * wrapped so that you can access the attachment-data - */ -export declare class RxAttachment { - doc: RxDocument; - id: string; - type: string; - length: number; - digest: string; - constructor({ doc, id, type, length, digest }: any); - remove(): Promise; - /** - * returns the data for the attachment - */ - getData(): Promise; - getStringData(): Promise; -} -export declare function fromStorageInstanceResult(id: 
string, attachmentData: RxAttachmentData, rxDocument: RxDocument): RxAttachment; -export declare function putAttachment(this: RxDocument, attachmentData: RxAttachmentCreator): Promise; -/** - * get an attachment of the document by its id - */ -export declare function getAttachment(this: RxDocument, id: string): RxAttachment | null; -/** - * returns all attachments of the document - */ -export declare function allAttachments(this: RxDocument): RxAttachment[]; -export declare function preMigrateDocument(data: { - docData: RxDocumentData; - oldCollection: any; -}): Promise; -export declare function postMigrateDocument(_action: any): Promise; -export declare const RxDBAttachmentsPlugin: RxPlugin; -export * from './attachments-utils.ts'; diff --git a/dist/types/plugins/backup/file-util.d.ts b/dist/types/plugins/backup/file-util.d.ts deleted file mode 100644 index 3d5d8d2d98d..00000000000 --- a/dist/types/plugins/backup/file-util.d.ts +++ /dev/null @@ -1,17 +0,0 @@ -import type { BackupMetaFileContent, BackupOptions, RxDatabase } from '../../types/index.d.ts'; -/** - * ensure that the given folder exists - */ -export declare function ensureFolderExists(folderPath: string): void; -/** - * deletes and recreates the folder - */ -export declare function clearFolder(folderPath: string): void; -export declare function deleteFolder(folderPath: string): void; -export declare function prepareFolders(database: RxDatabase, options: BackupOptions): void; -export declare function writeToFile(location: string, data: string | Blob): Promise; -export declare function writeJsonToFile(location: string, data: any): Promise; -export declare function metaFileLocation(options: BackupOptions): string; -export declare function getMeta(options: BackupOptions): Promise; -export declare function setMeta(options: BackupOptions, meta: BackupMetaFileContent): Promise; -export declare function documentFolder(options: BackupOptions, docId: string): string; diff --git 
a/dist/types/plugins/backup/index.d.ts b/dist/types/plugins/backup/index.d.ts deleted file mode 100644 index 4e8b40c2e50..00000000000 --- a/dist/types/plugins/backup/index.d.ts +++ /dev/null @@ -1,37 +0,0 @@ -import { Observable } from 'rxjs'; -import type { BackupOptions, RxBackupWriteEvent, RxDatabase, RxDocument, RxPlugin } from '../../types/index.d.ts'; -/** - * Backups a single documents, - * returns the paths to all written files - */ -export declare function backupSingleDocument(rxDocument: RxDocument, options: BackupOptions): Promise; -export declare class RxBackupState { - readonly database: RxDatabase; - readonly options: BackupOptions; - isStopped: boolean; - private subs; - private persistRunning; - private initialReplicationDone$; - private readonly internalWriteEvents$; - readonly writeEvents$: Observable; - constructor(database: RxDatabase, options: BackupOptions); - /** - * Persists all data from all collections, - * beginning from the oldest sequence checkpoint - * to the newest one. - * Do not call this while it is already running. 
- * Returns true if there are more documents to process - */ - persistOnce(): Promise; - _persistOnce(): Promise; - watchForChanges(): void; - /** - * Returns a promise that resolves when the initial backup is done - * and the filesystem is in sync with the database state - */ - awaitInitialBackup(): Promise; - cancel(): Promise; -} -export declare function backup(this: RxDatabase, options: BackupOptions): RxBackupState; -export * from './file-util.ts'; -export declare const RxDBBackupPlugin: RxPlugin; diff --git a/dist/types/plugins/cleanup/cleanup-helper.d.ts b/dist/types/plugins/cleanup/cleanup-helper.d.ts deleted file mode 100644 index 709cef5df28..00000000000 --- a/dist/types/plugins/cleanup/cleanup-helper.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import type { RxCleanupPolicy } from '../../types/index.d.ts'; -export declare const DEFAULT_CLEANUP_POLICY: RxCleanupPolicy; diff --git a/dist/types/plugins/cleanup/cleanup-state.d.ts b/dist/types/plugins/cleanup/cleanup-state.d.ts deleted file mode 100644 index c20f3738873..00000000000 --- a/dist/types/plugins/cleanup/cleanup-state.d.ts +++ /dev/null @@ -1,11 +0,0 @@ -import type { RxCleanupPolicy, RxState } from '../../types/index.d.ts'; -export declare function startCleanupForRxState(state: RxState): Promise; -/** - * Runs the cleanup for a single RxState - */ -export declare function cleanupRxState(state: RxState, cleanupPolicy: RxCleanupPolicy): Promise; -/** - * TODO this is not waiting for writes! - * it just runs on interval. 
- */ -export declare function runCleanupAfterWrite(state: RxState, cleanupPolicy: RxCleanupPolicy): Promise; diff --git a/dist/types/plugins/cleanup/cleanup.d.ts b/dist/types/plugins/cleanup/cleanup.d.ts deleted file mode 100644 index cf31209ef4e..00000000000 --- a/dist/types/plugins/cleanup/cleanup.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -import type { RxCleanupPolicy, RxCollection } from '../../types/index.d.ts'; -export declare function startCleanupForRxCollection(rxCollection: RxCollection): Promise; -export declare function initialCleanupWait(collection: RxCollection, cleanupPolicy: RxCleanupPolicy): Promise; -/** - * Runs the cleanup for a single RxCollection - */ -export declare function cleanupRxCollection(rxCollection: RxCollection, cleanupPolicy: RxCleanupPolicy): Promise; -/** - * TODO this is not waiting for deletes! - * it just runs on interval. - */ -export declare function runCleanupAfterDelete(rxCollection: RxCollection, cleanupPolicy: RxCleanupPolicy): Promise; diff --git a/dist/types/plugins/cleanup/index.d.ts b/dist/types/plugins/cleanup/index.d.ts deleted file mode 100644 index 8e33ab8273b..00000000000 --- a/dist/types/plugins/cleanup/index.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -import type { RxPlugin } from '../../types/index.d.ts'; -export declare const RxDBCleanupPlugin: RxPlugin; -export * from './cleanup.ts'; diff --git a/dist/types/plugins/crdt/index.d.ts b/dist/types/plugins/crdt/index.d.ts deleted file mode 100644 index fc4989f82eb..00000000000 --- a/dist/types/plugins/crdt/index.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -import type { CRDTDocumentField, CRDTEntry, CRDTOperation, HashFunction, JsonSchema, RxConflictHandler, RxDocument, RxDocumentData, RxJsonSchema, RxPlugin, WithDeleted } from '../../types/index.d.ts'; -import { RxCollection } from '../../index.ts'; -export declare function updateCRDT(this: RxDocument, entry: CRDTEntry | CRDTEntry[]): Promise>; -export declare function insertCRDT(this: RxCollection, entry: CRDTEntry | CRDTEntry[]): 
Promise | RxDocument>; -export declare function sortOperationComparator(a: CRDTOperation, b: CRDTOperation): 1 | -1; -export declare function hashCRDTOperations(hashFunction: HashFunction, crdts: CRDTDocumentField): Promise; -export declare function getCRDTSchemaPart(): JsonSchema>; -export declare function mergeCRDTFields(hashFunction: HashFunction, crdtsA: CRDTDocumentField, crdtsB: CRDTDocumentField): Promise>; -export declare function rebuildFromCRDT(schema: RxJsonSchema>, docData: WithDeleted | RxDocType, crdts: CRDTDocumentField): WithDeleted; -export declare function getCRDTConflictHandler(hashFunction: HashFunction, schema: RxJsonSchema>): RxConflictHandler; -export declare const RX_CRDT_CONTEXT = "rx-crdt"; -export declare const RxDBcrdtPlugin: RxPlugin; diff --git a/dist/types/plugins/dev-mode/check-document.d.ts b/dist/types/plugins/dev-mode/check-document.d.ts deleted file mode 100644 index 305a9eab2fd..00000000000 --- a/dist/types/plugins/dev-mode/check-document.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -import type { BulkWriteRow, RxDocumentData, RxStorageInstance } from '../../types/index.d.ts'; -export declare function ensurePrimaryKeyValid(primaryKey: string, docData: RxDocumentData): void; -/** - * Deeply checks if the object contains an - * instance of the JavaScript Date class. 
- * @recursive - */ -export declare function containsDateInstance(obj: any): boolean; -export declare function checkWriteRows(storageInstance: RxStorageInstance, rows: BulkWriteRow[]): void; diff --git a/dist/types/plugins/dev-mode/check-migration-strategies.d.ts b/dist/types/plugins/dev-mode/check-migration-strategies.d.ts deleted file mode 100644 index 06757d0cc0d..00000000000 --- a/dist/types/plugins/dev-mode/check-migration-strategies.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -import type { RxJsonSchema, NumberFunctionMap } from '../../types/index.d.ts'; -/** - * checks if the migrationStrategies are ok, throws if not - * @throws {Error|TypeError} if not ok - */ -export declare function checkMigrationStrategies(schema: RxJsonSchema, migrationStrategies: NumberFunctionMap): boolean; diff --git a/dist/types/plugins/dev-mode/check-orm.d.ts b/dist/types/plugins/dev-mode/check-orm.d.ts deleted file mode 100644 index 7bd5d3fb74d..00000000000 --- a/dist/types/plugins/dev-mode/check-orm.d.ts +++ /dev/null @@ -1,7 +0,0 @@ -import type { KeyFunctionMap, RxJsonSchema } from '../../types/index.d.ts'; -/** - * checks if the given static methods are allowed - * @throws if not allowed - */ -export declare function checkOrmMethods(statics?: KeyFunctionMap): void; -export declare function checkOrmDocumentMethods(schema: RxJsonSchema, methods?: any): void; diff --git a/dist/types/plugins/dev-mode/check-query.d.ts b/dist/types/plugins/dev-mode/check-query.d.ts deleted file mode 100644 index b7e34326565..00000000000 --- a/dist/types/plugins/dev-mode/check-query.d.ts +++ /dev/null @@ -1,14 +0,0 @@ -import type { RxPluginPreCreateRxQueryArgs, RxPluginPrePrepareQueryArgs, FilledMangoQuery, RxJsonSchema, RxDocumentData } from '../../types/index.d.ts'; -/** - * accidentally passing a non-valid object into the query params - * is very hard to debug especially when queries are observed - * This is why we do some checks here in dev-mode - */ -export declare function checkQuery(args: 
RxPluginPreCreateRxQueryArgs): void; -export declare function checkMangoQuery(args: RxPluginPrePrepareQueryArgs): void; -export declare function areSelectorsSatisfiedByIndex(schema: RxJsonSchema>, query: FilledMangoQuery): boolean; -/** - * Ensures that the selector does not contain any RegExp instance. - * @recursive - */ -export declare function ensureObjectDoesNotContainRegExp(selector: any): void; diff --git a/dist/types/plugins/dev-mode/check-schema.d.ts b/dist/types/plugins/dev-mode/check-schema.d.ts deleted file mode 100644 index 5fb396dd56f..00000000000 --- a/dist/types/plugins/dev-mode/check-schema.d.ts +++ /dev/null @@ -1,18 +0,0 @@ -import type { RxJsonSchema } from '../../types/index.d.ts'; -/** - * checks if the fieldname is allowed - * this makes sure that the fieldnames can be transformed into javascript-vars - * and does not conquer the observe$ and populate_ fields - * @throws {Error} - */ -export declare function checkFieldNameRegex(fieldName: string): void; -/** - * validate that all schema-related things are ok - */ -export declare function validateFieldsDeep(rxJsonSchema: RxJsonSchema): true; -export declare function checkPrimaryKey(jsonSchema: RxJsonSchema): void; -/** - * does the checking - * @throws {Error} if something is not ok - */ -export declare function checkSchema(jsonSchema: RxJsonSchema): void; diff --git a/dist/types/plugins/dev-mode/entity-properties.d.ts b/dist/types/plugins/dev-mode/entity-properties.d.ts deleted file mode 100644 index da060873826..00000000000 --- a/dist/types/plugins/dev-mode/entity-properties.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -export declare function rxCollectionProperties(): string[]; -export declare function rxDatabaseProperties(): string[]; -export declare function rxDocumentProperties(): string[]; diff --git a/dist/types/plugins/dev-mode/error-messages.d.ts b/dist/types/plugins/dev-mode/error-messages.d.ts deleted file mode 100644 index ebc08852533..00000000000 --- 
a/dist/types/plugins/dev-mode/error-messages.d.ts +++ /dev/null @@ -1,180 +0,0 @@ -/** - * this plugin adds the error-messages - * without it, only error-codes will be shown - * This is mainly because error-string are hard to compress and we need a smaller build - */ -export declare const ERROR_MESSAGES: { - UT1: string; - UT2: string; - UT3: string; - UT4: string; - UT5: string; - UT6: string; - UT7: string; - PL1: string; - PL3: string; - P2: string; - QU1: string; - QU4: string; - QU5: string; - QU6: string; - QU9: string; - QU10: string; - QU11: string; - QU12: string; - QU13: string; - QU14: string; - QU15: string; - QU16: string; - MQ1: string; - MQ2: string; - MQ3: string; - MQ4: string; - MQ5: string; - MQ6: string; - MQ7: string; - MQ8: string; - DB1: string; - DB2: string; - DB3: string; - DB4: string; - DB5: string; - DB6: string; - DB8: string; - DB11: string; - DB12: string; - DB13: string; - DB14: string; - COL1: string; - COL2: string; - COL3: string; - COL4: string; - COL5: string; - COL6: string; - COL7: string; - COL8: string; - COL9: string; - COL10: string; - COL11: string; - COL12: string; - COL13: string; - COL14: string; - COL15: string; - COL16: string; - COL17: string; - COL18: string; - COL20: string; - COL21: string; - CONFLICT: string; - DOC1: string; - DOC2: string; - DOC3: string; - DOC4: string; - DOC5: string; - DOC6: string; - DOC7: string; - DOC8: string; - DOC9: string; - DOC10: string; - DOC11: string; - DOC13: string; - DOC14: string; - DOC15: string; - DOC16: string; - DOC17: string; - DOC18: string; - DOC19: string; - DOC20: string; - DOC21: string; - DOC22: string; - DOC23: string; - DOC24: string; - DM1: string; - DM2: string; - DM3: string; - DM4: string; - DM5: string; - AT1: string; - EN1: string; - EN2: string; - EN3: string; - EN4: string; - JD1: string; - JD2: string; - JD3: string; - LD1: string; - LD2: string; - LD3: string; - LD4: string; - LD5: string; - LD6: string; - LD7: string; - LD8: string; - RC1: string; - 
RC2: string; - RC4: string; - RC5: string; - RC6: string; - RC7: string; - RC_PULL: string; - RC_STREAM: string; - RC_PUSH: string; - RC_PUSH_NO_AR: string; - RC_WEBRTC_PEER: string; - RC_COUCHDB_1: string; - RC_COUCHDB_2: string; - RC_OUTDATED: string; - RC_UNAUTHORIZED: string; - RC_FORBIDDEN: string; - SC1: string; - SC2: string; - SC3: string; - SC4: string; - SC6: string; - SC7: string; - SC8: string; - SC10: string; - SC11: string; - SC13: string; - SC14: string; - SC15: string; - SC16: string; - SC17: string; - SC18: string; - SC19: string; - SC20: string; - SC21: string; - SC22: string; - SC23: string; - SC24: string; - SC25: string; - SC26: string; - SC27: string; - SC28: string; - SC29: string; - SC30: string; - SC32: string; - SC33: string; - SC34: string; - SC35: string; - SC36: string; - SC37: string; - SC38: string; - SC39: string; - SC40: string; - SC41: string; - VD1: string; - VD2: string; - S1: string; - GQL1: string; - GQL3: string; - CRDT1: string; - CRDT2: string; - CRDT3: string; - /** - * Should never be thrown, use this for - * null checks etc. so you do not have to increase the - * build size with error message strings. - */ - SNH: string; -}; diff --git a/dist/types/plugins/dev-mode/index.d.ts b/dist/types/plugins/dev-mode/index.d.ts deleted file mode 100644 index 19bc68f95f7..00000000000 --- a/dist/types/plugins/dev-mode/index.d.ts +++ /dev/null @@ -1,19 +0,0 @@ -import type { RxPlugin } from '../../types/index.d.ts'; -import { DeepReadonly } from '../../types/util.ts'; -export * from './check-schema.ts'; -export * from './unallowed-properties.ts'; -export * from './check-query.ts'; -/** - * Suppresses the warning message shown in the console, typically invoked once the developer (hello!) - * has acknowledged it. - */ -export declare function disableWarnings(): void; -/** - * Deep freezes and object when in dev-mode. - * Deep-Freezing has the same performance as deep-cloning, so we only do that in dev-mode. 
- * Also we can ensure the readonly state via typescript - * @link https://developer.mozilla.org/de/docs/Web/JavaScript/Reference/Global_Objects/Object/freeze - */ -export declare function deepFreezeWhenDevMode(obj: T): DeepReadonly; -export declare const DEV_MODE_PLUGIN_NAME = "dev-mode"; -export declare const RxDBDevModePlugin: RxPlugin; diff --git a/dist/types/plugins/dev-mode/unallowed-properties.d.ts b/dist/types/plugins/dev-mode/unallowed-properties.d.ts deleted file mode 100644 index 2dc9676b59b..00000000000 --- a/dist/types/plugins/dev-mode/unallowed-properties.d.ts +++ /dev/null @@ -1,20 +0,0 @@ -import type { RxCollectionCreator, RxDatabaseCreator } from '../../types/index.d.ts'; -/** - * if the name of a collection - * clashes with a property of RxDatabase, - * we get problems so this function prohibits this - */ -export declare function ensureCollectionNameValid(args: RxCollectionCreator & { - name: string; -}): void; -export declare function ensureDatabaseNameIsValid(args: RxDatabaseCreator): void; -/** - * Validates that a given string is ok to be used with couchdb-collection-names. - * We only allow these strings as database- or collection names because it ensures - * that you later do not get in trouble when you want to use the database together witch couchdb. 
- * - * @link https://docs.couchdb.org/en/stable/api/database/common.html - * @link https://neighbourhood.ie/blog/2020/10/13/everything-you-need-to-know-about-couchdb-database-names/ - * @throws {RxError} - */ -export declare function validateDatabaseName(name: string): true; diff --git a/dist/types/plugins/electron/electron-helper.d.ts b/dist/types/plugins/electron/electron-helper.d.ts deleted file mode 100644 index 8e3840863c0..00000000000 --- a/dist/types/plugins/electron/electron-helper.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export declare const IPC_RENDERER_KEY_PREFIX = "rxdb-ipc-renderer-storage"; -export declare const IPC_RENDERER_TO_MAIN = "rxdb-renderer-to-main"; diff --git a/dist/types/plugins/electron/index.d.ts b/dist/types/plugins/electron/index.d.ts deleted file mode 100644 index 242fec4db04..00000000000 --- a/dist/types/plugins/electron/index.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -export * from './rx-storage-ipc-renderer.ts'; -export * from './rx-storage-ipc-main.ts'; -export * from './electron-helper.ts'; diff --git a/dist/types/plugins/electron/rx-storage-ipc-main.d.ts b/dist/types/plugins/electron/rx-storage-ipc-main.d.ts deleted file mode 100644 index a32b0b78de4..00000000000 --- a/dist/types/plugins/electron/rx-storage-ipc-main.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -/** - * This file contains everything - * that is supposed to run inside of the electron main process - */ -import type { RxStorage } from '../../types/index.d.ts'; -export declare function exposeIpcMainRxStorage(args: { - key: string; - storage: RxStorage; - ipcMain: any; -}): void; diff --git a/dist/types/plugins/electron/rx-storage-ipc-renderer.d.ts b/dist/types/plugins/electron/rx-storage-ipc-renderer.d.ts deleted file mode 100644 index b385be1da7a..00000000000 --- a/dist/types/plugins/electron/rx-storage-ipc-renderer.d.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { RxStorageRemote, RxStorageRemoteSettings } from '../storage-remote/index.ts'; -export type RxStorageIpcRendererSettings = { - 
/** - * Set the same key on both sides - * to ensure that messages do not get mixed - * up when you use more then one storage. - */ - key: string; - ipcRenderer: any; - mode: RxStorageRemoteSettings['mode']; -}; -export type RxStorageIpcRenderer = RxStorageRemote; -export declare function getRxStorageIpcRenderer(settings: RxStorageIpcRendererSettings): RxStorageIpcRenderer; diff --git a/dist/types/plugins/encryption-crypto-js/index.d.ts b/dist/types/plugins/encryption-crypto-js/index.d.ts deleted file mode 100644 index daddfb4ff88..00000000000 --- a/dist/types/plugins/encryption-crypto-js/index.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -import type { InternalStoreDocType, RxStorage } from '../../types/index.d.ts'; -export declare const MINIMUM_PASSWORD_LENGTH: 8; -export declare function encryptString(value: string, password: string): string; -export declare function decryptString(cipherText: string, password: any): string; -export type InternalStorePasswordDocType = InternalStoreDocType<{ - hash: string; -}>; -export declare function wrappedKeyEncryptionCryptoJsStorage(args: { - storage: RxStorage; -}): RxStorage; diff --git a/dist/types/plugins/flutter/index.d.ts b/dist/types/plugins/flutter/index.d.ts deleted file mode 100644 index 1410a538b71..00000000000 --- a/dist/types/plugins/flutter/index.d.ts +++ /dev/null @@ -1,11 +0,0 @@ -import type { RxDatabase } from '../../types/index.d.ts'; -export type CreateRxDatabaseFunctionType = (databaseName: string) => Promise; -export declare function setFlutterRxDatabaseConnector(createDB: CreateRxDatabaseFunctionType): void; -/** - * Create a simple lokijs adapter so that we can persist string via flutter - * @link https://github.com/techfort/LokiJS/blob/master/tutorials/Persistence%20Adapters.md#creating-your-own-basic-persistence-adapter - */ -export declare function getLokijsAdapterFlutter(): { - loadDatabase(databaseName: string, callback: (v: string | Error) => {}): Promise; - saveDatabase(databaseName: string, dbstring: 
string, callback: (v: string | Error | null) => {}): Promise; -}; diff --git a/dist/types/plugins/json-dump/index.d.ts b/dist/types/plugins/json-dump/index.d.ts deleted file mode 100644 index cf3eb30d82f..00000000000 --- a/dist/types/plugins/json-dump/index.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import type { RxPlugin } from '../../types/index.d.ts'; -export declare const RxDBJsonDumpPlugin: RxPlugin; diff --git a/dist/types/plugins/key-compression/index.d.ts b/dist/types/plugins/key-compression/index.d.ts deleted file mode 100644 index 242edd48787..00000000000 --- a/dist/types/plugins/key-compression/index.d.ts +++ /dev/null @@ -1,18 +0,0 @@ -/** - * this plugin adds the keycompression-capabilities to rxdb - * if you don't use this, ensure that you set disableKeyCompression to false in your schema - */ -import { CompressionTable } from 'jsonschema-key-compression'; -import type { RxJsonSchema, RxStorage, RxDocumentData } from '../../types/index.d.ts'; -declare type CompressionState = { - table: CompressionTable; - schema: RxJsonSchema; - compressedSchema: RxJsonSchema; -}; -export declare function getCompressionStateByRxJsonSchema(schema: RxJsonSchema): CompressionState; -export declare function wrappedKeyCompressionStorage(args: { - storage: RxStorage; -}): RxStorage; -export declare function compressDocumentData(compressionState: CompressionState, docData: RxDocumentData): RxDocumentData; -export declare function decompressDocumentData(compressionState: CompressionState, docData: RxDocumentData): RxDocumentData; -export {}; diff --git a/dist/types/plugins/leader-election/index.d.ts b/dist/types/plugins/leader-election/index.d.ts deleted file mode 100644 index 7d9ecd3d4e3..00000000000 --- a/dist/types/plugins/leader-election/index.d.ts +++ /dev/null @@ -1,25 +0,0 @@ -/** - * this plugin adds the leader-election-capabilities to rxdb - */ -import { LeaderElector, BroadcastChannel } from 'broadcast-channel'; -import type { RxDatabase, RxPlugin } from 
'../../types/index.d.ts'; -/** - * Returns the leader elector of a broadcast channel. - * Used to ensure we reuse the same elector for the channel each time. - */ -export declare function getLeaderElectorByBroadcastChannel(broadcastChannel: BroadcastChannel): LeaderElector; -/** - * @overwrites RxDatabase().leaderElector for caching - */ -export declare function getForDatabase(this: RxDatabase): LeaderElector; -export declare function isLeader(this: RxDatabase): boolean; -export declare function waitForLeadership(this: RxDatabase): Promise; -/** - * runs when the database gets destroyed - */ -export declare function onDestroy(db: RxDatabase): void; -export declare const rxdb = true; -export declare const prototypes: { - RxDatabase: (proto: any) => void; -}; -export declare const RxDBLeaderElectionPlugin: RxPlugin; diff --git a/dist/types/plugins/local-documents/index.d.ts b/dist/types/plugins/local-documents/index.d.ts deleted file mode 100644 index 402a5f99421..00000000000 --- a/dist/types/plugins/local-documents/index.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -import type { RxPlugin } from '../../types/index.d.ts'; -export * from './local-documents-helper.ts'; -export * from './local-documents.ts'; -export * from './rx-local-document.ts'; -export type { LocalDocumentParent, LocalDocumentState, RxLocalDocument, RxLocalDocumentData } from '../../types/plugins/local-documents.d.ts'; -export declare const RxDBLocalDocumentsPlugin: RxPlugin; diff --git a/dist/types/plugins/local-documents/local-documents-helper.d.ts b/dist/types/plugins/local-documents/local-documents-helper.d.ts deleted file mode 100644 index 0ca29fc33a8..00000000000 --- a/dist/types/plugins/local-documents/local-documents-helper.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -import type { LocalDocumentParent, LocalDocumentState, RxDocumentData, RxJsonSchema, RxLocalDocumentData, RxStorage } from '../../types/index.d.ts'; -export declare const LOCAL_DOC_STATE_BY_PARENT: WeakMap>; -export declare const 
LOCAL_DOC_STATE_BY_PARENT_RESOLVED: WeakMap; -export declare function createLocalDocStateByParent(parent: LocalDocumentParent): void; -export declare function getLocalDocStateByParent(parent: LocalDocumentParent): Promise; -export declare function createLocalDocumentStorageInstance(databaseInstanceToken: string, storage: RxStorage, databaseName: string, collectionName: string, instanceCreationOptions: any, multiInstance: boolean): Promise>; -export declare function closeStateByParent(parent: LocalDocumentParent): Promise | undefined; -export declare function removeLocalDocumentsStorageInstance(storage: RxStorage, databaseName: string, collectionName: string): Promise; -export declare function getCollectionLocalInstanceName(collectionName: string): string; -export declare const RX_LOCAL_DOCUMENT_SCHEMA: RxJsonSchema>; diff --git a/dist/types/plugins/local-documents/local-documents.d.ts b/dist/types/plugins/local-documents/local-documents.d.ts deleted file mode 100644 index 849b3144ec2..00000000000 --- a/dist/types/plugins/local-documents/local-documents.d.ts +++ /dev/null @@ -1,14 +0,0 @@ -import type { RxCollection, RxDatabase, RxLocalDocument } from '../../types/index.d.ts'; -import { Observable } from 'rxjs'; -/** - * save the local-document-data - * throws if already exists - */ -export declare function insertLocal = any, Reactivity = unknown>(this: RxDatabase | RxCollection, id: string, data: DocData): Promise>; -/** - * save the local-document-data - * overwrites existing if exists - */ -export declare function upsertLocal = any, Reactivity = unknown>(this: any, id: string, data: DocData): Promise>; -export declare function getLocal(this: any, id: string): Promise | null>; -export declare function getLocal$(this: RxCollection, id: string): Observable | null>; diff --git a/dist/types/plugins/local-documents/rx-local-document.d.ts b/dist/types/plugins/local-documents/rx-local-document.d.ts deleted file mode 100644 index 58c6f1451d6..00000000000 --- 
a/dist/types/plugins/local-documents/rx-local-document.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -import type { RxCollection, RxDatabase, RxDocumentData, RxLocalDocument, RxLocalDocumentData } from '../../types/index.d.ts'; -declare const RxDocumentParent: any; -declare class RxLocalDocumentClass extends RxDocumentParent { - readonly id: string; - readonly parent: RxCollection | RxDatabase; - constructor(id: string, jsonData: DocData, parent: RxCollection | RxDatabase); -} -export declare function createRxLocalDocument(data: RxDocumentData>, parent: any): RxLocalDocument; -export declare function getRxDatabaseFromLocalDocument(doc: RxLocalDocument | RxLocalDocumentClass): any; -export {}; diff --git a/dist/types/plugins/migration-schema/index.d.ts b/dist/types/plugins/migration-schema/index.d.ts deleted file mode 100644 index bafaab18572..00000000000 --- a/dist/types/plugins/migration-schema/index.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -import type { RxPlugin, RxCollection } from '../../types/index.ts'; -import { RxMigrationState } from './rx-migration-state.ts'; -export declare const DATA_MIGRATOR_BY_COLLECTION: WeakMap; -export declare const RxDBMigrationPlugin: RxPlugin; -export declare const RxDBMigrationSchemaPlugin: RxPlugin; -export * from './rx-migration-state.ts'; -export * from './migration-helpers.ts'; -export * from './migration-types.ts'; diff --git a/dist/types/plugins/migration-schema/migration-helpers.d.ts b/dist/types/plugins/migration-schema/migration-helpers.d.ts deleted file mode 100644 index 2d603264eb3..00000000000 --- a/dist/types/plugins/migration-schema/migration-helpers.d.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { BehaviorSubject } from 'rxjs'; -import type { InternalStoreCollectionDocType, RxCollection, RxDatabase, RxDocumentData } from '../../types/index.d.ts'; -import { RxMigrationState } from './rx-migration-state.ts'; -export declare function getOldCollectionMeta(migrationState: RxMigrationState): Promise>; -/** - * runs the doc-data through 
all following migrationStrategies - * so it will match the newest schema. - * @throws Error if final doc does not match final schema or migrationStrategy crashes - * @return final object or null if migrationStrategy deleted it - */ -export declare function migrateDocumentData(collection: RxCollection, docSchemaVersion: number, docData: any): Promise; -export declare function runStrategyIfNotNull(collection: RxCollection, version: number, docOrNull: any | null): Promise; -/** - * returns true if a migration is needed - */ -export declare function mustMigrate(migrationState: RxMigrationState): Promise; -export declare const MIGRATION_DEFAULT_BATCH_SIZE = 200; -export type MigrationStateWithCollection = { - collection: RxCollection; - migrationState: RxMigrationState; -}; -export declare const DATA_MIGRATION_STATE_SUBJECT_BY_DATABASE: WeakMap>; -export declare function addMigrationStateToDatabase(migrationState: RxMigrationState): void; -export declare function getMigrationStateByDatabase(database: RxDatabase): BehaviorSubject; -/** - * Complete on database destroy - * so people do not have to unsubscribe - */ -export declare function onDatabaseDestroy(database: RxDatabase): void; diff --git a/dist/types/plugins/migration-schema/migration-types.d.ts b/dist/types/plugins/migration-schema/migration-types.d.ts deleted file mode 100644 index bbc8eaaac99..00000000000 --- a/dist/types/plugins/migration-schema/migration-types.d.ts +++ /dev/null @@ -1,33 +0,0 @@ -import type { InternalStoreDocType, PlainJsonError } from '../../types/index.d.ts'; -export type RxMigrationStatus = { - collectionName: string; - status: 'RUNNING' | 'DONE' | 'ERROR'; - error?: PlainJsonError; - /** - * Counters so that you can display - * the migration state to your user in the UI - * and show a loading bar. 
- */ - count: { - /** - * Total amount of documents that - * have to be migrated - */ - total: number; - /** - * Amount of documents that have been migrated already - * = success + purged - */ - handled: number; - /** - * Total percentage [0-100] - */ - percent: number; - }; -}; -/** - * To be shared between browser tabs, - * the migration status is written into a document in the internal storage of the database. - */ -export type RxMigrationStatusDocument = InternalStoreDocType; -export type MigrationStatusUpdate = (before: RxMigrationStatus) => RxMigrationStatus; diff --git a/dist/types/plugins/migration-schema/rx-migration-state.d.ts b/dist/types/plugins/migration-schema/rx-migration-state.d.ts deleted file mode 100644 index e0276c8aef0..00000000000 --- a/dist/types/plugins/migration-schema/rx-migration-state.d.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { Observable } from 'rxjs'; -import type { NumberFunctionMap, RxCollection, RxDatabase, RxStorageInstance } from '../../types/index.d.ts'; -import { getOldCollectionMeta, mustMigrate } from './migration-helpers.ts'; -import type { MigrationStatusUpdate, RxMigrationStatus } from './migration-types.ts'; -export declare class RxMigrationState { - readonly collection: RxCollection; - readonly migrationStrategies: NumberFunctionMap; - readonly statusDocKey: string; - database: RxDatabase; - private started; - readonly oldCollectionMeta: ReturnType; - readonly mustMigrate: ReturnType; - readonly statusDocId: string; - readonly $: Observable; - constructor(collection: RxCollection, migrationStrategies: NumberFunctionMap, statusDocKey?: string); - getStatus(): Promise; - /** - * Starts the migration. - * Returns void so that people to not get the idea to await - * this function. - * Instead use migratePromise() if you want to await - * the migration. This ensures it works even if the migration - * is run on a different browser tab. 
- */ - startMigration(batchSize?: number): Promise; - updateStatusHandlers: MigrationStatusUpdate[]; - updateStatusQueue: Promise; - updateStatus(handler: MigrationStatusUpdate): Promise; - migrateStorage(oldStorage: RxStorageInstance, newStorage: RxStorageInstance, batchSize: number): Promise; - countAllDoucments(storageInstances: RxStorageInstance[]): Promise; - getConnectedStorageInstances(): Promise<{ - oldStorage: RxStorageInstance; - newStorage: RxStorageInstance; - }[]>; - migratePromise(batchSize?: number): Promise; -} diff --git a/dist/types/plugins/migration-storage/index.d.ts b/dist/types/plugins/migration-storage/index.d.ts deleted file mode 100644 index 58c3a7aee83..00000000000 --- a/dist/types/plugins/migration-storage/index.d.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { RxDatabase, RxCollection, BulkWriteRow, RxStorageBulkWriteResponse, RxStorage } from '../../index.ts'; -export type RxStorageOld = RxStorage | any; -export type AfterMigrateBatchHandlerInput = { - databaseName: string; - collectionName: string; - oldDatabaseName: string; - insertToNewWriteRows: BulkWriteRow[]; - writeToNewResult: RxStorageBulkWriteResponse; -}; -export type AfterMigrateBatchHandler = (input: AfterMigrateBatchHandlerInput) => any | Promise; -export type MigrateStorageParams = { - database: RxDatabase; - /** - * Using the migration plugin requires you - * to rename your new old database. - * The original name of the v11 database must be provided here. - */ - oldDatabaseName: string; - oldStorage: RxStorageOld; - batchSize?: number; - parallel?: boolean; - afterMigrateBatch?: AfterMigrateBatchHandler; - logFunction?: (message: string) => void; -}; -/** - * Migrates collections of RxDB version A and puts them - * into a RxDatabase that is created with version B. - * This function only works from the previous major version upwards. - * Do not use it to migrate like rxdb v9 to v14. 
- */ -export declare function migrateStorage(params: MigrateStorageParams): Promise; -export declare function migrateCollection(collection: RxCollection, oldDatabaseName: string, oldStorage: RxStorageOld, batchSize: number, afterMigrateBatch?: AfterMigrateBatchHandler, logFunction?: (message: string) => void): Promise; diff --git a/dist/types/plugins/query-builder/index.d.ts b/dist/types/plugins/query-builder/index.d.ts deleted file mode 100644 index 9f00e54eef6..00000000000 --- a/dist/types/plugins/query-builder/index.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -import type { RxPlugin, RxQuery } from '../../types/index.d.ts'; -export declare function runBuildingStep(rxQuery: RxQuery, functionName: string, value: any): RxQuery; -export declare function applyBuildingStep(proto: any, functionName: string): void; -export * from './mquery/nosql-query-builder.ts'; -export declare const RxDBQueryBuilderPlugin: RxPlugin; diff --git a/dist/types/plugins/query-builder/mquery/mquery-utils.d.ts b/dist/types/plugins/query-builder/mquery/mquery-utils.d.ts deleted file mode 100644 index 59e89208cca..00000000000 --- a/dist/types/plugins/query-builder/mquery/mquery-utils.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -/** - * this is copied from - * @link https://github.com/aheckmann/mquery/blob/master/lib/utils.js - */ -/** - * Merges 'from' into 'to' without overwriting existing properties. - */ -export declare function merge(to: any, from: any): any; -/** - * Determines if `arg` is an object. 
- */ -export declare function isObject(arg: Object | any[] | String | Function | RegExp | any): boolean; diff --git a/dist/types/plugins/query-builder/mquery/nosql-query-builder.d.ts b/dist/types/plugins/query-builder/mquery/nosql-query-builder.d.ts deleted file mode 100644 index 6fddebed962..00000000000 --- a/dist/types/plugins/query-builder/mquery/nosql-query-builder.d.ts +++ /dev/null @@ -1,165 +0,0 @@ -import type { MangoQuery, MangoQuerySelector, MangoQuerySortPart } from '../../../types/index.d.ts'; -declare type MQueryOptions = { - limit?: number; - skip?: number; - sort?: any; -}; -export declare class NoSqlQueryBuilderClass { - _path?: any; - options: MQueryOptions; - _conditions: MangoQuerySelector; - _fields: any; - private _distinct; - /** - * MQuery constructor used for building queries. - * - * ####Example: - * var query = new MQuery({ name: 'mquery' }); - * query.where('age').gte(21).exec(callback); - * - */ - constructor(mangoQuery?: MangoQuery, _path?: any); - /** - * Specifies a `path` for use with chaining. - */ - where(_path: string, _val?: MangoQuerySelector): NoSqlQueryBuilder; - /** - * Specifies the complementary comparison value for paths specified with `where()` - * ####Example - * User.where('age').equals(49); - */ - equals(val: any): NoSqlQueryBuilder; - /** - * Specifies the complementary comparison value for paths specified with `where()` - * This is alias of `equals` - */ - eq(val: any): NoSqlQueryBuilder; - /** - * Specifies arguments for an `$or` condition. - * ####Example - * query.or([{ color: 'red' }, { status: 'emergency' }]) - */ - or(array: any[]): NoSqlQueryBuilder; - /** - * Specifies arguments for a `$nor` condition. - * ####Example - * query.nor([{ color: 'green' }, { status: 'ok' }]) - */ - nor(array: any[]): NoSqlQueryBuilder; - /** - * Specifies arguments for a `$and` condition. 
- * ####Example - * query.and([{ color: 'green' }, { status: 'ok' }]) - * @see $and http://docs.mongodb.org/manual/reference/operator/and/ - */ - and(array: any[]): NoSqlQueryBuilder; - /** - * Specifies a `$mod` condition - */ - mod(_path: string, _val: number): NoSqlQueryBuilder; - /** - * Specifies an `$exists` condition - * ####Example - * // { name: { $exists: true }} - * Thing.where('name').exists() - * Thing.where('name').exists(true) - * Thing.find().exists('name') - */ - exists(_path: string, _val: number): NoSqlQueryBuilder; - /** - * Specifies an `$elemMatch` condition - * ####Example - * query.elemMatch('comment', { author: 'autobot', votes: {$gte: 5}}) - * query.where('comment').elemMatch({ author: 'autobot', votes: {$gte: 5}}) - * query.elemMatch('comment', function (elem) { - * elem.where('author').equals('autobot'); - * elem.where('votes').gte(5); - * }) - * query.where('comment').elemMatch(function (elem) { - * elem.where({ author: 'autobot' }); - * elem.where('votes').gte(5); - * }) - */ - elemMatch(_path: string, _criteria: any): NoSqlQueryBuilder; - /** - * Sets the sort order - * If an object is passed, values allowed are 'asc', 'desc', 'ascending', 'descending', 1, and -1. - * If a string is passed, it must be a space delimited list of path names. - * The sort order of each path is ascending unless the path name is prefixed with `-` which will be treated as descending. - * ####Example - * query.sort({ field: 'asc', test: -1 }); - * query.sort('field -test'); - * query.sort([['field', 1], ['test', -1]]); - */ - sort(arg: any): NoSqlQueryBuilder; - /** - * Merges another MQuery or conditions object into this one. - * - * When a MQuery is passed, conditions, field selection and options are merged. - * - */ - merge(source: any): NoSqlQueryBuilder; - /** - * Finds documents. - * ####Example - * query.find() - * query.find({ name: 'Burning Lights' }) - */ - find(criteria: any): NoSqlQueryBuilder; - /** - * Make sure _path is set. 
- * - * @param {String} method - */ - _ensurePath(method: any): void; - toJSON(): { - query: MangoQuery; - path?: string; - }; -} -export declare function mQuerySortToRxDBSort(sort: { - [k: string]: 1 | -1; -}): MangoQuerySortPart[]; -/** - * Because some prototype-methods are generated, - * we have to define the type of NoSqlQueryBuilder here - */ -export interface NoSqlQueryBuilder extends NoSqlQueryBuilderClass { - maxScan: ReturnSelfNumberFunction; - batchSize: ReturnSelfNumberFunction; - limit: ReturnSelfNumberFunction; - skip: ReturnSelfNumberFunction; - comment: ReturnSelfFunction; - gt: ReturnSelfFunction; - gte: ReturnSelfFunction; - lt: ReturnSelfFunction; - lte: ReturnSelfFunction; - ne: ReturnSelfFunction; - in: ReturnSelfFunction; - nin: ReturnSelfFunction; - all: ReturnSelfFunction; - regex: ReturnSelfFunction; - size: ReturnSelfFunction; -} -declare type ReturnSelfFunction = (v: any) => NoSqlQueryBuilder; -declare type ReturnSelfNumberFunction = (v: number | null) => NoSqlQueryBuilder; -/** - * limit, skip, maxScan, batchSize, comment - * - * Sets these associated options. 
- * - * query.comment('feed query'); - */ -export declare const OTHER_MANGO_ATTRIBUTES: string[]; -/** - * gt, gte, lt, lte, ne, in, nin, all, regex, size, maxDistance - * - * Thing.where('type').nin(array) - */ -export declare const OTHER_MANGO_OPERATORS: string[]; -/** - * Determines if `conds` can be merged using `mquery().merge()` - */ -export declare function canMerge(conds: any): boolean; -export declare function createQueryBuilder(query?: MangoQuery, path?: any): NoSqlQueryBuilder; -export {}; diff --git a/dist/types/plugins/replication-couchdb/couchdb-helper.d.ts b/dist/types/plugins/replication-couchdb/couchdb-helper.d.ts deleted file mode 100644 index 99bb2622768..00000000000 --- a/dist/types/plugins/replication-couchdb/couchdb-helper.d.ts +++ /dev/null @@ -1,19 +0,0 @@ -import type { RxDocumentData, StringKeys, WithDeleted } from '../../types/index.d.ts'; -import { URLQueryParams } from './couchdb-types.ts'; -export declare const COUCHDB_NEW_REPLICATION_PLUGIN_IDENTITY_PREFIX = "couchdb"; -export declare function mergeUrlQueryParams(params: URLQueryParams): string; -export declare function couchDBDocToRxDocData(primaryPath: string, couchDocData: any): WithDeleted; -export declare function couchSwapIdToPrimary(primaryKey: StringKeys>, docData: any): any; -/** - * Swaps the primaryKey of the document - * to the _id property. 
- */ -export declare function couchSwapPrimaryToId(primaryKey: StringKeys>, docData: any): RxDocType & { - _id: string; -}; -export declare function getDefaultFetch(): typeof fetch; -/** - * Returns a fetch handler that contains the username and password - * in the Authorization header - */ -export declare function getFetchWithCouchDBAuthorization(username: string, password: string): typeof fetch; diff --git a/dist/types/plugins/replication-couchdb/couchdb-types.d.ts b/dist/types/plugins/replication-couchdb/couchdb-types.d.ts deleted file mode 100644 index f23eec9d56f..00000000000 --- a/dist/types/plugins/replication-couchdb/couchdb-types.d.ts +++ /dev/null @@ -1,22 +0,0 @@ -import type { ById, ReplicationOptions, ReplicationPullOptions, ReplicationPushOptions } from '../../types/index.d.ts'; -export type CouchDBCheckpointType = { - sequence: number; -}; -export type FetchMethodType = typeof fetch; -export type SyncOptionsCouchDB = Omit, 'pull' | 'push'> & { - url: string; - /** - * Here you can set a custom fetch method - * to use http headers or credentials when doing requests. - */ - fetch?: FetchMethodType; - pull?: Omit, 'handler' | 'stream$'> & { - /** - * Heartbeat time in milliseconds - * for the long polling of the changestream. 
- */ - heartbeat?: number; - }; - push?: Omit, 'handler'>; -}; -export type URLQueryParams = ById; diff --git a/dist/types/plugins/replication-couchdb/index.d.ts b/dist/types/plugins/replication-couchdb/index.d.ts deleted file mode 100644 index c69d445eeb8..00000000000 --- a/dist/types/plugins/replication-couchdb/index.d.ts +++ /dev/null @@ -1,18 +0,0 @@ -import type { RxCollection, ReplicationPullOptions, ReplicationPushOptions } from '../../types/index.d.ts'; -import { RxReplicationState } from '../replication/index.ts'; -import type { CouchDBCheckpointType, FetchMethodType, SyncOptionsCouchDB } from './couchdb-types.ts'; -export * from './couchdb-helper.ts'; -export * from './couchdb-types.ts'; -export declare class RxCouchDBReplicationState extends RxReplicationState { - readonly url: string; - fetch: FetchMethodType; - readonly replicationIdentifier: string; - readonly collection: RxCollection; - readonly pull?: ReplicationPullOptions | undefined; - readonly push?: ReplicationPushOptions | undefined; - readonly live: boolean; - retryTime: number; - autoStart: boolean; - constructor(url: string, fetch: FetchMethodType, replicationIdentifier: string, collection: RxCollection, pull?: ReplicationPullOptions | undefined, push?: ReplicationPushOptions | undefined, live?: boolean, retryTime?: number, autoStart?: boolean); -} -export declare function replicateCouchDB(options: SyncOptionsCouchDB): RxCouchDBReplicationState; diff --git a/dist/types/plugins/replication-firestore/firestore-helper.d.ts b/dist/types/plugins/replication-firestore/firestore-helper.d.ts deleted file mode 100644 index ba3aecbfec9..00000000000 --- a/dist/types/plugins/replication-firestore/firestore-helper.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { QueryDocumentSnapshot, Timestamp } from 'firebase/firestore'; -import type { WithDeleted } from '../../types/index.d.ts'; -import type { GetQuery } from './firestore-types.ts'; -export declare function getFirestoreSortFieldValue(docData: any, 
primaryKey: string): string; -export declare function stripServerTimestampField(serverTimestampField: string, docData: RxDocType): WithDeleted; -export declare function serverTimestampToIsoString(serverTimestampField: string, docData: any): string; -export declare function isoStringToServerTimestamp(isoString: string): Timestamp; -export declare function firestoreRowToDocData(serverTimestampField: string, primaryPath: string, row: QueryDocumentSnapshot): WithDeleted; -export declare function stripPrimaryKey(primaryPath: string, docData: any): any; -export declare function getContentByIds(ids: string[], getQuery: GetQuery): Promise[]>; diff --git a/dist/types/plugins/replication-firestore/firestore-types.d.ts b/dist/types/plugins/replication-firestore/firestore-types.d.ts deleted file mode 100644 index 865b5bfed1b..00000000000 --- a/dist/types/plugins/replication-firestore/firestore-types.d.ts +++ /dev/null @@ -1,46 +0,0 @@ -import type { MaybePromise, ReplicationOptions, ReplicationPullOptions, ReplicationPushOptions, WithDeleted } from '../../types/index.d.ts'; -import type { CollectionReference, Firestore, QueryFieldFilterConstraint, QuerySnapshot } from 'firebase/firestore'; -export type FirestoreCheckpointType = { - id: string; - /** - * Firestore internally sets the time to an object like - * { - * "seconds": 1669807105, - * "nanoseconds": 476000000 - * } - * But to be able to query that, we have to use a date string - * like '2022-11-30T11:18:25.141Z' - * so we store that string instead. 
- */ - serverTimestamp: string; -}; -export type FirestoreCollection = CollectionReference; -export type FirestoreOptions = { - projectId: string; - collection: FirestoreCollection; - database: Firestore; -}; -export type FirestoreSyncPullOptions = Omit, 'handler' | 'stream$'> & { - filter?: QueryFieldFilterConstraint | QueryFieldFilterConstraint[]; -}; -export type FirestoreSyncPushOptions = Omit, 'handler'> & { - filter?(item: WithDeleted): MaybePromise; -}; -export type SyncOptionsFirestore = Omit, 'pull' | 'push'> & { - firestore: FirestoreOptions; - /** - * In firestore it is not possible to read out - * the internally used write timestamp. - * Even if we could read it out, it is not indexed which - * is required for fetch 'changes-since-x'. - * So instead we have to rely on a custom user defined field - * that contains the server time which is set by firestore via serverTimestamp() - * IMPORTANT: The serverTimestampField MUST NOT be part of the collections RxJsonSchema! - * [default='serverTimestamp'] - * @link https://groups.google.com/g/firebase-talk/c/tAmPzPei-mE - */ - serverTimestampField?: string; - pull?: FirestoreSyncPullOptions; - push?: FirestoreSyncPushOptions; -}; -export type GetQuery = (ids: string[]) => Promise>; diff --git a/dist/types/plugins/replication-firestore/index.d.ts b/dist/types/plugins/replication-firestore/index.d.ts deleted file mode 100644 index e99ab92f85d..00000000000 --- a/dist/types/plugins/replication-firestore/index.d.ts +++ /dev/null @@ -1,17 +0,0 @@ -import type { RxCollection, ReplicationPullOptions, ReplicationPushOptions } from '../../types/index.d.ts'; -import { RxReplicationState } from '../replication/index.ts'; -import type { FirestoreCheckpointType, FirestoreOptions, SyncOptionsFirestore } from './firestore-types.ts'; -export * from './firestore-helper.ts'; -export * from './firestore-types.ts'; -export declare class RxFirestoreReplicationState extends RxReplicationState { - readonly firestore: FirestoreOptions; - 
readonly replicationIdentifierHash: string; - readonly collection: RxCollection; - readonly pull?: ReplicationPullOptions | undefined; - readonly push?: ReplicationPushOptions | undefined; - readonly live: boolean; - retryTime: number; - autoStart: boolean; - constructor(firestore: FirestoreOptions, replicationIdentifierHash: string, collection: RxCollection, pull?: ReplicationPullOptions | undefined, push?: ReplicationPushOptions | undefined, live?: boolean, retryTime?: number, autoStart?: boolean); -} -export declare function replicateFirestore(options: SyncOptionsFirestore): RxFirestoreReplicationState; diff --git a/dist/types/plugins/replication-graphql/graphql-schema-from-rx-schema.d.ts b/dist/types/plugins/replication-graphql/graphql-schema-from-rx-schema.d.ts deleted file mode 100644 index 631cc02e6ce..00000000000 --- a/dist/types/plugins/replication-graphql/graphql-schema-from-rx-schema.d.ts +++ /dev/null @@ -1,51 +0,0 @@ -import type { RxJsonSchema } from '../../types/index.d.ts'; -export type Prefixes = { - push?: string; - pushRow?: string; - checkpoint?: string; - pull?: string; - pullBulk?: string; - stream?: string; - headers?: string; -}; -/** - * just type some common types - * to have better IDE autocomplete, - * all strings are allowed - */ -export type GraphQLParamType = 'ID' | 'ID!' | 'String' | 'String!' | 'Int' | 'Int!' | 'Float' | 'Float!' | string; -export type GraphQLSchemaFromRxSchemaInputSingleCollection = { - schema: RxJsonSchema; - /** - * These fields of the document data - * will be used for the checkpoint. - */ - checkpointFields: string[]; - ignoreInputKeys?: string[]; - ignoreOutputKeys?: string[]; - withRevisions?: boolean; - prefixes?: Prefixes; - headerFields?: string[]; - /** - * Name of the boolean field that marks deleted documents. 
- * [default='_deleted'] - */ - deletedField?: string; -}; -export type GraphQLSchemaFromRxSchemaInput = { - [collectionName: string]: GraphQLSchemaFromRxSchemaInputSingleCollection; -}; -export type GraphQLSchemaFromRxSchemaOutput = { - asString: string; - queries: string[]; - mutations: string[]; - subscriptions: string[]; - inputs: string[]; - types: string[]; -}; -export declare const SPACING = " "; -/** - * Create a GraphQL schema from a given RxJsonSchema - */ -export declare function graphQLSchemaFromRxSchema(input: GraphQLSchemaFromRxSchemaInput): GraphQLSchemaFromRxSchemaOutput; -export declare function fillUpOptionals(input: GraphQLSchemaFromRxSchemaInputSingleCollection): GraphQLSchemaFromRxSchemaInputSingleCollection; diff --git a/dist/types/plugins/replication-graphql/graphql-websocket.d.ts b/dist/types/plugins/replication-graphql/graphql-websocket.d.ts deleted file mode 100644 index 68988ca2e4a..00000000000 --- a/dist/types/plugins/replication-graphql/graphql-websocket.d.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { Client } from 'graphql-ws'; -export type WebsocketWithRefCount = { - url: string; - socket: Client; - refCount: number; -}; -export declare const GRAPHQL_WEBSOCKET_BY_URL: Map; -export declare function getGraphQLWebSocket(url: string, headers?: { - [k: string]: string; -}): Client; -export declare function removeGraphQLWebSocketRef(url: string): void; diff --git a/dist/types/plugins/replication-graphql/helper.d.ts b/dist/types/plugins/replication-graphql/helper.d.ts deleted file mode 100644 index 44f998805af..00000000000 --- a/dist/types/plugins/replication-graphql/helper.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -import type { RxGraphQLReplicationClientState, RxGraphQLReplicationQueryBuilderResponseObject } from '../../types/index.d.ts'; -export declare const GRAPHQL_REPLICATION_PLUGIN_IDENTITY_PREFIX = "graphql"; -export interface GraphQLError { - message: string; - locations: Array<{ - line: number; - column: number; - }>; - path: string[]; -} 
-export type GraphQLErrors = Array; -export declare function graphQLRequest(fetchRequest: WindowOrWorkerGlobalScope['fetch'], httpUrl: string, clientState: RxGraphQLReplicationClientState, queryParams: RxGraphQLReplicationQueryBuilderResponseObject): Promise; diff --git a/dist/types/plugins/replication-graphql/index.d.ts b/dist/types/plugins/replication-graphql/index.d.ts deleted file mode 100644 index b2a0d359646..00000000000 --- a/dist/types/plugins/replication-graphql/index.d.ts +++ /dev/null @@ -1,26 +0,0 @@ -import type { RxCollection, ReplicationPullOptions, ReplicationPushOptions, GraphQLServerUrl, RxGraphQLReplicationQueryBuilderResponseObject, RxGraphQLReplicationClientState, ById } from '../../types/index.d.ts'; -import { RxReplicationState } from '../replication/index.ts'; -import { SyncOptionsGraphQL } from '../../index.ts'; -export declare class RxGraphQLReplicationState extends RxReplicationState { - readonly url: GraphQLServerUrl; - readonly clientState: RxGraphQLReplicationClientState; - readonly replicationIdentifier: string; - readonly collection: RxCollection; - readonly deletedField: string; - readonly pull?: ReplicationPullOptions | undefined; - readonly push?: ReplicationPushOptions | undefined; - readonly live?: boolean | undefined; - retryTime?: number | undefined; - autoStart?: boolean | undefined; - readonly customFetch?: ((input: RequestInfo | URL, init?: RequestInit | undefined) => Promise) | undefined; - constructor(url: GraphQLServerUrl, clientState: RxGraphQLReplicationClientState, replicationIdentifier: string, collection: RxCollection, deletedField: string, pull?: ReplicationPullOptions | undefined, push?: ReplicationPushOptions | undefined, live?: boolean | undefined, retryTime?: number | undefined, autoStart?: boolean | undefined, customFetch?: ((input: RequestInfo | URL, init?: RequestInit | undefined) => Promise) | undefined); - setHeaders(headers: ById): void; - setCredentials(credentials: RequestCredentials | undefined): void; 
- graphQLRequest(queryParams: RxGraphQLReplicationQueryBuilderResponseObject): Promise; -} -export declare function replicateGraphQL({ collection, url, headers, credentials, deletedField, waitForLeadership, pull, push, live, fetch: customFetch, retryTime, // in ms -autoStart, replicationIdentifier }: SyncOptionsGraphQL): RxGraphQLReplicationState; -export * from './helper.ts'; -export * from './graphql-schema-from-rx-schema.ts'; -export * from './query-builder-from-rx-schema.ts'; -export * from './graphql-websocket.ts'; diff --git a/dist/types/plugins/replication-graphql/query-builder-from-rx-schema.d.ts b/dist/types/plugins/replication-graphql/query-builder-from-rx-schema.d.ts deleted file mode 100644 index a3d7a9c81c4..00000000000 --- a/dist/types/plugins/replication-graphql/query-builder-from-rx-schema.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -import { GraphQLSchemaFromRxSchemaInputSingleCollection } from './graphql-schema-from-rx-schema.ts'; -import type { RxGraphQLReplicationPullQueryBuilder, RxGraphQLReplicationPullStreamQueryBuilder, RxGraphQLReplicationPushQueryBuilder } from '../../types/index.d.ts'; -export declare function pullQueryBuilderFromRxSchema(collectionName: string, input: GraphQLSchemaFromRxSchemaInputSingleCollection): RxGraphQLReplicationPullQueryBuilder; -export declare function pullStreamBuilderFromRxSchema(collectionName: string, input: GraphQLSchemaFromRxSchemaInputSingleCollection): RxGraphQLReplicationPullStreamQueryBuilder; -export declare function pushQueryBuilderFromRxSchema(collectionName: string, input: GraphQLSchemaFromRxSchemaInputSingleCollection): RxGraphQLReplicationPushQueryBuilder; diff --git a/dist/types/plugins/replication-nats/index.d.ts b/dist/types/plugins/replication-nats/index.d.ts deleted file mode 100644 index 9bb0ff61eaf..00000000000 --- a/dist/types/plugins/replication-nats/index.d.ts +++ /dev/null @@ -1,16 +0,0 @@ -import type { RxCollection, ReplicationPullOptions, ReplicationPushOptions } from 
'../../types/index.d.ts'; -import { RxReplicationState } from '../replication/index.ts'; -import type { NatsCheckpointType, NatsSyncOptions } from './nats-types.ts'; -export * from './nats-types.ts'; -export * from './nats-helper.ts'; -export declare class RxNatsReplicationState extends RxReplicationState { - readonly replicationIdentifier: string; - readonly collection: RxCollection; - readonly pull?: ReplicationPullOptions | undefined; - readonly push?: ReplicationPushOptions | undefined; - readonly live: boolean; - retryTime: number; - autoStart: boolean; - constructor(replicationIdentifier: string, collection: RxCollection, pull?: ReplicationPullOptions | undefined, push?: ReplicationPushOptions | undefined, live?: boolean, retryTime?: number, autoStart?: boolean); -} -export declare function replicateNats(options: NatsSyncOptions): RxNatsReplicationState; diff --git a/dist/types/plugins/replication-nats/nats-helper.d.ts b/dist/types/plugins/replication-nats/nats-helper.d.ts deleted file mode 100644 index 77040106747..00000000000 --- a/dist/types/plugins/replication-nats/nats-helper.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -import type { StoredMsg, Stream } from 'nats'; -export declare function getNatsServerDocumentState(natsStream: Stream, subjectPrefix: string, docId: string): Promise; diff --git a/dist/types/plugins/replication-nats/nats-types.d.ts b/dist/types/plugins/replication-nats/nats-types.d.ts deleted file mode 100644 index 443b3622f92..00000000000 --- a/dist/types/plugins/replication-nats/nats-types.d.ts +++ /dev/null @@ -1,21 +0,0 @@ -import type { ReplicationOptions, ReplicationPullOptions, ReplicationPushOptions } from '../../types/index.d.ts'; -import { ConnectionOptions } from 'nats'; -export type NatsCheckpointType = { - sequence: number; -}; -export type NatsSyncPullOptions = Omit, 'handler' | 'stream$'> & {}; -export type NatsSyncPushOptions = Omit, 'handler'> & {}; -export type NatsSyncOptions = Omit, 'pull' | 'push'> & { - connection: 
ConnectionOptions; - streamName: string; - /** - * NATS subject prefix like 'foo.bar' - * which means a message for a document would have the subject - * 'foo.bar.myDoc' where the last part 'myDoc' would be the primaryKey in - * the RxDB document. - * @link https://docs.nats.io/nats-concepts/subjects - */ - subjectPrefix: string; - pull?: NatsSyncPullOptions; - push?: NatsSyncPushOptions; -}; diff --git a/dist/types/plugins/replication-webrtc/connection-handler-p2pcf.d.ts b/dist/types/plugins/replication-webrtc/connection-handler-p2pcf.d.ts deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/dist/types/plugins/replication-webrtc/connection-handler-simple-peer.d.ts b/dist/types/plugins/replication-webrtc/connection-handler-simple-peer.d.ts deleted file mode 100644 index b0c9d74e95a..00000000000 --- a/dist/types/plugins/replication-webrtc/connection-handler-simple-peer.d.ts +++ /dev/null @@ -1,54 +0,0 @@ -import type { WebRTCConnectionHandlerCreator } from './webrtc-types.ts'; -import type { Instance as SimplePeerInstance, Options as SimplePeerOptions } from 'simple-peer'; -export type SimplePeer = SimplePeerInstance & { - id: string; -}; -export type SimplePeerInitMessage = { - type: 'init'; - yourPeerId: string; -}; -export type SimplePeerJoinMessage = { - type: 'join'; - room: string; -}; -export type SimplePeerJoinedMessage = { - type: 'joined'; - otherPeerIds: string[]; -}; -export type SimplePeerSignalMessage = { - type: 'signal'; - room: string; - senderPeerId: string; - receiverPeerId: string; - data: string; -}; -export type SimplePeerPingMessage = { - type: 'ping'; -}; -export type PeerMessage = SimplePeerInitMessage | SimplePeerJoinMessage | SimplePeerJoinedMessage | SimplePeerSignalMessage | SimplePeerPingMessage; -export declare const DEFAULT_SIGNALING_SERVER: string; -export type SimplePeerWrtc = SimplePeerOptions['wrtc']; -export type SimplePeerConfig = SimplePeerOptions['config']; -export type SimplePeerConnectionHandlerOptions = { - 
/** - * If no server is specified, the default signaling server - * from signaling.rxdb.info is used. - * This server is not reliable and you should use - * your own signaling server instead. - */ - signalingServerUrl?: string; - wrtc?: SimplePeerWrtc; - config?: SimplePeerConfig; - webSocketConstructor?: WebSocket; -}; -export declare const SIMPLE_PEER_PING_INTERVAL: number; -/** - * Returns a connection handler that uses simple-peer and the signaling server. - */ -export declare function getConnectionHandlerSimplePeer({ signalingServerUrl, wrtc, config, webSocketConstructor }: SimplePeerConnectionHandlerOptions): WebRTCConnectionHandlerCreator; -/** - * Multiple people had problems because it requires to have - * the nextTick() method in the runtime. So we check here and - * throw a helpful error. - */ -export declare function ensureProcessNextTickIsSet(): void; diff --git a/dist/types/plugins/replication-webrtc/connection-handler-webtorrent.d.ts b/dist/types/plugins/replication-webrtc/connection-handler-webtorrent.d.ts deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/dist/types/plugins/replication-webrtc/index.d.ts b/dist/types/plugins/replication-webrtc/index.d.ts deleted file mode 100644 index d654a97b62b..00000000000 --- a/dist/types/plugins/replication-webrtc/index.d.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { BehaviorSubject, Subject, Subscription } from 'rxjs'; -import type { RxCollection, RxError, RxReplicationHandler, RxTypeError } from '../../types/index.d.ts'; -import type { WebRTCConnectionHandler, WebRTCPeerState, WebRTCReplicationCheckpoint, RxWebRTCReplicationState, SyncOptionsWebRTC } from './webrtc-types.ts'; -export declare function replicateWebRTC(options: SyncOptionsWebRTC): Promise>; -/** - * Because the WebRTC replication runs between many instances, - * we use a Pool instead of returning a single replication state. 
- */ -export declare class RxWebRTCReplicationPool { - readonly collection: RxCollection; - readonly options: SyncOptionsWebRTC; - readonly connectionHandler: WebRTCConnectionHandler; - peerStates$: BehaviorSubject>>; - canceled: boolean; - masterReplicationHandler: RxReplicationHandler; - subs: Subscription[]; - error$: Subject; - constructor(collection: RxCollection, options: SyncOptionsWebRTC, connectionHandler: WebRTCConnectionHandler); - addPeer(peer: PeerType, replicationState?: RxWebRTCReplicationState): void; - removePeer(peer: PeerType): void; - awaitFirstPeer(): Promise>>; - cancel(): Promise; -} -export * from './webrtc-helper.ts'; -export * from './signaling-server.ts'; -export * from './webrtc-types.ts'; -export * from './connection-handler-simple-peer.ts'; diff --git a/dist/types/plugins/replication-webrtc/signaling-server.d.ts b/dist/types/plugins/replication-webrtc/signaling-server.d.ts deleted file mode 100644 index 756a4d7aa7b..00000000000 --- a/dist/types/plugins/replication-webrtc/signaling-server.d.ts +++ /dev/null @@ -1,18 +0,0 @@ -/// -import type { WebSocket, ServerOptions } from 'ws'; -export declare const PEER_ID_LENGTH = 12; -export type ServerPeer = { - id: string; - socket: WebSocket; - rooms: Set; - lastPing: number; -}; -/** - * Starts a WebRTC signaling server - * that can be used in tests. 
-*/ -export declare function startSignalingServerSimplePeer(serverOptions: ServerOptions): Promise<{ - port: number | undefined; - server: import("ws").Server; - localUrl: string; -}>; diff --git a/dist/types/plugins/replication-webrtc/webrtc-helper.d.ts b/dist/types/plugins/replication-webrtc/webrtc-helper.d.ts deleted file mode 100644 index 9393a14c14a..00000000000 --- a/dist/types/plugins/replication-webrtc/webrtc-helper.d.ts +++ /dev/null @@ -1,16 +0,0 @@ -import type { HashFunction } from '../../types/index.d.ts'; -import type { WebRTCConnectionHandler, WebRTCMessage, WebRTCResponse } from './webrtc-types.ts'; -/** - * To deterministically define which peer is master and - * which peer is fork, we compare the storage tokens. - * But we have to hash them before, to ensure that - * a storageToken like 'aaaaaa' is not always the master - * for all peers. - */ -export declare function isMasterInWebRTCReplication(hashFunction: HashFunction, ownStorageToken: string, otherStorageToken: string): Promise; -/** - * Send a message to the peer and await the answer. - * @throws with an EmptyErrorImpl if the peer connection - * was closed before an answer was received. 
- */ -export declare function sendMessageAndAwaitAnswer(handler: WebRTCConnectionHandler, peer: PeerType, message: WebRTCMessage): Promise; diff --git a/dist/types/plugins/replication-webrtc/webrtc-types.d.ts b/dist/types/plugins/replication-webrtc/webrtc-types.d.ts deleted file mode 100644 index 314ced7e43f..00000000000 --- a/dist/types/plugins/replication-webrtc/webrtc-types.d.ts +++ /dev/null @@ -1,51 +0,0 @@ -import { Observable, Subscription } from 'rxjs'; -import type { MaybePromise, ReplicationOptions, ReplicationPullOptions, ReplicationPushOptions, RxError, RxReplicationHandler, RxStorageDefaultCheckpoint, RxTypeError, StringKeys } from '../../types/index.d.ts'; -import { RxReplicationState } from '../replication/index.ts'; -import { WebsocketMessageResponseType, WebsocketMessageType } from '../replication-websocket/index.ts'; -export type WebRTCReplicationCheckpoint = RxStorageDefaultCheckpoint; -export type WebRTCMessage = Omit & { - method: StringKeys> | 'token'; -}; -export type WebRTCResponse = Omit; -export type PeerWithMessage = { - peer: PeerType; - message: WebRTCMessage; -}; -export type PeerWithResponse = { - peer: PeerType; - response: WebRTCResponse; -}; -export type WebRTCConnectionHandler = { - connect$: Observable; - disconnect$: Observable; - message$: Observable>; - response$: Observable>; - error$: Observable; - send(peer: PeerType, message: WebRTCMessage | WebRTCResponse): Promise; - destroy(): Promise; -}; -export type WebRTCConnectionHandlerCreator = (opts: SyncOptionsWebRTC) => Promise>; -export type WebRTCSyncPushOptions = Omit, 'handler'> & {}; -export type WebRTCSyncPullOptions = Omit, 'handler' | 'stream$'> & {}; -export type SyncOptionsWebRTC = Omit, 'pull' | 'push' | 'replicationIdentifier' | 'deletedField' | 'live' | 'autostart' | 'waitForLeadership'> & { - /** - * It will only replicate with other instances - * that use the same topic. 
- */ - topic: string; - connectionHandlerCreator: WebRTCConnectionHandlerCreator; - /** - * Run on new peers so that bad peers can be blocked. - * If returns true, the peer is valid and it will replicate. - * If returns false, it will drop the peer. - */ - isPeerValid?: (peer: PeerType) => MaybePromise; - pull?: WebRTCSyncPullOptions; - push?: WebRTCSyncPushOptions; -}; -export type RxWebRTCReplicationState = RxReplicationState; -export type WebRTCPeerState = { - peer: PeerType; - replicationState?: RxWebRTCReplicationState; - subs: Subscription[]; -}; diff --git a/dist/types/plugins/replication-websocket/index.d.ts b/dist/types/plugins/replication-websocket/index.d.ts deleted file mode 100644 index 2d76562d8e9..00000000000 --- a/dist/types/plugins/replication-websocket/index.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -export * from './websocket-client.ts'; -export * from './websocket-server.ts'; -export * from './websocket-types.ts'; diff --git a/dist/types/plugins/replication-websocket/websocket-client.d.ts b/dist/types/plugins/replication-websocket/websocket-client.d.ts deleted file mode 100644 index e4bbac74983..00000000000 --- a/dist/types/plugins/replication-websocket/websocket-client.d.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { RxReplicationState } from '../replication/index.ts'; -import { WebsocketClientOptions } from './websocket-types.ts'; -import IsomorphicWebSocket from 'isomorphic-ws'; -import { Subject, BehaviorSubject } from 'rxjs'; -import type { RxError } from '../../types/index.d.ts'; -export type WebsocketClient = { - url: string; - socket: any; - connected$: BehaviorSubject; - message$: Subject; - error$: Subject; -}; -/** - * Copied and adapted from the 'reconnecting-websocket' npm module. - * Some bundlers have problems with bundling the isomorphic-ws plugin - * so we directly check the correctness in RxDB to ensure that we can - * throw a helpful error. 
- */ -export declare function ensureIsWebsocket(w: typeof IsomorphicWebSocket): void; -export declare function createWebSocketClient(options: WebsocketClientOptions): Promise; -export declare function replicateWithWebsocketServer(options: WebsocketClientOptions): Promise>; diff --git a/dist/types/plugins/replication-websocket/websocket-server.d.ts b/dist/types/plugins/replication-websocket/websocket-server.d.ts deleted file mode 100644 index 183adba8f14..00000000000 --- a/dist/types/plugins/replication-websocket/websocket-server.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -import type { RxDatabase, RxReplicationHandler } from '../../types/index.d.ts'; -import type { ServerOptions } from 'isomorphic-ws'; -import type { WebsocketServerOptions, WebsocketServerState } from './websocket-types.ts'; -export declare function startSocketServer(options: ServerOptions): WebsocketServerState; -export declare function getReplicationHandlerByCollection(database: RxDatabase, collectionName: string): RxReplicationHandler; -export declare function startWebsocketServer(options: WebsocketServerOptions): WebsocketServerState; diff --git a/dist/types/plugins/replication-websocket/websocket-types.d.ts b/dist/types/plugins/replication-websocket/websocket-types.d.ts deleted file mode 100644 index 0be7f8e8a4f..00000000000 --- a/dist/types/plugins/replication-websocket/websocket-types.d.ts +++ /dev/null @@ -1,32 +0,0 @@ -import type { Observable } from 'rxjs'; -import type { ServerOptions, ClientOptions, WebSocketServer, WebSocket } from 'ws'; -import type { RxCollection, RxDatabase, RxReplicationHandler, StringKeys } from '../../types/index.d.ts'; -export type WebsocketServerOptions = { - database: RxDatabase; -} & ServerOptions; -export type WebsocketServerState = { - server: WebSocketServer; - close: () => Promise; - onConnection$: Observable; -}; -export type WebsocketClientOptions = { - replicationIdentifier: string; - collection: RxCollection; - url: string; - batchSize?: number; - live?: 
boolean; - headers?: { - [k: string]: string; - }; -} & ClientOptions; -export type WebsocketMessageType = { - id: string; - collection: string; - method: StringKeys> | 'auth'; - params: any[]; -}; -export type WebsocketMessageResponseType = { - id: string; - collection: string; - result: any; -}; diff --git a/dist/types/plugins/replication/index.d.ts b/dist/types/plugins/replication/index.d.ts deleted file mode 100644 index 64de49dc436..00000000000 --- a/dist/types/plugins/replication/index.d.ts +++ /dev/null @@ -1,72 +0,0 @@ -/** - * This plugin contains the primitives to create - * a RxDB client-server replication. - * It is used in the other replication plugins - * but also can be used as standalone with a custom replication handler. - */ -import { BehaviorSubject, Observable, Subject, Subscription } from 'rxjs'; -import type { ReplicationOptions, ReplicationPullOptions, ReplicationPushOptions, RxCollection, RxDocumentData, RxError, RxJsonSchema, RxReplicationPullStreamItem, RxStorageInstance, RxStorageInstanceReplicationState, RxStorageReplicationMeta, RxTypeError, WithDeleted } from '../../types/index.d.ts'; -export declare const REPLICATION_STATE_BY_COLLECTION: WeakMap[]>; -export declare class RxReplicationState { - /** - * The identifier, used to flag revisions - * and to identify which documents state came from the remote. 
- */ - readonly replicationIdentifier: string; - readonly collection: RxCollection; - readonly deletedField: string; - readonly pull?: ReplicationPullOptions | undefined; - readonly push?: ReplicationPushOptions | undefined; - readonly live?: boolean | undefined; - retryTime?: number | undefined; - autoStart?: boolean | undefined; - readonly subs: Subscription[]; - readonly subjects: { - received: Subject>; - sent: Subject>; - error: Subject; - canceled: BehaviorSubject; - active: BehaviorSubject; - }; - readonly received$: Observable>; - readonly sent$: Observable>; - readonly error$: Observable; - readonly canceled$: Observable; - readonly active$: Observable; - readonly metaInfoPromise: Promise<{ - collectionName: string; - schema: RxJsonSchema>>; - }>; - startPromise: Promise; - onCancel: (() => void)[]; - constructor( - /** - * The identifier, used to flag revisions - * and to identify which documents state came from the remote. - */ - replicationIdentifier: string, collection: RxCollection, deletedField: string, pull?: ReplicationPullOptions | undefined, push?: ReplicationPushOptions | undefined, live?: boolean | undefined, retryTime?: number | undefined, autoStart?: boolean | undefined); - private callOnStart; - internalReplicationState?: RxStorageInstanceReplicationState; - metaInstance?: RxStorageInstance, any, {}, any>; - remoteEvents$: Subject>; - start(): Promise; - isStopped(): boolean; - awaitInitialReplication(): Promise; - /** - * Returns a promise that resolves when: - * - All local data is replicated with the remote - * - No replication cycle is running or in retry-state - * - * WARNING: USing this function directly in a multi-tab browser application - * is dangerous because only the leading instance will ever be replicated, - * so this promise will not resolve in the other tabs. - * For multi-tab support you should set and observe a flag in a local document. 
- */ - awaitInSync(): Promise; - reSync(): void; - emitEvent(ev: RxReplicationPullStreamItem): void; - cancel(): Promise; - remove(): Promise; -} -export declare function replicateRxCollection({ replicationIdentifier, collection, deletedField, pull, push, live, retryTime, waitForLeadership, autoStart, }: ReplicationOptions): RxReplicationState; -export declare function startReplicationOnLeaderShip(waitForLeadership: boolean, replicationState: RxReplicationState): Promise; diff --git a/dist/types/plugins/replication/replication-helper.d.ts b/dist/types/plugins/replication/replication-helper.d.ts deleted file mode 100644 index 9781c5ab004..00000000000 --- a/dist/types/plugins/replication/replication-helper.d.ts +++ /dev/null @@ -1,14 +0,0 @@ -import type { RxCollection, WithDeleted } from '../../types/index.d.ts'; -export declare const DEFAULT_MODIFIER: (d: any) => Promise; -export declare function swapDefaultDeletedTodeletedField(deletedField: string, doc: WithDeleted): RxDocType; -/** - * Must be run over all plain document data - * that was pulled from the remote. - * Used to fill up fields or modify the deleted field etc. - */ -export declare function handlePulledDocuments(collection: RxCollection, deletedField: string, docs: RxDocType[]): WithDeleted[]; -/** - * Like normal promiseWait() - * but will skip the wait time if the online-state changes. 
- */ -export declare function awaitRetry(collection: RxCollection, retryTime: number): Promise; diff --git a/dist/types/plugins/state/helpers.d.ts b/dist/types/plugins/state/helpers.d.ts deleted file mode 100644 index 36642a1679c..00000000000 --- a/dist/types/plugins/state/helpers.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -import type { DeepReadonly, RxJsonSchema } from '../../types'; -import type { RxStateDocument } from './types'; -export declare const RX_STATE_SCHEMA_TITLE = "RxStateCollection"; -export declare const RX_STATE_ID_LENGTH = 14; -export declare const RX_STATE_COLLECTION_SCHEMA: DeepReadonly>; -export declare function nextRxStateId(lastId?: string): string; diff --git a/dist/types/plugins/state/index.d.ts b/dist/types/plugins/state/index.d.ts deleted file mode 100644 index d2cc2d54b0e..00000000000 --- a/dist/types/plugins/state/index.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { RxDatabase, RxPlugin, RxState } from '../../types/index.d.ts'; -export * from './helpers.ts'; -export declare function addState(this: RxDatabase, namespace?: string): Promise>; -export declare const RxDBStatePlugin: RxPlugin; diff --git a/dist/types/plugins/state/rx-state.d.ts b/dist/types/plugins/state/rx-state.d.ts deleted file mode 100644 index 558e3932938..00000000000 --- a/dist/types/plugins/state/rx-state.d.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { Observable, Subject } from 'rxjs'; -import type { RxCollection, RxDatabase, RxQuery, RxDocument, Paths } from '../../types'; -import { RxStateDocument, RxStateOperation, RxStateModifier } from './types.ts'; -/** - * RxDB internally used properties are - * prefixed with lodash _ to make them less - * likely to clash with actual state properties - * from the user. 
- */ -export declare class RxStateBase { - readonly prefix: string; - readonly collection: RxCollection; - _id: number; - _state: T | any; - $: Observable; - _lastIdQuery: RxQuery | null>; - _nonPersisted: { - path: string; - modifier: RxStateModifier; - }[]; - _writeQueue: Promise; - _initDone: boolean; - _instanceId: string; - _ownEmits$: Subject; - constructor(prefix: string, collection: RxCollection); - set(path: Paths | '', modifier: RxStateModifier): Promise; - /** - * To have deterministic writes, - * and to ensure that multiple js realms do not overwrite - * each other, the write happens with incremental ids - * that would throw conflict errors and trigger a retry. - */ - _triggerWrite(): Promise; - get(path?: Paths): any; - get$(path?: Paths): Observable; - get$$(path?: Paths): Reactivity; - /** - * Merges the state operations into a single write row - * to store space and make recreating the state from - * disc faster. - */ - _cleanup(): Promise; -} -export declare function createRxState(database: RxDatabase, prefix: string): Promise>; -export declare function mergeOperationsIntoState(state: T, operations: RxStateOperation[]): void; diff --git a/dist/types/plugins/state/types.d.ts b/dist/types/plugins/state/types.d.ts deleted file mode 100644 index bfd1b9f9f68..00000000000 --- a/dist/types/plugins/state/types.d.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { DeepReadonly } from '../../types'; -/** - * - */ -export type RxStateDocument = { - /** - * Ensures that when multiple - * javascript realms write at the same time, - * we do not overwrite each other but instead - * one write must conflict-error and retry. - * The clock value is also the primary key. - * The clock value contains incremental numbers - * in a string format like '0001', '0123'... - */ - id: string; - /** - * Id of the JavaScript Instance of RxState - * that did the write. Used to optimise performance - * by not running these modifiers twice. 
- */ - sId: string; - ops: RxStateOperation[]; -}; -export type RxStateOperation = { - k: string; - v: any; -}; -export type RxStateModifier = (preValue: DeepReadonly) => any; diff --git a/dist/types/plugins/storage-denokv/denokv-helper.d.ts b/dist/types/plugins/storage-denokv/denokv-helper.d.ts deleted file mode 100644 index e0c8b792803..00000000000 --- a/dist/types/plugins/storage-denokv/denokv-helper.d.ts +++ /dev/null @@ -1,15 +0,0 @@ -export declare const RX_STORAGE_NAME_DENOKV = "denokv"; -export declare function getDenoKVIndexName(index: string[]): string; -/** - * Used for non-index rows that contain the document data, - * not just a documentId - */ -export declare const DENOKV_DOCUMENT_ROOT_PATH = "||"; -export declare const CLEANUP_INDEX: string[]; -/** - * Get the global Deno variable from globalThis.Deno - * so that compiling with plain typescript does not fail. - * TODO download the deno typings from somewhere - * and use them. - */ -export declare function getDenoGlobal(): any; diff --git a/dist/types/plugins/storage-denokv/denokv-query.d.ts b/dist/types/plugins/storage-denokv/denokv-query.d.ts deleted file mode 100644 index b5732c84e2e..00000000000 --- a/dist/types/plugins/storage-denokv/denokv-query.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -import type { PreparedQuery, RxStorageQueryResult } from '../../types/index.d.ts'; -import { RxStorageInstanceDenoKV } from "./rx-storage-instance-denokv.ts"; -export declare function queryDenoKV(instance: RxStorageInstanceDenoKV, preparedQuery: PreparedQuery): Promise>; diff --git a/dist/types/plugins/storage-denokv/denokv-types.d.ts b/dist/types/plugins/storage-denokv/denokv-types.d.ts deleted file mode 100644 index fb5a8d1fe6b..00000000000 --- a/dist/types/plugins/storage-denokv/denokv-types.d.ts +++ /dev/null @@ -1,17 +0,0 @@ -import type { RxDocumentData } from "../../types/index.d.ts"; -export type DenoKVSettings = { - consistencyLevel: "strong" | "eventual"; - openKvPath?: string; - batchSize?: number; -}; 
-export type DenoKVStorageInternals = { - indexes: { - [indexName: string]: DenoKVIndexMeta; - }; -}; -export type DenoKVIndexMeta = { - indexId: string; - indexName: string; - index: string[]; - getIndexableString: (doc: RxDocumentData) => string; -}; diff --git a/dist/types/plugins/storage-denokv/index.d.ts b/dist/types/plugins/storage-denokv/index.d.ts deleted file mode 100644 index 549dda11eeb..00000000000 --- a/dist/types/plugins/storage-denokv/index.d.ts +++ /dev/null @@ -1,11 +0,0 @@ -import type { RxStorage, RxStorageInstanceCreationParams } from '../../types/index.d.ts'; -import type { DenoKVSettings, DenoKVStorageInternals } from './denokv-types.ts'; -import { RxStorageInstanceDenoKV } from "./rx-storage-instance-denokv.ts"; -export declare class RxStorageDenoKV implements RxStorage, DenoKVSettings> { - settings: DenoKVSettings; - name: string; - readonly rxdbVersion = "15.24.0"; - constructor(settings: DenoKVSettings); - createStorageInstance(params: RxStorageInstanceCreationParams): Promise>; -} -export declare function getRxStorageDenoKV(settings?: DenoKVSettings): RxStorageDenoKV; diff --git a/dist/types/plugins/storage-denokv/rx-storage-instance-denokv.d.ts b/dist/types/plugins/storage-denokv/rx-storage-instance-denokv.d.ts deleted file mode 100644 index 4fff6bd82eb..00000000000 --- a/dist/types/plugins/storage-denokv/rx-storage-instance-denokv.d.ts +++ /dev/null @@ -1,42 +0,0 @@ -import { Observable } from 'rxjs'; -import type { RxStorageInstance, RxStorageChangeEvent, RxDocumentData, BulkWriteRow, RxStorageBulkWriteResponse, RxStorageQueryResult, RxJsonSchema, RxStorageInstanceCreationParams, EventBulk, StringKeys, RxConflictResultionTaskSolution, RxStorageDefaultCheckpoint, RxStorageCountResult, RxConflictResultionTask, PreparedQuery } from '../../types/index.d.ts'; -import type { DenoKVSettings, DenoKVStorageInternals } from './denokv-types.ts'; -import { RxStorageDenoKV } from './index.ts'; -export declare class RxStorageInstanceDenoKV 
implements RxStorageInstance, DenoKVSettings, RxStorageDefaultCheckpoint> { - readonly storage: RxStorageDenoKV; - readonly databaseName: string; - readonly collectionName: string; - readonly schema: Readonly>>; - readonly internals: DenoKVStorageInternals; - readonly options: Readonly; - readonly settings: DenoKVSettings; - readonly keySpace: string; - readonly kvOptions: { - consistency: "strong" | "eventual"; - }; - readonly primaryPath: StringKeys>; - private changes$; - closed?: Promise; - readonly kvPromise: Promise; - constructor(storage: RxStorageDenoKV, databaseName: string, collectionName: string, schema: Readonly>>, internals: DenoKVStorageInternals, options: Readonly, settings: DenoKVSettings, keySpace?: string, kvOptions?: { - consistency: "strong" | "eventual"; - }); - /** - * DenoKV has no transactions - * so we have to ensure that there is no write in between our queries - * which would confuse RxDB and return wrong query results. - */ - retryUntilNoWriteInBetween(fn: () => Promise): Promise; - bulkWrite(documentWrites: BulkWriteRow[], context: string): Promise>; - findDocumentsById(ids: string[], withDeleted: boolean): Promise[]>; - query(preparedQuery: PreparedQuery): Promise>; - count(preparedQuery: PreparedQuery): Promise; - getAttachmentData(documentId: string, attachmentId: string, digest: string): Promise; - changeStream(): Observable>, RxStorageDefaultCheckpoint>>; - cleanup(minimumDeletedTime: number): Promise; - close(): Promise; - remove(): Promise; - conflictResultionTasks(): Observable>; - resolveConflictResultionTask(_taskSolution: RxConflictResultionTaskSolution): Promise; -} -export declare function createDenoKVStorageInstance(storage: RxStorageDenoKV, params: RxStorageInstanceCreationParams, settings: DenoKVSettings): Promise>; diff --git a/dist/types/plugins/storage-dexie/dexie-helper.d.ts b/dist/types/plugins/storage-dexie/dexie-helper.d.ts deleted file mode 100644 index c9edc06ab4f..00000000000 --- 
a/dist/types/plugins/storage-dexie/dexie-helper.d.ts +++ /dev/null @@ -1,40 +0,0 @@ -import type { DexieStorageInternals, RxDocumentData, RxJsonSchema } from '../../types/index.d.ts'; -import type { DexieSettings } from '../../types/index.d.ts'; -export declare const DEXIE_DOCS_TABLE_NAME = "docs"; -export declare const DEXIE_CHANGES_TABLE_NAME = "changes"; -export declare const DEXIE_ATTACHMENTS_TABLE_NAME = "attachments"; -export declare const RX_STORAGE_NAME_DEXIE = "dexie"; -export declare function getDexieDbWithTables(databaseName: string, collectionName: string, settings: DexieSettings, schema: RxJsonSchema): DexieStorageInternals; -export declare function closeDexieDb(statePromise: DexieStorageInternals): Promise; -/** - * It is not possible to set non-javascript-variable-syntax - * keys as IndexedDB indexes. So we have to substitute the pipe-char - * which comes from the key-compression plugin. - */ -export declare const DEXIE_PIPE_SUBSTITUTE = "__"; -export declare function dexieReplaceIfStartsWithPipe(str: string): string; -export declare function dexieReplaceIfStartsWithPipeRevert(str: string): string; -/** - * IndexedDB does not support boolean indexing. - * So we have to replace true/false with '1'/'0' - * @param d - */ -export declare function fromStorageToDexie(booleanIndexes: string[], d: RxDocumentData): any; -export declare function fromDexieToStorage(booleanIndexes: string[], d: any): RxDocumentData; -/** - * @recursive - */ -export declare function fromStorageToDexieField(documentData: RxDocumentData): any; -export declare function fromDexieToStorageField(documentData: any): RxDocumentData; -/** - * Creates a string that can be used to create the dexie store. - * @link https://dexie.org/docs/API-Reference#quick-reference - */ -export declare function getDexieStoreSchema(rxJsonSchema: RxJsonSchema): string; -/** - * Returns all documents in the database. - * Non-deleted plus deleted ones. 
- */ -export declare function getDocsInDb(internals: DexieStorageInternals, docIds: string[]): Promise[]>; -export declare function attachmentObjectId(documentId: string, attachmentId: string): string; -export declare function getBooleanIndexes(schema: RxJsonSchema): string[]; diff --git a/dist/types/plugins/storage-dexie/dexie-query.d.ts b/dist/types/plugins/storage-dexie/dexie-query.d.ts deleted file mode 100644 index 86524887d4a..00000000000 --- a/dist/types/plugins/storage-dexie/dexie-query.d.ts +++ /dev/null @@ -1,9 +0,0 @@ -import type { PreparedQuery, RxQueryPlan, RxStorageQueryResult } from '../../types/index.d.ts'; -import type { RxStorageInstanceDexie } from './rx-storage-instance-dexie.ts'; -export declare function mapKeyForKeyRange(k: any): any; -export declare function getKeyRangeByQueryPlan(booleanIndexes: string[], queryPlan: RxQueryPlan, IDBKeyRange?: any): any; -/** - * Runs mango queries over the Dexie.js database. - */ -export declare function dexieQuery(instance: RxStorageInstanceDexie, preparedQuery: PreparedQuery): Promise>; -export declare function dexieCount(instance: RxStorageInstanceDexie, preparedQuery: PreparedQuery): Promise; diff --git a/dist/types/plugins/storage-dexie/index.d.ts b/dist/types/plugins/storage-dexie/index.d.ts deleted file mode 100644 index 5095f935a9b..00000000000 --- a/dist/types/plugins/storage-dexie/index.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -export * from './rx-storage-dexie.ts'; -export * from './rx-storage-instance-dexie.ts'; -export * from './dexie-helper.ts'; -export * from './dexie-query.ts'; diff --git a/dist/types/plugins/storage-dexie/rx-storage-dexie.d.ts b/dist/types/plugins/storage-dexie/rx-storage-dexie.d.ts deleted file mode 100644 index c48ed2275b6..00000000000 --- a/dist/types/plugins/storage-dexie/rx-storage-dexie.d.ts +++ /dev/null @@ -1,11 +0,0 @@ -import type { RxStorage, RxStorageInstanceCreationParams } from '../../types/index.d.ts'; -import type { DexieSettings, DexieStorageInternals } from 
'../../types/plugins/dexie.d.ts'; -import { RxStorageInstanceDexie } from './rx-storage-instance-dexie.ts'; -export declare class RxStorageDexie implements RxStorage { - settings: DexieSettings; - name: string; - readonly rxdbVersion = "15.24.0"; - constructor(settings: DexieSettings); - createStorageInstance(params: RxStorageInstanceCreationParams): Promise>; -} -export declare function getRxStorageDexie(settings?: DexieSettings): RxStorageDexie; diff --git a/dist/types/plugins/storage-dexie/rx-storage-instance-dexie.d.ts b/dist/types/plugins/storage-dexie/rx-storage-instance-dexie.d.ts deleted file mode 100644 index 373f5e22b70..00000000000 --- a/dist/types/plugins/storage-dexie/rx-storage-instance-dexie.d.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { Observable } from 'rxjs'; -import type { RxStorageInstance, RxStorageChangeEvent, RxDocumentData, BulkWriteRow, RxStorageBulkWriteResponse, RxStorageQueryResult, RxJsonSchema, RxStorageInstanceCreationParams, EventBulk, StringKeys, RxConflictResultionTask, RxConflictResultionTaskSolution, RxStorageDefaultCheckpoint, RxStorageCountResult, PreparedQuery } from '../../types/index.d.ts'; -import type { DexieSettings, DexieStorageInternals } from '../../types/plugins/dexie.d.ts'; -import { RxStorageDexie } from './rx-storage-dexie.ts'; -export declare const DEXIE_TEST_META_FIELD = "dexieTestMetaField"; -export declare class RxStorageInstanceDexie implements RxStorageInstance { - readonly storage: RxStorageDexie; - readonly databaseName: string; - readonly collectionName: string; - readonly schema: Readonly>>; - readonly internals: DexieStorageInternals; - readonly options: Readonly; - readonly settings: DexieSettings; - readonly devMode: boolean; - readonly primaryPath: StringKeys>; - private changes$; - readonly instanceId: number; - closed?: Promise; - constructor(storage: RxStorageDexie, databaseName: string, collectionName: string, schema: Readonly>>, internals: DexieStorageInternals, options: Readonly, settings: 
DexieSettings, devMode: boolean); - bulkWrite(documentWrites: BulkWriteRow[], context: string): Promise>; - findDocumentsById(ids: string[], deleted: boolean): Promise[]>; - query(preparedQuery: PreparedQuery): Promise>; - count(preparedQuery: PreparedQuery): Promise; - changeStream(): Observable>, RxStorageDefaultCheckpoint>>; - cleanup(minimumDeletedTime: number): Promise; - getAttachmentData(documentId: string, attachmentId: string, _digest: string): Promise; - remove(): Promise; - close(): Promise; - conflictResultionTasks(): Observable>; - resolveConflictResultionTask(_taskSolution: RxConflictResultionTaskSolution): Promise; -} -export declare function createDexieStorageInstance(storage: RxStorageDexie, params: RxStorageInstanceCreationParams, settings: DexieSettings): Promise>; diff --git a/dist/types/plugins/storage-foundationdb/foundationdb-helpers.d.ts b/dist/types/plugins/storage-foundationdb/foundationdb-helpers.d.ts deleted file mode 100644 index 097e8fdef12..00000000000 --- a/dist/types/plugins/storage-foundationdb/foundationdb-helpers.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -export declare function getFoundationDBIndexName(index: string[]): string; -export declare const CLEANUP_INDEX: string[]; -export declare const FOUNDATION_DB_WRITE_BATCH_SIZE = 2000; diff --git a/dist/types/plugins/storage-foundationdb/foundationdb-query.d.ts b/dist/types/plugins/storage-foundationdb/foundationdb-query.d.ts deleted file mode 100644 index 1fc43becc07..00000000000 --- a/dist/types/plugins/storage-foundationdb/foundationdb-query.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -import type { PreparedQuery, RxStorageQueryResult } from '../../types/index.d.ts'; -import { RxStorageInstanceFoundationDB } from './rx-storage-instance-foundationdb.ts'; -export declare function queryFoundationDB(instance: RxStorageInstanceFoundationDB, preparedQuery: PreparedQuery): Promise>; diff --git a/dist/types/plugins/storage-foundationdb/foundationdb-types.d.ts 
b/dist/types/plugins/storage-foundationdb/foundationdb-types.d.ts deleted file mode 100644 index 4d148ff94d8..00000000000 --- a/dist/types/plugins/storage-foundationdb/foundationdb-types.d.ts +++ /dev/null @@ -1,46 +0,0 @@ -import type { EventBulk, RxAttachmentWriteData, RxDocumentData, RxStorage, RxStorageChangeEvent, RxStorageDefaultCheckpoint } from '../../types/index.d.ts'; -export type RxStorageFoundationDBSettings = { - /** - * Version of the API of the foundationDB server. - */ - apiVersion: number; - /** - * Path to the foundationDB cluster file - * like '/path/to/fdb.cluster' - * (optional) - */ - clusterFile?: string; - batchSize?: number; -}; -export type RxStorageFoundationDBInstanceCreationOptions = { - batchSize?: number; -}; -/** - * TODO atm we cannot import types from 'foundationdb' - * because 'foundationdb' is an optional peer dependency - * this is NOT also in the devDependencies. - * This is because it requires to install the foundationdb client cli - * which would mean everyone that wants to develop RxDB must have this installed manually. 
- */ -export type FoundationDBIndexMeta = { - indexName: string; - index: string[]; - getIndexableString: (doc: RxDocumentData) => string; - db: FoundationDBDatabase; -}; -export type FoundationDBConnection = any; -export type FoundationDBDatabase = any; -export type FoundationDBTransaction = any; -export type FoundationDBStorageInternals = { - connection: FoundationDBConnection; - dbsPromise: Promise<{ - root: FoundationDBDatabase; - main: FoundationDBDatabase; - attachments: FoundationDBDatabase; - events: FoundationDBDatabase>, RxStorageDefaultCheckpoint>>; - indexes: { - [indexName: string]: FoundationDBIndexMeta; - }; - }>; -}; -export type RxStorageFoundationDB = RxStorage, RxStorageFoundationDBInstanceCreationOptions> & {}; diff --git a/dist/types/plugins/storage-foundationdb/index.d.ts b/dist/types/plugins/storage-foundationdb/index.d.ts deleted file mode 100644 index 77b090db922..00000000000 --- a/dist/types/plugins/storage-foundationdb/index.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { RxStorageFoundationDB, RxStorageFoundationDBSettings } from './foundationdb-types.ts'; -export declare function getRxStorageFoundationDB(settings: RxStorageFoundationDBSettings): RxStorageFoundationDB; -export * from './foundationdb-types.ts'; -export * from './foundationdb-helpers.ts'; diff --git a/dist/types/plugins/storage-foundationdb/rx-storage-instance-foundationdb.d.ts b/dist/types/plugins/storage-foundationdb/rx-storage-instance-foundationdb.d.ts deleted file mode 100644 index 674b46b56d2..00000000000 --- a/dist/types/plugins/storage-foundationdb/rx-storage-instance-foundationdb.d.ts +++ /dev/null @@ -1,28 +0,0 @@ -import { Observable } from 'rxjs'; -import type { BulkWriteRow, EventBulk, PreparedQuery, RxConflictResultionTask, RxConflictResultionTaskSolution, RxDocumentData, RxJsonSchema, RxStorageBulkWriteResponse, RxStorageChangeEvent, RxStorageCountResult, RxStorageDefaultCheckpoint, RxStorageInstance, RxStorageInstanceCreationParams, RxStorageQueryResult, 
StringKeys } from '../../types/index.d.ts'; -import type { FoundationDBStorageInternals, RxStorageFoundationDB, RxStorageFoundationDBInstanceCreationOptions, RxStorageFoundationDBSettings } from './foundationdb-types.ts'; -export declare class RxStorageInstanceFoundationDB implements RxStorageInstance, RxStorageFoundationDBInstanceCreationOptions, RxStorageDefaultCheckpoint> { - readonly storage: RxStorageFoundationDB; - readonly databaseName: string; - readonly collectionName: string; - readonly schema: Readonly>>; - readonly internals: FoundationDBStorageInternals; - readonly options: Readonly; - readonly settings: RxStorageFoundationDBSettings; - readonly primaryPath: StringKeys>; - closed?: Promise; - private changes$; - constructor(storage: RxStorageFoundationDB, databaseName: string, collectionName: string, schema: Readonly>>, internals: FoundationDBStorageInternals, options: Readonly, settings: RxStorageFoundationDBSettings); - bulkWrite(documentWrites: BulkWriteRow[], context: string): Promise>; - findDocumentsById(ids: string[], withDeleted: boolean): Promise[]>; - query(preparedQuery: PreparedQuery): Promise>; - count(preparedQuery: PreparedQuery): Promise; - getAttachmentData(documentId: string, attachmentId: string, _digest: string): Promise; - changeStream(): Observable, RxStorageDefaultCheckpoint>>; - remove(): Promise; - cleanup(minimumDeletedTime: number): Promise; - conflictResultionTasks(): Observable>; - resolveConflictResultionTask(_taskSolution: RxConflictResultionTaskSolution): Promise; - close(): Promise; -} -export declare function createFoundationDBStorageInstance(storage: RxStorageFoundationDB, params: RxStorageInstanceCreationParams, settings: RxStorageFoundationDBSettings): Promise>; diff --git a/dist/types/plugins/storage-lokijs/index.d.ts b/dist/types/plugins/storage-lokijs/index.d.ts deleted file mode 100644 index 99da95e5f33..00000000000 --- a/dist/types/plugins/storage-lokijs/index.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -export * from 
'./rx-storage-lokijs.ts'; -export * from './lokijs-helper.ts'; -export * from './rx-storage-instance-loki.ts'; diff --git a/dist/types/plugins/storage-lokijs/loki-save-queue.d.ts b/dist/types/plugins/storage-lokijs/loki-save-queue.d.ts deleted file mode 100644 index a6fb97936ac..00000000000 --- a/dist/types/plugins/storage-lokijs/loki-save-queue.d.ts +++ /dev/null @@ -1,22 +0,0 @@ -import type { LokiDatabaseSettings } from '../../types/index.d.ts'; -/** - * The autosave feature of lokijs has strange behaviors - * and often runs a save in critical moments when other - * more important tasks are running. - * So instead we use a custom save queue that ensures we - * only run loki.saveDatabase() when nothing else is running. - */ -export declare class LokiSaveQueue { - readonly lokiDatabase: any; - readonly databaseSettings: LokiDatabaseSettings; - writesSinceLastRun: number; - /** - * Ensures that we do not run multiple saves - * in parallel - */ - saveQueue: Promise; - saveQueueC: number; - constructor(lokiDatabase: any, databaseSettings: LokiDatabaseSettings); - addWrite(): void; - run(): Promise; -} diff --git a/dist/types/plugins/storage-lokijs/lokijs-helper.d.ts b/dist/types/plugins/storage-lokijs/lokijs-helper.d.ts deleted file mode 100644 index 3da5fa58f3a..00000000000 --- a/dist/types/plugins/storage-lokijs/lokijs-helper.d.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { RxStorageInstanceLoki } from './rx-storage-instance-loki.ts'; -import type { DeterministicSortComparator, FilledMangoQuery, LokiDatabaseSettings, LokiDatabaseState, LokiLocalDatabaseState, RxDocumentData, RxJsonSchema } from '../../types/index.d.ts'; -import { LeaderElector } from 'broadcast-channel'; -export declare const CHANGES_COLLECTION_SUFFIX = "-rxdb-changes"; -export declare const LOKI_BROADCAST_CHANNEL_MESSAGE_TYPE = "rxdb-lokijs-remote-request"; -export declare const LOKI_KEY_OBJECT_BROADCAST_CHANNEL_MESSAGE_TYPE = "rxdb-lokijs-remote-request-key-object"; -export declare const 
RX_STORAGE_NAME_LOKIJS = "lokijs"; -/** - * Loki attaches a $loki property to all data - * which must be removed before returning the data back to RxDB. - */ -export declare function stripLokiKey(docData: RxDocumentData & { - $loki?: number; -}): T; -/** - * Used to check in tests if all instances have been cleaned up. - */ -export declare const OPEN_LOKIJS_STORAGE_INSTANCES: Set>; -export declare const LOKIJS_COLLECTION_DEFAULT_OPTIONS: Partial; -export declare function getLokiDatabase(databaseName: string, databaseSettings: LokiDatabaseSettings): Promise; -export declare function closeLokiCollections(databaseName: string, collections: any[]): Promise; -/** - * This function is at lokijs-helper - * because we need it in multiple places. - */ -export declare function getLokiSortComparator(_schema: RxJsonSchema>, query: FilledMangoQuery): DeterministicSortComparator; -export declare function getLokiLeaderElector(databaseInstanceToken: string, broadcastChannelRefObject: any, databaseName: string): LeaderElector; -/** - * For multi-instance usage, we send requests to the RxStorage - * to the current leading instance over the BroadcastChannel. - */ -export declare function requestRemoteInstance(instance: RxStorageInstanceLoki, operation: string, params: any[]): Promise; -/** - * Handles a request that came from a remote instance via requestRemoteInstance() - * Runs the requested operation over the local db instance and sends back the result. - */ -export declare function handleRemoteRequest(instance: RxStorageInstanceLoki, msg: any): Promise; -export declare function waitUntilHasLeader(leaderElector: LeaderElector): Promise; -/** - * If the local state must be used, that one is returned. - * Returns false if a remote instance must be used. - */ -export declare function mustUseLocalState(instance: RxStorageInstanceLoki): Promise; -/** - * LokiJS does not understand the 'official' $regex operator, - * so we have to transform these back into RegExp objects. 
- * @recursive - */ -export declare function transformRegexToRegExp(selector: any): any; diff --git a/dist/types/plugins/storage-lokijs/rx-storage-instance-loki.d.ts b/dist/types/plugins/storage-lokijs/rx-storage-instance-loki.d.ts deleted file mode 100644 index ba335bae616..00000000000 --- a/dist/types/plugins/storage-lokijs/rx-storage-instance-loki.d.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { Observable } from 'rxjs'; -import type { RxStorageInstance, LokiSettings, RxStorageChangeEvent, RxDocumentData, BulkWriteRow, RxStorageBulkWriteResponse, RxStorageQueryResult, RxJsonSchema, LokiStorageInternals, RxStorageInstanceCreationParams, LokiDatabaseSettings, LokiLocalDatabaseState, EventBulk, StringKeys, RxConflictResultionTask, RxConflictResultionTaskSolution, RxStorageDefaultCheckpoint, RxStorageCountResult, PreparedQuery } from '../../types/index.d.ts'; -import type { RxStorageLoki } from './rx-storage-lokijs.ts'; -export declare class RxStorageInstanceLoki implements RxStorageInstance { - readonly databaseInstanceToken: string; - readonly storage: RxStorageLoki; - readonly databaseName: string; - readonly collectionName: string; - readonly schema: Readonly>>; - readonly internals: LokiStorageInternals; - readonly options: Readonly; - readonly databaseSettings: LokiDatabaseSettings; - readonly primaryPath: StringKeys>; - private changes$; - readonly instanceId: number; - closed?: Promise; - constructor(databaseInstanceToken: string, storage: RxStorageLoki, databaseName: string, collectionName: string, schema: Readonly>>, internals: LokiStorageInternals, options: Readonly, databaseSettings: LokiDatabaseSettings); - bulkWrite(documentWrites: BulkWriteRow[], context: string): Promise>; - findDocumentsById(ids: string[], deleted: boolean): Promise[]>; - query(preparedQueryOriginal: PreparedQuery): Promise>; - count(preparedQuery: PreparedQuery): Promise; - getAttachmentData(_documentId: string, _attachmentId: string, _digest: string): Promise; - changeStream(): 
Observable>, RxStorageDefaultCheckpoint>>; - cleanup(minimumDeletedTime: number): Promise; - close(): Promise; - remove(): Promise; - conflictResultionTasks(): Observable>; - resolveConflictResultionTask(_taskSolution: RxConflictResultionTaskSolution): Promise; -} -export declare function createLokiLocalState(params: RxStorageInstanceCreationParams, databaseSettings: LokiDatabaseSettings): Promise; -export declare function createLokiStorageInstance(storage: RxStorageLoki, params: RxStorageInstanceCreationParams, databaseSettings: LokiDatabaseSettings): Promise>; diff --git a/dist/types/plugins/storage-lokijs/rx-storage-lokijs.d.ts b/dist/types/plugins/storage-lokijs/rx-storage-lokijs.d.ts deleted file mode 100644 index 535fcf5c39f..00000000000 --- a/dist/types/plugins/storage-lokijs/rx-storage-lokijs.d.ts +++ /dev/null @@ -1,28 +0,0 @@ -import type { LokiDatabaseSettings, LokiSettings, LokiStorageInternals, RxStorage, RxStorageInstanceCreationParams } from '../../types/index.d.ts'; -import { RxStorageInstanceLoki } from './rx-storage-instance-loki.ts'; -import type { LeaderElector } from 'broadcast-channel'; -export declare class RxStorageLoki implements RxStorage { - databaseSettings: LokiDatabaseSettings; - name: string; - readonly rxdbVersion = "15.24.0"; - /** - * Create one leader elector by db name. - * This is done inside of the storage, not globally - * to make it easier to test multi-tab behavior. 
- */ - leaderElectorByLokiDbName: Map; - constructor(databaseSettings: LokiDatabaseSettings); - createStorageInstance(params: RxStorageInstanceCreationParams): Promise>; -} -/** - * @deprecated The lokijs RxStorage is deprecated, more info at: - * @link https://rxdb.info/rx-storage-lokijs.html - */ -export declare function getRxStorageLoki(databaseSettings?: LokiDatabaseSettings): RxStorageLoki; diff --git a/dist/types/plugins/storage-memory/binary-search-bounds.d.ts b/dist/types/plugins/storage-memory/binary-search-bounds.d.ts deleted file mode 100644 index 0e1791bbdc5..00000000000 --- a/dist/types/plugins/storage-memory/binary-search-bounds.d.ts +++ /dev/null @@ -1,14 +0,0 @@ -/** - * Everything in this file was copied and adapted from - * @link https://github.com/mikolalysenko/binary-search-bounds - * - * TODO We should use the original npm module instead when this bug is fixed: - * @link https://github.com/mikolalysenko/binary-search-bounds/pull/14 - */ -type Compare = ((a: T, b: T) => number | null | undefined); -export declare function boundGE(a: T[], y: T, c: Compare, l?: any, h?: any): any; -export declare function boundGT(a: T[], y: T, c: Compare, l?: any, h?: any): any; -export declare function boundLT(a: T[], y: T, c: Compare, l?: any, h?: any): any; -export declare function boundLE(a: T[], y: T, c: Compare, l?: any, h?: any): any; -export declare function boundEQ(a: T[], y: T, c: Compare, l?: any, h?: any): any; -export {}; diff --git a/dist/types/plugins/storage-memory/index.d.ts b/dist/types/plugins/storage-memory/index.d.ts deleted file mode 100644 index 0dfc1b202fe..00000000000 --- a/dist/types/plugins/storage-memory/index.d.ts +++ /dev/null @@ -1,7 +0,0 @@ -import type { RxStorageMemory, RxStorageMemorySettings } from './memory-types.ts'; -export declare function getRxStorageMemory(settings?: RxStorageMemorySettings): RxStorageMemory; -export * from './memory-helper.ts'; -export * from './binary-search-bounds.ts'; -export * from 
'./memory-types.ts'; -export * from './memory-indexes.ts'; -export * from './rx-storage-instance-memory.ts'; diff --git a/dist/types/plugins/storage-memory/memory-helper.d.ts b/dist/types/plugins/storage-memory/memory-helper.d.ts deleted file mode 100644 index 772e8cae9c9..00000000000 --- a/dist/types/plugins/storage-memory/memory-helper.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -import type { BulkWriteRow, RxDocumentData, RxJsonSchema } from '../../types/index.d.ts'; -import type { DocWithIndexString, MemoryStorageInternals, MemoryStorageInternalsByIndex } from './memory-types.ts'; -import type { RxStorageInstanceMemory } from './rx-storage-instance-memory.ts'; -export declare function getMemoryCollectionKey(databaseName: string, collectionName: string, schemaVersion: number): string; -export declare function ensureNotRemoved(instance: RxStorageInstanceMemory): void; -export declare function attachmentMapKey(documentId: string, attachmentId: string): string; -/** - * @hotPath - */ -export declare function putWriteRowToState(docId: string, state: MemoryStorageInternals, stateByIndex: MemoryStorageInternalsByIndex[], row: BulkWriteRow, docInState?: RxDocumentData): void; -export declare function removeDocFromState(primaryPath: string, schema: RxJsonSchema>, state: MemoryStorageInternals, doc: RxDocumentData): void; -export declare function compareDocsWithIndex(a: DocWithIndexString, b: DocWithIndexString): 1 | 0 | -1; diff --git a/dist/types/plugins/storage-memory/memory-indexes.d.ts b/dist/types/plugins/storage-memory/memory-indexes.d.ts deleted file mode 100644 index 2d50102a65d..00000000000 --- a/dist/types/plugins/storage-memory/memory-indexes.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { RxDocumentData, RxJsonSchema } from '../../types/index.d.ts'; -import type { MemoryStorageInternals } from './memory-types.ts'; -export declare function addIndexesToInternalsState(state: MemoryStorageInternals, schema: RxJsonSchema>): void; -export declare function 
getMemoryIndexName(index: string[]): string; diff --git a/dist/types/plugins/storage-memory/memory-types.d.ts b/dist/types/plugins/storage-memory/memory-types.d.ts deleted file mode 100644 index 5bebf6f19b4..00000000000 --- a/dist/types/plugins/storage-memory/memory-types.d.ts +++ /dev/null @@ -1,78 +0,0 @@ -import { Subject } from 'rxjs'; -import type { CategorizeBulkWriteRowsOutput, EventBulk, RxAttachmentWriteData, RxConflictResultionTask, RxDocumentData, RxJsonSchema, RxStorage, RxStorageChangeEvent, RxStorageDefaultCheckpoint } from '../../types/index.d.ts'; -export type RxStorageMemorySettings = {}; -export type RxStorageMemoryInstanceCreationOptions = {}; -export type RxStorageMemory = RxStorage, RxStorageMemoryInstanceCreationOptions> & { - /** - * State by collectionKey - */ - collectionStates: Map>; -}; -export type MemoryStorageInternalsByIndex = { - index: string[]; - docsWithIndex: DocWithIndexString[]; - getIndexableString: (docData: RxDocumentData) => string; -}; -/** - * The internals are shared between multiple storage instances - * that have been created with the same [databaseName+collectionName] combination. - */ -export type MemoryStorageInternals = { - id: string; - /** - * Schema of the first instance created with the given settings. - * Used to ensure that the same storage is not re-created with - * a different schema. - */ - schema: RxJsonSchema>; - /** - * We reuse the memory state when multiple instances - * are created with the same params. - * If refCount becomes 0, we can delete the state. - */ - refCount: number; - /** - * If this becomes true, - * it means that an instance has called remove() - * so all other instances should also not work anymore. 
- */ - removed: boolean; - documents: Map>; - /** - * Attachments data, indexed by a combined string - * consisting of [documentId + '||' + attachmentId] - */ - attachments: Map; - byIndex: { - /** - * Because RxDB requires a deterministic sorting - * on all indexes, we can be sure that the composed index key - * of each document is unique, because it contains the primaryKey - * as last index part. - * So we do not have to store the index-position when we want to do fast - * writes. Instead we can do a binary search over the existing array - * because RxDB also knows the previous state of the document when we do a bulkWrite(). - */ - [indexName: string]: MemoryStorageInternalsByIndex; - }; - /** - * We need these to do lazy writes. - */ - ensurePersistenceTask?: CategorizeBulkWriteRowsOutput; - ensurePersistenceIdlePromise?: Promise; - /** - * To easier test the conflict resolution, - * the memory storage exposes the conflict resolution task subject - * so that we can inject own tasks during tests. 
- */ - conflictResultionTasks$: Subject>; - changes$: Subject>, RxStorageDefaultCheckpoint>>; -}; -export type DocWithIndexString = { - id: string; - doc: RxDocumentData; - indexString: string; -}; diff --git a/dist/types/plugins/storage-memory/rx-storage-instance-memory.d.ts b/dist/types/plugins/storage-memory/rx-storage-instance-memory.d.ts deleted file mode 100644 index 0e2ca71efc0..00000000000 --- a/dist/types/plugins/storage-memory/rx-storage-instance-memory.d.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { Observable } from 'rxjs'; -import type { BulkWriteRow, CategorizeBulkWriteRowsOutput, EventBulk, PreparedQuery, RxConflictResultionTask, RxConflictResultionTaskSolution, RxDocumentData, RxJsonSchema, RxStorageBulkWriteResponse, RxStorageChangeEvent, RxStorageCountResult, RxStorageDefaultCheckpoint, RxStorageInstance, RxStorageInstanceCreationParams, RxStorageQueryResult, StringKeys } from '../../types/index.d.ts'; -import type { MemoryStorageInternals, RxStorageMemory, RxStorageMemoryInstanceCreationOptions, RxStorageMemorySettings } from './memory-types.ts'; -/** - * Used in tests to ensure everything - * is closed correctly - */ -export declare const OPEN_MEMORY_INSTANCES: Set>; -export declare class RxStorageInstanceMemory implements RxStorageInstance, RxStorageMemoryInstanceCreationOptions, RxStorageDefaultCheckpoint> { - readonly storage: RxStorageMemory; - readonly databaseName: string; - readonly collectionName: string; - readonly schema: Readonly>>; - readonly internals: MemoryStorageInternals; - readonly options: Readonly; - readonly settings: RxStorageMemorySettings; - readonly devMode: boolean; - readonly primaryPath: StringKeys>; - closed: boolean; - /** - * Used by some plugins and storage wrappers - * to find out details about the internals of a write operation. - * For example if you want to know which documents really have been replaced - * or newly inserted. 
- */ - categorizedByWriteInput: WeakMap[], CategorizeBulkWriteRowsOutput>; - constructor(storage: RxStorageMemory, databaseName: string, collectionName: string, schema: Readonly>>, internals: MemoryStorageInternals, options: Readonly, settings: RxStorageMemorySettings, devMode: boolean); - bulkWrite(documentWrites: BulkWriteRow[], context: string): Promise>; - /** - * Instead of directly inserting the documents into all indexes, - * we do it lazy in the background. This gives the application time - * to directly work with the write-result and to do stuff like rendering DOM - * notes and processing RxDB queries. - * Then in some later time, or just before the next read/write, - * it is ensured that the indexes have been written. - */ - ensurePersistence(): void; - findDocumentsById(docIds: string[], withDeleted: boolean): Promise[]>; - query(preparedQuery: PreparedQuery): Promise>; - count(preparedQuery: PreparedQuery): Promise; - cleanup(minimumDeletedTime: number): Promise; - getAttachmentData(documentId: string, attachmentId: string, digest: string): Promise; - changeStream(): Observable>, RxStorageDefaultCheckpoint>>; - remove(): Promise; - close(): Promise; - conflictResultionTasks(): Observable>; - resolveConflictResultionTask(_taskSolution: RxConflictResultionTaskSolution): Promise; -} -export declare function createMemoryStorageInstance(storage: RxStorageMemory, params: RxStorageInstanceCreationParams, settings: RxStorageMemorySettings): Promise>; diff --git a/dist/types/plugins/storage-mongodb/index.d.ts b/dist/types/plugins/storage-mongodb/index.d.ts deleted file mode 100644 index 1e44e1f0c5f..00000000000 --- a/dist/types/plugins/storage-mongodb/index.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -export * from './rx-storage-mongodb.ts'; -export * from './rx-storage-instance-mongodb.ts'; -export * from './mongodb-helper.ts'; -export * from './mongodb-types.ts'; diff --git a/dist/types/plugins/storage-mongodb/mongodb-helper.d.ts 
b/dist/types/plugins/storage-mongodb/mongodb-helper.d.ts deleted file mode 100644 index 2e794451231..00000000000 --- a/dist/types/plugins/storage-mongodb/mongodb-helper.d.ts +++ /dev/null @@ -1,16 +0,0 @@ -import type { FilledMangoQuery, MangoQuerySelector, MangoQuerySortPart, RxDocumentData, RxJsonSchema } from '../../types/index.d.ts'; -import { Sort as MongoSort } from 'mongodb'; -import { MongoDBPreparedQuery, MongoQuerySelector } from './mongodb-types.ts'; -export declare const RX_STORAGE_NAME_MONGODB = "mongodb"; -/** - * MongoDB uses the _id field by itself (max 12 bytes) - * so we have to substitute the _id field if - * it is used in the RxDocType. - */ -export declare const MONGO_ID_SUBSTITUTE_FIELDNAME = "__id"; -export declare function primarySwapMongoDBQuerySelector(primaryKey: keyof RxDocType, selector: MangoQuerySelector): MongoQuerySelector; -export declare function prepareMongoDBQuery(schema: RxJsonSchema>, mutateableQuery: FilledMangoQuery): MongoDBPreparedQuery; -export declare function swapMongoToRxDoc(docData: any): RxDocumentData; -export declare function swapRxDocToMongo(docData: RxDocumentData): any; -export declare function swapToMongoSort(sort: MangoQuerySortPart[]): MongoSort; -export declare function getMongoDBIndexName(index: string[]): string; diff --git a/dist/types/plugins/storage-mongodb/mongodb-types.d.ts b/dist/types/plugins/storage-mongodb/mongodb-types.d.ts deleted file mode 100644 index a85d495e1f4..00000000000 --- a/dist/types/plugins/storage-mongodb/mongodb-types.d.ts +++ /dev/null @@ -1,20 +0,0 @@ -import type { Filter as MongoQueryFilter, Sort as MongoSort, TransactionOptions } from 'mongodb'; -import type { FilledMangoQuery, RxDocumentData } from '../../types/index.d.ts'; -export type MongoQuerySelector = MongoQueryFilter; -export type MongoDBDatabaseSettings = { - /** - * MongoDB ConnectionString - * Example: mongodb://localhost: - */ - connection: string | 'mongodb://localhost:27017'; - transactionOptions?: 
TransactionOptions; -}; -export type MongoDBPreparedQuery = { - query: FilledMangoQuery; - mongoSelector: MongoQuerySelector>; - mongoSort: MongoSort; -}; -export type MongoDBSettings = {}; -export type MongoDBStorageInternals = {}; -export type RxStorageMongoDBInstanceCreationOptions = {}; -export type RxStorageMongoDBSettings = {}; diff --git a/dist/types/plugins/storage-mongodb/rx-storage-instance-mongodb.d.ts b/dist/types/plugins/storage-mongodb/rx-storage-instance-mongodb.d.ts deleted file mode 100644 index 3e9cf0e26f9..00000000000 --- a/dist/types/plugins/storage-mongodb/rx-storage-instance-mongodb.d.ts +++ /dev/null @@ -1,54 +0,0 @@ -import { BehaviorSubject, Observable } from 'rxjs'; -import type { BulkWriteRow, EventBulk, PreparedQuery, RxConflictResultionTask, RxConflictResultionTaskSolution, RxDocumentData, RxJsonSchema, RxStorageBulkWriteResponse, RxStorageChangeEvent, RxStorageCountResult, RxStorageDefaultCheckpoint, RxStorageInstance, RxStorageInstanceCreationParams, RxStorageQueryResult, StringKeys } from '../../types/index.d.ts'; -import { MongoDBStorageInternals, RxStorageMongoDBInstanceCreationOptions, RxStorageMongoDBSettings } from './mongodb-types.ts'; -import { RxStorageMongoDB } from './rx-storage-mongodb.ts'; -import { Db as MongoDatabase, Collection as MongoCollection, MongoClient, ObjectId, ClientSession } from 'mongodb'; -export declare class RxStorageInstanceMongoDB implements RxStorageInstance { - readonly storage: RxStorageMongoDB; - readonly databaseName: string; - readonly collectionName: string; - readonly schema: Readonly>>; - readonly internals: MongoDBStorageInternals; - readonly options: Readonly; - readonly settings: RxStorageMongoDBSettings; - readonly primaryPath: StringKeys>; - readonly inMongoPrimaryPath: string; - closed?: Promise; - private readonly changes$; - readonly mongoClient: MongoClient; - readonly mongoDatabase: MongoDatabase; - readonly mongoCollectionPromise: Promise | any>>; - /** - * Closing the connection 
must not happen when - * an operation is running, otherwise we get an error. - * So we store all running operations here so that - * they can be awaited. - */ - readonly runningOperations: BehaviorSubject; - writeQueue: Promise; - /** - * We use this to be able to still fetch - * the objectId after transforming the document from mongo-style (with _id) - * to RxDB - */ - readonly mongoObjectIdCache: WeakMap, ObjectId>; - constructor(storage: RxStorageMongoDB, databaseName: string, collectionName: string, schema: Readonly>>, internals: MongoDBStorageInternals, options: Readonly, settings: RxStorageMongoDBSettings); - /** - * Bulk writes on the mongodb storage. - * Notice that MongoDB does not support cross-document transactions - * so we have to do a update-if-previous-is-correct like operations. - * (Similar to what RxDB does with the revision system) - */ - bulkWrite(documentWrites: BulkWriteRow[], context: string): Promise>; - findDocumentsById(docIds: string[], withDeleted: boolean, session?: ClientSession): Promise[]>; - query(originalPreparedQuery: PreparedQuery): Promise>; - count(originalPreparedQuery: PreparedQuery): Promise; - cleanup(minimumDeletedTime: number): Promise; - getAttachmentData(_documentId: string, _attachmentId: string, _digest: string): Promise; - changeStream(): Observable>, RxStorageDefaultCheckpoint>>; - remove(): Promise; - close(): Promise; - conflictResultionTasks(): Observable>; - resolveConflictResultionTask(_taskSolution: RxConflictResultionTaskSolution): Promise; -} -export declare function createMongoDBStorageInstance(storage: RxStorageMongoDB, params: RxStorageInstanceCreationParams, settings: RxStorageMongoDBSettings): Promise>; diff --git a/dist/types/plugins/storage-mongodb/rx-storage-mongodb.d.ts b/dist/types/plugins/storage-mongodb/rx-storage-mongodb.d.ts deleted file mode 100644 index 3022efec06c..00000000000 --- a/dist/types/plugins/storage-mongodb/rx-storage-mongodb.d.ts +++ /dev/null @@ -1,11 +0,0 @@ -import type { 
RxStorage, RxStorageInstanceCreationParams } from '../../types/index.d.ts'; -import type { MongoDBDatabaseSettings, MongoDBSettings, MongoDBStorageInternals } from './mongodb-types.ts'; -import { RxStorageInstanceMongoDB } from './rx-storage-instance-mongodb.ts'; -export declare class RxStorageMongoDB implements RxStorage { - databaseSettings: MongoDBDatabaseSettings; - name: string; - readonly rxdbVersion = "15.24.0"; - constructor(databaseSettings: MongoDBDatabaseSettings); - createStorageInstance(params: RxStorageInstanceCreationParams): Promise>; -} -export declare function getRxStorageMongoDB(databaseSettings: MongoDBDatabaseSettings): RxStorageMongoDB; diff --git a/dist/types/plugins/storage-remote-websocket/index.d.ts b/dist/types/plugins/storage-remote-websocket/index.d.ts deleted file mode 100644 index cb22a216d22..00000000000 --- a/dist/types/plugins/storage-remote-websocket/index.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { RxStorageRemoteWebsocketClient, RxStorageRemoteWebsocketClientOptions, RxStorageRemoteWebsocketServerOptions, RxStorageRemoteWebsocketServerState } from './types.ts'; -export declare function startRxStorageRemoteWebsocketServer(options: RxStorageRemoteWebsocketServerOptions): RxStorageRemoteWebsocketServerState; -export declare function getRxStorageRemoteWebsocket(options: RxStorageRemoteWebsocketClientOptions): RxStorageRemoteWebsocketClient; -export * from './types.ts'; diff --git a/dist/types/plugins/storage-remote-websocket/types.d.ts b/dist/types/plugins/storage-remote-websocket/types.d.ts deleted file mode 100644 index 794d2dccec7..00000000000 --- a/dist/types/plugins/storage-remote-websocket/types.d.ts +++ /dev/null @@ -1,19 +0,0 @@ -import type { WebsocketServerState } from '../replication-websocket/index.ts'; -import type { ServerOptions, ClientOptions } from 'ws'; -import type { RxDatabase, RxStorage } from '../../types/index.d.ts'; -import type { CustomRequestHandler, RxStorageRemoteExposeType, RxStorageRemoteSettings 
} from '../storage-remote/storage-remote-types.ts'; -import { RxStorageRemote } from '../storage-remote/index.ts'; -export type RxStorageRemoteWebsocketServerOptions = ServerOptions & { - storage?: RxStorage; - database?: RxDatabase; - customRequestHandler?: CustomRequestHandler; -}; -export type RxStorageRemoteWebsocketServerState = { - serverState: WebsocketServerState; - exposeState: RxStorageRemoteExposeType; -}; -export type RxStorageRemoteWebsocketClientOptions = ClientOptions & { - url: string; - mode: RxStorageRemoteSettings['mode']; -}; -export type RxStorageRemoteWebsocketClient = RxStorageRemote; diff --git a/dist/types/plugins/storage-remote/index.d.ts b/dist/types/plugins/storage-remote/index.d.ts deleted file mode 100644 index 664269c86ee..00000000000 --- a/dist/types/plugins/storage-remote/index.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -export * from './rx-storage-remote.ts'; -export * from './storage-remote-types.ts'; -export * from './storage-remote-helpers.ts'; -export * from './message-channel-cache.ts'; -export * from './remote.ts'; diff --git a/dist/types/plugins/storage-remote/message-channel-cache.d.ts b/dist/types/plugins/storage-remote/message-channel-cache.d.ts deleted file mode 100644 index 3062597b8dd..00000000000 --- a/dist/types/plugins/storage-remote/message-channel-cache.d.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { RemoteMessageChannel, RxStorageRemoteSettings } from './storage-remote-types.ts'; -export type RemoteMessageChannelCacheItem = { - identifier: string; - cacheKey: string; - messageChannel: Promise; - refCount: number; - keepAlive: boolean; -}; -export declare const MESSAGE_CHANNEL_CACHE_BY_IDENTIFIER: Map>; -export declare const CACHE_ITEM_BY_MESSAGE_CHANNEL: WeakMap; -export declare const OPEN_REMOTE_MESSAGE_CHANNELS: Set; -export declare function getMessageChannel(settings: RxStorageRemoteSettings, cacheKeys: string[], keepAlive?: boolean): Promise; -export declare function closeMessageChannel(messageChannel: 
RemoteMessageChannel): Promise; diff --git a/dist/types/plugins/storage-remote/remote.d.ts b/dist/types/plugins/storage-remote/remote.d.ts deleted file mode 100644 index 689fea32dc2..00000000000 --- a/dist/types/plugins/storage-remote/remote.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -import type { RxStorageRemoteExposeSettings, RxStorageRemoteExposeType } from './storage-remote-types.ts'; -/** - * Run this on the 'remote' part, - * so that RxStorageMessageChannel can connect to it. - */ -export declare function exposeRxStorageRemote(settings: RxStorageRemoteExposeSettings): RxStorageRemoteExposeType; diff --git a/dist/types/plugins/storage-remote/rx-storage-remote.d.ts b/dist/types/plugins/storage-remote/rx-storage-remote.d.ts deleted file mode 100644 index 633a6ea3962..00000000000 --- a/dist/types/plugins/storage-remote/rx-storage-remote.d.ts +++ /dev/null @@ -1,46 +0,0 @@ -import { Observable } from 'rxjs'; -import type { BulkWriteRow, EventBulk, RxConflictResultionTask, RxConflictResultionTaskSolution, RxDocumentData, RxJsonSchema, RxStorage, RxStorageBulkWriteResponse, RxStorageChangeEvent, RxStorageCountResult, RxStorageInstance, RxStorageInstanceCreationParams, RxStorageQueryResult } from '../../types/index.d.ts'; -import type { MessageFromRemote, RemoteMessageChannel, RxStorageRemoteInternals, RxStorageRemoteSettings } from './storage-remote-types.ts'; -export declare class RxStorageRemote implements RxStorage { - readonly settings: RxStorageRemoteSettings; - readonly name: string; - readonly rxdbVersion = "15.24.0"; - private seed; - private lastRequestId; - messageChannelIfOneMode?: Promise; - constructor(settings: RxStorageRemoteSettings); - getRequestId(): string; - createStorageInstance(params: RxStorageInstanceCreationParams): Promise>; - customRequest(data: In): Promise; -} -export declare class RxStorageInstanceRemote implements RxStorageInstance { - readonly storage: RxStorageRemote; - readonly databaseName: string; - readonly collectionName: string; - 
readonly schema: Readonly>>; - readonly internals: RxStorageRemoteInternals; - readonly options: Readonly; - private changes$; - private conflicts$; - private subs; - private closed?; - messages$: Observable; - constructor(storage: RxStorageRemote, databaseName: string, collectionName: string, schema: Readonly>>, internals: RxStorageRemoteInternals, options: Readonly); - private requestRemote; - bulkWrite(documentWrites: BulkWriteRow[], context: string): Promise>; - findDocumentsById(ids: string[], deleted: boolean): Promise[]>; - query(preparedQuery: any): Promise>; - count(preparedQuery: any): Promise; - getAttachmentData(documentId: string, attachmentId: string, digest: string): Promise; - getChangedDocumentsSince(limit: number, checkpoint?: any): Promise<{ - documents: RxDocumentData[]; - checkpoint: any; - }>; - changeStream(): Observable>, any>>; - cleanup(minDeletedTime: number): Promise; - close(): Promise; - remove(): Promise; - conflictResultionTasks(): Observable>; - resolveConflictResultionTask(taskSolution: RxConflictResultionTaskSolution): Promise; -} -export declare function getRxStorageRemote(settings: RxStorageRemoteSettings): RxStorageRemote; diff --git a/dist/types/plugins/storage-remote/storage-remote-helpers.d.ts b/dist/types/plugins/storage-remote/storage-remote-helpers.d.ts deleted file mode 100644 index 293adaceb36..00000000000 --- a/dist/types/plugins/storage-remote/storage-remote-helpers.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { RxError, RxTypeError } from '../../types/index.d.ts'; -import type { MessageFromRemote, MessageToRemote } from './storage-remote-types.ts'; -export declare function createErrorAnswer(msg: MessageToRemote, error: Error | TypeError | RxError | RxTypeError): MessageFromRemote; -export declare function createAnswer(msg: MessageToRemote, ret: any): MessageFromRemote; diff --git a/dist/types/plugins/storage-remote/storage-remote-types.d.ts b/dist/types/plugins/storage-remote/storage-remote-types.d.ts deleted file 
mode 100644 index 8c5cf94e022..00000000000 --- a/dist/types/plugins/storage-remote/storage-remote-types.d.ts +++ /dev/null @@ -1,76 +0,0 @@ -import type { Observable } from 'rxjs'; -import type { MaybePromise, PlainJsonError, RxDatabase, RxStorage, RxStorageInstance, RxStorageInstanceCreationParams } from '../../types/index.d.ts'; -export type MessageFromRemote = { - connectionId: string; - answerTo: string; - method: keyof RxStorageInstance | 'create' | 'custom'; - error?: PlainJsonError; - return?: any | string; -}; -export type MessageToRemote = { - connectionId: string; - /** - * Unique ID of the request - */ - requestId: string; - method: keyof RxStorageInstance | 'create' | 'custom'; - params: RxStorageInstanceCreationParams | // used in the create call - any[] | // used to call RxStorageInstance methods - any; -}; -/** - * A message channel represents a single - * channel that is able to communicate with the remote. - * For example a single websocket connection or WebWorker instance. - * The storage must be able to open and close MessageChannels - * according to the modes settings. - */ -export type RemoteMessageChannel = { - send(msg: MessageToRemote): void; - messages$: Observable; - close(): Promise; -}; -export type RxStorageRemoteSettings = { - identifier: string; - /** - * There are different modes - * that determine how many message channels are used. - * These modes can have different performance patterns. 
- * - * [default='storage'] - */ - mode?: 'one' | 'storage' | 'database' | 'collection'; - messageChannelCreator: () => Promise; -}; -export type RxStorageRemoteInternals = { - params: RxStorageInstanceCreationParams; - connectionId: string; - messageChannel: RemoteMessageChannel; -}; -export type RxStorageRemoteExposeSettingsBase = { - send(msg: MessageFromRemote): void; - messages$: Observable; - customRequestHandler?: CustomRequestHandler; -}; -export type RxStorageRemoteExposeSettingsRxDatabase = RxStorageRemoteExposeSettingsBase & { - /** - * The database which must be mapped to the remote storage server. - */ - database: RxDatabase; -}; -export type RxStorageRemoteExposeSettingsRxStorage = RxStorageRemoteExposeSettingsBase & { - /** - * The original storage - * which actually stores the data. - */ - storage: RxStorage; -}; -export type RxStorageRemoteExposeSettings = RxStorageRemoteExposeSettingsRxDatabase | RxStorageRemoteExposeSettingsRxStorage; -export type RxStorageRemoteExposeType = { - instanceByFullName: Map; -}; -/** - * If set, the clients can send RxDB-unrelated custom messages - * to the remote storage and it will answer them. 
- */ -export type CustomRequestHandler = (data: In) => MaybePromise; diff --git a/dist/types/plugins/test-utils/config.d.ts b/dist/types/plugins/test-utils/config.d.ts deleted file mode 100644 index cc999f36936..00000000000 --- a/dist/types/plugins/test-utils/config.d.ts +++ /dev/null @@ -1,17 +0,0 @@ -/// -import type { RxStorage, RxTestStorage } from '../../types'; -export type TestConfig = { - storage: RxTestStorage; -}; -export declare const isDeno: boolean; -export declare const isBun: boolean; -export declare const isNode: boolean; -export declare function setConfig(newConfig: TestConfig): void; -export declare function getConfig(): TestConfig; -export declare const ENV_VARIABLES: any; -export declare const DEFAULT_STORAGE: string; -export declare function isFastMode(): boolean; -export declare function initTestEnvironment(): void; -export declare function getEncryptedStorage(baseStorage?: RxStorage): RxStorage; -export declare function isNotOneOfTheseStorages(storageNames: string[]): boolean; -export declare function getPassword(): Promise; diff --git a/dist/types/plugins/test-utils/humans-collection.d.ts b/dist/types/plugins/test-utils/humans-collection.d.ts deleted file mode 100644 index 3cce6644e0f..00000000000 --- a/dist/types/plugins/test-utils/humans-collection.d.ts +++ /dev/null @@ -1,25 +0,0 @@ -import * as schemaObjects from './schema-objects.ts'; -import { RxJsonSchema, RxCollection, RxDatabase, MigrationStrategies, RxAttachmentCreator, RxStorage } from '../../index.ts'; -import { HumanDocumentType } from './schemas.ts'; -export declare function create(size?: number, collectionName?: string, multiInstance?: boolean, eventReduce?: boolean, storage?: RxStorage): Promise>; -export declare function createBySchema(schema: RxJsonSchema, name?: string, storage?: RxStorage, migrationStrategies?: MigrationStrategies): Promise>; -export declare function createAttachments(size?: number, name?: string, multiInstance?: boolean): Promise>; -export declare 
function createNoCompression(size?: number, name?: string): Promise>; -export declare function createAgeIndex(amount?: number): Promise>; -export declare function multipleOnSameDB(size?: number): Promise<{ - db: RxDatabase<{ - human: RxCollection; - human2: RxCollection; - }>; - collection: RxCollection; - collection2: RxCollection; -}>; -export declare function createNested(amount?: number): Promise>; -export declare function createDeepNested(amount?: number): Promise>; -export declare function createMultiInstance(name: string, amount?: number, password?: undefined, storage?: RxStorage): Promise>; -export declare function createPrimary(amount?: number, name?: string): Promise>; -export declare function createHumanWithTimestamp(amount?: number, databaseName?: string, multiInstance?: boolean, storage?: RxStorage): Promise>; -export declare function createMigrationCollection(amount?: number, addMigrationStrategies?: MigrationStrategies, name?: string, autoMigrate?: boolean, attachment?: RxAttachmentCreator): Promise>; -export declare function createRelated(name?: string): Promise>; -export declare function createRelatedNested(name?: string): Promise>; -export declare function createIdAndAgeIndex(amount?: number): Promise>; diff --git a/dist/types/plugins/test-utils/index.d.ts b/dist/types/plugins/test-utils/index.d.ts deleted file mode 100644 index 9605677ee34..00000000000 --- a/dist/types/plugins/test-utils/index.d.ts +++ /dev/null @@ -1,18 +0,0 @@ -/** - * This plugins contains thing that are needed for testing - * in RxDB related context. Mostly used in the unit tests and - * also in the tests for the premium and the server repository. 
- */ -export * from './config.ts'; -export * from './humans-collection.ts'; -export * from './port-manager.ts'; -export * from './revisions.ts'; -export * from './test-util.ts'; -export * from './schema-objects.ts'; -export * from './schemas.ts'; -import * as humansCollectionConst from './humans-collection.ts'; -export declare const humansCollection: typeof humansCollectionConst; -import * as schemasConst from './schemas.ts'; -export declare const schemas: typeof schemasConst; -import * as schemaObjectsConst from './schema-objects.ts'; -export declare const schemaObjects: typeof schemaObjectsConst; diff --git a/dist/types/plugins/test-utils/port-manager.d.ts b/dist/types/plugins/test-utils/port-manager.d.ts deleted file mode 100644 index 61da19e0443..00000000000 --- a/dist/types/plugins/test-utils/port-manager.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -/** - * Returns an unused port. - * Used to ensure that different tests - * do not accidentally use the same port. - */ -export declare function nextPort(): Promise; diff --git a/dist/types/plugins/test-utils/revisions.d.ts b/dist/types/plugins/test-utils/revisions.d.ts deleted file mode 100644 index 2cb6b1fc592..00000000000 --- a/dist/types/plugins/test-utils/revisions.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -export declare const EXAMPLE_REVISION_1 = "1-12080c42d471e3d2625e49dcca3b8e1a"; -export declare const EXAMPLE_REVISION_2 = "2-22080c42d471e3d2625e49dcca3b8e2b"; -export declare const EXAMPLE_REVISION_3 = "3-32080c42d471e3d2625e49dcca3b8e3c"; -export declare const EXAMPLE_REVISION_4 = "4-42080c42d471e3d2625e49dcca3b8e3c"; diff --git a/dist/types/plugins/test-utils/schema-objects.d.ts b/dist/types/plugins/test-utils/schema-objects.d.ts deleted file mode 100644 index c1c7d026b49..00000000000 --- a/dist/types/plugins/test-utils/schema-objects.d.ts +++ /dev/null @@ -1,206 +0,0 @@ -/** - * this file contains objects which match the schemas in schemas.js - */ -import { HumanDocumentType } from './schemas.ts'; -/** - * Some 
storages had problems with umlauts and other special chars. - * So we add these to all test strings. - * TODO add emojis - */ -export declare const TEST_DATA_CHARSET = "0987654321ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz\u00E4\u00F6\u00FC\u00D6\u00C4\u00DF\u00DC[]{}'"; -export declare const TEST_DATA_CHARSET_LAST_SORTED: string; -export declare function randomStringWithSpecialChars(length: number): string; -export interface SimpleHumanDocumentType { - passportId: string; - firstName: string; - lastName: string; -} -export declare function humanData(passportId?: string, age?: number, firstName?: string): HumanDocumentType; -export declare function simpleHumanData(): SimpleHumanDocumentType; -export interface SimpleHumanV3DocumentType { - passportId: string; - age: number; - oneOptional?: string; -} -export declare function simpleHumanV3Data(partial?: Partial): SimpleHumanV3DocumentType; -export interface SimpleHumanAgeDocumentType { - passportId: string; - age: string; -} -export declare function simpleHumanAge(partial?: Partial): SimpleHumanAgeDocumentType; -export interface HumanWithSubOtherDocumentType { - passportId: string; - other: { - age: number; - }; -} -export declare function humanWithSubOther(): HumanWithSubOtherDocumentType; -export interface NoIndexHumanDocumentType { - firstName: string; - lastName: string; -} -export declare function NoIndexHuman(): NoIndexHumanDocumentType; -export interface NestedHumanDocumentType { - passportId: string; - firstName: string; - mainSkill: { - name: string; - level: number; - }; -} -export declare function nestedHumanData(partial?: Partial): NestedHumanDocumentType; -export interface DeepNestedHumanDocumentType { - passportId: string; - mainSkill: { - name: string; - attack: { - good: boolean; - count: number; - }; - }; -} -export declare function deepNestedHumanData(): DeepNestedHumanDocumentType; -export interface BigHumanDocumentType { - passportId: string; - dnaHash: string; - firstName: string; - 
lastName: string; - age: number; -} -export declare function bigHumanDocumentType(): BigHumanDocumentType; -export interface HeroArrayDocumentType { - name: string; - skills: { - name: string; - damage: number; - }[]; -} -export declare function heroArrayData(): HeroArrayDocumentType; -export interface SimpleHeroArrayDocumentType { - name: string; - skills: string[]; -} -export declare function simpleHeroArray(partial?: Partial): SimpleHeroArrayDocumentType; -export interface EncryptedHumanDocumentType { - passportId: string; - firstName: string; - secret: string; -} -export declare function encryptedHumanData(secret?: string): EncryptedHumanDocumentType; -export interface EncryptedObjectHumanDocumentType { - passportId: string; - firstName: string; - secret: { - name: string; - subname: string; - }; -} -export declare function encryptedObjectHumanData(): EncryptedObjectHumanDocumentType; -export interface EncryptedDeepHumanDocumentType { - passportId: string; - firstName: string; - firstLevelPassword: string; - secretData: { - pw: string; - }; - deepSecret: { - darkhole: { - pw: string; - }; - }; - nestedSecret: { - darkhole: { - pw: string; - }; - }; -} -export declare function encryptedDeepHumanDocumentType(): EncryptedDeepHumanDocumentType; -export interface CompoundIndexDocumentType { - passportId: string; - passportCountry: string; - age: number; -} -export declare function compoundIndexData(): CompoundIndexDocumentType; -export interface CompoundIndexNoStringDocumentType { - passportId: string; - passportCountry: { - [prop: string]: string; - }; - age: number; -} -export declare function compoundIndexNoStringData(): CompoundIndexNoStringDocumentType; -export interface NostringIndexDocumentType { - passportId: {}; - firstName: string; -} -export declare function nostringIndex(): NostringIndexDocumentType; -export interface RefHumanDocumentType { - name: string; - bestFriend: string; -} -export declare function refHumanData(bestFriend?: string): 
RefHumanDocumentType; -export interface RefHumanNestedDocumentType { - name: string; - foo: { - bestFriend: string; - }; -} -export declare function refHumanNestedData(bestFriend?: string): RefHumanNestedDocumentType; -export interface HumanWithTimestampNestedDocumentType extends HumanWithTimestampDocumentType { - address?: { - street: string; - suite: string; - city: string; - zipcode: string; - geo: { - lat: string; - lng: string; - }; - }; -} -export interface HumanWithTimestampDocumentType { - id: string; - name: string; - age: number; - updatedAt: number; - deletedAt?: number; -} -export declare function humanWithTimestampData(givenData?: Partial): HumanWithTimestampDocumentType; -export interface AverageSchemaDocumentType { - id: string; - var1: string; - var2: number; - deep: { - deep1: string; - deep2: string; - deeper: { - deepNr: number; - }; - }; - list: { - deep1: string; - deep2: string; - }[]; -} -export declare function averageSchemaData(partial?: Partial): AverageSchemaDocumentType; -export interface PointDocumentType { - id: string; - x: number; - y: number; -} -export declare function pointData(): PointDocumentType; -export interface HumanWithIdAndAgeIndexDocumentType { - id: string; - name: string; - age: number; -} -export declare function humanWithIdAndAgeIndexDocumentType(age?: number): HumanWithIdAndAgeIndexDocumentType; -export type HumanWithCompositePrimary = { - id?: string; - firstName: string; - lastName: string; - info: { - age: number; - }; -}; -export declare function humanWithCompositePrimary(partial?: Partial): HumanWithCompositePrimary; diff --git a/dist/types/plugins/test-utils/schemas.d.ts b/dist/types/plugins/test-utils/schemas.d.ts deleted file mode 100644 index 6f9314d31e7..00000000000 --- a/dist/types/plugins/test-utils/schemas.d.ts +++ /dev/null @@ -1,355 +0,0 @@ -import { SimpleHumanV3DocumentType, HumanWithSubOtherDocumentType, NestedHumanDocumentType, DeepNestedHumanDocumentType, EncryptedHumanDocumentType, 
EncryptedObjectHumanDocumentType, EncryptedDeepHumanDocumentType, CompoundIndexDocumentType, CompoundIndexNoStringDocumentType, HeroArrayDocumentType, SimpleHeroArrayDocumentType, RefHumanDocumentType, RefHumanNestedDocumentType, AverageSchemaDocumentType, PointDocumentType, HumanWithTimestampDocumentType, BigHumanDocumentType, NostringIndexDocumentType, NoIndexHumanDocumentType, HumanWithCompositePrimary, HumanWithTimestampNestedDocumentType } from './schema-objects.ts'; -import type { ExtractDocumentTypeFromTypedRxJsonSchema, RxJsonSchema } from '../../types/rx-schema'; -export declare const humanSchemaLiteral: import("../../index.ts").DeepReadonlyObject<{ - readonly title: "human schema"; - readonly description: "describes a human being"; - readonly version: 0; - readonly keyCompression: false; - readonly primaryKey: "passportId"; - readonly type: "object"; - readonly properties: { - readonly passportId: { - readonly type: "string"; - readonly maxLength: 100; - }; - readonly firstName: { - readonly type: "string"; - readonly maxLength: 100; - }; - readonly lastName: { - readonly type: "string"; - readonly maxLength: 100; - }; - readonly age: { - readonly description: "age in years"; - readonly type: "integer"; - readonly minimum: 0; - readonly maximum: 150; - readonly multipleOf: 1; - }; - }; - readonly required: readonly ["firstName", "lastName", "passportId", "age"]; - readonly indexes: readonly ["firstName"]; -}>; -declare const humanSchemaTyped: { - title: "human schema"; - description: "describes a human being"; - version: 0; - keyCompression: false; - primaryKey: "passportId"; - type: "object"; - properties: { - passportId: { - type: "string"; - maxLength: 100; - }; - firstName: { - type: "string"; - maxLength: 100; - }; - lastName: { - type: "string"; - maxLength: 100; - }; - age: { - description: "age in years"; - type: "integer"; - minimum: 0; - maximum: 150; - multipleOf: 1; - }; - }; - required: ["firstName", "lastName", "passportId", "age"]; - 
indexes: ["firstName"]; -}; -export type HumanDocumentType = ExtractDocumentTypeFromTypedRxJsonSchema; -export declare const human: RxJsonSchema; -export declare const humanDefault: RxJsonSchema; -export declare const humanFinal: RxJsonSchema; -export declare const simpleHuman: RxJsonSchema; -export declare const simpleHumanV3: RxJsonSchema; -export declare const humanAgeIndex: RxJsonSchema; -export declare const humanSubIndex: RxJsonSchema; -/** - * each field is an index, - * use this to slow down inserts in tests - */ -export declare const humanWithAllIndex: RxJsonSchema; -export declare const nestedHuman: RxJsonSchema; -export declare const deepNestedHuman: RxJsonSchema; -export declare const noIndexHuman: RxJsonSchema; -export declare const noStringIndex: RxJsonSchema; -export declare const bigHuman: RxJsonSchema; -export declare const encryptedHuman: RxJsonSchema; -export declare const encryptedObjectHuman: RxJsonSchema; -export declare const encryptedDeepHuman: RxJsonSchema; -export declare const notExistingIndex: RxJsonSchema<{ - passportId: string; - address: { - street: string; - }; -}>; -export declare const compoundIndex: RxJsonSchema; -export declare const compoundIndexNoString: RxJsonSchema; -export declare const empty: RxJsonSchema; -export declare const heroArray: RxJsonSchema; -export declare const simpleArrayHero: RxJsonSchema; -export declare const primaryHumanLiteral: import("../../index.ts").DeepReadonlyObject<{ - readonly title: "human schema with primary"; - readonly version: 0; - readonly description: "describes a human being with passportID as primary"; - readonly keyCompression: false; - readonly primaryKey: "passportId"; - readonly type: "object"; - readonly properties: { - readonly passportId: { - readonly type: "string"; - readonly minLength: 4; - readonly maxLength: 100; - }; - readonly firstName: { - readonly type: "string"; - readonly maxLength: 100; - }; - readonly lastName: { - readonly type: "string"; - readonly maxLength: 500; - 
}; - readonly age: { - readonly type: "integer"; - readonly minimum: 0; - readonly maximum: 150; - readonly multipleOf: 1; - }; - }; - readonly required: readonly ["passportId", "firstName", "lastName"]; -}>; -declare const primaryHumanTypedSchema: { - title: "human schema with primary"; - version: 0; - description: "describes a human being with passportID as primary"; - keyCompression: false; - primaryKey: "passportId"; - type: "object"; - properties: { - passportId: { - type: "string"; - minLength: 4; - maxLength: 100; - }; - firstName: { - type: "string"; - maxLength: 100; - }; - lastName: { - type: "string"; - maxLength: 500; - }; - age: { - type: "integer"; - minimum: 0; - maximum: 150; - multipleOf: 1; - }; - }; - required: ["passportId", "firstName", "lastName"]; -}; -export type PrimaryHumanDocType = ExtractDocumentTypeFromTypedRxJsonSchema; -export declare const primaryHuman: RxJsonSchema; -export declare const humanNormalizeSchema1Literal: import("../../index.ts").DeepReadonlyObject<{ - readonly title: "human schema"; - readonly version: 0; - readonly keyCompression: false; - readonly description: "describes a human being"; - readonly primaryKey: "passportId"; - readonly type: "object"; - readonly properties: { - readonly passportId: { - readonly type: "string"; - readonly minLength: 4; - readonly maxLength: 100; - }; - readonly age: { - readonly description: "age in years"; - readonly type: "integer"; - readonly minimum: 0; - readonly maximum: 150; - readonly multipleOf: 1; - }; - }; - readonly required: readonly ["age", "passportId"]; -}>; -declare const humanNormalizeSchema1Typed: { - title: "human schema"; - version: 0; - keyCompression: false; - description: "describes a human being"; - primaryKey: "passportId"; - type: "object"; - properties: { - passportId: { - type: "string"; - minLength: 4; - maxLength: 100; - }; - age: { - description: "age in years"; - type: "integer"; - minimum: 0; - maximum: 150; - multipleOf: 1; - }; - }; - required: ["age", 
"passportId"]; -}; -export type AgeHumanDocumentType = ExtractDocumentTypeFromTypedRxJsonSchema; -export declare const humanNormalizeSchema1: RxJsonSchema; -export declare const humanNormalizeSchema2: RxJsonSchema; -export declare const refHuman: RxJsonSchema; -export declare const humanCompositePrimary: RxJsonSchema; -export declare const humanCompositePrimarySchemaLiteral: import("../../index.ts").DeepReadonlyObject<{ - readonly title: "human schema"; - readonly description: "describes a human being"; - readonly version: 0; - readonly keyCompression: false; - readonly primaryKey: { - readonly key: "id"; - readonly fields: readonly ["firstName", "info.age"]; - readonly separator: "|"; - }; - readonly encrypted: readonly []; - readonly type: "object"; - readonly properties: { - readonly id: { - readonly type: "string"; - readonly maxLength: 100; - }; - readonly firstName: { - readonly type: "string"; - readonly maxLength: 100; - }; - readonly lastName: { - readonly type: "string"; - }; - readonly info: { - readonly type: "object"; - readonly properties: { - readonly age: { - readonly description: "age in years"; - readonly type: "integer"; - readonly minimum: 0; - readonly maximum: 150; - }; - }; - readonly required: readonly ["age"]; - }; - readonly readonlyProps: { - readonly allOf: readonly []; - readonly anyOf: readonly []; - readonly oneOf: readonly []; - readonly type: readonly []; - readonly dependencies: { - readonly someDep: readonly ["asd"]; - }; - readonly items: readonly []; - readonly required: readonly []; - readonly enum: readonly []; - }; - }; - readonly required: readonly ["id", "firstName", "lastName", "info"]; - readonly indexes: readonly ["firstName"]; -}>; -declare const humanCompositePrimarySchemaTyped: { - title: "human schema"; - description: "describes a human being"; - version: 0; - keyCompression: false; - primaryKey: { - key: "id"; - fields: ["firstName", "info.age"]; - separator: "|"; - }; - encrypted: []; - type: "object"; - 
properties: { - id: { - type: "string"; - maxLength: 100; - }; - firstName: { - type: "string"; - maxLength: 100; - }; - lastName: { - type: "string"; - }; - info: { - type: "object"; - properties: { - age: { - description: "age in years"; - type: "integer"; - minimum: 0; - maximum: 150; - }; - }; - required: ["age"]; - }; - readonlyProps: { - allOf: []; - anyOf: []; - oneOf: []; - type: []; - dependencies: { - someDep: ["asd"]; - }; - items: []; - required: []; - enum: []; - }; - }; - required: ["id", "firstName", "lastName", "info"]; - indexes: ["firstName"]; -}; -export type HumanCompositePrimaryDocType = ExtractDocumentTypeFromTypedRxJsonSchema; -export declare const refHumanNested: RxJsonSchema; -/** - * an average schema used in performance-tests - */ -export declare function averageSchema(): RxJsonSchema; -export declare const point: RxJsonSchema; -export declare const humanMinimal: RxJsonSchema; -export declare const humanMinimalBroken: RxJsonSchema<{ - passportId: string; - broken: number; -}>; -/** - * used in the graphql-test - * contains timestamp - */ -export declare const humanWithTimestamp: RxJsonSchema; -export declare const humanWithTimestampNested: RxJsonSchema; -/** - * each field is an index, - * use this to slow down inserts in tests - */ -export declare const humanWithTimestampAllIndex: RxJsonSchema; -export declare const humanWithSimpleAndCompoundIndexes: RxJsonSchema<{ - id: string; - name: string; - age: number; - createdAt: number; - updatedAt: number; -}>; -export declare const humanWithDeepNestedIndexes: RxJsonSchema<{ - id: string; - name: string; - job: any; -}>; -export declare const humanIdAndAgeIndex: RxJsonSchema<{ - id: string; - name: string; - age: number; -}>; -export declare function enableKeyCompression(schema: RxJsonSchema): RxJsonSchema; -export {}; diff --git a/dist/types/plugins/test-utils/test-util.d.ts b/dist/types/plugins/test-utils/test-util.d.ts deleted file mode 100644 index be70f172bcc..00000000000 --- 
a/dist/types/plugins/test-utils/test-util.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -import type { Func } from 'mocha'; -import type { RxCollection } from '../../types'; -import type { RxReplicationState } from '../replication/index.ts'; -export declare function testMultipleTimes(times: number, title: string, test: Func): void; -export declare function ensureCollectionsHaveEqualState(c1: RxCollection, c2: RxCollection): Promise; -export declare function ensureReplicationHasNoErrors(replicationState: RxReplicationState): void; diff --git a/dist/types/plugins/update/index.d.ts b/dist/types/plugins/update/index.d.ts deleted file mode 100644 index f70af91086f..00000000000 --- a/dist/types/plugins/update/index.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -import type { RxDocument, RxQuery, RxPlugin, UpdateQuery } from '../../types/index.d.ts'; -export declare function incrementalUpdate(this: RxDocument, updateObj: UpdateQuery): Promise>; -export declare function update(this: RxDocument, updateObj: UpdateQuery): Promise>; -export declare function RxQueryUpdate(this: RxQuery, updateObj: UpdateQuery): Promise; -export declare const RxDBUpdatePlugin: RxPlugin; diff --git a/dist/types/plugins/update/mingo-updater.d.ts b/dist/types/plugins/update/mingo-updater.d.ts deleted file mode 100644 index 6217714e5f3..00000000000 --- a/dist/types/plugins/update/mingo-updater.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -/** - * Custom build of the mingo updater for smaller build size - */ -import type { UpdateQuery } from '../../types/index'; -export declare function mingoUpdater(d: T, op: UpdateQuery): T; diff --git a/dist/types/plugins/utils/index.d.ts b/dist/types/plugins/utils/index.d.ts deleted file mode 100644 index 4f3f41355fb..00000000000 --- a/dist/types/plugins/utils/index.d.ts +++ /dev/null @@ -1,18 +0,0 @@ -export * from './utils-array.ts'; -export * from './utils-blob.ts'; -export * from './utils-base64.ts'; -export * from './utils-revision.ts'; -export * from './utils-document.ts'; -export * from 
'./utils-hash.ts'; -export * from './utils-promise.ts'; -export * from './utils-regex.ts'; -export * from './utils-string.ts'; -export * from './utils-object-deep-equal.ts'; -export * from './utils-object-dot-prop.ts'; -export * from './utils-object.ts'; -export * from './utils-map.ts'; -export * from './utils-error.ts'; -export * from './utils-time.ts'; -export * from './utils-other.ts'; -export * from './utils-rxdb-version.ts'; -export * from './utils-global.ts'; diff --git a/dist/types/plugins/utils/utils-array.d.ts b/dist/types/plugins/utils/utils-array.d.ts deleted file mode 100644 index 03b7c448148..00000000000 --- a/dist/types/plugins/utils/utils-array.d.ts +++ /dev/null @@ -1,47 +0,0 @@ -import type { MaybePromise, MaybeReadonly } from '../../types/index.d.ts'; -export declare function lastOfArray(ar: T[]): T | undefined; -/** - * shuffle the given array - */ -export declare function shuffleArray(arr: T[]): T[]; -export declare function randomOfArray(arr: T[]): T; -export declare function toArray(input: T | T[] | Readonly | Readonly): T[]; -/** - * Split array with items into smaller arrays with items - * @link https://stackoverflow.com/a/7273794/3443137 - */ -export declare function batchArray(array: T[], batchSize: number): T[][]; -/** - * @link https://stackoverflow.com/a/15996017 - */ -export declare function removeOneFromArrayIfMatches(ar: T[], condition: (x: T) => boolean): T[]; -/** - * returns true if the supplied argument is either an Array or a Readonly> - */ -export declare function isMaybeReadonlyArray(x: any): x is MaybeReadonly; -export declare function isOneItemOfArrayInOtherArray(ar1: T[], ar2: T[]): boolean; -/** - * Use this in array.filter() to remove all empty slots - * and have the correct typings afterwards. 
- * @link https://stackoverflow.com/a/46700791/3443137 - */ -export declare function arrayFilterNotEmpty(value: TValue | null | undefined): value is TValue; -export declare function countUntilNotMatching(ar: T[], matchingFn: (v: T, idx: number) => boolean): number; -export declare function asyncFilter(array: T[], predicate: (item: T, index: number, a: T[]) => MaybePromise): Promise; -/** - * @link https://stackoverflow.com/a/3762735 - */ -export declare function sumNumberArray(array: number[]): number; -export declare function maxOfNumbers(arr: number[]): number; -/** - * Appends the given documents to the given array. - * This will mutate the first given array. - * Mostly used as faster alternative to Array.concat() - * because .concat() is so slow. - * @link https://www.measurethat.net/Benchmarks/Show/4223/0/array-concat-vs-spread-operator-vs-push#latest_results_block - */ -export declare function appendToArray(ar: T[], add: T[] | readonly T[]): void; -/** - * @link https://gist.github.com/telekosmos/3b62a31a5c43f40849bb - */ -export declare function uniqueArray(arrArg: string[]): string[]; diff --git a/dist/types/plugins/utils/utils-base64.d.ts b/dist/types/plugins/utils/utils-base64.d.ts deleted file mode 100644 index 6ddc7a52cec..00000000000 --- a/dist/types/plugins/utils/utils-base64.d.ts +++ /dev/null @@ -1,15 +0,0 @@ -/** - * atob() and btoa() do not work well with non ascii chars, - * so we have to use these helper methods instead. 
- * @link https://stackoverflow.com/a/30106551/3443137 - */ -export declare function b64EncodeUnicode(str: string): string; -export declare function b64DecodeUnicode(str: string): string; -/** - * @link https://stackoverflow.com/a/9458996/3443137 - */ -export declare function arrayBufferToBase64(buffer: ArrayBuffer): string; -/** - * @link https://stackoverflow.com/a/21797381 - */ -export declare function base64ToArrayBuffer(base64: string): ArrayBuffer; diff --git a/dist/types/plugins/utils/utils-blob.d.ts b/dist/types/plugins/utils/utils-blob.d.ts deleted file mode 100644 index 6f699cef15a..00000000000 --- a/dist/types/plugins/utils/utils-blob.d.ts +++ /dev/null @@ -1,13 +0,0 @@ -/** - * Since RxDB 13.0.0 we only use Blob instead of falling back to Buffer, - * because Node.js >18 supports Blobs anyway. - */ -/** - * depending if we are on node or browser, - * we have to use Buffer(node) or Blob(browser) - */ -export declare function createBlob(data: string, type: string): Blob; -export declare function createBlobFromBase64(base64String: string, type: string): Promise; -export declare function blobToString(blob: Blob | string): Promise; -export declare function blobToBase64String(blob: Blob | string): Promise; -export declare function getBlobSize(blob: Blob): number; diff --git a/dist/types/plugins/utils/utils-document.d.ts b/dist/types/plugins/utils/utils-document.d.ts deleted file mode 100644 index 9e09cc9f26e..00000000000 --- a/dist/types/plugins/utils/utils-document.d.ts +++ /dev/null @@ -1,26 +0,0 @@ -import type { DeepReadonly, RxDocumentData, RxDocumentMeta, StringKeys, WithDeleted, WithDeletedAndAttachments } from '../../types/index.d.ts'; -/** - * We use 1 as minimum so that the value is never falsy. - * This const is used in several places because querying - * with a value lower then the minimum could give false results. 
- */ -export declare const RX_META_LWT_MINIMUM = 1; -export declare function getDefaultRxDocumentMeta(): RxDocumentMeta; -/** - * Returns a revision that is not valid. - * Use this to have correct typings - * while the storage wrapper anyway will overwrite the revision. - */ -export declare function getDefaultRevision(): string; -export declare function stripMetaDataFromDocument(docData: RxDocumentData): RxDocType; -/** - * Faster way to check the equality of document lists - * compared to doing a deep-equal. - * Here we only check the ids and revisions. - */ -export declare function areRxDocumentArraysEqual(primaryPath: StringKeys>, ar1: RxDocumentData[], ar2: RxDocumentData[]): boolean; -export declare function getSortDocumentsByLastWriteTimeComparator(primaryPath: string): (a: RxDocumentData, b: RxDocumentData) => number; -export declare function sortDocumentsByLastWriteTime(primaryPath: string, docs: RxDocumentData[]): RxDocumentData[]; -type AnyDocFormat = RxDocType | WithDeleted | RxDocumentData | WithDeletedAndAttachments; -export declare function toWithDeleted(docData: AnyDocFormat | DeepReadonly>): WithDeleted; -export {}; diff --git a/dist/types/plugins/utils/utils-error.d.ts b/dist/types/plugins/utils/utils-error.d.ts deleted file mode 100644 index 350a3fc1d05..00000000000 --- a/dist/types/plugins/utils/utils-error.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -import type { PlainJsonError, RxError, RxTypeError } from '../../types/index.d.ts'; -/** - * Returns an error that indicates that a plugin is missing - * We do not throw a RxError because this should not be handled - * programmatically but by using the correct import - */ -export declare function pluginMissing(pluginKey: string): Error; -export declare function errorToPlainJson(err: Error | TypeError | RxError | RxTypeError): PlainJsonError; diff --git a/dist/types/plugins/utils/utils-global.d.ts b/dist/types/plugins/utils/utils-global.d.ts deleted file mode 100644 index 3dc4e24e702..00000000000 --- 
a/dist/types/plugins/utils/utils-global.d.ts +++ /dev/null @@ -1,6 +0,0 @@ -/** - * Can be used by some plugins to have a "global" object that - * can be imported and mutated at will. - */ -export declare const RXDB_UTILS_GLOBAL: any; -export declare const PREMIUM_FLAG_HASH = "6da4936d1425ff3a5c44c02342c6daf791d266be3ae8479b8ec59e261df41b93"; diff --git a/dist/types/plugins/utils/utils-hash.d.ts b/dist/types/plugins/utils/utils-hash.d.ts deleted file mode 100644 index be8402a69df..00000000000 --- a/dist/types/plugins/utils/utils-hash.d.ts +++ /dev/null @@ -1,19 +0,0 @@ -import type { HashFunction } from '../../types/index.d.ts'; -/** - * TODO in the future we should no longer provide a - * fallback to crypto.subtle.digest. - * Instead users without crypto.subtle.digest support, should have to provide their own - * hash function. - */ -export declare function jsSha256(input: string): Promise; -export declare function nativeSha256(input: string): Promise; -export declare const canUseCryptoSubtle: boolean; -/** - * Default hash method used to hash - * strings and do equal comparisons. - * - * IMPORTANT: Changing the default hashing method - * requires a BREAKING change! 
- */ -export declare const defaultHashSha256: HashFunction; -export declare function hashStringToNumber(str: string): number; diff --git a/dist/types/plugins/utils/utils-map.d.ts b/dist/types/plugins/utils/utils-map.d.ts deleted file mode 100644 index 8e85e687d8e..00000000000 --- a/dist/types/plugins/utils/utils-map.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export declare function getFromMapOrThrow(map: Map | WeakMap, key: K): V; -export declare function getFromMapOrCreate(map: Map | WeakMap, index: MapIndex, creator: () => MapValue, ifWasThere?: (value: MapValue) => void): MapValue; diff --git a/dist/types/plugins/utils/utils-number.d.ts b/dist/types/plugins/utils/utils-number.d.ts deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/dist/types/plugins/utils/utils-object-deep-equal.d.ts b/dist/types/plugins/utils/utils-object-deep-equal.d.ts deleted file mode 100644 index bea71cd0d17..00000000000 --- a/dist/types/plugins/utils/utils-object-deep-equal.d.ts +++ /dev/null @@ -1,7 +0,0 @@ -/** - * Copied from the fast-deep-equal package - * because it does not support es modules and causes optimization bailouts. - * TODO use the npm package again when this is merged: - * @link https://github.com/epoberezkin/fast-deep-equal/pull/105 - */ -export declare function deepEqual(a: any, b: any): boolean; diff --git a/dist/types/plugins/utils/utils-object-dot-prop.d.ts b/dist/types/plugins/utils/utils-object-dot-prop.d.ts deleted file mode 100644 index 95e8babedb3..00000000000 --- a/dist/types/plugins/utils/utils-object-dot-prop.d.ts +++ /dev/null @@ -1,14 +0,0 @@ -/** - * Copied from - * @link https://github.com/sindresorhus/dot-prop/blob/main/index.js - * because it is currently an esm only module. - * TODO use the npm package again when RxDB is also fully esm. - */ -/** - * TODO we need some performance tests and improvements here. 
- */ -export declare function getProperty(object: any, path: string | string[], value?: any): any; -export declare function setProperty(object: any, path: string, value: any): any; -export declare function deleteProperty(object: any, path: string): boolean | undefined; -export declare function hasProperty(object: any, path: string): boolean; -export declare function deepKeys(object: any): any[]; diff --git a/dist/types/plugins/utils/utils-object.d.ts b/dist/types/plugins/utils/utils-object.d.ts deleted file mode 100644 index cfc8d9bd18f..00000000000 --- a/dist/types/plugins/utils/utils-object.d.ts +++ /dev/null @@ -1,55 +0,0 @@ -import type { DeepReadonlyObject } from '../../types/index.d.ts'; -export declare function deepFreeze(o: T): T; -/** - * To get specific nested path values from objects, - * RxDB normally uses the 'dot-prop' npm module. - * But when performance is really relevant, this is not fast enough. - * Instead we use a monad that can prepare some stuff up front - * and we can reuse the generated function. - */ -export type ObjectPathMonadFunction = (obj: T) => R; -export declare function objectPathMonad(objectPath: string): ObjectPathMonadFunction; -export declare function getFromObjectOrThrow(obj: { - [k: string]: V; -}, key: string): V; -/** - * returns a flattened object - * @link https://gist.github.com/penguinboy/762197 - */ -export declare function flattenObject(ob: any): any; -/** - * does a flat copy on the objects, - * is about 3 times faster then using deepClone - * @link https://jsperf.com/object-rest-spread-vs-clone/2 - */ -export declare function flatClone(obj: T | DeepReadonlyObject | Readonly): T; -/** - * @link https://stackoverflow.com/a/11509718/3443137 - */ -export declare function firstPropertyNameOfObject(obj: any): string; -export declare function firstPropertyValueOfObject(obj: { - [k: string]: T; -}): T; -/** - * deep-sort an object so its attributes are in lexical order. 
- * Also sorts the arrays inside of the object if no-array-sort not set - */ -export declare function sortObject(obj: any, noArraySort?: boolean): any; -/** - * Deep clone a plain json object. - * Does not work with recursive stuff - * or non-plain-json. - * IMPORTANT: Performance of this is very important, - * do not change it without running performance tests! - * - * @link https://github.com/zxdong262/deep-copy/blob/master/src/index.ts - */ -declare function deepClone(src: T | DeepReadonlyObject): T; -export declare const clone: typeof deepClone; -/** - * overwrites the getter with the actual value - * Mostly used for caching stuff on the first run - */ -export declare function overwriteGetterForCaching(obj: any, getterName: string, value: ValueType): ValueType; -export declare function hasDeepProperty(obj: any, property: string): boolean; -export {}; diff --git a/dist/types/plugins/utils/utils-other.d.ts b/dist/types/plugins/utils/utils-other.d.ts deleted file mode 100644 index fc2cf466604..00000000000 --- a/dist/types/plugins/utils/utils-other.d.ts +++ /dev/null @@ -1,13 +0,0 @@ -export declare function runXTimes(xTimes: number, fn: (idx: number) => void): void; -export declare function ensureNotFalsy(obj: T | false | undefined | null, message?: string): T; -export declare function ensureInteger(obj: unknown): number; -/** - * Using shareReplay() without settings will not unsubscribe - * if there are no more subscribers. - * So we use these defaults. 
- * @link https://cartant.medium.com/rxjs-whats-changed-with-sharereplay-65c098843e95 - */ -export declare const RXJS_SHARE_REPLAY_DEFAULTS: { - bufferSize: number; - refCount: boolean; -}; diff --git a/dist/types/plugins/utils/utils-promise.d.ts b/dist/types/plugins/utils/utils-promise.d.ts deleted file mode 100644 index 31fffacb37e..00000000000 --- a/dist/types/plugins/utils/utils-promise.d.ts +++ /dev/null @@ -1,39 +0,0 @@ -/** - * returns a promise that resolves on the next tick - */ -export declare function nextTick(): Promise; -export declare function promiseWait(ms?: number): Promise; -export declare function toPromise(maybePromise: Promise | T): Promise; -/** - * returns true if promise is given - */ -export declare function isPromise(value: any): boolean; -/** - * Reusing resolved promises has a better - * performance than creating new ones each time. - */ -export declare const PROMISE_RESOLVE_TRUE: Promise; -export declare const PROMISE_RESOLVE_FALSE: Promise; -export declare const PROMISE_RESOLVE_NULL: Promise; -export declare const PROMISE_RESOLVE_VOID: Promise; -export declare function requestIdlePromiseNoQueue( -/** - * We always set a timeout! - * RxDB might be used on the server side where the - * server runs 24/4 on 99% CPU. So without a timeout - * this would never resolve which could cause a memory leak. 
- */ -timeout?: number | undefined): Promise; -export declare function requestIdlePromise(timeout?: number | undefined): Promise; -/** - * run the callback if requestIdleCallback available - * do nothing if not - * @link https://developer.mozilla.org/de/docs/Web/API/Window/requestIdleCallback - */ -export declare function requestIdleCallbackIfAvailable(fun: Function): void; -/** - * like Promise.all() but runs in series instead of parallel - * @link https://github.com/egoist/promise.series/blob/master/index.js - * @param tasks array with functions that return a promise - */ -export declare function promiseSeries(tasks: Function[], initial?: any): Promise; diff --git a/dist/types/plugins/utils/utils-regex.d.ts b/dist/types/plugins/utils/utils-regex.d.ts deleted file mode 100644 index 12e8c884d70..00000000000 --- a/dist/types/plugins/utils/utils-regex.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -export declare const REGEX_ALL_DOTS: RegExp; -export declare const REGEX_ALL_PIPES: RegExp; diff --git a/dist/types/plugins/utils/utils-revision.d.ts b/dist/types/plugins/utils/utils-revision.d.ts deleted file mode 100644 index bef793cd88b..00000000000 --- a/dist/types/plugins/utils/utils-revision.d.ts +++ /dev/null @@ -1,21 +0,0 @@ -import type { RxDocumentData } from '../../types/index.d.ts'; -/** - * Parses the full revision. - * Do NOT use this if you only need the revision height, - * then use getHeightOfRevision() instead which is faster. - */ -export declare function parseRevision(revision: string): { - height: number; - hash: string; -}; -/** - * @hotPath Performance is very important here - * because we need to parse the revision height very often. - * Do not use `parseInt(revision.split('-')[0], 10)` because - * only fetching the start-number chars is faster. - */ -export declare function getHeightOfRevision(revision: string): number; -/** - * Creates the next write revision for a given document. 
- */ -export declare function createRevision(databaseInstanceToken: string, previousDocData?: RxDocumentData): string; diff --git a/dist/types/plugins/utils/utils-rxdb-version.d.ts b/dist/types/plugins/utils/utils-rxdb-version.d.ts deleted file mode 100644 index 47b14c43abd..00000000000 --- a/dist/types/plugins/utils/utils-rxdb-version.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -/** - * This file is replaced in the 'npm run build:version' script. - */ -export declare const RXDB_VERSION = "15.24.0"; diff --git a/dist/types/plugins/utils/utils-rxdb-version.template.d.ts b/dist/types/plugins/utils/utils-rxdb-version.template.d.ts deleted file mode 100644 index 2f324759e7c..00000000000 --- a/dist/types/plugins/utils/utils-rxdb-version.template.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -/** - * This file is replaced in the 'npm run build:version' script. - */ -export declare const RXDB_VERSION = "|PLACEHOLDER|"; diff --git a/dist/types/plugins/utils/utils-string.d.ts b/dist/types/plugins/utils/utils-string.d.ts deleted file mode 100644 index 3c6c6a88651..00000000000 --- a/dist/types/plugins/utils/utils-string.d.ts +++ /dev/null @@ -1,32 +0,0 @@ -/** - * get a random string which can be used with couchdb - * @link http://stackoverflow.com/a/1349426/3443137 - */ -export declare function randomCouchString(length?: number): string; -/** - * A random string that is never inside of any storage - */ -export declare const RANDOM_STRING = "Fz7SZXPmYJujkzjY1rpXWvlWBqoGAfAX"; -/** - * uppercase first char - */ -export declare function ucfirst(str: string): string; -/** - * removes trailing and ending dots from the string - */ -export declare function trimDots(str: string): string; -/** - * @link https://stackoverflow.com/a/44950500/3443137 - */ -export declare function lastCharOfString(str: string): string; -/** - * returns true if the given name is likely a folder path - */ -export declare function isFolderPath(name: string): boolean; -/** - * @link 
https://gist.github.com/andreburgaud/6f73fd2d690b629346b8 - * @link https://stackoverflow.com/a/76240378/3443137 - */ -export declare function arrayBufferToString(arrayBuffer: ArrayBuffer): string; -export declare function stringToArrayBuffer(str: string): ArrayBuffer; -export declare function normalizeString(str: string): string; diff --git a/dist/types/plugins/utils/utils-time.d.ts b/dist/types/plugins/utils/utils-time.d.ts deleted file mode 100644 index 2703bf7664e..00000000000 --- a/dist/types/plugins/utils/utils-time.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -/** - * Returns the current time in milliseconds, - * also ensures to not return the same value twice. - */ -export declare function now(): number; diff --git a/dist/types/plugins/validate-ajv/index.d.ts b/dist/types/plugins/validate-ajv/index.d.ts deleted file mode 100644 index 20aa7e4cbc8..00000000000 --- a/dist/types/plugins/validate-ajv/index.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -import type { RxDocumentData, RxJsonSchema } from '../../types/index.d.ts'; -export declare function getValidator(schema: RxJsonSchema): (docData: RxDocumentData) => any; -export declare const wrappedValidateAjvStorage: (args: { - storage: import("../../types/rx-storage.interface").RxStorage; -}) => import("../../types/rx-storage.interface").RxStorage; diff --git a/dist/types/plugins/validate-is-my-json-valid/index.d.ts b/dist/types/plugins/validate-is-my-json-valid/index.d.ts deleted file mode 100644 index 41311c3ea77..00000000000 --- a/dist/types/plugins/validate-is-my-json-valid/index.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -import type { RxJsonSchema } from '../../types/index.d.ts'; -export declare function getValidator(schema: RxJsonSchema): (docData: any) => any; -export declare const wrappedValidateIsMyJsonValidStorage: (args: { - storage: import("../../types/rx-storage.interface").RxStorage; -}) => import("../../types/rx-storage.interface").RxStorage; diff --git a/dist/types/plugins/validate-z-schema/index.d.ts 
b/dist/types/plugins/validate-z-schema/index.d.ts deleted file mode 100644 index 0789aa2cb18..00000000000 --- a/dist/types/plugins/validate-z-schema/index.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -import type { RxJsonSchema } from '../../types/index.d.ts'; -export declare function getValidator(schema: RxJsonSchema): (docData: any) => any; -export declare const wrappedValidateZSchemaStorage: (args: { - storage: import("../../types/rx-storage.interface").RxStorage; -}) => import("../../types/rx-storage.interface").RxStorage; diff --git a/dist/types/query-cache.d.ts b/dist/types/query-cache.d.ts deleted file mode 100644 index c4999e7bd5f..00000000000 --- a/dist/types/query-cache.d.ts +++ /dev/null @@ -1,35 +0,0 @@ -/** - * the query-cache makes sure that on every query-state, exactly one instance can exist - * if you use the same mango-query more then once, it will reuse the first RxQuery - */ -import type { RxQuery, RxCacheReplacementPolicy, RxCollection } from './types/index.d.ts'; -export declare class QueryCache { - _map: Map; - /** - * check if an equal query is in the cache, - * if true, return the cached one, - * if false, save the given one and return it - */ - getByQuery(rxQuery: RxQuery): RxQuery; -} -export declare function createQueryCache(): QueryCache; -export declare function uncacheRxQuery(queryCache: QueryCache, rxQuery: RxQuery): void; -export declare function countRxQuerySubscribers(rxQuery: RxQuery): number; -export declare const DEFAULT_TRY_TO_KEEP_MAX = 100; -export declare const DEFAULT_UNEXECUTED_LIFETIME: number; -/** - * The default cache replacement policy - * See docs-src/query-cache.md to learn how it should work. 
- * Notice that this runs often and should block the cpu as less as possible - * This is a monad which makes it easier to unit test - */ -export declare const defaultCacheReplacementPolicyMonad: (tryToKeepMax: number, unExecutedLifetime: number) => RxCacheReplacementPolicy; -export declare const defaultCacheReplacementPolicy: RxCacheReplacementPolicy; -export declare const COLLECTIONS_WITH_RUNNING_CLEANUP: WeakSet; -/** - * Triggers the cache replacement policy after waitTime has passed. - * We do not run this directly because at exactly the time a query is created, - * we need all CPU to minimize latency. - * Also this should not be triggered multiple times when waitTime is still waiting. - */ -export declare function triggerCacheReplacement(rxCollection: RxCollection): void; diff --git a/dist/types/query-planner.d.ts b/dist/types/query-planner.d.ts deleted file mode 100644 index 0d46d308b15..00000000000 --- a/dist/types/query-planner.d.ts +++ /dev/null @@ -1,30 +0,0 @@ -import type { FilledMangoQuery, MangoQuerySelector, RxDocumentData, RxJsonSchema, RxQueryPlan, RxQueryPlanKey, RxQueryPlanerOpts } from './types/index.d.ts'; -export declare const INDEX_MAX: string; -/** - * Do not use -Infinity here because it would be - * transformed to null on JSON.stringify() which can break things - * when the query plan is send to the storage as json. - * @link https://stackoverflow.com/a/16644751 - * Notice that for IndexedDB IDBKeyRange we have - * to transform the value back to -Infinity - * before we can use it in IDBKeyRange.bound. - */ -export declare const INDEX_MIN: number; -/** - * Returns the query plan which contains - * information about how to run the query - * and which indexes to use. - * - * This is used in some storage like Memory, dexie.js and IndexedDB. 
- */ -export declare function getQueryPlan(schema: RxJsonSchema>, query: FilledMangoQuery): RxQueryPlan; -export declare const LOGICAL_OPERATORS: Set; -export declare const LOWER_BOUND_LOGICAL_OPERATORS: Set; -export declare const UPPER_BOUND_LOGICAL_OPERATORS: Set; -export declare function isSelectorSatisfiedByIndex(index: string[], selector: MangoQuerySelector, startKeys: RxQueryPlanKey[], endKeys: RxQueryPlanKey[]): boolean; -export declare function getMatcherQueryOpts(operator: string, operatorValue: any): Partial; -/** - * Returns a number that determines the quality of the query plan. - * Higher number means better query plan. - */ -export declare function rateQueryPlan(schema: RxJsonSchema>, query: FilledMangoQuery, queryPlan: RxQueryPlan): number; diff --git a/dist/types/replication-protocol/checkpoint.d.ts b/dist/types/replication-protocol/checkpoint.d.ts deleted file mode 100644 index 692853d7ca4..00000000000 --- a/dist/types/replication-protocol/checkpoint.d.ts +++ /dev/null @@ -1,8 +0,0 @@ -import type { RxStorageInstanceReplicationInput, RxStorageInstanceReplicationState, RxStorageReplicationDirection } from '../types/index.d.ts'; -export declare function getLastCheckpointDoc(state: RxStorageInstanceReplicationState, direction: RxStorageReplicationDirection): Promise; -/** - * Sets the checkpoint, - * automatically resolves conflicts that appear. 
- */ -export declare function setCheckpoint(state: RxStorageInstanceReplicationState, direction: RxStorageReplicationDirection, checkpoint: CheckpointType): Promise; -export declare function getCheckpointKey(input: RxStorageInstanceReplicationInput): Promise; diff --git a/dist/types/replication-protocol/conflicts.d.ts b/dist/types/replication-protocol/conflicts.d.ts deleted file mode 100644 index ef4158b5560..00000000000 --- a/dist/types/replication-protocol/conflicts.d.ts +++ /dev/null @@ -1,14 +0,0 @@ -import type { RxConflictHandler, RxConflictHandlerInput, RxConflictHandlerOutput, RxDocumentData, RxStorageInstanceReplicationState } from '../types/index.d.ts'; -export declare const defaultConflictHandler: RxConflictHandler; -/** - * Resolves a conflict error or determines that the given document states are equal. - * Returns the resolved document that must be written to the fork. - * Then the new document state can be pushed upstream. - * If document is not in conflict, returns undefined. - * If error is non-409, it throws an error. - * Conflicts are only solved in the upstream, never in the downstream. - */ -export declare function resolveConflictError(state: RxStorageInstanceReplicationState, input: RxConflictHandlerInput, forkState: RxDocumentData): Promise<{ - resolvedDoc: RxDocumentData; - output: RxConflictHandlerOutput; -} | undefined>; diff --git a/dist/types/replication-protocol/downstream.d.ts b/dist/types/replication-protocol/downstream.d.ts deleted file mode 100644 index 17bebdecbee..00000000000 --- a/dist/types/replication-protocol/downstream.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -import type { RxStorageInstanceReplicationState } from '../types/index.d.ts'; -/** - * Writes all documents from the master to the fork. 
- * The downstream has two operation modes - * - Sync by iterating over the checkpoints via downstreamResyncOnce() - * - Sync by listening to the changestream via downstreamProcessChanges() - * We need this to be able to do initial syncs - * and still can have fast event based sync when the client is not offline. - */ -export declare function startReplicationDownstream(state: RxStorageInstanceReplicationState): Promise; diff --git a/dist/types/replication-protocol/helper.d.ts b/dist/types/replication-protocol/helper.d.ts deleted file mode 100644 index a213bcf0121..00000000000 --- a/dist/types/replication-protocol/helper.d.ts +++ /dev/null @@ -1,5 +0,0 @@ -import type { BulkWriteRow, RxDocumentData, RxDocumentWriteData, RxStorageInstance, RxStorageInstanceReplicationState, RxStorageReplicationMeta, WithDeletedAndAttachments } from '../types/index.d.ts'; -export declare function docStateToWriteDoc(databaseInstanceToken: string, hasAttachments: boolean, keepMeta: boolean, docState: WithDeletedAndAttachments, previous?: RxDocumentData): RxDocumentWriteData; -export declare function writeDocToDocState(writeDoc: RxDocumentData, keepAttachments: boolean, keepMeta: boolean): WithDeletedAndAttachments; -export declare function stripAttachmentsDataFromMetaWriteRows(state: RxStorageInstanceReplicationState, rows: BulkWriteRow>[]): BulkWriteRow>[]; -export declare function getUnderlyingPersistentStorage(instance: RxStorageInstance): RxStorageInstance; diff --git a/dist/types/replication-protocol/index.d.ts b/dist/types/replication-protocol/index.d.ts deleted file mode 100644 index bf17a864e1f..00000000000 --- a/dist/types/replication-protocol/index.d.ts +++ /dev/null @@ -1,24 +0,0 @@ -/** - * These files contain the replication protocol. - * It can be used to replicated RxStorageInstances or RxCollections - * or even to do a client(s)-server replication. 
- */ -import type { RxConflictHandler, RxReplicationHandler, RxStorageInstance, RxStorageInstanceReplicationInput, RxStorageInstanceReplicationState } from '../types/index.d.ts'; -export * from './checkpoint.ts'; -export * from './downstream.ts'; -export * from './upstream.ts'; -export * from './meta-instance.ts'; -export * from './conflicts.ts'; -export * from './helper.ts'; -export declare function replicateRxStorageInstance(input: RxStorageInstanceReplicationInput): RxStorageInstanceReplicationState; -export declare function awaitRxStorageReplicationFirstInSync(state: RxStorageInstanceReplicationState): Promise; -export declare function awaitRxStorageReplicationInSync(replicationState: RxStorageInstanceReplicationState): Promise<[any, any, any]>; -export declare function awaitRxStorageReplicationIdle(state: RxStorageInstanceReplicationState): Promise; -export declare function rxStorageInstanceToReplicationHandler(instance: RxStorageInstance, conflictHandler: RxConflictHandler, databaseInstanceToken: string, -/** - * If set to true, - * the _meta.lwt from the pushed documents is kept. 
- * (Used in the migration to ensure checkpoints are still valid) - */ -keepMeta?: boolean): RxReplicationHandler; -export declare function cancelRxStorageReplication(replicationState: RxStorageInstanceReplicationState): Promise; diff --git a/dist/types/replication-protocol/meta-instance.d.ts b/dist/types/replication-protocol/meta-instance.d.ts deleted file mode 100644 index 9eb31a4dc4f..00000000000 --- a/dist/types/replication-protocol/meta-instance.d.ts +++ /dev/null @@ -1,12 +0,0 @@ -import type { BulkWriteRow, ById, RxDocumentData, RxJsonSchema, RxStorageInstanceReplicationState, RxStorageReplicationMeta, WithDeleted } from '../types/index.d.ts'; -export declare const META_INSTANCE_SCHEMA_TITLE = "RxReplicationProtocolMetaData"; -export declare function getRxReplicationMetaInstanceSchema(replicatedDocumentsSchema: RxJsonSchema>, encrypted: boolean): RxJsonSchema>>; -/** - * Returns the document states of what the fork instance - * assumes to be the latest state on the master instance. - */ -export declare function getAssumedMasterState(state: RxStorageInstanceReplicationState, docIds: string[]): Promise; - metaDocument: RxDocumentData>; -}>>; -export declare function getMetaWriteRow(state: RxStorageInstanceReplicationState, newMasterDocState: WithDeleted, previous?: RxDocumentData>, isResolvedConflict?: string): Promise>>; diff --git a/dist/types/replication-protocol/upstream.d.ts b/dist/types/replication-protocol/upstream.d.ts deleted file mode 100644 index 70191e27063..00000000000 --- a/dist/types/replication-protocol/upstream.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -import type { RxStorageInstanceReplicationState } from '../types/index.d.ts'; -/** - * Writes all document changes from the fork to the master. - * The upstream runs on two modes: - * - For initial replication, a checkpoint-iteration is used - * - For ongoing local writes, we just subscribe to the changeStream of the fork. 
- * In contrast to the master, the fork can be assumed to never loose connection, - * so we do not have to prepare for missed out events. - */ -export declare function startReplicationUpstream(state: RxStorageInstanceReplicationState): Promise; diff --git a/dist/types/rx-change-event.d.ts b/dist/types/rx-change-event.d.ts deleted file mode 100644 index c796ab869a6..00000000000 --- a/dist/types/rx-change-event.d.ts +++ /dev/null @@ -1,19 +0,0 @@ -/** - * RxChangeEvents a emitted when something in the database changes - * they can be grabbed by the observables of database, collection and document - */ -import type { ChangeEvent as EventReduceChangeEvent } from 'event-reduce-js'; -import type { EventBulk, RxChangeEvent, RxDocumentData } from './types/index.d.ts'; -export declare function getDocumentDataOfRxChangeEvent(rxChangeEvent: RxChangeEvent): RxDocumentData; -/** - * Might return null which means an - * already deleted document got modified but still is deleted. - * These kind of events are not relevant for the event-reduce algorithm - * and must be filtered out. - */ -export declare function rxChangeEventToEventReduceChangeEvent(rxChangeEvent: RxChangeEvent): EventReduceChangeEvent | null; -/** - * Flattens the given events into a single array of events. - * Used mostly in tests. - */ -export declare function flattenEvents(input: EventBulk | EventBulk[] | EventType | EventType[]): EventType[]; diff --git a/dist/types/rx-collection-helper.d.ts b/dist/types/rx-collection-helper.d.ts deleted file mode 100644 index 53f0980acc9..00000000000 --- a/dist/types/rx-collection-helper.d.ts +++ /dev/null @@ -1,24 +0,0 @@ -import type { HashFunction, InternalStoreDocType, RxCollection, RxDatabase, RxDocumentData, RxStorage, RxStorageInstance, RxStorageInstanceCreationParams } from './types/index.d.ts'; -import type { RxSchema } from './rx-schema.ts'; -import type { RxCollectionBase } from './rx-collection.ts'; -/** - * fills in the default data. 
- * This also clones the data. - */ -export declare function fillObjectDataBeforeInsert(schema: RxSchema, data: Partial> | any): RxDocumentData; -/** - * Creates the storage instances that are used internally in the collection - */ -export declare function createRxCollectionStorageInstance(rxDatabase: RxDatabase<{}, Internals, InstanceCreationOptions>, storageInstanceCreationParams: RxStorageInstanceCreationParams): Promise>; -/** - * Removes the main storage of the collection - * and all connected storages like the ones from the replication meta etc. - */ -export declare function removeCollectionStorages(storage: RxStorage, databaseInternalStorage: RxStorageInstance, any, any>, databaseInstanceToken: string, databaseName: string, collectionName: string, password?: string, -/** - * If no hash function is provided, - * we assume that the whole internal store is removed anyway - * so we do not have to delete the meta documents. - */ -hashFunction?: HashFunction): Promise; -export declare function ensureRxCollectionIsNotDestroyed(collection: RxCollection | RxCollectionBase): void; diff --git a/dist/types/rx-collection.d.ts b/dist/types/rx-collection.d.ts deleted file mode 100644 index 0fb8119277d..00000000000 --- a/dist/types/rx-collection.d.ts +++ /dev/null @@ -1,151 +0,0 @@ -import type { RxMigrationState } from './plugins/migration-schema/index.ts'; -import { DocumentCache } from './doc-cache.ts'; -import { QueryCache } from './query-cache.ts'; -import { ChangeEventBuffer } from './change-event-buffer.ts'; -import { Subscription, Observable } from 'rxjs'; -import type { KeyFunctionMap, RxCollection, RxDatabase, RxQuery, RxDocument, RxDumpCollection, RxDumpCollectionAny, MangoQuery, MangoQueryNoLimit, RxCacheReplacementPolicy, RxStorageWriteError, RxChangeEvent, RxChangeEventInsert, RxChangeEventUpdate, RxChangeEventDelete, RxStorageInstance, CollectionsOfDatabase, RxConflictHandler, MaybePromise, CRDTEntry, MangoQuerySelectorAndIndex, MigrationStrategies } from 
'./types/index.d.ts'; -import { RxSchema } from './rx-schema.ts'; -import { WrappedRxStorageInstance } from './rx-storage-helper.ts'; -import { IncrementalWriteQueue } from './incremental-write.ts'; -declare const HOOKS_WHEN: readonly ["pre", "post"]; -type HookWhenType = typeof HOOKS_WHEN[number]; -declare const HOOKS_KEYS: readonly ["insert", "save", "remove", "create"]; -type HookKeyType = typeof HOOKS_KEYS[number]; -export declare class RxCollectionBase { - database: RxDatabase; - name: string; - schema: RxSchema; - internalStorageInstance: RxStorageInstance; - instanceCreationOptions: InstanceCreationOptions; - migrationStrategies: MigrationStrategies; - methods: KeyFunctionMap; - attachments: KeyFunctionMap; - options: any; - cacheReplacementPolicy: RxCacheReplacementPolicy; - statics: KeyFunctionMap; - conflictHandler: RxConflictHandler; - /** - * Stores all 'normal' documents - */ - storageInstance: WrappedRxStorageInstance; - readonly timeouts: Set>; - incrementalWriteQueue: IncrementalWriteQueue; - constructor(database: RxDatabase, name: string, schema: RxSchema, internalStorageInstance: RxStorageInstance, instanceCreationOptions?: InstanceCreationOptions, migrationStrategies?: MigrationStrategies, methods?: KeyFunctionMap, attachments?: KeyFunctionMap, options?: any, cacheReplacementPolicy?: RxCacheReplacementPolicy, statics?: KeyFunctionMap, conflictHandler?: RxConflictHandler); - get insert$(): Observable>; - get update$(): Observable>; - get remove$(): Observable>; - _incrementalUpsertQueues: Map>; - synced: boolean; - hooks: { - [key in HookKeyType]: { - [when in HookWhenType]: { - series: Function[]; - parallel: Function[]; - }; - }; - }; - _subs: Subscription[]; - _docCache: DocumentCache; - _queryCache: QueryCache; - $: Observable>; - checkpoint$: Observable; - _changeEventBuffer: ChangeEventBuffer; - /** - * When the collection is destroyed, - * these functions will be called an awaited. 
- * Used to automatically clean up stuff that - * belongs to this collection. - */ - onDestroy: (() => MaybePromise)[]; - destroyed: boolean; - onRemove: (() => MaybePromise)[]; - prepare(): Promise; - /** - * Manually call the cleanup function of the storage. - * @link https://rxdb.info/cleanup.html - */ - cleanup(_minimumDeletedTime?: number): Promise; - migrationNeeded(): Promise; - getMigrationState(): RxMigrationState; - startMigration(batchSize?: number): Promise; - migratePromise(batchSize?: number): Promise; - insert(json: RxDocumentType | RxDocument): Promise>; - bulkInsert(docsData: RxDocumentType[]): Promise<{ - success: RxDocument[]; - error: RxStorageWriteError[]; - }>; - bulkRemove(ids: string[]): Promise<{ - success: RxDocument[]; - error: RxStorageWriteError[]; - }>; - /** - * same as bulkInsert but overwrites existing document with same primary - */ - bulkUpsert(docsData: Partial[]): Promise<{ - success: RxDocument[]; - error: RxStorageWriteError[]; - }>; - /** - * same as insert but overwrites existing document with same primary - */ - upsert(json: Partial): Promise>; - /** - * upserts to a RxDocument, uses incrementalModify if document already exists - */ - incrementalUpsert(json: Partial): Promise>; - find(queryObj?: MangoQuery): RxQuery[], OrmMethods, Reactivity>; - findOne(queryObj?: MangoQueryNoLimit | string): RxQuery | null, OrmMethods, Reactivity>; - count(queryObj?: MangoQuerySelectorAndIndex): RxQuery; - /** - * find a list documents by their primary key - * has way better performance then running multiple findOne() or a find() with a complex $or-selected - */ - findByIds(ids: string[]): RxQuery>, OrmMethods, Reactivity>; - /** - * Export collection to a JSON friendly format. - */ - exportJSON(): Promise>; - exportJSON(): Promise>; - /** - * Import the parsed JSON export into the collection. - * @param _exportedJSON The previously exported data from the `.exportJSON()` method. 
- */ - importJSON(_exportedJSON: RxDumpCollectionAny): Promise; - insertCRDT(_updateObj: CRDTEntry | CRDTEntry[]): RxDocument; - /** - * HOOKS - */ - addHook(when: HookWhenType, key: HookKeyType, fun: any, parallel?: boolean): void; - getHooks(when: HookWhenType, key: HookKeyType): { - series: Function[]; - parallel: Function[]; - }; - hasHooks(when: HookWhenType, key: HookKeyType): boolean; - _runHooks(when: HookWhenType, key: HookKeyType, data: any, instance?: any): Promise; - /** - * does the same as ._runHooks() but with non-async-functions - */ - _runHooksSync(when: HookWhenType, key: HookKeyType, data: any, instance: any): void; - /** - * Returns a promise that resolves after the given time. - * Ensures that is properly cleans up when the collection is destroyed - * so that no running timeouts prevent the exit of the JavaScript process. - */ - promiseWait(time: number): Promise; - destroy(): Promise; - /** - * remove all data of the collection - */ - remove(): Promise; - get asRxCollection(): RxCollection; -} -/** - * creates and prepares a new collection - */ -export declare function createRxCollection({ database, name, schema, instanceCreationOptions, migrationStrategies, autoMigrate, statics, methods, attachments, options, localDocuments, cacheReplacementPolicy, conflictHandler }: any): Promise; -export declare function isRxCollection(obj: any): boolean; -export {}; diff --git a/dist/types/rx-database-internal-store.d.ts b/dist/types/rx-database-internal-store.d.ts deleted file mode 100644 index 0d66a4cbd32..00000000000 --- a/dist/types/rx-database-internal-store.d.ts +++ /dev/null @@ -1,36 +0,0 @@ -import type { CollectionsOfDatabase, InternalStoreCollectionDocType, InternalStoreDocType, InternalStoreStorageTokenDocType, RxCollection, RxDatabase, RxDocumentData, RxJsonSchema, RxStorageInstance } from './types/index.d.ts'; -export declare const INTERNAL_CONTEXT_COLLECTION = "collection"; -export declare const INTERNAL_CONTEXT_STORAGE_TOKEN = 
"storage-token"; -export declare const INTERNAL_CONTEXT_MIGRATION_STATUS = "rx-migration-status"; -/** - * Do not change the title, - * we have to flag the internal schema so that - * some RxStorage implementations are able - * to detect if the created RxStorageInstance - * is from the internals or not, - * to do some optimizations in some cases. - */ -export declare const INTERNAL_STORE_SCHEMA_TITLE = "RxInternalDocument"; -export declare const INTERNAL_STORE_SCHEMA: RxJsonSchema>>; -export declare function getPrimaryKeyOfInternalDocument(key: string, context: string): string; -/** - * Returns all internal documents - * with context 'collection' - */ -export declare function getAllCollectionDocuments(storageInstance: RxStorageInstance, any, any>): Promise[]>; -/** - * to not confuse multiInstance-messages with other databases that have the same - * name and adapter, but do not share state with this one (for example in-memory-instances), - * we set a storage-token and use it in the broadcast-channel - */ -export declare const STORAGE_TOKEN_DOCUMENT_KEY = "storageToken"; -export declare const STORAGE_TOKEN_DOCUMENT_ID: string; -export declare function ensureStorageTokenDocumentExists(rxDatabase: RxDatabase): Promise>; -export declare function isDatabaseStateVersionCompatibleWithDatabaseCode(databaseStateVersion: string, codeVersion: string): boolean; -export declare function addConnectedStorageToCollection(collection: RxCollection, storageCollectionName: string, schema: RxJsonSchema): Promise; -export declare function removeConnectedStorageFromCollection(collection: RxCollection, storageCollectionName: string, schema: RxJsonSchema): Promise; -/** - * returns the primary for a given collection-data - * used in the internal store of a RxDatabase - */ -export declare function _collectionNamePrimary(name: string, schema: RxJsonSchema): string; diff --git a/dist/types/rx-database.d.ts b/dist/types/rx-database.d.ts deleted file mode 100644 index e65c8128d29..00000000000 
--- a/dist/types/rx-database.d.ts +++ /dev/null @@ -1,187 +0,0 @@ -import { IdleQueue } from 'custom-idle-queue'; -import type { LeaderElector } from 'broadcast-channel'; -import type { CollectionsOfDatabase, RxDatabase, RxCollectionCreator, RxCollection, RxDumpDatabase, RxDumpDatabaseAny, BackupOptions, RxStorage, RxStorageInstance, RxChangeEvent, RxDatabaseCreator, RxChangeEventBulk, RxDocumentData, RxCleanupPolicy, InternalStoreDocType, InternalStoreStorageTokenDocType, RxTypeError, RxError, HashFunction, MaybePromise, RxState } from './types/index.d.ts'; -import { Subject, Subscription, Observable } from 'rxjs'; -import { WrappedRxStorageInstance } from './rx-storage-helper.ts'; -import type { RxBackupState } from './plugins/backup/index.ts'; -import { ObliviousSet } from 'oblivious-set'; -import type { RxMigrationState } from './plugins/migration-schema/index.ts'; -import type { RxReactivityFactory } from './types/plugins/reactivity.d.ts'; -export declare class RxDatabaseBase { - readonly name: string; - /** - * Uniquely identifies the instance - * of this RxDatabase. - */ - readonly token: string; - readonly storage: RxStorage; - readonly instanceCreationOptions: InstanceCreationOptions; - readonly password: any; - readonly multiInstance: boolean; - readonly eventReduce: boolean; - options: any; - /** - * Stores information documents about the collections of the database - */ - readonly internalStore: RxStorageInstance; - readonly hashFunction: HashFunction; - readonly cleanupPolicy?: Partial | undefined; - readonly allowSlowCount?: boolean | undefined; - readonly reactivity?: RxReactivityFactory | undefined; - readonly idleQueue: IdleQueue; - readonly rxdbVersion = "15.24.0"; - /** - * Contains all known non-closed storage instances - * that belong to this database. - * Used in plugins and unit tests. - */ - readonly storageInstances: Set>; - constructor(name: string, - /** - * Uniquely identifies the instance - * of this RxDatabase. 
- */ - token: string, storage: RxStorage, instanceCreationOptions: InstanceCreationOptions, password: any, multiInstance: boolean, eventReduce: boolean, options: any, - /** - * Stores information documents about the collections of the database - */ - internalStore: RxStorageInstance, hashFunction: HashFunction, cleanupPolicy?: Partial | undefined, allowSlowCount?: boolean | undefined, reactivity?: RxReactivityFactory | undefined); - get $(): Observable>; - getReactivityFactory(): RxReactivityFactory; - _subs: Subscription[]; - /** - * Because having unhandled exceptions would fail, - * we have to store the async errors of the constructor here - * so we can throw them later. - */ - startupErrors: (RxError | RxTypeError)[]; - /** - * When the database is destroyed, - * these functions will be called an awaited. - * Used to automatically clean up stuff that - * belongs to this collection. - */ - onDestroy: (() => MaybePromise)[]; - destroyed: boolean; - collections: Collections; - states: { - [name: string]: RxState; - }; - readonly eventBulks$: Subject>; - private observable$; - /** - * Unique token that is stored with the data. - * Used to detect if the dataset has been deleted - * and if two RxDatabase instances work on the same dataset or not. - * - * Because reading and writing the storageToken runs in the hot path - * of database creation, we do not await the storageWrites but instead - * work with the promise when we need the value. - */ - storageToken: Promise; - /** - * Stores the whole state of the internal storage token document. - * We need this in some plugins. - */ - storageTokenDocument: Promise>; - /** - * Contains the ids of all event bulks that have been emitted - * by the database. - * Used to detect duplicates that come in again via BroadcastChannel - * or other streams. - * TODO instead of having this here, we should add a test to ensure each RxStorage - * behaves equal and does never emit duplicate eventBulks. 
- */ - emittedEventBulkIds: ObliviousSet; - /** - * This is the main handle-point for all change events - * ChangeEvents created by this instance go: - * RxDocument -> RxCollection -> RxDatabase.$emit -> MultiInstance - * ChangeEvents created by other instances go: - * MultiInstance -> RxDatabase.$emit -> RxCollection -> RxDatabase - */ - $emit(changeEventBulk: RxChangeEventBulk): void; - /** - * removes the collection-doc from the internalStore - */ - removeCollectionDoc(name: string, schema: any): Promise; - /** - * creates multiple RxCollections at once - * to be much faster by saving db txs and doing stuff in bulk-operations - * This function is not called often, but mostly in the critical path at the initial page load - * So it must be as fast as possible. - */ - addCollections>(collectionCreators: { - [key in keyof CreatedCollections]: RxCollectionCreator; - }): Promise<{ - [key in keyof CreatedCollections]: RxCollection; - }>; - /** - * runs the given function between idleQueue-locking - */ - lockedRun(fn: (...args: any[]) => T): T extends Promise ? T : Promise; - requestIdlePromise(): Promise; - /** - * Export database to a JSON friendly format. - */ - exportJSON(_collections?: string[]): Promise>; - exportJSON(_collections?: string[]): Promise>; - addState(_name?: string): Promise>; - /** - * Import the parsed JSON export into the collection. - * @param _exportedJSON The previously exported data from the `.exportJSON()` method. - * @note When an interface is loaded in this collection all base properties of the type are typed as `any` - * since data could be encrypted. 
- */ - importJSON(_exportedJSON: RxDumpDatabaseAny): Promise; - backup(_options: BackupOptions): RxBackupState; - leaderElector(): LeaderElector; - isLeader(): boolean; - /** - * returns a promise which resolves when the instance becomes leader - */ - waitForLeadership(): Promise; - migrationStates(): Observable; - /** - * destroys the database-instance and all collections - */ - destroy(): Promise; - /** - * deletes the database and its stored data. - * Returns the names of all removed collections. - */ - remove(): Promise; - get asRxDatabase(): RxDatabase<{}, Internals, InstanceCreationOptions, Reactivity>; -} -/** - * Creates the storage instances that are used internally in the database - * to store schemas and other configuration stuff. - */ -export declare function createRxDatabaseStorageInstance(databaseInstanceToken: string, storage: RxStorage, databaseName: string, options: InstanceCreationOptions, multiInstance: boolean, password?: string): Promise>; -export declare function createRxDatabase({ storage, instanceCreationOptions, name, password, multiInstance, eventReduce, ignoreDuplicate, options, cleanupPolicy, allowSlowCount, localDocuments, hashFunction, reactivity }: RxDatabaseCreator): Promise>; -/** - * Removes the database and all its known data - * with all known collections and all internal meta data. - * - * Returns the names of the removed collections. - */ -export declare function removeRxDatabase(databaseName: string, storage: RxStorage, password?: string): Promise; -export declare function isRxDatabase(obj: any): boolean; -export declare function dbCount(): number; -/** - * Returns true if the given RxDatabase was the first - * instance that was created on the storage with this name. - * - * Can be used for some optimizations because on the first instantiation, - * we can assume that no data was written before. 
- */ -export declare function isRxDatabaseFirstTimeInstantiated(database: RxDatabase): Promise; -/** - * For better performance some tasks run async - * and are awaited later. - * But we still have to ensure that there have been no errors - * on database creation. - */ -export declare function ensureNoStartupErrors(rxDatabase: RxDatabaseBase): Promise; diff --git a/dist/types/rx-document-prototype-merge.d.ts b/dist/types/rx-document-prototype-merge.d.ts deleted file mode 100644 index 2795cae8d4b..00000000000 --- a/dist/types/rx-document-prototype-merge.d.ts +++ /dev/null @@ -1,24 +0,0 @@ -/** - * For the ORM capabilities, - * we have to merge the document prototype - * with the ORM functions and the data - * We do this iterating over the properties and - * adding them to a new object. - * In the future we should do this by chaining the __proto__ objects - */ -import type { RxCollection, RxDocument, RxDocumentData } from './types/index.d.ts'; -export declare function getDocumentPrototype(rxCollection: RxCollection): any; -export declare function getRxDocumentConstructor(rxCollection: RxCollection): any; -/** - * Create a RxDocument-instance from the jsonData - * and the prototype merge. - * You should never call this method directly, - * instead you should get the document from collection._docCache.getCachedRxDocument(). 
- */ -export declare function createNewRxDocument(rxCollection: RxCollection, documentConstructor: any, docData: RxDocumentData): RxDocument; -/** - * returns the prototype-object - * that contains the orm-methods, - * used in the proto-merge - */ -export declare function getDocumentOrmPrototype(rxCollection: RxCollection): any; diff --git a/dist/types/rx-document.d.ts b/dist/types/rx-document.d.ts deleted file mode 100644 index c29c4a8f8fe..00000000000 --- a/dist/types/rx-document.d.ts +++ /dev/null @@ -1,271 +0,0 @@ -import { Observable } from 'rxjs'; -import type { RxDocument, RxCollection, RxDocumentData, RxDocumentWriteData, UpdateQuery, CRDTEntry, ModifyFunction } from './types/index.d.ts'; -export declare const basePrototype: { - readonly primaryPath: import("./types/util").StringKeys<{ - _deleted: boolean; - _attachments: { - [attachmentId: string]: import("./types/rx-storage").RxAttachmentData; - }; - _rev: string; - _meta: import("./types/rx-document").RxDocumentMeta; - }> | undefined; - readonly primary: any; - readonly revision: string | undefined; - readonly deleted$: any; - readonly deleted$$: any; - readonly deleted: boolean | undefined; - getLatest(this: RxDocument): RxDocument; - /** - * returns the observable which emits the plain-data of this document - */ - readonly $: Observable; - readonly $$: any; - /** - * returns observable of the value of the given path - */ - get$(this: RxDocument, path: string): Observable; - get$$(this: RxDocument, path: string): any; - /** - * populate the given path - */ - populate(this: RxDocument, path: string): Promise; - /** - * get data by objectPath - * @hotPath Performance here is really important, - * run some tests before changing anything. 
- */ - get(this: RxDocument, objPath: string): any | null; - toJSON(this: RxDocument, withMetaFields?: boolean): import("./types/util").DeepReadonlyObject<{ - _deleted: boolean; - _attachments: { - [attachmentId: string]: import("./types/rx-storage").RxAttachmentData; - }; - _rev: string; - _meta: import("./types/rx-document").RxDocumentMeta; - }>; - toMutableJSON(this: RxDocument, withMetaFields?: boolean): { - _deleted: boolean; - _attachments: { - [attachmentId: string]: import("./types/rx-storage").RxAttachmentData; - }; - _rev: string; - _meta: import("./types/rx-document").RxDocumentMeta; - }; - /** - * updates document - * @overwritten by plugin (optional) - * @param updateObj mongodb-like syntax - */ - update(_updateObj: UpdateQuery): never; - incrementalUpdate(_updateObj: UpdateQuery): never; - updateCRDT(_updateObj: CRDTEntry | CRDTEntry[]): never; - putAttachment(): never; - getAttachment(): never; - allAttachments(): never; - readonly allAttachments$: void; - modify(this: RxDocument, mutationFunction: ModifyFunction, _context?: string): Promise; - /** - * runs an incremental update over the document - * @param function that takes the document-data and returns a new data-object - */ - incrementalModify(this: RxDocument, mutationFunction: ModifyFunction, _context?: string): Promise; - patch(this: RxDocument, patch: Partial): Promise>; - /** - * patches the given properties - */ - incrementalPatch(this: RxDocument, patch: Partial): Promise>; - /** - * saves the new document-data - * and handles the events - */ - _saveData(this: RxDocument, newData: RxDocumentWriteData, oldData: RxDocumentData): Promise>; - /** - * Remove the document. - * Notice that there is no hard delete, - * instead deleted documents get flagged with _deleted=true. 
- */ - remove(this: RxDocument): Promise; - incrementalRemove(this: RxDocument): Promise; - destroy(): never; -}; -export declare function createRxDocumentConstructor(proto?: { - readonly primaryPath: import("./types/util").StringKeys<{ - _deleted: boolean; - _attachments: { - [attachmentId: string]: import("./types/rx-storage").RxAttachmentData; - }; - _rev: string; - _meta: import("./types/rx-document").RxDocumentMeta; - }> | undefined; - readonly primary: any; - readonly revision: string | undefined; - readonly deleted$: any; - readonly deleted$$: any; - readonly deleted: boolean | undefined; - getLatest(this: RxDocument): RxDocument; - /** - * returns the observable which emits the plain-data of this document - */ - readonly $: Observable; - readonly $$: any; - /** - * returns observable of the value of the given path - */ - get$(this: RxDocument, path: string): Observable; - get$$(this: RxDocument, path: string): any; - /** - * populate the given path - */ - populate(this: RxDocument, path: string): Promise; - /** - * get data by objectPath - * @hotPath Performance here is really important, - * run some tests before changing anything. 
- */ - get(this: RxDocument, objPath: string): any; - toJSON(this: RxDocument, withMetaFields?: boolean): import("./types/util").DeepReadonlyObject<{ - _deleted: boolean; - _attachments: { - [attachmentId: string]: import("./types/rx-storage").RxAttachmentData; - }; - _rev: string; - _meta: import("./types/rx-document").RxDocumentMeta; - }>; - toMutableJSON(this: RxDocument, withMetaFields?: boolean): { - _deleted: boolean; - _attachments: { - [attachmentId: string]: import("./types/rx-storage").RxAttachmentData; - }; - _rev: string; - _meta: import("./types/rx-document").RxDocumentMeta; - }; - /** - * updates document - * @overwritten by plugin (optional) - * @param updateObj mongodb-like syntax - */ - update(_updateObj: UpdateQuery): never; - incrementalUpdate(_updateObj: UpdateQuery): never; - updateCRDT(_updateObj: CRDTEntry | CRDTEntry[]): never; - putAttachment(): never; - getAttachment(): never; - allAttachments(): never; - readonly allAttachments$: void; - modify(this: RxDocument, mutationFunction: ModifyFunction, _context?: string | undefined): Promise; - /** - * runs an incremental update over the document - * @param function that takes the document-data and returns a new data-object - */ - incrementalModify(this: RxDocument, mutationFunction: ModifyFunction, _context?: string | undefined): Promise; - patch(this: RxDocument, patch: Partial): Promise>; - /** - * patches the given properties - */ - incrementalPatch(this: RxDocument, patch: Partial): Promise>; - /** - * saves the new document-data - * and handles the events - */ - _saveData(this: RxDocument, newData: RxDocumentWriteData, oldData: RxDocumentData): Promise>; - /** - * Remove the document. - * Notice that there is no hard delete, - * instead deleted documents get flagged with _deleted=true. 
- */ - remove(this: RxDocument): Promise; - incrementalRemove(this: RxDocument): Promise; - destroy(): never; -}): { - (this: RxDocument, collection: RxCollection, docData: RxDocumentData): void; - prototype: { - readonly primaryPath: import("./types/util").StringKeys<{ - _deleted: boolean; - _attachments: { - [attachmentId: string]: import("./types/rx-storage").RxAttachmentData; - }; - _rev: string; - _meta: import("./types/rx-document").RxDocumentMeta; - }> | undefined; - readonly primary: any; - readonly revision: string | undefined; - readonly deleted$: any; - readonly deleted$$: any; - readonly deleted: boolean | undefined; - getLatest(this: RxDocument): RxDocument; - /** - * returns the observable which emits the plain-data of this document - */ - readonly $: Observable; - readonly $$: any; - /** - * returns observable of the value of the given path - */ - get$(this: RxDocument, path: string): Observable; - get$$(this: RxDocument, path: string): any; - /** - * populate the given path - */ - populate(this: RxDocument, path: string): Promise; - /** - * get data by objectPath - * @hotPath Performance here is really important, - * run some tests before changing anything. 
- */ - get(this: RxDocument, objPath: string): any; - toJSON(this: RxDocument, withMetaFields?: boolean): import("./types/util").DeepReadonlyObject<{ - _deleted: boolean; - _attachments: { - [attachmentId: string]: import("./types/rx-storage").RxAttachmentData; - }; - _rev: string; - _meta: import("./types/rx-document").RxDocumentMeta; - }>; - toMutableJSON(this: RxDocument, withMetaFields?: boolean): { - _deleted: boolean; - _attachments: { - [attachmentId: string]: import("./types/rx-storage").RxAttachmentData; - }; - _rev: string; - _meta: import("./types/rx-document").RxDocumentMeta; - }; - /** - * updates document - * @overwritten by plugin (optional) - * @param updateObj mongodb-like syntax - */ - update(_updateObj: UpdateQuery): never; - incrementalUpdate(_updateObj: UpdateQuery): never; - updateCRDT(_updateObj: CRDTEntry | CRDTEntry[]): never; - putAttachment(): never; - getAttachment(): never; - allAttachments(): never; - readonly allAttachments$: void; - modify(this: RxDocument, mutationFunction: ModifyFunction, _context?: string | undefined): Promise; - /** - * runs an incremental update over the document - * @param function that takes the document-data and returns a new data-object - */ - incrementalModify(this: RxDocument, mutationFunction: ModifyFunction, _context?: string | undefined): Promise; - patch(this: RxDocument, patch: Partial): Promise>; - /** - * patches the given properties - */ - incrementalPatch(this: RxDocument, patch: Partial): Promise>; - /** - * saves the new document-data - * and handles the events - */ - _saveData(this: RxDocument, newData: RxDocumentWriteData, oldData: RxDocumentData): Promise>; - /** - * Remove the document. - * Notice that there is no hard delete, - * instead deleted documents get flagged with _deleted=true. 
- */ - remove(this: RxDocument): Promise; - incrementalRemove(this: RxDocument): Promise; - destroy(): never; - }; -}; -export declare function createWithConstructor(constructor: any, collection: RxCollection, jsonData: RxDocumentData): RxDocument | null; -export declare function isRxDocument(obj: any): boolean; -export declare function beforeDocumentUpdateWrite(collection: RxCollection, newData: RxDocumentWriteData, oldData: RxDocumentData): Promise; diff --git a/dist/types/rx-error.d.ts b/dist/types/rx-error.d.ts deleted file mode 100644 index c8f4d9cfbb9..00000000000 --- a/dist/types/rx-error.d.ts +++ /dev/null @@ -1,36 +0,0 @@ -/** - * here we use custom errors with the additional field 'parameters' - */ -import type { RxErrorParameters, RxErrorKey, RxStorageWriteError, RxStorageWriteErrorConflict } from './types/index.d.ts'; -export declare class RxError extends Error { - code: RxErrorKey; - message: string; - url: string; - parameters: RxErrorParameters; - rxdb: true; - constructor(code: RxErrorKey, message: string, parameters?: RxErrorParameters); - get name(): string; - toString(): string; - get typeError(): boolean; -} -export declare class RxTypeError extends TypeError { - code: RxErrorKey; - message: string; - url: string; - parameters: RxErrorParameters; - rxdb: true; - constructor(code: RxErrorKey, message: string, parameters?: RxErrorParameters); - get name(): string; - toString(): string; - get typeError(): boolean; -} -export declare function getErrorUrl(code: RxErrorKey): string; -export declare function errorUrlHint(code: RxErrorKey): string; -export declare function newRxError(code: RxErrorKey, parameters?: RxErrorParameters): RxError; -export declare function newRxTypeError(code: RxErrorKey, parameters?: RxErrorParameters): RxTypeError; -/** - * Returns the error if it is a 409 conflict, - * return false if it is another error. 
- */ -export declare function isBulkWriteConflictError(err?: RxStorageWriteError | any): RxStorageWriteErrorConflict | false; -export declare function rxStorageWriteErrorToRxError(err: RxStorageWriteError): RxError; diff --git a/dist/types/rx-query-helper.d.ts b/dist/types/rx-query-helper.d.ts deleted file mode 100644 index b15d69b6743..00000000000 --- a/dist/types/rx-query-helper.d.ts +++ /dev/null @@ -1,19 +0,0 @@ -import type { DeterministicSortComparator, FilledMangoQuery, MangoQuery, QueryMatcher, RxDocument, RxDocumentData, RxJsonSchema, RxQuery } from './types/index.d.ts'; -/** - * Normalize the query to ensure we have all fields set - * and queries that represent the same query logic are detected as equal by the caching. - */ -export declare function normalizeMangoQuery(schema: RxJsonSchema>, mangoQuery: MangoQuery): FilledMangoQuery; -/** - * Returns the sort-comparator, - * which is able to sort documents in the same way - * a query over the db would do. - */ -export declare function getSortComparator(schema: RxJsonSchema>, query: FilledMangoQuery): DeterministicSortComparator; -/** - * Returns a function - * that can be used to check if a document - * matches the query. - */ -export declare function getQueryMatcher(_schema: RxJsonSchema | RxJsonSchema>, query: FilledMangoQuery): QueryMatcher>; -export declare function runQueryUpdateFunction(rxQuery: RxQuery, fn: (doc: RxDocument) => Promise>): Promise; diff --git a/dist/types/rx-query-mingo.d.ts b/dist/types/rx-query-mingo.d.ts deleted file mode 100644 index 1ff79872c74..00000000000 --- a/dist/types/rx-query-mingo.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -import { Query } from 'mingo/query'; -import type { MangoQuerySelector } from './types/index.d.ts'; -/** - * The MongoDB query library is huge and we do not need all the operators. 
- * If you add an operator here, make sure that you properly add a test in - * the file /test/unit/rx-storage-query-correctness.test.ts - * - * @link https://github.com/kofrasa/mingo#es6 - */ -export declare function getMingoQuery(selector?: MangoQuerySelector): Query; diff --git a/dist/types/rx-query-single-result.d.ts b/dist/types/rx-query-single-result.d.ts deleted file mode 100644 index d04449f7773..00000000000 --- a/dist/types/rx-query-single-result.d.ts +++ /dev/null @@ -1,29 +0,0 @@ -import type { RxCollection, RxDocument, RxDocumentData } from './types'; -/** - * RxDB needs the query results in multiple formats. - * Sometimes as a Map or an array with only the documentData. - * For better performance we work with this class - * that initializes stuff lazily so that - * we can directly work with the query results after RxQuery.exec() - */ -export declare class RxQuerySingleResult { - readonly collection: RxCollection; - readonly count: number; - /** - * Time at which the current _result state was created. - * Used to determine if the result set has changed since X - * so that we do not emit the same result multiple times on subscription. - */ - readonly time: number; - readonly documents: RxDocument[]; - constructor(collection: RxCollection, docsDataFromStorageInstance: RxDocumentData[], count: number); - /** - * Instead of using the newResultData in the result cache, - * we directly use the objects that are stored in the RxDocument - * to ensure we do not store the same data twice and fill up the memory. 
- * @overwrites itself with the actual value - */ - get docsData(): RxDocumentData[]; - get docsDataMap(): Map>; - get docsMap(): Map>; -} diff --git a/dist/types/rx-query.d.ts b/dist/types/rx-query.d.ts deleted file mode 100644 index b2ffff51f98..00000000000 --- a/dist/types/rx-query.d.ts +++ /dev/null @@ -1,132 +0,0 @@ -import { BehaviorSubject, Observable } from 'rxjs'; -import type { RxCollection, RxDocument, RxQueryOP, RxQuery, MangoQuery, MangoQuerySortPart, MangoQuerySelector, PreparedQuery, RxDocumentWriteData, RxDocumentData, QueryMatcher, RxJsonSchema, FilledMangoQuery, ModifyFunction } from './types/index.d.ts'; -import { RxQuerySingleResult } from './rx-query-single-result.ts'; -export declare class RxQueryBase { - op: RxQueryOP; - mangoQuery: Readonly>; - collection: RxCollection; - other: any; - id: number; - /** - * Some stats then are used for debugging and cache replacement policies - */ - _execOverDatabaseCount: number; - _creationTime: number; - _lastEnsureEqual: number; - uncached: boolean; - refCount$: BehaviorSubject; - isFindOneByIdQuery: false | string | string[]; - /** - * Contains the current result state - * or null if query has not run yet. 
- */ - _result: RxQuerySingleResult | null; - constructor(op: RxQueryOP, mangoQuery: Readonly>, collection: RxCollection, other?: any); - get $(): BehaviorSubject; - get $$(): Reactivity; - _latestChangeEvent: -1 | number; - _lastExecStart: number; - _lastExecEnd: number; - /** - * ensures that the exec-runs - * are not run in parallel - */ - _ensureEqualQueue: Promise; - /** - * Returns an observable that emits the results - * This should behave like an rxjs-BehaviorSubject which means: - * - Emit the current result-set on subscribe - * - Emit the new result-set when an RxChangeEvent comes in - * - Do not emit anything before the first result-set was created (no null) - */ - _$?: Observable; - /** - * set the new result-data as result-docs of the query - * @param newResultData json-docs that were received from the storage - */ - _setResultData(newResultData: RxDocumentData[] | number | Map>): void; - /** - * executes the query on the database - * @return results-array with document-data - */ - _execOverDatabase(): Promise[] | number>; - /** - * Execute the query - * To have an easier implementations, - * just subscribe and use the first result - */ - exec(throwIfMissing: true): Promise>; - exec(): Promise; - /** - * cached call to get the queryMatcher - * @overwrites itself with the actual value - */ - get queryMatcher(): QueryMatcher>; - /** - * returns a string that is used for equal-comparisons - * @overwrites itself with the actual value - */ - toString(): string; - /** - * returns the prepared query - * which can be send to the storage instance to query for documents. - * @overwrites itself with the actual value. 
- */ - getPreparedQuery(): PreparedQuery; - /** - * returns true if the document matches the query, - * does not use the 'skip' and 'limit' - */ - doesDocumentDataMatch(docData: RxDocType | any): boolean; - /** - * deletes all found documents - * @return promise with deleted documents - */ - remove(): Promise; - incrementalRemove(): Promise; - /** - * helper function to transform RxQueryBase to RxQuery type - */ - get asRxQuery(): RxQuery; - /** - * updates all found documents - * @overwritten by plugin (optional) - */ - update(_updateObj: any): Promise; - patch(patch: Partial): Promise; - incrementalPatch(patch: Partial): Promise; - modify(mutationFunction: ModifyFunction): Promise; - incrementalModify(mutationFunction: ModifyFunction): Promise; - where(_queryObj: MangoQuerySelector | keyof RxDocType | string): RxQuery; - sort(_params: string | MangoQuerySortPart): RxQuery; - skip(_amount: number | null): RxQuery; - limit(_amount: number | null): RxQuery; -} -export declare function _getDefaultQuery(): MangoQuery; -/** - * run this query through the QueryCache - */ -export declare function tunnelQueryCache(rxQuery: RxQueryBase): RxQuery; -export declare function createRxQuery(op: RxQueryOP, queryObj: MangoQuery, collection: RxCollection, other?: any): RxQueryBase; -/** - * @returns a format of the query that can be used with the storage - * when calling RxStorageInstance().query() - */ -export declare function prepareQuery(schema: RxJsonSchema>, mutateableQuery: FilledMangoQuery): PreparedQuery; -/** - * Runs the query over the storage instance - * of the collection. - * Does some optimizations to ensure findById is used - * when specific queries are used. - */ -export declare function queryCollection(rxQuery: RxQuery | RxQueryBase): Promise[]>; -/** - * Returns true if the given query - * selects exactly one document by its id. - * Used to optimize performance because these kind of - * queries do not have to run over an index and can use get-by-id instead. 
- * Returns false if no query of that kind. - * Returns the document id otherwise. - */ -export declare function isFindOneByIdQuery(primaryPath: string, query: MangoQuery): false | string | string[]; -export declare function isRxQuery(obj: any): boolean; diff --git a/dist/types/rx-schema-helper.d.ts b/dist/types/rx-schema-helper.d.ts deleted file mode 100644 index a59adc2120e..00000000000 --- a/dist/types/rx-schema-helper.d.ts +++ /dev/null @@ -1,54 +0,0 @@ -import type { DeepReadonly, JsonSchema, PrimaryKey, RxDocumentData, RxJsonSchema, RxStorageDefaultCheckpoint, StringKeys } from './types/index.d.ts'; -import type { RxSchema } from './rx-schema.ts'; -/** - * Helper function to create a valid RxJsonSchema - * with a given version. - */ -export declare function getPseudoSchemaForVersion(version: number, primaryKey: StringKeys): RxJsonSchema>; -/** - * Returns the sub-schema for a given path - */ -export declare function getSchemaByObjectPath(rxJsonSchema: RxJsonSchema, path: keyof T | string): JsonSchema; -export declare function fillPrimaryKey(primaryPath: keyof T, jsonSchema: RxJsonSchema, documentData: RxDocumentData): RxDocumentData; -export declare function getPrimaryFieldOfPrimaryKey(primaryKey: PrimaryKey): StringKeys; -export declare function getLengthOfPrimaryKey(schema: RxJsonSchema>): number; -/** - * Returns the composed primaryKey of a document by its data. - */ -export declare function getComposedPrimaryKeyOfDocumentData(jsonSchema: RxJsonSchema | RxJsonSchema>, documentData: Partial): string; -/** - * Normalize the RxJsonSchema. - * We need this to ensure everything is set up properly - * and we have the same hash on schemas that represent the same value but - * have different json. - * - * - Orders the schemas attributes by alphabetical order - * - Adds the primaryKey to all indexes that do not contain the primaryKey - * - We need this for deterministic sort order on all queries, which is required for event-reduce to work. 
- * - * @return RxJsonSchema - ordered and filled - */ -export declare function normalizeRxJsonSchema(jsonSchema: RxJsonSchema): RxJsonSchema; -/** - * If the schema does not specify any index, - * we add this index so we at least can run RxQuery() - * and only select non-deleted fields. - */ -export declare function getDefaultIndex(primaryPath: string): string[]; -/** - * fills the schema-json with default-settings - * @return cloned schemaObj - */ -export declare function fillWithDefaultSettings(schemaObj: RxJsonSchema): RxJsonSchema>; -export declare const RX_META_SCHEMA: JsonSchema; -/** - * returns the final-fields of the schema - * @return field-names of the final-fields - */ -export declare function getFinalFields(jsonSchema: RxJsonSchema): string[]; -/** - * fills all unset fields with default-values if set - * @hotPath - */ -export declare function fillObjectWithDefaults(rxSchema: RxSchema, obj: any): any; -export declare const DEFAULT_CHECKPOINT_SCHEMA: DeepReadonly>; diff --git a/dist/types/rx-schema.d.ts b/dist/types/rx-schema.d.ts deleted file mode 100644 index 500a89bc054..00000000000 --- a/dist/types/rx-schema.d.ts +++ /dev/null @@ -1,45 +0,0 @@ -import type { DeepMutable, DeepReadonly, HashFunction, MaybeReadonly, RxDocumentData, RxJsonSchema, StringKeys } from './types/index.d.ts'; -export declare class RxSchema { - readonly jsonSchema: RxJsonSchema>; - readonly hashFunction: HashFunction; - indexes: MaybeReadonly[]; - readonly primaryPath: StringKeys>; - finalFields: string[]; - constructor(jsonSchema: RxJsonSchema>, hashFunction: HashFunction); - get version(): number; - get defaultValues(): { - [P in keyof RxDocType]: RxDocType[P]; - }; - /** - * @overrides itself on the first call - * - * TODO this should be a pure function that - * caches the hash in a WeakMap. 
- */ - get hash(): Promise; - /** - * checks if a given change on a document is allowed - * Ensures that: - * - final fields are not modified - * @throws {Error} if not valid - */ - validateChange(dataBefore: any, dataAfter: any): void; - /** - * creates the schema-based document-prototype, - * see RxCollection.getDocumentPrototype() - */ - getDocumentPrototype(): any; - getPrimaryOfDocumentData(documentData: Partial): string; -} -export declare function getIndexes(jsonSchema: RxJsonSchema): MaybeReadonly[]; -/** - * array with previous version-numbers - */ -export declare function getPreviousVersions(schema: RxJsonSchema): number[]; -export declare function createRxSchema(jsonSchema: RxJsonSchema, hashFunction: HashFunction, runPreCreateHooks?: boolean): RxSchema; -export declare function isRxSchema(obj: any): boolean; -/** - * Used as helper function the generate the document type out of the schema via typescript. - * @link https://github.com/pubkey/rxdb/discussions/3467 - */ -export declare function toTypedRxJsonSchema>>(schema: T): DeepMutable; diff --git a/dist/types/rx-storage-helper.d.ts b/dist/types/rx-storage-helper.d.ts deleted file mode 100644 index f2f5f03f522..00000000000 --- a/dist/types/rx-storage-helper.d.ts +++ /dev/null @@ -1,107 +0,0 @@ -/** - * Helper functions for accessing the RxStorage instances. 
- */ -import type { BulkWriteRow, BulkWriteRowProcessed, CategorizeBulkWriteRowsOutput, RxAttachmentData, RxAttachmentWriteData, RxCollection, RxDatabase, RxDocumentData, RxDocumentWriteData, RxJsonSchema, RxStorageWriteError, RxStorageInstance, RxStorageInstanceCreationParams, StringKeys, RxStorage, FilledMangoQuery } from './types/index.d.ts'; -import { Observable } from 'rxjs'; -export declare const INTERNAL_STORAGE_NAME = "_rxdb_internal"; -export declare const RX_DATABASE_LOCAL_DOCS_STORAGE_NAME = "rxdatabase_storage_local"; -export declare function getSingleDocument(storageInstance: RxStorageInstance, documentId: string): Promise | undefined>; -/** - * Writes a single document, - * throws RxStorageBulkWriteError on failure - */ -export declare function writeSingle(instance: RxStorageInstance, writeRow: BulkWriteRow, context: string): Promise>; -/** - * Observe the plain document data of a single document. - * Do not forget to unsubscribe. - */ -export declare function observeSingle(storageInstance: RxStorageInstance, documentId: string): Observable>; -/** - * Checkpoints must be stackable over another. - * This is required form some RxStorage implementations - * like the sharding plugin, where a checkpoint only represents - * the document state from some, but not all shards. - */ -export declare function stackCheckpoints(checkpoints: CheckpointType[]): CheckpointType; -export declare function throwIfIsStorageWriteError(collection: RxCollection, documentId: string, writeData: RxDocumentWriteData | RxDocType, error: RxStorageWriteError | undefined): void; -/** - * Analyzes a list of BulkWriteRows and determines - * which documents must be inserted, updated or deleted - * and which events must be emitted and which documents cause a conflict - * and must not be written. - * Used as helper inside of some RxStorage implementations. 
- * @hotPath The performance of this function is critical - */ -export declare function categorizeBulkWriteRows(storageInstance: RxStorageInstance, primaryPath: StringKeys, -/** - * Current state of the documents - * inside of the storage. Used to determine - * which writes cause conflicts. - * This must be a Map for better performance. - */ -docsInDb: Map[StringKeys] | string, RxDocumentData>, -/** - * The write rows that are passed to - * RxStorageInstance().bulkWrite(). - */ -bulkWriteRows: BulkWriteRow[], context: string, -/** - * Used by some storages for better performance. - * For example when get-by-id and insert/update can run in parallel. - */ -onInsert?: (docData: RxDocumentData) => void, onUpdate?: (docData: RxDocumentData) => void): CategorizeBulkWriteRowsOutput; -export declare function stripAttachmentsDataFromRow(writeRow: BulkWriteRow): BulkWriteRowProcessed; -export declare function getAttachmentSize(attachmentBase64String: string): number; -/** - * Used in custom RxStorage implementations. - */ -export declare function attachmentWriteDataToNormalData(writeData: RxAttachmentData | RxAttachmentWriteData): RxAttachmentData; -export declare function stripAttachmentsDataFromDocument(doc: RxDocumentWriteData): RxDocumentData; -/** - * Flat clone the document data - * and also the _meta field. - * Used many times when we want to change the meta - * during replication etc. - */ -export declare function flatCloneDocWithMeta(doc: RxDocumentData): RxDocumentData; -export type WrappedRxStorageInstance = RxStorageInstance & { - originalStorageInstance: RxStorageInstance; -}; -/** - * Wraps the normal storageInstance of a RxCollection - * to ensure that all access is properly using the hooks - * and other data transformations and also ensure that database.lockedRun() - * is used properly. 
- */ -export declare function getWrappedStorageInstance(database: RxDatabase<{}, Internals, InstanceCreationOptions, any>, storageInstance: RxStorageInstance, -/** - * The original RxJsonSchema - * before it was mutated by hooks. - */ -rxJsonSchema: RxJsonSchema>): WrappedRxStorageInstance; -/** - * Each RxStorage implementation should - * run this method at the first step of createStorageInstance() - * to ensure that the configuration is correct. - */ -export declare function ensureRxStorageInstanceParamsAreCorrect(params: RxStorageInstanceCreationParams): void; -export declare function hasEncryption(jsonSchema: RxJsonSchema): boolean; -export declare function getChangedDocumentsSinceQuery(storageInstance: RxStorageInstance, limit: number, checkpoint?: CheckpointType): FilledMangoQuery; -export declare function getChangedDocumentsSince(storageInstance: RxStorageInstance, limit: number, checkpoint?: CheckpointType): Promise<{ - documents: RxDocumentData[]; - /** - * The checkpoint contains data so that another - * call to getChangedDocumentsSince() will continue - * from exactly the last document that was returned before. - */ - checkpoint: CheckpointType; -}>; -/** - * Wraps the storage and simluates - * delays. Mostly used in tests. - */ -export declare function randomDelayStorage(input: { - storage: RxStorage; - delayTimeBefore: () => number; - delayTimeAfter: () => number; -}): RxStorage; diff --git a/dist/types/rx-storage-multiinstance.d.ts b/dist/types/rx-storage-multiinstance.d.ts deleted file mode 100644 index c719b3038e6..00000000000 --- a/dist/types/rx-storage-multiinstance.d.ts +++ /dev/null @@ -1,55 +0,0 @@ -/** - * When a persistent RxStorage is used in more the one JavaScript process, - * the even stream of the changestream() function must be broadcasted to the other - * RxStorageInstances of the same databaseName+collectionName. 
- * - * In the past this was done by RxDB but it makes more sense to do this - * at the RxStorage level so that the broadcasting etc can all happen inside of a WebWorker - * and not on the main thread. - * Also it makes it less complex to stack up different RxStorages onto each other - * like what we do with the in-memory plugin. - * - * This is intended to be used inside of createStorageInstance() of a storage. - * Do not use this if the storage anyway broadcasts the events like when using MongoDB - * or in the future W3C might introduce a way to listen to IndexedDB changes. - */ -import type { EventBulk, RxStorageInstance, RxStorageInstanceCreationParams } from './types/index.d.ts'; -import { BroadcastChannel } from 'broadcast-channel'; -/** - * The broadcast-channel is reused by the databaseInstanceToken. - * This is required so that it is easy to simulate multi-tab usage - * in the test where different instances of the same RxDatabase must - * have different broadcast channels. - * But also it ensures that for each RxDatabase we only create a single - * broadcast channel that can even be reused in the leader election plugin. - * - * TODO at the end of the unit tests, - * we should ensure that all channels are closed and cleaned up. - * Otherwise we have forgot something. - */ -export declare const BROADCAST_CHANNEL_BY_TOKEN: Map; - /** - * Contains all context objects that currently use the channel. 
- * If this becomes empty, we can close the channel - */ - refs: Set; -}>; -export type RxStorageMultiInstanceBroadcastType = { - storageName: string; - collectionName: string; - /** - * collection.schema.version - */ - version: number; - databaseName: string; - eventBulk: EventBulk; -}; -export declare function getBroadcastChannelReference(storageName: string, databaseInstanceToken: string, databaseName: string, refObject: any): BroadcastChannel; -export declare function removeBroadcastChannelReference(databaseInstanceToken: string, refObject: any): Promise | undefined; -export declare function addRxStorageMultiInstanceSupport(storageName: string, instanceCreationParams: RxStorageInstanceCreationParams, instance: RxStorageInstance, -/** - * If provided, that channel will be used - * instead of an own one. - */ -providedBroadcastChannel?: BroadcastChannel): void; diff --git a/dist/types/types/conflict-handling.d.ts b/dist/types/types/conflict-handling.d.ts deleted file mode 100644 index 82fe6473a33..00000000000 --- a/dist/types/types/conflict-handling.d.ts +++ /dev/null @@ -1,54 +0,0 @@ -import type { WithDeleted } from './rx-storage.d.ts'; - -/** - * Notice that the conflict handler input/output - * does not work on RxDocumentData, but only on WithDeleted. - * This is because the _meta attributes are meant for the local storing of document data, they not replicated - * and therefore cannot be used to resolve conflicts. - */ -export type RxConflictHandlerInput = { - assumedMasterState?: WithDeleted; - realMasterState: WithDeleted; - newDocumentState: WithDeleted; -}; - -/** - * The conflict handler either returns: - * - The resolved new document state - * - A flag to identify the given 'realMasterState' and 'newDocumentState' - * as being exactly equal, so no conflict has to be resolved. 
- */ -export type RxConflictHandlerOutput = { - isEqual: false; - documentData: WithDeleted; -} | { - isEqual: true; -}; - -export type RxConflictHandler = ( - i: RxConflictHandlerInput, - context: string -) => Promise>; - -export type RxConflictResultionTask = { - /** - * Unique id for that single task. - */ - id: string; - /** - * Tasks must have a context - * which makes it easy to filter/identify them again - * with plugins or other hacky stuff. - */ - context: string; - input: RxConflictHandlerInput; -}; - - -export type RxConflictResultionTaskSolution = { - /** - * Id of the RxConflictResultionTask - */ - id: string; - output: RxConflictHandlerOutput; -}; diff --git a/dist/types/types/couchdb.d.ts b/dist/types/types/couchdb.d.ts deleted file mode 100644 index 2183d2f87b5..00000000000 --- a/dist/types/types/couchdb.d.ts +++ /dev/null @@ -1,293 +0,0 @@ -import type { - MangoQuery, - MangoQuerySelector, - MangoQuerySortPart -} from './rx-query.d.ts'; -import type { BulkWriteRow } from './rx-storage.d.ts'; - -/** - * This file contains types that are CouchDB specific - */ - -export interface CouchReplicationOptions { - live?: boolean; - retry?: boolean; - filter?: Function; - doc_ids?: string[]; - query_params?: any; - view?: any; - since?: number | 'now'; - heartbeat?: number; - timeout?: number; - batch_size?: number; - batches_limit?: number; - back_off_function?: Function; - checkpoint?: false | 'source' | 'target'; - include_docs?: boolean; - limit?: number; -} - -export interface CouchChangesOptionsBase { - include_docs?: boolean; - conflicts?: boolean; - attachments?: boolean; - binary?: boolean; - descending?: boolean; - since?: any; - limit?: number; - timeout?: any; - heartbeat?: number | boolean; - filter?: any; - doc_ids?: string | string[]; - query_param?: any; - view?: any; - return_docs?: boolean; - batch_size?: number; - style?: string; -} - -export interface CouchChangesOptionsLive extends CouchChangesOptionsBase { - live: true; -} - -export 
interface CouchChangesOptionsNonLive extends CouchChangesOptionsBase { - live: false; -} -interface CouchChangesOnChangeEvent { - on: (eventName: string, handler: Function) => void; - off: (eventName: string, handler: Function) => void; - cancel(): void; -} - -export type CouchWriteError = { - /** - * status code from couchdb - * 409 for 'conflict' - */ - status: number; - error: true; - /** - * primary key value of the errored document - */ - id: string; -}; - -/** - * possible couch-settings - * @link https://couchdb.com/api.html#create_database - */ -export interface CouchSettings { - auto_compaction?: boolean; - revs_limit?: number; - ajax?: any; - fetch?: any; - auth?: any; - skip_setup?: boolean; - storage?: any; - size?: number; - location?: string; - iosDatabaseLocation?: string; -} - -/** - * options for couch.allDocs() - * @link https://couchdb.com/api.html#batch_fetch - */ -export type CouchAllDocsOptions = { - include_docs?: boolean; - conflicts?: boolean; - attachments?: boolean; - binary?: boolean; - startkey?: string; - endkey?: string; - inclusive_end?: boolean; - limit?: number; - skip?: number; - descending?: boolean; - key?: string; - keys?: string[]; - update_seq?: string; - - // undocument but needed - revs?: boolean; - deleted?: 'ok'; -}; - -export type CouchSyncHandlerEvents = 'change' | 'paused' | 'active' | 'error' | 'complete'; -export type CouchSyncHandler = { - on(ev: CouchSyncHandlerEvents, fn: (el: any) => void): void; - off(ev: CouchSyncHandlerEvents, fn: any): void; - cancel(): void; -}; - -export type CouchChangeRow = { - id: string; - seq: number; - deleted?: true; - changes: { - rev: 'string'; - }[]; - /** - * only if include_docs === true - */ - doc?: CouchChangeDoc; -}; - -export type CouchAttachmentMeta = { - digest: string; - content_type: string; - length: number; - stub: boolean; - - /** - * 'revpos indicates the generation number (numeric prefix in the revID) at which the attachment was last altered' - * @link 
https://github.com/couchbase/couchbase-lite-ios/issues/1200#issuecomment-206444554 - */ - revpos: number; -}; - -export type CouchAttachmentWithData = CouchAttachmentMeta & { - /** - * Base64 string with the data - * or directly a buffer - */ - data: Blob; - type: string; - /** - * If set, must be false - * because we have the full data and not only a stub. - */ - stub?: false; -}; - -export type CouchChangeDoc = { - _id: string; - _rev: string; - /** - * True if the document is deleted. - */ - _deleted?: boolean; - _attachments: { - [attachmentId: string]: CouchAttachmentMeta; - }; -}; - -export type WithAttachments = Data & { - /** - * Intentional optional, - * if the document has no attachments, - * we do NOT have an empty object. - */ - _attachments?: { - [attachmentId: string]: CouchAttachmentMeta; - }; -}; -export type WithAttachmentsData = Data & { - /** - * Intentional optional, - * if the document has no attachments, - * we do NOT have an empty object. - */ - _attachments?: { - [attachmentId: string]: CouchAttachmentWithData; - }; -}; - - -export type WithCouchMeta = Data & { - _rev: string; - _attachments?: { - [attachmentId: string]: CouchAttachmentMeta; - }; - _deleted?: boolean; -}; - -export type CouchdbChangesResult = { - results: CouchChangeRow[]; - last_seq: number; -}; - -declare type Debug = { - enable(what: string): void; - disable(): void; -}; - -export type CouchDbSorting = (string | string[] | { [k: string]: 'asc' | 'desc' | 1 | -1; })[]; - -// this is not equal to the standard MangoQuery -// because of different sorting -export type CouchdbQuery = MangoQuery & { - sort?: CouchDbSorting; -}; - -export type CouchBulkDocResultRow = { - ok: boolean; - id: string; - rev: string; - - error?: 'conflict'; - reason?: string; -}; - -export type CouchCheckpoint = { - sequence: number; -}; - -export type CouchBulkDocOptions = { - new_edits?: boolean; - - // custom options for RxDB - isDeeper?: boolean; - custom?: { - primaryPath: string; - writeRowById: 
Map>; - insertDocsById: Map; - previousDocsInDb: Map; - context: string; - }; -}; - -export type CouchMangoQuery = MangoQuery & { - index: undefined; - use_index?: string; -}; - -export type ExplainedCouchQuery = { - dbname: string; - index: { - ddoc: string | null; - name: string; // 'idx-rxdb-index-age,_id' - type: 'json'; - def: { - fields: MangoQuerySortPart[]; - }; - }; - selector: MangoQuerySelector; - range: { - start_key: any[]; - end_key: any[]; - }; - opts: { - use_index: string[]; - bookmark: string; - sort: MangoQuerySortPart[]; - conflicts: boolean; - r: any[]; - }; - skip: number; -}; - -export type CouchAllDocsResponse = { - offset: number; - rows: { - id: string; - doc: any; - key: string; - value: { - rev: string; - deleted?: boolean; - }; - error?: 'not_found' | string; - }[]; - total_rows: number; -}; diff --git a/dist/types/types/index.d.ts b/dist/types/types/index.d.ts deleted file mode 100644 index 42332e71c18..00000000000 --- a/dist/types/types/index.d.ts +++ /dev/null @@ -1,32 +0,0 @@ -export type * from './couchdb.d.ts'; -export type * from './rx-attachment.d.ts'; -export type * from './rx-collection.d.ts'; -export type * from './rx-database.d.ts'; -export type * from './rx-database-internal-store.d.ts'; -export type * from './rx-document.d.ts'; -export type * from './rx-error.d.ts'; -export type * from './rx-plugin.d.ts'; -export type * from './rx-query.d.ts'; -export type * from './rx-schema.d.ts'; -export type * from './rx-storage.d.ts'; -export type * from './rx-storage.interface.d.ts'; -export type * from './replication-protocol.d.ts'; -export type * from './conflict-handling.d.ts'; -export type * from './rx-change-event.d.ts'; -export type * from './query-planner.d.ts'; -export type * from './util.d.ts'; - -// plugins -export type * from './plugins/replication.d.ts'; -export type * from './plugins/replication-graphql.d.ts'; -export type * from './plugins/replication.d.ts'; -export type * from './plugins/local-documents.d.ts'; -export 
type * from './plugins/migration.d.ts'; -export type * from './plugins/backup.d.ts'; -export type * from './plugins/cleanup.d.ts'; -export type * from './plugins/lokijs.d.ts'; -export type * from './plugins/dexie.d.ts'; -export type * from './plugins/reactivity.d.ts'; -export type * from './plugins/update.d.ts'; -export type * from './plugins/crdt.d.ts'; -export type * from './plugins/state.d.ts'; diff --git a/dist/types/types/modules/index.d.ts b/dist/types/types/modules/index.d.ts deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/dist/types/types/modules/lokijs.d.ts b/dist/types/types/modules/lokijs.d.ts deleted file mode 100644 index 976f3182f19..00000000000 --- a/dist/types/types/modules/lokijs.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -declare module 'lokijs'; -declare module 'lokijs/src/loki-fs-structured-adapter.js'; -declare module 'lokijs/src/incremental-indexeddb-adapter.js'; diff --git a/dist/types/types/modules/mocha.parallel.d.ts b/dist/types/types/modules/mocha.parallel.d.ts deleted file mode 100644 index 59d36e0a85f..00000000000 --- a/dist/types/types/modules/mocha.parallel.d.ts +++ /dev/null @@ -1 +0,0 @@ -declare module 'mocha.parallel'; diff --git a/dist/types/types/plugins/backup.d.ts b/dist/types/types/plugins/backup.d.ts deleted file mode 100644 index 97c34d71c38..00000000000 --- a/dist/types/types/plugins/backup.d.ts +++ /dev/null @@ -1,35 +0,0 @@ -export type BackupOptions = { - live: boolean; - directory: string; - /** - * If true, - * attachments will also be saved - */ - attachments?: boolean; - /** - * How many documents can be processed in one batch - * [default=10] - */ - batchSize?: number; - /** - * If not set, all collections will be backed up. 
- */ - collections?: string[]; -}; - -export type BackupMetaFileContent = { - createdAt: number; - updatedAt: number; - collectionStates: { - [collectionName: string]: { - checkpoint?: any; - }; - }; -}; - -export type RxBackupWriteEvent = { - collectionName: string; - documentId: string; - files: string[]; - deleted: boolean; -}; diff --git a/dist/types/types/plugins/cleanup.d.ts b/dist/types/types/plugins/cleanup.d.ts deleted file mode 100644 index 7228a7341db..00000000000 --- a/dist/types/types/plugins/cleanup.d.ts +++ /dev/null @@ -1,38 +0,0 @@ -export type RxCleanupPolicy = { - /** - * The minimum time in milliseconds - * of how long a document must have been deleted - * until it is purged by the cleanup. - * This should be higher then the time you expect - * your user to be offline for. - * If this is too low, deleted documents might not - * replicate their deletion state. - */ - minimumDeletedTime: number; - /** - * The minimum amount of that that the RxCollection must have existed. - * This ensures that at the initial page load, more important - * tasks are not slowed down because a cleanup process is running. - */ - minimumCollectionAge: number; - /** - * After the initial cleanup is done, - * a new cleanup is started after [runEach] milliseconds - */ - runEach: number; - /** - * If set to true, - * RxDB will await all running replications - * to not have a replication cycle running. - * This ensures we do not remove deleted documents - * when they might not have already been replicated. - */ - awaitReplicationsInSync: boolean; - /** - * If true, it will only start the cleanup - * when the current instance is also the leader. - * This ensures that when RxDB is used in multiInstance mode, - * only one instance will start the cleanup. 
- */ - waitForLeadership: boolean; -}; diff --git a/dist/types/types/plugins/crdt.d.ts b/dist/types/types/plugins/crdt.d.ts deleted file mode 100644 index 662a911c4c8..00000000000 --- a/dist/types/types/plugins/crdt.d.ts +++ /dev/null @@ -1,76 +0,0 @@ -import type { MangoQuerySelector } from '../rx-query.d.ts'; -import type { StringKeys } from '../util.d.ts'; -import type { UpdateQuery } from './update.d.ts'; - - -export type CRDTEntry = { - selector?: MangoQuerySelector; - ifMatch?: UpdateQuery; - ifNotMatch?: UpdateQuery; -}; - -/** - * Options for the crdt plugin. - * We set these in the schema because changing them - * is not possible on the fly because it would - * destroy the document state in an unpredictable way. - */ -export type CRDTSchemaOptions = { - /** - * Determines which field of the document must be used - * to store the crdt operations. - * The given field must exist with the content of "CRDT_FIELD_SCHEMA" in the - * properties part of your schema. - */ - field: StringKeys | string; - - /** - * After BOTH of the limits - * maxOperations/maxTTL is reached, - * the document will clean up the stored operations - * and merged them together to ensure - * that not too many operations are stored which could slow down the - * database operations. - */ - // TODO not implemented yet, make a pull request if you need that. - // maxOperations: number; - // maxTTL: number; -}; - - -export type CRDTOperation = { - body: CRDTEntry[]; - /** - * A string to uniquely represent the creator - * of this operation. - * Mostly you would use the RxDatabase().storageToken(). - */ - creator: string; - - /** - * Unix time in milliseconds - * that determines when the operation was created. - * Used to properly clean up old operations. - */ - time: number; -}; - - -export type CRDTDocumentField = { - /** - * An array with arrays of CRDT operations. - * The index of the top level array is equal - * to the revision height where the operations - * belong to. 
- * Sorted by revision height ascending. - * If we have a conflict and we need a rebuild, - * the operations will be run in the revision height - * sort order to make everything deterministic. - */ - operations: CRDTOperation[][]; - - /** - * A hash to uniquely define the whole operations state. - */ - hash: string; -}; diff --git a/dist/types/types/plugins/dexie.d.ts b/dist/types/types/plugins/dexie.d.ts deleted file mode 100644 index fe487a81871..00000000000 --- a/dist/types/types/plugins/dexie.d.ts +++ /dev/null @@ -1,27 +0,0 @@ -import type { - Dexie, - DexieOptions, - Table as DexieTable -} from 'dexie'; - -export type DexieSettings = DexieOptions; - -/** - * The internals is a Promise that resolves - * when the database has fully opened - * and Dexie.on.ready was called - * @link https://dexie.org/docs/Dexie/Dexie.on.ready - * - */ -export type DexieStorageInternals = Promise<{ - dexieDb: Dexie; - /** - * Contains all normal documents. Deleted ones and non-deleted ones. - */ - dexieTable: DexieTable; - // contains the attachments data - dexieAttachmentsTable: DexieTable; - - // these must be transformed because indexeddb does not allow boolean indexing - booleanIndexes: string[]; -}>; diff --git a/dist/types/types/plugins/local-documents.d.ts b/dist/types/types/plugins/local-documents.d.ts deleted file mode 100644 index 76a29a1a740..00000000000 --- a/dist/types/types/plugins/local-documents.d.ts +++ /dev/null @@ -1,49 +0,0 @@ -import type { Observable } from 'rxjs'; -import type { DocumentCache } from '../../doc-cache.d.ts'; -import type { IncrementalWriteQueue } from '../../incremental-write.d.ts'; -import type { RxCollection } from '../rx-collection.d.ts'; -import type { RxDatabase } from '../rx-database.d.ts'; -import type { RxDocumentBase } from '../rx-document.d.ts'; -import type { RxStorageInstance } from '../rx-storage.interface.d.ts'; -import type { Override } from '../util.d.ts'; - -export type LocalDocumentParent = RxDatabase | RxCollection; -export 
type LocalDocumentState = { - database: RxDatabase; - parent: LocalDocumentParent; - storageInstance: RxStorageInstance; - docCache: DocumentCache; - incrementalWriteQueue: IncrementalWriteQueue; -}; -export type RxLocalDocumentData< - Data = { - // local documents are schemaless and contain any data - [key: string]: any; - } -> = { - id: string; - data: Data; -}; - -declare type LocalDocumentModifyFunction = ( - doc: Data, - rxLocalDocument: RxLocalDocument -) => Data | Promise; - - -export declare type RxLocalDocument = Override< - RxDocumentBase, {}, Reactivity>, - { - readonly parent: Parent; - isLocal(): true; - - /** - * Because local documents store their relevant data inside of the 'data' property, - * the incremental mutation methods are changed a bit to only allow to change parts of the data property. - */ - incrementalModify(mutationFunction: LocalDocumentModifyFunction): Promise>; - incrementalPatch(patch: Partial): Promise>; - - $: Observable>; - } ->; diff --git a/dist/types/types/plugins/lokijs.d.ts b/dist/types/types/plugins/lokijs.d.ts deleted file mode 100644 index 6eb3b2d15e9..00000000000 --- a/dist/types/types/plugins/lokijs.d.ts +++ /dev/null @@ -1,60 +0,0 @@ -import type { LeaderElector } from 'broadcast-channel'; -import type { AddReturn } from 'unload'; -import type { LokiSaveQueue } from '../../plugins/storage-lokijs/loki-save-queue.ts'; - -export type LokiDatabaseSettings = any; - -export type LokiCollectionSettings = Partial; - -export type LokiSettings = { - database?: LokiDatabaseSettings; - collection?: LokiCollectionSettings; -}; - -export type LokiStorageInternals = { - leaderElector?: LeaderElector; - localState?: Promise; -}; - -export type LokiRemoteRequestBroadcastMessage = { - response: false; - type: string; - databaseName: string; - collectionName: string; - operation: string; - params: any[]; - requestId: string; -}; - -export type LokiRemoteResponseBroadcastMessage = { - response: true; - type: string; - databaseName: 
string; - collectionName: string; - requestId: string; - result: any | any[]; - // if true, the result property will contain an error state - isError: boolean; -}; - -export type LokiDatabaseState = { - database: any; - databaseSettings: LokiDatabaseSettings; - saveQueue: LokiSaveQueue; - - // all known collections of the database - collections: { - [collectionName: string]: any; - }; - - /** - * Registered unload handlers - * so we can remove them on close. - */ - unloads: AddReturn[]; -}; - -export type LokiLocalDatabaseState = { - databaseState: LokiDatabaseState; - collection: any; -}; diff --git a/dist/types/types/plugins/migration.d.ts b/dist/types/types/plugins/migration.d.ts deleted file mode 100644 index d418b9158b8..00000000000 --- a/dist/types/types/plugins/migration.d.ts +++ /dev/null @@ -1,14 +0,0 @@ -import type { - WithAttachments -} from '../couchdb.d.ts'; -import type { RxCollection } from '../rx-collection.d.ts'; -import type { MaybePromise } from '../util.d.ts'; - -export type MigrationStrategy = ( - oldDocumentData: WithAttachments, - collection: RxCollection -) => MaybePromise | null>; - -export type MigrationStrategies = { - [toVersion: number]: MigrationStrategy; -}; diff --git a/dist/types/types/plugins/reactivity.d.ts b/dist/types/types/plugins/reactivity.d.ts deleted file mode 100644 index 8ef4432bd29..00000000000 --- a/dist/types/types/plugins/reactivity.d.ts +++ /dev/null @@ -1,10 +0,0 @@ -import type { Observable } from 'rxjs'; -import type { RxDatabase } from '../rx-database'; - -export interface RxReactivityFactory { - fromObservable( - obs: Observable, - initialValue: InitData, - rxDatabase: RxDatabase - ): Reactivity; // TODO must use generic data like Reactivity -} diff --git a/dist/types/types/plugins/replication-graphql.d.ts b/dist/types/types/plugins/replication-graphql.d.ts deleted file mode 100644 index a9c090857ad..00000000000 --- a/dist/types/types/plugins/replication-graphql.d.ts +++ /dev/null @@ -1,85 +0,0 @@ -import { 
RxReplicationWriteToMasterRow } from '../replication-protocol.ts'; -import { ById, MaybePromise } from '../util.ts'; -import { - ReplicationOptions, - ReplicationPullHandlerResult, - ReplicationPullOptions, - ReplicationPushHandlerResult, - ReplicationPushOptions -} from './replication.ts'; - -export interface RxGraphQLReplicationQueryBuilderResponseObject { - query: string; - operationName?: string; - variables: any; -} - -export type RxGraphQLReplicationClientState = { - headers: ById; - credentials: RequestCredentials | undefined; -}; - -export type RxGraphQLReplicationQueryBuilderResponse = - RxGraphQLReplicationQueryBuilderResponseObject | - Promise; -export type RxGraphQLReplicationPushQueryBuilder = ( - // typed 'any' because the data might be modified by the push.modifier. - rows: RxReplicationWriteToMasterRow[] -) => RxGraphQLReplicationQueryBuilderResponse; - - -export type RxGraphQLReplicationPullQueryBuilder = ( - latestPulledCheckpoint: CheckpointType | undefined, - limit: number -) => RxGraphQLReplicationQueryBuilderResponse; -export type GraphQLSyncPullOptions = Omit< - ReplicationPullOptions, - 'handler' | 'stream$' -> & { - queryBuilder: RxGraphQLReplicationPullQueryBuilder; - streamQueryBuilder?: RxGraphQLReplicationPullStreamQueryBuilder; - dataPath?: string; - responseModifier?: RxGraphQLPullResponseModifier; - includeWsHeaders?: boolean; -}; - -export type RxGraphQLPullResponseModifier = ( - // the exact response that was returned from the server - plainResponse: ReplicationPullHandlerResult | any, - // either 'handler' if it came from the pull.handler, or 'stream' if it came from the pull.stream - origin: 'handler' | 'stream', - requestCheckpoint?: CheckpointType -) => MaybePromise>; - -export type RxGraphQLPushResponseModifier = ( - // the exact response that was returned from the server - plainResponse: ReplicationPushHandlerResult | any, -) => MaybePromise>; - -export type RxGraphQLReplicationPullStreamQueryBuilder = (headers: { [k: 
string]: string; }) => RxGraphQLReplicationQueryBuilderResponse; - -export type GraphQLSyncPushOptions = Omit< - ReplicationPushOptions, - 'handler' -> & { - queryBuilder: RxGraphQLReplicationPushQueryBuilder; - dataPath?: string; - responseModifier?: RxGraphQLPushResponseModifier; -}; - -export type GraphQLServerUrl = { - http?: string; - ws?: string; -}; - -export type SyncOptionsGraphQL = Omit< - ReplicationOptions, - 'pull' | 'push' -> & { - url: GraphQLServerUrl; - fetch?: WindowOrWorkerGlobalScope['fetch']; - headers?: { [k: string]: string; }; // send with all requests to the endpoint - credentials?: RequestCredentials; - pull?: GraphQLSyncPullOptions; - push?: GraphQLSyncPushOptions; -}; diff --git a/dist/types/types/plugins/replication.d.ts b/dist/types/types/plugins/replication.d.ts deleted file mode 100644 index 4155171c678..00000000000 --- a/dist/types/types/plugins/replication.d.ts +++ /dev/null @@ -1,157 +0,0 @@ -import { Observable } from 'rxjs'; -import type { - InternalStoreDocType, - MaybePromise, - RxCollection, - RxDocumentData, - RxReplicationPullStreamItem, - RxReplicationWriteToMasterRow, - WithDeleted -} from '../../types/index.d.ts'; - - -export type InternalStoreReplicationPushDocType = InternalStoreDocType<{ - checkpoint: any; -}>; -export type InternalStoreReplicationPullDocType = InternalStoreDocType<{ - lastPulledDoc: RxDocumentData; -}>; - -export type ReplicationPullHandlerResult = { - checkpoint: CheckpointType | null; - documents: WithDeleted[]; -}; - -export type ReplicationPushHandlerResult = RxDocType[]; - -export type ReplicationPullHandler = ( - lastPulledCheckpoint: CheckpointType | undefined, - batchSize: number -) => Promise>; -export type ReplicationPullOptions = { - /** - * A handler that pulls the new remote changes - * from the remote actor. - */ - handler: ReplicationPullHandler; - - - /** - * An observable that streams all document changes - * that are happening on the backend. 
- * Emits an document bulk together with the latest checkpoint of these documents. - * Also can emit a 'RESYNC' event when the client was offline and is online again. - * - * Not required for non-live replication. - */ - stream$?: Observable>; - - /** - * Amount of documents that the remote will send in one request. - * If the response contains less then [batchSize] documents, - * RxDB will assume there are no more changes on the backend - * that are not replicated. - * [default=100] - */ - batchSize?: number; - - /** - * A modifier that runs on all documents that are pulled, - * before they are used by RxDB. - * - the ones from the pull handler - * - the ones from the pull stream - */ - modifier?: (docData: any) => MaybePromise>; - - /** - * If set, the push replication - * will start from the given checkpoint. - */ - initialCheckpoint?: any; -}; - -/** - * Gets the new write rows. - * Returns the current master state of all conflicting writes, - * so that they can be resolved on the client. - */ -export type ReplicationPushHandler = ( - docs: RxReplicationWriteToMasterRow[] -) => Promise[]>; -export type ReplicationPushOptions = { - /** - * A handler that sends the new local changes - * to the remote actor. - * On error, all documents are send again at later time. - */ - handler: ReplicationPushHandler; - - - /** - * A modifier that runs on all pushed documents before - * they are send into the push handler. - */ - modifier?: (docData: WithDeleted) => MaybePromise; - - /** - * How many local changes to process at once. - */ - batchSize?: number; - - /** - * If set, the push replication - * will start from the given checkpoint. - */ - initialCheckpoint?: any; -}; - - -export type ReplicationOptions = { - /** - * An id for the replication to identify it - * and so that RxDB is able to resume the replication on app reload. - * If you replicate with a remote server, it is recommended to put the - * server url into the replicationIdentifier. 
- * Like 'my-rest-replication-to-https://example.com/api/sync' - */ - replicationIdentifier: string; - collection: RxCollection; - /** - * Define a custom property that is used - * to flag a document as being deleted. - * @default '_deleted' - */ - deletedField?: '_deleted' | string; - pull?: ReplicationPullOptions; - push?: ReplicationPushOptions; - /** - * By default it will do an ongoing realtime replication. - * By settings live: false the replication will run once until the local state - * is in sync with the remote state, then it will cancel itself. - * @default true - */ - live?: boolean; - /** - * Time in milliseconds after when a failed backend request - * has to be retried. - * This time will be skipped if a offline->online switch is detected - * via `navigator.onLine` - * @default 5000 - */ - retryTime?: number; - /** - * When multiInstance is `true`, like when you use RxDB in multiple browser tabs, - * the replication should always run in only one of the open browser tabs. - * If waitForLeadership is `true`, it will wait until the current instance is leader. - * If waitForLeadership is `false`, it will start replicating, even if it is not leader. - * @default true - */ - waitForLeadership?: boolean; - /** - * If this is set to `false`, - * the replication will not start automatically - * but will wait for `replicationState.start()` being called. 
- * @default true - */ - autoStart?: boolean; -}; diff --git a/dist/types/types/plugins/state.d.ts b/dist/types/types/plugins/state.d.ts deleted file mode 100644 index 801ad5ff7a9..00000000000 --- a/dist/types/types/plugins/state.d.ts +++ /dev/null @@ -1,4 +0,0 @@ -import type { RxStateBase } from '../../plugins/state/rx-state'; -import type { ExtendObservables, ExtendReactivity } from '../rx-document'; - -export type RxState = RxStateBase & T & ExtendObservables> & ExtendReactivity, Reactivity>; diff --git a/dist/types/types/plugins/update.d.ts b/dist/types/types/plugins/update.d.ts deleted file mode 100644 index 85b7bca57e1..00000000000 --- a/dist/types/types/plugins/update.d.ts +++ /dev/null @@ -1,23 +0,0 @@ -import type { AnyKeys, AnyObject } from '../util.d.ts'; - -// import type { -// UpdateExpression -// } from 'mingo/updater'; - -/** - * We use an own type here, copied from mongoose - * @link https://github.com/Automattic/mongoose/blob/eb292d2c4cc98ee315f118d6199a83938f06d901/types/index.d.ts#L466 - * TODO when mingo implements a schema-based type for UpdateExpression, we can use that one. - */ -export type UpdateQuery = { - $min?: AnyKeys & AnyObject; - $max?: AnyKeys & AnyObject; - $inc?: AnyKeys & AnyObject; - $set?: AnyKeys & AnyObject; - $unset?: AnyKeys & AnyObject; - $push?: AnyKeys & AnyObject; - $addToSet?: AnyKeys & AnyObject; - $pop?: AnyKeys & AnyObject; - $pullAll?: AnyKeys & AnyObject; - $rename?: Record; -}; diff --git a/dist/types/types/query-planner.d.ts b/dist/types/types/query-planner.d.ts deleted file mode 100644 index 38be596531c..00000000000 --- a/dist/types/types/query-planner.d.ts +++ /dev/null @@ -1,53 +0,0 @@ -export type RxQueryPlanKey = string | number | undefined; - -export type RxQueryPlanerOpts = { - startKey: RxQueryPlanKey; - endKey: RxQueryPlanKey; - /** - * True if the first matching document - * must also be included into the result set. 
- */ - inclusiveStart: boolean; - /** - * True if the last matching document - * must also be included into the result set. - */ - inclusiveEnd: boolean; -}; - -export type RxQueryPlan = { - index: string[]; - /** - * If the index does not match the sort params, - * we have to resort the query results manually - * after fetching them from the index. - */ - sortSatisfiedByIndex: boolean; - - /** - * If the whole selector matching is satisfied - * by the index, we do not have to run a does-document-data-match-query - * stuff. - */ - selectorSatisfiedByIndex: boolean; - - /** - * TODO add a flag that determines - * if we have to run the selector matching on all results - * or if the used index anyway matches ALL operators. - */ - - startKeys: RxQueryPlanKey[]; - endKeys: RxQueryPlanKey[]; - /** - * True if the first matching document - * must also be included into the result set. - */ - inclusiveStart: boolean; - /** - * True if the last matching document - * must also be included into the result set. 
- */ - inclusiveEnd: boolean; - -}; diff --git a/dist/types/types/replication-protocol.d.ts b/dist/types/types/replication-protocol.d.ts deleted file mode 100644 index f0d39406052..00000000000 --- a/dist/types/types/replication-protocol.d.ts +++ /dev/null @@ -1,284 +0,0 @@ -import { BehaviorSubject, Observable, Subject } from 'rxjs'; -import type { - RxConflictHandler, - RxConflictHandlerInput, - RxConflictHandlerOutput -} from './conflict-handling.d.ts'; -import type { RxError, RxTypeError } from './rx-error.d.ts'; -import type { - BulkWriteRow, - RxDocumentData, - WithDeleted, - WithDeletedAndAttachments -} from './rx-storage.d.ts'; -import type { - RxStorageInstance -} from './rx-storage.interface.d.ts'; -import type { HashFunction } from './util.d.ts'; - -export type RxStorageReplicationMeta = { - - /** - * Combined primary key consisting - * of: [replicationId, itemId, isCheckpoint] - * so that the same RxStorageInstance - * can be used for multiple replication states. - */ - id: string; - - /** - * Either the document primaryKey - * or the id of the replication checkpoint. - */ - itemId: string; - - /** - * True if the doc data is about a checkpoint, - * False if it is about a document state from the master. - * Stored as a string so it can be used - * in the combined primary key 'id' - */ - isCheckpoint: '0' | '1'; - checkpointData?: CheckpointType; - - /** - * the document state of the master - * only set if not checkpoint. - */ - docData?: RxDocType | RxDocumentData | any; - /** - * If the current assumed master was written while - * resolving a conflict, this field contains - * the revision of the conflict-solution that - * is stored in the forkInstance. 
- */ - isResolvedConflict?: string; -}; - -export type RxReplicationWriteToMasterRow = { - assumedMasterState?: WithDeletedAndAttachments; - newDocumentState: WithDeletedAndAttachments; -}; - - -export type DocumentsWithCheckpoint = { - documents: WithDeletedAndAttachments[]; - checkpoint: CheckpointType; -}; - - -export type RxReplicationPullStreamItem = DocumentsWithCheckpoint | - /** - * Emit this when the masterChangeStream$ might have missed out - * some events because the fork lost the connection to the master. - * Like when the user went offline and reconnects. - */ - 'RESYNC'; - -/** - * The replication handler contains all logic - * that is required by the replication protocol - * to interact with the master instance. - * This is an abstraction so that we can use different - * handlers for GraphQL, REST or any other transportation layer. - * Even a RxStorageInstance can be wrapped in a way to represent a replication handler. - * - * The RxStorage instance of the master branch that is - * replicated with the fork branch. - * The replication algorithm is made to make - * as less writes on the master as possible. - * The master instance is always 'the truth' which - * does never contain conflicting document states. - * All conflicts are handled on the fork branch - * before being replicated to the master. - */ -export type RxReplicationHandler = { - masterChangeStream$: Observable>; - masterChangesSince( - checkpoint: MasterCheckpointType, - batchSize: number - ): Promise>; - /** - * Writes the fork changes to the master. - * Only returns the conflicts if there are any. - * (otherwise returns an empty array.) - */ - masterWrite( - rows: RxReplicationWriteToMasterRow[] - ): Promise[]>; -}; - -export type RxStorageInstanceReplicationInput = { - /** - * A string that uniquely identifies - * the replication. - * Ensures that checkpoint are not - * mixed with other replications. 
- */ - identifier: string; - pullBatchSize: number; - pushBatchSize: number; - replicationHandler: RxReplicationHandler; - conflictHandler: RxConflictHandler; - - // can be set to also replicate the _meta field of the document. - keepMeta?: boolean; - - /** - * The fork is the one that contains the forked chain of document writes. - * All conflicts are solved on the fork and only resolved correct document data - * is written back to the parent. - */ - forkInstance: RxStorageInstance; - - /** - * The replication needs to store some meta data - * for documents to know which state is at the master - * and how/if it diverges from the fork. - * In the past this was stored in the _meta field of - * the forkInstance documents but that was not a good design decision - * because it required additional writes on the forkInstance - * to know which documents have been upstream replicated - * to not cause conflicts. - * Using the metaInstance instead leads to better overall performance - * because RxDB will not re-emit query results or document state - * when replication meta data is written. - * - * In addition to per-document meta data, - * the replication checkpoints are also stored in this instance. - * - */ - metaInstance: RxStorageInstance, any, any>; - - /** - * When a write happens to the fork, - * normally the replication will directly try to persist. - * - * For many use cases, it is better to await the next event loop tick - * or to wait until the RxDatabase is idle or requestIdleCallback() calls - * to ensure the CPU is idle. - * This can improve performance because the persistence will not affect UI - * renders. - * - * But: The longer you wait here, the higher is the risk of losing fork - * writes when the replication is destroyed unexpected. 
- */ - waitBeforePersist?: () => Promise; - - hashFunction: HashFunction; - - initialCheckpoint?: { - upstream?: any; - downstream?: any; - }; -}; - -export type RxStorageInstanceReplicationState = { - // store the primaryPath here for better reuse and performance. - primaryPath: string; - hasAttachments: boolean; - input: RxStorageInstanceReplicationInput; - - events: { - /** - * Streams all document writes that have successfully - * been written in one direction. - */ - processed: { - up: Subject>; - down: Subject>; - }; - resolvedConflicts: Subject<{ - input: RxConflictHandlerInput; - output: RxConflictHandlerOutput; - }>; - /** - * Contains the cancel state. - * Emit true here to cancel the replication. - */ - canceled: BehaviorSubject; - /** - * Contains true if the replication is doing something - * at this point in time. - * If this is false, it means that the replication - * is idle AND in sync. - */ - active: { - [direction in RxStorageReplicationDirection]: BehaviorSubject; - }; - /** - * All errors that would otherwise be unhandled, - * get emitted here. - */ - error: Subject; - }; - - - /** - * Contains counters that can be used in tests - * or to debug problems. - */ - stats: { - down: { - addNewTask: number; - downstreamResyncOnce: number; - downstreamProcessChanges: number; - masterChangeStreamEmit: number; - persistFromMaster: number; - }; - up: { - upstreamInitialSync: number; - forkChangeStreamEmit: number; - processTasks: number; - persistToMaster: number; - persistToMasterHadConflicts: number; - persistToMasterConflictWrites: number; - }; - }; - - /** - * Used in checkpoints and ._meta fields - * to ensure we do not mix up meta data of - * different replications. - * We have to use the promise because the key is hashed which runs async. 
- */ - checkpointKey: Promise; - - /** - * Storage.bulkWrites() that are initialized from the - * downstream, get this flag as context-param - * so that the emitted event bulk can be identified - * to be sourced from the downstream and it will not try - * to upstream these documents again. - */ - downstreamBulkWriteFlag: Promise; - - /** - * Tracks if the streams have been in sync - * for at least one time. - */ - firstSyncDone: { - [direction in RxStorageReplicationDirection]: BehaviorSubject; - }; - - /** - * Can be used to detect if the replication is doing something - * or if it is in an idle state. - */ - streamQueue: { - [direction in RxStorageReplicationDirection]: Promise; - }; - - checkpointQueue: Promise; - - /** - * For better performance we store the last known checkpoint - * document so that we can likely do checkpoint storing without - * conflicts. - */ - lastCheckpointDoc: { - [direction in RxStorageReplicationDirection]?: RxDocumentData>; - }; -}; - -export type RxStorageReplicationDirection = 'up' | 'down'; diff --git a/dist/types/types/rx-attachment.d.ts b/dist/types/types/rx-attachment.d.ts deleted file mode 100644 index f7275bbd8e7..00000000000 --- a/dist/types/types/rx-attachment.d.ts +++ /dev/null @@ -1,30 +0,0 @@ -import type { - RxDocument -} from './rx-document.d.ts'; - -declare type Buffer = any; - -export type RxAttachmentCreator = { - id: string; - /** - * Content type like 'plain/text' - */ - type: string; - /** - * The data of the attachment. 
- */ - data: Blob; -}; - -export declare class RxAttachment { - readonly doc: RxDocument; - readonly id: string; - readonly type: string; - readonly length: number; - readonly digest: string; - readonly rev: string; - - remove(): Promise; - getData(): Promise; - getStringData(): Promise; -} diff --git a/dist/types/types/rx-change-event.d.ts b/dist/types/types/rx-change-event.d.ts deleted file mode 100644 index 72294efb5bf..00000000000 --- a/dist/types/types/rx-change-event.d.ts +++ /dev/null @@ -1,61 +0,0 @@ -import type { - EventBulk, - RxDocumentData -} from './rx-storage.d.ts'; - - -export type RxChangeEventBase = { - operation: 'INSERT' | 'UPDATE' | 'DELETE'; - - readonly documentId: string; - - // optional, does not exist on changes to localdocs of the database - readonly collectionName?: string; - - // true if the event is about a local document, false if not. - readonly isLocal: boolean; - - documentData: RxDocumentData; -}; - -export type RxChangeEventInsert = RxChangeEventBase & { - operation: 'INSERT'; - previousDocumentData: undefined; -}; - -export type RxChangeEventUpdate = RxChangeEventBase & { - operation: 'UPDATE'; - previousDocumentData: RxDocumentData; -}; - -export type RxChangeEventDelete = RxChangeEventBase & { - operation: 'DELETE'; - previousDocumentData: RxDocumentData; -}; - -export type RxChangeEvent = RxChangeEventInsert | RxChangeEventUpdate | RxChangeEventDelete; - -/** - * Internally, all events are processed via bulks - * to save performance when sending them over a transport layer - * or de-duplicating them. - */ -export type RxChangeEventBulk = EventBulk, any> & { - // optional, not given for changes to local documents of a RxDatabase. - collectionName?: string; - /** - * Token of the database instance that created the events. - * Used to determine if the events came from another instance over the BroadcastChannel. - */ - databaseToken: string; - /** - * The storageToken of the RxDatabase that created the events. 
- * Used to ensure we do not process events of other RxDatabases. - */ - storageToken: string; - /** - * If true, the events belong to some internal stuff like from plugins. - * Internal events are not emitted to the outside over the .$ Observables. - */ - internal: boolean; -}; diff --git a/dist/types/types/rx-collection.d.ts b/dist/types/types/rx-collection.d.ts deleted file mode 100644 index c5b3cf90adc..00000000000 --- a/dist/types/types/rx-collection.d.ts +++ /dev/null @@ -1,119 +0,0 @@ -import type { - RxJsonSchema, - RxDocument, - MigrationStrategies, - RxConflictHandler -} from './index.d.ts'; -import type { - RxCollectionBase -} from '../rx-collection.d.ts'; -import type { QueryCache } from '../query-cache.d.ts'; -import type { RxLocalDocumentMutation } from './rx-database.d.ts'; - -export interface KeyFunctionMap { - [key: string]: Function; -} -export interface NumberFunctionMap { - [key: number]: Function; -} - - -/** - * Params to create a new collection. - * Notice the name of the collection is set one level higher - * when calling addCollections() - */ -export type RxCollectionCreator = { - schema: RxJsonSchema; - instanceCreationOptions?: any; - migrationStrategies?: MigrationStrategies; - autoMigrate?: boolean; - statics?: KeyFunctionMap; - methods?: KeyFunctionMap; - attachments?: KeyFunctionMap; - options?: any; - /** - * Set this to true if you want to store local documents - * in the RxCollection instance. - */ - localDocuments?: boolean; - cacheReplacementPolicy?: RxCacheReplacementPolicy; - - /** - * Depending on which plugins or storage is used, - * the RxCollection might need a way to resolve conflicts - * which is done by this conflict handler. 
- * If no conflict handler is provided, a master-always-wins handler - * will be used as default - */ - conflictHandler?: RxConflictHandler; -}; - -export type RxCacheReplacementPolicy = (collection: RxCollection, queryCache: QueryCache) => void; - -export type RxCollectionHookCallback< - RxDocumentType, - OrmMethods, - Reactivity -> = ( - data: RxDocumentType, - instance: RxDocument -) => void | Promise | any; -export type RxCollectionHookNoInstance = (data: RxDocumentType) => void | Promise | any; -export type RxCollectionHookCallbackNonAsync = ( - data: RxDocumentType, - instance: RxDocument -) => void | any; -export type RxCollectionHookNoInstanceCallback< - RxDocumentType, - OrmMethods, - Reactivity -> = ( - data: RxDocumentType, - instance: RxCollection -) => Promise | void | any; - -export type RxCollection< - RxDocumentType = any, - OrmMethods = {}, - StaticMethods = {}, - InstanceCreationOptions = {}, - Reactivity = unknown -> = StaticMethods & - RxCollectionBase & - RxCollectionGenerated; - -export interface RxCollectionGenerated extends RxLocalDocumentMutation> { - - // HOOKS - preInsert(fun: RxCollectionHookNoInstanceCallback, parallel: boolean): void; - preSave(fun: RxCollectionHookCallback, parallel: boolean): void; - preRemove(fun: RxCollectionHookCallback, parallel: boolean): void; - postInsert(fun: RxCollectionHookCallback, parallel: boolean): void; - postSave(fun: RxCollectionHookCallback, parallel: boolean): void; - postRemove(fun: RxCollectionHookCallback, parallel: boolean): void; - postCreate(fun: RxCollectionHookCallbackNonAsync): void; - - // only inMemory-collections - awaitPersistence(): Promise; -} - -/** - * Properties are possibly encrypted so type them as any. TODO this is no longer needed. 
- */ -export type RxDumpCollectionAsAny = { [P in keyof T]: any }; - -interface RxDumpCollectionBase { - name: string; - passwordHash?: string; - schemaHash: string; -} -export interface RxDumpCollection extends RxDumpCollectionBase { - docs: RxDocumentType[]; -} -/** - * All base properties are typed as any because they can be encrypted. - */ -export interface RxDumpCollectionAny extends RxDumpCollectionBase { - docs: RxDumpCollectionAsAny[]; -} diff --git a/dist/types/types/rx-database-internal-store.d.ts b/dist/types/types/rx-database-internal-store.d.ts deleted file mode 100644 index 5b1e3aff790..00000000000 --- a/dist/types/types/rx-database-internal-store.d.ts +++ /dev/null @@ -1,54 +0,0 @@ -import type { - RxMigrationStatus -} from '../plugins/migration-schema/index.ts'; -import type { RxJsonSchema } from './rx-schema.d.ts'; - -export type InternalStoreDocType = { - id: string; - key: string; - context: string; - data: Data; -}; - -/** - * Stores information about the collections. - * The collection.name is the 'key' value. - */ -export type InternalStoreStorageTokenDocType = InternalStoreDocType<{ - rxdbVersion: string; - token: string; - instanceToken: string; - passwordHash?: string; -}>; - -/** - * Stores information about the collections. - * The collection.name is the 'key' value. - */ -export type InternalStoreCollectionDocType = InternalStoreDocType<{ - /** - * Plain name of the collection - */ - name: string; - schema: RxJsonSchema; - schemaHash: string; - version: number; - - /** - * Storages that are connected to this collection - * so that when the collection is removed, - * these storages must also be removed. - * For example the replication meta storage - * must be reset when the collection is removed. - */ - connectedStorages: { - collectionName: string; - schema: RxJsonSchema; - }[]; - - /** - * Contains the migration status - * only if a migration has been started. 
- */ - migrationStatus?: RxMigrationStatus; -}>; diff --git a/dist/types/types/rx-database.d.ts b/dist/types/types/rx-database.d.ts deleted file mode 100644 index c6fb87f4669..00000000000 --- a/dist/types/types/rx-database.d.ts +++ /dev/null @@ -1,104 +0,0 @@ -import type { - RxCollection, - RxDumpCollection, - RxDumpCollectionAsAny -} from './rx-collection.d.ts'; -import type { - RxDatabaseBase -} from '../rx-database.d.ts'; -import { Observable } from 'rxjs'; -import type { RxStorage } from './rx-storage.interface.d.ts'; -import type { RxLocalDocument } from './plugins/local-documents.d.ts'; -import type { RxCleanupPolicy } from './plugins/cleanup.d.ts'; -import type { ById, HashFunction } from './util.d.ts'; -import type { RxReactivityFactory } from './plugins/reactivity.d.ts'; - -export interface RxDatabaseCreator { - storage: RxStorage; - instanceCreationOptions?: InstanceCreationOptions; - name: string; - password?: string | any; - multiInstance?: boolean; - eventReduce?: boolean; - ignoreDuplicate?: boolean; - options?: any; - cleanupPolicy?: Partial; - /** - * Set this to true if you want to store local documents - * in the RxDatabase instance. - */ - localDocuments?: boolean; - - /** - * Hash method used to hash strings and json-stringified objects. - * This hash does not have to be cryptographically secure, - * but it is very important that is does have not create - * collisions. - * Default is the sha256 from the ohash library - * @link https://www.npmjs.com/package/ohash - */ - hashFunction?: HashFunction; - - /** - * By default, count() queries in 'slow' mode are not allowed. - */ - allowSlowCount?: boolean; - - /** - * Can be used to add a custom reactivity Factory - * that is used on all getters and values that end with the double $$. 
- * For example you can use the signals api of your framework and vuejs ref() - */ - reactivity?: RxReactivityFactory; -} - -export type CollectionsOfDatabase = ById; -export type RxDatabase< - Collections = CollectionsOfDatabase, - Internals = any, - InstanceCreationOptions = any, - Reactivity = any -> = RxDatabaseBase< - Internals, - InstanceCreationOptions, - Collections, - Reactivity -> & Collections & RxDatabaseGenerated; - - -export interface RxLocalDocumentMutation { - insertLocal(id: string, data: LocalDocType): Promise< - RxLocalDocument - >; - upsertLocal(id: string, data: LocalDocType): Promise< - RxLocalDocument - >; - getLocal(id: string): Promise< - RxLocalDocument | null - >; - getLocal$(id: string): Observable< - RxLocalDocument | null - >; -} - -export interface RxDatabaseGenerated extends RxLocalDocumentMutation> { } - -/** - * Extract the **DocumentType** of a collection. - */ -type ExtractDTcol

= P extends RxCollection ? T : { [prop: string]: any; }; - -interface RxDumpDatabaseBase { - instanceToken: string; - name: string; - passwordHash: string | null; -} -export interface RxDumpDatabase extends RxDumpDatabaseBase { - collections: RxDumpCollection>[]; -} -/** - * All base properties are typed as any because they can be encrypted. - */ -export interface RxDumpDatabaseAny extends RxDumpDatabaseBase { - collections: RxDumpCollection>>[]; -} diff --git a/dist/types/types/rx-document.d.ts b/dist/types/types/rx-document.d.ts deleted file mode 100644 index 4e9861a8ba9..00000000000 --- a/dist/types/types/rx-document.d.ts +++ /dev/null @@ -1,140 +0,0 @@ -import { - Observable -} from 'rxjs'; - -import type { - RxCollection, -} from './rx-collection.d.ts'; -import type { - RxAttachment, - RxAttachmentCreator -} from './rx-attachment.d.ts'; -import type { RxDocumentData, WithDeleted } from './rx-storage.d.ts'; -import type { RxChangeEvent } from './rx-change-event.d.ts'; -import type { DeepReadonly, MaybePromise, PlainJsonValue } from './util.d.ts'; -import type { UpdateQuery } from './plugins/update.d.ts'; -import type { CRDTEntry } from './plugins/crdt.d.ts'; - - - -export type RxDocument = RxDocumentBase< - RxDocumentType, - OrmMethods, - Reactivity -> & RxDocumentType & OrmMethods & ExtendObservables & ExtendReactivity; - - -/** - * Extend the base properties by the property$ fields - * so it knows that RxDocument.age also has RxDocument.age$ which is - * an observable. - * TODO how to do this for the nested fields? - */ -type ExtendObservables = { - [P in keyof RxDocumentType as `${string & P}$`]: Observable; -}; - -type ExtendReactivity = { - [P in keyof RxDocumentType as `${string & P}$$`]: Reactivity; -}; - -/** - * The public facing modify update function. - * It only gets the document parts as input, that - * are mutateable by the user. 
- */ -export type ModifyFunction = ( - doc: WithDeleted -) => MaybePromise> | MaybePromise; - -/** - * Meta data that is attached to each document by RxDB. - */ -export type RxDocumentMeta = { - /** - * Last write time. - * Unix epoch in milliseconds. - */ - lwt: number; - - /** - * Any other value can be attached to the _meta data. - * Mostly done by plugins to mark documents. - */ - [k: string]: PlainJsonValue; -}; - -export declare interface RxDocumentBase { - isInstanceOfRxDocument: true; - collection: RxCollection; - readonly deleted: boolean; - - readonly $: Observable>; - readonly $$: Reactivity; - readonly deleted$: Observable; - readonly deleted$$: Reactivity; - - readonly primary: string; - readonly allAttachments$: Observable[]>; - - // internal things - _data: RxDocumentData; - primaryPath: string; - revision: string; - /** - * Used to de-duplicate the enriched property objects - * of the document. - */ - _propertyCache: Map; - $emit(cE: RxChangeEvent): void; - _saveData(newData: any, oldData: any): Promise>; - // /internal things - - // Returns the latest state of the document - getLatest(): RxDocument; - - - get$(path: string): Observable; - get$$(path: string): Reactivity; - get(objPath: string): DeepReadonly; - populate(objPath: string): Promise | any | null>; - - /** - * mutate the document with a function - */ - modify(mutationFunction: ModifyFunction, context?: string): Promise>; - incrementalModify(mutationFunction: ModifyFunction, context?: string): Promise>; - - /** - * patches the given properties - */ - patch(patch: Partial): Promise>; - incrementalPatch(patch: Partial): Promise>; - - update(updateObj: UpdateQuery): Promise>; - incrementalUpdate(updateObj: UpdateQuery): Promise>; - - updateCRDT(updateObj: CRDTEntry | CRDTEntry[]): Promise>; - - remove(): Promise>; - incrementalRemove(): Promise>; - - // only for temporary documents - set(objPath: string, value: any): RxDocument; - save(): Promise; - - // attachments - putAttachment( - 
creator: RxAttachmentCreator - ): Promise>; - getAttachment(id: string): RxAttachment | null; - allAttachments(): RxAttachment[]; - - toJSON(withRevAndAttachments: true): DeepReadonly>; - toJSON(withRevAndAttachments?: false): DeepReadonly; - - toMutableJSON(withRevAndAttachments: true): RxDocumentData; - toMutableJSON(withRevAndAttachments?: false): RxDocType; - - destroy(): void; -} diff --git a/dist/types/types/rx-error.d.ts b/dist/types/types/rx-error.d.ts deleted file mode 100644 index a0e04d80aec..00000000000 --- a/dist/types/types/rx-error.d.ts +++ /dev/null @@ -1,197 +0,0 @@ -import type { RxJsonSchema } from './rx-schema.d.ts'; -import { - RxSchema -} from '../rx-schema.ts'; -import type { RxPlugin } from './rx-plugin.d.ts'; -import { ERROR_MESSAGES } from '../plugins/dev-mode/error-messages.ts'; -import type { RxReplicationWriteToMasterRow } from './replication-protocol.d.ts'; -import type { BulkWriteRow, RxDocumentData } from './rx-storage.d.ts'; - -type KeyOf = Extract; -export type RxErrorKey = KeyOf; - -export type { - RxError, - RxTypeError -} from '../rx-error.ts'; - -/** - * this lists all possible parameters - */ -export interface RxErrorParameters { - readonly error?: PlainJsonError; - readonly errors?: PlainJsonError[]; - readonly writeError?: RxStorageWriteError; - readonly schemaPath?: string; - readonly objPath?: string; - readonly rootPath?: string; - readonly childpath?: string; - readonly obj?: any; - readonly document?: any; - readonly schema?: Readonly | RxSchema>; - readonly schemaObj?: any; - readonly pluginKey?: string; - readonly originalDoc?: Readonly; - readonly finalDoc?: Readonly; - readonly regex?: string; - readonly fieldName?: string; - readonly id?: string; - readonly data?: any; - readonly missingCollections?: string[]; - readonly primaryPath?: string; - readonly primary?: string; - readonly primaryKey?: string; - readonly have?: any; - readonly should?: any; - readonly name?: string; - readonly adapter?: any; - readonly 
link?: string; - readonly path?: string; - readonly value?: any; - readonly givenName?: string; - readonly fromVersion?: number; - readonly toVersion?: number; - readonly version?: number; - readonly args?: any; - readonly opts?: any; - readonly dataBefore?: any; - readonly dataAfter?: any; - readonly pull?: boolean; - readonly push?: boolean; - readonly url?: string; - readonly key?: string; - readonly queryObj?: any; - readonly query?: any; - readonly op?: string; - readonly skip?: any; - readonly limit?: any; - readonly passwordHash?: string; - readonly existingPasswordHash?: string; - readonly password?: string | any; - readonly minPassLength?: number; - readonly own?: any; - readonly source?: any; - readonly method?: any; - readonly field?: string; - readonly ref?: string; - readonly funName?: string; - readonly functionName?: string; - readonly schemaHash?: string; - readonly previousSchema?: Readonly>; - readonly previousSchemaHash?: string; - readonly type?: string; - readonly when?: string; - readonly parallel?: boolean; - readonly collection?: any; - readonly database?: any; - readonly storage?: string; - readonly indexes?: Array | Readonly>; - readonly index?: string | string[] | readonly string[]; - readonly plugin?: RxPlugin | any; - readonly plugins?: Set; - - // used in the replication plugin - - /** - * The checkpoint of the response from the last successful - * pull by the client. - * Null if there was no pull operation before - * so that there is no last pulled checkpoint. - */ - readonly checkpoint?: any; - /** - * The documents that failed to be pushed. - * Typed as 'any' because they might be modified by the push modifier. 
- */ - readonly pushRows?: RxReplicationWriteToMasterRow[]; - readonly direction?: 'pull' | 'push'; - -} - -/** - * Error-Items which are created by the jsonschema-validator - */ -export type RxValidationError = { - readonly field: string; - readonly message: string; -}; - -/** - * Use to have a transferable error object - * in plain json instead of a JavaScript Error instance. - */ -export type PlainJsonError = { - name: string; - message: string; - rxdb?: true; - code?: RxErrorKey; - url?: string; - extensions?: Record; - parameters?: RxErrorParameters; - stack?: string; -}; - - - - - -/** - * Error that can happen per document when - * RxStorage.bulkWrite() is called - */ -export type RxStorageWriteErrorBase = { - - status: number - | 409 // conflict - | 422 // schema validation error - | 510 // attachment data missing - ; - - /** - * set this property to make it easy - * to detect if the object is a RxStorageBulkWriteError - */ - isError: true; - - // primary key of the document - documentId: string; - - // the original document data that should have been written. - writeRow: BulkWriteRow; -}; - -export type RxStorageWriteErrorConflict = RxStorageWriteErrorBase & { - status: 409; - /** - * A conflict error state must contain the - * document state in the database. - * This ensures that we can continue resolving a conflict - * without having to pull the document out of the db first. - * Is not set if the error happens on an insert. - */ - documentInDb: RxDocumentData; -}; - -export type RxStorageWriteErrorValidation = RxStorageWriteErrorBase & { - status: 422; - /** - * Other properties that give - * information about the error, - * for example a schema validation error - * might contain the exact error from the validator here. - * Must be plain JSON! 
- */ - validationErrors: RxValidationError[]; -}; - -export type RxStorageWriteErrorAttachment = RxStorageWriteErrorBase & { - status: 510; - attachmentId: string; - documentInDb?: RxDocumentData; -}; - - -export type RxStorageWriteError = - RxStorageWriteErrorConflict | - RxStorageWriteErrorValidation | - RxStorageWriteErrorAttachment; diff --git a/dist/types/types/rx-plugin.d.ts b/dist/types/types/rx-plugin.d.ts deleted file mode 100644 index d6c3fcce5ae..00000000000 --- a/dist/types/types/rx-plugin.d.ts +++ /dev/null @@ -1,156 +0,0 @@ -import type { - RxQuery, - RxQueryOP, - MangoQuery -} from './rx-query.d.ts'; -import type { - RxCollection, - RxCollectionCreator -} from './rx-collection.d.ts'; -import type { - RxStorageInstanceCreationParams -} from './rx-storage.d.ts'; -import type { - DeepReadonly, - FilledMangoQuery, - RxDatabase, - RxDatabaseCreator, - RxDocument, - RxStorage, - RxReplicationWriteToMasterRow, - WithDeleted, - RxState, - BulkWriteRow, - RxStorageInstance -} from './index.d.ts'; -import type { RxSchema } from '../rx-schema.d.ts'; - -export type RxPluginPreCreateRxQueryArgs = { - op: RxQueryOP; - queryObj: MangoQuery; - collection: RxCollection; -}; - -export type RxPluginPreAddRxPluginArgs = { - // the plugin that is getting added - plugin: RxPlugin | any; - // previous added plugins - plugins: Set; -}; - -export type RxPluginPrePrepareQueryArgs = { - rxQuery: RxQuery; - mangoQuery: FilledMangoQuery; -}; - - -/** - * Depending on which plugins are used together, - * it is important that the plugin is able to define if - * the hooks must be added as first or as last array item. - * For example the encryption plugin must run encryption - * before the key-compression changes the fieldnames. - */ -export type RxPluginHooks = { - /** - * Hook function that is added as first. - */ - before?: (i: Input) => void; - /** - * Hook function that is added as last. 
- */ - after?: (i: Input) => void; -}; - -export interface RxPlugin { - /** - * A string to uniquely identifies the plugin. - * Can be used to throw when different versions of the same plugin are used. - * And also other checks. - * Use kebab-case. - */ - readonly name: string; - - /** - * set this to true so RxDB - * knows that this object in a rxdb plugin - */ - readonly rxdb: true; - - /** - * Init function where dependent plugins could be added. - * (optional) - */ - init?(): any; - - prototypes?: { - RxSchema?: (proto: RxSchema) => void; - RxDocument?: (proto: RxDocument) => void; - RxQuery?: (proto: RxQuery) => void; - RxCollection?: (proto: RxCollection) => void; - RxDatabase?: (proto: RxDatabase) => void; - }; - overwritable?: { - isDevMode?: () => boolean; - deepFreezeWhenDevMode?: (obj: T) => DeepReadonly; - validatePassword?: Function; - checkAdapter?: Function; - tunnelErrorMessage?: Function; - }; - hooks?: { - preAddRxPlugin?: RxPluginHooks; - preCreateRxDatabase?: RxPluginHooks; - createRxDatabase?: RxPluginHooks<{ - database: RxDatabase; - creator: RxDatabaseCreator; - }>; - preDestroyRxDatabase?: RxPluginHooks; - postRemoveRxDatabase?: RxPluginHooks<{ - databaseName: string; - storage: RxStorage; - }>; - createRxCollection?: RxPluginHooks<{ - collection: RxCollection; - creator: RxCollectionCreator; - }>; - createRxState?: RxPluginHooks<{ - collection: RxCollection; - state: RxState; - }>; - preCreateRxCollection?: RxPluginHooks & { - name: string; - database: RxDatabase; - }>; - postDestroyRxCollection?: RxPluginHooks; - postRemoveRxCollection?: RxPluginHooks<{ - storage: RxStorage; - databaseName: string; - collectionName: string; - }>; - preCreateRxSchema?: RxPluginHooks; - createRxSchema?: RxPluginHooks; - preCreateRxQuery?: RxPluginHooks; - prePrepareQuery?: RxPluginHooks; - createRxQuery?: RxPluginHooks; - createRxDocument?: RxPluginHooks; - postCreateRxDocument?: RxPluginHooks; - preCreateRxStorageInstance?: RxPluginHooks>; - /** - * Runs 
before a write to the storage instance of a RxCollection or RxDatabase. - */ - preStorageWrite?: RxPluginHooks<{ - storageInstance: RxStorageInstance; - rows: BulkWriteRow[]; - }>; - preMigrateDocument?: RxPluginHooks; - postMigrateDocument?: RxPluginHooks; - preReplicationMasterWrite?: RxPluginHooks<{ - rows: RxReplicationWriteToMasterRow[]; - collection: RxCollection; - }>; - preReplicationMasterWriteDocumentsHandle?: RxPluginHooks<{ - result: WithDeleted[]; - collection: RxCollection; - }>; - }; -} diff --git a/dist/types/types/rx-query.d.ts b/dist/types/types/rx-query.d.ts deleted file mode 100644 index f470487b292..00000000000 --- a/dist/types/types/rx-query.d.ts +++ /dev/null @@ -1,144 +0,0 @@ -import type { - RxQueryBase -} from '../rx-query.d.ts'; -import type { Paths, StringKeys } from './util.d.ts'; - -/** - * Typed Mango Query Selector - * @link https://github.com/mongodb/node-mongodb-native/blob/26bce4a8debb65df5a42dc8599e886c9c83de10d/src/mongo_types.ts - * @link https://stackoverflow.com/a/58436959/3443137 - */ - - -export type PropertyType = string extends Property - ? unknown - : Property extends keyof Type - ? Type[Property] - : Property extends `${number}` - ? Type extends ReadonlyArray - ? ArrayType - : unknown - : Property extends `${infer Key}.${infer Rest}` - ? Key extends `${number}` - ? Type extends ReadonlyArray - ? PropertyType - : unknown - : Key extends keyof Type - ? Type[Key] extends Map - ? MapType - : PropertyType - : unknown - : unknown; - - -export type MangoQueryRegexOptions = 'i' | 'g' | 'm' | 'gi' | 'ig' | 'igm' | string; - -/* - * The MongoDB query library is huge and we do not need all the operators. 
- * If you add an operator here, make sure that you properly add a test in - * the file /test/unit/rx-storage-query-correctness.test.ts - * - * @link https://github.com/kofrasa/mingo#es6 - */ -export interface MangoQueryOperators { - $eq?: PathValueType; - $gt?: PathValueType; - $gte?: PathValueType; - $lt?: PathValueType; - $lte?: PathValueType; - $ne?: PathValueType; - $in?: PathValueType[]; - $nin?: PathValueType[]; - $regex?: string; - $options?: MangoQueryRegexOptions; - $exists?: boolean; - $type?: 'null' | 'boolean' | 'number' | 'string' | 'array' | 'object'; - $mod?: number; - $not?: PathValueType; - $size?: number; - $elemMatch?: MangoQuerySelector; -} - -export type MangoQuerySelector = Partial<{ - [Property in Paths]: MangoQueryOperators | PropertyType; -}> & { - $and?: MangoQuerySelector[]; - $or?: MangoQuerySelector[]; - $nor?: MangoQuerySelector[]; -}; - -/** - * Discussion was at: - * @link https://github.com/pubkey/rxdb/issues/1972 - */ -export type MangoQuerySortDirection = 'asc' | 'desc'; -export type MangoQuerySortPart = { - [k in StringKeys | string]: MangoQuerySortDirection; -}; - -export type MangoQuerySelectorAndIndex = { - /** - * Selector is optional, - * if not given, the query matches all documents - * that are not _deleted=true. - */ - selector?: MangoQuerySelector; - /** - * By default, the RxStorage implementation - * decides which index to use when running the query. - * - * For better performance, a different index might be defined - * by setting it in the query. - * How this improves performance and if the defined index is used, - * depends on the RxStorage implementation. - */ - index?: string | string[]; -}; - -export type MangoQueryNoLimit = MangoQuerySelectorAndIndex & { - /** - * Sorting of the results. - * If no sort is set, RxDB will sort by the primary key. - * Also if sort is set, RxDB will add primaryKey sorting - * if the primaryKey was not in the sort parameters before. 
- * This ensures that there is a deterministic sorting of the - * results, not mather at which order the documents have been - * inserted into the storage. - */ - sort?: MangoQuerySortPart[]; -}; - -export type MangoQuery = MangoQueryNoLimit & { - skip?: number; - limit?: number; -}; - -export type RxQueryOP = 'find' | 'findOne' | 'count' | 'findByIds'; - -export declare class RxQuery< - RxDocumentType = any, - RxQueryResult = any, - OrmMethods = {}, - Reactivity = unknown -> extends RxQueryBase { - equals(queryObj: any): RxQuery; - eq(queryObj: any): RxQuery; - or(queryObj: keyof RxDocumentType | string | any[]): RxQuery; - nor(queryObj: keyof RxDocumentType | string | any[]): RxQuery; - and(queryObj: keyof RxDocumentType | string | any[]): RxQuery; - gt(queryObj: any): RxQuery; - gte(queryObj: any): RxQuery; - lt(queryObj: any): RxQuery; - lte(queryObj: any): RxQuery; - ne(queryObj: any): RxQuery; - in(queryObj: any[]): RxQuery; - nin(queryObj: any[]): RxQuery; - all(queryObj: any): RxQuery; - regex(queryObj: string | { - $regex: string; - $options: MangoQueryRegexOptions; - }): RxQuery; - exists(queryObj: any): RxQuery; - elemMatch(queryObj: any): RxQuery; - mod(p1: any, p2: any, p3: any): RxQuery; -} diff --git a/dist/types/types/rx-schema.d.ts b/dist/types/types/rx-schema.d.ts deleted file mode 100644 index 805cb56d8f2..00000000000 --- a/dist/types/types/rx-schema.d.ts +++ /dev/null @@ -1,185 +0,0 @@ -import { AsTyped } from 'as-typed'; -import type { CRDTSchemaOptions } from './plugins/crdt.d.ts'; -import type { StringKeys } from './util.d.ts'; - -/** - * @link https://github.com/types/lib-json-schema/blob/master/v4/index.d.ts - */ -export type JsonSchemaTypes = 'array' | 'boolean' | 'integer' | 'number' | 'null' | 'object' | 'string' | (string & {}); - -export type CompositePrimaryKey = { - /** - * The top level field of the document that will be used - * to store the composite key as string. 
- */ - key: StringKeys; - - /** - * The fields of the composite key, - * the fields must be required and final - * and have the type number, int, or string. - */ - fields: (StringKeys | string)[] | readonly (StringKeys | string)[]; - /** - * The separator which is used to concat the - * primary fields values. - * Choose a character as separator that is known - * to never appear inside of the primary fields values. - * I recommend to use the pipe char '|'. - */ - separator: string; -}; - -export type PrimaryKey = StringKeys | CompositePrimaryKey; - -export type JsonSchema = { - allOf?: JsonSchema[] | readonly JsonSchema[]; - anyOf?: JsonSchema[] | readonly JsonSchema[]; - oneOf?: JsonSchema[] | readonly JsonSchema[]; - additionalItems?: boolean | JsonSchema; - additionalProperties?: boolean | JsonSchema; - type?: JsonSchemaTypes | JsonSchemaTypes[] | readonly JsonSchemaTypes[]; - description?: string; - dependencies?: { - [key: string]: JsonSchema | string[] | readonly string[]; - }; - exclusiveMinimum?: boolean; - exclusiveMaximum?: boolean; - items?: JsonSchema | JsonSchema[] | readonly JsonSchema[]; - multipleOf?: number; - maxProperties?: number; - maximum?: number; - minimum?: number; - maxLength?: number; - minLength?: number; - maxItems?: number; - minItems?: number; - minProperties?: number; - pattern?: string; - patternProperties?: { - [key: string]: JsonSchema; - }; - properties?: { - [key in StringKeys]: JsonSchema; - }; - required?: string[] | readonly string[]; - uniqueItems?: boolean; - enum?: any[] | readonly any[]; - not?: JsonSchema; - definitions?: { - [key: string]: JsonSchema; - }; - format?: 'date-time' | 'email' | 'hostname' | 'ipv4' | 'ipv6' | 'uri' | string; - example?: any; - - // RxDB-specific - ref?: string; - final?: boolean; -}; - -export interface TopLevelProperty extends JsonSchema { - default?: any; -} - -/** - * @link https://developer.mozilla.org/en-US/docs/Web/API/Compression_Streams_API - */ -export type CompressionMode = 
'deflate' | 'gzip'; - -export type RxJsonSchema< - /** - * The doctype must be given, and '=any' cannot be used, - * otherwise the keyof of primaryKey - * would be optional when the type of the document is not known. - */ - RxDocType -> = { - title?: string; - description?: string; - version: number; - - /** - * The primary key of the documents. - * Must be in the top level of the properties of the schema - * and that property must have the type 'string' - */ - primaryKey: PrimaryKey; - - /** - * TODO this looks like a typescript-bug - * we have to allows all string because the 'object'-literal is not recognized - * retry this in later typescript-versions - */ - type: 'object' | string; - properties: { [key in StringKeys]: TopLevelProperty }; - - /** - * On the top level the required-array must be set - * because we always have to set the primary key to required. - * - * TODO required should be made non-optional on the top level - */ - required?: StringKeys[] | readonly StringKeys[]; - - - /** - * Indexes that will be used for the queries. - * RxDB will internally prepend the _deleted field to the index - * because queries do NOT return documents with _deleted=true. - */ - indexes?: (string | string[])[] | (string | readonly string[])[] | readonly (string | string[])[] | readonly (string | readonly string[])[]; - - /** - * Internally used indexes that do not get _deleted prepended - * by RxDB. Use these to speed up queries that are run manually on the storage - * or to speed up requests when you use the RxDB server. - * These could also be utilised when you build a plugin that - * has to query documents without respecting the _deleted value. 
- */ - internalIndexes?: string[][] | readonly string[][]; - - - encrypted?: string[] | readonly string[]; - keyCompression?: boolean; - /** - * if not set, rxdb will set 'false' as default - * Having additionalProperties: true is not allowed on the root level to ensure - * that property names do not clash with properties of the RxDocument class - * or ORM methods. - */ - additionalProperties?: false; - attachments?: { - encrypted?: boolean; - /** - * @link https://developer.mozilla.org/en-US/docs/Web/API/Compression_Streams_API - */ - compression?: CompressionMode; - }; - /** - * Options for the sharding plugin of rxdb-premium. - * We set these on the schema because changing the shard amount or mode - * will require a migration. - * @link https://rxdb.info/rx-storage-sharding.html - */ - sharding?: { - /** - * Amount of shards. - * This value cannot be changed after you have stored data, - * if you change it anyway, you will loose the existing data. - */ - shards: number; - /** - * Either shard by collection or by database. - * For most use cases (IndexedDB based storages), sharding by collection is the way to go - * because it has a faster initial load time. - */ - mode: 'database' | 'collection'; - }; - crdt?: CRDTSchemaOptions; -}; - -/** - * Used to aggregate the document type from the schema. 
- * @link https://github.com/pubkey/rxdb/discussions/3467 - */ -export type ExtractDocumentTypeFromTypedRxJsonSchema = AsTyped; diff --git a/dist/types/types/rx-storage.d.ts b/dist/types/types/rx-storage.d.ts deleted file mode 100644 index 79ff9be3fef..00000000000 --- a/dist/types/types/rx-storage.d.ts +++ /dev/null @@ -1,347 +0,0 @@ -import type { ChangeEvent } from 'event-reduce-js'; -import type { RxChangeEvent } from './rx-change-event.d.ts'; -import type { RxDocumentMeta } from './rx-document.d.ts'; -import type { RxStorageWriteError } from './rx-error.d.ts'; -import type { RxJsonSchema } from './rx-schema.d.ts'; -import type { Override } from './util.d.ts'; - -/** - * The document data how it comes out of the storage instance. - * Contains all meta data like revision, attachments and deleted-flag. - */ -export type RxDocumentData = T & { - - /** - * As other NoSQL databases, - * RxDB also assumes that no data is finally deleted. - * Instead the documents are stored with _deleted: true - * which means they will not be returned at queries. - */ - _deleted: boolean; - - /** - * The attachments meta data is stored besides to document. - */ - _attachments: { - [attachmentId: string]: RxAttachmentData; - }; - - /** - * Contains a revision which is concatenated with a [height: number]-[identifier: string] - * like: '1-3hl4kj3l4kgj34g34glk'. - * The revision is used to detect write conflicts and have a document history. - * Revisions behave similar to couchdb revisions: - * @link https://docs.couchdb.org/en/stable/replication/conflicts.html#revision-tree - - * When writing a document, you must send the correct revision in the previous-field - * to make sure that you do not cause a write conflict. - * The revision of the 'new' document-field must be created, for example via util.createRevision(). - * Any revision that matches the [height]-[hash] format can be used. 
- */ - _rev: string; - _meta: RxDocumentMeta; -}; - -export type RxDocumentDataById = { - [documentId: string]: RxDocumentData; -}; - -/** - * The document data how it is send to the - * storage instance to save it. - */ -// We & T here instead of in RxDocumentData to preserver indexability by keyof T which the Override breaks -export type RxDocumentWriteData = T & Override, { - _attachments: { - /** - * To create a new attachment, set the write data - * To delete an attachment, leave it out on the _attachments property. - * To change an attachment, set the new write data. - * To not touch an attachment, just send the stub again - * which came out of the storage instance. - */ - [attachmentId: string]: RxAttachmentData | RxAttachmentWriteData; - }; -}>; - -export type WithDeleted = DocType & { - _deleted: boolean; -}; -export type WithDeletedAndAttachments = DocType & { - _deleted: boolean; - - /** - * Here the _attachments might exist - * or might not, depending one the use case. - */ - _attachments?: { - [attachmentId: string]: RxAttachmentData | RxAttachmentWriteData; - }; -}; - -/** - * Send to the bulkWrite() method of a storage instance. - */ -export type BulkWriteRow = { - /** - * The current document state in the storage engine, - * assumed by the application. - * Undefined if the document is a new insert. - * Notice that we send the full document data as 'previous', not just the revision. - * The reason is that to get the previous revision you anyway have to get the full - * previous document and so it is easier to just send it all to the storage instance. - * This will later allow us to use something different then the _rev key for conflict detection - * when we implement other storage instances. - */ - previous?: RxDocumentData; - /** - * The new document data to be stored in the storage instance. 
- */ - document: RxDocumentWriteData; -}; -export type BulkWriteRowById = { - [documentId: string]: BulkWriteRow; -}; - -/** - * After the RxStorage has processed all rows, - * we have this to work with afterwards. - */ -export type BulkWriteRowProcessed = BulkWriteRow & { - document: RxDocumentData; -}; - - -export type RxAttachmentData = { - /** - * Size of the attachments data - */ - length: number; - /** - * Content type like 'plain/text' - */ - type: string; - /** - * The hash of the attachments content. - * It is calculated by RxDB, and send to the storage. - * The only guarantee is that the digest will change when the attachments data changes. - * @link https://github.com/pouchdb/pouchdb/issues/3156#issuecomment-66831010 - * @link https://github.com/pubkey/rxdb/pull/4107 - */ - digest: string; -}; - -/** - * Data which is needed for new attachments - * that are send from RxDB to the RxStorage implementation. - */ -export type RxAttachmentWriteData = RxAttachmentData & { - /** - * The data of the attachment. As string in base64 format. - * In the past we used Blob internally but it created many - * problems because of then we need the full data (for encryption/compression) - * so we anyway have to get the string value out of the Blob. - * - * Also using Blob has no performance benefit because in some RxStorage implementations, - * it just keeps the transaction open for longer because the Blob - * has be be read. - */ - data: string; -}; - - -/** - * The returned data from RxStorageInstance.bulkWrite() - * For better performance, we do NOT use an indexed object, - * but only plain arrays. Because most of the time - * RxDB anyway only need the array data and we can save performance - * by not indexing the results. - */ -export type RxStorageBulkWriteResponse = { - /** - * contains all succeeded writes. - */ - success: RxDocumentData[]; - /** - * contains all errored writes. 
- */ - error: RxStorageWriteError[]; -}; - -/** - * We return a complex object instead of a single array - * so we are able to add additional fields in the future. - */ -export type RxStorageQueryResult = { - // the found documents, sort order is important. - documents: RxDocumentData[]; -}; - -export type RxStorageCountResult = { - count: number; - /** - * Returns the mode which was used by the storage - * to count the documents. - * If this returns 'slow', RxDB will throw by default - * if 'allowSlowCount' is not set. - */ - mode: 'fast' | 'slow'; -}; - -export type RxStorageInstanceCreationParams = { - - /** - * A string to uniquely identify the instance of the JavaScript object - * of the RxDatabase where this RxStorageInstance belongs to. - * In most cases you would use RxDatabase.token here. - * - * This is used so that we can add caching or reuse stuff that belongs to the same RxDatabase. - * For example the BroadcastChannel that is used for event propagation between multiple browser tabs - * is cached by this token. - * - * In theory we could just use the databaseName for that. But to make it easier in unit tests - * to simulate cross-tab usage, we cannot assume that the databaseName is unique in a single - * JavaScript process. Therefore we use the instance token instead. - */ - databaseInstanceToken: string; - - - databaseName: string; - collectionName: string; - schema: RxJsonSchema>; - options: InstanceCreationOptions; - /** - * If multiInstance is true, there can be more - * then one instance of the database, for example - * when multiple browser tabs exist or more then one Node.js - * process relies on the same storage. - */ - multiInstance: boolean; - password?: string | any; - - /** - * Some storages can do additional checks - * that are performance expensive - * and should only be done in dev-mode. - */ - devMode: boolean; -}; - -export type ChangeStreamOptions = { - - /** - * Sequence number of the first event to start with. 
- * If you want to get all ongoing events, - * first get the latest sequence number and input it here. - * - * Optional on changeStream, - * will start from the newest sequence. - */ - startSequence?: number; - /** - * limits the amount of results - */ - limit?: number; -}; - -/** - * In the past we handles each RxChangeEvent by its own. - * But it has been shown that this take way more performance then needed, - * especially when the events get transferred over a data layer - * like with WebWorkers or the BroadcastChannel. - * So we now process events as bulks internally. - */ -export type EventBulk = { - /** - * Unique id of the bulk, - * used to detect duplicate bulks - * that have already been processed. - */ - id: string; - events: EventType[]; - - /** - * Required for replication. - * Passing this checkpoint into getChangedDocumentsSince() - * must return all items that have been modified AFTER this write event. - */ - checkpoint: CheckpointType; - - /** - * The context that was given at the call to bulkWrite() - * that caused this EventBulk. - */ - context: string; - - /** - * Unix timestamp in milliseconds of when the operation was triggered - * and when it was finished. - * This is optional because we do not have this time - * for events that come from the internal storage instance changestream. - * TODO do we even need this values? - */ - startTime: number; - endTime: number; -}; - -export type ChangeStreamEvent = ChangeEvent> & { - /** - * An integer that is increasing - * and unique per event. - * Can be used to sort events or get information - * about how many events there are. - */ - sequence: number; - /** - * The value of the primary key - * of the changed document - */ - id: string; -}; - -export type RxStorageChangeEvent = Omit, 'isLocal' | 'collectionName'>; - -/** - * An example for how a RxStorage checkpoint can look like. - * NOTICE: Not all implementations use this type. 
- */ -export type RxStorageDefaultCheckpoint = { - id: string; - lwt: number; -}; - - - - -export type CategorizeBulkWriteRowsOutput = { - - // TODO only needs the document, not the row. - bulkInsertDocs: BulkWriteRowProcessed[]; - bulkUpdateDocs: BulkWriteRowProcessed[]; - - errors: RxStorageWriteError[]; - eventBulk: EventBulk>, any>; - attachmentsAdd: { - documentId: string; - attachmentId: string; - attachmentData: RxAttachmentWriteData; - digest: string; - }[]; - attachmentsRemove: { - documentId: string; - attachmentId: string; - digest: string; - }[]; - attachmentsUpdate: { - documentId: string; - attachmentId: string; - attachmentData: RxAttachmentWriteData; - digest: string; - }[]; - /** - * Contains the non-error document row that - * has the newest _meta.lwt time. - * Empty if no successful write exists. - */ - newestRow?: BulkWriteRowProcessed; -}; diff --git a/dist/types/types/rx-storage.interface.d.ts b/dist/types/types/rx-storage.interface.d.ts deleted file mode 100644 index e703089a95b..00000000000 --- a/dist/types/types/rx-storage.interface.d.ts +++ /dev/null @@ -1,323 +0,0 @@ -import type { - BulkWriteRow, - EventBulk, - RxDocumentData, - RxStorageBulkWriteResponse, - RxStorageChangeEvent, - RxStorageCountResult, - RxStorageInstanceCreationParams, - RxStorageQueryResult -} from './rx-storage.ts'; -import type { - MangoQuerySelector, - MangoQuerySortPart, - RxConflictResultionTask, - RxConflictResultionTaskSolution, - RxJsonSchema, - RxQueryPlan -} from './index.d.ts'; -import type { - Observable -} from 'rxjs'; - -/** - * RxStorage - * This is an interface that abstracts the storage engine. - * This allows us to use RxDB with different storage engines. - * - * @link https://rxdb.info/rx-storage.html - * @link https://github.com/pubkey/rxdb/issues/1636 - */ - - -/** - * A RxStorage is a module that acts - * as a factory that can create multiple RxStorageInstance - * objects. 
- * - * All data inputs and outputs of a StorageInstance must be plain json objects. - * Do not use Map, Set or anything else that cannot be JSON.stringify-ed. - * This will ensure that the storage can exchange data - * when it is a WebWorker or a WASM process or data is send via BroadcastChannel. - */ -export interface RxStorage { - /** - * name of the storage engine - * used to detect if plugins do not work so we can throw proper errors. - */ - readonly name: string; - - /** - * RxDB version is part of the storage - * so we can have fallbacks and stuff when - * multiple storages with different version are in use - * like in the storage migration plugin. - */ - readonly rxdbVersion: string; - - /** - * Creates a storage instance - * that can contain the NoSQL documents of a collection. - */ - createStorageInstance( - params: RxStorageInstanceCreationParams - ): Promise>; -} - - -/** - * User provided mango queries will be filled up by RxDB via normalizeMangoQuery() - * so we do not have to do many if-field-exist tests in the internals. - */ -export type FilledMangoQuery = { - /** - * The selector is required here. - */ - selector: MangoQuerySelector>; - - /** - * In contrast to the user-provided MangoQuery, - * the sorting is required here because - * RxDB has to ensure that the primary key is always - * part of the sort params. - */ - sort: MangoQuerySortPart>[]; - - /** - * In the normalized mango query, - * the index must always be a string[], - * never just a string. - * This makes it easier to use the query because - * we do not have to do an array check. - */ - index?: string[]; - - /** - * Skip must be set which defaults to 0 - */ - skip: number; - - limit?: number; -}; - - -/** - * Before sending a query to the storageInstance.query() - * we run it through the query planner and do some normalization - * stuff. Notice that the queryPlan is a hint for the storage and - * it is not required to use it when running queries. 
Some storages - * might use their own query planning instead. - */ -export type PreparedQuery = { - // original query from the input - query: FilledMangoQuery; - queryPlan: RxQueryPlan; -}; - -export interface RxStorageInstance< - /** - * The type of the documents that can be stored in this instance. - * All documents in an instance must comply to the same schema. - * Also all documents are RxDocumentData with the meta properties like - * _deleted or _rev etc. - */ - RxDocType, - Internals, - InstanceCreationOptions, - CheckpointType = any -> { - readonly databaseName: string; - /** - * Returns the internal data that is used by the storage engine. - */ - readonly internals: Readonly; - readonly options: Readonly; - /** - * The schema that defines the documents that are stored in this instance. - * Notice that the schema must be enhanced with the meta properties like - * _meta, _rev and _deleted etc. which are added by fillWithDefaultSettings() - */ - readonly schema: Readonly>>; - readonly collectionName: string; - - /** - * (Optional) reference to the underlying persistent storage instance. - * If set, things like replication will run on that storageInstance instead of the parent. - * This is mostly used in things like the memory-synced storage where we want to - * run replications and migrations on the persistent storage instead of the in-memory storage. - * - * Having this is the least hacky option. The only other option would be to toggle all calls to the - * storageInstance by checking the givent context-string. But this would make it impossible - * to run a replication on the parentStorage itself. - */ - readonly underlyingPersistentStorage?: RxStorageInstance; - - /** - * Writes multiple documents to the storage instance. - * The write for each single document is atomic, there - * is no transaction around all documents. - * The written documents must be the newest revision of that documents data. 
- * If the previous document is not the current newest revision, a conflict error - * must be returned. - * It must be possible that some document writes succeed - * and others error. We need this to have a similar behavior as most NoSQL databases. - */ - bulkWrite( - documentWrites: BulkWriteRow[], - /** - * Context will be used in all - * changeStream()-events that are emitted as a result - * of that bulkWrite() operation. - * Used in plugins so that we can detect that event X - * comes from operation Y. - */ - context: string - ): Promise>; - - /** - * Get Multiple documents by their primary value. - * This must also return deleted documents. - */ - findDocumentsById( - /** - * List of primary values - * of the documents to find. - */ - ids: string[], - /** - * If set to true, deleted documents will also be returned. - */ - withDeleted: boolean - - ): Promise< - /** - * For better performance, we return an array - * instead of an indexed object because most consumers - * of this anyway have to fill a Map() instance or - * even do only need the list at all. - */ - RxDocumentData[] - >; - - /** - * Runs a NoSQL 'mango' query over the storage - * and returns the found documents data. - * Having all storage instances behave similar - * is likely the most difficult thing when creating a new - * rx-storage implementation. - */ - query( - preparedQuery: PreparedQuery - ): Promise>; - - /** - * Returns the amount of non-deleted documents - * that match the given query. - * Sort, skip and limit of the query must be ignored! - */ - count( - preparedQuery: PreparedQuery - ): Promise; - - /** - * Returns the plain data of a single attachment. - */ - getAttachmentData( - documentId: string, - attachmentId: string, - digest: string - ): Promise; - - /** - * Returns the current (not the old!) data of all documents that have been changed AFTER the given checkpoint. 
- * If the returned array does not reach the limit, it can be assumed that the "end" is reached, when paginating over the changes. - * Also returns a new checkpoint for each document which can be used to continue with the pagination from that change on. - * Must never return the same document multiple times in the same call operation. - * This is used by RxDB to known what has changed since X so these docs can be handled by the backup or the replication - * plugin. - * - * Important: This method is optional. If not defined, - * RxDB will manually run a query and use the last returned document - * for checkpointing. In the future we might even remove this method completely - * and let RxDB do the work instead of the RxStorage. - */ - getChangedDocumentsSince?( - limit: number, - /** - * The checkpoint from with to start - * when the events are sorted in time. - * If we want to start from the beginning, - * undefined is used as a checkpoint. - */ - checkpoint?: CheckpointType - ): Promise<{ - documents: RxDocumentData[]; - /** - * The checkpoint contains data so that another - * call to getChangedDocumentsSince() will continue - * from exactly the last document that was returned before. - */ - checkpoint: CheckpointType; - }>; - - /** - * Returns an ongoing stream - * of all changes that happen to the - * storage instance. - * Do not forget to unsubscribe. - * - * If the RxStorage support multi-instance, - * and the storage is persistent, - * then the emitted changes of one RxStorageInstance - * must be also emitted to other instances with the same databaseName+collectionName. - * See ./rx-storage-multiinstance.ts - */ - changeStream(): Observable, CheckpointType>>; - - /** - * Runs a cleanup that removes all tompstones - * of documents that have _deleted set to true - * to free up disc space. - * - * Returns true if all cleanable documents have been removed. 
- * Returns false if there are more documents to be cleaned up, - * but not all have been purged because that would block the storage for too long. - */ - cleanup( - /** - * The minimum time in milliseconds - * of how long a document must have been deleted - * until it is purged by the cleanup. - */ - minimumDeletedTime: number - ): Promise< - /** - * True if all docs cleaned up, - * false if there are more docs to clean up - */ - boolean - >; - - /** - * Closes the storage instance so it cannot be used - * anymore and should clear all memory. - * The returned promise must resolve when everything is cleaned up. - */ - close(): Promise; - - /** - * Remove the database and - * deletes all of its data. - */ - remove(): Promise; - - /** - * Instead of passing the conflict-resolver function - * into the storage, we have to work with an observable that emits tasks - * and a resolver that takes resolved tasks. - * This is needed because the RxStorageInstance might run inside of a Worker - * other JavaScript process, so we cannot pass plain code. - */ - conflictResultionTasks(): Observable>; - resolveConflictResultionTask(taskSolution: RxConflictResultionTaskSolution): Promise; -} diff --git a/dist/types/types/util.d.ts b/dist/types/types/util.d.ts deleted file mode 100644 index 2a9dbdbb495..00000000000 --- a/dist/types/types/util.d.ts +++ /dev/null @@ -1,153 +0,0 @@ -import type { RxStorage } from './rx-storage.interface'; - -export type MaybePromise = Promise | T; - - -export type PlainJsonValue = string | number | boolean | PlainSimpleJsonObject | PlainSimpleJsonObject[] | PlainJsonValue[]; -export type PlainSimpleJsonObject = { - [k: string]: PlainJsonValue | PlainJsonValue[]; -}; - -/** - * @link https://stackoverflow.com/a/49670389/3443137 - */ -type DeepReadonly = - T extends (infer R)[] ? DeepReadonlyArray : - T extends Function ? T : - T extends object ? 
DeepReadonlyObject<T> :
-    T;
-
-interface DeepReadonlyArray<T> extends ReadonlyArray<DeepReadonly<T>> { }
-
-type DeepReadonlyObject<T> = {
-    readonly [P in keyof T]: DeepReadonly<T[P]>;
-};
-
-export type MaybeReadonly<T> = T | Readonly<T>;
-
-
-/**
- * Opposite of DeepReadonly,
- * makes everything mutable again.
- */
-type DeepMutable<T> = (
-    T extends object
-    ? {
-        -readonly [K in keyof T]: (
-            T[K] extends object
-            ? DeepMutable<T[K]>
-            : T[K]
-        )
-    }
-    : never
-);
-
-/**
- * Can be used like 'keyof'
- * but only represents the string keys, not the Symbols or numbers.
- * @link https://stackoverflow.com/a/51808262/3443137
- */
-export type StringKeys<X> = Extract<keyof X, string>;
-
-export type AnyKeys<T> = { [P in keyof T]?: T[P] | any };
-export interface AnyObject {
-    [k: string]: any;
-}
-
-/**
- * @link https://dev.to/vborodulin/ts-how-to-override-properties-with-type-intersection-554l
- */
-export type Override<T1, T2> = Omit<T1, keyof T2> & T2;
-
-
-
-export type ById<T> = {
-    [id: string]: T;
-};
-
-/**
- * Must be async to support async hashing like from the WebCrypto API.
- */
-export type HashFunction = (input: string) => Promise<string>;
-
-export declare type QueryMatcher<DocType> = (doc: DocType | DeepReadonly<DocType>) => boolean;
-
-/**
- * To have a deterministic sorting, we cannot return 0,
- * we only return 1 or -1.
- * This ensures that we always end with the same output array, no mather of the
- * pre-sorting of the input array.
- */
-export declare type DeterministicSortComparator<DocType> = (a: DocType, b: DocType) => 1 | -1;
-
-/**
- * To test a storage, we need these
- * configuration values.
- */
-export type RxTestStorage = {
-    // can be used to setup async stuff
-    readonly init?: () => any;
-    // TODO remove name here, it can be read out already via getStorage().name
-    readonly name: string;
-    readonly getStorage: () => RxStorage<any, any>;
-    /**
-     * Returns a storage that is used in performance tests.
-     * For example in a browser it should return the storage with an IndexedDB based adapter,
-     * while in node.js it must use the filesystem.
- */
-    readonly getPerformanceStorage: () => {
-        storage: RxStorage<any, any>;
-        /**
-         * A description that describes the storage and setting.
-         * For example 'dexie-native'.
-         */
-        description: string;
-    };
-    /**
-     * True if the storage is able to
-     * keep data after an instance is closed and opened again.
-     */
-    readonly hasPersistence: boolean;
-    readonly hasMultiInstance: boolean;
-    readonly hasAttachments: boolean;
-
-    /**
-     * Some storages likes the memory-synced storage,
-     * are not able to provide a replication while guaranteeing
-     * data integrity.
-     */
-    readonly hasReplication: boolean;
-
-    /**
-     * To make it possible to test alternative encryption plugins,
-     * you can specify hasEncryption to signal
-     * the test runner that the given storage already contains an
-     * encryption plugin that should be used to test encryption tests.
-     * Otherwise the encryption-crypto-js plugin will be tested.
-     *
-     * hasEncryption must contain a function that is able
-     * to create a new password.
-     */
-    readonly hasEncryption?: () => Promise<string>;
-};
-
-
-/**
- * The paths as strings-type of nested object
- * @link https://stackoverflow.com/a/58436959/3443137
- */
-type Join<K, P> = K extends string | number ?
-    P extends string | number ?
-    `${K}${'' extends P ? '' : '.'}${P}`
-    : never : never;
-
-export type Paths<T, D extends number = 10> = [D] extends [never] ? never : T extends object ?
-    { [K in keyof T]-?: K extends string | number ?
-        `${K}` | (Paths<T[K], Prev[D]> extends infer R ? Join<K, R> : never)
-        : never
-    }[keyof T] : '';
-
-export type Leaves<T, D extends number = 10> = [D] extends [never] ? never : T extends object ?
-    { [K in keyof T]-?: Join<K, Leaves<T[K], Prev[D]>> }[keyof T] : '';
-type Prev = [never, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10,
-    11, 12, 13, 14, 15, 16, 17, 18, 19, 20, ...0[]];
diff --git a/package.json b/package.json
index 6112ad39edb..7165bf25671 100644
--- a/package.json
+++ b/package.json
@@ -380,7 +380,7 @@
        "test:browser:remote": " npm run transpile && cross-env CI=true DEFAULT_STORAGE=remote karma start ./config/karma.conf.cjs --single-run",
        "test:browser:remote:loop": "npm run test:browser:remote && npm run test:browser:remote:loop",
        "test:browser:custom": " npm run transpile && cross-env CI=true DEFAULT_STORAGE=custom karma start ./config/karma.conf.cjs --single-run",
-        "test:replication-firestore": "npm run transpile && firebase emulators:exec \"cross-env DEFAULT_STORAGE=dexie mocha --expose-gc --config ./config/.mocharc.cjs ./test_tmp/replication-firestore.test.js\" --only firestore --project 'rxdb-test'",
+        "test:replication-firestore": "npm run transpile && firebase emulators:exec \"cross-env DEFAULT_STORAGE=dexie mocha --expose-gc --config ./config/.mocharc.cjs ./test_tmp/replication-firestore.test.js\" --only firestore --project 'rxdb-test'",
        "test:replication-couchdb": "npm run transpile && concurrently \"npm run couch:start\" \"cross-env NATIVE_COUCHDB=5984 DEFAULT_STORAGE=dexie mocha --config ./config/.mocharc.cjs ./test_tmp/replication-couchdb.test.js\" --success first --kill-others",
        "test:replication-nats": "npm run transpile && concurrently \"npm run nats:start\" \"cross-env DEFAULT_STORAGE=dexie mocha --config ./config/.mocharc.cjs ./test_tmp/replication-nats.test.js\" --success first --kill-others",
        "test:core": "npm run transpile && mocha ./test_tmp/unit/core.node.js",
@@ -435,7 +435,8 @@
        "preversion": "npm run lint && npm run test",
        "dev": "watch 'npm run test:node:memory' src/ test/",
        "dev:example": "watch 'npm run transpile:src && echo \"done\"' src/ test/",
-        "cloud-signaling-server": "node ./scripts/start-cloud-signaling-server.mjs
--max-old-space-size=2048" + "cloud-signaling-server": "node ./scripts/start-cloud-signaling-server.mjs --max-old-space-size=2048", + "watch:transpile": "nodemon --watch src/ --ext ts --ignore 'src/plugins/*' --exec npm run transpile" }, "pre-commit": [ "lint" @@ -558,6 +559,7 @@ "nconf": "0.12.1", "node-datachannel": "0.9.1", "node-pre-gyp": "0.17.0", + "nodemon": "3.0.1", "pre-commit": "1.2.2", "process": "0.11.10", "querystring-es3": "0.2.1", @@ -582,4 +584,4 @@ "webpack-cli": "5.1.4", "webpack-dev-server": "5.0.4" } -} \ No newline at end of file +} diff --git a/src/event-reduce.ts b/src/event-reduce.ts index 34e7ec5e4fa..e9105487676 100644 --- a/src/event-reduce.ts +++ b/src/event-reduce.ts @@ -6,7 +6,19 @@ import { QueryMatcher, DeterministicSortComparator, StateResolveFunctionInput, - ChangeEvent + ChangeEvent, + hasLimit, + isUpdate, + isDelete, + isFindOne, + isInsert, + hasSkip, + wasResultsEmpty, + wasInResult, + wasSortedAfterLast, + wasLimitReached, + wasMatching, + doesMatchNow, } from 'event-reduce-js'; import type { RxQuery, @@ -31,6 +43,7 @@ export type EventReduceResultPos = { runFullQueryAgain: false; changed: boolean; newResults: RxDocumentType[]; + limitResultsRemoved: boolean; }; export type EventReduceResult = EventReduceResultNeg | EventReduceResultPos; @@ -112,6 +125,48 @@ export function getQueryParams( ); } +// This catches a specific case where we have a limit query (of say LIMIT items), and then +// a document is removed from the result set by the current change. In this case, +// the event-reduce library (rightly) tells us we need to recompute the query to get a +// full result set of LIMIT items. +// However, if we have a "limit buffer", we can instead fill in the missing result from there. +// For more info, see the rx-query.test tests under "Limit Buffer". +// This function checks if we are actually in the specific case where the limit buffer can be used. 
+function canFillResultSetFromLimitBuffer(s: StateResolveFunctionInput) { + // We figure out if this event is our special case using the same "state resolve" functions that event-reduce uses: + // https://github.com/pubkey/event-reduce/blob/fcb46947b29eac97c97dcb05e08af337f362fe5c/javascript/src/states/index.ts#L87 + // (we also keep the state resolve functions in the same order they're defined in event-reduce.js) + return ( + !isInsert(s) && // inserts can never cause + (isUpdate(s) || isDelete(s)) && // both updates and deletes can remove a doc from our results + hasLimit(s) && // only limit queries + !isFindOne(s) && // if it's a findOne, we have no buffer and have to re-compute + !hasSkip(s) && // we could potentially make skip queries work later, but for now ignore them -- too hard + !wasResultsEmpty(s) && // this should never happen + wasLimitReached(s) && // if not, the event reducer shouldn't have a problem + // any value of wasFirst(s), position is not relevant for this case, as wasInResults + // any value of wasLast(s) , position is not relevant for this case, as wasInResults + // any value of sortParamsChanged(s), eg a doc could be archived but also have last_status_update changed + wasInResult(s) && // we only care about docs already in the results set being removed + // any value of wasSortedBeforeFirst(s) -- this is true when the doc is first in the results set + !wasSortedAfterLast(s) && // I don't think this could be true anyways, but whatever + // any value of isSortedBeforeFirst(s) -- this is true when the doc is first in order (but it could still be filtered out) + // any value of isSortedAfterLast(s) + wasMatching(s) && // it couldn't have been wasInResult unless it was also matching + !doesMatchNow(s) // Limit buffer only cares rn when the changed doc was indeed removed (so no longer matching) + ); +} + + +function actionRemovesItemFromResults(action: ActionName): boolean { + return [ + 'removeFirstItem', + 'removeLastItem', + 'removeExisting', 
+ 'runFullQueryAgain', + ].includes(action); +} + export function calculateNewResults( rxQuery: RxQuery, @@ -126,6 +181,7 @@ export function calculateNewResults( const previousResults: RxDocumentType[] = ensureNotFalsy(rxQuery._result).docsData.slice(0); const previousResultsMap: Map = ensureNotFalsy(rxQuery._result).docsDataMap; let changed: boolean = false; + let limitResultsRemoved: boolean = false; const eventReduceEvents: ChangeEvent[] = rxChangeEvents .map(cE => rxChangeEventToEventReduceChangeEvent(cE)) @@ -140,7 +196,31 @@ export function calculateNewResults( }; const actionName: ActionName = calculateActionName(stateResolveFunctionInput); + if (actionName === 'runFullQueryAgain') { + if (canFillResultSetFromLimitBuffer(stateResolveFunctionInput) && rxQuery._limitBufferResults !== null && rxQuery._limitBufferResults.length > 0) { + // replace the missing item with an item from our limit buffer! + const replacementItem = rxQuery._limitBufferResults.shift(); + if (replacementItem === undefined) { + return true; + } + + changed = true; + runAction( + 'removeExisting', + queryParams, + eventReduceEvent, + previousResults, + previousResultsMap, + ); + previousResults.push(replacementItem); + if (previousResultsMap) { + // We have to assume the primaryKey value is a string. 
According to the rxdb docs, this is always the case: + // https://github.com/pubkey/rxdb/blob/c8162c25c7b033fa9f70191512ee84d44d0dd913/docs/rx-schema.html#L2523 + previousResultsMap.set(replacementItem[rxQuery.collection.schema.primaryPath] as string, replacementItem); + } + return false; + } return true; } else if (actionName !== 'doNothing') { changed = true; @@ -151,6 +231,9 @@ export function calculateNewResults( previousResults, previousResultsMap ); + if (actionRemovesItemFromResults(actionName)) { + limitResultsRemoved = true; + } return false; } }); @@ -162,7 +245,8 @@ export function calculateNewResults( return { runFullQueryAgain: false, changed, - newResults: previousResults + newResults: previousResults, + limitResultsRemoved, }; } } diff --git a/src/rx-query-single-result.ts b/src/rx-query-single-result.ts index 1e4f91f01e7..13dc0dee114 100644 --- a/src/rx-query-single-result.ts +++ b/src/rx-query-single-result.ts @@ -73,4 +73,13 @@ export class RxQuerySingleResult{ map ); } + + get docsKeys(): string[] { + const keys = Array.from(this.docsMap.keys()); + return overwriteGetterForCaching( + this, + 'docsKeys', + keys + ); + } } diff --git a/src/rx-query.ts b/src/rx-query.ts index f097accf9b3..6806bcb8d6c 100644 --- a/src/rx-query.ts +++ b/src/rx-query.ts @@ -13,15 +13,14 @@ import { shareReplay } from 'rxjs/operators'; import { - sortObject, - pluginMissing, - overwriteGetterForCaching, + appendToArray, + areRxDocumentArraysEqual, now, - PROMISE_RESOLVE_FALSE, + overwriteGetterForCaching, + pluginMissing, + PROMISE_RESOLVE_FALSE, RX_META_LWT_MINIMUM, RXJS_SHARE_REPLAY_DEFAULTS, - ensureNotFalsy, - areRxDocumentArraysEqual, - appendToArray + sortObject } from './plugins/utils/index.ts'; import { newRxError @@ -30,33 +29,48 @@ import { runPluginHooks } from './hooks.ts'; import type { - RxCollection, - RxDocument, - RxQueryOP, - RxQuery, MangoQuery, - MangoQuerySortPart, - MangoQuerySelector, PreparedQuery, + QueryMatcher, RxChangeEvent, - 
RxDocumentWriteData, + RxCollection, + RxDocument, RxDocumentData, - QueryMatcher, RxJsonSchema, FilledMangoQuery, - ModifyFunction + ModifyFunction, + RxDocumentWriteData, + RxQuery, + RxQueryOP, MangoQuerySelector, MangoQuerySortPart } from './types/index.d.ts'; -import { calculateNewResults } from './event-reduce.ts'; -import { triggerCacheReplacement } from './query-cache.ts'; -import { getQueryMatcher, normalizeMangoQuery, runQueryUpdateFunction } from './rx-query-helper.ts'; +import { getQueryMatcher, getSortComparator, normalizeMangoQuery, runQueryUpdateFunction } from './rx-query-helper.ts'; import { RxQuerySingleResult } from './rx-query-single-result.ts'; import { getQueryPlan } from './query-planner.ts'; +import { calculateNewResults } from './event-reduce.ts'; +import { triggerCacheReplacement } from './query-cache.ts'; +import { ensureNotFalsy } from 'event-reduce-js'; +import { getChangedDocumentsSince } from './rx-storage-helper.ts'; + + +export interface QueryCacheBackend { + getItem(key: string): Promise; + setItem(key: string, value: T): Promise; +} let _queryCount = 0; const newQueryID = function (): number { return ++_queryCount; }; +// allow changes to be 100ms older than the actual lwt value +const RESTORE_QUERY_UPDATE_DRIFT = 100; + +// 5000 seems like a sane number where re-executing the query will be easier than trying to restore +const RESTORE_QUERY_MAX_DOCS_CHANGED = 5000; + +// If a query was persisted more than a week ago, just re-execute it +export const RESTORE_QUERY_MAX_TIME_AGO = 7 * 24 * 60 * 60 * 1000; + export class RxQueryBase< RxDocType, RxQueryResult, @@ -187,6 +201,16 @@ export class RxQueryBase< public _lastExecStart: number = 0; public _lastExecEnd: number = 0; + // Fields used for the Limit Buffer when enabled: + public _limitBufferSize: number | null = null; + public _limitBufferResults: RxDocumentData[] | null = null; + + // Fields used for the persistent query cache when enabled: + public _persistentQueryCacheResult?: 
string[] | string = undefined; + public _persistentQueryCacheResultLwt?: string = undefined; // lwt = latest write time + public _persistentQueryCacheLoaded?: Promise; + public _persistentQueryCacheBackend?: QueryCacheBackend; + /** * ensures that the exec-runs * are not run in parallel @@ -218,12 +242,24 @@ export class RxQueryBase< newResultData = Array.from((newResultData as Map>).values()); } - const newQueryResult = new RxQuerySingleResult( - this.collection, - newResultData, - newResultData.length - ); - this._result = newQueryResult; + const docsDataMap = new Map(); + const docsMap = new Map(); + + + const docs = newResultData.map(docData => this.collection._docCache.getCachedRxDocument(docData)); + + /** + * Instead of using the newResultData in the result cache, + * we directly use the objects that are stored in the RxDocument + * to ensure we do not store the same data twice and fill up the memory. + */ + const docsData = docs.map(doc => { + docsDataMap.set(doc.primary, doc._data); + docsMap.set(doc.primary, doc); + return doc._data; + }); + + this._result = new RxQuerySingleResult(this.collection, docsData, docsData.length); } /** @@ -357,6 +393,10 @@ export class RxQueryBase< return value; } + persistentQueryId() { + return String(this.collection.database.hashFunction(this.toString())); + } + /** * returns the prepared query * which can be send to the storage instance to query for documents. 
@@ -371,10 +411,16 @@ export class RxQueryBase< this.mangoQuery ) }; + (hookInput.mangoQuery.selector as any)._deleted = { $eq: false }; if (hookInput.mangoQuery.index) { hookInput.mangoQuery.index.unshift('_deleted'); } + + if (this._limitBufferSize !== null && hookInput.mangoQuery.limit) { + hookInput.mangoQuery.limit = hookInput.mangoQuery.limit + this._limitBufferSize; + } + runPluginHooks('prePrepareQuery', hookInput); const value = prepareQuery( @@ -478,6 +524,162 @@ export class RxQueryBase< limit(_amount: number | null): RxQuery { throw pluginMissing('query-builder'); } + + enableLimitBuffer(bufferSize: number) { + if (this._limitBufferSize !== null) { + // Limit buffer has already been enabled, do nothing: + return this; + } + if (this._lastExecStart !== 0) { + console.error('Can\'t use limit buffer if query has already executed'); + return this; + } + if (this.mangoQuery.skip || !this.mangoQuery.limit) { + console.error('Right now, limit buffer only works on non-skip, limit queries.'); + return this; + } + this._limitBufferSize = bufferSize; + return this; + } + + enablePersistentQueryCache(backend: QueryCacheBackend) { + if (this._persistentQueryCacheBackend) { + // We've already tried to enable the query cache + return this; + } + this._persistentQueryCacheBackend = backend; + this._persistentQueryCacheLoaded = this._restoreQueryCacheFromPersistedState(); + return this; + } + + private async _restoreQueryCacheFromPersistedState() { + if (!this._persistentQueryCacheBackend) { + // no cache backend provided, do nothing + return; + } + if (this._persistentQueryCacheResult) { + // we already restored the cache once, no need to run twice + return; + } + if (this.mangoQuery.skip || this.op === 'count') { + console.error('The persistent query cache only works on non-skip, non-count queries.'); + return; + } + + // First, check if there are any query results persisted: + const persistentQueryId = this.persistentQueryId(); + const value = await 
this._persistentQueryCacheBackend.getItem(`qc:${persistentQueryId}`); + if (!value || !Array.isArray(value) || value.length === 0) { + // eslint-disable-next-line no-console + console.log(`no persistent query cache found in the backend, returning early ${this.toString()}`); + return; + } + + // If there are persisted ids, create our two Sets of ids from the cache: + const persistedQueryCacheIds = new Set(); + const limitBufferIds = new Set(); + + for (const id of value) { + if (id.startsWith('lb-')) { + limitBufferIds.add(id.replace('lb-', '')); + } else { + persistedQueryCacheIds.add(id); + } + } + + // eslint-disable-next-line no-console + console.time(`Restoring persistent querycache ${this.toString()}`); + + // Next, pull the lwt from the cache: + // TODO: if lwt is too old, should we just give up here? What if there are too many changedDocs? + const lwt = (await this._persistentQueryCacheBackend.getItem(`qc:${persistentQueryId}:lwt`)) as string | null; + if (!lwt) { + return; + } + + // If the query was persisted too long ago, just re-execute it. 
+ if (now() - Number(lwt) > RESTORE_QUERY_MAX_TIME_AGO) { + return; + } + + const primaryPath = this.collection.schema.primaryPath; + + const {documents: changedDocs} = await getChangedDocumentsSince(this.collection.storageInstance, + RESTORE_QUERY_MAX_DOCS_CHANGED, + // make sure we remove the monotonic clock (xxx.01, xxx.02) from the lwt timestamp to avoid issues with + // lookups in indices (dexie) + {id: '', lwt: Math.floor(Number(lwt)) - RESTORE_QUERY_UPDATE_DRIFT} + ); + + // If too many docs have changed, just give up and re-execute the query + if (changedDocs.length === RESTORE_QUERY_MAX_DOCS_CHANGED) { + return; + } + + const changedDocIds = new Set(changedDocs.map((d) => d[primaryPath] as string)); + + const docIdsWeNeedToFetch = [...persistedQueryCacheIds, ...limitBufferIds].filter((id) => !changedDocIds.has(id)); + + // We use _queryCollectionByIds to fetch the remaining docs we need efficiently, pulling + // from query cache if we can (and the storageInstance by ids if we can't): + const otherPotentialMatchingDocs: RxDocumentData[] = []; + await _queryCollectionByIds(this as any, otherPotentialMatchingDocs, docIdsWeNeedToFetch); + + // Now that we have all potential documents, we just filter (in-memory) the ones that still match our query: + let docsData: RxDocumentData[] = []; + for (const doc of changedDocs.concat(otherPotentialMatchingDocs)) { + if (this.doesDocumentDataMatch(doc)) { + docsData.push(doc); + } + } + + // Sort the documents by the query's sort field: + const normalizedMangoQuery = normalizeMangoQuery( + this.collection.schema.jsonSchema, + this.mangoQuery + ); + const sortComparator = getSortComparator(this.collection.schema.jsonSchema, normalizedMangoQuery); + const limit = normalizedMangoQuery.limit ? normalizedMangoQuery.limit : Infinity; + docsData = docsData.sort(sortComparator); + + // We know for sure that all persisted and limit buffer ids (and changed docs before them) are in the correct + // result set. 
And we can't be sure about any past that point. So cut it off there: + const lastValidIndex = docsData.findLastIndex((d) => limitBufferIds.has(d[primaryPath] as string) || persistedQueryCacheIds.has(d[primaryPath] as string)); + docsData = docsData.slice(0, lastValidIndex + 1); + + // Now this is the trickiest part. + // If we somehow have fewer docs than the limit of our query + // (and this wasn't the case because before persistence) + // then there is no way for us to know the correct results, and we re-exec: + const unchangedItemsMayNowBeInResults = ( + this.mangoQuery.limit && + docsData.length < this.mangoQuery.limit && + persistedQueryCacheIds.size >= this.mangoQuery.limit + ); + if (unchangedItemsMayNowBeInResults) { + return; + } + + // Our finalResults are the actual results of this query, and pastLimitItems are any remaining matching + // documents we have left over (past the limit). + const pastLimitItems = docsData.slice(limit); + const finalResults = docsData.slice(0, limit); + + // If there are still items past the first LIMIT items, try to restore the limit buffer with them: + if (limitBufferIds.size && pastLimitItems.length > 0) { + this._limitBufferResults = pastLimitItems; + } else { + this._limitBufferResults = []; + } + + // Finally, set the query's results to what we've pulled from disk: + this._lastEnsureEqual = now(); + this._latestChangeEvent = this.collection._changeEventBuffer.counter; + this._setResultData(finalResults); + + // eslint-disable-next-line no-console + console.timeEnd(`Restoring persistent querycache ${this.toString()}`); + } } export function _getDefaultQuery(): MangoQuery { @@ -512,6 +714,7 @@ export function createRxQuery( // ensure when created with same params, only one is created ret = tunnelQueryCache(ret); + // TODO: clear persistent query cache as well triggerCacheReplacement(collection); return ret; @@ -551,11 +754,14 @@ function _ensureEqual(rxQuery: RxQueryBase): Promise { return rxQuery._ensureEqualQueue; } + 
/** * ensures that the results of this query is equal to the results which a query over the database would give * @return true if results have changed */ -function __ensureEqual(rxQuery: RxQueryBase): Promise { +async function __ensureEqual(rxQuery: RxQueryBase): Promise { + await rxQuery._persistentQueryCacheLoaded; + rxQuery._lastEnsureEqual = now(); /** @@ -592,6 +798,18 @@ function __ensureEqual(rxQuery: RxQueryBase): Promise ._changeEventBuffer .reduceByLastOfDoc(missedChangeEvents); + if (rxQuery._limitBufferResults !== null) { + // Check if any item in our limit buffer was modified by a change event + for (const cE of runChangeEvents) { + if (rxQuery._limitBufferResults.find((doc) => doc[rxQuery.collection.schema.primaryPath] === cE.documentId)) { + // If so, the limit buffer is potential invalid -- let's just blow it up + // TODO: could we instead update the documents in the limit buffer? + rxQuery._limitBufferResults = null; + break; + } + } + } + if (rxQuery.op === 'count') { // 'count' query const previousCount = ensureNotFalsy(rxQuery._result).count; @@ -664,9 +882,73 @@ function __ensureEqual(rxQuery: RxQueryBase): Promise rxQuery._setResultData(newResultData as any); } return ret; + }) + .then(async (returnValue) => { + await updatePersistentQueryCache(rxQuery); + return returnValue; }); } - return Promise.resolve(ret); // true if results have changed + + return ret; // true if results have changed +} + + +async function updatePersistentQueryCache(rxQuery: RxQueryBase) { + if (!rxQuery._persistentQueryCacheBackend) { + return; + } + + const backend = rxQuery._persistentQueryCacheBackend; + + const key = rxQuery.persistentQueryId(); + + // update _persistedQueryCacheResult + rxQuery._persistentQueryCacheResult = rxQuery._result?.docsKeys ?? 
[]; + + const idsToPersist = [...rxQuery._persistentQueryCacheResult]; + if (rxQuery._limitBufferResults) { + rxQuery._limitBufferResults.forEach((d) => { + idsToPersist.push(`lb-${d[rxQuery.collection.schema.primaryPath]}`); + }); + } + // eslint-disable-next-line no-console + console.time(`Query persistence: persisting results of ${JSON.stringify(rxQuery.mangoQuery)}`); + // persist query cache + const lwt = rxQuery._result?.time ?? RX_META_LWT_MINIMUM; + + await Promise.all([ + backend.setItem(`qc:${String(key)}`, idsToPersist), + backend.setItem(`qc:${String(key)}:lwt`, lwt.toString()), + ]); + + // eslint-disable-next-line no-console + console.timeEnd(`Query persistence: persisting results of ${JSON.stringify(rxQuery.mangoQuery)}`); +} + + +// Refactored out of `queryCollection`: modifies the docResults array to fill it with data +async function _queryCollectionByIds(rxQuery: RxQuery | RxQueryBase, docResults: RxDocumentData[], docIds: string[]) { + const collection = rxQuery.collection; + docIds = docIds.filter(docId => { + // first try to fill from docCache + const docData = rxQuery.collection._docCache.getLatestDocumentDataIfExists(docId); + if (docData) { + if (!docData._deleted) { + docResults.push(docData); + } + return false; + } else { + return true; + } + }); + + // otherwise get from storage + if (docIds.length > 0) { + const docsMap = await collection.storageInstance.findDocumentsById(docIds, false); + Object.values(docsMap).forEach(docData => { + docResults.push(docData); + }); + } } /** @@ -707,6 +989,8 @@ export function prepareQuery( export async function queryCollection( rxQuery: RxQuery | RxQueryBase ): Promise[]> { + await rxQuery._persistentQueryCacheLoaded; + let docs: RxDocumentData[] = []; const collection = rxQuery.collection; @@ -736,6 +1020,7 @@ export async function queryCollection( const docsFromStorage = await collection.storageInstance.findDocumentsById(docIds, false); appendToArray(docs, docsFromStorage); } + await 
_queryCollectionByIds(rxQuery, docs, rxQuery.isFindOneByIdQuery); } else { const docId = rxQuery.isFindOneByIdQuery; @@ -755,10 +1040,14 @@ export async function queryCollection( } else { const preparedQuery = rxQuery.getPreparedQuery(); const queryResult = await collection.storageInstance.query(preparedQuery); + if (rxQuery._limitBufferSize !== null && rxQuery.mangoQuery.limit && queryResult.documents.length > rxQuery.mangoQuery.limit) { + // If there are more than query.limit results, we pull out our buffer items from the + // last rxQuery._limitBufferSize items of the results. + rxQuery._limitBufferResults = queryResult.documents.splice(rxQuery.mangoQuery.limit); + } docs = queryResult.documents; } return docs; - } /** @@ -804,7 +1093,6 @@ export function isFindOneByIdQuery( } - export function isRxQuery(obj: any): boolean { return obj instanceof RxQueryBase; } diff --git a/test/helper/cache.ts b/test/helper/cache.ts new file mode 100644 index 00000000000..2be9accf43e --- /dev/null +++ b/test/helper/cache.ts @@ -0,0 +1,31 @@ +import {QueryCacheBackend, RxCollection} from '../../src'; + +export class Cache implements QueryCacheBackend { + private readonly items; + + constructor() { + this.items = new Map(); + } + + getItem(key: string) { + return this.items.get(key); + } + + async setItem(key: string, value: T) { + this.items.set(key, value); + return await Promise.resolve(value); + } + + get size() { + return this.items.size; + } + + getItems() { + return this.items; + } +} + +export function clearQueryCache(collection: RxCollection) { + const queryCache = collection._queryCache; + queryCache._map = new Map(); +} diff --git a/test/unit/rx-query.test.ts b/test/unit/rx-query.test.ts index c9ef6865efb..a0c300aca65 100644 --- a/test/unit/rx-query.test.ts +++ b/test/unit/rx-query.test.ts @@ -7,7 +7,8 @@ import { schemaObjects, schemas, humansCollection, - isNode + isNode, + HumanDocumentType, } from '../../plugins/test-utils/index.mjs'; import { @@ -17,11 +18,16 @@ 
import { promiseWait, randomCouchString, ensureNotFalsy, - deepFreeze + deepFreeze, + now, uncacheRxQuery, RxCollection, } from '../../plugins/core/index.mjs'; import { firstValueFrom } from 'rxjs'; +import {Cache, clearQueryCache} from '../helper/cache.ts'; + +const RESTORE_QUERY_MAX_TIME_AGO = 7 * 24 * 60 * 60 * 1000; + describe('rx-query.test.ts', () => { describeParallel('.constructor', () => { it('should throw dev-mode error on wrong query object', async () => { @@ -1553,4 +1559,830 @@ describe('rx-query.test.ts', () => { db.destroy(); }); }); + + async function setUpLimitBufferCollectionAndQuery(enableLimitBufferSize?: number, numRowsTotal=20, skipRows?: number) { + const limitRows = 10; + const collection = await humansCollection.create(numRowsTotal); + + // Setup a query where the limit buffer would be useful. + // This .find initially matches all docs in the collection + let query = collection.find({selector: { + firstName: { + $ne: 'Dollaritas' + } + }}).sort('-lastName').limit(limitRows); + + if (skipRows !== undefined) { + query = query.skip(skipRows); + } + + if (enableLimitBufferSize !== undefined) { + query.enableLimitBuffer(enableLimitBufferSize); + } + + const initialResults = await query.exec(); + + assert.strictEqual(initialResults.length, Math.min(limitRows, numRowsTotal)); + assert.strictEqual(query._execOverDatabaseCount, 1); + + // We already have a change event for each row from humansCollection.create: + assert.strictEqual(query._latestChangeEvent, numRowsTotal); + + return {query, collection, numRowsTotal, limitRows, initialResults}; + } + + async function removeSingleDocFromMatchingQuery(collection: Awaited>['collection'], doc: HumanDocumentType) { + await collection.find({selector: {passportId: doc.passportId}}).update({ + $set: { + firstName: 'Dollaritas' + } + }); + } + + describeParallel('Limit Buffer', () => { + it('By default, limit queries will have to re-exec when item is removed', async () => { + // Set up the query, without 
using the limit buffer: + const { query, collection, numRowsTotal, limitRows, initialResults } = await setUpLimitBufferCollectionAndQuery(undefined); + + // Now, make a change that removes a single doc from the result set + await removeSingleDocFromMatchingQuery(collection, initialResults[0]); + + // Re-exec the query: + const updatedResults = await query.exec(); + // Confirm the change was processed, and the results are correct: + assert.strictEqual(updatedResults.length, limitRows); + assert.notStrictEqual(updatedResults[0].passportId, initialResults[0].passportId); + assert.strictEqual(query.collection._changeEventBuffer.counter, numRowsTotal + 1); + assert.strictEqual(query._latestChangeEvent, numRowsTotal + 1); + + // Confirm that the query had to run via db again instead of using the query cache: + assert.strictEqual(query._execOverDatabaseCount, 2); + + collection.database.destroy(); + }); + it('Limit buffer works properly in usual cases', async () => { + const limitBufferSize = 5; + const {query, collection, numRowsTotal, limitRows, initialResults} = await setUpLimitBufferCollectionAndQuery(limitBufferSize, 30); + + // Now, make a change that removes a single doc from the result set + await removeSingleDocFromMatchingQuery(collection, initialResults[0]); + + // Re-exec the query: + const updatedResults = await query.exec(); + // Confirm the change was processed, and the results are correct: + assert.strictEqual(updatedResults.length, limitRows); + assert.notStrictEqual(updatedResults[0].passportId, initialResults[0].passportId); + assert.strictEqual(query.collection._changeEventBuffer.counter, numRowsTotal + 1); + assert.strictEqual(query._latestChangeEvent, numRowsTotal + 1); + + // Confirm that the query DID NOT exec over the db again, because it used the query cache via limit buffer: + assert.strictEqual(query._execOverDatabaseCount, 1); + // And that one item was taken from the limit buffer: + assert.strictEqual(query._limitBufferResults?.length, 
limitBufferSize - 1); + + // Do it all again to make sure this is consistent across multiple updates: + await removeSingleDocFromMatchingQuery(collection, initialResults[8]); + const updatedResultsAgain = await query.exec(); + assert.strictEqual(updatedResultsAgain.length, limitRows); + assert.strictEqual(query._execOverDatabaseCount, 1); + + // However, if we "use up" the whole limit buffer (5 documents), + // the query will have to re-exec. Let's remove 3 more items to show that: + for (const doc of initialResults.slice(1, 4)) { + await removeSingleDocFromMatchingQuery(collection, doc); + await query.exec(); + assert.strictEqual(query._execOverDatabaseCount, 1); + } + + // The Limit buffer should now be empty: + assert.strictEqual(query._limitBufferResults?.length, 0); + + // So removing one more item will require a re-exec on the db: + await removeSingleDocFromMatchingQuery(collection, initialResults[4]); + await query.exec(); + assert.strictEqual(query._execOverDatabaseCount, 2); + + // After this re-exec on the db, the limit buffer should be filled again: + assert.strictEqual(query._limitBufferResults?.length, limitBufferSize); + + // And further removals will use the new limit buffer again: + await removeSingleDocFromMatchingQuery(collection, initialResults[5]); + const finalResults = await query.exec(); + assert.strictEqual(finalResults.length, limitRows); + assert.strictEqual(query._execOverDatabaseCount, 2); + assert.strictEqual(query._limitBufferResults?.length, limitBufferSize - 1); + + collection.database.destroy(); + }); + it('Limit buffer doesn\'t do anything when fewer than LIMIT items', async () => { + // Set up with only 8 rows total, but a limit of 10 (and limit buffer 5): + const limitBufferSize = 5; + const {query, collection, numRowsTotal, initialResults} = await setUpLimitBufferCollectionAndQuery(limitBufferSize, 8); + + // Now, make a change that removes a single doc from the result set + await removeSingleDocFromMatchingQuery(collection, 
initialResults[0]); + + // Re-exec the query after removing one, so the results should be 7 docs now: + const updatedResults = await query.exec(); + // Confirm the change was processed, and the results are correct: + assert.strictEqual(updatedResults.length, numRowsTotal - 1); + assert.notStrictEqual(updatedResults[0].passportId, initialResults[0].passportId); + + // And the limitBuffer wasn't filled at all: + assert.strictEqual(query._limitBufferResults, null); + + // The query wouldn't have to re-exec because of the normal query cache: + assert.strictEqual(query._execOverDatabaseCount, 1); + + collection.database.destroy(); + }); + it('Limit buffer works with skip=0', async () => { + // Set up with a skip=0 (limit buffer should work normally) + const limitBufferSize = 5; + const {query, collection, initialResults} = await setUpLimitBufferCollectionAndQuery(limitBufferSize, 20, 0); + assert.strictEqual(query._limitBufferResults?.length, limitBufferSize); + await removeSingleDocFromMatchingQuery(collection, initialResults[1]); + await query.exec(); + assert.strictEqual(query._execOverDatabaseCount, 1); + collection.database.destroy(); + }); + it('Limit buffer does nothing with a non-zero skip', async () => { + const limitBufferSize = 5; + const {query, collection, initialResults} = await setUpLimitBufferCollectionAndQuery(limitBufferSize, 20, 10); + assert.strictEqual(query._limitBufferResults, null); + await removeSingleDocFromMatchingQuery(collection, initialResults[1]); + await query.exec(); + assert.strictEqual(query._execOverDatabaseCount, 2); + collection.database.destroy(); + }); + it('Limit buffer does nothing if item is removed from results due to sort changing only', async () => { + // Do a normal setup with the limit, and confirm the limit buffer gets filled: + const limitBufferSize = 5; + const {query, collection, initialResults} = await setUpLimitBufferCollectionAndQuery(limitBufferSize, 20); + assert.strictEqual(query._limitBufferResults?.length, 
limitBufferSize); + assert.strictEqual(query._execOverDatabaseCount, 1); + + // Instead of removing an item from the results by making it break the query selector + // (what removeSingleDocFromMatchingQuery does) just move it to the end of the sort + // which will kick it out of the query results due to the LIMIT + await collection.find({selector: {passportId: initialResults[0].passportId}}).update({ + $set: { + lastName: 'AAAAAAAAAAAAAAA' + } + }); + + // Explicitly, the limit buffer does not replace items in this case (although it technically + // could with little trouble in the future, we just haven't implemented it) + // so the query should re-run on the database to fill in the missing document: + const updatedResults = await query.exec(); + assert.strictEqual(query._execOverDatabaseCount, 2); + assert.notStrictEqual(updatedResults[0].passportId, initialResults[0].passportId); + collection.database.destroy(); + }); + it('Limit buffer omits buffered items that have been modified to no longer match the query', async () => { + const limitBufferSize = 5; + const {query, collection, initialResults} = await setUpLimitBufferCollectionAndQuery(limitBufferSize, 20); + + if (query._limitBufferResults === null) { + throw new Error('_limitBufferResults not set'); + } + // Get the first item from the limit buffer, and change it so it no longer matches the query selector: + const firstBufferItem = query._limitBufferResults[0]; + await collection.find({selector: {passportId: firstBufferItem.passportId}}).update({ + $set: { + firstName: 'Dollaritas' + } + }); + // Now, remove an item from the initial results, so that the buffer _should_ be used + // to fill the last item in the updated results. 
+ await removeSingleDocFromMatchingQuery(collection, initialResults[1]); + + // Make sure we DO NOT pull the modified item from the limit buffer, as it no longer matches query: + const updatedResults = await query.exec(); + assert.notStrictEqual(updatedResults[updatedResults.length - 1].passportId, firstBufferItem.passportId); + + collection.database.destroy(); + }); + }); + + async function setUpPersistentQueryCacheCollection() { + const collection = await humansCollection.create(0); + return {collection}; + } + + describeParallel('Persistent Query Cache', () => { + it('query fills cache', async () => { + const {collection} = await setUpPersistentQueryCacheCollection(); + + const query = collection.find({ limit: 1 }); + const cache = new Cache(); + query.enableLimitBuffer(5).enablePersistentQueryCache(cache); + + const human1 = schemaObjects.humanData(); + const human2 = schemaObjects.humanData(); + + await collection.bulkInsert([human1, human2]); + await query.exec(); + + assert.strictEqual(cache.size, 2); + + collection.database.destroy(); + }); + + it('does not query from database after restoring from persistent query cache', async () => { + const {collection} = await setUpPersistentQueryCacheCollection(); + + const human1 = schemaObjects.humanData(); + const human2 = schemaObjects.humanData(); + + await collection.bulkInsert([human1, human2]); + + const query = collection.find({ limit: 2 }); + + // fill cache + const queryId = query.persistentQueryId(); + const cache = new Cache(); + await cache.setItem(`qc:${queryId}`, [human1.passportId, human2.passportId]); + await cache.setItem(`qc:${queryId}:lwt`, `${now()}`); + query.enableLimitBuffer(5).enablePersistentQueryCache(cache); + + // execute query + const result = await query.exec(); + + assert.strictEqual(result.length, 2); + assert.strictEqual(query._execOverDatabaseCount, 0); + + collection.database.destroy(); + }); + + it('does not query from database after modifying a document', async () => { + const 
{collection} = await setUpPersistentQueryCacheCollection(); + + const human1 = schemaObjects.humanData(); + const human1Age = human1.age; + + await collection.bulkInsert([human1]); + + const query1 = collection.find({ selector: { age: human1Age }}); + + // fill cache + const queryId = query1.persistentQueryId(); + const cache = new Cache(); + await cache.setItem(`qc:${queryId}`, [human1.passportId]); + await cache.setItem(`qc:${queryId}:lwt`, `${now()}`); + query1.enableLimitBuffer(5).enablePersistentQueryCache(cache); + + // execute query + const result1 = await query1.exec(); + assert.strictEqual(result1.length, 1); + + const human1Doc = result1[0]; + await human1Doc.modify(data => { + data.age += 1; + return data; + }); + + clearQueryCache(collection); + + const query2 = collection.find({ selector: { age: human1Age }}); + query2.enableLimitBuffer(5).enablePersistentQueryCache(cache); + + const result2 = await query2.exec(); + + assert.strictEqual(result1.length, 1); + assert.strictEqual(result2.length, 0); + assert.strictEqual(query1._execOverDatabaseCount, 0); + assert.strictEqual(query2._execOverDatabaseCount, 0); + + collection.database.destroy(); + }); + + it('does not query from database after adding an object', async () => { + const {collection} = await setUpPersistentQueryCacheCollection(); + + const human1 = schemaObjects.humanData(); + const human2 = schemaObjects.humanData(); + const human3 = schemaObjects.humanData(); + + await collection.bulkInsert([human1, human2]); + + const query = collection.find({ limit: 3 }); + const queryId = query.persistentQueryId(); + const cache = new Cache(); + await cache.setItem(`qc:${queryId}`, [human1.passportId, human2.passportId]); + await cache.setItem(`qc:${queryId}:lwt`, `${now()}`); + query.enableLimitBuffer(5).enablePersistentQueryCache(cache); + + const result1 = await query.exec(); + + await collection.insert(human3); + + const result2 = await query.exec(); + + assert.strictEqual(result1.length, 2); + 
assert.strictEqual(result2.length, 3); + assert.strictEqual(query._execOverDatabaseCount, 0); + + collection.database.destroy(); + }); + + it('does return docs from cache in correct order and with limits applied', async () => { + const {collection} = await setUpPersistentQueryCacheCollection(); + + const human1 = schemaObjects.humanData('1', 30); + const human2 = schemaObjects.humanData('2', 40); + const human3 = schemaObjects.humanData('3', 50); + + await collection.bulkInsert([human2, human3]); + + const query1 = collection.find({ limit: 2, sort: [{age: 'asc'}] }); + const queryId = query1.persistentQueryId(); + const lwt = now(); + + const cache = new Cache(); + await cache.setItem(`qc:${queryId}`, [human2.passportId, human3.passportId]); + await cache.setItem(`qc:${queryId}:lwt`, `${lwt}`); + + await collection.insert(human1); + + clearQueryCache(collection); + + const query2 = collection.find({ limit: 2, sort: [{age: 'asc'}] }); + query2.enableLimitBuffer(5).enablePersistentQueryCache(cache); + + const result2 = await query2.exec(); + + assert.strictEqual(query1._execOverDatabaseCount, 0); + assert.strictEqual(query2._execOverDatabaseCount, 0); + assert.deepStrictEqual(result2.map(item => item.passportId), ['1', '2']); + + collection.database.destroy(); + }); + + it('removing an item from the database, but not from cache does not lead to wrong results after restoring', async () => { + const {collection} = await setUpPersistentQueryCacheCollection(); + + const human1 = schemaObjects.humanData('1', 30); + const human2 = schemaObjects.humanData('2', 40); + const human3 = schemaObjects.humanData('3', 50); + + await collection.bulkInsert([human1, human2, human3]); + + const query1 = collection.find({ limit: 2, sort: [{age: 'asc'}] }); + const queryId = query1.persistentQueryId(); + const lwt = now(); + + const cache = new Cache(); + await cache.setItem(`qc:${queryId}`, [human1.passportId, human2.passportId, human3.passportId]); + await 
cache.setItem(`qc:${queryId}:lwt`, `${lwt}`); + + const removeQuery = collection.find({ selector: { passportId: '2' }}); + await removeQuery.remove(); + + clearQueryCache(collection); + + const query2 = collection.find({ limit: 2, sort: [{age: 'asc'}] }); + query2.enableLimitBuffer(5).enablePersistentQueryCache(cache); + + assert.strictEqual(cache.getItem(`qc:${queryId}`).length, 3); + + const result2 = await query2.exec(); + + assert.strictEqual(query1._execOverDatabaseCount, 0); + assert.strictEqual(query2._execOverDatabaseCount, 0); + assert.deepStrictEqual(result2.map(item => item.passportId), ['1', '3']); + + collection.database.destroy(); + }); + + it('old cache values are updated when documents are modified', async () => { + const {collection} = await setUpPersistentQueryCacheCollection(); + + const human1 = schemaObjects.humanData('1', 30); + + await collection.bulkInsert([human1]); + + // fill cache + const cache = new Cache(); + const query1 = collection.find({limit: 1}); + query1.enableLimitBuffer(5).enablePersistentQueryCache(cache); + const queryId = query1.persistentQueryId(); + + const result1 = await query1.exec(); + assert.strictEqual(result1.length, 1); + assert.strictEqual(cache.size, 2); + + clearQueryCache(collection); + + // go back in time + const lwt = now() - 7200 * 1000; // go back in time (2hrs) + await cache.setItem(`qc:${queryId}:lwt`, `${lwt}`); + + const query2 = collection.find({limit: 1}); + query2.enableLimitBuffer(5).enablePersistentQueryCache(cache); + await query2._persistentQueryCacheLoaded; + + await result1[0].remove(); + + await query2.exec(); + + const currLwt = Number(await cache.getItem(`qc:${queryId}:lwt`)); + assert.strictEqual(currLwt > lwt, true); + + collection.database.destroy(); + }); + + it('query from database when cache is empty', async () => { + const {collection} = await setUpPersistentQueryCacheCollection(); + + const human1 = schemaObjects.humanData(); + await collection.bulkInsert([human1]); + + const query 
= collection.find({ limit: 3 }); + + const cache = new Cache(); + query.enableLimitBuffer(5).enablePersistentQueryCache(cache); + + const result = await query.exec(); + + assert.strictEqual(result.length, 1); + assert.strictEqual(query._execOverDatabaseCount, 1); + + collection.database.destroy(); + }); + + it('will re-execute queries if they were cached a long time ago', async () => { + const {collection} = await setUpPersistentQueryCacheCollection(); + + const human1 = schemaObjects.humanData('1', 30); + await collection.bulkInsert([human1]); + + // fill cache + const cache = new Cache(); + const query1 = collection.find({limit: 1}); + query1.enableLimitBuffer(5).enablePersistentQueryCache(cache); + const queryId = query1.persistentQueryId(); + + await query1.exec(); + clearQueryCache(collection); + + // If we restore the same query, it shouldn't need to re-exec: + const querySoon = collection.find({limit: 1}); + querySoon.enableLimitBuffer(5).enablePersistentQueryCache(cache); + await querySoon.exec(); + assert.strictEqual(querySoon._execOverDatabaseCount, 0); + + clearQueryCache(collection); + + // Now, simulate the query having been cached over a week ago. + // It should have to re-exec. 
+ const lwt = now() - RESTORE_QUERY_MAX_TIME_AGO - 1000; + await cache.setItem(`qc:${queryId}:lwt`, `${lwt}`); + + const queryLater = collection.find({limit: 1}); + queryLater.enableLimitBuffer(5).enablePersistentQueryCache(cache); + + await queryLater.exec(); + assert.strictEqual(queryLater._execOverDatabaseCount, 1); + + collection.database.destroy(); + }); + + describe('persisting queries with limit buffers', () => { + async function setUpLimitBufferSituation() { + const {collection} = await setUpPersistentQueryCacheCollection(); + await collection.bulkInsert([ + schemaObjects.humanData('1', 30), + schemaObjects.humanData('2', 40), + schemaObjects.humanData('3', 50), + schemaObjects.humanData('4', 60), + schemaObjects.humanData('5', 70), + ]); + + // wait 500ms so that not all docs are included in lwt + await new Promise((resolve) => { + setTimeout(resolve, 500); + }); + + // Cache a limited query: + const query = collection.find({ limit: 2, sort: [{age: 'asc'}], selector: { age: { $gt: 10 } } }); + const cache = new Cache(); + + return { query, cache, collection }; + } + + function simulateNewSession(collection: RxCollection) { + clearQueryCache(collection); + collection._docCache.cacheItemByDocId.clear(); + } + + // This is how it should operate when we don't persist limit buffers: + it('limit buffer not enabled, still gives correct results through re-execution', async () => { + const { collection, query, cache} = await setUpLimitBufferSituation(); + + // persist with no limit buffer enabled + await query.enablePersistentQueryCache(cache); + const originalResults = await query.exec(); + assert.deepStrictEqual(originalResults.map(h => h.passportId), ['1', '2']); + + // Now, get into a state where that query is no longer in memory (eg new tab) + // (but, the query should still be persisted on disk) + simulateNewSession(collection); + assert.strictEqual(cache.size, 2); + + // while the query is not in memory, remove one of the items from the query results + 
await collection.find({selector: { passportId: '1'}}).update({ + $set: { age: 1 } + }); + + // now when we create the query again, it has no way of knowing how to fill the missing item + const queryAgain = collection.find(query.mangoQuery); + assert.strictEqual(queryAgain._execOverDatabaseCount, 0); + + await queryAgain.enablePersistentQueryCache(cache); + const updatedResults = await queryAgain.exec(); + + // We must re-exec the query to make it correct. + assert.strictEqual(queryAgain._execOverDatabaseCount, 1); + assert.deepStrictEqual(updatedResults.map(h => h.passportId), ['2', '3']); + collection.database.destroy(); + }); + + it('limit buffer enabled, restores normal changes, results correctly with no re-exec', async () => { + const { collection, query, cache} = await setUpLimitBufferSituation(); + + // Persist WITH the limit buffer enabled + query.enableLimitBuffer(5).enablePersistentQueryCache(cache); + + const originalResults = await query.exec(); + assert.deepStrictEqual(originalResults.map(h => h.passportId), ['1', '2']); + assert.strictEqual(query._limitBufferResults?.length, 3); + assert.strictEqual(cache.size, 2); + + // remove one of the items from the query results + await collection.find({ selector: { passportId: '1' } }).update({ + $set: { age: 1 } + }); + + simulateNewSession(collection); + + // now when we create the query again, it should fill in the missing element from the limit buffer + const queryAgain = collection.find(query.mangoQuery); + queryAgain.enableLimitBuffer(5).enablePersistentQueryCache(cache); + + const updatedResults = await queryAgain.exec(); + + // The query should use the limit buffer to restore the results, and not need to re-exec the query + assert.strictEqual(queryAgain._execOverDatabaseCount, 0); + assert.deepStrictEqual(updatedResults.map(h => h.passportId), ['2', '3']); + + // There should now only be 2 items left in the limit buffer, it used the first one up to fill the results + 
assert.strictEqual(queryAgain._limitBufferResults?.length, 2); + + collection.database.destroy(); + }); + + it('limit buffer enabled, restores missing changes, results correctly with no re-exec', async () => { + const { collection, query, cache} = await setUpLimitBufferSituation(); + + // Persist WITH the limit buffer enabled + query.enableLimitBuffer(5).enablePersistentQueryCache(cache); + + const originalResults = await query.exec(); + assert.deepStrictEqual(originalResults.map(h => h.passportId), ['1', '2']); + assert.strictEqual(query._limitBufferResults?.length, 3); + + // uncache the query first, before changes are made + simulateNewSession(collection); + assert.strictEqual(cache.size, 2); + + // remove one of the items from the query results while query is not listening in memory + await collection.find({ selector: { passportId: '1' } }).update({ + $set: { age: 1 } + }); + + // now when we create the query again, it will fill in the missing element from the limit buffer + const queryAgain = collection.find(query.mangoQuery); + queryAgain.enableLimitBuffer(5).enablePersistentQueryCache(cache); + + const updatedResults = await queryAgain.exec(); + + // The query should use the limit buffer to restore the results, and not need to re-exec the query + assert.strictEqual(queryAgain._execOverDatabaseCount, 0); + assert.deepStrictEqual(updatedResults.map(h => h.passportId), ['2', '3']); + + // There should now only be 2 items left in the limit buffer, it used the first one up to fill the results + assert.strictEqual(queryAgain._limitBufferResults?.length, 2); + + collection.database.destroy(); + }); + + it('limit buffer enabled, but gets exhausted', async () => { + const { collection, query, cache} = await setUpLimitBufferSituation(); + + // Persist WITH the limit buffer enabled, but only one doc + query.enableLimitBuffer(1).enablePersistentQueryCache(cache); + await query.exec(); + simulateNewSession(collection); + + // remove two of the items from the query 
results + await collection.find({ selector: { passportId: '1' } }).update({ + $set: { age: 1 } + }); + await collection.find({ selector: { passportId: '2' } }).update({ + $set: { age: 1 } + }); + + // now when we create the query again, it will fill in the missing element from the limit buffer + // but then still need another item to hit the limit=2 + const queryAgain = collection.find(query.mangoQuery); + queryAgain.enableLimitBuffer(1).enablePersistentQueryCache(cache); + + const updatedResults = await queryAgain.exec(); + + // The query will have to still re-exec, but give the correct results + assert.strictEqual(queryAgain._execOverDatabaseCount, 1); + assert.deepStrictEqual(updatedResults.map(h => h.passportId), ['3', '4']); + + // And re-fill the 1 item in limit buffer: + assert.strictEqual(queryAgain._limitBufferResults?.length, 1); + assert.strictEqual(queryAgain._limitBufferResults?.[0].passportId, '5'); + + collection.database.destroy(); + }); + + it('limit buffer enabled, with a bunch of deletions', async () => { + const { collection, query, cache} = await setUpLimitBufferSituation(); + + // Persist WITH the limit buffer enabled + query.enableLimitBuffer(3).enablePersistentQueryCache(cache); + await query.exec(); + simulateNewSession(collection); + + // delete one item from the results, and one item from the limit buffer: + await collection.find({ selector: { passportId: '1' } }).remove(); + await collection.find({ selector: { passportId: '3' } }).remove(); + + const queryAgain = collection.find(query.mangoQuery); + queryAgain.enableLimitBuffer(3).enablePersistentQueryCache(cache); + + const updatedResults = await queryAgain.exec(); + + // The query should be able to fill up from the limit buffer + assert.strictEqual(queryAgain._execOverDatabaseCount, 0); + assert.deepStrictEqual(updatedResults.map(h => h.passportId), ['2', '4']); + assert.strictEqual(queryAgain._limitBufferResults?.length, 1); + + // But if we go further, and use the last items from the 
limit buffer, we'll have to re-exec: + uncacheRxQuery(collection._queryCache, queryAgain); + await collection.find({ selector: { passportId: '4' } }).remove(); + await collection.find({ selector: { passportId: '5' } }).remove(); + + const queryFinal = collection.find(query.mangoQuery); + queryFinal.enableLimitBuffer(3).enablePersistentQueryCache(cache); + + const finalResults = await queryFinal.exec(); + assert.strictEqual(queryFinal._execOverDatabaseCount, 1); + assert.deepStrictEqual(finalResults.map(h => h.passportId), ['2']); + + collection.database.destroy(); + }); + + it('limit buffer enabled, doc added and limit buffer items changed, still restores correctly', async () => { + const { collection, query, cache} = await setUpLimitBufferSituation(); + + // Persist WITH the limit buffer enabled + query.enableLimitBuffer(5).enablePersistentQueryCache(cache); + + await query.exec(); + + simulateNewSession(collection); + + // Let's make 3 changes: + // 1. remove both of the original results + // 2. add in a new doc that should now be in the results + // 3. 
modify one of the items in the limit buffer to change the correct order there + await collection.find({ selector: { passportId: '1' } }).update({ + $set: { age: 1 } + }); + await collection.find({ selector: { passportId: '2' } }).update({ + $set: { age: 1 } + }); + // the new item should now be the first result, since it has the lowest age + await collection.bulkUpsert([ + schemaObjects.humanData('6', 20), + ]); + // change what would be the next result (passport id 3) to still match the filter, but now be last (so not in the results) + await collection.find({ selector: { passportId: '3' } }).update({ + $set: { age: 100 } + }); + + const queryAgain = collection.find(query.mangoQuery); + queryAgain.enableLimitBuffer(5).enablePersistentQueryCache(cache); + const updatedResults = await queryAgain.exec(); + + // The query should use the limit buffer to restore the results, and not need to re-exec the query + assert.strictEqual(queryAgain._execOverDatabaseCount, 0); + + // But it should also correctly fill in the new document into the correct position, and also handle the sort change + assert.deepStrictEqual(updatedResults.map(h => h.passportId), ['6', '4']); + + // The two items in limit buffer should be in the correct order: + assert.deepStrictEqual(queryAgain._limitBufferResults?.map((d) => d.passportId), ['5', '3']); + + collection.database.destroy(); + }); + + it('limit buffer enabled, all items in buffer used but we have more matching non-buffer items', async () => { + const { collection, query, cache} = await setUpLimitBufferSituation(); + + // Persist WITH the limit buffer enabled + query.enableLimitBuffer(2).enablePersistentQueryCache(cache); + await query.exec(); + simulateNewSession(collection); + + // remove the 2 results, so we use up the 2 items in the limit buffer: + await collection.find({ selector: { passportId: '1' } }).remove(); + await collection.find({ selector: { passportId: '2' } }).update({ + $set: { age: 1 } + }); + // But also add in some new 
docs, that match the filter but are sorted last + await collection.bulkUpsert([ + schemaObjects.humanData('6', 90), + schemaObjects.humanData('7', 90), + ]); + + const queryAgain = collection.find(query.mangoQuery); + queryAgain.enableLimitBuffer(2).enablePersistentQueryCache(cache); + + const updatedResults = await queryAgain.exec(); + + // In this case we can use the limit buffer without re-execing, and still get correct results: + assert.strictEqual(queryAgain._execOverDatabaseCount, 0); + assert.deepStrictEqual(updatedResults.map(h => h.passportId), ['3', '4']); + + // But the new limit buffer will be empty -- we can't use the new documents because we don't know + // how they would be sorted relative to other documents + assert.strictEqual(queryAgain._limitBufferResults?.length, 0); + + simulateNewSession(collection); + + // If one more doc is removed from our results, we will HAVE to re-exec to ensure + // correct results, test that: + await collection.find({ selector: { passportId: '3' } }).update({ + $set: { age: 1 } + }); + + const queryFinal = collection.find(query.mangoQuery); + queryFinal.enableLimitBuffer(2).enablePersistentQueryCache(cache); + + const finalResults = await queryFinal.exec(); + + // Query re-execs, and gives correct results: + assert.strictEqual(queryFinal._execOverDatabaseCount, 1); + assert.deepStrictEqual(finalResults.map(h => h.passportId), ['4', '5']); + + // When we re-exec, the limit buffer will also get filled: + assert.deepStrictEqual(queryFinal._limitBufferResults?.map(h => h.passportId), ['6', '7']); + + collection.database.destroy(); + }); + + it('Handles case where we have fewer than LIMIT matches', async () => { + const { collection, cache } = await setUpLimitBufferSituation(); + + const query = collection.find({ limit: 3, sort: [{age: 'asc'}], selector: { age: { $lt: 45 } } }); + query.enableLimitBuffer(2).enablePersistentQueryCache(cache); + await query.exec(); + simulateNewSession(collection); + + // Remove something, 
still correct and no-re-exec + await collection.find({ selector: { passportId: '1' } }).remove(); + + const queryRemoved = collection.find(query.mangoQuery); + queryRemoved.enableLimitBuffer(2).enablePersistentQueryCache(cache); + const removedResults = await queryRemoved.exec(); + assert.strictEqual(queryRemoved._execOverDatabaseCount, 0); + assert.deepStrictEqual(removedResults.map(h => h.passportId), ['2']); + + simulateNewSession(collection); + + // Now add some matching docs. Since they change, they should now be in results with no re-exec. + await collection.find({ selector: { passportId: '5' } }).update({ + $set: { age: 1 } + }); + await collection.bulkUpsert([ + schemaObjects.humanData('6', 2), + schemaObjects.humanData('7', 3), + ]); + const queryAdded = collection.find(query.mangoQuery); + queryAdded.enableLimitBuffer(2).enablePersistentQueryCache(cache); + const addedResults = await queryAdded.exec(); + assert.strictEqual(queryAdded._execOverDatabaseCount, 0); + assert.deepStrictEqual(addedResults.map(h => h.passportId), ['5', '6', '7']); + + collection.database.destroy(); + }); + }); + }); }); diff --git a/tsconfig.json b/tsconfig.json index db8f130695a..e5e15a2cf6a 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -2,7 +2,7 @@ // @link https://iamturns.com/typescript-babel/ "compilerOptions": { // Target latest version of ECMAScript. - "target": "es2022", + "target": "esnext", // Search under node_modules for non-relative imports. // https://www.youtube.com/watch?v=H91aqUHn8sE "moduleResolution": "node", @@ -11,7 +11,7 @@ "allowJs": false, // Enable strictest settings like strictNullChecks & noImplicitAny. "strict": true, - // Disallow features that require + // Disallow features that require // cross-file information for emit. "isolatedModules": false, // Import non-ES modules as default imports.