From a2cae3127e10d24d7df0d21c128da27f54510438 Mon Sep 17 00:00:00 2001 From: Steven Oderayi Date: Tue, 20 Aug 2024 18:44:38 +0100 Subject: [PATCH] feat(mojaloop/#3817): harden fx quotes (#345) --- .circleci/config.yml | 20 - .ncurc.yaml | 2 +- Dockerfile | 2 +- docker-compose.yml | 41 +- package-lock.json | 1176 +++++++++------------ package.json | 2 +- scripts/env.sh | 12 + src/api/routes.js | 5 +- src/constants.js | 9 +- src/data/cachedDatabase.js | 2 +- src/handlers/init.js | 8 +- src/lib/util.js | 2 +- src/model/bulkQuotes.js | 13 +- src/model/fxQuotes.js | 146 ++- src/model/quotes.js | 108 +- test/integration/fxQuotes.test.js | 550 ++++++++++ test/integration/postRequest.test.js | 270 ++--- test/integration/putCallback.test.js | 532 ++-------- test/integration/scripts/env.sh | 5 +- test/integration/scripts/start.sh | 14 +- test/mocks.js | 189 +++- test/unit/api/routes.test.js | 42 + test/unit/handlers/QuotingHandler.test.js | 108 +- test/unit/handlers/init.test.js | 46 +- test/unit/lib/proxy.test.js | 2 +- test/unit/mocks.js | 108 +- test/unit/model/fxQuotes.test.js | 393 ++++++- test/unit/model/quotes.test.js | 27 + 28 files changed, 2320 insertions(+), 1514 deletions(-) create mode 100644 scripts/env.sh create mode 100644 test/integration/fxQuotes.test.js create mode 100644 test/unit/api/routes.test.js diff --git a/.circleci/config.yml b/.circleci/config.yml index 424e93e1..442728be 100755 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -281,19 +281,6 @@ jobs: - run: name: Create dir for test results command: mkdir -p ./test/results -# - run: -# name: Build and start the docker containers -# command: | -# ## This is not needed as we are only doing narrow-integration tests. -# # docker-compose build -# ## Lets pull only the Services needed for the Integration Test -# docker-compose pull mysql kafka init-kafka -# ## Lets startup only the Services needed for the Integration Test -# docker-compose up -d mysql kafka init-kafka -# ## Check straight away to see if any containers have exited -# docker-compose ps -# ## wait for services to be up and running -# npm run wait-4-docker - run: name: Prepare test environment command: | @@ -305,13 +292,6 @@ jobs: command: | npm rebuild npm run test:int -# environment: -# ENDPOINT_URL: http://localhost:4545/notification -# - store_artifacts: -# path: ./test/results -# destination: test -# - store_test_results: -# path: ./test/results vulnerability-check: executor: default-docker diff --git a/.ncurc.yaml b/.ncurc.yaml index cc4674fd..891f9b69 100644 --- a/.ncurc.yaml +++ b/.ncurc.yaml @@ -3,5 +3,5 @@ reject: [ "json-rules-engine", "eslint", "@mojaloop/sdk-standard-components", # Version 17.4.0 introduced the bug: this.logger.isDebugEnabled is not a function - "@mojaloop/central-services-shared" # This should be removed as soon as all vlaidations on the fx feature has been completed and cs-shared fx feature merged to masin + "@mojaloop/central-services-shared" # This should be removed as soon as all validations on the fx feature has been completed and cs-shared fx feature merged to main ] diff --git a/Dockerfile b/Dockerfile index 42647daf..fdbd6889 100644 --- a/Dockerfile +++ b/Dockerfile @@ -11,7 +11,7 @@ ARG NODE_VERSION=lts-alpine # # Build Image -FROM node:${NODE_VERSION} as builder +FROM node:${NODE_VERSION} AS builder USER root WORKDIR /opt/app diff --git a/docker-compose.yml b/docker-compose.yml index 92b702fd..c40f1391 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,5 +1,3 @@ -version: "3.7" - x-depends-on: 
&dependsOnMysqlAndKafka mysql: condition: service_healthy @@ -26,17 +24,23 @@ x-quoting-service: "ingServiceBase - ./secrets/:/opt/app/secrets/ depends_on: <<: *dependsOnMysqlAndKafka + extra_hosts: + - "redis-node-0:host-gateway" # central-ledger: # condition: service_healthy # to perform test dfsp onboarding +# @see https://uninterrupted.tech/blog/hassle-free-redis-cluster-deployment-using-docker/ x-redis-node: &REDIS_NODE image: docker.io/bitnami/redis-cluster:6.2.14 environment: &REDIS_ENVS ALLOW_EMPTY_PASSWORD: 'yes' - REDIS_NODES: redis-node-0 redis-node-1 redis-node-2 redis-node-3 redis-node-4 redis-node-5 + REDIS_CLUSTER_DYNAMIC_IPS: 'no' + REDIS_CLUSTER_ANNOUNCE_IP: ${REDIS_CLUSTER_ANNOUNCE_IP} + REDIS_NODES: localhost:6379 localhost:6380 localhost:6381 localhost:6382 localhost:6383 localhost:6384 healthcheck: test: [ "CMD", "redis-cli", "ping" ] timeout: 2s + network_mode: host x-healthcheck-params: &healthcheckParams interval: 30s @@ -56,7 +60,7 @@ services: quoting-service-handler: <<: *quotingServiceBase - command: npm run start:handlers + command: npm run start:handlers:debug ports: - "3003:3003" - "29229:9229" @@ -114,25 +118,48 @@ services: environment: <<: *REDIS_ENVS REDIS_CLUSTER_CREATOR: 'yes' + REDIS_PORT_NUMBER: 6379 depends_on: - redis-node-1 - redis-node-2 - redis-node-3 - redis-node-4 - redis-node-5 - ports: - - "6379:6379" - redis-node-1: <<: *REDIS_NODE + environment: + <<: *REDIS_ENVS + REDIS_PORT_NUMBER: 6380 + ports: + - "16380:16380" redis-node-2: <<: *REDIS_NODE + environment: + <<: *REDIS_ENVS + REDIS_PORT_NUMBER: 6381 + ports: + - "16381:16381" redis-node-3: <<: *REDIS_NODE + environment: + <<: *REDIS_ENVS + REDIS_PORT_NUMBER: 6382 + ports: + - "16382:16382" redis-node-4: <<: *REDIS_NODE + environment: + <<: *REDIS_ENVS + REDIS_PORT_NUMBER: 6383 + ports: + - "16383:16383" redis-node-5: <<: *REDIS_NODE + environment: + <<: *REDIS_ENVS + REDIS_PORT_NUMBER: 6384 + ports: + - "16384:16384" ## To use with proxyCache.type === 'redis' # redis: diff --git a/package-lock.json b/package-lock.json index 2dd5cd54..03711b47 100644 --- a/package-lock.json +++ b/package-lock.json @@ -77,9 +77,9 @@ } }, "node_modules/@apidevtools/json-schema-ref-parser": { - "version": "11.6.4", - "resolved": "https://registry.npmjs.org/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-11.6.4.tgz", - "integrity": "sha512-9K6xOqeevacvweLGik6LnZCb1fBtCOSIWQs8d096XGeqoLKC33UVMGz9+77Gw44KvbH4pKcQPWo4ZpxkXYj05w==", + "version": "11.7.0", + "resolved": "https://registry.npmjs.org/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-11.7.0.tgz", + "integrity": "sha512-pRrmXMCwnmrkS3MLgAIW5dXRzeTv6GLjkjb4HmxNnvAKXN1Nfzp4KmGADBQvlVUcqi+a5D+hfGDLLnd5NnYxog==", "dependencies": { "@jsdevtools/ono": "^7.1.3", "@types/json-schema": "^7.0.15", @@ -106,30 +106,30 @@ } }, "node_modules/@babel/compat-data": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.24.7.tgz", - "integrity": "sha512-qJzAIcv03PyaWqxRgO4mSU3lihncDT296vnyuE2O8uA4w3UHWI4S3hgeZd1L8W1Bft40w9JxJ2b412iDUFFRhw==", + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.25.2.tgz", + "integrity": "sha512-bYcppcpKBvX4znYaPEeFau03bp89ShqNMLs+rmdptMw+heSZh9+z84d2YG+K7cYLbWwzdjtDoW/uqZmPjulClQ==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/core": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.24.7.tgz", - "integrity": 
"sha512-nykK+LEK86ahTkX/3TgauT0ikKoNCfKHEaZYTUVupJdTLzGNvrblu4u6fa7DhZONAltdf8e662t/abY8idrd/g==", + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.25.2.tgz", + "integrity": "sha512-BBt3opiCOxUr9euZ5/ro/Xv8/V7yJ5bjYMqG/C1YAo8MIKAnumZalCN+msbci3Pigy4lIQfPUpfMM27HMGaYEA==", "dev": true, "dependencies": { "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.24.7", - "@babel/generator": "^7.24.7", - "@babel/helper-compilation-targets": "^7.24.7", - "@babel/helper-module-transforms": "^7.24.7", - "@babel/helpers": "^7.24.7", - "@babel/parser": "^7.24.7", - "@babel/template": "^7.24.7", - "@babel/traverse": "^7.24.7", - "@babel/types": "^7.24.7", + "@babel/generator": "^7.25.0", + "@babel/helper-compilation-targets": "^7.25.2", + "@babel/helper-module-transforms": "^7.25.2", + "@babel/helpers": "^7.25.0", + "@babel/parser": "^7.25.0", + "@babel/template": "^7.25.0", + "@babel/traverse": "^7.25.2", + "@babel/types": "^7.25.2", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -154,12 +154,12 @@ } }, "node_modules/@babel/generator": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.24.7.tgz", - "integrity": "sha512-oipXieGC3i45Y1A41t4tAqpnEZWgB/lC6Ehh6+rOviR5XWpTtMmLN+fGjz9vOiNRt0p6RtO6DtD0pdU3vpqdSA==", + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.25.0.tgz", + "integrity": "sha512-3LEEcj3PVW8pW2R1SR1M89g/qrYk/m/mB/tLqn7dn4sbBUQyTqnlod+II2U4dqiGtUmkcnAmkMDralTFZttRiw==", "dev": true, "dependencies": { - "@babel/types": "^7.24.7", + "@babel/types": "^7.25.0", "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.25", "jsesc": "^2.5.1" @@ -169,14 +169,14 @@ } }, "node_modules/@babel/helper-compilation-targets": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.24.7.tgz", - "integrity": "sha512-ctSdRHBi20qWOfy27RUb4Fhp07KSJ3sXcuSvTrXrc4aG8NSYDo1ici3Vhg9bg69y5bj0Mr1lh0aeEgTvc12rMg==", + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.25.2.tgz", + "integrity": "sha512-U2U5LsSaZ7TAt3cfaymQ8WHh0pxvdHoEk6HVpaexxixjyEquMh0L0YNJNM6CTGKMXV1iksi0iZkGw4AcFkPaaw==", "dev": true, "dependencies": { - "@babel/compat-data": "^7.24.7", - "@babel/helper-validator-option": "^7.24.7", - "browserslist": "^4.22.2", + "@babel/compat-data": "^7.25.2", + "@babel/helper-validator-option": "^7.24.8", + "browserslist": "^4.23.1", "lru-cache": "^5.1.1", "semver": "^6.3.1" }, @@ -202,49 +202,6 @@ "semver": "bin/semver.js" } }, - "node_modules/@babel/helper-compilation-targets/node_modules/yallist": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", - "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", - "dev": true - }, - "node_modules/@babel/helper-environment-visitor": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-environment-visitor/-/helper-environment-visitor-7.24.7.tgz", - "integrity": "sha512-DoiN84+4Gnd0ncbBOM9AZENV4a5ZiL39HYMyZJGZ/AZEykHYdJw0wW3kdcsh9/Kn+BRXHLkkklZ51ecPKmI1CQ==", - "dev": true, - "dependencies": { - "@babel/types": "^7.24.7" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-function-name": { - "version": "7.24.7", - "resolved": 
"https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.24.7.tgz", - "integrity": "sha512-FyoJTsj/PEUWu1/TYRiXTIHc8lbw+TDYkZuoE43opPS5TrI7MyONBE1oNvfguEXAD9yhQRrVBnXdXzSLQl9XnA==", - "dev": true, - "dependencies": { - "@babel/template": "^7.24.7", - "@babel/types": "^7.24.7" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-hoist-variables": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-hoist-variables/-/helper-hoist-variables-7.24.7.tgz", - "integrity": "sha512-MJJwhkoGy5c4ehfoRyrJ/owKeMl19U54h27YYftT0o2teQ3FJ3nQUf/I3LlJsX4l3qlw7WRXUmiyajvHXoTubQ==", - "dev": true, - "dependencies": { - "@babel/types": "^7.24.7" - }, - "engines": { - "node": ">=6.9.0" - } - }, "node_modules/@babel/helper-module-imports": { "version": "7.24.7", "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.7.tgz", @@ -259,16 +216,15 @@ } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.24.7.tgz", - "integrity": "sha512-1fuJEwIrp+97rM4RWdO+qrRsZlAeL1lQJoPqtCYWv0NL115XM93hIH4CSRln2w52SqvmY5hqdtauB6QFCDiZNQ==", + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.25.2.tgz", + "integrity": "sha512-BjyRAbix6j/wv83ftcVJmBt72QtHI56C7JXZoG2xATiLpmoC7dpd8WnkikExHDVPpi/3qCmO6WY1EaXOluiecQ==", "dev": true, "dependencies": { - "@babel/helper-environment-visitor": "^7.24.7", "@babel/helper-module-imports": "^7.24.7", "@babel/helper-simple-access": "^7.24.7", - "@babel/helper-split-export-declaration": "^7.24.7", - "@babel/helper-validator-identifier": "^7.24.7" + "@babel/helper-validator-identifier": "^7.24.7", + "@babel/traverse": "^7.25.2" }, "engines": { "node": ">=6.9.0" @@ -278,9 +234,9 @@ } }, "node_modules/@babel/helper-plugin-utils": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.7.tgz", - "integrity": "sha512-Rq76wjt7yz9AAc1KnlRKNAi/dMSVWgDRx43FHoJEbcYU6xOWaE2dVPwcdTukJrjxS65GITyfbvEYHvkirZ6uEg==", + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.8.tgz", + "integrity": "sha512-FFWx5142D8h2Mgr/iPVGH5G7w6jDn4jUSpZTyDnQO0Yn7Ks2Kuz6Pci8H6MPCoUJegd/UZQ3tAvfLCxQSnWWwg==", "dev": true, "engines": { "node": ">=6.9.0" @@ -299,22 +255,10 @@ "node": ">=6.9.0" } }, - "node_modules/@babel/helper-split-export-declaration": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.24.7.tgz", - "integrity": "sha512-oy5V7pD+UvfkEATUKvIjvIAH/xCzfsFVw7ygW2SI6NClZzquT+mwdTfgfdbUiceh6iQO0CHtCPsyze/MZ2YbAA==", - "dev": true, - "dependencies": { - "@babel/types": "^7.24.7" - }, - "engines": { - "node": ">=6.9.0" - } - }, "node_modules/@babel/helper-string-parser": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.7.tgz", - "integrity": "sha512-7MbVt6xrwFQbunH2DNQsAP5sTGxfqQtErvBIvIMi6EQnbgUOuVYanvREcmFrOPhoXBrTtjhhP+lW+o5UfK+tDg==", + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.8.tgz", + "integrity": "sha512-pO9KhhRcuUyGnJWwyEgnRJTSIZHiT+vMD0kPeD+so0l7mxkMT19g3pjY9GTnHySck/hDzq+dtW/4VgnMkippsQ==", "dev": true, "engines": { "node": ">=6.9.0" @@ 
-330,22 +274,22 @@ } }, "node_modules/@babel/helper-validator-option": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.24.7.tgz", - "integrity": "sha512-yy1/KvjhV/ZCL+SM7hBrvnZJ3ZuT9OuZgIJAGpPEToANvc3iM6iDvBnRjtElWibHU6n8/LPR/EjX9EtIEYO3pw==", + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.24.8.tgz", + "integrity": "sha512-xb8t9tD1MHLungh/AIoWYN+gVHaB9kwlu8gffXGSt3FFEIT7RjS+xWbc2vUD1UTZdIpKj/ab3rdqJ7ufngyi2Q==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helpers": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.24.7.tgz", - "integrity": "sha512-NlmJJtvcw72yRJRcnCmGvSi+3jDEg8qFu3z0AFoymmzLx5ERVWyzd9kVXr7Th9/8yIJi2Zc6av4Tqz3wFs8QWg==", + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.25.0.tgz", + "integrity": "sha512-MjgLZ42aCm0oGjJj8CtSM3DB8NOOf8h2l7DCTePJs29u+v7yO/RBX9nShlKMgFnRks/Q4tBAe7Hxnov9VkGwLw==", "dev": true, "dependencies": { - "@babel/template": "^7.24.7", - "@babel/types": "^7.24.7" + "@babel/template": "^7.25.0", + "@babel/types": "^7.25.0" }, "engines": { "node": ">=6.9.0" @@ -438,10 +382,13 @@ } }, "node_modules/@babel/parser": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.24.7.tgz", - "integrity": "sha512-9uUYRm6OqQrCqQdG1iCBwBPZgN8ciDBro2nIOFaiRz1/BCxaI7CNvQbDHvsArAC7Tw9Hda/B3U+6ui9u4HWXPw==", + "version": "7.25.3", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.25.3.tgz", + "integrity": "sha512-iLTJKDbJ4hMvFPgQwwsVoxtHyWpKKPBrxkANrSYewDPaPpT5py5yeVkgPIJ7XYXhndxJpaA3PyALSXQ7u8e/Dw==", "dev": true, + "dependencies": { + "@babel/types": "^7.25.2" + }, "bin": { "parser": "bin/babel-parser.js" }, @@ -627,33 +574,30 @@ } }, "node_modules/@babel/template": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.24.7.tgz", - "integrity": "sha512-jYqfPrU9JTF0PmPy1tLYHW4Mp4KlgxJD9l2nP9fD6yT/ICi554DmrWBAEYpIelzjHf1msDP3PxJIRt/nFNfBig==", + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.25.0.tgz", + "integrity": "sha512-aOOgh1/5XzKvg1jvVz7AVrx2piJ2XBi227DHmbY6y+bM9H2FlN+IfecYu4Xl0cNiiVejlsCri89LUsbj8vJD9Q==", "dev": true, "dependencies": { "@babel/code-frame": "^7.24.7", - "@babel/parser": "^7.24.7", - "@babel/types": "^7.24.7" + "@babel/parser": "^7.25.0", + "@babel/types": "^7.25.0" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.24.7.tgz", - "integrity": "sha512-yb65Ed5S/QAcewNPh0nZczy9JdYXkkAbIsEo+P7BE7yO3txAY30Y/oPa3QkQ5It3xVG2kpKMg9MsdxZaO31uKA==", + "version": "7.25.3", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.25.3.tgz", + "integrity": "sha512-HefgyP1x754oGCsKmV5reSmtV7IXj/kpaE1XYY+D9G5PvKKoFfSbiS4M77MdjuwlZKDIKFCffq9rPU+H/s3ZdQ==", "dev": true, "dependencies": { "@babel/code-frame": "^7.24.7", - "@babel/generator": "^7.24.7", - "@babel/helper-environment-visitor": "^7.24.7", - "@babel/helper-function-name": "^7.24.7", - "@babel/helper-hoist-variables": "^7.24.7", - "@babel/helper-split-export-declaration": "^7.24.7", - "@babel/parser": "^7.24.7", - "@babel/types": "^7.24.7", + "@babel/generator": "^7.25.0", + "@babel/parser": "^7.25.3", + "@babel/template": "^7.25.0", + "@babel/types": "^7.25.2", "debug": "^4.3.1", 
"globals": "^11.1.0" }, @@ -671,12 +615,12 @@ } }, "node_modules/@babel/types": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.24.7.tgz", - "integrity": "sha512-XEFXSlxiG5td2EJRe8vOmRbaXVgfcBlszKujvVmWIK/UpywWljQCfzAv3RQCGujWQ1RD4YYWEAqDXfuJiy8f5Q==", + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.25.2.tgz", + "integrity": "sha512-YTnYtra7W9e6/oAZEHj0bJehPRUlLH9/fbpT5LfB0NhQXyALCRkRs3zH9v07IYhkgpqX6Z78FnuccZr/l4Fs4Q==", "dev": true, "dependencies": { - "@babel/helper-string-parser": "^7.24.7", + "@babel/helper-string-parser": "^7.24.8", "@babel/helper-validator-identifier": "^7.24.7", "to-fast-properties": "^2.0.0" }, @@ -690,6 +634,14 @@ "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", "dev": true }, + "node_modules/@colors/colors": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.6.0.tgz", + "integrity": "sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==", + "engines": { + "node": ">=0.1.90" + } + }, "node_modules/@dabh/diagnostics": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.3.tgz", @@ -716,9 +668,9 @@ } }, "node_modules/@eslint-community/regexpp": { - "version": "4.10.1", - "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.10.1.tgz", - "integrity": "sha512-Zm2NGpWELsQAD1xsJzGQpYfvICSsFkEpU0jxBjfdC6uNEWXcHnfs9hScFWtXVDVl+rBQJGrl4g1vcKIejpH9dA==", + "version": "4.11.0", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.11.0.tgz", + "integrity": "sha512-G/M/tIiMrTAxEWRfLfQJMmGNX28IxBg4PBz8XqQhqUHLFI6TL2htpIB1iQCj144V5ee/JaKyT9/WZ0MGZWfA7A==", "dev": true, "engines": { "node": "^12.0.0 || ^14.0.0 || >=16.0.0" @@ -784,9 +736,9 @@ "integrity": "sha512-5Aap/GaRupgNx/feGBwLLTVv8OQFfv3pq2lPRzPg9R+IOBnDgghTGW7l7EuVXOvg5cc/xSAlRW8rBrjIC3Nvqw==" }, "node_modules/@grpc/grpc-js": { - "version": "1.10.9", - "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.10.9.tgz", - "integrity": "sha512-5tcgUctCG0qoNyfChZifz2tJqbRbXVO9J7X6duFcOjY3HUNCxg5D0ZCK7EP9vIcZ0zRpLU9bWkyCqVCLZ46IbQ==", + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.11.1.tgz", + "integrity": "sha512-gyt/WayZrVPH2w/UTLansS7F9Nwld472JxxaETamrM8HNlsa+jSLNyKAZmhxI2Me4c3mQHFiS1wWHDY1g1Kthw==", "dependencies": { "@grpc/proto-loader": "^0.7.13", "@js-sdsl/ordered-map": "^4.4.2" @@ -1438,12 +1390,6 @@ "node": ">=8" } }, - "node_modules/@istanbuljs/load-nyc-config/node_modules/sprintf-js": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", - "dev": true - }, "node_modules/@istanbuljs/schema": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", @@ -1764,9 +1710,9 @@ } }, "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.4.15", - "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", - "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==", + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": 
"sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", "dev": true }, "node_modules/@jridgewell/trace-mapping": { @@ -1883,48 +1829,6 @@ "winston": "3.13.1" } }, - "node_modules/@mojaloop/central-services-logger/node_modules/@colors/colors": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.6.0.tgz", - "integrity": "sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==", - "engines": { - "node": ">=0.1.90" - } - }, - "node_modules/@mojaloop/central-services-logger/node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/@mojaloop/central-services-logger/node_modules/winston": { - "version": "3.13.1", - "resolved": "https://registry.npmjs.org/winston/-/winston-3.13.1.tgz", - "integrity": "sha512-SvZit7VFNvXRzbqGHsv5KSmgbEYR5EiQfDAL9gxYkRqa934Hnk++zze0wANKtMHcy/gI4W/3xmSDwlhf865WGw==", - "dependencies": { - "@colors/colors": "^1.6.0", - "@dabh/diagnostics": "^2.0.2", - "async": "^3.2.3", - "is-stream": "^2.0.0", - "logform": "^2.6.0", - "one-time": "^1.0.0", - "readable-stream": "^3.4.0", - "safe-stable-stringify": "^2.3.1", - "stack-trace": "0.0.x", - "triple-beam": "^1.3.0", - "winston-transport": "^4.7.0" - }, - "engines": { - "node": ">= 12.0.0" - } - }, "node_modules/@mojaloop/central-services-metrics": { "version": "12.0.8", "resolved": "https://registry.npmjs.org/@mojaloop/central-services-metrics/-/central-services-metrics-12.0.8.tgz", @@ -2068,11 +1972,45 @@ } } }, + "node_modules/@mojaloop/event-sdk/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/@mojaloop/event-sdk/node_modules/tslib": { "version": "2.6.3", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.3.tgz", "integrity": "sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ==" }, + "node_modules/@mojaloop/event-sdk/node_modules/winston": { + "version": "3.13.0", + "resolved": "https://registry.npmjs.org/winston/-/winston-3.13.0.tgz", + "integrity": "sha512-rwidmA1w3SE4j0E5MuIufFhyJPBDG7Nu71RkZor1p2+qHvJSZ9GYDA81AyleQcZbh/+V6HjeBdfnTZJm9rSeQQ==", + "dependencies": { + "@colors/colors": "^1.6.0", + "@dabh/diagnostics": "^2.0.2", + "async": "^3.2.3", + "is-stream": "^2.0.0", + "logform": "^2.4.0", + "one-time": "^1.0.0", + "readable-stream": "^3.4.0", + "safe-stable-stringify": "^2.3.1", + "stack-trace": "0.0.x", + "triple-beam": "^1.3.0", + "winston-transport": "^4.7.0" + }, + "engines": { + "node": ">= 12.0.0" + } + }, "node_modules/@mojaloop/inter-scheme-proxy-cache-lib": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/@mojaloop/inter-scheme-proxy-cache-lib/-/inter-scheme-proxy-cache-lib-2.2.0.tgz", @@ -2357,11 +2295,11 @@ "dev": true }, "node_modules/@types/node": { - "version": "20.14.2", - "resolved": 
"https://registry.npmjs.org/@types/node/-/node-20.14.2.tgz", - "integrity": "sha512-xyu6WAMVwv6AKFLB+e/7ySZVr/0zLCzOa7rSpq6jNwpqOrUbcACDWC+53d4n2QHOnDou0fbIsg8wZu/sxrnI4Q==", + "version": "22.1.0", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.1.0.tgz", + "integrity": "sha512-AOmuRF0R2/5j1knA3c6G3HOk523Ga+l+ZXltX8SF1+5oqcXijjfTd8fY3XRZqSihEu9XhtQnKYLmkFaoxgsJHw==", "dependencies": { - "undici-types": "~5.26.4" + "undici-types": "~6.13.0" } }, "node_modules/@types/normalize-package-data": { @@ -2382,9 +2320,9 @@ "integrity": "sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw==" }, "node_modules/@types/yargs": { - "version": "17.0.32", - "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.32.tgz", - "integrity": "sha512-xQ67Yc/laOG5uMfX/093MRlGGCIBzZMarVa+gfNKJxWAIgykYpVGkBdbqEzGDDfCrVUj6Hiff4mTZ5BA6TmAog==", + "version": "17.0.33", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.33.tgz", + "integrity": "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==", "dev": true, "dependencies": { "@types/yargs-parser": "*" @@ -2397,16 +2335,16 @@ "dev": true }, "node_modules/@typescript-eslint/scope-manager": { - "version": "7.13.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-7.13.0.tgz", - "integrity": "sha512-ZrMCe1R6a01T94ilV13egvcnvVJ1pxShkE0+NDjDzH4nvG1wXpwsVI5bZCvE7AEDH1mXEx5tJSVR68bLgG7Dng==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.0.1.tgz", + "integrity": "sha512-NpixInP5dm7uukMiRyiHjRKkom5RIFA4dfiHvalanD2cF0CLUuQqxfg8PtEUo9yqJI2bBhF+pcSafqnG3UBnRQ==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.13.0", - "@typescript-eslint/visitor-keys": "7.13.0" + "@typescript-eslint/types": "8.0.1", + "@typescript-eslint/visitor-keys": "8.0.1" }, "engines": { - "node": "^18.18.0 || >=20.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "type": "opencollective", @@ -2414,12 +2352,12 @@ } }, "node_modules/@typescript-eslint/types": { - "version": "7.13.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-7.13.0.tgz", - "integrity": "sha512-QWuwm9wcGMAuTsxP+qz6LBBd3Uq8I5Nv8xb0mk54jmNoCyDspnMvVsOxI6IsMmway5d1S9Su2+sCKv1st2l6eA==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.0.1.tgz", + "integrity": "sha512-PpqTVT3yCA/bIgJ12czBuE3iBlM3g4inRSC5J0QOdQFAn07TYrYEQBBKgXH1lQpglup+Zy6c1fxuwTk4MTNKIw==", "dev": true, "engines": { - "node": "^18.18.0 || >=20.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "type": "opencollective", @@ -2427,13 +2365,13 @@ } }, "node_modules/@typescript-eslint/typescript-estree": { - "version": "7.13.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-7.13.0.tgz", - "integrity": "sha512-cAvBvUoobaoIcoqox1YatXOnSl3gx92rCZoMRPzMNisDiM12siGilSM4+dJAekuuHTibI2hVC2fYK79iSFvWjw==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.0.1.tgz", + "integrity": "sha512-8V9hriRvZQXPWU3bbiUV4Epo7EvgM6RTs+sUmxp5G//dBGy402S7Fx0W0QkB2fb4obCF8SInoUzvTYtc3bkb5w==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.13.0", - "@typescript-eslint/visitor-keys": "7.13.0", + "@typescript-eslint/types": "8.0.1", + "@typescript-eslint/visitor-keys": "8.0.1", "debug": "^4.3.4", "globby": 
"^11.1.0", "is-glob": "^4.0.3", @@ -2442,7 +2380,7 @@ "ts-api-utils": "^1.3.0" }, "engines": { - "node": "^18.18.0 || >=20.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "type": "opencollective", @@ -2464,9 +2402,9 @@ } }, "node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": { - "version": "9.0.4", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", - "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", "dev": true, "dependencies": { "brace-expansion": "^2.0.1" @@ -2479,16 +2417,16 @@ } }, "node_modules/@typescript-eslint/visitor-keys": { - "version": "7.13.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-7.13.0.tgz", - "integrity": "sha512-nxn+dozQx+MK61nn/JP+M4eCkHDSxSLDpgE3WcQo0+fkjEolnaB5jswvIKC4K56By8MMgIho7f1PVxERHEo8rw==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.0.1.tgz", + "integrity": "sha512-W5E+o0UfUcK5EgchLZsyVWqARmsM7v54/qEq6PY3YI5arkgmCzHiuk0zKSJJbm71V0xdRna4BGomkCTXz2/LkQ==", "dev": true, "dependencies": { - "@typescript-eslint/types": "7.13.0", + "@typescript-eslint/types": "8.0.1", "eslint-visitor-keys": "^3.4.3" }, "engines": { - "node": "^18.18.0 || >=20.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "type": "opencollective", @@ -2514,9 +2452,9 @@ } }, "node_modules/acorn": { - "version": "8.11.3", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz", - "integrity": "sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==", + "version": "8.12.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", + "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", "dev": true, "bin": { "acorn": "bin/acorn" @@ -2835,18 +2773,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/array.prototype.toreversed": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/array.prototype.toreversed/-/array.prototype.toreversed-1.1.2.tgz", - "integrity": "sha512-wwDCoT4Ck4Cz7sLtgUmzR5UV3YF5mFHUlbChCzZBQZ+0m2cl/DH3tKgvphv1nKgFsJ48oCSg6p91q2Vm0I/ZMA==", - "dev": true, - "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "es-shim-unscopables": "^1.0.0" - } - }, "node_modules/array.prototype.tosorted": { "version": "1.1.4", "resolved": "https://registry.npmjs.org/array.prototype.tosorted/-/array.prototype.tosorted-1.1.4.tgz", @@ -3168,17 +3094,6 @@ "ms": "2.0.0" } }, - "node_modules/body-parser/node_modules/iconv-lite": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/body-parser/node_modules/ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", @@ -3229,9 +3144,9 @@ "integrity": "sha512-UcQusNAX7nnuXf9tvvLRC6DtZ8/YkDJRtTIbiA5ayb8MehwtSwtkvd5ZTXNLUTTtU6J/yJsi+1LJXqgRz1obwg==" }, "node_modules/browserslist": { - 
"version": "4.23.1", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.1.tgz", - "integrity": "sha512-TUfofFo/KsK/bWZ9TWQ5O26tsWW4Uhmt8IYklbnUa70udB6P2wA7w7o4PY4muaEPBQaAX+CEnmmIA41NVHtPVw==", + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.3.tgz", + "integrity": "sha512-btwCFJVjI4YWDNfau8RhZ+B1Q/VLoUITrm3RlP6y1tYGWIOa+InuYiRGXUBXo8nA1qKmHMyLB/iVQg5TT4eFoA==", "dev": true, "funding": [ { @@ -3248,10 +3163,10 @@ } ], "dependencies": { - "caniuse-lite": "^1.0.30001629", - "electron-to-chromium": "^1.4.796", - "node-releases": "^2.0.14", - "update-browserslist-db": "^1.0.16" + "caniuse-lite": "^1.0.30001646", + "electron-to-chromium": "^1.5.4", + "node-releases": "^2.0.18", + "update-browserslist-db": "^1.1.0" }, "bin": { "browserslist": "cli.js" @@ -3420,9 +3335,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001632", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001632.tgz", - "integrity": "sha512-udx3o7yHJfUxMLkGohMlVHCvFvWmirKh9JAH/d7WOLPetlH+LTL5cocMZ0t7oZx/mdlOWXti97xLZWc8uURRHg==", + "version": "1.0.30001651", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001651.tgz", + "integrity": "sha512-9Cf+Xv1jJNe1xPZLGuUXLNkE1BoDkqRqYyFJ9TDYSqhduqA4hu4oR9HluGoWYQC/aj8WHjsGVV+bwkh0+tegRg==", "dev": true, "funding": [ { @@ -3467,9 +3382,9 @@ } }, "node_modules/chance": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/chance/-/chance-1.1.11.tgz", - "integrity": "sha512-kqTg3WWywappJPqtgrdvbA380VoXO2eu9VCV895JgbyHsaErXdyHK9LOZ911OvAk6L0obK7kDk9CGs8+oBawVA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/chance/-/chance-1.1.12.tgz", + "integrity": "sha512-vVBIGQVnwtUG+SYe0ge+3MvF78cvSpuCOEUJr7sVEk2vSBuMW6OXNJjSzdtzrlxNUEaoqH2GBd5Y/+18BEB01Q==", "dev": true }, "node_modules/char-regex": { @@ -3594,22 +3509,6 @@ "node": ">=12" } }, - "node_modules/cliui/node_modules/wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, "node_modules/clone": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz", @@ -3943,45 +3842,6 @@ "node": ">=10" } }, - "node_modules/conventional-changelog-core/node_modules/hosted-git-info": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", - "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-core/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/conventional-changelog-core/node_modules/normalize-package-data": { - "version": "3.0.3", - "resolved": 
"https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz", - "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==", - "dev": true, - "dependencies": { - "hosted-git-info": "^4.0.1", - "is-core-module": "^2.5.0", - "semver": "^7.3.4", - "validate-npm-package-license": "^3.0.1" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/conventional-changelog-ember": { "version": "2.0.9", "resolved": "https://registry.npmjs.org/conventional-changelog-ember/-/conventional-changelog-ember-2.0.9.tgz", @@ -4186,14 +4046,6 @@ "node": ">=6" } }, - "node_modules/convict/node_modules/yargs-parser": { - "version": "20.2.9", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", - "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", - "engines": { - "node": ">=10" - } - }, "node_modules/cookie": { "version": "0.5.0", "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", @@ -4364,9 +4216,9 @@ } }, "node_modules/debug": { - "version": "4.3.5", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz", - "integrity": "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==", + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", "dependencies": { "ms": "2.1.2" }, @@ -4603,6 +4455,15 @@ "node": ">=8" } }, + "node_modules/dir-glob/node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "node_modules/doctrine": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", @@ -4851,9 +4712,9 @@ } }, "node_modules/electron-to-chromium": { - "version": "1.4.798", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.798.tgz", - "integrity": "sha512-by9J2CiM9KPGj9qfp5U4FcPSbXJG7FNzqnYaY4WLzX+v2PHieVGmnsA4dxfpGE3QEC7JofpPZmn7Vn1B9NR2+Q==", + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.5.tgz", + "integrity": "sha512-QR7/A7ZkMS8tZuoftC/jfqNkZLQO779SSW3YuZHP4eXpj3EffGLFcB/Xu9AAZQzLccTiCV+EmUo3ha4mQ9wnlA==", "dev": true }, "node_modules/emittery": { @@ -4886,16 +4747,6 @@ "node": ">= 0.8" } }, - "node_modules/encoding": { - "version": "0.1.13", - "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", - "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", - "optional": true, - "peer": true, - "dependencies": { - "iconv-lite": "^0.6.2" - } - }, "node_modules/end-of-stream": { "version": "1.4.4", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", @@ -5319,9 +5170,9 @@ } }, "node_modules/eslint-plugin-es-x": { - "version": "7.7.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-es-x/-/eslint-plugin-es-x-7.7.0.tgz", - "integrity": "sha512-aP3qj8BwiEDPttxQkZdI221DLKq9sI/qHolE2YSQL1/9+xk7dTV+tB1Fz8/IaCA+lnLA1bDEnvaS2LKs0k2Uig==", + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-es-x/-/eslint-plugin-es-x-7.8.0.tgz", + "integrity": 
"sha512-7Ds8+wAAoV3T+LAKeu39Y5BzXCrGKrcISfgKEqTS4BDN8SFEDQd0S43jiQ8vIa3wUKD07qitZdfzlenSi8/0qQ==", "dev": true, "funding": [ "https://github.com/sponsors/ota-meshi", @@ -5330,7 +5181,7 @@ "peer": true, "dependencies": { "@eslint-community/eslint-utils": "^4.1.2", - "@eslint-community/regexpp": "^4.6.0", + "@eslint-community/regexpp": "^4.11.0", "eslint-compat-utils": "^0.5.1" }, "engines": { @@ -5451,25 +5302,25 @@ } }, "node_modules/eslint-plugin-jest/node_modules/@typescript-eslint/utils": { - "version": "7.13.0", - "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-7.13.0.tgz", - "integrity": "sha512-jceD8RgdKORVnB4Y6BqasfIkFhl4pajB1wVxrF4akxD2QPM8GNYjgGwEzYS+437ewlqqrg7Dw+6dhdpjMpeBFQ==", + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.0.1.tgz", + "integrity": "sha512-CBFR0G0sCt0+fzfnKaciu9IBsKvEKYwN9UZ+eeogK1fYHg4Qxk1yf/wLQkLXlq8wbU2dFlgAesxt8Gi76E8RTA==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.4.0", - "@typescript-eslint/scope-manager": "7.13.0", - "@typescript-eslint/types": "7.13.0", - "@typescript-eslint/typescript-estree": "7.13.0" + "@typescript-eslint/scope-manager": "8.0.1", + "@typescript-eslint/types": "8.0.1", + "@typescript-eslint/typescript-estree": "8.0.1" }, "engines": { - "node": "^18.18.0 || >=20.0.0" + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/typescript-eslint" }, "peerDependencies": { - "eslint": "^8.56.0" + "eslint": "^8.57.0 || ^9.0.0" } }, "node_modules/eslint-plugin-n": { @@ -5502,9 +5353,9 @@ } }, "node_modules/eslint-plugin-promise": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.2.0.tgz", - "integrity": "sha512-QmAqwizauvnKOlifxyDj2ObfULpHQawlg/zQdgEixur9vl0CvZGv/LCJV2rtj3210QCoeGBzVMfMXqGAOr/4fA==", + "version": "6.6.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.6.0.tgz", + "integrity": "sha512-57Zzfw8G6+Gq7axm2Pdo3gW/Rx3h9Yywgn61uE/3elTCOePEHVrn2i5CdfBwA1BLK0Q0WqctICIUSqXZW/VprQ==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" @@ -5517,35 +5368,35 @@ } }, "node_modules/eslint-plugin-react": { - "version": "7.34.2", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.34.2.tgz", - "integrity": "sha512-2HCmrU+/JNigDN6tg55cRDKCQWicYAPB38JGSFDQt95jDm8rrvSUo7YPkOIm5l6ts1j1zCvysNcasvfTMQzUOw==", + "version": "7.35.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.35.0.tgz", + "integrity": "sha512-v501SSMOWv8gerHkk+IIQBkcGRGrO2nfybfj5pLxuJNFTPxxA3PSryhXTK+9pNbtkggheDdsC0E9Q8CuPk6JKA==", "dev": true, "dependencies": { "array-includes": "^3.1.8", "array.prototype.findlast": "^1.2.5", "array.prototype.flatmap": "^1.3.2", - "array.prototype.toreversed": "^1.1.2", - "array.prototype.tosorted": "^1.1.3", + "array.prototype.tosorted": "^1.1.4", "doctrine": "^2.1.0", "es-iterator-helpers": "^1.0.19", "estraverse": "^5.3.0", + "hasown": "^2.0.2", "jsx-ast-utils": "^2.4.1 || ^3.0.0", "minimatch": "^3.1.2", "object.entries": "^1.1.8", "object.fromentries": "^2.0.8", - "object.hasown": "^1.1.4", "object.values": "^1.2.0", "prop-types": "^15.8.1", "resolve": "^2.0.0-next.5", "semver": "^6.3.1", - "string.prototype.matchall": "^4.0.11" + "string.prototype.matchall": "^4.0.11", + "string.prototype.repeat": "^1.0.0" }, "engines": { "node": ">=4" }, "peerDependencies": { - "eslint": "^3 || ^4 || 
^5 || ^6 || ^7 || ^8" + "eslint": "^3 || ^4 || ^5 || ^6 || ^7 || ^8 || ^9.7" } }, "node_modules/eslint-plugin-react/node_modules/doctrine": { @@ -5702,9 +5553,9 @@ } }, "node_modules/esquery": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", - "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", "dev": true, "dependencies": { "estraverse": "^5.1.0" @@ -6500,57 +6351,16 @@ "wrap-ansi": "^7.0.0" } }, - "node_modules/get-pkg-repo/node_modules/hosted-git-info": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", - "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/get-pkg-repo/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/get-pkg-repo/node_modules/through2": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", - "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", + "node_modules/get-pkg-repo/node_modules/through2": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", + "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", "dev": true, "dependencies": { "readable-stream": "~2.3.6", "xtend": "~4.0.1" } }, - "node_modules/get-pkg-repo/node_modules/wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, "node_modules/get-pkg-repo/node_modules/yargs": { "version": "16.2.0", "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", @@ -6569,6 +6379,15 @@ "node": ">=10" } }, + "node_modules/get-pkg-repo/node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "engines": { + "node": ">=12" + } + }, "node_modules/get-stdin": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-8.0.0.tgz", @@ -6611,9 +6430,9 @@ } }, "node_modules/get-tsconfig": { - "version": "4.7.5", - "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.7.5.tgz", - "integrity": "sha512-ZCuZCnlqNzjb4QprAzXKdpp/gh6KTxSJuw3IBsPnV/7fV4NxC9ckB+vPTt8w7fJA0TaSD7c55BR47JD6MEDyDw==", + "version": "4.7.6", + "resolved": 
"https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.7.6.tgz", + "integrity": "sha512-ZAqrLlu18NbDdRaHq+AKXzAmqIUPswPWKUchfytdAjiRFnCe5ojG2bstg6mRiZabkKfCoL/e98pbBELIV/YCeA==", "dev": true, "peer": true, "dependencies": { @@ -6703,12 +6522,6 @@ "ini": "^1.3.2" } }, - "node_modules/gitconfiglocal/node_modules/ini": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", - "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==", - "dev": true - }, "node_modules/glob": { "version": "7.2.3", "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", @@ -6887,9 +6700,9 @@ } }, "node_modules/handlebars/node_modules/uglify-js": { - "version": "3.18.0", - "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.18.0.tgz", - "integrity": "sha512-SyVVbcNBCk0dzr9XL/R/ySrmYf0s372K6/hFklzgcp2lBFyXtw4I7BOdDjlLhE1aVqaI/SHWXWmYdlZxuyF38A==", + "version": "3.19.1", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.19.1.tgz", + "integrity": "sha512-y/2wiW+ceTYR2TSSptAhfnEtpLaQ4Ups5zrjB2d3kuVxHj16j/QJwPl5PvuGy9uARb39J0+iKxcRPvtpsx4A4A==", "dev": true, "optional": true, "bin": { @@ -7097,6 +6910,36 @@ "integrity": "sha512-YXXAAhmF9zpQbC7LEcREFtXfGq5K1fmd+4PHkBq8NUqmzW3G+Dq10bI/i0KucLRwss3YYFQ0fSfoxBZYiGUqtQ==", "deprecated": "This module has moved and is now available at @hapi/hoek. Please update your dependencies as this version is no longer maintained an may contain bugs and security issues." }, + "node_modules/hosted-git-info": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", + "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/hosted-git-info/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/hosted-git-info/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, "node_modules/html-escaper": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", @@ -7301,13 +7144,11 @@ } }, "node_modules/iconv-lite": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", - "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", - "optional": true, - "peer": true, + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", "dependencies": { - "safer-buffer": ">= 2.1.2 < 3.0.0" + "safer-buffer": ">= 2.1.2 < 3" }, "engines": { "node": ">=0.10.0" @@ -7371,9 +7212,9 @@ } }, "node_modules/import-local": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz", - "integrity": 
"sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.2.0.tgz", + "integrity": "sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA==", "dev": true, "dependencies": { "pkg-dir": "^4.2.0", @@ -7423,6 +7264,11 @@ "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, + "node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" + }, "node_modules/internal-slot": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", @@ -7594,11 +7440,14 @@ } }, "node_modules/is-core-module": { - "version": "2.13.1", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz", - "integrity": "sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==", + "version": "2.15.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.15.0.tgz", + "integrity": "sha512-Dd+Lb2/zvk9SKy1TGCt1wFJFo/MWBPMX5x7KcvLajWTGuomczdQX61PvY5yK6SVACwpoexWo81IfFyoKY2QnTA==", "dependencies": { - "hasown": "^2.0.0" + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -8013,9 +7862,9 @@ } }, "node_modules/istanbul-lib-instrument": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.2.tgz", - "integrity": "sha512-1WUsZ9R1lA0HtBSohTkm39WTPlNKSJ5iFk7UwqXkBLoHQT+hfqPsfsTDVuZdKGaBwn7din9bS7SsnoAr943hvw==", + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.3.tgz", + "integrity": "sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==", "dev": true, "dependencies": { "@babel/core": "^7.23.9", @@ -8045,18 +7894,6 @@ "node": ">=8" } }, - "node_modules/istanbul-lib-processinfo/node_modules/p-map": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", - "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", - "dev": true, - "dependencies": { - "aggregate-error": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/istanbul-lib-report": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", @@ -8112,9 +7949,9 @@ } }, "node_modules/jake": { - "version": "10.9.1", - "resolved": "https://registry.npmjs.org/jake/-/jake-10.9.1.tgz", - "integrity": "sha512-61btcOHNnLnsOdtLgA5efqQWjnSi/vow5HbI7HMdKKWqvrKR1bLK3BPlJn9gcSaP2ewuamUSMB5XEy76KUIS2w==", + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/jake/-/jake-10.9.2.tgz", + "integrity": "sha512-2P4SQ0HrLQ+fw6llpLnOaGAvN2Zu6778SJMrCUwns4fOoG9ayrTiZk3VV8sCPkVZF8ab0zksVpS8FDY5pRCNBA==", "dependencies": { "async": "^3.2.3", "chalk": "^4.0.2", @@ -8864,12 +8701,6 @@ "integrity": "sha512-pi/dX/DqBA9O8FFTgdR2uuYBQoW40QIB0UW7vH1QcRpoTsYA/ANcWspzD7pFxyrs+P09/K5fKAVdr9k42twy3A==", "dev": true }, - "node_modules/json-schema-ref-parser/node_modules/sprintf-js": { - "version": "1.0.3", - "resolved": 
"https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", - "dev": true - }, "node_modules/json-schema-traverse": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", @@ -9309,9 +9140,9 @@ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==" }, "node_modules/logform": { - "version": "2.6.0", - "resolved": "https://registry.npmjs.org/logform/-/logform-2.6.0.tgz", - "integrity": "sha512-1ulHeNPp6k/LD8H91o7VYFBng5i1BDE7HoKxVbZiGFidS1Rj65qcywLxX+pVfAPoQJEjRdvKcusKwOupHCVOVQ==", + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/logform/-/logform-2.6.1.tgz", + "integrity": "sha512-CdaO738xRapbKIMVn2m4F6KTj4j7ooJ8POVnebSgKo3KBz5axNXRAL7ZdRjIV6NOr2Uf4vjtRkxrFETOioCqSA==", "dependencies": { "@colors/colors": "1.6.0", "@types/triple-beam": "^1.3.2", @@ -9324,14 +9155,6 @@ "node": ">= 12.0.0" } }, - "node_modules/logform/node_modules/@colors/colors": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.6.0.tgz", - "integrity": "sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==", - "engines": { - "node": ">=0.1.90" - } - }, "node_modules/long": { "version": "5.2.3", "resolved": "https://registry.npmjs.org/long/-/long-5.2.3.tgz", @@ -9518,43 +9341,10 @@ } }, "node_modules/meow/node_modules/hosted-git-info": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", - "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/meow/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/meow/node_modules/normalize-package-data": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz", - "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==", - "dev": true, - "dependencies": { - "hosted-git-info": "^4.0.1", - "is-core-module": "^2.5.0", - "semver": "^7.3.4", - "validate-npm-package-license": "^3.0.1" - }, - "engines": { - "node": ">=10" - } + "version": "2.8.9", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", + "dev": true }, "node_modules/meow/node_modules/read-pkg": { "version": "5.2.0", @@ -9597,12 +9387,6 @@ "node": ">=8" } }, - "node_modules/meow/node_modules/read-pkg/node_modules/hosted-git-info": { - "version": "2.8.9", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", - "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", - "dev": true - }, "node_modules/meow/node_modules/read-pkg/node_modules/normalize-package-data": { "version": "2.5.0", "resolved": 
"https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", @@ -9615,15 +9399,6 @@ "validate-npm-package-license": "^3.0.1" } }, - "node_modules/meow/node_modules/read-pkg/node_modules/semver": { - "version": "5.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", - "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", - "dev": true, - "bin": { - "semver": "bin/semver" - } - }, "node_modules/meow/node_modules/read-pkg/node_modules/type-fest": { "version": "0.6.0", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz", @@ -9633,6 +9408,15 @@ "node": ">=8" } }, + "node_modules/meow/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "bin": { + "semver": "bin/semver" + } + }, "node_modules/meow/node_modules/type-fest": { "version": "0.18.1", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.18.1.tgz", @@ -9645,15 +9429,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/meow/node_modules/yargs-parser": { - "version": "20.2.9", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", - "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", - "dev": true, - "engines": { - "node": ">=10" - } - }, "node_modules/merge-descriptors": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz", @@ -9711,9 +9486,9 @@ } }, "node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "version": "1.53.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.53.0.tgz", + "integrity": "sha512-oHlN/w+3MQ3rba9rqFr6V/ypF10LSkdwUysQL7GkXoTgIWeV+tcXGA852TBxH+gsh8UWoyhR1hKcoMJTuWflpg==", "engines": { "node": ">= 0.6" } @@ -9729,6 +9504,14 @@ "node": ">= 0.6" } }, + "node_modules/mime-types/node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, "node_modules/mimic-fn": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", @@ -9874,9 +9657,9 @@ "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, "node_modules/nan": { - "version": "2.19.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.19.0.tgz", - "integrity": "sha512-nO1xXxfh/RWNxfd/XPfbIfFk5vgLsAxUR9y5O0cHMJu/AW9U95JLXqthYHjEp+8gQ5p96K9jUp8nbVOxCdRbtw==" + "version": "2.20.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.20.0.tgz", + "integrity": "sha512-bk3gXBZDGILuuo/6sKtr0DQmSThYHLtNCdSdXk9YkxD/jK6X2vmCyyXBBxyqZ4XcnzTyYEAThfX3DCEnLf6igw==" }, "node_modules/nanoid": { "version": "3.3.7", @@ -10039,11 +9822,26 @@ "integrity": "sha512-SOp9Phqvqn7jtEUxPWdWfWoLmyt2VaJ6MpvP9Comy1MceMXqE6bxvaTu4iaxpYYPzhny28Lc+M87/c2cPK6lDg==" }, "node_modules/node-releases": { - "version": "2.0.14", - "resolved": 
"https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz", - "integrity": "sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==", + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz", + "integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==", "dev": true }, + "node_modules/normalize-package-data": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz", + "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==", + "dev": true, + "dependencies": { + "hosted-git-info": "^4.0.1", + "is-core-module": "^2.5.0", + "semver": "^7.3.4", + "validate-npm-package-license": "^3.0.1" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/normalize-path": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", @@ -10170,18 +9968,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/nyc/node_modules/p-map": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", - "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", - "dev": true, - "dependencies": { - "aggregate-error": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/nyc/node_modules/resolve-from": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", @@ -10242,6 +10028,15 @@ "node": ">=8" } }, + "node_modules/nyc/node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "engines": { + "node": ">=12" + } + }, "node_modules/oas-kit-common": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/oas-kit-common/-/oas-kit-common-1.0.8.tgz", @@ -10341,9 +10136,12 @@ } }, "node_modules/object-inspect": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", - "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.2.tgz", + "integrity": "sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==", + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -10421,23 +10219,6 @@ "node": ">= 0.4" } }, - "node_modules/object.hasown": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/object.hasown/-/object.hasown-1.1.4.tgz", - "integrity": "sha512-FZ9LZt9/RHzGySlBARE3VF+gE26TxR38SdmqOqliuTnl9wrKulaQs+4dee1V+Io8VfxqzAfHu6YuRgUy8OHoTg==", - "dev": true, - "dependencies": { - "define-properties": "^1.2.1", - "es-abstract": "^1.23.2", - "es-object-atoms": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, "node_modules/object.values": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.2.0.tgz", @@ -10791,6 +10572,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/p-map": { + "version": "3.0.0", + 
"resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", + "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", + "dev": true, + "dependencies": { + "aggregate-error": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/p-try": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", @@ -10923,12 +10716,24 @@ "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" }, "node_modules/path-type": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", - "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", + "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", "dev": true, + "dependencies": { + "pify": "^3.0.0" + }, "engines": { - "node": ">=8" + "node": ">=4" + } + }, + "node_modules/path-type/node_modules/pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==", + "dev": true, + "engines": { + "node": ">=4" } }, "node_modules/pause-stream": { @@ -11093,9 +10898,9 @@ } }, "node_modules/postcss": { - "version": "8.4.38", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.38.tgz", - "integrity": "sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==", + "version": "8.4.31", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.31.tgz", + "integrity": "sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==", "funding": [ { "type": "opencollective", @@ -11111,9 +10916,9 @@ } ], "dependencies": { - "nanoid": "^3.3.7", + "nanoid": "^3.3.6", "picocolors": "^1.0.0", - "source-map-js": "^1.2.0" + "source-map-js": "^1.0.2" }, "engines": { "node": "^10 || ^12 || >=14" @@ -11388,6 +11193,7 @@ "version": "1.5.1", "resolved": "https://registry.npmjs.org/q/-/q-1.5.1.tgz", "integrity": "sha512-kV/CThkXo6xyFEZUugw/+pIOywXcDbFYgSct5cT3gqlbkBE1SJdwy6UQoZvodiWF/ckQLZyDE/Bu1M6gVu5lVw==", + "deprecated": "You or someone you depend on is using Q, the JavaScript Promise library that gave JavaScript developers strong feelings about promises. They can almost certainly migrate to the native JavaScript promise now. Thank you literally everyone for joining me in this bet against the odds. 
Be excellent to each other.\n\n(For a CapTP with native promises, see @endo/eventual-send and @endo/captp)", "dev": true, "engines": { "node": ">=0.6.0", @@ -11395,9 +11201,9 @@ } }, "node_modules/qs": { - "version": "6.12.1", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.12.1.tgz", - "integrity": "sha512-zWmv4RSuB9r2mYQw3zxQuHWeU+42aKi1wWig/j4ele4ygELZ7PEO6MM7rim9oAQH2A5MWfsAVf/jPvTPgCbvUQ==", + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.13.0.tgz", + "integrity": "sha512-+38qI9SOr8tfZ4QmJNplMUxqjbe7LKvvZgWdExBOmd+egZTtjLB67Gu0HRX3u/XOq7UU2Nx6nsjvS16Z9uwfpg==", "dependencies": { "side-channel": "^1.0.6" }, @@ -11477,17 +11283,6 @@ "node": ">= 0.8" } }, - "node_modules/raw-body/node_modules/iconv-lite": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/rc": { "version": "1.2.8", "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", @@ -11502,11 +11297,6 @@ "rc": "cli.js" } }, - "node_modules/rc/node_modules/ini": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", - "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" - }, "node_modules/rc/node_modules/strip-json-comments": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", @@ -11661,18 +11451,6 @@ "node": ">=4" } }, - "node_modules/read-pkg/node_modules/path-type": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", - "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", - "dev": true, - "dependencies": { - "pify": "^3.0.0" - }, - "engines": { - "node": ">=4" - } - }, "node_modules/read-pkg/node_modules/pify": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", @@ -12048,6 +11826,15 @@ "node": ">=8" } }, + "node_modules/replace/node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "engines": { + "node": ">=12" + } + }, "node_modules/require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", @@ -12291,9 +12078,9 @@ } }, "node_modules/semver": { - "version": "7.6.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", - "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", "bin": { "semver": "bin/semver.js" }, @@ -12736,6 +12523,12 @@ "node": ">= 6" } }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true + }, "node_modules/sqlstring": { "version": "2.3.1", 
"resolved": "https://registry.npmjs.org/sqlstring/-/sqlstring-2.3.1.tgz", @@ -13000,56 +12793,6 @@ "node": ">=4" } }, - "node_modules/standard-version/node_modules/wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/standard-version/node_modules/wrap-ansi/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "dev": true, - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/standard-version/node_modules/wrap-ansi/node_modules/color-convert": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", - "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", - "dev": true, - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/standard-version/node_modules/wrap-ansi/node_modules/color-name": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", - "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", - "dev": true - }, "node_modules/standard-version/node_modules/yargs": { "version": "16.2.0", "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", @@ -13068,6 +12811,15 @@ "node": ">=10" } }, + "node_modules/standard-version/node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "engines": { + "node": ">=12" + } + }, "node_modules/standard/node_modules/@eslint/eslintrc": { "version": "2.1.4", "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", @@ -13348,6 +13100,16 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/string.prototype.repeat": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/string.prototype.repeat/-/string.prototype.repeat-1.0.0.tgz", + "integrity": "sha512-0u/TldDbKD8bFCQ/4f5+mNRrXwZ8hg2w7ZR8wa16e8z9XpePWl3eGEcUD0OXpEH/VJH/2G3gjUtR3ZOiBe2S/w==", + "dev": true, + "dependencies": { + "define-properties": "^1.1.3", + "es-abstract": "^1.17.5" + } + }, "node_modules/string.prototype.trim": { "version": "1.2.9", "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz", @@ -13962,9 +13724,9 @@ } }, "node_modules/typescript": { - "version": "5.4.5", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz", - "integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==", + "version": "5.5.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.4.tgz", + "integrity": 
"sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==", "dev": true, "peer": true, "bin": { @@ -14065,9 +13827,9 @@ } }, "node_modules/undici-types": { - "version": "5.26.5", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", - "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" + "version": "6.13.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.13.0.tgz", + "integrity": "sha512-xtFJHudx8S2DSoujjMd1WeWvn7KKWFRESZTMeL1RptAYERu29D6jphMjjY+vn96jvN3kVPDNxU/E13VTaXj6jg==" }, "node_modules/unpipe": { "version": "1.0.0", @@ -14078,9 +13840,9 @@ } }, "node_modules/update-browserslist-db": { - "version": "1.0.16", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.0.16.tgz", - "integrity": "sha512-KVbTxlBYlckhF5wgfyZXTWnMn7MMZjMu9XG8bPlliUOP9ThaF4QnhP8qrjrH7DRzHfSk0oQv1wToW+iA5GajEQ==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz", + "integrity": "sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ==", "dev": true, "funding": [ { @@ -14154,9 +13916,9 @@ "dev": true }, "node_modules/v8-to-istanbul": { - "version": "9.2.0", - "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.2.0.tgz", - "integrity": "sha512-/EH/sDgxU2eGxajKdwLCDmQ4FWq+kpi3uCmBGpw1xJtnAxEjlD8j8PEiGWpCIMIs3ciNAgH0d3TTJiUkYzyZjA==", + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz", + "integrity": "sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA==", "dev": true, "dependencies": { "@jridgewell/trace-mapping": "^0.3.12", @@ -14275,13 +14037,13 @@ } }, "node_modules/which-builtin-type": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.1.3.tgz", - "integrity": "sha512-YmjsSMDBYsM1CaFiayOVT06+KJeXf0o5M/CAd4o1lTadFAtacTUM49zoYxr/oroopFDfhvN6iEcBxUyc3gvKmw==", + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.1.4.tgz", + "integrity": "sha512-bppkmBSsHFmIMSl8BO9TbsyzsvGjVoppt8xUiGzwiu/bhDCGxnpOKCxgqj6GuyHE0mINMDecBFPlOm2hzY084w==", "dev": true, "dependencies": { - "function.prototype.name": "^1.1.5", - "has-tostringtag": "^1.0.0", + "function.prototype.name": "^1.1.6", + "has-tostringtag": "^1.0.2", "is-async-function": "^2.0.0", "is-date-object": "^1.0.5", "is-finalizationregistry": "^1.0.2", @@ -14290,8 +14052,8 @@ "is-weakref": "^1.0.2", "isarray": "^2.0.5", "which-boxed-primitive": "^1.0.2", - "which-collection": "^1.0.1", - "which-typed-array": "^1.1.9" + "which-collection": "^1.0.2", + "which-typed-array": "^1.1.15" }, "engines": { "node": ">= 0.4" @@ -14596,6 +14358,14 @@ "yargs-parser": "^11.1.1" } }, + "node_modules/widdershins/node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "engines": { + "node": ">=12" + } + }, "node_modules/window-size": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/window-size/-/window-size-0.1.0.tgz", @@ -14605,15 +14375,15 @@ } }, "node_modules/winston": { - "version": "3.13.0", - "resolved": 
"https://registry.npmjs.org/winston/-/winston-3.13.0.tgz", - "integrity": "sha512-rwidmA1w3SE4j0E5MuIufFhyJPBDG7Nu71RkZor1p2+qHvJSZ9GYDA81AyleQcZbh/+V6HjeBdfnTZJm9rSeQQ==", + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/winston/-/winston-3.13.1.tgz", + "integrity": "sha512-SvZit7VFNvXRzbqGHsv5KSmgbEYR5EiQfDAL9gxYkRqa934Hnk++zze0wANKtMHcy/gI4W/3xmSDwlhf865WGw==", "dependencies": { "@colors/colors": "^1.6.0", "@dabh/diagnostics": "^2.0.2", "async": "^3.2.3", "is-stream": "^2.0.0", - "logform": "^2.4.0", + "logform": "^2.6.0", "one-time": "^1.0.0", "readable-stream": "^3.4.0", "safe-stable-stringify": "^2.3.1", @@ -14626,12 +14396,12 @@ } }, "node_modules/winston-transport": { - "version": "4.7.0", - "resolved": "https://registry.npmjs.org/winston-transport/-/winston-transport-4.7.0.tgz", - "integrity": "sha512-ajBj65K5I7denzer2IYW6+2bNIVqLGDHqDw3Ow8Ohh+vdW+rv4MZ6eiDvHoKhfJFZ2auyN8byXieDDJ96ViONg==", + "version": "4.7.1", + "resolved": "https://registry.npmjs.org/winston-transport/-/winston-transport-4.7.1.tgz", + "integrity": "sha512-wQCXXVgfv/wUPOfb2x0ruxzwkcZfxcktz6JIMUaPLmcNhO4bZTwA/WtDWK74xV3F2dKu8YadrFv0qhwYjVEwhA==", "dependencies": { - "logform": "^2.3.2", - "readable-stream": "^3.6.0", + "logform": "^2.6.1", + "readable-stream": "^3.6.2", "triple-beam": "^1.3.0" }, "engines": { @@ -14651,14 +14421,6 @@ "node": ">= 6" } }, - "node_modules/winston/node_modules/@colors/colors": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.6.0.tgz", - "integrity": "sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA==", - "engines": { - "node": ">=0.1.90" - } - }, "node_modules/winston/node_modules/readable-stream": { "version": "3.6.2", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", @@ -14687,6 +14449,22 @@ "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==", "dev": true }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", @@ -14738,9 +14516,9 @@ } }, "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", "dev": true }, "node_modules/yaml": { @@ -14772,6 +14550,14 @@ } }, "node_modules/yargs-parser": { + "version": "20.2.9", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs/node_modules/yargs-parser": { "version": "21.1.1", "resolved": 
"https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", diff --git a/package.json b/package.json index a7c2784a..11a5a95e 100644 --- a/package.json +++ b/package.json @@ -66,7 +66,7 @@ "package-lock": "docker run --rm -it quoting-service:local cat package-lock.json > package-lock.json", "run": "docker run -p 3002:3002 --rm --link db:mysql quoting-service:local", "docker:build": "docker build --build-arg NODE_VERSION=\"$(cat .nvmrc)-alpine\" -t mojaloop/quoting-service:local -f ./Dockerfile .", - "docker:up": "docker-compose up", + "docker:up": ". ./scripts/env.sh && docker-compose up -d", "docker:stop": "docker-compose stop", "docker:rm": "docker-compose rm -f -v", "docker:down": "docker-compose down -v", diff --git a/scripts/env.sh b/scripts/env.sh new file mode 100644 index 00000000..e9aed2bf --- /dev/null +++ b/scripts/env.sh @@ -0,0 +1,12 @@ +#!/bin/sh + +get_external_ip() { + if [ "$(uname)" = "Linux" ]; then + echo "$(ip addr show docker0 | grep 'inet ' | awk '{print $2}' | cut -d/ -f1)" + else + # Need to find a way to support Windows here + echo "$(route get ifconfig.me | grep interface | sed -e 's/.*: //' | xargs ipconfig getifaddr)" + fi +} + +export REDIS_CLUSTER_ANNOUNCE_IP=$(get_external_ip) diff --git a/src/api/routes.js b/src/api/routes.js index ace8ee5d..9ccb2ed6 100644 --- a/src/api/routes.js +++ b/src/api/routes.js @@ -176,4 +176,7 @@ const APIRoutes = (api) => [ } ] -module.exports = { APIRoutes } +module.exports = { + APIRoutes, + handleRequest // Exposed for testing +} diff --git a/src/constants.js b/src/constants.js index df979f40..f9b85bab 100644 --- a/src/constants.js +++ b/src/constants.js @@ -15,7 +15,14 @@ const HEADERS = Object.freeze({ }) // todo: think, if it's better to use all headers keys in lowercase +const ERROR_MESSAGES = { + CALLBACK_UNSUCCESSFUL_HTTP_RESPONSE: 'Got non-success response sending error callback', + CALLBACK_NETWORK_ERROR: 'network error in sendErrorCallback', + NO_FX_CALLBACK_ENDPOINT: (fspiopSource, conversionRequestId) => `No FSPIOP_CALLBACK_URL_FX_QUOTES endpoint found for FSP '${fspiopSource}' while processing fxquote ${conversionRequestId}` +} + module.exports = { RESOURCES, - HEADERS + HEADERS, + ERROR_MESSAGES } diff --git a/src/data/cachedDatabase.js b/src/data/cachedDatabase.js index e155264d..dcf7b30b 100644 --- a/src/data/cachedDatabase.js +++ b/src/data/cachedDatabase.js @@ -127,7 +127,7 @@ class CachedDatabase extends Database { } histTimer({ success: true, queryName: type, hit: false }) } else { - this.log.error('Cache hit for : ', { type, params, value }) + this.log.debug('Cache hit for : ', { type, params, value }) histTimer({ success: true, queryName: type, hit: true }) } diff --git a/src/handlers/init.js b/src/handlers/init.js index 0ab5b7f1..f8c5584e 100644 --- a/src/handlers/init.js +++ b/src/handlers/init.js @@ -15,8 +15,8 @@ let proxyClient let consumersMap let monitoringServer -const startFn = async (handlerList) => { - const config = new Config() +const startFn = async (handlerList, appConfig = undefined) => { + const config = appConfig || new Config() db = new Database(config) await db.connect() @@ -24,8 +24,8 @@ const startFn = async (handlerList) => { if (!isDbOk) throw new Error('DB is not connected') if (config.proxyCache.enabled) { - const isProxyOk = proxyClient = createProxyClient({ proxyCacheConfig: config.proxyCache }) - await proxyClient.connect() + proxyClient = 
createProxyClient({ proxyCacheConfig: config.proxyCache }) + const isProxyOk = await proxyClient.connect() if (!isProxyOk) throw new Error('Proxy is not connected') Logger.isInfoEnabled && Logger.info('Proxy cache is connected') } diff --git a/src/lib/util.js b/src/lib/util.js index 6dd0f0f2..672e5133 100644 --- a/src/lib/util.js +++ b/src/lib/util.js @@ -267,7 +267,7 @@ const fetchParticipantInfo = async (source, destination, cache, proxyClient) => if (!cachedPayee && !requestPayee) { requestPayee = await axios.request({ url: `${url}/${destination}` }) cache && cache.put(`fetchParticipantInfo_${destination}`, requestPayee, Config.participantDataCacheExpiresInMs) - Logger.isDebugEnabled && Logger.debug(`[fetchParticipantInfo]: cache miss for payer ${source}`) + Logger.isDebugEnabled && Logger.debug(`[fetchParticipantInfo]: cache miss for payee ${destination}`) } else { Logger.isDebugEnabled && Logger.debug(`[fetchParticipantInfo]: cache hit for payee ${destination}`) } diff --git a/src/model/bulkQuotes.js b/src/model/bulkQuotes.js index 7dd68c29..896b1cb5 100644 --- a/src/model/bulkQuotes.js +++ b/src/model/bulkQuotes.js @@ -59,6 +59,7 @@ class BulkQuotesModel { this.db = deps.db this.requestId = deps.requestId this.proxyClient = deps.proxyClient + this.envConfig = deps.config || new Config() } /** @@ -139,7 +140,7 @@ class BulkQuotesModel { } const fullCallbackUrl = `${endpoint}${ENUM.EndPoints.FspEndpointTemplates.BULK_QUOTES_POST}` - const newHeaders = generateRequestHeaders(headers, this.db.config.protocolVersions) + const newHeaders = generateRequestHeaders(headers, this.envConfig.protocolVersions) this.writeLog(`Forwarding quote request to endpoint: ${fullCallbackUrl}`) this.writeLog(`Forwarding quote request headers: ${JSON.stringify(newHeaders)}`) @@ -223,7 +224,7 @@ class BulkQuotesModel { // we need to strip off the 'accept' header // for all PUT requests as per the API Specification Document // https://github.com/mojaloop/mojaloop-specification/blob/main/documents/v1.1-document-set/fspiop-v1.1-openapi2.yaml - const newHeaders = generateRequestHeaders(headers, this.db.config.protocolVersions, true) + const newHeaders = generateRequestHeaders(headers, this.envConfig.protocolVersions, true) this.writeLog(`Forwarding bulk quote response to endpoint: ${fullCallbackUrl}`) this.writeLog(`Forwarding bulk quote response headers: ${JSON.stringify(newHeaders)}`) @@ -297,7 +298,7 @@ class BulkQuotesModel { } const fullCallbackUrl = `${endpoint}/bulkQuotes/${bulkQuoteId}` - const newHeaders = generateRequestHeaders(headers, this.db.config.protocolVersions) + const newHeaders = generateRequestHeaders(headers, this.envConfig.protocolVersions) this.writeLog(`Forwarding quote get request to endpoint: ${fullCallbackUrl}`) @@ -378,7 +379,7 @@ class BulkQuotesModel { * @returns {promise} */ async sendErrorCallback (fspiopSource, fspiopError, bulkQuoteId, headers, span, modifyHeaders = true) { - const envConfig = new Config() + const { envConfig } = this const fspiopDest = headers[ENUM.Http.Headers.FSPIOP.DESTINATION] try { // look up the callback base url @@ -421,9 +422,9 @@ class BulkQuotesModel { // JWS Signer expects headers in lowercase if (envConfig.jws && envConfig.jws.jwsSign && fromSwitchHeaders['fspiop-source'] === envConfig.jws.fspiopSourceToSign) { - formattedHeaders = generateRequestHeadersForJWS(fromSwitchHeaders, this.db.config.protocolVersions, true) + formattedHeaders = generateRequestHeadersForJWS(fromSwitchHeaders, envConfig.protocolVersions, true) } else { - formattedHeaders = 
generateRequestHeaders(fromSwitchHeaders, this.db.config.protocolVersions, true) + formattedHeaders = generateRequestHeaders(fromSwitchHeaders, envConfig.protocolVersions, true) } let opts = { diff --git a/src/model/fxQuotes.js b/src/model/fxQuotes.js index 69475d20..84b80a69 100644 --- a/src/model/fxQuotes.js +++ b/src/model/fxQuotes.js @@ -24,13 +24,14 @@ const EventSdk = require('@mojaloop/event-sdk') const LibUtil = require('@mojaloop/central-services-shared').Util const Logger = require('@mojaloop/central-services-logger') const JwsSigner = require('@mojaloop/sdk-standard-components').Jws.signer +const Metrics = require('@mojaloop/central-services-metrics') const Config = require('../lib/config') const { loggerFactory } = require('../lib') const { httpRequest } = require('../lib/http') const { getStackOrInspect, generateRequestHeadersForJWS, generateRequestHeaders, getParticipantEndpoint, calculateRequestHash } = require('../lib/util') const LOCAL_ENUM = require('../lib/enum') -const { RESOURCES } = require('../constants') +const { RESOURCES, ERROR_MESSAGES } = require('../constants') axios.defaults.headers.common = {} @@ -40,6 +41,7 @@ class FxQuotesModel { this.requestId = deps.requestId this.proxyClient = deps.proxyClient this.envConfig = deps.envConfig || new Config() + this.httpRequest = deps.httpRequest || httpRequest this.log = deps.log || loggerFactory({ context: this.constructor.name, requestId: this.requestId @@ -52,16 +54,26 @@ class FxQuotesModel { * @returns {promise} - promise will reject if request is not valid */ async validateFxQuoteRequest (fspiopDestination, fxQuoteRequest) { - const currencies = [fxQuoteRequest.conversionTerms.sourceAmount.currency, fxQuoteRequest.conversionTerms.targetAmount.currency] - - // Ensure the proxy client is connected - if (this.proxyClient?.isConnected === false) await this.proxyClient.connect() - // if the payee dfsp has a proxy cache entry, we do not validate the dfsp here - const proxy = await this.proxyClient?.lookupProxyByDfspId(fspiopDestination) - if (!proxy) { - await Promise.all(currencies.map((currency) => { - return this.db.getParticipant(fspiopDestination, LOCAL_ENUM.COUNTERPARTY_FSP, currency, ENUM.Accounts.LedgerAccountType.POSITION) - })) + const histTimer = Metrics.getHistogram( + 'model_fxquote', + 'validateFxQuoteRequest - Metrics for fx quote model', + ['success', 'queryName'] + ).startTimer() + try { + const currencies = [fxQuoteRequest.conversionTerms.sourceAmount.currency, fxQuoteRequest.conversionTerms.targetAmount.currency] + // Ensure the proxy client is connected + if (this.proxyClient?.isConnected === false) await this.proxyClient.connect() + // if the payee dfsp has a proxy cache entry, we do not validate the dfsp here + const proxy = await this.proxyClient?.lookupProxyByDfspId(fspiopDestination) + if (!proxy) { + await Promise.all(currencies.map((currency) => { + return this.db.getParticipant(fspiopDestination, LOCAL_ENUM.COUNTERPARTY_FSP, currency, ENUM.Accounts.LedgerAccountType.POSITION) + })) + } + histTimer({ success: true, queryName: 'validateFxQuoteRequest' }) + } catch (error) { + histTimer({ success: false, queryName: 'validateFxQuoteRequest' }) + throw error } } @@ -149,6 +161,11 @@ class FxQuotesModel { * @returns {undefined} */ async handleFxQuoteRequest (headers, fxQuoteRequest, span) { + const histTimer = Metrics.getHistogram( + 'model_fxquote', + 'handleFxQuoteRequest - Metrics for fx quote model', + ['success', 'queryName'] + ).startTimer() let fspiopSource let txn const childSpan = 
span.getChild('qs_fxQuote_forwardFxQuoteRequest') @@ -160,8 +177,7 @@ class FxQuotesModel { await this.validateFxQuoteRequest(fspiopDestination, fxQuoteRequest) - const envConfig = new Config() - if (!envConfig.simpleRoutingMode) { + if (!this.envConfig.simpleRoutingMode) { // check if this is a resend or an erroneous duplicate const dupe = await this.checkDuplicateFxQuoteRequest(fxQuoteRequest) @@ -210,7 +226,9 @@ class FxQuotesModel { } await this.forwardFxQuoteRequest(headers, fxQuoteRequest.conversionRequestId, fxQuoteRequest, childSpan) + histTimer({ success: true, queryName: 'handleFxQuoteRequest' }) } catch (err) { + histTimer({ success: false, queryName: 'handleFxQuoteRequest' }) this.log.error('error in handleFxQuoteRequest', err) if (txn) { txn.rollback(err) @@ -229,6 +247,12 @@ class FxQuotesModel { * @returns {undefined} */ async forwardFxQuoteRequest (headers, conversionRequestId, originalFxQuoteRequest, span) { + const histTimer = Metrics.getHistogram( + 'model_fxquote', + 'forwardFxQuoteRequest - Metrics for fx quote model', + ['success', 'queryName'] + ).startTimer() + try { const fspiopSource = headers[ENUM.Http.Headers.FSPIOP.SOURCE] const fspiopDest = headers[ENUM.Http.Headers.FSPIOP.DESTINATION] @@ -237,14 +261,14 @@ class FxQuotesModel { // lookup the fxp callback endpoint const endpoint = await this._getParticipantEndpoint(fspiopDest) if (!endpoint) { - throw ErrorHandler.CreateFSPIOPError(ErrorHandler.Enums.FSPIOPErrorCodes.DESTINATION_FSP_ERROR, `No FSPIOP_CALLBACK_URL_FX_QUOTES endpoint found for FXP '${fspiopDest}' while processing fxQuote ${conversionRequestId}`, null, fspiopSource) + throw ErrorHandler.CreateFSPIOPError(ErrorHandler.Enums.FSPIOPErrorCodes.DESTINATION_FSP_ERROR, ERROR_MESSAGES.NO_FX_CALLBACK_ENDPOINT(fspiopDest, conversionRequestId), null, fspiopSource) } let opts = { method: ENUM.Http.RestMethods.POST, url: `${endpoint}${ENUM.EndPoints.FspEndpointTemplates.FX_QUOTES_POST}`, data: JSON.stringify(originalFxQuoteRequest), - headers: generateRequestHeaders(headers, this.db.config.protocolVersions, false, RESOURCES.fxQuotes) + headers: generateRequestHeaders(headers, this.envConfig.protocolVersions, false, RESOURCES.fxQuotes) } this.log.debug('Forwarding fxQuote request details', { conversionRequestId, opts }) @@ -253,8 +277,10 @@ class FxQuotesModel { span.audit(opts, EventSdk.AuditEventAction.egress) } - await httpRequest(opts, fspiopSource) + await this.httpRequest(opts, fspiopSource) + histTimer({ success: true, queryName: 'forwardFxQuoteRequest' }) } catch (err) { + histTimer({ success: false, queryName: 'forwardFxQuoteRequest' }) this.log.error('error in forwardFxQuoteRequest', err) throw ErrorHandler.ReformatFSPIOPError(err) } @@ -266,19 +292,24 @@ class FxQuotesModel { * @returns {undefined} */ async handleFxQuoteUpdate (headers, conversionRequestId, fxQuoteUpdateRequest, span) { + const histTimer = Metrics.getHistogram( + 'model_fxquote', + 'handleFxQuoteUpdate - Metrics for fx quote model', + ['success', 'queryName'] + ).startTimer() + let txn const fspiopSource = headers[ENUM.Http.Headers.FSPIOP.SOURCE] - if ('accept' in headers) { - throw ErrorHandler.CreateFSPIOPError(ErrorHandler.Enums.FSPIOPErrorCodes.VALIDATION_ERROR, - `Update for fxQuote ${conversionRequestId} failed: "accept" header should not be sent in callbacks.`, null, headers['fspiop-source']) - } - const childSpan = span.getChild('qs_fxQuote_forwardFxQuoteUpdate') try { await childSpan.audit({ headers, params: { conversionRequestId }, payload: fxQuoteUpdateRequest }, 
EventSdk.AuditEventAction.start) - const envConfig = new Config() - if (!envConfig.simpleRoutingMode) { + if ('accept' in headers) { + throw ErrorHandler.CreateFSPIOPError(ErrorHandler.Enums.FSPIOPErrorCodes.VALIDATION_ERROR, + `Update for fx quote ${conversionRequestId} failed: "accept" header should not be sent in callbacks.`, null, headers['fspiop-source']) + } + + if (!this.envConfig.simpleRoutingMode) { // check if this is a resend or an erroneous duplicate const dupe = await this.checkDuplicateFxQuoteResponse(conversionRequestId, fxQuoteUpdateRequest) @@ -343,7 +374,9 @@ class FxQuotesModel { } await this.forwardFxQuoteUpdate(headers, conversionRequestId, fxQuoteUpdateRequest, childSpan) + histTimer({ success: true, queryName: 'handleFxQuoteUpdate' }) } catch (err) { + histTimer({ success: false, queryName: 'handleFxQuoteUpdate' }) this.log.error('error in handleFxQuoteUpdate', err) const fspiopSource = headers[ENUM.Http.Headers.FSPIOP.SOURCE] if (txn) { @@ -363,6 +396,12 @@ class FxQuotesModel { * @returns {undefined} */ async forwardFxQuoteUpdate (headers, conversionRequestId, originalFxQuoteResponse, span) { + const histTimer = Metrics.getHistogram( + 'model_fxquote', + 'forwardFxQuoteUpdate - Metrics for fx quote model', + ['success', 'queryName'] + ).startTimer() + try { const fspiopSource = headers[ENUM.Http.Headers.FSPIOP.SOURCE] const fspiopDest = headers[ENUM.Http.Headers.FSPIOP.DESTINATION] @@ -378,7 +417,7 @@ class FxQuotesModel { method: ENUM.Http.RestMethods.PUT, url: `${endpoint}/fxQuotes/${conversionRequestId}`, data: JSON.stringify(originalFxQuoteResponse), - headers: generateRequestHeaders(headers, this.db.config.protocolVersions, true, RESOURCES.fxQuotes) + headers: generateRequestHeaders(headers, this.envConfig.protocolVersions, true, RESOURCES.fxQuotes) // we need to strip off the 'accept' header // for all PUT requests as per the API Specification Document // https://github.com/mojaloop/mojaloop-specification/blob/main/documents/v1.1-document-set/fspiop-v1.1-openapi2.yaml @@ -390,8 +429,10 @@ class FxQuotesModel { span.audit(opts, EventSdk.AuditEventAction.egress) } - await httpRequest(opts, fspiopSource) + await this.httpRequest(opts, fspiopSource) + histTimer({ success: true, queryName: 'forwardFxQuoteUpdate' }) } catch (err) { + histTimer({ success: false, queryName: 'forwardFxQuoteUpdate' }) this.log.error('error in forwardFxQuoteUpdate', err) throw ErrorHandler.ReformatFSPIOPError(err) } @@ -403,12 +444,19 @@ class FxQuotesModel { * @returns {undefined} */ async handleFxQuoteGet (headers, conversionRequestId, span) { + const histTimer = Metrics.getHistogram( + 'model_fxquote', + 'handleFxQuoteGet - Metrics for fx quote model', + ['success', 'queryName'] + ).startTimer() const fspiopSource = headers[ENUM.Http.Headers.FSPIOP.SOURCE] const childSpan = span.getChild('qs_fxQuote_forwardFxQuoteGet') try { await childSpan.audit({ headers, params: { conversionRequestId } }, EventSdk.AuditEventAction.start) await this.forwardFxQuoteGet(headers, conversionRequestId, childSpan) + histTimer({ success: true, queryName: 'handleFxQuoteGet' }) } catch (err) { + histTimer({ success: false, queryName: 'handleFxQuoteGet' }) this.log.error('error in handleFxQuoteGet', err) await this.handleException(fspiopSource, conversionRequestId, err, headers, childSpan) } finally { @@ -424,6 +472,11 @@ class FxQuotesModel { * @returns {undefined} */ async forwardFxQuoteGet (headers, conversionRequestId, span) { + const histTimer = Metrics.getHistogram( + 'model_fxquote', + 
'forwardFxQuoteGet - Metrics for fx quote model', + ['success', 'queryName'] + ).startTimer() try { const fspiopSource = headers[ENUM.Http.Headers.FSPIOP.SOURCE] const fspiopDest = headers[ENUM.Http.Headers.FSPIOP.DESTINATION] @@ -431,13 +484,13 @@ class FxQuotesModel { // lookup fxp callback endpoint const endpoint = await this._getParticipantEndpoint(fspiopDest) if (!endpoint) { - throw ErrorHandler.CreateFSPIOPError(ErrorHandler.Enums.FSPIOPErrorCodes.DESTINATION_FSP_ERROR, `No FSPIOP_CALLBACK_URL_FX_QUOTES endpoint found for FXP '${fspiopDest}' while processing fxQuote GET ${conversionRequestId}`, null, fspiopSource) + throw ErrorHandler.CreateFSPIOPError(ErrorHandler.Enums.FSPIOPErrorCodes.DESTINATION_FSP_ERROR, ERROR_MESSAGES.NO_FX_CALLBACK_ENDPOINT(fspiopDest, conversionRequestId), null, fspiopSource) } let opts = { method: ENUM.Http.RestMethods.GET, url: `${endpoint}/fxQuotes/${conversionRequestId}`, - headers: generateRequestHeaders(headers, this.db.config.protocolVersions, false, RESOURCES.fxQuotes) + headers: generateRequestHeaders(headers, this.envConfig.protocolVersions, false, RESOURCES.fxQuotes) } this.log.debug('Forwarding fxQuote get request details:', { conversionRequestId, opts }) @@ -446,8 +499,10 @@ class FxQuotesModel { span.audit(opts, EventSdk.AuditEventAction.egress) } - await httpRequest(opts, fspiopSource) + await this.httpRequest(opts, fspiopSource) + histTimer({ success: true, queryName: 'forwardFxQuoteGet' }) } catch (err) { + histTimer({ success: false, queryName: 'forwardFxQuoteGet' }) this.log.error('error in forwardFxQuoteGet', err) throw ErrorHandler.ReformatFSPIOPError(err) } @@ -459,12 +514,16 @@ class FxQuotesModel { * @returns {undefined} */ async handleFxQuoteError (headers, conversionRequestId, error, span) { + const histTimer = Metrics.getHistogram( + 'model_fxquote', + 'handleFxQuoteError - Metrics for fx quote model', + ['success', 'queryName'] + ).startTimer() let txn const fspiopSource = headers[ENUM.Http.Headers.FSPIOP.SOURCE] const childSpan = span.getChild('qs_fxQuote_forwardFxQuoteError') try { - const envConfig = new Config() - if (!envConfig.simpleRoutingMode) { + if (!this.envConfig.simpleRoutingMode) { // do everything in a transaction so we can rollback multiple operations if something goes wrong txn = await this.db.newTransaction() @@ -478,10 +537,12 @@ class FxQuotesModel { txn.commit() } - const fspiopError = ErrorHandler.CreateFSPIOPErrorFromErrorInformation(error) await childSpan.audit({ headers, params: { conversionRequestId } }, EventSdk.AuditEventAction.start) + const fspiopError = ErrorHandler.CreateFSPIOPErrorFromErrorInformation(error) await this.sendErrorCallback(headers[ENUM.Http.Headers.FSPIOP.DESTINATION], fspiopError, conversionRequestId, headers, childSpan, false) + histTimer({ success: true, queryName: 'handleFxQuoteError' }) } catch (err) { + histTimer({ success: false, queryName: 'handleFxQuoteError' }) this.log.error('error in handleFxQuoteError', err) if (txn) { txn.rollback(err) @@ -574,14 +635,20 @@ class FxQuotesModel { * dfsp that initiated the request. 
*/ async handleException (fspiopSource, conversionRequestId, error, headers, span) { + const histTimer = Metrics.getHistogram( + 'model_fxquote', + 'handleException - Metrics for fx quote model', + ['success', 'queryName'] + ).startTimer() this.log.info('Attempting to send error callback to fspiopSource:', { conversionRequestId, fspiopSource }) const fspiopError = ErrorHandler.ReformatFSPIOPError(error) - const childSpan = span.getChild('qs_fxQuote_sendErrorCallback') try { await childSpan.audit({ headers, params: { conversionRequestId } }, EventSdk.AuditEventAction.start) - return await this.sendErrorCallback(fspiopSource, fspiopError, conversionRequestId, headers, childSpan, true) + await this.sendErrorCallback(fspiopSource, fspiopError, conversionRequestId, headers, childSpan, true) + histTimer({ success: true, queryName: 'handleException' }) } catch (err) { + histTimer({ success: false, queryName: 'handleException' }) this.log.error('error in handleException, stop request processing!', err) } finally { if (!childSpan.isFinished) { @@ -598,13 +665,21 @@ class FxQuotesModel { * @returns {promise} */ async sendErrorCallback (fspiopSource, fspiopError, conversionRequestId, headers, span, modifyHeaders = true) { + const histTimer = Metrics.getHistogram( + 'model_fxquote', + 'sendErrorCallback - Metrics for fx quote model', + ['success', 'queryName'] + ).startTimer() const { envConfig, log } = this const fspiopDest = headers[ENUM.Http.Headers.FSPIOP.DESTINATION] try { const endpoint = await this._getParticipantEndpoint(fspiopSource) + + log.debug(`Resolved participant '${fspiopSource}' FSPIOP_CALLBACK_URL_FX_QUOTES to: '${endpoint}'`) + if (!endpoint) { - throw ErrorHandler.CreateFSPIOPError(ErrorHandler.Enums.FSPIOPErrorCodes.PARTY_NOT_FOUND, `No FSPIOP_CALLBACK_URL_FX_QUOTES endpoint found for FSP '${fspiopSource}', unable to make error callback`, null, fspiopSource) + throw ErrorHandler.CreateFSPIOPError(ErrorHandler.Enums.FSPIOPErrorCodes.PARTY_NOT_FOUND, ERROR_MESSAGES.NO_FX_CALLBACK_ENDPOINT(fspiopSource, conversionRequestId), null, fspiopSource) } const fspiopUri = `/fxQuotes/${conversionRequestId}/error` @@ -635,7 +710,7 @@ class FxQuotesModel { const generateHeadersFn = (envConfig.jws?.jwsSign && fromSwitchHeaders['fspiop-source'] === envConfig.jws.fspiopSourceToSign) ? 
generateRequestHeadersForJWS : generateRequestHeaders - const formattedHeaders = generateHeadersFn(fromSwitchHeaders, this.db.config.protocolVersions, true, RESOURCES.fxQuotes) + const formattedHeaders = generateHeadersFn(fromSwitchHeaders, envConfig.protocolVersions, true, RESOURCES.fxQuotes) let opts = { method: ENUM.Http.RestMethods.PUT, @@ -666,6 +741,7 @@ class FxQuotesModel { }, fspiopSource) } } catch (err) { + histTimer({ success: false, queryName: 'sendErrorCallback' }) log.error('Error in sendErrorCallback', err) const fspiopError = ErrorHandler.ReformatFSPIOPError(err) const state = new EventSdk.EventStateMetadata(EventSdk.EventStatusType.failed, fspiopError.apiErrorCode.code, fspiopError.apiErrorCode.message) diff --git a/src/model/quotes.js b/src/model/quotes.js index 22da342d..6bb4b7a6 100644 --- a/src/model/quotes.js +++ b/src/model/quotes.js @@ -67,6 +67,7 @@ class QuotesModel { this.db = deps.db this.requestId = deps.requestId this.proxyClient = deps.proxyClient + this.envConfig = deps.config || new Config() } async executeRules (headers, quoteRequest, payer, payee) { @@ -152,7 +153,7 @@ class QuotesModel { 'validateQuoteRequest - Metrics for quote model', ['success', 'queryName', 'duplicateResult'] ).startTimer() - const envConfig = new Config() + const { envConfig } = this // note that the framework should validate the form of the request // here we can do some hard-coded rule validations to ensure requests // do not lead to unsupported scenarios or use-cases. @@ -162,56 +163,53 @@ class QuotesModel { // throw ErrorHandler.CreateFSPIOPError(ErrorHandler.Enums.FSPIOPErrorCodes.NOT_IMPLEMENTED, 'Only PAYER initiated transactions are supported', null, fspiopSource) // } - // Any quoteRequest specific validations to be added here - if (!quoteRequest) { - // internal-error - histTimer({ success: false, queryName: 'quote_validateQuoteRequest' }) - throw ErrorHandler.CreateInternalServerFSPIOPError('Missing quoteRequest', null, fspiopSource) - } + try { + // Any quoteRequest specific validations to be added here + if (!quoteRequest) { + // internal-error + throw ErrorHandler.CreateInternalServerFSPIOPError('Missing quoteRequest', null, fspiopSource) + } - // Ensure the proxy client is connected if we need to use it down the road - if (this.proxyClient?.isConnected === false) await this.proxyClient.connect() + // Ensure the proxy client is connected if we need to use it down the road + if (this.proxyClient?.isConnected === false) await this.proxyClient.connect() - // In fspiop api spec 2.0, to support FX, `supportedCurrencies` can be optionally passed in via the payer property. - // If `supportedCurrencies` is present, then payer FSP must have position accounts for all those currencies. - if (quoteRequest.payer.supportedCurrencies && - quoteRequest.payer.supportedCurrencies.length > 0 && - // if the payer dfsp has a proxy cache entry, we do not validate the dfsp here - !(await this.proxyClient?.lookupProxyByDfspId(fspiopSource)) - ) { - await Promise.all(quoteRequest.payer.supportedCurrencies.map(currency => - this.db.getParticipant(fspiopSource, LOCAL_ENUM.PAYER_DFSP, currency, ENUM.Accounts.LedgerAccountType.POSITION) - )) - } else { - // If it is not passed in, then we validate payee against the `amount` currency. 
- // if the payee dfsp has a proxy cache entry, we do not validate the dfsp here - if (!(await this.proxyClient?.lookupProxyByDfspId(fspiopDestination))) { - await this.db.getParticipant(fspiopDestination, LOCAL_ENUM.PAYEE_DFSP, quoteRequest.amount.currency, ENUM.Accounts.LedgerAccountType.POSITION) + // In fspiop api spec 2.0, to support FX, `supportedCurrencies` can be optionally passed in via the payer property. + // If `supportedCurrencies` is present, then payer FSP must have position accounts for all those currencies. + if (quoteRequest.payer.supportedCurrencies && quoteRequest.payer.supportedCurrencies.length > 0) { + await Promise.all(quoteRequest.payer.supportedCurrencies.map(currency => + this.db.getParticipant(fspiopSource, LOCAL_ENUM.PAYER_DFSP, currency, ENUM.Accounts.LedgerAccountType.POSITION) + )) + } else { + // If it is not passed in, then we validate payee against the `amount` currency. + // if the payee dfsp has a proxy cache entry, we do not validate the dfsp here + if (!(await this.proxyClient?.lookupProxyByDfspId(fspiopDestination))) { + await this.db.getParticipant(fspiopDestination, LOCAL_ENUM.PAYEE_DFSP, quoteRequest.amount.currency, ENUM.Accounts.LedgerAccountType.POSITION) + } } - } - histTimer({ success: true, queryName: 'quote_validateQuoteRequest' }) - - // Following is the validation to make sure valid fsp's are used in the payload for simple routing mode - if (envConfig.simpleRoutingMode) { - // Lets make sure the optional fspId exists in the payer's partyIdInfo before we validate it - if ( - quoteRequest.payer?.partyIdInfo?.fspId && - quoteRequest.payer.partyIdInfo.fspId !== fspiopSource && - // if the payer dfsp has a proxy cache entry, we do not validate the dfsp here - !(await this.proxyClient?.lookupProxyByDfspId(quoteRequest.payer.partyIdInfo.fspId)) - ) { - await this.db.getParticipant(quoteRequest.payer.partyIdInfo.fspId, LOCAL_ENUM.PAYER_DFSP, quoteRequest.amount.currency, ENUM.Accounts.LedgerAccountType.POSITION) - } - // Lets make sure the optional fspId exists in the payee's partyIdInfo before we validate it - if ( - quoteRequest.payee?.partyIdInfo?.fspId && - quoteRequest.payee.partyIdInfo.fspId !== fspiopDestination && - // if the payee dfsp has a proxy cache entry, we do not validate the dfsp here - !(await this.proxyClient?.lookupProxyByDfspId(quoteRequest.payee.partyIdInfo.fspId)) - ) { - await this.db.getParticipant(quoteRequest.payee.partyIdInfo.fspId, LOCAL_ENUM.PAYEE_DFSP, quoteRequest.amount.currency, ENUM.Accounts.LedgerAccountType.POSITION) + // Following is the validation to make sure valid fsp's are used in the payload for simple routing mode + if (envConfig.simpleRoutingMode) { + // Lets make sure the optional fspId exists in the payer's partyIdInfo before we validate it + if ( + quoteRequest.payer?.partyIdInfo?.fspId && + quoteRequest.payer.partyIdInfo.fspId !== fspiopSource + ) { + await this.db.getParticipant(quoteRequest.payer.partyIdInfo.fspId, LOCAL_ENUM.PAYER_DFSP, quoteRequest.amount.currency, ENUM.Accounts.LedgerAccountType.POSITION) + } + // Lets make sure the optional fspId exists in the payee's partyIdInfo before we validate it + if ( + quoteRequest.payee?.partyIdInfo?.fspId && + quoteRequest.payee.partyIdInfo.fspId !== fspiopDestination && + // if the payee dfsp has a proxy cache entry, we do not validate the dfsp here + !(await this.proxyClient?.lookupProxyByDfspId(quoteRequest.payee.partyIdInfo.fspId)) + ) { + await this.db.getParticipant(quoteRequest.payee.partyIdInfo.fspId, LOCAL_ENUM.PAYEE_DFSP, 
quoteRequest.amount.currency, ENUM.Accounts.LedgerAccountType.POSITION) + } } + histTimer({ success: true, queryName: 'quote_validateQuoteRequest' }) + } catch (err) { + histTimer({ success: false, queryName: 'quote_validateQuoteRequest' }) + throw err } } @@ -245,7 +243,7 @@ class QuotesModel { 'handleQuoteRequest - Metrics for quote model', ['success', 'queryName', 'duplicateResult'] ).startTimer() - const envConfig = new Config() + const { envConfig } = this // accumulate enum ids const refs = {} @@ -489,7 +487,7 @@ class QuotesModel { } const fullCallbackUrl = `${endpoint}/quotes` - const newHeaders = generateRequestHeaders(headers, this.db.config.protocolVersions, false, RESOURCES.quotes, additionalHeaders) + const newHeaders = generateRequestHeaders(headers, this.envConfig.protocolVersions, false, RESOURCES.quotes, additionalHeaders) this.writeLog(`Forwarding quote request to endpoint: ${fullCallbackUrl}`) this.writeLog(`Forwarding quote request headers: ${JSON.stringify(newHeaders)}`) @@ -577,7 +575,7 @@ class QuotesModel { let txn = null let payeeParty = null const fspiopSource = headers[ENUM.Http.Headers.FSPIOP.SOURCE] - const envConfig = new Config() + const { envConfig } = this const handleQuoteUpdateSpan = span.getChild('qs_quote_handleQuoteUpdate') try { // ensure no 'accept' header is present in the request headers. @@ -755,7 +753,7 @@ class QuotesModel { // we need to strip off the 'accept' header // for all PUT requests as per the API Specification Document // https://github.com/mojaloop/mojaloop-specification/blob/main/documents/v1.1-document-set/fspiop-v1.1-openapi2.yaml - const newHeaders = generateRequestHeaders(headers, this.db.config.protocolVersions, true) + const newHeaders = generateRequestHeaders(headers, this.envConfig.protocolVersions, true) this.writeLog(`Forwarding quote response to endpoint: ${fullCallbackUrl}`) this.writeLog(`Forwarding quote response headers: ${JSON.stringify(newHeaders)}`) @@ -839,7 +837,7 @@ class QuotesModel { ['success', 'queryName', 'duplicateResult'] ).startTimer() let txn = null - const envConfig = new Config() + const { envConfig } = this let newError const childSpan = span.getChild('qs_quote_handleQuoteError') try { @@ -955,7 +953,7 @@ class QuotesModel { } const fullCallbackUrl = `${endpoint}/quotes/${quoteId}` - const newHeaders = generateRequestHeaders(headers, this.db.config.protocolVersions) + const newHeaders = generateRequestHeaders(headers, this.envConfig.protocolVersions) this.writeLog(`Forwarding quote get request to endpoint: ${fullCallbackUrl}`) @@ -1023,7 +1021,7 @@ class QuotesModel { 'sendErrorCallback - Metrics for quote model', ['success', 'queryName', 'duplicateResult'] ).startTimer() - const envConfig = new Config() + const { envConfig } = this const fspiopDest = headers[ENUM.Http.Headers.FSPIOP.DESTINATION] try { // look up the callback base url @@ -1066,9 +1064,9 @@ class QuotesModel { // JWS Signer expects headers in lowercase if (envConfig.jws && envConfig.jws.jwsSign && fromSwitchHeaders['fspiop-source'] === envConfig.jws.fspiopSourceToSign) { - formattedHeaders = generateRequestHeadersForJWS(fromSwitchHeaders, this.db.config.protocolVersions, true) + formattedHeaders = generateRequestHeadersForJWS(fromSwitchHeaders, envConfig.protocolVersions, true) } else { - formattedHeaders = generateRequestHeaders(fromSwitchHeaders, this.db.config.protocolVersions, true) + formattedHeaders = generateRequestHeaders(fromSwitchHeaders, envConfig.protocolVersions, true) } let opts = { diff --git 
a/test/integration/fxQuotes.test.js b/test/integration/fxQuotes.test.js new file mode 100644 index 00000000..7b332f1a --- /dev/null +++ b/test/integration/fxQuotes.test.js @@ -0,0 +1,550 @@ +/***** + License + -------------- + Copyright © 2020 Mojaloop Foundation + + The Mojaloop files are made available by the Mojaloop Foundation under the Apache License, Version 2.0 + (the "License") and you may not use these files except in compliance with the [License](http://www.apache.org/licenses/LICENSE-2.0). + + You may obtain a copy of the License at [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0) + + Unless required by applicable law or agreed to in writing, the Mojaloop files are distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the [License](http://www.apache.org/licenses/LICENSE-2.0). + + Contributors + -------------- + This is the official list of the Mojaloop project contributors for this file. + Names of the original copyright holders (individuals or organizations) + should be listed with a '*' in the first column. People who have + contributed from an organization can be listed under the organization + that actually holds the copyright for their contributions (see the + Gates Foundation organization for an example). Those individuals should have + their names indented and be marked with a '-'. Email address can be added + optionally within square brackets . + * Gates Foundation + - Name Surname + + * Steven Oderayi + -------------- + ******/ + +const { Producer } = require('@mojaloop/central-services-stream').Util +const { createProxyClient } = require('../../src/lib/proxy') +const Config = require('../../src/lib/config') +const MockServerClient = require('./mockHttpServer/MockServerClient') +const dto = require('../../src/lib/dto') +const mocks = require('../mocks') +const { wrapWithRetries } = require('../util/helper') +const Database = require('../../src/data/cachedDatabase') + +const TEST_TIMEOUT = 20_000 + +describe('POST /fxQuotes request tests --> ', () => { + jest.setTimeout(TEST_TIMEOUT) + + let db + const config = new Config() + const { kafkaConfig, proxyCache, hubName } = config + const hubClient = new MockServerClient() + const retryConf = { + remainingRetries: process?.env?.TEST_INT_RETRY_COUNT || 20, + timeout: process?.env?.TEST_INT_RETRY_DELAY || 1 + } + + beforeAll(async () => { + db = new Database(config) + await db.connect() + const isDbOk = await db.isConnected() + if (!isDbOk) throw new Error('DB is not connected') + }) + + beforeEach(async () => { + await hubClient.clearHistory() + }) + + afterAll(async () => { + await db?.disconnect() + await Producer.disconnect() + }) + + const base64Encode = (data) => Buffer.from(data).toString('base64') + + const createFxQuote = async (from, to, payload) => { + const message = mocks.kafkaMessagePayloadPostDto({ from, to, id: payload.conversionRequestId, payloadBase64: base64Encode(JSON.stringify(payload)) }) + const { topic, config } = kafkaConfig.PRODUCER.FX_QUOTE.POST + const topicConfig = dto.topicConfigDto({ topicName: topic }) + const isOk = await Producer.produceMessage(message, topicConfig, config) + expect(isOk).toBe(true) + const response = await getResponseWithRetry() + expect(response.data.history.length).toBe(1) + await hubClient.clearHistory() + } + + const getResponseWithRetry = async () => { + return wrapWithRetries(() => 
hubClient.getHistory(), + retryConf.remainingRetries, + retryConf.timeout, + (result) => result.data.history.length > 0 + ) + } + + /** + * Produces a POST /fxQuotes message for a dfsp that is not registered in the hub + */ + test('should POST /fxQuotes (proxied)', async () => { + let response = await hubClient.getHistory() + expect(response.data.history.length).toBe(0) + + const from = 'pinkbank' + const to = 'redbank' // redbank not in the hub db + const proxyId = 'redbankproxy' + let proxyClient + + try { + proxyClient = createProxyClient({ proxyCacheConfig: proxyCache, logger: console }) + + // register proxy representative for redbank + const isAdded = await proxyClient.addDfspIdToProxyMapping(to, proxyId) + + // assert that the proxy representative is mapped in the cache + const key = `dfsp:${to}` + const proxy = await proxyClient.redisClient.get(key) + expect(isAdded).toBe(true) + expect(proxy).toBe(proxyId) + + const payload = mocks.postFxQuotesPayloadDto({ + initiatingFsp: from, + counterPartyFsp: to + }) + const message = mocks.kafkaMessagePayloadPostDto({ from, to, id: payload.conversionRequestId, payloadBase64: base64Encode(JSON.stringify(payload)) }) + const { topic, config } = kafkaConfig.PRODUCER.FX_QUOTE.POST + const topicConfig = dto.topicConfigDto({ topicName: topic }) + const isOk = await Producer.produceMessage(message, topicConfig, config) + expect(isOk).toBe(true) + + response = await getResponseWithRetry() + expect(response.data.history.length).toBe(1) + + // assert that the request was received by the proxy + const request = response.data.history[0] + expect(request.method).toBe('POST') + expect(request.url).toBe(`/${proxyId}/fxQuotes`) + expect(request.body).toEqual(payload) + expect(request.headers['fspiop-source']).toBe(from) + expect(request.headers['fspiop-destination']).toBe(to) + + // check fx quote details were saved to db + const fxQuoteDetails = await db._getFxQuoteDetails(payload.conversionRequestId) + expect(fxQuoteDetails).toEqual({ + conversionRequestId: payload.conversionRequestId, + conversionId: payload.conversionTerms.conversionId, + determiningTransferId: null, + amountTypeId: 1, + initiatingFsp: payload.conversionTerms.initiatingFsp, + counterPartyFsp: payload.conversionTerms.counterPartyFsp, + sourceAmount: payload.conversionTerms.sourceAmount.amount, + sourceCurrency: payload.conversionTerms.sourceAmount.currency, + targetAmount: null, + targetCurrency: payload.conversionTerms.targetAmount.currency, + extensions: expect.anything(), + expirationDate: expect.anything(), + createdDate: expect.anything() + }) + expect(JSON.parse(fxQuoteDetails.extensions)).toEqual(payload.conversionTerms.extensionList.extension) + } finally { + await proxyClient.removeDfspIdFromProxyMapping(to) + await proxyClient.disconnect() + } + }) + + /** + * Produces a PUT /fxQuotes/{ID} callback from a proxied payee + * Expects a PUT /fxQuotes/{ID} callback at the payer's endpoint + */ + test('should PUT /fxQuotes/{ID} callback (proxied)', async () => { + let response = await hubClient.getHistory() + expect(response.data.history.length).toBe(0) + + const from = 'redbank' + const to = 'pinkbank' + const proxyId = 'redbankproxy' + let proxyClient + + try { + proxyClient = createProxyClient({ proxyCacheConfig: proxyCache, logger: console }) + + // register proxy representative for redbank + const isAdded = await proxyClient.addDfspIdToProxyMapping(from, proxyId) + + // assert that the proxy representative is mapped in the cache + const key = `dfsp:${from}` + const proxy = await 
proxyClient.redisClient.get(key) + expect(isAdded).toBe(true) + expect(proxy).toBe(proxyId) + + // create subject fxquote + const fxQuotesPostPayload = mocks.postFxQuotesPayloadDto({ initiatingFsp: to, counterPartyFsp: from }) + await createFxQuote(to, from, fxQuotesPostPayload) + + // send put callback + const payload = mocks.putFxQuotesPayloadDto({ fxQuotesPostPayload }) + const message = mocks.kafkaMessageFxPayloadPutDto({ from, to, id: payload.conversionRequestId, payloadBase64: base64Encode(JSON.stringify(payload)) }) + const { topic, config } = kafkaConfig.PRODUCER.FX_QUOTE.PUT + const topicConfig = dto.topicConfigDto({ topicName: topic }) + const isOk = await Producer.produceMessage(message, topicConfig, config) + expect(isOk).toBe(true) + + response = await getResponseWithRetry() + expect(response.data.history.length).toBe(1) + + // assert that the callback was received by the payer dfsp + const request = response.data.history[0] + expect(request.method).toBe('PUT') + expect(request.url).toBe(`/${to}/fxQuotes/${payload.conversionRequestId}`) + expect(request.body).toEqual(payload) + expect(request.headers['fspiop-source']).toBe(from) + expect(request.headers['fspiop-destination']).toBe(to) + + // check fx quote response details were saved to db + const fxQuoteResponseDetails = await db._getFxQuoteResponseDetails(payload.conversionRequestId) + expect(fxQuoteResponseDetails).toEqual({ + conversionRequestId: payload.conversionRequestId, + fxQuoteResponseId: expect.anything(), + ilpCondition: payload.condition, + conversionId: payload.conversionTerms.conversionId, + amountTypeId: 1, + determiningTransferId: null, + counterPartyFsp: payload.conversionTerms.counterPartyFsp, + initiatingFsp: payload.conversionTerms.initiatingFsp, + sourceAmount: payload.conversionTerms.sourceAmount.amount, + sourceCurrency: payload.conversionTerms.sourceAmount.currency, + targetAmount: payload.conversionTerms.targetAmount.amount, + targetCurrency: payload.conversionTerms.targetAmount.currency, + expirationDate: expect.anything(), + createdDate: expect.anything(), + charges: expect.anything(), + extensions: expect.anything() + }) + expect(JSON.parse(fxQuoteResponseDetails.extensions)).toEqual(payload.conversionTerms.extensionList.extension) + const charges = JSON.parse(fxQuoteResponseDetails.charges) + const expectedCharges = charges.map(charge => ({ + chargeType: charge.chargeType, + sourceAmount: { + currency: charge.sourceCurrency, + amount: charge.sourceAmount + }, + targetAmount: { + currency: charge.targetCurrency, + amount: charge.targetAmount + } + })) + expect(expectedCharges).toEqual(payload.conversionTerms.charges) + } finally { + await proxyClient.removeDfspIdFromProxyMapping(from) + await proxyClient.disconnect() + } + }) + + /** + * Produces a POST /fxQuotes message for a dfsp that is registered in the hub + * Expects a POST /fxQuotes request at the payee dfsp's endpoint + */ + test('should POST fxquote (no proxy)', async () => { + let response = await hubClient.getHistory() + expect(response.data.history.length).toBe(0) + + const from = 'pinkbank' + const to = 'greenbank' + const payload = mocks.postFxQuotesPayloadDto({ + initiatingFsp: from, + counterPartyFsp: to + }) + const message = mocks.kafkaMessagePayloadPostDto({ from, to, id: payload.conversionRequestId, payloadBase64: base64Encode(JSON.stringify(payload)) }) + const { topic, config } = kafkaConfig.PRODUCER.FX_QUOTE.POST + const topicConfig = dto.topicConfigDto({ topicName: topic }) + + const isOk = await 
Producer.produceMessage(message, topicConfig, config) + expect(isOk).toBe(true) + + response = await getResponseWithRetry() + expect(response.data.history.length).toBe(1) + + // assert that the request was received by the payee dfsp + const request = response.data.history[0] + expect(request.method).toBe('POST') + expect(request.url).toBe(`/${to}/fxQuotes`) + expect(request.body).toEqual(payload) + expect(request.headers['fspiop-source']).toBe(from) + expect(request.headers['fspiop-destination']).toBe(to) + }) + + /** + * Produces a PUT /fxQuotes/{ID} callback for a dfsp that is registered in the hub + * Expects a PUT /fxQuotes/{ID} callback at the payer dfsp's endpoint + */ + test('should PUT fxquote callback (no proxy)', async () => { + let response = await hubClient.getHistory() + expect(response.data.history.length).toBe(0) + + const from = 'greenbank' + const to = 'pinkbank' + + // create subject fxquote + const fxQuotesPostPayload = mocks.postFxQuotesPayloadDto({ initiatingFsp: to, counterPartyFsp: from }) + await createFxQuote(to, from, fxQuotesPostPayload) + + // send put callback + const payload = mocks.putFxQuotesPayloadDto({ fxQuotesPostPayload }) + const message = mocks.kafkaMessageFxPayloadPutDto({ from, to, id: payload.conversionRequestId, payloadBase64: base64Encode(JSON.stringify(payload)) }) + const { topic, config } = kafkaConfig.PRODUCER.FX_QUOTE.PUT + const topicConfig = dto.topicConfigDto({ topicName: topic }) + const isOk = await Producer.produceMessage(message, topicConfig, config) + expect(isOk).toBe(true) + + response = await getResponseWithRetry() + expect(response.data.history.length).toBe(1) + + // assert that the callback was received by the payee dfsp + const request = response.data.history[0] + expect(request.method).toBe('PUT') + expect(request.url).toBe(`/${to}/fxQuotes/${payload.conversionRequestId}`) + expect(request.body).toEqual(payload) + expect(request.headers['fspiop-source']).toBe(from) + expect(request.headers['fspiop-destination']).toBe(to) + }) + + /** + * Produces a POST /fxQuotes request for an invalid dfsp + * Expects a PUT /fxQuotes/{ID} callback with an error at the sender's endpoint + */ + test('should POST fx quote to invalid participant', async () => { + let response = await hubClient.getHistory() + expect(response.data.history.length).toBe(0) + + const from = 'pinkbank' + const to = 'invalidbank' + const payload = mocks.postFxQuotesPayloadDto({ + initiatingFsp: from, + counterPartyFsp: to + }) + const message = mocks.kafkaMessagePayloadPostDto({ from, to, id: payload.conversionRequestId, payloadBase64: base64Encode(JSON.stringify(payload)) }) + const { topic, config } = kafkaConfig.PRODUCER.FX_QUOTE.POST + const topicConfig = dto.topicConfigDto({ topicName: topic }) + + const isOk = await Producer.produceMessage(message, topicConfig, config) + expect(isOk).toBe(true) + + response = await getResponseWithRetry() + expect(response.data.history.length).toBe(1) + + // assert that error callback was received by the payer dfsp + const request = response.data.history[0] + expect(request.method).toBe('PUT') + expect(request.url).toBe(`/${from}/fxQuotes/${payload.conversionRequestId}/error`) + expect(request.body.errorInformation.errorCode).toBe('3100') + expect(request.body.errorInformation.errorDescription).toBe(`Generic validation error - Unsupported participant '${to}'`) + expect(request.headers['fspiop-source']).toBe(hubName) + expect(request.headers['fspiop-destination']).toBe(from) + }) + + /** + * Produces a PUT /fxQuotes/{ID} callback 
with an error for a dfsp that is registered in the hub + * Expects a PUT /fxQuotes/{ID} callback with an error at the receiver's endpoint + */ + test('should PUT /fxQuotes/{ID}/error (no proxy)', async () => { + let response = await hubClient.getHistory() + expect(response.data.history.length).toBe(0) + + const from = 'greenbank' + const to = 'pinkbank' + + // create subject fxquote + const fxQuotesPostPayload = mocks.postFxQuotesPayloadDto({ initiatingFsp: to, counterPartyFsp: from }) + await createFxQuote(to, from, fxQuotesPostPayload) + + const conversionRequestId = fxQuotesPostPayload.conversionRequestId + const payload = { + errorInformation: { + errorCode: '3100', + errorDescription: 'Generic validation error' + } + } + const message = mocks.kafkaMessageFxPayloadPutDto({ from, to, id: conversionRequestId, payloadBase64: base64Encode(JSON.stringify(payload)) }) + const { topic, config } = kafkaConfig.PRODUCER.FX_QUOTE.PUT + const topicConfig = dto.topicConfigDto({ topicName: topic }) + + const isOk = await Producer.produceMessage(message, topicConfig, config) + expect(isOk).toBe(true) + + response = await getResponseWithRetry() + expect(response.data.history.length).toBe(1) + + // assert that the error callback was received by the payer dfsp + const request = response.data.history[0] + expect(request.method).toBe('PUT') + expect(request.url).toBe(`/${to}/fxQuotes/${conversionRequestId}/error`) + expect(request.body.errorInformation.errorCode).toBe('3100') + expect(request.body.errorInformation.errorDescription).toBe('Generic validation error') + expect(request.headers['fspiop-source']).toBe(from) + expect(request.headers['fspiop-destination']).toBe(to) + }) + + /** + * Produces a PUT /fxQuotes/{ID}/error for a proxied dfsp + * Expects a PUT /fxQuotes/{ID}/error callback with an error at the proxy's endpoint + */ + test('should PUT /fxQuotes/{ID}/error (proxied)', async () => { + let response = await hubClient.getHistory() + expect(response.data.history.length).toBe(0) + + const from = 'pinkbank' + const to = 'redbank' + const proxyId = 'redbankproxy' + let proxyClient + + try { + proxyClient = createProxyClient({ proxyCacheConfig: proxyCache, logger: console }) + + // register proxy representative for redbank + const isAdded = await proxyClient.addDfspIdToProxyMapping(to, proxyId) + + // assert that the proxy representative is mapped in the cache + const key = `dfsp:${to}` + const proxy = await proxyClient.redisClient.get(key) + expect(isAdded).toBe(true) + expect(proxy).toBe(proxyId) + + // create subject fxquote + const fxQuotesPostPayload = mocks.postFxQuotesPayloadDto({ initiatingFsp: to, counterPartyFsp: from }) + await createFxQuote(to, from, fxQuotesPostPayload) + + const conversionRequestId = fxQuotesPostPayload.conversionRequestId + const payload = { + errorInformation: { + errorCode: '3100', + errorDescription: 'Generic validation error' + } + } + const message = mocks.kafkaMessageFxPayloadPutDto({ from, to, id: conversionRequestId, payloadBase64: base64Encode(JSON.stringify(payload)) }) + const { topic, config } = kafkaConfig.PRODUCER.FX_QUOTE.PUT + const topicConfig = dto.topicConfigDto({ topicName: topic }) + + const isOk = await Producer.produceMessage(message, topicConfig, config) + expect(isOk).toBe(true) + + response = await getResponseWithRetry() + expect(response.data.history.length).toBe(1) + + // assert that the error callback was received by the proxy + const request = response.data.history[0] + expect(request.method).toBe('PUT') + 
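+      // The receiving DFSP (redbank) is only reachable via its proxy (redbankproxy),
+      // so the error callback is expected to be delivered to the proxy's endpoint:
+      // the URL below is prefixed with the proxy id, while the fspiop-source and
+      // fspiop-destination headers still carry the original participants.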
expect(request.url).toBe(`/${proxyId}/fxQuotes/${conversionRequestId}/error`) + expect(request.body.errorInformation.errorCode).toBe('3100') + expect(request.body.errorInformation.errorDescription).toBe('Generic validation error') + expect(request.headers['fspiop-source']).toBe(from) + expect(request.headers['fspiop-destination']).toBe(to) + + // check fxquote error details were saved to db + const fxQuoteErrorDetails = await db._getFxQuoteErrorDetails(conversionRequestId) + expect(fxQuoteErrorDetails).toEqual({ + conversionRequestId, + fxQuoteResponseId: null, + fxQuoteErrorId: expect.anything(), + errorCode: Number(payload.errorInformation.errorCode), + errorDescription: payload.errorInformation.errorDescription, + createdDate: expect.anything() + }) + } finally { + await proxyClient.removeDfspIdFromProxyMapping(to) + await proxyClient.disconnect() + } + }) + + /** + * Produces a GET /fxQuotes/{ID} request for a dfsp that is registered in the hub + * Expects a GET /fxQuotes/{ID} request at the destination's endpoint + */ + test('should GET fx quote (no proxy)', async () => { + let response = await hubClient.getHistory() + expect(response.data.history.length).toBe(0) + + const from = 'pinkbank' + const to = 'greenbank' + + // create subject fxquote + const fxQuotesPostPayload = mocks.postFxQuotesPayloadDto({ initiatingFsp: to, counterPartyFsp: from }) + await createFxQuote(to, from, fxQuotesPostPayload) + + // get the fxquote + const conversionRequestId = fxQuotesPostPayload.conversionRequestId + const message = mocks.kafkaMessageFxPayloadGetDto({ from, to, id: conversionRequestId }) + const { topic, config } = kafkaConfig.PRODUCER.FX_QUOTE.GET + const topicConfig = dto.topicConfigDto({ topicName: topic }) + + const isOk = await Producer.produceMessage(message, topicConfig, config) + expect(isOk).toBe(true) + + response = await getResponseWithRetry() + expect(response.data.history.length).toBe(1) + + // assert that the callback was received by the destination dfsp's endpoint + const request = response.data.history[0] + expect(request.method).toBe('GET') + expect(request.url).toBe(`/${to}/fxQuotes/${conversionRequestId}`) + expect(request.body).toBeUndefined() + expect(request.headers['fspiop-source']).toBe(from) + expect(request.headers['fspiop-destination']).toBe(to) + }) + + /** + * Produces a GET /fxQuotes/{ID} for a proxied dfsp + * Expects a GET /fxQuotes/{ID} request at the proxy's endpoint + */ + test('should GET fx quote (proxied)', async () => { + let response = await hubClient.getHistory() + expect(response.data.history.length).toBe(0) + + const from = 'pinkbank' + const to = 'redbank' + const proxyId = 'redbankproxy' + let proxyClient + + try { + proxyClient = createProxyClient({ proxyCacheConfig: proxyCache, logger: console }) + + // register proxy representative for redbank + const isAdded = await proxyClient.addDfspIdToProxyMapping(to, proxyId) + + // assert that the proxy representative is mapped in the cache + const key = `dfsp:${to}` + const proxy = await proxyClient.redisClient.get(key) + expect(isAdded).toBe(true) + expect(proxy).toBe(proxyId) + + // create subject fxquote + const fxQuotesPostPayload = mocks.postFxQuotesPayloadDto({ initiatingFsp: to, counterPartyFsp: from }) + await createFxQuote(to, from, fxQuotesPostPayload) + + const conversionRequestId = fxQuotesPostPayload.conversionRequestId + const message = mocks.kafkaMessageFxPayloadGetDto({ from, to, id: conversionRequestId }) + const { topic, config } = kafkaConfig.PRODUCER.FX_QUOTE.GET + const topicConfig = 
dto.topicConfigDto({ topicName: topic }) + + const isOk = await Producer.produceMessage(message, topicConfig, config) + expect(isOk).toBe(true) + + response = await getResponseWithRetry() + expect(response.data.history.length).toBe(1) + + // assert that the callback was received by the proxy + const request = response.data.history[0] + expect(request.method).toBe('GET') + expect(request.url).toBe(`/${proxyId}/fxQuotes/${conversionRequestId}`) + expect(request.body).toBeUndefined() + expect(request.headers['fspiop-source']).toBe(from) + expect(request.headers['fspiop-destination']).toBe(to) + } finally { + await proxyClient.removeDfspIdFromProxyMapping(to) + await proxyClient.disconnect() + } + }) +}) diff --git a/test/integration/postRequest.test.js b/test/integration/postRequest.test.js index 93f1c54b..3d2ded3d 100644 --- a/test/integration/postRequest.test.js +++ b/test/integration/postRequest.test.js @@ -29,7 +29,6 @@ const { Producer } = require('@mojaloop/central-services-stream').Util const { createProxyClient } = require('../../src/lib/proxy') - const Config = require('../../src/lib/config') const dto = require('../../src/lib/dto') const mocks = require('../mocks') @@ -38,28 +37,18 @@ const uuid = require('crypto').randomUUID const { wrapWithRetries } = require('../util/helper') const Database = require('../../src/data/cachedDatabase') -const hubClient = new MockServerClient() -const base64Encode = (data) => Buffer.from(data).toString('base64') const TEST_TIMEOUT = 20_000 -const retryDelay = process?.env?.TEST_INT_RETRY_DELAY || 1 -const retryCount = process?.env?.TEST_INT_RETRY_COUNT || 20 -const retryOpts = { - retries: retryCount, - minTimeout: retryDelay, - maxTimeout: retryDelay -} -const wrapWithRetriesConf = { - remainingRetries: retryOpts?.retries || 10, // default 10 - timeout: retryOpts?.maxTimeout || 2 // default 2 -} - -let db - describe('POST request tests --> ', () => { jest.setTimeout(TEST_TIMEOUT) + let db const config = new Config() const { kafkaConfig, proxyCache } = config + const hubClient = new MockServerClient() + const retryConf = { + remainingRetries: process?.env?.TEST_INT_RETRY_COUNT || 20, + timeout: process?.env?.TEST_INT_RETRY_DELAY || 1 + } beforeEach(async () => { await hubClient.clearHistory() @@ -77,6 +66,16 @@ describe('POST request tests --> ', () => { await Producer.disconnect() }) + const base64Encode = (data) => Buffer.from(data).toString('base64') + + const getResponseWithRetry = async () => { + return wrapWithRetries(() => hubClient.getHistory(), + retryConf.remainingRetries, + retryConf.timeout, + (result) => result.data.history.length > 0 + ) + } + test('should pass validation for POST /quotes request if request amount currency is registered (position account exists) for the payer participant', async () => { let response = await hubClient.getHistory() expect(response.data.history.length).toBe(0) @@ -85,24 +84,12 @@ describe('POST request tests --> ', () => { const topicConfig = dto.topicConfigDto({ topicName: topic }) const from = 'pinkbank' const to = 'greenbank' - const payload = { - quoteId: uuid(), - transactionId: uuid(), - amountType: 'SEND', - amount: { amount: '100', currency: 'USD' }, - transactionType: { scenario: 'DEPOSIT', initiator: 'PAYER', initiatorType: 'CONSUMER' }, - payer: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '987654321', fspId: from } }, - payee: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '123456789', fspId: to } } - } + const payload = mocks.postQuotesPayloadDto({ from, to }) const 
message = mocks.kafkaMessagePayloadPostDto({ from, to, id: payload.quoteId, payloadBase64: base64Encode(JSON.stringify(payload)) }) const isOk = await Producer.produceMessage(message, topicConfig, config) expect(isOk).toBe(true) - response = await wrapWithRetries(() => hubClient.getHistory(), - wrapWithRetriesConf.remainingRetries, - wrapWithRetriesConf.timeout, - (result) => result.data.history.length > 0 - ) + response = await getResponseWithRetry() expect(response.data.history.length).toBeGreaterThan(0) const { url } = response.data.history[0] @@ -116,7 +103,7 @@ describe('POST request tests --> ', () => { const to = 'greenbank' let proxyClient try { - proxyClient = await createProxyClient({ proxyCacheConfig: proxyCache, logger: console }) + proxyClient = createProxyClient({ proxyCacheConfig: proxyCache, logger: console }) const { topic, config } = kafkaConfig.PRODUCER.QUOTE.POST const topicConfig = dto.topicConfigDto({ topicName: topic }) @@ -124,24 +111,12 @@ describe('POST request tests --> ', () => { const proxyId2 = 'proxyRB' await proxyClient.addDfspIdToProxyMapping(to, proxyId1) await proxyClient.addDfspIdToProxyMapping(from, proxyId2) - const payload = { - quoteId: uuid(), - transactionId: uuid(), - amountType: 'SEND', - amount: { amount: '100', currency: 'USD' }, - transactionType: { scenario: 'DEPOSIT', initiator: 'PAYER', initiatorType: 'CONSUMER' }, - payer: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '987654321', fspId: from } }, - payee: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '123456789', fspId: to } } - } + const payload = mocks.postQuotesPayloadDto({ from, to }) const message = mocks.kafkaMessagePayloadPostDto({ from, to, id: payload.quoteId, payloadBase64: base64Encode(JSON.stringify(payload)) }) const isOk = await Producer.produceMessage(message, topicConfig, config) expect(isOk).toBe(true) - response = await wrapWithRetries(() => hubClient.getHistory(), - wrapWithRetriesConf.remainingRetries, - wrapWithRetriesConf.timeout, - (result) => result.data.history.length > 0 - ) + response = await getResponseWithRetry() expect(response.data.history.length).toBe(1) const { url } = response.data.history[0] @@ -161,24 +136,12 @@ describe('POST request tests --> ', () => { const topicConfig = dto.topicConfigDto({ topicName: topic }) const from = 'pinkbank' const to = 'greenbank' - const payload = { - quoteId: uuid(), - transactionId: uuid(), - amountType: 'SEND', - amount: { amount: '100', currency: 'GBP' }, - transactionType: { scenario: 'DEPOSIT', initiator: 'PAYER', initiatorType: 'CONSUMER' }, - payer: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '987654321', fspId: from } }, - payee: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '123456789', fspId: to } } - } + const payload = mocks.postQuotesPayloadDto({ from, to, amount: { amount: '100', currency: 'GBP' } }) const message = mocks.kafkaMessagePayloadPostDto({ from, to, id: payload.quoteId, payloadBase64: base64Encode(JSON.stringify(payload)) }) const isOk = await Producer.produceMessage(message, topicConfig, config) expect(isOk).toBe(true) - response = await wrapWithRetries(() => hubClient.getHistory(), - wrapWithRetriesConf.remainingRetries, - wrapWithRetriesConf.timeout, - (result) => result.data.history.length > 0 - ) + response = await getResponseWithRetry() expect(response.data.history.length).toBe(1) const { url, body } = response.data.history[0] @@ -195,24 +158,16 @@ describe('POST request tests --> ', () => { const topicConfig = dto.topicConfigDto({ topicName: 
topic }) const from = 'pinkbank' const to = 'greenbank' - const payload = { - quoteId: uuid(), - transactionId: uuid(), - amountType: 'SEND', - amount: { amount: '100', currency: 'USD' }, - transactionType: { scenario: 'DEPOSIT', initiator: 'PAYER', initiatorType: 'CONSUMER' }, - payer: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '987654321', fspId: from }, supportedCurrencies: ['USD', 'ZMW'] }, - payee: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '123456789', fspId: to } } - } + const payload = mocks.postQuotesPayloadDto({ + from, + to, + payer: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '987654321', fspId: from }, supportedCurrencies: ['USD', 'ZMW'] } + }) const message = mocks.kafkaMessagePayloadPostDto({ from, to, id: payload.quoteId, payloadBase64: base64Encode(JSON.stringify(payload)) }) const isOk = await Producer.produceMessage(message, topicConfig, config) expect(isOk).toBe(true) - response = await wrapWithRetries(() => hubClient.getHistory(), - wrapWithRetriesConf.remainingRetries, - wrapWithRetriesConf.timeout, - (result) => result.data.history.length > 0 - ) + response = await getResponseWithRetry() expect(response.data.history.length).toBe(1) const { url } = response.data.history[0] @@ -227,24 +182,16 @@ describe('POST request tests --> ', () => { const topicConfig = dto.topicConfigDto({ topicName: topic }) const from = 'pinkbank' const to = 'greenbank' - const payload = { - quoteId: uuid(), - transactionId: uuid(), - amountType: 'SEND', - amount: { amount: '100', currency: 'USD' }, - transactionType: { scenario: 'DEPOSIT', initiator: 'PAYER', initiatorType: 'CONSUMER' }, - payer: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '987654321', fspId: from }, supportedCurrencies: ['USD', 'ZMW', 'GBP'] }, - payee: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '123456789', fspId: to } } - } + const payload = mocks.postQuotesPayloadDto({ + from, + to, + payer: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '987654321', fspId: from }, supportedCurrencies: ['USD', 'ZMW', 'GBP'] } + }) const message = mocks.kafkaMessagePayloadPostDto({ from, to, id: payload.quoteId, payloadBase64: base64Encode(JSON.stringify(payload)) }) const isOk = await Producer.produceMessage(message, topicConfig, config) expect(isOk).toBe(true) - response = await wrapWithRetries(() => hubClient.getHistory(), - wrapWithRetriesConf.remainingRetries, - wrapWithRetriesConf.timeout, - (result) => result.data.history.length > 0 - ) + response = await getResponseWithRetry() expect(response.data.history.length).toBe(1) const { url, body } = response.data.history[0] @@ -253,6 +200,31 @@ describe('POST request tests --> ', () => { expect(body.errorInformation.errorDescription).toBe(`Payer FSP ID not found - Unsupported participant '${message.from}'`) }) + test('should forward POST /quotes request to payee dfsp registered in the hub', async () => { + let response = await hubClient.getHistory() + expect(response.data.history.length).toBe(0) + + const { topic, config } = kafkaConfig.PRODUCER.QUOTE.POST + const topicConfig = dto.topicConfigDto({ topicName: topic }) + const from = 'pinkbank' + const to = 'greenbank' + + const payload = mocks.postQuotesPayloadDto({ from, to }) + const message = mocks.kafkaMessagePayloadPostDto({ from, to, id: payload.quoteId, payloadBase64: base64Encode(JSON.stringify(payload)) }) + const isOk = await Producer.produceMessage(message, topicConfig, config) + expect(isOk).toBe(true) + + response = await getResponseWithRetry() + 
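+    // The mock hub may have recorded an extra request by the time the poll resolves,
+    // so the assertion below tolerates a history length of 1 or 2; the first entry
+    // is still expected to be the forwarded POST /quotes request checked next.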
expect([1, 2]).toContain(response.data.history.length) + + const request = response.data.history[0] + expect(request.method).toBe('POST') + expect(request.url).toBe(`/${to}/quotes`) + expect(request.body).toEqual(payload) + expect(request.headers['fspiop-source']).toBe(from) + expect(request.headers['fspiop-destination']).toBe(to) + }) + test('should forward POST /quotes request to proxy if the payee dfsp is not registered in the hub', async () => { let response = await hubClient.getHistory() expect(response.data.history.length).toBe(0) @@ -268,7 +240,7 @@ describe('POST request tests --> ', () => { let proxyClient try { - proxyClient = await createProxyClient({ proxyCacheConfig: proxyCache, logger: console }) + proxyClient = createProxyClient({ proxyCacheConfig: proxyCache, logger: console }) const isAdded = await proxyClient.addDfspIdToProxyMapping(to, proxyId) // assert that the proxy representative is mapped in the cache @@ -278,24 +250,12 @@ describe('POST request tests --> ', () => { expect(isAdded).toBe(true) expect(representative).toBe(proxyId) - const payload = { - quoteId: uuid(), - transactionId: uuid(), - amountType: 'SEND', - amount: { amount: '100', currency: 'USD' }, - transactionType: { scenario: 'DEPOSIT', initiator: 'PAYER', initiatorType: 'CONSUMER' }, - payer: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '987654321', fspId: from } }, - payee: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '123456789', fspId: to } } - } + const payload = mocks.postQuotesPayloadDto({ from, to }) const message = mocks.kafkaMessagePayloadPostDto({ from, to, id: payload.quoteId, payloadBase64: base64Encode(JSON.stringify(payload)) }) const isOk = await Producer.produceMessage(message, topicConfig, config) expect(isOk).toBe(true) - response = await wrapWithRetries(() => hubClient.getHistory(), - wrapWithRetriesConf.remainingRetries, - wrapWithRetriesConf.timeout, - (result) => result.data.history.length > 0 - ) + response = await getResponseWithRetry() expect([1, 2]).toContain(response.data.history.length) const request = response.data.history[0] @@ -310,96 +270,28 @@ describe('POST request tests --> ', () => { } }) - test('should forward POST /fxQuotes request to proxy if the payee dfsp is not registered in the hub', async () => { + test('should forward POST /bulkQuotes request to payee dfsp registered in the hub', async () => { let response = await hubClient.getHistory() expect(response.data.history.length).toBe(0) - const { topic, config } = kafkaConfig.PRODUCER.FX_QUOTE.POST + const { topic, config } = kafkaConfig.PRODUCER.BULK_QUOTE.POST const topicConfig = dto.topicConfigDto({ topicName: topic }) const from = 'pinkbank' - // redbank not in the hub db - const to = 'redbank' - - // register proxy representative for redbank - const proxyId = 'redbankproxy' - let proxyClient - - try { - proxyClient = await createProxyClient({ proxyCacheConfig: proxyCache, logger: console }) - const isAdded = await proxyClient.addDfspIdToProxyMapping(to, proxyId) - - // assert that the proxy representative is mapped in the cache - const key = `dfsp:${to}` - const representative = await proxyClient.redisClient.get(key) - - expect(isAdded).toBe(true) - expect(representative).toBe(proxyId) - - const payload = { - conversionRequestId: uuid(), - conversionTerms: { - conversionId: uuid(), - initiatingFsp: from, - counterPartyFsp: to, - amountType: 'SEND', - sourceAmount: { - currency: 'USD', - amount: 300 - }, - targetAmount: { - currency: 'TZS' - }, - expiration: new Date(Date.now() + 5 * 60 
* 1000).toISOString(), - extensionList: { - extension: [ - { - key: 'Test', - value: 'Data' - } - ] - } - } - } - const message = mocks.kafkaMessagePayloadPostDto({ from, to, id: payload.conversionRequestId, payloadBase64: base64Encode(JSON.stringify(payload)) }) - const isOk = await Producer.produceMessage(message, topicConfig, config) - expect(isOk).toBe(true) + const to = 'greenbank' - response = await wrapWithRetries(() => hubClient.getHistory(), - wrapWithRetriesConf.remainingRetries, - wrapWithRetriesConf.timeout, - (result) => result.data.history.length > 0 - ) - expect(response.data.history.length).toBe(1) + const payload = mocks.postBulkQuotesPayloadDto({ from, to }) + const message = mocks.kafkaMessagePayloadPostDto({ from, to, id: payload.quoteId, payloadBase64: base64Encode(JSON.stringify(payload)) }) + const isOk = await Producer.produceMessage(message, topicConfig, config) + expect(isOk).toBe(true) - const request = response.data.history[0] - expect(request.url).toBe(`/${proxyId}/fxQuotes`) - expect(request.body).toEqual(payload) - expect(request.headers['fspiop-source']).toBe(from) - expect(request.headers['fspiop-destination']).toBe(to) + response = await getResponseWithRetry() + expect(response.data.history.length).toBe(1) - // check fx quote details were saved to db - const fxQuoteDetails = await db._getFxQuoteDetails(payload.conversionRequestId) - expect(fxQuoteDetails).toEqual({ - conversionRequestId: payload.conversionRequestId, - conversionId: payload.conversionTerms.conversionId, - determiningTransferId: null, - amountTypeId: 1, - initiatingFsp: payload.conversionTerms.initiatingFsp, - counterPartyFsp: payload.conversionTerms.counterPartyFsp, - sourceAmount: payload.conversionTerms.sourceAmount.amount, - sourceCurrency: payload.conversionTerms.sourceAmount.currency, - targetAmount: null, - targetCurrency: payload.conversionTerms.targetAmount.currency, - extensions: expect.anything(), - expirationDate: expect.anything(), - createdDate: expect.anything() - }) - expect(JSON.parse(fxQuoteDetails.extensions)).toEqual(payload.conversionTerms.extensionList.extension) - } finally { - await proxyClient.removeDfspIdFromProxyMapping(to) - await proxyClient.removeDfspIdFromProxyMapping(from) - await proxyClient.disconnect() - } + const request = response.data.history[0] + expect(request.url).toBe(`/${to}/bulkQuotes`) + expect(request.body).toEqual(payload) + expect(request.headers['fspiop-source']).toBe(from) + expect(request.headers['fspiop-destination']).toBe(to) }) test('should forward POST /bulkQuotes request to proxy if the payee dfsp is not registered in the hub', async () => { @@ -417,7 +309,7 @@ describe('POST request tests --> ', () => { let proxyClient try { - proxyClient = await createProxyClient({ proxyCacheConfig: proxyCache, logger: console }) + proxyClient = createProxyClient({ proxyCacheConfig: proxyCache, logger: console }) const isAdded = await proxyClient.addDfspIdToProxyMapping(to, proxyId) // assert that the proxy representative is mapped in the cache @@ -445,11 +337,7 @@ describe('POST request tests --> ', () => { const isOk = await Producer.produceMessage(message, topicConfig, config) expect(isOk).toBe(true) - response = await wrapWithRetries(() => hubClient.getHistory(), - wrapWithRetriesConf.remainingRetries, - wrapWithRetriesConf.timeout, - (result) => result.data.history.length > 0 - ) + response = await getResponseWithRetry() expect(response.data.history.length).toBe(1) const request = response.data.history[0] diff --git 
a/test/integration/putCallback.test.js b/test/integration/putCallback.test.js index a10f0a38..454a2969 100644 --- a/test/integration/putCallback.test.js +++ b/test/integration/putCallback.test.js @@ -28,66 +28,30 @@ -------------- ******/ +const uuid = require('crypto').randomUUID const { Producer } = require('@mojaloop/central-services-stream').Util const { createProxyClient } = require('../../src/lib/proxy') - const Config = require('../../src/lib/config') const dto = require('../../src/lib/dto') const mocks = require('../mocks') const MockServerClient = require('./mockHttpServer/MockServerClient') -const uuid = require('crypto').randomUUID const { wrapWithRetries } = require('../util/helper') const Database = require('../../src/data/cachedDatabase') -const hubClient = new MockServerClient() -const base64Encode = (data) => Buffer.from(data).toString('base64') const TEST_TIMEOUT = 20_000 const WAIT_TIMEOUT = 3_000 -const wait = (ms) => new Promise(resolve => setTimeout(resolve, ms)) -const retryDelay = process?.env?.TEST_INT_RETRY_DELAY || 1 -const retryCount = process?.env?.TEST_INT_RETRY_COUNT || 20 -const retryOpts = { - retries: retryCount, - minTimeout: retryDelay, - maxTimeout: retryDelay -} -const wrapWithRetriesConf = { - remainingRetries: retryOpts?.retries || 10, // default 10 - timeout: retryOpts?.maxTimeout || 2 // default 2 -} - -/** - * Publishes a test 'POST quote' message to the Kafka topic - */ -const createQuote = async ({ - from = 'pinkbank', - to = 'greenbank', - amount = { amount: '100', currency: 'USD' }, - amountType = 'SEND' -} = {}) => { - const { kafkaConfig } = new Config() - const { topic, config } = kafkaConfig.PRODUCER.QUOTE.POST - const topicConfig = dto.topicConfigDto({ topicName: topic }) - const payload = { - quoteId: uuid(), - transactionId: uuid(), - amountType, - amount, - transactionType: { scenario: 'DEPOSIT', initiator: 'PAYER', initiatorType: 'CONSUMER' }, - payee: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '123456789', fspId: to } }, - payer: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '987654321', fspId: from } } - } - const message = mocks.kafkaMessagePayloadPostDto({ from, to, id: payload.quoteId, payloadBase64: base64Encode(JSON.stringify(payload)) }) - const isOk = await Producer.produceMessage(message, topicConfig, config) - expect(isOk).toBe(true) - return payload -} - describe('PUT callback Tests --> ', () => { + jest.setTimeout(TEST_TIMEOUT) + + let db const config = new Config() const { kafkaConfig, proxyCache } = config - let db + const hubClient = new MockServerClient() + const retryConf = { + remainingRetries: process?.env?.TEST_INT_RETRY_COUNT || 20, + timeout: process?.env?.TEST_INT_RETRY_DELAY || 1 + } beforeEach(async () => { await hubClient.clearHistory() @@ -105,6 +69,36 @@ describe('PUT callback Tests --> ', () => { await Producer.disconnect() }) + const base64Encode = (data) => Buffer.from(data).toString('base64') + const wait = (ms) => new Promise(resolve => setTimeout(resolve, ms)) + + /** + * Publishes a test 'POST quote' message to the Kafka topic + */ + const createQuote = async ({ + from = 'pinkbank', + to = 'greenbank', + amount = { amount: '100', currency: 'USD' }, + amountType = 'SEND' + } = {}) => { + const { kafkaConfig } = new Config() + const { topic, config } = kafkaConfig.PRODUCER.QUOTE.POST + const topicConfig = dto.topicConfigDto({ topicName: topic }) + const payload = mocks.postQuotesPayloadDto({ from, to, amount, amountType }) + const message = mocks.kafkaMessagePayloadPostDto({ 
from, to, id: payload.quoteId, payloadBase64: base64Encode(JSON.stringify(payload)) }) + const isOk = await Producer.produceMessage(message, topicConfig, config) + expect(isOk).toBe(true) + return payload + } + + const getResponseWithRetry = async () => { + return wrapWithRetries(() => hubClient.getHistory(), + retryConf.remainingRetries, + retryConf.timeout, + (result) => result.data.history.length > 0 + ) + } + test('should handle the JWS signing when a switch error event is produced to the PUT topic', async () => { // create test quote to prevent db (row reference) error on PUT request const quoteCreated = await createQuote() @@ -120,12 +114,7 @@ describe('PUT callback Tests --> ', () => { const isOk = await Producer.produceMessage(message, topicConfig, config) expect(isOk).toBe(true) - - response = await wrapWithRetries(() => hubClient.getHistory(), - wrapWithRetriesConf.remainingRetries, - wrapWithRetriesConf.timeout, - (result) => result.data.history.length > 0 - ) + response = await getResponseWithRetry() expect(response.data.history.length).toBe(1) const { headers, url } = response.data.history[0] @@ -134,7 +123,7 @@ describe('PUT callback Tests --> ', () => { const { signature, protectedHeader } = JSON.parse(headers['fspiop-signature']) expect(signature).toBeTruthy() expect(protectedHeader).toBeTruthy() - }, TEST_TIMEOUT) + }) test('should pass validation for PUT /quotes/{ID} request if request transferAmount/payeeReceiveAmount currency is registered (position account exists) for the payee pariticpant', async () => { // create test quote to prevent db (row reference) error on PUT request @@ -147,12 +136,7 @@ describe('PUT callback Tests --> ', () => { const { topic, config } = kafkaConfig.PRODUCER.QUOTE.PUT const topicConfig = dto.topicConfigDto({ topicName: topic }) - const payload = { - transferAmount: { amount: '100', currency: 'USD' }, - payeeReceiveAmount: { amount: '100', currency: 'USD' }, - ilpPacket: 'test', - condition: 'test' - } + const payload = mocks.putQuotesPayloadDto() const message = mocks.kafkaMessagePayloadDto({ from: 'greenbank', to: 'pinkbank', @@ -163,16 +147,12 @@ describe('PUT callback Tests --> ', () => { const isOk = await Producer.produceMessage(message, topicConfig, config) expect(isOk).toBe(true) - response = await wrapWithRetries(() => hubClient.getHistory(), - wrapWithRetriesConf.remainingRetries, - wrapWithRetriesConf.timeout, - (result) => result.data.history.length > 0 - ) + response = await getResponseWithRetry() expect(response.data.history.length).toBe(1) const { url } = response.data.history[0] expect(url).toBe(`/${message.to}/quotes/${message.id}`) - }, TEST_TIMEOUT) + }) test('should pass validation for PUT /quotes/{ID} request if source is proxied participant', async () => { // create test quote to prevent db (row reference) error on PUT request @@ -189,17 +169,12 @@ describe('PUT callback Tests --> ', () => { const proxyId = 'greenbankproxy' let proxyClient try { - proxyClient = await createProxyClient({ proxyCacheConfig: proxyCache, logger: console }) + proxyClient = createProxyClient({ proxyCacheConfig: proxyCache, logger: console }) const isAdded = await proxyClient.addDfspIdToProxyMapping(from, proxyId) expect(isAdded).toBe(true) - const payload = { - transferAmount: { amount: '100', currency: 'USD' }, - payeeReceiveAmount: { amount: '100', currency: 'USD' }, - ilpPacket: 'test', - condition: 'test' - } + const payload = mocks.putQuotesPayloadDto() const message = mocks.kafkaMessagePayloadDto({ - from: 'greenbank', + from, to: 
'pinkbank', id: quoteCreated.quoteId, payloadBase64: base64Encode(JSON.stringify(payload)) @@ -208,11 +183,7 @@ describe('PUT callback Tests --> ', () => { const isOk = await Producer.produceMessage(message, topicConfig, config) expect(isOk).toBe(true) - response = await wrapWithRetries(() => hubClient.getHistory(), - wrapWithRetriesConf.remainingRetries, - wrapWithRetriesConf.timeout, - (result) => result.data.history.length > 0 - ) + response = await getResponseWithRetry() expect(response.data.history.length).toBe(1) const { url } = response.data.history[0] @@ -221,7 +192,7 @@ describe('PUT callback Tests --> ', () => { await proxyClient.removeDfspIdFromProxyMapping(from) await proxyClient.disconnect() } - }, TEST_TIMEOUT) + }) test('should fail validation for PUT /quotes/{ID} request if request transferAmount/payeeReceiveAmount currency is not registered (position account does not exist) for the payee pariticpant', async () => { // test the same scenario with only transferAmount set @@ -235,11 +206,8 @@ describe('PUT callback Tests --> ', () => { const { topic, config } = kafkaConfig.PRODUCER.QUOTE.PUT const topicConfig = dto.topicConfigDto({ topicName: topic }) - const payload = { - transferAmount: { amount: '100', currency: 'ZKW' }, - ilpPacket: 'test', - condition: 'test' - } + const payload = mocks.putQuotesPayloadDto({ transferAmount: { amount: '100', currency: 'ZKW' } }) + delete payload.payeeReceiveAmount let message = mocks.kafkaMessagePayloadDto({ from: 'greenbank', to: 'pinkbank', @@ -250,11 +218,7 @@ describe('PUT callback Tests --> ', () => { let isOk = await Producer.produceMessage(message, topicConfig, config) expect(isOk).toBe(true) - response = await wrapWithRetries(() => hubClient.getHistory(), - wrapWithRetriesConf.remainingRetries, - wrapWithRetriesConf.timeout, - (result) => result.data.history.length > 0 - ) + response = await getResponseWithRetry() expect(response.data.history.length).toBe(1) const { url, body } = response.data.history[0] @@ -280,18 +244,14 @@ describe('PUT callback Tests --> ', () => { isOk = await Producer.produceMessage(message, topicConfig, config) expect(isOk).toBe(true) - response = await wrapWithRetries(() => hubClient.getHistory(), - wrapWithRetriesConf.remainingRetries, - wrapWithRetriesConf.timeout, - (result) => result.data.history.length > 0 - ) + response = await getResponseWithRetry() expect(response.data.history.length).toBe(1) const { url: url2, body: body2 } = response.data.history[0] expect(url2).toBe(`/${message.from}/quotes/${message.id}/error`) expect(body2.errorInformation.errorCode).toBe('3201') expect(body2.errorInformation.errorDescription).toBe(`Destination FSP Error - Unsupported participant '${message.from}'`) - }, TEST_TIMEOUT) + }) test('should forward PUT /quotes/{ID} request to proxy if the payer dfsp is not registered in the hub', async () => { let response = await hubClient.getHistory() @@ -311,7 +271,7 @@ describe('PUT callback Tests --> ', () => { let proxyClient try { - proxyClient = await createProxyClient({ proxyCacheConfig: proxyCache, logger: console }) + proxyClient = createProxyClient({ proxyCacheConfig: proxyCache, logger: console }) const isAdded = await proxyClient.addDfspIdToProxyMapping(to, proxyId) // assert that the proxy representative is mapped in the cache @@ -321,30 +281,12 @@ describe('PUT callback Tests --> ', () => { expect(isAdded).toBe(true) expect(representative).toBe(proxyId) - const quoteId = uuid() - const postPayload = { - quoteId, - transactionId: uuid(), - amountType: 'SEND', - 
amount: { amount: '100', currency: 'USD' }, - transactionType: { scenario: 'DEPOSIT', initiator: 'PAYER', initiatorType: 'CONSUMER' }, - payer: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '987654321', fspId: from } }, - payee: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '123456789', fspId: to } } - } - const postMessage = mocks.kafkaMessagePayloadPostDto({ - from, - to, - id: postPayload.quoteId, - payloadBase64: base64Encode(JSON.stringify(postPayload)) - }) + const postPayload = mocks.postQuotesPayloadDto({ from, to }) + const postMessage = mocks.kafkaMessagePayloadPostDto({ from, to, id: postPayload.quoteId, payloadBase64: base64Encode(JSON.stringify(postPayload)) }) const postIsOk = await Producer.produceMessage(postMessage, postTopicConfig, kafkaConfig.PRODUCER.QUOTE.POST.config) expect(postIsOk).toBe(true) - response = await wrapWithRetries(() => hubClient.getHistory(), - wrapWithRetriesConf.remainingRetries, - wrapWithRetriesConf.timeout, - (result) => result.data.history.length > 0 - ) + response = await getResponseWithRetry() expect([1, 2]).toContain(response.data.history.length) await hubClient.clearHistory() @@ -353,16 +295,12 @@ describe('PUT callback Tests --> ', () => { ilpPacket: 'test', condition: 'test' } - const message = mocks.kafkaMessagePayloadDto({ from, to, id: quoteId, payloadBase64: base64Encode(JSON.stringify(payload)) }) + const message = mocks.kafkaMessagePayloadDto({ from, to, id: postPayload.quoteId, payloadBase64: base64Encode(JSON.stringify(payload)) }) delete message.content.headers.accept const isOk = await Producer.produceMessage(message, topicConfig, config) expect(isOk).toBe(true) - response = await wrapWithRetries(() => hubClient.getHistory(), - wrapWithRetriesConf.remainingRetries, - wrapWithRetriesConf.timeout, - (result) => result.data.history.length > 0 - ) + response = await getResponseWithRetry() expect([1, 2]).toContain(response.data.history.length) const request = response.data.history[0] @@ -373,302 +311,7 @@ describe('PUT callback Tests --> ', () => { } finally { await proxyClient.disconnect() } - }, TEST_TIMEOUT) - - test('should forward PUT /fxQuotes/{ID} request to proxy if the payer dfsp is not registered in the hub', async () => { - let response = await hubClient.getHistory() - expect(response.data.history.length).toBe(0) - - const { topic, config } = kafkaConfig.PRODUCER.FX_QUOTE.PUT - const topicConfig = dto.topicConfigDto({ topicName: topic }) - const postTopicConfig = dto.topicConfigDto({ topicName: kafkaConfig.PRODUCER.FX_QUOTE.POST.topic }) - - const from = 'greenbank' - // redbank not in the hub db - const to = 'redbank' - - // register proxy representative for redbank - const proxyId = 'redbankproxy' - let proxyClient - - try { - proxyClient = await createProxyClient({ proxyCacheConfig: proxyCache, logger: console }) - const isAdded = await proxyClient.addDfspIdToProxyMapping(to, proxyId) - - // assert that the proxy representative is mapped in the cache - const key = `dfsp:${to}` - const representative = await proxyClient.redisClient.get(key) - - expect(isAdded).toBe(true) - expect(representative).toBe(proxyId) - const conversionRequestId = uuid() - const postPayload = { - conversionRequestId, - conversionTerms: { - conversionId: uuid(), - initiatingFsp: from, - counterPartyFsp: to, - amountType: 'SEND', - sourceAmount: { - currency: 'USD', - amount: 300 - }, - targetAmount: { - currency: 'TZS' - }, - expiration: new Date(Date.now() + 5 * 60 * 1000).toISOString(), - extensionList: { - extension: [ - { - 
key: 'Test', - value: 'Data' - } - ] - } - } - } - const postMessage = mocks.kafkaMessagePayloadPostDto({ from, to, id: conversionRequestId, payloadBase64: base64Encode(JSON.stringify(postPayload)) }) - const postIsOk = await Producer.produceMessage(postMessage, postTopicConfig, kafkaConfig.PRODUCER.FX_QUOTE.POST.config) - - expect(postIsOk).toBe(true) - - response = await wrapWithRetries(() => hubClient.getHistory(), - wrapWithRetriesConf.remainingRetries, - wrapWithRetriesConf.timeout, - (result) => result.data.history.length > 0 - ) - expect(response.data.history.length).toBe(1) - - // check fx quote details were saved to db - const fxQuoteDetails = await db._getFxQuoteDetails(postPayload.conversionRequestId) - expect(fxQuoteDetails).toEqual({ - conversionRequestId: postPayload.conversionRequestId, - conversionId: postPayload.conversionTerms.conversionId, - determiningTransferId: null, - amountTypeId: 1, - initiatingFsp: postPayload.conversionTerms.initiatingFsp, - counterPartyFsp: postPayload.conversionTerms.counterPartyFsp, - sourceAmount: postPayload.conversionTerms.sourceAmount.amount, - sourceCurrency: postPayload.conversionTerms.sourceAmount.currency, - targetAmount: null, - targetCurrency: postPayload.conversionTerms.targetAmount.currency, - extensions: expect.anything(), - expirationDate: expect.anything(), - createdDate: expect.anything() - }) - expect(JSON.parse(fxQuoteDetails.extensions)).toEqual(postPayload.conversionTerms.extensionList.extension) - await hubClient.clearHistory() - - const payload = { - condition: 'test', - conversionTerms: { - conversionId: uuid(), - initiatingFsp: from, - counterPartyFsp: to, - amountType: 'SEND', - sourceAmount: { amount: 100, currency: 'USD' }, - targetAmount: { amount: 100, currency: 'TZS' }, - expiration: new Date(Date.now() + 5 * 60 * 1000).toISOString(), - charges: [ - { - chargeType: 'TEST', - sourceAmount: { - currency: 'USD', - amount: 1 - }, - targetAmount: { - currency: 'TZS', - amount: 1 - } - } - ], - extensionList: { - extension: [ - { - key: 'Test', - value: 'Data' - } - ] - } - } - } - const message = mocks.kafkaMessagePayloadDto({ from, to, id: conversionRequestId, payloadBase64: base64Encode(JSON.stringify(payload)) }) - delete message.content.headers.accept - const isOk = await Producer.produceMessage(message, topicConfig, config) - expect(isOk).toBe(true) - - response = await wrapWithRetries(() => hubClient.getHistory(), - wrapWithRetriesConf.remainingRetries, - wrapWithRetriesConf.timeout, - (result) => result.data.history.length > 0 - ) - expect(response.data.history.length).toBe(1) - - const request = response.data.history[0] - expect(request.url).toBe(`/${proxyId}/fxQuotes/${message.id}`) - expect(request.body).toEqual(payload) - expect(request.headers['fspiop-source']).toBe(from) - expect(request.headers['fspiop-destination']).toBe(to) - - const fxQuoteResponseDetails = await db._getFxQuoteResponseDetails(conversionRequestId) - expect(fxQuoteResponseDetails).toEqual({ - conversionRequestId, - fxQuoteResponseId: expect.anything(), - ilpCondition: payload.condition, - conversionId: payload.conversionTerms.conversionId, - amountTypeId: 1, - determiningTransferId: null, - counterPartyFsp: payload.conversionTerms.counterPartyFsp, - initiatingFsp: payload.conversionTerms.initiatingFsp, - sourceAmount: payload.conversionTerms.sourceAmount.amount, - sourceCurrency: payload.conversionTerms.sourceAmount.currency, - targetAmount: payload.conversionTerms.targetAmount.amount, - targetCurrency: 
payload.conversionTerms.targetAmount.currency, - expirationDate: expect.anything(), - createdDate: expect.anything(), - charges: expect.anything(), - extensions: expect.anything() - }) - expect(JSON.parse(fxQuoteResponseDetails.extensions)).toEqual(payload.conversionTerms.extensionList.extension) - const charges = JSON.parse(fxQuoteResponseDetails.charges) - const expectedCharges = charges.map(charge => ({ - chargeType: charge.chargeType, - sourceAmount: { - currency: charge.sourceCurrency, - amount: charge.sourceAmount - }, - targetAmount: { - currency: charge.targetCurrency, - amount: charge.targetAmount - } - })) - expect(expectedCharges).toEqual(payload.conversionTerms.charges) - } finally { - await proxyClient.disconnect() - } - }, TEST_TIMEOUT) - - test('should forward PUT /fxQuotes/{ID}/error request to proxy if the payer dfsp is not registered in the hub', async () => { - let response = await hubClient.getHistory() - expect(response.data.history.length).toBe(0) - - const { topic, config } = kafkaConfig.PRODUCER.FX_QUOTE.PUT - const topicConfig = dto.topicConfigDto({ topicName: topic }) - const postTopicConfig = dto.topicConfigDto({ topicName: kafkaConfig.PRODUCER.FX_QUOTE.POST.topic }) - - const from = 'greenbank' - // redbank not in the hub db - const to = 'redbank' - - // register proxy representative for redbank - const proxyId = 'redbankproxy' - let proxyClient - - try { - proxyClient = await createProxyClient({ proxyCacheConfig: proxyCache, logger: console }) - const isAdded = await proxyClient.addDfspIdToProxyMapping(to, proxyId) - - // assert that the proxy representative is mapped in the cache - const key = `dfsp:${to}` - const representative = await proxyClient.redisClient.get(key) - - expect(isAdded).toBe(true) - expect(representative).toBe(proxyId) - const conversionRequestId = uuid() - const postPayload = { - conversionRequestId, - conversionTerms: { - conversionId: uuid(), - initiatingFsp: from, - counterPartyFsp: to, - amountType: 'SEND', - sourceAmount: { - currency: 'USD', - amount: 300 - }, - targetAmount: { - currency: 'TZS' - }, - expiration: new Date(Date.now() + 5 * 60 * 1000).toISOString(), - extensionList: { - extension: [ - { - key: 'Test', - value: 'Data' - } - ] - } - } - } - const postMessage = mocks.kafkaMessagePayloadPostDto({ from, to, id: conversionRequestId, payloadBase64: base64Encode(JSON.stringify(postPayload)) }) - const postIsOk = await Producer.produceMessage(postMessage, postTopicConfig, kafkaConfig.PRODUCER.FX_QUOTE.POST.config) - - expect(postIsOk).toBe(true) - - response = await wrapWithRetries(() => hubClient.getHistory(), - wrapWithRetriesConf.remainingRetries, - wrapWithRetriesConf.timeout, - (result) => result.data.history.length > 0 - ) - expect(response.data.history.length).toBe(1) - - // check fx quote details were saved to db - const fxQuoteDetails = await db._getFxQuoteDetails(postPayload.conversionRequestId) - expect(fxQuoteDetails).toEqual({ - conversionRequestId: postPayload.conversionRequestId, - conversionId: postPayload.conversionTerms.conversionId, - determiningTransferId: null, - amountTypeId: 1, - initiatingFsp: postPayload.conversionTerms.initiatingFsp, - counterPartyFsp: postPayload.conversionTerms.counterPartyFsp, - sourceAmount: postPayload.conversionTerms.sourceAmount.amount, - sourceCurrency: postPayload.conversionTerms.sourceAmount.currency, - targetAmount: null, - targetCurrency: postPayload.conversionTerms.targetAmount.currency, - extensions: expect.anything(), - expirationDate: expect.anything(), - createdDate: 
expect.anything() - }) - expect(JSON.parse(fxQuoteDetails.extensions)).toEqual(postPayload.conversionTerms.extensionList.extension) - await hubClient.clearHistory() - - const payload = { - errorInformation: { - errorCode: '5000', - errorDescription: 'Test error' - } - } - const message = mocks.kafkaMessagePayloadDto({ from, to, id: conversionRequestId, payloadBase64: base64Encode(JSON.stringify(payload)) }) - delete message.content.headers.accept - const isOk = await Producer.produceMessage(message, topicConfig, config) - expect(isOk).toBe(true) - - response = await wrapWithRetries(() => hubClient.getHistory(), - wrapWithRetriesConf.remainingRetries, - wrapWithRetriesConf.timeout, - (result) => result.data.history.length > 0 - ) - expect(response.data.history.length).toBe(1) - - const request = response.data.history[0] - expect(request.url).toBe(`/${proxyId}/fxQuotes/${message.id}/error`) - expect(request.body).toEqual(payload) - expect(request.headers['fspiop-source']).toBe(from) - expect(request.headers['fspiop-destination']).toBe(to) - - const fxQuoteErrorDetails = await db._getFxQuoteErrorDetails(conversionRequestId) - expect(fxQuoteErrorDetails).toEqual({ - conversionRequestId, - fxQuoteResponseId: null, - fxQuoteErrorId: expect.anything(), - errorCode: Number(payload.errorInformation.errorCode), - errorDescription: payload.errorInformation.errorDescription, - createdDate: expect.anything() - }) - } finally { - await proxyClient.disconnect() - } - }, TEST_TIMEOUT) + }) test('should forward PUT /bulkQuotes/{ID} request to proxy if the payer dfsp is not registered in the hub', async () => { let response = await hubClient.getHistory() @@ -687,7 +330,7 @@ describe('PUT callback Tests --> ', () => { let proxyClient try { - proxyClient = await createProxyClient({ proxyCacheConfig: proxyCache, logger: console }) + proxyClient = createProxyClient({ proxyCacheConfig: proxyCache, logger: console }) const isAdded = await proxyClient.addDfspIdToProxyMapping(to, proxyId) // assert that the proxy representative is mapped in the cache @@ -698,67 +341,32 @@ describe('PUT callback Tests --> ', () => { expect(representative).toBe(proxyId) const bulkQuoteId = uuid() - const quoteId = uuid() - const postPayload = { - bulkQuoteId, - payer: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '987654321', fspId: from } }, - individualQuotes: [ - { - quoteId, - transactionId: uuid(), - payee: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '123456789', fspId: to } }, - amountType: 'SEND', - amount: { amount: '100', currency: 'USD' }, - transactionType: { scenario: 'DEPOSIT', initiator: 'PAYER', initiatorType: 'CONSUMER' } - } - ] - } + const quoteIds = [uuid()] + const postPayload = mocks.postBulkQuotesPayloadDto({ from, to, bulkQuoteId, quoteIds }) const postMessage = mocks.kafkaMessagePayloadPostDto({ from, to, id: null, payloadBase64: base64Encode(JSON.stringify(postPayload)) }) const postIsOk = await Producer.produceMessage(postMessage, postTopicConfig, kafkaConfig.PRODUCER.BULK_QUOTE.POST.config) expect(postIsOk).toBe(true) - response = await wrapWithRetries(() => hubClient.getHistory(), - wrapWithRetriesConf.remainingRetries, - wrapWithRetriesConf.timeout, - (result) => result.data.history.length > 0 - ) + response = await getResponseWithRetry() expect(response.data.history.length).toBe(1) await hubClient.clearHistory() - const payload = { - individualQuoteResults: [ - { - quoteId, - payee: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '123456789', fspId: to } }, - 
transferAmount: { amount: '100', currency: 'USD' }, - payeeReceiveAmount: { amount: '100', currency: 'USD' }, - payeeFspFee: { amount: '0', currency: 'USD' }, - payeeFspCommission: { amount: '0', currency: 'USD' }, - ilpPacket: 'test', - condition: 'test' - } - ], - expiration: new Date(Date.now() + 5 * 60 * 1000).toISOString() - } - const message = mocks.kafkaMessagePayloadDto({ from, to, id: uuid(), payloadBase64: base64Encode(JSON.stringify(payload)) }) + const putPayload = mocks.putBulkQuotesPayloadDto({ to, quoteIds }) + const message = mocks.kafkaMessagePayloadDto({ from, to, id: uuid(), payloadBase64: base64Encode(JSON.stringify(putPayload)) }) delete message.content.headers.accept const isOk = await Producer.produceMessage(message, topicConfig, config) expect(isOk).toBe(true) - response = await wrapWithRetries(() => hubClient.getHistory(), - wrapWithRetriesConf.remainingRetries, - wrapWithRetriesConf.timeout, - (result) => result.data.history.length > 0 - ) + response = await getResponseWithRetry() expect(response.data.history.length).toBe(1) const request = response.data.history[0] expect(request.url).toBe(`/${proxyId}/bulkQuotes/${message.id}`) - expect(request.body).toEqual(payload) + expect(request.body).toEqual(putPayload) expect(request.headers['fspiop-source']).toBe(from) expect(request.headers['fspiop-destination']).toBe(to) } finally { await proxyClient.disconnect() } - }, TEST_TIMEOUT) + }) }) diff --git a/test/integration/scripts/env.sh b/test/integration/scripts/env.sh index e654df22..435c7f3b 100755 --- a/test/integration/scripts/env.sh +++ b/test/integration/scripts/env.sh @@ -1,7 +1,6 @@ #!/bin/bash -BASE_DIR=$(dirname "$0") -DEFAULT_CONFIG_FILE="$BASE_DIR/../../../docker/central-ledger/default.json" +DEFAULT_CONFIG_FILE="./docker/central-ledger/default.json" export HUB_NAME=$(cat "$DEFAULT_CONFIG_FILE" | jq -r '.HUB_PARTICIPANT.NAME') @@ -16,3 +15,5 @@ export CENTRAL_LEDGER_ADMIN_PORT=3001 export CENTRAL_LEDGER_ADMIN_BASE=/ export MIGRATION_TIMEOUT=60 + +source ./scripts/env.sh diff --git a/test/integration/scripts/start.sh b/test/integration/scripts/start.sh index 37c7d96b..51ebdbd6 100755 --- a/test/integration/scripts/start.sh +++ b/test/integration/scripts/start.sh @@ -6,18 +6,19 @@ then exit 1 fi -echo "Starting docker-compose..." -docker-compose up -d - -echo "Services started. Checking status..." -docker-compose ps - pwd SCRIPTS_FOLDER=./test/integration/scripts echo "Loading env vars..." +chmod +x $SCRIPTS_FOLDER/env.sh source $SCRIPTS_FOLDER/env.sh +echo "Starting docker-compose..." +docker-compose up -d + +echo "Services started. Checking status..." +docker-compose ps + echo "Waiting central-ledger migrations for $MIGRATION_TIMEOUT sec..." sleep $MIGRATION_TIMEOUT @@ -25,4 +26,3 @@ echo "Populating test data..." source $SCRIPTS_FOLDER/populateTestData.sh echo "Test environment is ready!" 
- diff --git a/test/mocks.js b/test/mocks.js index 3be1ab08..ca8ca12e 100644 --- a/test/mocks.js +++ b/test/mocks.js @@ -1,8 +1,24 @@ +const uuid = require('crypto').randomUUID const Config = new (require('../src/lib/config'))() const CONTENT_TYPE = 'application/vnd.interoperability.quotes+json;version={{API_VERSION}}' const contentTypeFn = ({ fspiopVersion = 1.0 }) => CONTENT_TYPE.replace('{{API_VERSION}}', fspiopVersion) +const proxyCacheConfigDto = ({ + type = 'redis' +} = {}) => Object.freeze({ + type, + proxyConfig: { + ...(type === 'redis' && { + host: 'localhost', port: 6379 + }), + ...(type === 'redis-cluster' && { + cluster: [{ host: 'localhost', port: 6379 }] + }) + }, + timeout: 5000 // is it used anywhere? +}) + const kafkaMessagePayloadDto = ({ action = 'put', from = Config.hubName, @@ -76,23 +92,172 @@ const kafkaMessagePayloadPostDto = (params = {}) => kafkaMessagePayloadDto({ operationId: 'Quotes' }) -const proxyCacheConfigDto = ({ - type = 'redis' -} = {}) => Object.freeze({ - type, - proxyConfig: { - ...(type === 'redis' && { - host: 'localhost', port: 6379 - }), - ...(type === 'redis-cluster' && { - cluster: [{ host: 'localhost', port: 6379 }] +const kafkaMessageFxPayloadPostDto = (params = {}) => kafkaMessagePayloadDto({ + ...params, + fspiopVersion: '2.0', + action: 'post', + type: 'fxquote', + operationId: 'FxQuotesPost' +}) + +const kafkaMessageFxPayloadPutDto = (params = {}) => { + const dto = { + ...kafkaMessagePayloadDto({ + ...params, + fspiopVersion: '2.0', + action: 'put', + type: 'fxquote', + operationId: 'FxQuotesPut' }) + } + delete dto.content.headers.accept + return dto +} + +const kafkaMessageFxPayloadGetDto = (params = {}) => kafkaMessagePayloadDto({ + ...params, + fspiopVersion: '2.0', + action: 'get', + type: 'fxquote', + operationId: 'FxQuotesGet' +}) + +const postFxQuotesPayloadDto = ({ + conversionRequestId = uuid(), + conversionId = uuid(), + initiatingFsp = 'pinkbank', + counterPartyFsp = 'redbank', + amountType = 'SEND', + sourceAmount = { + currency: 'USD', + amount: 300 }, - timeout: 5000 // is it used anywhere? 
+ targetAmount = { + currency: 'ZMW' + }, + expiration = new Date(Date.now() + 5 * 60 * 1000).toISOString(), + extensionList = { + extension: [ + { + key: 'Test', + value: 'Data' + } + ] + } +} = {}) => ({ + conversionRequestId, + conversionTerms: { + conversionId, + initiatingFsp, + counterPartyFsp, + amountType, + sourceAmount, + targetAmount, + expiration, + extensionList + } +}) + +const putFxQuotesPayloadDto = ({ + fxQuotesPostPayload = postFxQuotesPayloadDto(), + condition = 'mock-condition', + charges = [{ chargeType: 'Tax', sourceAmount: { amount: 1, currency: 'USD' }, targetAmount: { amount: 100, currency: 'ZMW' } }] +} = {}) => { + const dto = { + ...fxQuotesPostPayload, + condition + } + dto.conversionTerms.targetAmount.amount = 600 + dto.conversionTerms.charges = charges + return dto +} + +const postQuotesPayloadDto = ({ + from = 'payer', + to = 'payee', + quoteId = uuid(), + transactionId = uuid(), + amountType = 'SEND', + amount = { amount: '100', currency: 'USD' }, + transactionType = { scenario: 'DEPOSIT', initiator: 'PAYER', initiatorType: 'CONSUMER' }, + payer = { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '987654321', fspId: from } }, + payee = { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '123456789', fspId: to } } +} = {}) => ({ + quoteId, + transactionId, + amountType, + amount, + transactionType, + payer, + payee +}) + +const putQuotesPayloadDto = ({ + transferAmount = { amount: '100', currency: 'USD' }, + payeeReceiveAmount = { amount: '100', currency: 'USD' }, + ilpPacket = 'test-ilp-packet', + condition = 'test-condition' +} = {}) => ({ + transferAmount, + payeeReceiveAmount, + ilpPacket, + condition +}) + +const postBulkQuotesPayloadDto = ({ + from = 'payer', + to = 'payee', + bulkQuoteId = uuid(), + quoteIds = [uuid()], + payer = { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '987654321', fspId: from } }, + individualQuotes = [ + { + quoteId: quoteIds[0], + transactionId: uuid(), + payee: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '123456789', fspId: to } }, + amountType: 'SEND', + amount: { amount: '100', currency: 'USD' }, + transactionType: { scenario: 'DEPOSIT', initiator: 'PAYER', initiatorType: 'CONSUMER' } + } + ] +} = {}) => ({ + bulkQuoteId, + payer, + individualQuotes +}) + +const putBulkQuotesPayloadDto = ({ + to = 'payee', + quoteIds = [uuid()], + individualQuoteResults = [ + { + quoteId: quoteIds[0], + payee: { partyIdInfo: { partyIdType: 'MSISDN', partyIdentifier: '123456789', fspId: to } }, + transferAmount: { amount: '100', currency: 'USD' }, + payeeReceiveAmount: { amount: '100', currency: 'USD' }, + payeeFspFee: { amount: '0', currency: 'USD' }, + payeeFspCommission: { amount: '0', currency: 'USD' }, + ilpPacket: 'test-ilp-packet', + condition: 'test-condition' + } + ], + expiration = new Date(Date.now() + 5 * 60 * 1000).toISOString() +} = {}) => ({ + individualQuoteResults, + expiration }) module.exports = { kafkaMessagePayloadDto, kafkaMessagePayloadPostDto, - proxyCacheConfigDto + kafkaMessageFxPayloadPostDto, + kafkaMessageFxPayloadPutDto, + kafkaMessageFxPayloadGetDto, + proxyCacheConfigDto, + postFxQuotesPayloadDto, + putFxQuotesPayloadDto, + postQuotesPayloadDto, + putQuotesPayloadDto, + postBulkQuotesPayloadDto, + putBulkQuotesPayloadDto } diff --git a/test/unit/api/routes.test.js b/test/unit/api/routes.test.js new file mode 100644 index 00000000..20bc78af --- /dev/null +++ b/test/unit/api/routes.test.js @@ -0,0 +1,42 @@ +const { handleRequest } = require('../../../src/api/routes') + 
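+// Route dispatch checks: handleRequest should pass each request through to api.handleRequest, preserving the original method and path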
+describe('routes', () => { + let api + let h + + beforeEach(() => { + jest.resetModules() + api = { + handleRequest: jest.fn().mockReturnValue(200) + } + h = jest.fn() + }) + + const testCase = (method, path) => { + it(`should return 200 for ${method} ${path}`, async () => { + const req = { method, path, payload: {}, query: {}, headers: {} } + const result = handleRequest(api, req, h) + + expect(api.handleRequest).toHaveBeenCalled() + expect(result).toEqual(200) + const [args] = api.handleRequest.mock.calls[0] + expect(args.path).toEqual(req.path) + expect(args.method).toEqual(req.method) + }) + } + + testCase('PUT', '/quotes/{id}/error') + testCase('GET', '/quotes/{id}') + testCase('PUT', '/quotes/{id}') + testCase('POST', '/quotes') + testCase('PUT', '/bulkQuotes/{id}/error') + testCase('GET', '/bulkQuotes/{id}') + testCase('PUT', '/bulkQuotes/{id}') + testCase('POST', '/bulkQuotes') + testCase('GET', '/fxQuotes/{id}/error') + testCase('PUT', '/fxQuotes/{id}') + testCase('GET', '/fxQuotes/{id}') + testCase('POST', '/fxQuotes') + testCase('GET', '/health') + testCase('GET', '/metrics') +}) diff --git a/test/unit/handlers/QuotingHandler.test.js b/test/unit/handlers/QuotingHandler.test.js index 534c5c0d..745c57a3 100644 --- a/test/unit/handlers/QuotingHandler.test.js +++ b/test/unit/handlers/QuotingHandler.test.js @@ -4,10 +4,12 @@ const { Tracer } = require('@mojaloop/event-sdk') const Logger = require('@mojaloop/central-services-logger') jest.mock('../../../src/model/quotes') +jest.mock('../../../src/model/fxQuotes') jest.mock('../../../src/model/bulkQuotes') const QuotingHandler = require('../../../src/handlers/QuotingHandler') const QuotesModel = require('../../../src/model/quotes') +const FxQuotesModel = require('../../../src/model/fxQuotes') const BulkQuotesModel = require('../../../src/model/bulkQuotes') const Config = require('../../../src/lib/config') @@ -36,20 +38,24 @@ const createKafkaMessage = (topic) => ({ describe('QuotingHandler Tests -->', () => { let handler let quotesModel + let fxQuotesModel let bulkQuotesModel const quotesModelFactory = () => quotesModel + const fxQuotesModelFactory = () => fxQuotesModel const bulkQuotesModelFactory = () => bulkQuotesModel beforeEach(() => { const config = new Config() quotesModel = new QuotesModel({}) + fxQuotesModel = new FxQuotesModel({}) bulkQuotesModel = new BulkQuotesModel({}) handler = new QuotingHandler({ quotesModelFactory, bulkQuotesModelFactory, + fxQuotesModelFactory, config, logger: Logger, cache: new Cache(), @@ -218,8 +224,84 @@ describe('QuotingHandler Tests -->', () => { }) }) + describe('handlePostFxQuotes method Tests', () => { + it('should process POST /fxQuotes payload', async () => { + const requestData = createRequestData() + + const result = await handler.handlePostFxQuotes(requestData) + expect(result).toBe(true) + + expect(fxQuotesModel.handleFxQuoteRequest).toHaveBeenCalledTimes(1) + expect(fxQuotesModel.handleException).toHaveBeenCalledTimes(0) + }) + + it('should call handleException in case of error in handleFxQuoteRequest', async () => { + fxQuotesModel.handleFxQuoteRequest = jest.fn(async () => { throw new Error('Test Error') }) + const requestData = createRequestData() + + const result = await handler.handlePostFxQuotes(requestData) + expect(result).toBe(true) + expect(fxQuotesModel.handleException).toHaveBeenCalledTimes(1) + }) + }) + + describe('handlePutFxQuotes method Tests', () => { + it('should process success PUT /fxQuotes payload', async () => { + const requestData = createRequestData() + + 
const result = await handler.handlePutFxQuotes(requestData) + expect(result).toBe(true) + + expect(fxQuotesModel.handleFxQuoteUpdate).toHaveBeenCalledTimes(1) + expect(fxQuotesModel.handleException).toHaveBeenCalledTimes(0) + }) + + it('should process error PUT /fxQuotes payload', async () => { + const requestData = createRequestData({ + payload: { errorInformation: {} } + }) + + const result = await handler.handlePutFxQuotes(requestData) + expect(result).toBe(true) + + expect(fxQuotesModel.handleFxQuoteError).toHaveBeenCalledTimes(1) + expect(fxQuotesModel.handleFxQuoteUpdate).toHaveBeenCalledTimes(0) + expect(fxQuotesModel.handleException).toHaveBeenCalledTimes(0) + }) + + it('should call handleException in case of error in handleFxQuoteUpdate', async () => { + fxQuotesModel.handleFxQuoteUpdate = jest.fn(async () => { throw new Error('Test Error') }) + const requestData = createRequestData() + + const result = await handler.handlePutFxQuotes(requestData) + expect(result).toBe(true) + expect(fxQuotesModel.handleException).toHaveBeenCalledTimes(1) + }) + }) + + describe('handleGetFxQuotes method Tests', () => { + it('should process GET /fxQuotes payload', async () => { + const requestData = createRequestData() + + const result = await handler.handleGetFxQuotes(requestData) + expect(result).toBe(true) + + expect(fxQuotesModel.handleFxQuoteGet).toHaveBeenCalledTimes(1) + expect(fxQuotesModel.handleException).toHaveBeenCalledTimes(0) + }) + + it('should call handleException in case of error in handleFxQuoteGet', async () => { + fxQuotesModel.handleFxQuoteGet = jest.fn(async () => { throw new Error('Test Error') }) + const requestData = createRequestData() + + const result = await handler.handleGetFxQuotes(requestData) + expect(result).toBe(true) + expect(fxQuotesModel.handleException).toHaveBeenCalledTimes(1) + }) + }) + describe('defineHandlerByTopic method Tests', () => { - const { QUOTE, BULK_QUOTE } = (new Config()).kafkaConfig.CONSUMER + const { QUOTE, BULK_QUOTE, FX_QUOTE } = (new Config()).kafkaConfig.CONSUMER it('should skip message processing and log warn on incorrect topic name', async () => { const message = createKafkaMessage('wrong-topic') @@ -277,6 +359,30 @@ describe('QuotingHandler Tests -->', () => { await handler.defineHandlerByTopic(message) expect(handler.handleGetBulkQuotes).toHaveBeenCalledTimes(1) }) + + it('should define a handler for FX_QUOTE.POST.topic', async () => { + const message = createKafkaMessage(FX_QUOTE.POST.topic) + handler.handlePostFxQuotes = jest.fn() + + await handler.defineHandlerByTopic(message) + expect(handler.handlePostFxQuotes).toHaveBeenCalledTimes(1) + }) + + it('should define a handler for FX_QUOTE.PUT.topic', async () => { + const message = createKafkaMessage(FX_QUOTE.PUT.topic) + handler.handlePutFxQuotes = jest.fn() + + await handler.defineHandlerByTopic(message) + expect(handler.handlePutFxQuotes).toHaveBeenCalledTimes(1) + }) + + it('should define a handler for FX_QUOTE.GET.topic', async () => { + const message = createKafkaMessage(FX_QUOTE.GET.topic) + handler.handleGetFxQuotes = jest.fn() + + await handler.defineHandlerByTopic(message) + expect(handler.handleGetFxQuotes).toHaveBeenCalledTimes(1) + }) }) describe('handleMessages method Tests', () => { diff --git a/test/unit/handlers/init.test.js b/test/unit/handlers/init.test.js index 1e02e0fe..264dabf2 100644 --- a/test/unit/handlers/init.test.js +++ b/test/unit/handlers/init.test.js @@ -1,15 +1,12 @@ jest.mock('../../../src/handlers/createConsumers') 
jest.mock('../../../src/handlers/monitoringServer') -jest.mock('../../../src/lib/proxy', () => ({ - createProxyClient: () => ({ - connect: jest.fn(), - isConnected: false - }) -})) +jest.mock('../../../src/lib/proxy') const init = require('../../../src/handlers/init') const Database = require('../../../src/data/cachedDatabase') const { Functionalities } = require('../../../src/lib/enum') +const Config = require('../../../src/lib/config') +const { createProxyClient } = require('../../../src/lib/proxy') const handlerList = [Functionalities.QUOTE] @@ -26,6 +23,22 @@ describe('init Tests -->', () => { await expect(init.stopFn()).resolves.toBeUndefined() }) + test('should disconnect proxyCache if enabled', async () => { + isDbOk = true + const config = new Config() + config.proxyCache.enabled = true + const mockProxyCache = { + isConnected: true, + connect: jest.fn().mockResolvedValue(true), + disconnect: jest.fn().mockResolvedValue(true) + } + createProxyClient.mockReturnValue(mockProxyCache) + await init.startFn(handlerList, config) + + await expect(init.stopFn()).resolves.toBeUndefined() + expect(mockProxyCache.disconnect).toHaveBeenCalled() + }) + test('should execute startFn without error if DB is connected', async () => { isDbOk = true await expect(init.startFn(handlerList)) @@ -38,4 +51,25 @@ describe('init Tests -->', () => { await expect(init.startFn(handlerList)) .rejects.toThrowError('DB is not connected') }) + + test('should connect proxyCache if enabled', async () => { + isDbOk = true + const config = new Config() + config.proxyCache.enabled = true + const mockProxyCache = { connect: jest.fn().mockResolvedValue(true) } + createProxyClient.mockReturnValue(mockProxyCache) + + await expect(init.startFn(handlerList, config)).resolves.toBeTruthy() + expect(mockProxyCache.connect).toHaveBeenCalled() + }) + + test('should throw error if proxyCache is not connected', async () => { + isDbOk = true + const config = new Config() + config.proxyCache.enabled = true + const mockProxyCache = { connect: jest.fn().mockResolvedValue(false) } + createProxyClient.mockReturnValue(mockProxyCache) + + await expect(init.startFn(handlerList, config)).rejects.toThrowError('Proxy is not connected') + }) }) diff --git a/test/unit/lib/proxy.test.js b/test/unit/lib/proxy.test.js index b7ecae9b..42ecf66f 100644 --- a/test/unit/lib/proxy.test.js +++ b/test/unit/lib/proxy.test.js @@ -53,7 +53,7 @@ describe('createProxyClient', () => { }) it('should create a proxy client and return it', async () => { - const proxyClient = await createProxyClient({ proxyCacheConfig }) + const proxyClient = createProxyClient({ proxyCacheConfig }) expect(proxyClient).toBeDefined() expect(proxyClient.isConnected).toBe(true) diff --git a/test/unit/mocks.js b/test/unit/mocks.js index ee47983a..3f7db016 100644 --- a/test/unit/mocks.js +++ b/test/unit/mocks.js @@ -36,7 +36,113 @@ const createMockHapiHandler = () => { return { handler, code } } +const mockSpan = () => ({ + setTags: jest.fn(), + audit: jest.fn(), + finish: jest.fn(), + getChild: jest.fn(), + error: jest.fn(), + isFinished: false, + injectContextToHttpRequest: jest.fn().mockImplementation(param => param) +}) + +const fxQuoteMocks = { + fxQuoteRequest: ({ conversionRequestId = randomUUID() } = {}) => ({ + conversionRequestId, + conversionTerms: { + conversionId: randomUUID(), + determiningTransferId: randomUUID(), + initiatingFsp: 'mockInitiator', + counterPartyFsp: 'mockCounterParty', + amountType: 'SEND', + sourceAmount: { + currency: 'ZMW', + amount: '100' + }, + 
targetAmount: { + currency: 'TZS', + amount: '10395' + }, + expiration: new Date(Date.now() + 10_000).toISOString(), + charges: [ + { + chargeType: 'TRANSACTION FEE', + sourceAmount: { + currency: 'ZMW', + amount: '1' + }, + targetAmount: { + currency: 'TZS', + amount: '103' + } + } + ], + extensionList: { + extension: [ + { + key: 'key1', + value: 'value1' + } + ] + } + } + }), + fxQuoteUpdateRequest: ({ + condition = randomUUID(), + conversionTerms = fxQuoteMocks.fxQuoteRequest().conversionTerms + } = {}) => ({ + condition, + conversionTerms + }), + headers: () => ({ + accept: 'application/vnd.interoperability.fxquotes+json;version=1.0', + 'content-type': 'application/vnd.interoperability.fxquotes+json;version=1.0', + 'content-length': '100', + date: new Date().toISOString(), + 'fspiop-source': 'mockSource', + 'fspiop-destination': 'mockDestination' + }), + span: () => ({ + getChild: jest.fn().mockReturnValue(mockSpan()) + }), + source: 'mockSource', + destination: 'mockDestination', + initiatingFsp: 'mockInitiator', + counterPartyFsp: 'mockcCounterParty', + conversionRequestId: randomUUID(), + error: () => ({ + code: 2001, + message: 'Generic server error' + }), + httpRequestOptions: () => ({ + }), + db: ({ + getParticipant = jest.fn().mockResolvedValue({}), + getParticipantEndpoint = jest.fn().mockResolvedValue(undefined) + } = {}) => ({ + getParticipant, + getParticipantEndpoint + }), + proxyClient: ({ + isConnected = jest.fn().mockReturnValue(true), + connect = jest.fn().mockResolvedValue(true), + lookupProxyByDfspId = jest.fn().mockResolvedValue('mockProxy') + } = {}) => ({ + isConnected, + connect, + lookupProxyByDfspId + }), + logger: () => ({ + error: jest.fn(), + info: jest.fn(), + debug: jest.fn(), + verbose: jest.fn() + }) +} + module.exports = { mockHttpRequest, - createMockHapiHandler + createMockHapiHandler, + fxQuoteMocks, + mockSpan } diff --git a/test/unit/model/fxQuotes.test.js b/test/unit/model/fxQuotes.test.js index c62e59d4..a3192f5d 100644 --- a/test/unit/model/fxQuotes.test.js +++ b/test/unit/model/fxQuotes.test.js @@ -20,16 +20,27 @@ - Name Surname * Eugen Klymniuk + * Steven Oderayi -------------- **********/ process.env.LOG_LEVEL = 'debug' const { randomUUID } = require('node:crypto') +const fs = require('fs') +const path = require('path') +const axios = require('axios') + +jest.mock('axios') + const ErrorHandler = require('@mojaloop/central-services-error-handling') +const ENUM = require('@mojaloop/central-services-shared').Enum +const LOCAL_ENUM = require('../../../src/lib/enum') const FxQuotesModel = require('../../../src/model/fxQuotes') const Config = require('../../../src/lib/config') +const { FSPIOPError } = require('@mojaloop/central-services-error-handling/src/factory') const { makeAppInteroperabilityHeader } = require('../../../src/lib/util') -const { HEADERS, RESOURCES } = require('../../../src/constants') +const { HEADERS, RESOURCES, ERROR_MESSAGES } = require('../../../src/constants') +const { fxQuoteMocks } = require('../mocks') const config = new Config() @@ -38,6 +49,18 @@ describe('FxQuotesModel Tests -->', () => { let db let requestId let proxyClient + let log + let headers + let conversionRequestId + let request + let updateRequest + let span + let childSpan + let mockEndpoint + let destination + let httpRequest + + const endpointType = ENUM.EndPoints.FspEndpointTypes.FSPIOP_CALLBACK_URL_FX_QUOTES const createFxQuotesModel = () => { fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient }) @@ -47,8 +70,19 @@ describe('FxQuotesModel 
Tests -->', () => { } beforeEach(() => { - db = { config } // add needed functionality + db = fxQuoteMocks.db() + proxyClient = fxQuoteMocks.proxyClient() + log = fxQuoteMocks.logger() requestId = randomUUID() + headers = fxQuoteMocks.headers() + request = fxQuoteMocks.fxQuoteRequest() + conversionRequestId = request.conversionRequestId + updateRequest = fxQuoteMocks.fxQuoteUpdateRequest() + span = fxQuoteMocks.span() + childSpan = span.getChild() + mockEndpoint = 'https://some.endpoint' + destination = fxQuoteMocks.destination + httpRequest = jest.fn().mockResolvedValue({ status: 200 }) fxQuotesModel = createFxQuotesModel() }) @@ -56,7 +90,362 @@ describe('FxQuotesModel Tests -->', () => { jest.restoreAllMocks() }) + describe('validateFxQuoteRequest', () => { + test('should function correctly with proxy cache disabled', async () => { + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient: undefined, log }) + + await expect(fxQuotesModel.validateFxQuoteRequest(destination, request)).resolves.toBeUndefined() + + expect(db.getParticipant).toBeCalledTimes(2) + expect(db.getParticipant).toHaveBeenNthCalledWith(1, destination, LOCAL_ENUM.COUNTERPARTY_FSP, 'ZMW', ENUM.Accounts.LedgerAccountType.POSITION) + expect(db.getParticipant).toHaveBeenNthCalledWith(2, destination, LOCAL_ENUM.COUNTERPARTY_FSP, 'TZS', ENUM.Accounts.LedgerAccountType.POSITION) + }) + + test('should not validate participant if proxy cache returns a proxy', async () => { + proxyClient.isConnected = false + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log }) + + await expect(fxQuotesModel.validateFxQuoteRequest(destination, request)).resolves.toBeUndefined() + + expect(proxyClient.lookupProxyByDfspId).toBeCalledTimes(1) + expect(db.getParticipant).not.toHaveBeenCalled() + }) + + test('should validate participant if proxy cache returns no proxy', async () => { + proxyClient.lookupProxyByDfspId = jest.fn().mockResolvedValue(undefined) + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log }) + + await expect(fxQuotesModel.validateFxQuoteRequest(destination, request)).resolves.toBeUndefined() + + expect(proxyClient.lookupProxyByDfspId).toBeCalledTimes(1) + expect(db.getParticipant).toBeCalledTimes(2) + }) + + test('should throw error if participant validation fails', async () => { + proxyClient.lookupProxyByDfspId = jest.fn().mockResolvedValue(undefined) + db.getParticipant = jest.fn().mockRejectedValue(new Error('DB Error')) + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log }) + + await expect(fxQuotesModel.validateFxQuoteRequest(destination, request)).rejects.toThrow() + + expect(proxyClient.lookupProxyByDfspId).toBeCalledTimes(1) + expect(db.getParticipant).toBeCalledTimes(2) + }) + }) + + describe('handleFxQuoteRequest', () => { + test('should handle fx quote request', async () => { + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log }) + jest.spyOn(fxQuotesModel, 'forwardFxQuoteRequest').mockResolvedValue() + jest.spyOn(fxQuotesModel, 'validateFxQuoteRequest') + + await expect(fxQuotesModel.handleFxQuoteRequest(headers, request, span)).resolves.toBeUndefined() + + expect(fxQuotesModel.validateFxQuoteRequest).toBeCalledWith(headers['fspiop-destination'], request) + expect(fxQuotesModel.forwardFxQuoteRequest).toBeCalledWith(headers, request.conversionRequestId, request, span.getChild()) + }) + + test('should handle error thrown', async () => { + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log }) + jest.spyOn(fxQuotesModel, 
'forwardFxQuoteRequest').mockRejectedValue(new Error('Forward Error')) + jest.spyOn(fxQuotesModel, 'validateFxQuoteRequest') + jest.spyOn(fxQuotesModel, 'handleException').mockResolvedValue() + + await expect(fxQuotesModel.handleFxQuoteRequest(headers, request, span)).resolves.toBeUndefined() + + expect(fxQuotesModel.validateFxQuoteRequest).toBeCalledWith(headers['fspiop-destination'], request) + expect(fxQuotesModel.forwardFxQuoteRequest).toBeCalledWith(headers, request.conversionRequestId, request, span.getChild()) + expect(fxQuotesModel.handleException).toBeCalledWith(headers['fspiop-source'], request.conversionRequestId, expect.any(Error), headers, span.getChild()) + expect(span.getChild().finish).toBeCalledTimes(1) + }) + }) + + describe('forwardFxQuoteRequest', () => { + test('should forward fx quote request', async () => { + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log, httpRequest }) + fxQuotesModel._getParticipantEndpoint = jest.fn().mockResolvedValue(mockEndpoint) + + await expect(fxQuotesModel.forwardFxQuoteRequest(headers, conversionRequestId, request, childSpan)).resolves.toBeUndefined() + + const expectedHeaders = { + Accept: headers.accept, + 'Content-Type': headers['content-type'], + 'FSPIOP-Source': headers['fspiop-source'], + 'FSPIOP-Destination': headers['fspiop-destination'], + Date: headers.date + } + expect(httpRequest).toHaveBeenCalledWith({ + headers: expectedHeaders, + method: ENUM.Http.RestMethods.POST, + url: `${mockEndpoint}${ENUM.EndPoints.FspEndpointTemplates.FX_QUOTES_POST}`, + data: JSON.stringify(request) + }, headers['fspiop-source']) + }) + + test('should forward quote request to proxy', async () => { + const mockProxyEndpoint = 'https://proxy.endpoint' + const mockProxy = 'mockProxy' + + proxyClient.lookupProxyByDfspId = jest.fn().mockResolvedValue(mockProxy) + db.getParticipantEndpoint = jest.fn().mockImplementation((fspId, _endpointType) => { + if (fspId === destination) return null + if (fspId === mockProxy) return mockProxyEndpoint + return 'https://some.other.endpoint' + }) + + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log, httpRequest }) + await expect(fxQuotesModel.forwardFxQuoteRequest(headers, conversionRequestId, request, childSpan)).resolves.toBeUndefined() + + expect(httpRequest).toBeCalled() + expect(proxyClient.lookupProxyByDfspId).toBeCalledTimes(1) + expect(db.getParticipantEndpoint).toBeCalledTimes(2) + expect(db.getParticipantEndpoint).toHaveBeenNthCalledWith(1, destination, endpointType) + expect(db.getParticipantEndpoint).toHaveBeenNthCalledWith(2, mockProxy, endpointType) + }) + + test('should format error thrown and re-throw', async () => { + httpRequest.mockRejectedValue(new Error('HTTP Error')) + + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log, httpRequest }) + fxQuotesModel._getParticipantEndpoint = jest.fn().mockResolvedValue(undefined) + + await expect(fxQuotesModel.forwardFxQuoteRequest(headers, conversionRequestId, request, childSpan)).rejects.toThrow(FSPIOPError) + }) + }) + + describe('handleFxQuoteUpdate', () => { + test('headers should not contain accept property', async () => { + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log }) + jest.spyOn(fxQuotesModel, 'forwardFxQuoteUpdate').mockResolvedValue() + jest.spyOn(fxQuotesModel, 'handleException').mockResolvedValue() + + await expect(fxQuotesModel.handleFxQuoteUpdate(headers, conversionRequestId, updateRequest, span)).resolves.toBeUndefined() + + 
expect(fxQuotesModel.forwardFxQuoteUpdate).not.toBeCalled() + expect(fxQuotesModel.handleException).toBeCalledWith(headers['fspiop-source'], conversionRequestId, expect.any(Error), headers, span.getChild()) + }) + + test('should handle fx quote update', async () => { + delete headers.accept + + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log }) + jest.spyOn(fxQuotesModel, 'forwardFxQuoteUpdate').mockResolvedValue() + jest.spyOn(fxQuotesModel, 'handleException').mockResolvedValue() + + await expect(fxQuotesModel.handleFxQuoteUpdate(headers, conversionRequestId, updateRequest, span)).resolves.toBeUndefined() + + expect(fxQuotesModel.forwardFxQuoteUpdate).toBeCalledWith(headers, conversionRequestId, updateRequest, span.getChild()) + }) + }) + + describe('forwardFxQuoteUpdate', () => { + test('should forward fx quote update', async () => { + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log, httpRequest }) + jest.spyOn(fxQuotesModel, '_getParticipantEndpoint').mockResolvedValue(mockEndpoint) + + await expect(fxQuotesModel.forwardFxQuoteUpdate(headers, conversionRequestId, updateRequest, childSpan)).resolves.toBeUndefined() + + expect(httpRequest).toHaveBeenCalledWith({ + headers: { + 'Content-Type': headers['content-type'], + 'FSPIOP-Source': headers['fspiop-source'], + 'FSPIOP-Destination': headers['fspiop-destination'], + Date: headers.date + }, + method: ENUM.Http.RestMethods.PUT, + url: `${mockEndpoint}/fxQuotes/${conversionRequestId}`, + data: JSON.stringify(updateRequest) + }, headers['fspiop-source']) + }) + + test('should send error callback if no endpoint found', async () => { + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log, httpRequest }) + jest.spyOn(fxQuotesModel, '_getParticipantEndpoint').mockResolvedValue(undefined) + jest.spyOn(fxQuotesModel, 'sendErrorCallback').mockResolvedValue() + + await expect(fxQuotesModel.forwardFxQuoteUpdate(headers, conversionRequestId, updateRequest, childSpan)).resolves.toBeUndefined() + expect(fxQuotesModel.sendErrorCallback).toBeCalledWith(headers['fspiop-source'], expect.any(Error), conversionRequestId, headers, childSpan, true) + expect(httpRequest).not.toBeCalled() + }) + + test('should format error thrown and re-throw', async () => { + const httpRequest = jest.fn().mockRejectedValue(new Error('HTTP Error')) + + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log, httpRequest }) + fxQuotesModel._getParticipantEndpoint = jest.fn().mockResolvedValue(mockEndpoint) + + await expect(fxQuotesModel.forwardFxQuoteUpdate(headers, conversionRequestId, updateRequest, childSpan)).rejects.toThrow(FSPIOPError) + }) + }) + + describe('handleFxQuoteGet', () => { + test('should handle fx quote get', async () => { + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log }) + jest.spyOn(fxQuotesModel, 'forwardFxQuoteGet').mockResolvedValue() + jest.spyOn(fxQuotesModel, 'handleException').mockResolvedValue() + + await expect(fxQuotesModel.handleFxQuoteGet(headers, conversionRequestId, span)).resolves.toBeUndefined() + + expect(fxQuotesModel.forwardFxQuoteGet).toBeCalledWith(headers, conversionRequestId, span.getChild()) + expect(fxQuotesModel.handleException).not.toBeCalled() + }) + + test('should handle error thrown', async () => { + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log }) + jest.spyOn(fxQuotesModel, 'forwardFxQuoteGet').mockRejectedValue(new Error('Forward Error')) + jest.spyOn(fxQuotesModel, 'handleException').mockResolvedValue() + + await 
expect(fxQuotesModel.handleFxQuoteGet(headers, conversionRequestId, span)).resolves.toBeUndefined() + + expect(fxQuotesModel.forwardFxQuoteGet).toBeCalledWith(headers, conversionRequestId, span.getChild()) + expect(fxQuotesModel.handleException).toBeCalledWith(headers['fspiop-source'], conversionRequestId, expect.any(Error), headers, span.getChild()) + }) + }) + + describe('forwardFxQuoteGet', () => { + test('should forward fx quote get', async () => { + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log, httpRequest }) + jest.spyOn(fxQuotesModel, '_getParticipantEndpoint').mockResolvedValue(mockEndpoint) + + await expect(fxQuotesModel.forwardFxQuoteGet(headers, conversionRequestId, childSpan)).resolves.toBeUndefined() + + expect(httpRequest).toHaveBeenCalledWith({ + headers: { + Accept: headers.accept, + 'Content-Type': headers['content-type'], + 'FSPIOP-Source': headers['fspiop-source'], + 'FSPIOP-Destination': headers['fspiop-destination'], + Date: headers.date + }, + method: ENUM.Http.RestMethods.GET, + url: `${mockEndpoint}/fxQuotes/${conversionRequestId}` + }, headers['fspiop-source']) + }) + + test('should format error thrown and re-throw', async () => { + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log, httpRequest }) + fxQuotesModel._getParticipantEndpoint = jest.fn().mockResolvedValue(undefined) + + await expect(fxQuotesModel.forwardFxQuoteGet(headers, conversionRequestId, updateRequest, childSpan)).rejects.toThrow(FSPIOPError) + }) + }) + + describe('handleFxQuoteError', () => { + test('should handle fx quote error', async () => { + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log }) + jest.spyOn(fxQuotesModel, 'sendErrorCallback').mockResolvedValue() + + const error = { errorCode: '3201', errorDescription: 'Destination FSP error' } + await expect(fxQuotesModel.handleFxQuoteError(headers, conversionRequestId, error, span)).resolves.toBeUndefined() + + const fspiopError = ErrorHandler.CreateFSPIOPErrorFromErrorInformation(error) + expect(fxQuotesModel.sendErrorCallback).toBeCalledWith(headers['fspiop-destination'], fspiopError, conversionRequestId, headers, childSpan, false) + }) + + test('should handle error thrown', async () => { + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log }) + jest.spyOn(fxQuotesModel, 'sendErrorCallback').mockRejectedValue(new Error('Send Error Callback Error')) + jest.spyOn(fxQuotesModel, 'handleException').mockResolvedValue() + + const error = { errorCode: '3201', errorDescription: 'Destination FSP error' } + await expect(fxQuotesModel.handleFxQuoteError(headers, conversionRequestId, error, span)).resolves.toBeUndefined() + + const fspiopError = ErrorHandler.CreateFSPIOPErrorFromErrorInformation(error) + expect(fxQuotesModel.sendErrorCallback).toBeCalledWith(headers['fspiop-destination'], fspiopError, conversionRequestId, headers, childSpan, false) + expect(fxQuotesModel.handleException).toBeCalledWith(headers['fspiop-source'], conversionRequestId, expect.any(Error), headers, childSpan) + }) + }) + + describe('handleException', () => { + test('should handle exception', async () => { + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log }) + jest.spyOn(fxQuotesModel, 'sendErrorCallback').mockResolvedValue() + + const error = new Error('Test Error') + await expect(fxQuotesModel.handleException(headers['fspiop-source'], conversionRequestId, error, headers, span)).resolves.toBeUndefined() + + const fspiopError = ErrorHandler.ReformatFSPIOPError(error) + 
expect(fxQuotesModel.sendErrorCallback).toBeCalledWith(headers['fspiop-source'], fspiopError, conversionRequestId, headers, childSpan, true) + }) + + test('should handle error thrown', async () => { + const error = new Error('Send Error Callback Error') + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log }) + jest.spyOn(fxQuotesModel, 'sendErrorCallback').mockRejectedValue(error) + + await expect(fxQuotesModel.handleException(headers['fspiop-source'], conversionRequestId, error, headers, span)).resolves.toBeUndefined() + + const fspiopError = ErrorHandler.ReformatFSPIOPError(error) + expect(fxQuotesModel.sendErrorCallback).toBeCalledWith(headers['fspiop-source'], fspiopError, conversionRequestId, headers, childSpan, true) + expect(log.error).toBeCalledWith(expect.any(String), error) + }) + }) + describe('sendErrorCallback method Tests', () => { + test('should throw fspiop error if no destination found', async () => { + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log }) + fxQuotesModel._getParticipantEndpoint = jest.fn().mockResolvedValue(undefined) + const fspiopError = ErrorHandler.CreateFSPIOPError({ code: 2001, message: 'Generic server error' }, '', new Error('Test error')) + await expect(fxQuotesModel.sendErrorCallback(headers['fspiop-source'], fspiopError, conversionRequestId, headers, childSpan)).rejects.toThrow(ERROR_MESSAGES.NO_FX_CALLBACK_ENDPOINT(headers['fspiop-source'], conversionRequestId)) + }) + + test('should send error callback with flag modifyHeaders === false', async () => { + headers['fspiop-signature'] = 'signature' + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient }) + fxQuotesModel._getParticipantEndpoint = jest.fn().mockResolvedValue(mockEndpoint) + jest.spyOn(fxQuotesModel, 'sendHttpRequest') + jest.spyOn(axios, 'request').mockResolvedValue({ status: 200 }) + const fspiopError = ErrorHandler.CreateFSPIOPError({ code: 2001, message: 'Generic server error' }, '', new Error('Test error')) + + await expect(fxQuotesModel.sendErrorCallback(headers['fspiop-source'], fspiopError, conversionRequestId, headers, childSpan, false)).resolves.toBeUndefined() + + expect(fxQuotesModel.sendHttpRequest).toBeCalledTimes(1) + const [args] = fxQuotesModel.sendHttpRequest.mock.calls[0] + expect(args.headers['FSPIOP-Source']).toBe(headers['fspiop-source']) + expect(args.headers['FSPIOP-Destination']).toBe(headers['fspiop-destination']) + expect(args.headers['FSPIOP-Signature']).toBe(headers['fspiop-signature']) + expect(args.headers.Date).toBe(headers.date) + expect(args.headers['Content-Type']).toBe(headers['content-type']) + expect(args.headers.Accept).toBeUndefined() + expect(args.method).toBe(ENUM.Http.RestMethods.PUT) + expect(args.url).toBe(`${mockEndpoint}/fxQuotes/${conversionRequestId}/error`) + expect(args.data).toBe(JSON.stringify(fspiopError.toApiErrorObject())) + }) + + test('should reformat and re-throw http request error to fspiop error', async () => { + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log }) + fxQuotesModel._getParticipantEndpoint = jest.fn().mockResolvedValue(mockEndpoint) + fxQuotesModel.sendHttpRequest = jest.fn(async () => { throw new Error('Test error') }) + const fspiopError = ErrorHandler.CreateFSPIOPError({ code: 2001, message: 'Generic server error' }, '', new Error('Test error')) + + await expect(fxQuotesModel.sendErrorCallback(headers['fspiop-source'], fspiopError, conversionRequestId, headers, childSpan, false)).rejects.toThrow(FSPIOPError) + }) + + test('should re-throw error 
response from callback if not OK', async () => { + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log }) + fxQuotesModel._getParticipantEndpoint = jest.fn().mockResolvedValue(mockEndpoint) + fxQuotesModel.sendHttpRequest = jest.fn(async () => ({ status: 500 })) + const fspiopError = ErrorHandler.CreateFSPIOPError({ code: 2001, message: 'Generic server error' }, '', new Error('Test error')) + + await expect(fxQuotesModel.sendErrorCallback(headers['fspiop-source'], fspiopError, conversionRequestId, headers, childSpan, false)).rejects.toThrow(ERROR_MESSAGES.CALLBACK_UNSUCCESSFUL_HTTP_RESPONSE) + }) + + test('should jws sign the request if jwsSign is true', async () => { + const envConfig = new Config() + envConfig.jws.jwsSign = true + envConfig.jws.jwsSigningKey = fs.readFileSync(path.join(__dirname, '../../../secrets/jwsSigningKey.key'), 'utf-8') + fxQuotesModel = new FxQuotesModel({ db, requestId, proxyClient, log, envConfig }) + fxQuotesModel._getParticipantEndpoint = jest.fn().mockResolvedValue(mockEndpoint) + fxQuotesModel.sendHttpRequest = jest.fn(async () => ({ status: 200 })) + const fspiopError = ErrorHandler.CreateFSPIOPError({ code: 2001, message: 'Generic server error' }, '', new Error('Test error')) + + await expect(fxQuotesModel.sendErrorCallback(headers['fspiop-source'], fspiopError, conversionRequestId, headers, childSpan, true)).resolves.toBeUndefined() + + expect(fxQuotesModel.sendHttpRequest).toBeCalledTimes(1) + const [args] = fxQuotesModel.sendHttpRequest.mock.calls[0] + expect(args.headers['fspiop-signature']).toContain('signature') + }) + test('should send errorCallback with fspiopSource as hub (flag modifyHeaders === true) [CSI-414]', async () => { const apiErrorCode = { code: 2001, message: 'Generic server error' } const fspiopError = ErrorHandler.CreateFSPIOPError(apiErrorCode, '', new Error('XXX')) diff --git a/test/unit/model/quotes.test.js b/test/unit/model/quotes.test.js index cf08caa0..86583a58 100644 --- a/test/unit/model/quotes.test.js +++ b/test/unit/model/quotes.test.js @@ -629,6 +629,21 @@ describe('QuotesModel', () => { expect(quotesModel.db.getParticipant.mock.calls[0][0]).toBe(mockData.quoteRequest.payee.partyIdInfo.fspId) }) + + it('should validate payer supported currencies if supplied', async () => { + const fspiopSource = 'dfsp1' + const fspiopDestination = 'dfsp2' + const request = mockData.quoteRequest + request.payer.supportedCurrencies = ['ZMW', 'TZS'] + quotesModel.db.getParticipant.mockResolvedValueOnce({ accounts: [{ currency: 'ZMW' }] }) + quotesModel.db.getParticipant.mockResolvedValueOnce({ accounts: [{ currency: 'TZS' }] }) + + await expect(quotesModel.validateQuoteRequest(fspiopSource, fspiopDestination, request)).resolves.toBeUndefined() + + expect(quotesModel.db.getParticipant).toHaveBeenCalledTimes(2) + expect(quotesModel.db.getParticipant).toHaveBeenCalledWith(fspiopSource, 'PAYER_DFSP', 'ZMW', Enum.Accounts.LedgerAccountType.POSITION) + expect(quotesModel.db.getParticipant).toHaveBeenCalledWith(fspiopSource, 'PAYER_DFSP', 'TZS', Enum.Accounts.LedgerAccountType.POSITION) + }) }) describe('validateQuoteUpdate', () => { beforeEach(() => { @@ -1073,6 +1088,18 @@ describe('QuotesModel', () => { expect(quotesModel.handleException.mock.calls.length).toBe(1) expect(result).toEqual(expectedResult) }) + + it('calls handleQuoteRequestResend if request is duplicate and should resend', async () => { + expect.assertions(1) + quotesModel.checkDuplicateQuoteRequest = jest.fn(() => { + return { + isDuplicateId: true, + isResend: true 
+ } + }) + await quotesModel.handleQuoteRequest(mockData.headers, mockData.quoteRequest, mockSpan) + expect(quotesModel.handleQuoteRequestResend).toHaveBeenCalledTimes(1) + }) }) }) })