From 1a2a51b5889f7bfd29a426887f9c0f60d14e0ad7 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 9 Jul 2024 09:35:26 +0100 Subject: [PATCH 001/247] chore: update Flagsmith environment document (#4310) Co-authored-by: matthewelwell --- api/integrations/flagsmith/data/environment.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/api/integrations/flagsmith/data/environment.json b/api/integrations/flagsmith/data/environment.json index 22404a9bc3d2..c3373b5c75b0 100644 --- a/api/integrations/flagsmith/data/environment.json +++ b/api/integrations/flagsmith/data/environment.json @@ -15,8 +15,8 @@ "multivariate_feature_state_values": [] }, { - "django_id": 543716, - "enabled": true, + "django_id": 554945, + "enabled": false, "feature": { "id": 96656, "name": "api_limiting_stop_serving_flags", @@ -24,7 +24,7 @@ }, "feature_segment": null, "feature_state_value": null, - "featurestate_uuid": "151b7878-c777-44ef-87ad-043cb5a130b3", + "featurestate_uuid": "e24072b4-3450-4c8e-8b9f-03d704acb5ac", "multivariate_feature_state_values": [] }, { From 8d048d8ac9781a05360ac106c9021a944a924dcb Mon Sep 17 00:00:00 2001 From: Gagan Date: Tue, 9 Jul 2024 14:06:17 +0530 Subject: [PATCH 002/247] deps(auth-controller): bump auth-controller (#4309) --- api/poetry.lock | 28 +++++++++------------------- api/pyproject.toml | 2 +- 2 files changed, 10 insertions(+), 20 deletions(-) diff --git a/api/poetry.lock b/api/poetry.lock index f69cc88b469d..03963f19b73b 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -1366,8 +1366,8 @@ django-multiselectfield = "0.1.12" [package.source] type = "git" url = "https://github.com/flagsmith/flagsmith-auth-controller" -reference = "v0.1.1" -resolved_reference = "24217c74a33cfa7635301fb752f1675a5009c7b4" +reference = "v0.1.2" +resolved_reference = "d0f73840b4d5a078077c2bb108458356476d0ee5" [[package]] name = "flagsmith-flag-engine" @@ -2203,16 +2203,6 @@ files = [ 
{file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = 
"MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -3732,24 +3722,24 @@ python-versions = ">=3.6" files = [ {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, + {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d92f81886165cb14d7b067ef37e142256f1c6a90a65cd156b063a43da1708cfd"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, {file = 
"ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, + {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b5edda50e5e9e15e54a6a8a0070302b00c518a9d32accc2346ad6c984aacd279"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, + {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:7048c338b6c86627afb27faecf418768acb6331fc24cfa56c93e8c9780f815fa"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, @@ -3757,7 +3747,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"}, + {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3fcc54cb0c8b811ff66082de1680b4b14cf8a81dce0d4fbf665c2265a81e07a1"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, @@ -3765,7 +3755,7 @@ files 
= [ {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, + {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:665f58bfd29b167039f714c6998178d27ccd83984084c286110ef26b230f259f"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, @@ -3773,7 +3763,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, + {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux2014_aarch64.whl", hash = 
"sha256:9eb5dee2772b0f704ca2e45b1713e4e5198c18f515b52743576d196348f374d3"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, @@ -4444,4 +4434,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = ">=3.11, <3.13" -content-hash = "dbd99b6598cac54d41be2929b2955226c4a0fe6f587639aa6e547b1520832e29" +content-hash = "09c44d79b4ee9002aeaa4dee988d5d212e128ab33bbf7147d645802378627380" diff --git a/api/pyproject.toml b/api/pyproject.toml index 0308854921d9..a6b317256b64 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -173,7 +173,7 @@ pyotp = "^2.9.0" optional = true [tool.poetry.group.auth-controller.dependencies] -flagsmith-auth-controller = { git = "https://github.com/flagsmith/flagsmith-auth-controller", tag = "v0.1.1" } +flagsmith-auth-controller = { git = "https://github.com/flagsmith/flagsmith-auth-controller", tag = "v0.1.2" } [tool.poetry.group.saml] optional = true From 007351c2338e85cacc051102780778e457f1568a Mon Sep 17 00:00:00 2001 From: Zach Aysan Date: Tue, 9 Jul 2024 04:41:01 -0400 Subject: [PATCH 003/247] fix: Set early return when influxdb range is empty (#4274) --- api/app_analytics/influxdb_wrapper.py | 4 ++++ .../test_unit_app_analytics_influxdb_wrapper.py | 11 +++++++++++ 2 files changed, 15 insertions(+) diff --git a/api/app_analytics/influxdb_wrapper.py b/api/app_analytics/influxdb_wrapper.py index 08cbcc19c6f8..4a88c596338b 100644 --- a/api/app_analytics/influxdb_wrapper.py +++ b/api/app_analytics/influxdb_wrapper.py @@ -83,6 +83,10 @@ def influx_query_manager( extra: str = "", bucket: str = read_bucket, ): + # 
Influx throws an error for an empty range, so just return a list. + if date_start == "-0d" and date_stop == "now()": + return [] + query_api = influxdb_client.query_api() drop_columns_input = str(list(drop_columns)).replace("'", '"') diff --git a/api/tests/unit/app_analytics/test_unit_app_analytics_influxdb_wrapper.py b/api/tests/unit/app_analytics/test_unit_app_analytics_influxdb_wrapper.py index f00f276a519f..85f8607d7947 100644 --- a/api/tests/unit/app_analytics/test_unit_app_analytics_influxdb_wrapper.py +++ b/api/tests/unit/app_analytics/test_unit_app_analytics_influxdb_wrapper.py @@ -440,3 +440,14 @@ def test_get_top_organisations_value_error( # The wrongly typed data does not stop the remaining data # from being returned. assert dataset == {456: 43} + + +def test_early_return_for_empty_range_for_influx_query_manager() -> None: + # When + results = InfluxDBWrapper.influx_query_manager( + date_start="-0d", + date_stop="now()", + ) + + # Then + assert results == [] From 8e9659e1a17278b8c5e3732daf5d0be70ff51b52 Mon Sep 17 00:00:00 2001 From: Ben Rometsch Date: Tue, 9 Jul 2024 11:20:38 +0100 Subject: [PATCH 004/247] chore(main): release 2.127.0 (#4261) Co-authored-by: Matthew Elwell --- .release-please-manifest.json | 2 +- CHANGELOG.md | 21 +++++++++++++++++++++ version.txt | 2 +- 3 files changed, 23 insertions(+), 2 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 8a32b099cc95..95fdae06e117 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.126.0" + ".": "2.127.0" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index ffd434cde67f..8ae9e83bdda2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,26 @@ # Changelog +## [2.127.0](https://github.com/Flagsmith/flagsmith/compare/v2.126.0...v2.127.0) (2024-07-09) + + +### Features + +* Add timestamps to segments models ([#4236](https://github.com/Flagsmith/flagsmith/issues/4236)) 
([a5b2421](https://github.com/Flagsmith/flagsmith/commit/a5b24210419e6f33935b4f06d8627bcac4a039bb)) +* Announcement feature flag per page ([#4218](https://github.com/Flagsmith/flagsmith/issues/4218)) ([3bfad05](https://github.com/Flagsmith/flagsmith/commit/3bfad055a203f65af84b158daabddc2e3a776556)) +* Announcement per page accept an id list on the params key ([#4280](https://github.com/Flagsmith/flagsmith/issues/4280)) ([e2685e9](https://github.com/Flagsmith/flagsmith/commit/e2685e91abcdc78457dc0ecf56f134de877cb609)) +* Announcement per page FF accept params ([#4275](https://github.com/Flagsmith/flagsmith/issues/4275)) ([078bf1e](https://github.com/Flagsmith/flagsmith/commit/078bf1e1a8002b9c5142c866804c89df006ffaef)) +* **build:** Debian Bookworm base images ([#4263](https://github.com/Flagsmith/flagsmith/issues/4263)) ([0230b9a](https://github.com/Flagsmith/flagsmith/commit/0230b9a479cd8e513e883ae19cd694da088bbc59)) +* **build:** Docker build improvements ([#4272](https://github.com/Flagsmith/flagsmith/issues/4272)) ([627370f](https://github.com/Flagsmith/flagsmith/commit/627370f3fb7b92f911db7eba15720e96878b3cd4)) +* Create versioning for segments ([#4138](https://github.com/Flagsmith/flagsmith/issues/4138)) ([bc9b340](https://github.com/Flagsmith/flagsmith/commit/bc9b340b2a44c46e93326e9602c48dda55e8a6f8)) +* Group versions by date ([#4246](https://github.com/Flagsmith/flagsmith/issues/4246)) ([540d320](https://github.com/Flagsmith/flagsmith/commit/540d320d4fafa686f8d61aed734bafb1c4e82f20)) +* Update API usage notifications thresholds ([#4255](https://github.com/Flagsmith/flagsmith/issues/4255)) ([5162687](https://github.com/Flagsmith/flagsmith/commit/516268775ee0581a791fb4fd6244126792f579ba)) + + +### Bug Fixes + +* **build:** Avoid Docker Hub pull throttling by using public ECR registry ([#4292](https://github.com/Flagsmith/flagsmith/issues/4292)) ([30bed4e](https://github.com/Flagsmith/flagsmith/commit/30bed4efc3aff4e947d2d3aeb8589481730dbbfb)) +* Set early 
return when influxdb range is empty ([#4274](https://github.com/Flagsmith/flagsmith/issues/4274)) ([007351c](https://github.com/Flagsmith/flagsmith/commit/007351c2338e85cacc051102780778e457f1568a)) + ## [2.126.0](https://github.com/Flagsmith/flagsmith/compare/v2.125.0...v2.126.0) (2024-06-26) diff --git a/version.txt b/version.txt index 20b6d0677b11..2c6aae91b3a4 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -2.126.0 +2.127.0 From 288a47efc6bec12374a05a48191380b645bb99b3 Mon Sep 17 00:00:00 2001 From: Matthew Elwell Date: Tue, 9 Jul 2024 11:57:24 +0100 Subject: [PATCH 005/247] fix(segments): add migration to set version on existing segments (#4315) --- .../0025_set_default_version_on_segment.py | 23 +++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 api/segments/migrations/0025_set_default_version_on_segment.py diff --git a/api/segments/migrations/0025_set_default_version_on_segment.py b/api/segments/migrations/0025_set_default_version_on_segment.py new file mode 100644 index 000000000000..cb4692e4ee50 --- /dev/null +++ b/api/segments/migrations/0025_set_default_version_on_segment.py @@ -0,0 +1,23 @@ +# Generated by Django 3.2.25 on 2024-07-09 10:41 +from django.apps.registry import Apps +from django.db import migrations +from django.db.backends.base.schema import BaseDatabaseSchemaEditor + + +def set_default_segment_version( + apps: Apps, + schema_editor: BaseDatabaseSchemaEditor, +) -> None: + segment_model_class = apps.get_model("segments", "Segment") + segment_model_class.objects.filter(version__isnull=True).update(version=1) + + +class Migration(migrations.Migration): + + dependencies = [ + ('segments', '0024_add_timestamps_to_segments'), + ] + + operations = [ + migrations.RunPython(set_default_segment_version, reverse_code=migrations.RunPython.noop), + ] From caa03f56264be1c78b4a294400b45f0120daad9d Mon Sep 17 00:00:00 2001 From: Matthew Elwell Date: Tue, 9 Jul 2024 12:11:42 +0100 Subject: [PATCH 006/247] chore(main): release 
2.127.1 (#4317) --- .release-please-manifest.json | 2 +- CHANGELOG.md | 7 +++++++ version.txt | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 95fdae06e117..bd923a8a09a9 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.127.0" + ".": "2.127.1" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 8ae9e83bdda2..cade1b9ef252 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [2.127.1](https://github.com/Flagsmith/flagsmith/compare/v2.127.0...v2.127.1) (2024-07-09) + + +### Bug Fixes + +* **segments:** add migration to set version on existing segments ([#4315](https://github.com/Flagsmith/flagsmith/issues/4315)) ([288a47e](https://github.com/Flagsmith/flagsmith/commit/288a47efc6bec12374a05a48191380b645bb99b3)) + ## [2.127.0](https://github.com/Flagsmith/flagsmith/compare/v2.126.0...v2.127.0) (2024-07-09) diff --git a/version.txt b/version.txt index 2c6aae91b3a4..b6b0e8109096 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -2.127.0 +2.127.1 From d32a3203036fa6fde820c1867862ff909c269b52 Mon Sep 17 00:00:00 2001 From: Kyle Johnson Date: Tue, 9 Jul 2024 13:40:41 +0100 Subject: [PATCH 007/247] feat: Selected options (#4311) --- frontend/web/components/EnvironmentSelect.tsx | 17 +++++++++------- frontend/web/project/project-components.js | 20 ++++++++++++++++++- 2 files changed, 29 insertions(+), 8 deletions(-) diff --git a/frontend/web/components/EnvironmentSelect.tsx b/frontend/web/components/EnvironmentSelect.tsx index 6fe61e556df6..38cd03ec6b06 100644 --- a/frontend/web/components/EnvironmentSelect.tsx +++ b/frontend/web/components/EnvironmentSelect.tsx @@ -25,11 +25,7 @@ const EnvironmentSelect: FC = ({ ...rest }) => { const { data } = useGetEnvironmentsQuery({ projectId: `${projectId}` }) - const foundValue = useMemo( - () => - data?.results?.find((environment) => 
`${environment[idField]}` === value), - [value, data, idField], - ) + const environments = useMemo(() => { return (data?.results || []) ?.map((v) => ({ @@ -43,8 +39,15 @@ const EnvironmentSelect: FC = ({ return true }) }, [data?.results, ignore, idField]) + + const foundValue = useMemo( + () => + environments.find((environment) => `${environment.value}` === `${value}`), + [value, environments], + ) + if (readOnly) { - return
{foundValue?.name}
+ return
{foundValue?.label}
} return (
@@ -52,7 +55,7 @@ const EnvironmentSelect: FC = ({ {...rest} value={ foundValue - ? { label: foundValue.name, value: `${foundValue.id}` } + ? foundValue : { label: label || diff --git a/frontend/web/project/project-components.js b/frontend/web/project/project-components.js index fa740f053778..4b25d4c2548a 100644 --- a/frontend/web/project/project-components.js +++ b/frontend/web/project/project-components.js @@ -1,5 +1,5 @@ import { PureComponent } from 'react' -import Select from 'react-select' +import Select, { components } from 'react-select' import Button from 'components/base/forms/Button' import Paging from 'components/Paging' import ToggleChip from 'components/ToggleChip' @@ -12,6 +12,8 @@ import ProjectProvider from 'common/providers/ProjectProvider' import AccountProvider from 'common/providers/AccountProvider' import OrganisationProvider from 'common/providers/OrganisationProvider' import Panel from 'components/base/grid/Panel' +import { checkmark, checkmarkCircle } from 'ionicons/icons' +import { IonIcon } from '@ionic/react' window.AppActions = require('../../common/dispatcher/app-actions') window.Actions = require('../../common/dispatcher/action-constants') @@ -80,6 +82,21 @@ window.Loader = class extends PureComponent { window.Tooltip = Tooltip global.ToggleChip = ToggleChip + +// Custom Option component to show the tick mark next to selected option in the dropdown +const Option = (props) => { + return ( + +
+ {props.data.label} + {props.isSelected && ( + + )} +
+
+ ) +} + global.Select = class extends PureComponent { static displayName = 'Select' @@ -122,6 +139,7 @@ global.Select = class extends PureComponent { className={`react-select ${props.size ? props.size : ''}`} classNamePrefix='react-select' {...props} + components={{ ...(props.components || {}), Option }} />
) From 25256367e706b611ed6e6c399b0b2fb8c672710c Mon Sep 17 00:00:00 2001 From: Kyle Johnson Date: Tue, 9 Jul 2024 18:05:24 +0100 Subject: [PATCH 008/247] fix: version diff overflow (#4313) --- frontend/web/components/diff/DiffFeature.tsx | 2 +- frontend/web/components/diff/DiffSegments.tsx | 2 +- frontend/web/components/diff/DiffString.tsx | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/frontend/web/components/diff/DiffFeature.tsx b/frontend/web/components/diff/DiffFeature.tsx index c6563564f71b..d4e0475906ff 100644 --- a/frontend/web/components/diff/DiffFeature.tsx +++ b/frontend/web/components/diff/DiffFeature.tsx @@ -109,7 +109,7 @@ const DiffFeature: FC = ({
{!hideValue && ( -
+
= ({ diff }) => { newValue={diff.deleted ? diff.oldPriority : diff.newPriority} />
-
+
= ({ return null } return ( -
+
Date: Wed, 10 Jul 2024 14:49:33 +0530
Subject: [PATCH 009/247] fix(get_permitted_projects): get rid of distinct
 (#4320)

---
 api/features/permissions.py                   |  4 ++
 api/permissions/permission_service.py         | 41 +++++++++++++++++--
 .../test_unit_feature_segments_views.py       |  4 +-
 .../test_unit_features_permissions.py         | 20 ---------
 .../unit/features/test_unit_features_views.py | 25 +++++++++--
 .../unit/segments/test_unit_segments_views.py |  2 +-
 6 files changed, 66 insertions(+), 30 deletions(-)

diff --git a/api/features/permissions.py b/api/features/permissions.py
index 3ddd9fcf1812..e116eee86657 100644
--- a/api/features/permissions.py
+++ b/api/features/permissions.py
@@ -45,6 +45,10 @@ def has_permission(self, request, view):
             # handled by has_object_permission
             return True
 
+        if view.action in ["list"]:
+            # handled by the view
+            return True
+
         try:
             project_id = view.kwargs.get("project_pk") or request.data.get("project")
             project = Project.objects.get(id=project_id)
diff --git a/api/permissions/permission_service.py b/api/permissions/permission_service.py
index 774a09fe89d6..aa938401f228 100644
--- a/api/permissions/permission_service.py
+++ b/api/permissions/permission_service.py
@@ -1,5 +1,6 @@
-from typing import TYPE_CHECKING, List, Union
+from typing import TYPE_CHECKING, List, Set, Union
 
+from django.conf import settings
 from django.db.models import Q, QuerySet
 
 from environments.models import Environment
@@ -76,7 +77,7 @@ def get_permitted_projects_for_user(
         - If `tag_ids` is a list of tag IDs, only project with one of those tags will
         be returned
     """
-    base_filter = get_base_permission_filter(
+    project_ids_from_base_filter = get_object_id_from_base_permission_filter(
         user, Project, permission_key, tag_ids=tag_ids
     )
 
@@ -84,8 +85,12 @@ def get_permitted_projects_for_user(
         organisation__userorganisation__user=user,
         organisation__userorganisation__role=OrganisationRole.ADMIN.name,
     )
-    filter_ = base_filter | organisation_filter
-    return Project.objects.filter(filter_).distinct()
+    project_ids_from_organisation = Project.objects.filter(
+        organisation_filter
+    ).values_list("id", flat=True)
+
+    project_ids = project_ids_from_base_filter | set(project_ids_from_organisation)
+    return Project.objects.filter(id__in=project_ids)
 
 
 def get_permitted_projects_for_master_api_key(
@@ -220,6 +225,34 @@ def get_base_permission_filter(
     return user_filter | group_filter | role_filter
 
 
+def get_object_id_from_base_permission_filter(
+    user: "FFAdminUser",
+    for_model: Union[Organisation, Project, Environment] = None,
+    permission_key: str = None,
+    allow_admin: bool = True,
+    tag_ids=None,
+) -> Set[int]:
+    object_ids = set()
+    user_filter = get_user_permission_filter(user, permission_key, allow_admin)
+    object_ids.update(
+        list(for_model.objects.filter(user_filter).values_list("id", flat=True))
+    )
+
+    group_filter = get_group_permission_filter(user, permission_key, allow_admin)
+
+    object_ids.update(
+        list(for_model.objects.filter(group_filter).values_list("id", flat=True))
+    )
+    if settings.IS_RBAC_INSTALLED:  # pragma: no cover
+        role_filter = get_role_permission_filter(
+            user, for_model, permission_key, allow_admin, tag_ids
+        )
+        object_ids.update(
+            list(for_model.objects.filter(role_filter).values_list("id", flat=True))
+        )
+    return object_ids
+
+
 def get_user_permission_filter(
     user: "FFAdminUser", permission_key: str = None, allow_admin: bool = True
 ) -> Q:
diff --git a/api/tests/unit/features/feature_segments/test_unit_feature_segments_views.py b/api/tests/unit/features/feature_segments/test_unit_feature_segments_views.py
index 0bfc0a895375..b2b55e3d303f 100644
--- a/api/tests/unit/features/feature_segments/test_unit_feature_segments_views.py
+++ b/api/tests/unit/features/feature_segments/test_unit_feature_segments_views.py
@@ -32,8 +32,8 @@
     [
         (
             lazy_fixture("admin_client"),
-            3,
-        ),  # 1 for paging, 1 for result, 1 for getting the current live version
+            6,
+        ),  # 1 for paging, 3 for permissions, 1 for result, 1 for getting the current live version
         (
             lazy_fixture("admin_master_api_key_client"),
             4,
diff --git a/api/tests/unit/features/test_unit_features_permissions.py b/api/tests/unit/features/test_unit_features_permissions.py
index 95812efc4770..e1dc4f864e64 100644
--- a/api/tests/unit/features/test_unit_features_permissions.py
+++ b/api/tests/unit/features/test_unit_features_permissions.py
@@ -85,26 +85,6 @@ def test_project_user_with_read_access_can_list_features(
     assert result is True
 
 
-def test_user_with_no_project_permissions_cannot_list_features(
-    staff_user: FFAdminUser,
-    project: Project,
-) -> None:
-    # Given
-    feature_permissions = FeaturePermissions()
-    mock_view = mock.MagicMock(
-        kwargs={"project_pk": project.id},
-        detail=False,
-        action="list",
-    )
-    mock_request = mock.MagicMock(data={}, user=staff_user)
-
-    # When
-    result = feature_permissions.has_permission(mock_request, mock_view)
-
-    # Then
-    assert result is False
-
-
 def test_organisation_admin_can_create_feature(
     admin_user: FFAdminUser,
     project: Project,
diff --git a/api/tests/unit/features/test_unit_features_views.py b/api/tests/unit/features/test_unit_features_views.py
index 20f15cf00b6a..c0496b26f497 100644
--- a/api/tests/unit/features/test_unit_features_views.py
+++ b/api/tests/unit/features/test_unit_features_views.py
@@ -2600,13 +2600,32 @@ def test_list_features_n_plus_1(
         v1_feature_state.clone(env=environment, version=i, live_from=timezone.now())
 
     # When
-    with django_assert_num_queries(17):
+    with django_assert_num_queries(16):
         response = staff_client.get(url)
 
     # Then
     assert response.status_code == status.HTTP_200_OK
 
 
+def test_list_features_from_different_project_returns_404(
+    staff_client: APIClient,
+    organisation_two_project_two: Project,
+    with_project_permissions: WithProjectPermissionsCallable,
+) -> None:
+    # Given
+    with_project_permissions([VIEW_PROJECT])
+
+    url = reverse(
+        "api-v1:projects:project-features-list", args=[organisation_two_project_two.id]
+    )
+
+    # When
+    response = staff_client.get(url)
+
+    # Then
+    assert response.status_code == status.HTTP_404_NOT_FOUND
+
+
 def test_list_features_with_union_tag(
     staff_client: APIClient,
     project: Project,
@@ -2808,7 +2827,7 @@ def test_list_features_with_feature_state(
     url = f"{base_url}?environment={environment.id}"
 
     # When
-    with django_assert_num_queries(17):
+    with django_assert_num_queries(16):
         response = staff_client.get(url)
 
     # Then
@@ -3102,7 +3121,7 @@ def test_feature_list_last_modified_values(
         Feature.objects.create(name=f"feature_{i}", project=project)
 
     # When
-    with django_assert_num_queries(19):  # TODO: reduce this number of queries!
+    with django_assert_num_queries(18):  # TODO: reduce this number of queries!
         response = staff_client.get(url)
 
     # Then
diff --git a/api/tests/unit/segments/test_unit_segments_views.py b/api/tests/unit/segments/test_unit_segments_views.py
index a5b67952ecc7..a59d5f39d82d 100644
--- a/api/tests/unit/segments/test_unit_segments_views.py
+++ b/api/tests/unit/segments/test_unit_segments_views.py
@@ -366,7 +366,7 @@ def test_get_segment_by_uuid(client, project, segment):
     "client, num_queries",
     [
         (lazy_fixture("admin_master_api_key_client"), 12),
-        (lazy_fixture("admin_client"), 11),
+        (lazy_fixture("admin_client"), 14),
     ],
 )
 def test_list_segments(

From 7f9cd623d80bd2abc9c98dc332acadc8450dda8c Mon Sep 17 00:00:00 2001
From: Flagsmith Bot <65724737+flagsmithdev@users.noreply.github.com>
Date: Wed, 10 Jul 2024 10:37:09 +0100
Subject: [PATCH 010/247] chore(main): release 2.128.0 (#4318)

---
 .release-please-manifest.json |  2 +-
 CHANGELOG.md                  | 13 +++++++++++++
 version.txt                   |  2 +-
 3 files changed, 15 insertions(+), 2 deletions(-)

diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index bd923a8a09a9..1a307fe4b767 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-  ".": "2.127.1"
+  ".": "2.128.0"
 }
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index cade1b9ef252..b82419774ff6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,18 @@
 # Changelog
 
+## [2.128.0](https://github.com/Flagsmith/flagsmith/compare/v2.127.1...v2.128.0) (2024-07-10)
+
+
+### Features
+
+* Selected options ([#4311](https://github.com/Flagsmith/flagsmith/issues/4311)) ([d32a320](https://github.com/Flagsmith/flagsmith/commit/d32a3203036fa6fde820c1867862ff909c269b52))
+
+
+### Bug Fixes
+
+* **get_permitted_projects:** get rid of distinct ([#4320](https://github.com/Flagsmith/flagsmith/issues/4320)) ([e7252cb](https://github.com/Flagsmith/flagsmith/commit/e7252cb056645ff6982772759ca8551cd1855811))
+* version diff overflow ([#4313](https://github.com/Flagsmith/flagsmith/issues/4313)) ([2525636](https://github.com/Flagsmith/flagsmith/commit/25256367e706b611ed6e6c399b0b2fb8c672710c))
+
 ## [2.127.1](https://github.com/Flagsmith/flagsmith/compare/v2.127.0...v2.127.1) (2024-07-09)
 
 
diff --git a/version.txt b/version.txt
index b6b0e8109096..0895236fd9de 100644
--- a/version.txt
+++ b/version.txt
@@ -1 +1 @@
-2.127.1
+2.128.0

From 103a94fe45c6e9ec677dd2aa14ab2009f5f0b44b Mon Sep 17 00:00:00 2001
From: Kyle Johnson 
Date: Wed, 10 Jul 2024 12:27:14 +0100
Subject: [PATCH 011/247] feat: Open payment modal if a plan was preselected,
 add annual plans (#4110)

---
 frontend/web/components/ProjectsPage.tsx  | 13 +++-
 frontend/web/components/modals/Payment.js | 88 +++++++++++++++++------
 frontend/web/components/pages/HomePage.js |  4 ++
 3 files changed, 83 insertions(+), 22 deletions(-)

diff --git a/frontend/web/components/ProjectsPage.tsx b/frontend/web/components/ProjectsPage.tsx
index 8bbaeb703a4e..dbeefb34a4a1 100644
--- a/frontend/web/components/ProjectsPage.tsx
+++ b/frontend/web/components/ProjectsPage.tsx
@@ -2,6 +2,9 @@ import React, { FC } from 'react'
 import ProjectManageWidget from './ProjectManageWidget'
 import OrganisationProvider from 'common/providers/OrganisationProvider'
 import ConfigProvider from 'common/providers/ConfigProvider'
+import Project from 'common/project'
+import { onPaymentLoad } from './modals/Payment'
+import makeAsyncScriptLoader from 'react-async-script'
 
 type ProjectsPageType = {
   match: {
@@ -24,4 +27,12 @@ const ProjectsPage: FC = ({ match }) => {
   )
 }
 
-export default ConfigProvider(ProjectsPage)
+const InnerComponent = ConfigProvider(ProjectsPage)
+const WrappedPayment = Project.chargebee?.site
+  ? makeAsyncScriptLoader('https://js.chargebee.com/v2/chargebee.js', {
+      removeOnUnmount: true,
+    })(InnerComponent)
+  : InnerComponent
+export default (props) => (
+  
+)
diff --git a/frontend/web/components/modals/Payment.js b/frontend/web/components/modals/Payment.js
index b2f5a95f799d..6a9be59bef6c 100644
--- a/frontend/web/components/modals/Payment.js
+++ b/frontend/web/components/modals/Payment.js
@@ -6,6 +6,10 @@ import Constants from 'common/constants'
 import InfoMessage from 'components/InfoMessage'
 import Icon from 'components/Icon'
 import firstpromoter from 'project/firstPromoter'
+import Utils from 'common/utils/utils'
+import AccountProvider from 'common/providers/AccountProvider'
+import classNames from 'classnames'
+import Switch from 'components/Switch'
 
 const PaymentButton = (props) => {
   const activeSubscription = AccountStore.getOrganisationPlan(
@@ -58,7 +62,9 @@ const Payment = class extends Component {
 
   constructor(props, context) {
     super(props, context)
-    this.state = {}
+    this.state = {
+      yearly: true,
+    }
   }
 
   componentDidMount = () => {
@@ -98,7 +104,7 @@ const Payment = class extends Component {
               ''
             return (
               
- +
Manage Payment Plan
{this.props.isDisableAccountText && ( @@ -115,6 +121,28 @@ const Payment = class extends Component { )}
+
+
+ Pay Yearly (Save 10%) +
+ { + this.setState({ yearly: !this.state.yearly }) + }} + /> +
+ Pay Monthly +
+
@@ -744,29 +772,47 @@ const Payment = class extends Component { } } +Payment.propTypes = {} +export const onPaymentLoad = () => { + if (!Project.chargebee?.site) { + return + } + const planId = API.getCookie('plan') + let link + if (planId && Utils.getFlagsmithHasFeature('payments_enabled')) { + ;(function () { + // Create a link element with data-cb-plan-id attribute + link = document.createElement('a') + link.setAttribute('data-cb-type', 'checkout') + link.setAttribute('data-cb-plan-id', planId) + link.setAttribute('href', 'javascript:void(0)') + // Append the link to the body + document.body.appendChild(link) + })() + } + Chargebee.init({ + site: Project.chargebee.site, + }) + Chargebee.registerAgain() + firstpromoter() + Chargebee.getInstance().setCheckoutCallbacks(() => ({ + success: (hostedPageId) => { + AppActions.updateSubscription(hostedPageId) + }, + })) + if (link) { + link.click() + document.body.removeChild(link) + API.setCookie('plan', null) + } +} + const WrappedPayment = makeAsyncScriptLoader( 'https://js.chargebee.com/v2/chargebee.js', { removeOnUnmount: true, }, )(ConfigProvider(Payment)) - -Payment.propTypes = {} - -module.exports = (props) => ( - { - Chargebee.init({ - site: Project.chargebee.site, - }) - Chargebee.registerAgain() - firstpromoter() - Chargebee.getInstance().setCheckoutCallbacks(() => ({ - success: (hostedPageId) => { - AppActions.updateSubscription(hostedPageId) - }, - })) - }} - /> +export default (props) => ( + ) diff --git a/frontend/web/components/pages/HomePage.js b/frontend/web/components/pages/HomePage.js index bb32be331c91..e58a5bc88572 100644 --- a/frontend/web/components/pages/HomePage.js +++ b/frontend/web/components/pages/HomePage.js @@ -65,6 +65,10 @@ const HomePage = class extends React.Component { } componentDidMount() { + const plan = Utils.fromParam().plan + if (plan) { + API.setCookie('plan', plan) + } if ( Project.albacross && this.props.location.pathname.indexOf('signup') !== -1 From 
f5a7eed20a24f5ef0c642e48030ed74b168dd41f Mon Sep 17 00:00:00 2001 From: Kyle Johnson Date: Wed, 10 Jul 2024 14:56:51 +0100 Subject: [PATCH 012/247] fix: annual plan ids and refreshing (#4323) --- frontend/env/project_dev.js | 4 +++ frontend/env/project_prod.js | 4 +++ frontend/web/components/modals/Payment.js | 38 ++++++++++++++--------- 3 files changed, 32 insertions(+), 14 deletions(-) diff --git a/frontend/env/project_dev.js b/frontend/env/project_dev.js index 23f61a13ecb8..fee495078741 100644 --- a/frontend/env/project_dev.js +++ b/frontend/env/project_dev.js @@ -17,5 +17,9 @@ module.exports = global.Project = { // This is used for Sentry tracking maintenance: false, useSecureCookies: true, + plans: { + scaleUp: { annual: 'scale-up-annual-v2', monthly: 'scale-up-v2' }, + startup: { annual: 'startup-annual-v2', monthly: 'startup-v2' }, + }, ...(globalThis.projectOverrides || {}), } diff --git a/frontend/env/project_prod.js b/frontend/env/project_prod.js index d6052afb3b23..d90a0c314444 100644 --- a/frontend/env/project_prod.js +++ b/frontend/env/project_prod.js @@ -23,5 +23,9 @@ module.exports = global.Project = { // This is used for Sentry tracking maintenance: false, useSecureCookies: true, + plans: { + scaleUp: { annual: 'scale-up-12-months-v2', monthly: 'scale-up-v2' }, + startup: { annual: 'start-up-12-months-v2', monthly: 'startup-v2' }, + }, ...(globalThis.projectOverrides || {}), } diff --git a/frontend/web/components/modals/Payment.js b/frontend/web/components/modals/Payment.js index 6a9be59bef6c..3503c1c42959 100644 --- a/frontend/web/components/modals/Payment.js +++ b/frontend/web/components/modals/Payment.js @@ -161,23 +161,28 @@ const Payment = class extends Component { Billed Monthly
{!viewOnly ? ( - this.state.yearly ? ( + <> {plan.includes('startup') ? 'Purchased' : 'Buy'} - ) : ( {plan.includes('startup') ? 'Purchased' : 'Buy'} - ) + ) : null}
@@ -380,23 +385,28 @@ const Payment = class extends Component { Billed Monthly
{!viewOnly ? ( - this.state.yearly ? ( + <> {plan.includes('scale-up') ? 'Purchased' : 'Buy'} - ) : ( {plan.includes('scale-up') ? 'Purchased' : 'Buy'} - ) + ) : null}
From f8babe892a3a066b3dcb80d47fe994e78d4e8ef0 Mon Sep 17 00:00:00 2001 From: Kim Gustyr Date: Fri, 12 Jul 2024 10:21:04 +0200 Subject: [PATCH 013/247] fix(e2e): Pass `GITHUB_ACTION_URL` to Docker E2E test runs (#4322) --- frontend/docker-compose-e2e-tests.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/frontend/docker-compose-e2e-tests.yml b/frontend/docker-compose-e2e-tests.yml index 98ec93ca60e2..bfe7769c7da9 100644 --- a/frontend/docker-compose-e2e-tests.yml +++ b/frontend/docker-compose-e2e-tests.yml @@ -48,6 +48,7 @@ services: DISABLE_ANALYTICS_FEATURES: 'true' FLAGSMITH_API: flagsmith-api:8000/api/v1/ SLACK_TOKEN: ${SLACK_TOKEN} + GITHUB_ACTION_URL: ${GITHUB_ACTION_URL} ports: - 8080:8080 depends_on: From cd121e8e01bce3ac036587d9741773cbd145b3e3 Mon Sep 17 00:00:00 2001 From: Kyle Johnson Date: Fri, 12 Jul 2024 09:22:53 +0100 Subject: [PATCH 014/247] fix: Prevent "Create Segment" button from disappearing when deleting the last segment (#4314) --- .../web/components/pages/SegmentsPage.tsx | 320 ++++++++---------- 1 file changed, 133 insertions(+), 187 deletions(-) diff --git a/frontend/web/components/pages/SegmentsPage.tsx b/frontend/web/components/pages/SegmentsPage.tsx index 1b8ed246a33d..babf1112f391 100644 --- a/frontend/web/components/pages/SegmentsPage.tsx +++ b/frontend/web/components/pages/SegmentsPage.tsx @@ -24,6 +24,7 @@ import PageTitle from 'components/PageTitle' import Switch from 'components/Switch' import { setModalTitle } from 'components/modals/base/ModalDefault' import classNames from 'classnames' +import InfoMessage from 'components/InfoMessage'; const CodeHelp = require('../../components/CodeHelp') type SegmentsPageType = { @@ -36,23 +37,6 @@ type SegmentsPageType = { } } -const HowToUseSegmentsMessage = () => ( -
-

- In order to use segments, please set the segment_operators remote config - value.{' '} - - . -

-
-) - const SegmentsPage: FC = (props) => { const { projectId } = props.match.params const environmentId = @@ -72,7 +56,6 @@ const SegmentsPage: FC = (props) => { q: search, }) const [removeSegment, { isLoading: isRemoving }] = useDeleteSegmentMutation() - const hasHadResults = useRef(false) const segmentsLimitAlert = Utils.calculateRemainingLimitsPercentage( ProjectStore.getTotalSegments(), @@ -155,11 +138,6 @@ const SegmentsPage: FC = (props) => { ) } - - if (data?.results.length) { - hasHadResults.current = true - } - const segments = data?.results return (
= (props) => { > - {renderWithPermission( - manageSegmentsPermission, - 'Manage segments', - , - )} - - ) : null + <> + {renderWithPermission( + manageSegmentsPermission, + 'Manage segments', + , + )} + } title={'Segments'} > @@ -212,7 +187,7 @@ const SegmentsPage: FC = (props) => {
- {isLoading && !hasHadResults.current && !segments && !searchInput && ( + {isLoading && !segments && !searchInput && (
@@ -220,156 +195,127 @@ const SegmentsPage: FC = (props) => { {(!isLoading || segments || searchInput) && (
{Utils.displayLimitAlert('segments', segmentsLimitAlert.percentage)} - {hasHadResults.current || - (segments && (segments.length || searchInput)) ? ( -
- {hasNoOperators && } - - - - - -
+
+ + + + +
+ } + renderSearchWithNoResults + className='no-pad' + id='segment-list' + title=' ' + renderFooter={() => ( + + )} + items={sortBy(segments, (v) => { + return `${v.feature ? 'a' : 'z'}${v.name}` + })} + renderRow={( + { description, feature, id, name }: Segment, + i: number, + ) => { + if (preselect.current === `${id}`) { + editSegment(preselect.current, !manageSegmentsPermission) + preselect.current = null } - renderSearchWithNoResults - className='no-pad' - id='segment-list' - title='Segments' - renderFooter={() => ( - - )} - items={sortBy(segments, (v) => { - return `${v.feature ? 'a' : 'z'}${v.name}` - })} - renderRow={( - { description, feature, id, name }: Segment, - i: number, - ) => { - if (preselect.current === `${id}`) { - editSegment( - preselect.current, - !manageSegmentsPermission, - ) - preselect.current = null - } - // TODO: remove this check - // I'm leaving this here for now so that we can deploy the FE and - // API independently, but we should remove this once PR #3430 is - // merged and released. - if (feature && !showFeatureSpecific) { - return null - } + // TODO: remove this check + // I'm leaving this here for now so that we can deploy the FE and + // API independently, but we should remove this once PR #3430 is + // merged and released. + if (feature && !showFeatureSpecific) { + return null + } - return renderWithPermission( - manageSegmentsPermission, - 'Manage segments', - - - editSegment(id, !manageSegmentsPermission) - : undefined - } + return renderWithPermission( + manageSegmentsPermission, + 'Manage segments', + + editSegment(id, !manageSegmentsPermission) + : undefined + } + > + - - {name} - {feature && ( -
- Feature-Specific -
- )} -
-
- {description || 'No description'} -
-
-
- + {name} + {feature && ( +
+ Feature-Specific +
+ )} + +
+ {description || 'No description'}
- , - ) - }} - paging={data} - nextPage={() => setPage(page + 1)} - prevPage={() => setPage(page - 1)} - goToPage={(page: number) => setPage(page)} - search={searchInput} - onChange={(e: any) => { - setSearchInput(Utils.safeParseEventValue(e)) - }} - renderNoResults={
} - filterRow={() => true} - /> - + +
+ +
+ , + ) + }} + paging={data} + nextPage={() => setPage(page + 1)} + prevPage={() => setPage(page - 1)} + goToPage={(page: number) => setPage(page)} + search={searchInput} + onChange={(e: any) => { + setSearchInput(Utils.safeParseEventValue(e)) + }} + filterRow={() => true} + /> + -

- Segments require you to identitfy users, setting traits will - add users to segments. -

- - - -
- ) : ( -
- - {renderWithPermission( - manageSegmentsPermission, - 'Manage segments', - , + + Segments require you to identitfy users, setting traits will add + users to segments. + + + - {hasNoOperators && } -
- )} + /> + +
)} From 9b927e8813d76ec63317ccd6edeb13a140d9ee3a Mon Sep 17 00:00:00 2001 From: Kim Gustyr Date: Fri, 12 Jul 2024 10:38:29 +0200 Subject: [PATCH 015/247] chore(build): API test image (#4266) --- .../docker-build-report-to-pr/action.yml | 46 +++ .github/docker_build_comment_template.md | 7 +- .github/workflows/.reusable-docker-build.yml | 73 +---- .../platform-docker-build-test-publish.yml | 8 + .github/workflows/platform-pull-request.yml | 48 ++- .pre-commit-config.yaml | 2 +- Dockerfile | 15 + api/poetry.lock | 294 ++---------------- api/pyproject.toml | 2 +- 9 files changed, 145 insertions(+), 350 deletions(-) create mode 100644 .github/actions/docker-build-report-to-pr/action.yml diff --git a/.github/actions/docker-build-report-to-pr/action.yml b/.github/actions/docker-build-report-to-pr/action.yml new file mode 100644 index 000000000000..fbb387b41619 --- /dev/null +++ b/.github/actions/docker-build-report-to-pr/action.yml @@ -0,0 +1,46 @@ +name: Report Docker Build status to PR +description: Create or update PR comment related to Docker build + +inputs: + image-tag: + description: Full image tag + required: false + build-status: + description: Build status in short format + required: false + security-report-status: + description: Security report status in short format + required: false + +runs: + using: composite + + steps: + - uses: peter-evans/find-comment@v3 + id: find-comment + with: + issue-number: ${{ github.event.pull_request.number }} + body-includes: 'Docker builds report' + + - uses: chuhlomin/render-template@v1.4 + if: ${{ !inputs.image-tag }} + id: render-header + with: + template: .github/docker_build_comment_template.md + + - uses: peter-evans/create-or-update-comment@v4 + if: ${{ !inputs.image-tag }} + with: + comment-id: ${{ steps.find-comment.outputs.comment-id }} + edit-mode: replace + issue-number: ${{ github.event.pull_request.number }} + body: ${{ steps.render-header.outputs.result }} + + - uses: peter-evans/create-or-update-comment@v4 + 
if: ${{ inputs.image-tag }} + with: + comment-id: ${{ steps.find-comment.outputs.comment-id }} + edit-mode: append + issue-number: ${{ github.event.pull_request.number }} + body: > + | `${{ inputs.image-tag }}` | ${{ inputs.build-status }} | ${{ inputs.security-report-status }} | diff --git a/.github/docker_build_comment_template.md b/.github/docker_build_comment_template.md index e6876212a1c3..4b5e4ea728fc 100644 --- a/.github/docker_build_comment_template.md +++ b/.github/docker_build_comment_template.md @@ -1,5 +1,4 @@ -{{ .message }} +#### Docker builds report -| Image | Build Status | Security report | -| --------------- | ------------------ | --------------------------- | -| {{ .imageTag }} | {{ .buildStatus }} | {{ .securityReportStatus }} | +| Image | Build Status | Security report | +| ----- | ------------ | --------------- | \ No newline at end of file diff --git a/.github/workflows/.reusable-docker-build.yml b/.github/workflows/.reusable-docker-build.yml index 913b2f335dbc..96e1a4c40bb7 100644 --- a/.github/workflows/.reusable-docker-build.yml +++ b/.github/workflows/.reusable-docker-build.yml @@ -96,33 +96,6 @@ jobs: ${{ inputs.registry-url }}/flagsmith/${{ inputs.image-name }} tags: ${{ inputs.tags }} - - uses: chuhlomin/render-template@v1.4 - if: inputs.comment - id: render-comment-initial - with: - template: .github/docker_build_comment_template.md - vars: | - message: "`${{ inputs.image-name }}` is being built... 
:hourglass_flowing_sand:" - imageTag: "Pending :hourglass_flowing_sand:" - buildStatus: "Building :hourglass_flowing_sand:" - securityReportStatus: "${{ inputs.scan && 'Pending :hourglass_flowing_sand:' || 'Skipped' }}" - - - uses: peter-evans/find-comment@v3 - if: inputs.comment - id: find-comment - with: - issue-number: ${{ github.event.pull_request.number }} - body-includes: '`${{ inputs.image-name }}` ' - - - uses: peter-evans/create-or-update-comment@v4 - if: inputs.comment - id: add-comment - with: - comment-id: ${{ steps.find-comment.outputs.comment-id }} - edit-mode: replace - issue-number: ${{ github.event.pull_request.number }} - body: ${{ steps.render-comment-initial.outputs.result }} - - name: Build and push image id: build uses: depot/build-push-action@v1 @@ -156,23 +129,13 @@ jobs: echo ::add-mask::$DEPOT_TOKEN echo depot-token=$DEPOT_TOKEN >> $GITHUB_OUTPUT - - uses: chuhlomin/render-template@v1.4 - if: inputs.comment - id: render-comment-on-finished-build + - name: Report build finish + uses: ./.github/actions/docker-build-report-to-pr + if: inputs.comment && !inputs.scan with: - template: .github/docker_build_comment_template.md - vars: | - message: "`${{ inputs.image-name }}` image build finished :sparkles: ${{ inputs.scan && 'Executing security scan...' 
|| '' }}" - imageTag: "`${{ steps.image-tag.outputs.image-tag }}`" - buildStatus: "Finished :white_check_mark:" - securityReportStatus: "${{ inputs.scan && 'Pending :hourglass_flowing_sand:' || 'Skipped' }}" - - - uses: peter-evans/create-or-update-comment@v4 - if: inputs.comment - with: - comment-id: ${{ steps.add-comment.outputs.comment-id }} - edit-mode: replace - body: ${{ steps.render-comment-on-finished-build.outputs.result }} + image-tag: ${{ steps.image-tag.outputs.image-tag }} + build-status: 'Finished :white_check_mark:' + security-report-status: 'Skipped' - name: Run Trivy vulnerability scanner id: trivy @@ -194,25 +157,17 @@ jobs: - name: Render scan results URL id: scan-results-url + if: inputs.scan run: > echo scan-results-url=${{ format('{0}/{1}/security/code-scanning?query=pr%3A{2}+path%3Aflagsmith%2F{3}', github.server_url, github.repository, github.event.pull_request.number, inputs.image-name) }} >> $GITHUB_OUTPUT - - uses: chuhlomin/render-template@v1.4 - id: render-comment-on-finished-scan - if: inputs.scan && inputs.comment - with: - template: .github/docker_build_comment_template.md - vars: | - message: "`${{ inputs.image-name }}` image build and security scan finished :sparkles:" - imageTag: "`${{ steps.image-tag.outputs.image-tag }}`" - buildStatus: "Finished :white_check_mark:" - securityReportStatus: "${{ format('[Results]({0}) :white_check_mark:', steps.scan-results-url.outputs.scan-results-url) }}" - - - uses: peter-evans/create-or-update-comment@v4 - if: inputs.scan && inputs.comment + - name: Report scan results URL + uses: ./.github/actions/docker-build-report-to-pr + if: inputs.comment && inputs.scan with: - comment-id: ${{ steps.add-comment.outputs.comment-id }} - edit-mode: replace - body: ${{ steps.render-comment-on-finished-scan.outputs.result }} + image-tag: ${{ steps.image-tag.outputs.image-tag }} + build-status: 'Finished :white_check_mark:' + security-report-status: + "${{ format('[Results]({0}) :white_check_mark:', 
steps.scan-results-url.outputs.scan-results-url) }}" diff --git a/.github/workflows/platform-docker-build-test-publish.yml b/.github/workflows/platform-docker-build-test-publish.yml index cce41cdaa19b..2a0a5f8c14af 100644 --- a/.github/workflows/platform-docker-build-test-publish.yml +++ b/.github/workflows/platform-docker-build-test-publish.yml @@ -50,6 +50,14 @@ jobs: secrets: | github_private_cloud_token=${{ secrets.GH_PRIVATE_ACCESS_TOKEN }} + docker-build-api-test: + name: Build API Test Image + uses: ./.github/workflows/.reusable-docker-build.yml + with: + target: api-test + image-name: flagsmith-api-test + scan: false + docker-build-e2e: name: Build E2E Image uses: ./.github/workflows/.reusable-docker-build.yml diff --git a/.github/workflows/platform-pull-request.yml b/.github/workflows/platform-pull-request.yml index d314f41a4fa1..d4a9acf52165 100644 --- a/.github/workflows/platform-pull-request.yml +++ b/.github/workflows/platform-pull-request.yml @@ -10,9 +10,11 @@ on: - release-please-* jobs: - validate-pr-title: - name: Validate Conventional Commit title + conventional-commit: + name: Conventional Commit runs-on: ubuntu-latest + permissions: + pull-requests: write steps: - name: Check PR Conventional Commit title uses: amannn/action-semantic-pull-request@v5 @@ -30,13 +32,6 @@ jobs: refactor test chore - - add-labels: - name: Add labels based on Conventional Commit title - runs-on: ubuntu-latest - permissions: - pull-requests: write - steps: - name: Auto-label PR with Conventional Commit title uses: bcoe/conventional-release-labels@v1 with: @@ -66,9 +61,23 @@ jobs: with: require: write + docker-prepare-report-comment: + if: needs.check-permissions.outputs.can-write == 'true' + name: Prepare Docker report comment + needs: check-permissions + runs-on: ubuntu-latest + permissions: + pull-requests: write + steps: + - uses: actions/checkout@v4 + with: + sparse-checkout: .github/ + sparse-checkout-cone-mode: false + - uses: 
./.github/actions/docker-build-report-to-pr + docker-build-unified: if: github.event.pull_request.draft == false - needs: check-permissions + needs: [check-permissions, docker-prepare-report-comment] name: Build Unified Image uses: ./.github/workflows/.reusable-docker-build.yml with: @@ -79,7 +88,7 @@ jobs: docker-build-api: if: github.event.pull_request.draft == false - needs: check-permissions + needs: [check-permissions, docker-prepare-report-comment] name: Build API Image uses: ./.github/workflows/.reusable-docker-build.yml with: @@ -90,7 +99,7 @@ jobs: docker-build-frontend: if: github.event.pull_request.draft == false - needs: check-permissions + needs: [check-permissions, docker-prepare-report-comment] name: Build Frontend Image uses: ./.github/workflows/.reusable-docker-build.yml with: @@ -99,9 +108,20 @@ jobs: image-name: flagsmith-frontend comment: ${{ needs.check-permissions.outputs.can-write == 'true' }} + docker-build-api-test: + if: github.event.pull_request.draft == false + needs: [check-permissions, docker-prepare-report-comment] + name: Build API Test Image + uses: ./.github/workflows/.reusable-docker-build.yml + with: + target: api-test + image-name: flagsmith-api-test + scan: false + comment: ${{ needs.check-permissions.outputs.can-write == 'true' }} + docker-build-e2e: if: github.event.pull_request.draft == false - needs: check-permissions + needs: [check-permissions, docker-prepare-report-comment] name: Build E2E Image uses: ./.github/workflows/.reusable-docker-build.yml with: @@ -113,7 +133,7 @@ jobs: docker-build-private-cloud: if: github.event.pull_request.draft == false && needs.check-permissions.outputs.can-write == 'true' - needs: check-permissions + needs: [check-permissions, docker-prepare-report-comment] name: Build Private Cloud Image uses: ./.github/workflows/.reusable-docker-build.yml with: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 79b9d1f732b1..53f1c2b53ea4 100644 --- a/.pre-commit-config.yaml +++ 
b/.pre-commit-config.yaml @@ -30,7 +30,7 @@ repos: rev: v3.1.0 hooks: - id: prettier - exclude: ^(frontend/|CHANGELOG.md) + exclude: ^(frontend/|CHANGELOG.md|.github/docker_build_comment_template.md) - repo: https://github.com/python-poetry/poetry rev: 1.8.0 diff --git a/Dockerfile b/Dockerfile index 915e9661ba3a..87402d321dbc 100644 --- a/Dockerfile +++ b/Dockerfile @@ -37,6 +37,9 @@ # * api-runtime [python:slim] # * api-runtime-private [api-runtime] +# - Internal stages +# * api-test [build-python] + # - Target (shippable) stages # * private-cloud-api [api-runtime-private, build-python-private] # * private-cloud-unified [api-runtime-private, build-python-private, build-node-django] @@ -138,6 +141,18 @@ FROM api-runtime as api-runtime-private # Install SAML binary dependency RUN apt-get update && apt-get install -y xmlsec1 && rm -rf /var/lib/apt/lists/* +# - Internal stages +# * api-test [build-python] +FROM build-python AS api-test + +RUN make install-packages opts='--with dev' + +WORKDIR /app + +COPY api /app/ + +CMD ["make test"] + # - Target (shippable) stages # * private-cloud-api [api-runtime-private, build-python-private] FROM api-runtime-private as private-cloud-api diff --git a/api/poetry.lock b/api/poetry.lock index 03963f19b73b..2cedbc39dc9d 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -346,17 +346,6 @@ files = [ {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, ] -[[package]] -name = "chardet" -version = "5.2.0" -description = "Universal encoding detector for Python 3" -optional = false -python-versions = ">=3.7" -files = [ - {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, - {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, -] - [[package]] name = "chargebee" version = "2.37.1" @@ -630,13 +619,13 @@ test-randomorder = ["pytest-randomly"] 
[[package]] name = "datamodel-code-generator" -version = "0.22.0" +version = "0.25.8" description = "Datamodel Code Generator" optional = false -python-versions = ">=3.7,<4.0" +python-versions = "<4.0,>=3.7" files = [ - {file = "datamodel_code_generator-0.22.0-py3-none-any.whl", hash = "sha256:5cf8fc4fb6fe7aa750595a558cd4fcd43e36e862f40b0fa4cc123b4548b16a1e"}, - {file = "datamodel_code_generator-0.22.0.tar.gz", hash = "sha256:73ebcefa498e39d0f210923856cb4a498bacc3b7bdea140cca7324e25f5c581b"}, + {file = "datamodel_code_generator-0.25.8-py3-none-any.whl", hash = "sha256:f9b216efad84d8dcb517273d2728875b6052b7e8dc4e5c13a597441cef236f6e"}, + {file = "datamodel_code_generator-0.25.8.tar.gz", hash = "sha256:b7838122b8133dae6e46f36a1cf25c0ccc66745da057988f490d00ab71121de7"}, ] [package.dependencies] @@ -646,15 +635,18 @@ genson = ">=1.2.1,<2.0" inflect = ">=4.1.0,<6.0" isort = ">=4.3.21,<6.0" jinja2 = ">=2.10.1,<4.0" -openapi-spec-validator = ">=0.2.8,<=0.5.7" packaging = "*" -prance = ">=0.18.2" -pydantic = {version = ">=1.10.0,<3.0", extras = ["email"], markers = "python_version >= \"3.11\" and python_version < \"4.0\""} -PySnooper = ">=0.4.1,<2.0.0" -toml = ">=0.10.0,<1.0.0" +pydantic = [ + {version = ">=1.10.0,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.4.0 || >2.4.0,<3.0", extras = ["email"], markers = "python_version >= \"3.12\" and python_version < \"4.0\""}, + {version = ">=1.10.0,<2.4.0 || >2.4.0,<3.0", extras = ["email"], markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, +] +pyyaml = ">=6.0.1" [package.extras] +debug = ["PySnooper (>=0.4.1,<2.0.0)"] +graphql = ["graphql-core (>=3.2.3,<4.0.0)"] http = ["httpx"] +validation = ["openapi-spec-validator (>=0.2.8,<0.7.0)", "prance (>=0.18.2)"] [[package]] name = "defusedxml" @@ -2081,42 +2073,6 @@ files = [ {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] -[[package]] -name = "jsonschema" -version = "4.17.3" -description = "An 
implementation of JSON Schema validation for Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "jsonschema-4.17.3-py3-none-any.whl", hash = "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6"}, - {file = "jsonschema-4.17.3.tar.gz", hash = "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d"}, -] - -[package.dependencies] -attrs = ">=17.4.0" -pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" - -[package.extras] -format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] - -[[package]] -name = "jsonschema-spec" -version = "0.1.6" -description = "JSONSchema Spec with object-oriented paths" -optional = false -python-versions = ">=3.7.0,<4.0.0" -files = [ - {file = "jsonschema_spec-0.1.6-py3-none-any.whl", hash = "sha256:f2206d18c89d1824c1f775ba14ed039743b41a9167bd2c5bdb774b66b3ca0bbf"}, - {file = "jsonschema_spec-0.1.6.tar.gz", hash = "sha256:90215863b56e212086641956b20127ccbf6d8a3a38343dad01d6a74d19482f76"}, -] - -[package.dependencies] -jsonschema = ">=4.0.0,<4.18.0" -pathable = ">=0.4.1,<0.5.0" -PyYAML = ">=5.1" -requests = ">=2.31.0,<3.0.0" - [[package]] name = "lazy-object-proxy" version = "1.9.0" @@ -2203,6 +2159,16 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -2434,41 
+2400,6 @@ rsa = ["cryptography (>=3.0.0)"] signals = ["blinker (>=1.4.0)"] signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] -[[package]] -name = "openapi-schema-validator" -version = "0.4.4" -description = "OpenAPI schema validation for Python" -optional = false -python-versions = ">=3.7.0,<4.0.0" -files = [ - {file = "openapi_schema_validator-0.4.4-py3-none-any.whl", hash = "sha256:79f37f38ef9fd5206b924ed7a6f382cea7b649b3b56383c47f1906082b7b9015"}, - {file = "openapi_schema_validator-0.4.4.tar.gz", hash = "sha256:c573e2be2c783abae56c5a1486ab716ca96e09d1c3eab56020d1dc680aa57bf8"}, -] - -[package.dependencies] -jsonschema = ">=4.0.0,<4.18.0" -rfc3339-validator = "*" - -[package.extras] -docs = ["sphinx (>=5.3.0,<6.0.0)", "sphinx-immaterial (>=0.11.0,<0.12.0)"] - -[[package]] -name = "openapi-spec-validator" -version = "0.5.7" -description = "OpenAPI 2.0 (aka Swagger) and OpenAPI 3 spec validator" -optional = false -python-versions = ">=3.7.0,<4.0.0" -files = [ - {file = "openapi_spec_validator-0.5.7-py3-none-any.whl", hash = "sha256:8712d2879db7692974ef89c47a3ebfc79436442921ec3a826ac0ce80cde8c549"}, - {file = "openapi_spec_validator-0.5.7.tar.gz", hash = "sha256:6c2d42180045a80fd6314de848b94310bdb0fa4949f4b099578b69f79d9fa5ac"}, -] - -[package.dependencies] -jsonschema = ">=4.0.0,<4.18.0" -jsonschema-spec = ">=0.1.1,<0.2.0" -lazy-object-proxy = ">=1.7.1,<2.0.0" -openapi-schema-validator = ">=0.4.2,<0.5.0" - [[package]] name = "opencensus" version = "0.11.2" @@ -2539,17 +2470,6 @@ files = [ {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, ] -[[package]] -name = "pathable" -version = "0.4.3" -description = "Object-oriented paths" -optional = false -python-versions = ">=3.7.0,<4.0.0" -files = [ - {file = "pathable-0.4.3-py3-none-any.whl", hash = "sha256:cdd7b1f9d7d5c8b8d3315dbf5a86b2596053ae845f056f57d97c0eefff84da14"}, - {file = "pathable-0.4.3.tar.gz", hash = 
"sha256:5c869d315be50776cc8a993f3af43e0c60dc01506b399643f919034ebf4cdcab"}, -] - [[package]] name = "pathspec" version = "0.11.2" @@ -2641,32 +2561,6 @@ docs = ["sphinx (>=1.7.1)"] redis = ["redis"] tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)"] -[[package]] -name = "prance" -version = "23.6.21.0" -description = "Resolving Swagger/OpenAPI 2.0 and 3.0.0 Parser" -optional = false -python-versions = ">=3.8" -files = [ - {file = "prance-23.6.21.0-py3-none-any.whl", hash = "sha256:6a4276fa07ed9f22feda4331097d7503c4adc3097e46ffae97425f2c1026bd9f"}, - {file = "prance-23.6.21.0.tar.gz", hash = "sha256:d8c15f8ac34019751cc4945f866d8d964d7888016d10de3592e339567177cabe"}, -] - -[package.dependencies] -chardet = ">=3.0" -packaging = ">=21.3" -requests = ">=2.25" -"ruamel.yaml" = ">=0.17.10" -six = ">=1.15,<2.0" - -[package.extras] -cli = ["click (>=7.0)"] -dev = ["bumpversion (>=0.6)", "pytest (>=6.1)", "pytest-cov (>=2.11)", "sphinx (>=3.4)", "towncrier (>=19.2)", "tox (>=3.4)"] -flex = ["flex (>=6.13,<7.0)"] -icu = ["PyICU (>=2.4,<3.0)"] -osv = ["openapi-spec-validator (>=0.5.1,<0.6.0)"] -ssv = ["swagger-spec-validator (>=2.4,<3.0)"] - [[package]] name = "pre-commit" version = "3.0.4" @@ -3196,42 +3090,6 @@ files = [ {file = "pyrepl-0.9.0.tar.gz", hash = "sha256:292570f34b5502e871bbb966d639474f2b57fbfcd3373c2d6a2f3d56e681a775"}, ] -[[package]] -name = "pyrsistent" -version = "0.19.3" -description = "Persistent/Functional/Immutable data structures" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, - {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, - {file = 
"pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, - {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, - {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, - {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, - {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, - {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, - {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, - {file = "pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, - {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash 
= "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, - {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, - {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, - {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, - {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, - {file = "pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, - {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, - {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, - {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, - {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, - {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, - {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, - {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, -] - [[package]] name = "pysaml2" version = "7.4.2" @@ -3255,20 +3113,6 
@@ xmlschema = ">=1.2.1" [package.extras] s2repoze = ["paste", "repoze.who", "zope.interface"] -[[package]] -name = "pysnooper" -version = "1.2.0" -description = "A poor man's debugger for Python." -optional = false -python-versions = "*" -files = [ - {file = "PySnooper-1.2.0-py2.py3-none-any.whl", hash = "sha256:aa859aa9a746cffc1f35e4ee469d49c3cc5185b5fc0c571feb3af3c94d2eb625"}, - {file = "PySnooper-1.2.0.tar.gz", hash = "sha256:810669e162a250a066d8662e573adbc5af770e937c5b5578f28bb7355d1c859b"}, -] - -[package.extras] -tests = ["pytest"] - [[package]] name = "pytest" version = "7.2.2" @@ -3519,7 +3363,6 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, @@ -3667,20 +3510,6 @@ urllib3 = ">=1.25.10" [package.extras] tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=7.0.0)", "pytest-asyncio", "pytest-cov", "pytest-httpserver", "types-requests"] -[[package]] -name = "rfc3339-validator" -version = "0.1.4" -description = "A pure python RFC3339 validator" -optional = false -python-versions = 
">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, - {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, -] - -[package.dependencies] -six = "*" - [[package]] name = "rsa" version = "4.9" @@ -3695,83 +3524,6 @@ files = [ [package.dependencies] pyasn1 = ">=0.1.3" -[[package]] -name = "ruamel-yaml" -version = "0.17.32" -description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" -optional = false -python-versions = ">=3" -files = [ - {file = "ruamel.yaml-0.17.32-py3-none-any.whl", hash = "sha256:23cd2ed620231677564646b0c6a89d138b6822a0d78656df7abda5879ec4f447"}, - {file = "ruamel.yaml-0.17.32.tar.gz", hash = "sha256:ec939063761914e14542972a5cba6d33c23b0859ab6342f61cf070cfc600efc2"}, -] - -[package.dependencies] -"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.12\""} - -[package.extras] -docs = ["ryd"] -jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] - -[[package]] -name = "ruamel-yaml-clib" -version = "0.2.8" -description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" -optional = false -python-versions = ">=3.6" -files = [ - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:d92f81886165cb14d7b067ef37e142256f1c6a90a65cd156b063a43da1708cfd"}, - {file = 
"ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:b5edda50e5e9e15e54a6a8a0070302b00c518a9d32accc2346ad6c984aacd279"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = 
"sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:7048c338b6c86627afb27faecf418768acb6331fc24cfa56c93e8c9780f815fa"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"}, - {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3fcc54cb0c8b811ff66082de1680b4b14cf8a81dce0d4fbf665c2265a81e07a1"}, - {file = 
"ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:665f58bfd29b167039f714c6998178d27ccd83984084c286110ef26b230f259f"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, - {file = 
"ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9eb5dee2772b0f704ca2e45b1713e4e5198c18f515b52743576d196348f374d3"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, - {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, -] - [[package]] name = "rudder-sdk-python" version = "2.0.2" @@ -4434,4 +4186,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = ">=3.11, <3.13" -content-hash = "09c44d79b4ee9002aeaa4dee988d5d212e128ab33bbf7147d645802378627380" +content-hash = "ae99d6a086452f252539814bb8a94f770a3636006df584b401e955c7876c7c11" diff --git a/api/pyproject.toml b/api/pyproject.toml index a6b317256b64..85701b8a0e10 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -210,7 +210,7 @@ pytest = "~7.2.1" pytest-django = "^4.5.2" black = "~24.3.0" pytest-cov = "~4.1.0" -datamodel-code-generator = "~0.22" +datamodel-code-generator = "~0.25" requests-mock = 
"^1.11.0" django-extensions = "^3.2.3" pdbpp = "^0.10.3" From 759e745098d2e7cb582c0097c18c042e32533012 Mon Sep 17 00:00:00 2001 From: Kim Gustyr Date: Fri, 12 Jul 2024 11:08:00 +0200 Subject: [PATCH 016/247] feat(docker): Update entrypoint (#4262) --- .github/actions/api-deploy-ecs/action.yml | 2 +- api/scripts/run-docker.sh | 35 ++----------------- .../importing-and-exporting/organisations.md | 6 ++-- 3 files changed, 7 insertions(+), 36 deletions(-) diff --git a/.github/actions/api-deploy-ecs/action.yml b/.github/actions/api-deploy-ecs/action.yml index e6a367a830a3..a9b2dc984bfb 100644 --- a/.github/actions/api-deploy-ecs/action.yml +++ b/.github/actions/api-deploy-ecs/action.yml @@ -130,7 +130,7 @@ runs: }, "InputTransformer": { "InputPathsMap":{"project_id":"$.detail.project_id"}, - "InputTemplate": "{ \"containerOverrides\": [ { \"name\": \"flagsmith-api-migration\", \"command\": [\"migrate-identities\", ], \" environment \":[{\"PROJECT_METADATA_TABLE_NAME_DYNAMO\":\"flagsmith_project_metadata\"}]}]}" + "InputTemplate": "{ \"containerOverrides\": [ { \"name\": \"flagsmith-api-migration\", \"command\": [\"migrate_to_edge\", ], \" environment \":[{\"PROJECT_METADATA_TABLE_NAME_DYNAMO\":\"flagsmith_project_metadata\"}]}]}" } } ]' diff --git a/api/scripts/run-docker.sh b/api/scripts/run-docker.sh index 7fa0ff47dda9..d563b8946dc9 100755 --- a/api/scripts/run-docker.sh +++ b/api/scripts/run-docker.sh @@ -1,10 +1,6 @@ #!/bin/bash set -e -# The script can take 2 optional arguments: -# 1. The django target to run -# 2. 
For migrate, serve and migrate-and-serve, the number of seconds to sleep before running - function migrate () { python manage.py waitfordb && python manage.py migrate && python manage.py createcachetable } @@ -40,9 +36,6 @@ function run_task_processor() { --numthreads ${TASK_PROCESSOR_NUM_THREADS:-5} \ --queuepopsize ${TASK_PROCESSOR_QUEUE_POP_SIZE:-10} } -function migrate_identities(){ - python manage.py migrate_to_edge "$1" -} function migrate_analytics_db(){ # if `$ANALYTICS_DATABASE_URL` or DJANGO_DB_NAME_ANALYTICS is set # run the migration command @@ -51,47 +44,25 @@ function migrate_analytics_db(){ fi python manage.py migrate --database analytics } -function import_organisation_from_s3(){ - python manage.py importorganisationfroms3 "$1" "$2" -} -function dump_organisation_to_s3(){ - python manage.py dumporganisationtos3 "$1" "$2" "$3" -} -function dump_organisation_to_local_fs(){ - python manage.py dumporganisationtolocalfs "$1" "$2" -} function bootstrap(){ python manage.py bootstrap } -# Note: `go_to_sleep` is deprecated and will be removed in a future release. 
-function go_to_sleep(){ - echo "Sleeping for ${1} seconds before startup" - sleep ${1} +function default(){ + python manage.py "$@" } if [ "$1" == "migrate" ]; then - if [ $# -eq 2 ]; then go_to_sleep "$2"; fi migrate migrate_analytics_db elif [ "$1" == "serve" ]; then - if [ $# -eq 2 ]; then go_to_sleep "$2"; fi serve elif [ "$1" == "run-task-processor" ]; then run_task_processor elif [ "$1" == "migrate-and-serve" ]; then - if [ $# -eq 2 ]; then go_to_sleep "$2"; fi migrate migrate_analytics_db bootstrap serve -elif [ "$1" == "migrate-identities" ]; then - migrate_identities "$2" -elif [ "$1" == "import-organisation-from-s3" ]; then - import_organisation_from_s3 "$2" "$3" -elif [ "$1" == "dump-organisation-to-s3" ]; then - dump_organisation_to_s3 "$2" "$3" "$4" -elif [ "$1" == "dump-organisation-to-local-fs" ]; then - dump_organisation_to_local_fs "$2" "$3" else - echo "ERROR: unrecognised command '$1'" + default "$@" fi diff --git a/docs/docs/system-administration/importing-and-exporting/organisations.md b/docs/docs/system-administration/importing-and-exporting/organisations.md index 461002711813..0c184ea25cdd 100644 --- a/docs/docs/system-administration/importing-and-exporting/organisations.md +++ b/docs/docs/system-administration/importing-and-exporting/organisations.md @@ -116,7 +116,7 @@ services: environment: DATABASE_URL: postgresql://postgres:password@postgres:5432/flagsmith command: - - 'dump-organisation-to-local-fs' + - 'dumporganisationtolocalfs' - '1' - '/tmp/flagsmith-exporter/org-1.json' depends_on: @@ -164,13 +164,13 @@ This is coming soon - see https://github.com/Flagsmith/flagsmith/issues/2512 for ### Option 2 - S3 bucket ```bash -python manage.py import-organisation-from-s3 +python manage.py importorganisationfroms3 ``` e.g. 
```bash -python manage.py import-organisation-from-s3 my-export-bucket exports/organisation-1.json +python manage.py importorganisationfroms3 my-export-bucket exports/organisation-1.json ``` #### Using localstack to achieve local/test imports with s3 can be done using From 249db141a9e3679d28cacbe40e35b67d82d245c3 Mon Sep 17 00:00:00 2001 From: Kim Gustyr Date: Fri, 12 Jul 2024 12:01:16 +0200 Subject: [PATCH 017/247] fix(build): Avoid E2E rate limiting by swithing to Postgres image hosted on GHCR (#4328) --- frontend/docker-compose-e2e-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/docker-compose-e2e-tests.yml b/frontend/docker-compose-e2e-tests.yml index bfe7769c7da9..39442479b824 100644 --- a/frontend/docker-compose-e2e-tests.yml +++ b/frontend/docker-compose-e2e-tests.yml @@ -6,7 +6,7 @@ version: '3' services: db: - image: public.ecr.aws/docker/library/postgres:15.5-alpine + image: ghcr.io/cloudnative-pg/postgresql:15 environment: POSTGRES_PASSWORD: password POSTGRES_DB: flagsmith From b32a6cada4fa1270e615981f6166dd36039e6881 Mon Sep 17 00:00:00 2001 From: Matthew Elwell Date: Fri, 12 Jul 2024 13:30:49 +0100 Subject: [PATCH 018/247] refactor: replace task processor (#4189) --- api/app/settings/common.py | 2 +- api/app_analytics/tasks.py | 4 +- api/app_analytics/track.py | 2 +- api/audit/tasks.py | 4 +- api/conftest.py | 2 +- api/core/signals.py | 2 +- api/custom_auth/tasks.py | 2 +- .../identities/edge_request_forwarder.py | 4 +- api/edge_api/identities/tasks.py | 4 +- api/environments/tasks.py | 5 +- api/features/import_export/tasks.py | 8 +- api/features/tasks.py | 3 +- api/features/versioning/tasks.py | 2 +- api/integrations/github/tasks.py | 3 +- api/integrations/launch_darkly/tasks.py | 3 +- .../lead_tracking/hubspot/tasks.py | 1 - api/organisations/chargebee/tasks.py | 3 +- api/organisations/tasks.py | 8 +- api/poetry.lock | 36 +- api/projects/tasks.py | 1 - api/pyproject.toml | 4 +- api/segments/tasks.py | 3 +- 
api/sse/tasks.py | 8 +- api/task_processor/__init__.py | 0 api/task_processor/admin.py | 22 - api/task_processor/apps.py | 20 - api/task_processor/decorators.py | 196 -------- api/task_processor/exceptions.py | 10 - api/task_processor/health.py | 44 -- api/task_processor/management/__init__.py | 0 .../management/commands/__init__.py | 0 .../checktaskprocessorthreadhealth.py | 17 - .../management/commands/runprocessor.py | 115 ----- api/task_processor/managers.py | 11 - api/task_processor/migrations/0001_initial.py | 44 -- .../migrations/0002_healthcheckmodel.py | 21 - .../migrations/0003_add_completed_to_task.py | 22 - .../migrations/0004_recreate_task_indexes.py | 43 -- ...005_update_conditional_index_conditions.py | 45 -- .../migrations/0006_auto_20230221_0802.py | 45 -- .../migrations/0007_add_is_locked.py | 23 - .../0008_add_get_task_to_process_function.py | 31 -- ...009_add_recurring_task_run_first_run_at.py | 18 - .../migrations/0010_task_priority.py | 27 - ...11_add_priority_to_get_tasks_to_process.py | 27 - api/task_processor/migrations/__init__.py | 0 .../0008_get_recurring_tasks_to_process.sql | 30 -- .../sql/0008_get_tasks_to_process.sql | 30 -- .../sql/0011_get_tasks_to_process.sql | 30 -- api/task_processor/models.py | 220 -------- api/task_processor/processor.py | 116 ----- api/task_processor/serializers.py | 5 - api/task_processor/task_registry.py | 23 - api/task_processor/task_run_method.py | 7 - api/task_processor/tasks.py | 66 --- api/task_processor/thread_monitoring.py | 34 -- api/task_processor/threads.py | 49 -- api/task_processor/urls.py | 5 - api/task_processor/views.py | 17 - .../test_unit_hubspot_lead_tracking.py | 2 +- ...t_organisations_subscription_info_cache.py | 3 +- .../unit/projects/test_unit_projects_tasks.py | 2 +- .../unit/projects/test_unit_projects_views.py | 2 +- api/tests/unit/task_processor/__init__.py | 0 api/tests/unit/task_processor/conftest.py | 35 -- .../test_unit_task_processor_decorators.py | 214 -------- 
.../test_unit_task_processor_health.py | 35 -- .../test_unit_task_processor_models.py | 75 --- .../test_unit_task_processor_processor.py | 471 ------------------ .../test_unit_task_processor_tasks.py | 193 ------- ...t_unit_task_processor_thread_monitoring.py | 89 ---- .../test_unit_task_processor_threads.py | 42 -- api/users/tasks.py | 2 +- api/webhooks/webhooks.py | 4 +- 74 files changed, 58 insertions(+), 2638 deletions(-) delete mode 100644 api/task_processor/__init__.py delete mode 100644 api/task_processor/admin.py delete mode 100644 api/task_processor/apps.py delete mode 100644 api/task_processor/decorators.py delete mode 100644 api/task_processor/exceptions.py delete mode 100644 api/task_processor/health.py delete mode 100644 api/task_processor/management/__init__.py delete mode 100644 api/task_processor/management/commands/__init__.py delete mode 100644 api/task_processor/management/commands/checktaskprocessorthreadhealth.py delete mode 100644 api/task_processor/management/commands/runprocessor.py delete mode 100644 api/task_processor/managers.py delete mode 100644 api/task_processor/migrations/0001_initial.py delete mode 100644 api/task_processor/migrations/0002_healthcheckmodel.py delete mode 100644 api/task_processor/migrations/0003_add_completed_to_task.py delete mode 100644 api/task_processor/migrations/0004_recreate_task_indexes.py delete mode 100644 api/task_processor/migrations/0005_update_conditional_index_conditions.py delete mode 100644 api/task_processor/migrations/0006_auto_20230221_0802.py delete mode 100644 api/task_processor/migrations/0007_add_is_locked.py delete mode 100644 api/task_processor/migrations/0008_add_get_task_to_process_function.py delete mode 100644 api/task_processor/migrations/0009_add_recurring_task_run_first_run_at.py delete mode 100644 api/task_processor/migrations/0010_task_priority.py delete mode 100644 api/task_processor/migrations/0011_add_priority_to_get_tasks_to_process.py delete mode 100644 
api/task_processor/migrations/__init__.py delete mode 100644 api/task_processor/migrations/sql/0008_get_recurring_tasks_to_process.sql delete mode 100644 api/task_processor/migrations/sql/0008_get_tasks_to_process.sql delete mode 100644 api/task_processor/migrations/sql/0011_get_tasks_to_process.sql delete mode 100644 api/task_processor/models.py delete mode 100644 api/task_processor/processor.py delete mode 100644 api/task_processor/serializers.py delete mode 100644 api/task_processor/task_registry.py delete mode 100644 api/task_processor/task_run_method.py delete mode 100644 api/task_processor/tasks.py delete mode 100644 api/task_processor/thread_monitoring.py delete mode 100644 api/task_processor/threads.py delete mode 100644 api/task_processor/urls.py delete mode 100644 api/task_processor/views.py delete mode 100644 api/tests/unit/task_processor/__init__.py delete mode 100644 api/tests/unit/task_processor/conftest.py delete mode 100644 api/tests/unit/task_processor/test_unit_task_processor_decorators.py delete mode 100644 api/tests/unit/task_processor/test_unit_task_processor_health.py delete mode 100644 api/tests/unit/task_processor/test_unit_task_processor_models.py delete mode 100644 api/tests/unit/task_processor/test_unit_task_processor_processor.py delete mode 100644 api/tests/unit/task_processor/test_unit_task_processor_tasks.py delete mode 100644 api/tests/unit/task_processor/test_unit_task_processor_thread_monitoring.py delete mode 100644 api/tests/unit/task_processor/test_unit_task_processor_threads.py diff --git a/api/app/settings/common.py b/api/app/settings/common.py index 4ad7ba16141e..3593e1579176 100644 --- a/api/app/settings/common.py +++ b/api/app/settings/common.py @@ -25,9 +25,9 @@ from django.core.exceptions import ImproperlyConfigured from django.core.management.utils import get_random_secret_key from environs import Env +from task_processor.task_run_method import TaskRunMethod from app.routers import ReplicaReadStrategy -from 
task_processor.task_run_method import TaskRunMethod env = Env() diff --git a/api/app_analytics/tasks.py b/api/app_analytics/tasks.py index b7818a232e4b..c1493ac5209f 100644 --- a/api/app_analytics/tasks.py +++ b/api/app_analytics/tasks.py @@ -5,13 +5,13 @@ from django.conf import settings from django.db.models import Count, Q, Sum from django.utils import timezone - -from environments.models import Environment from task_processor.decorators import ( register_recurring_task, register_task_handler, ) +from environments.models import Environment + from .models import ( APIUsageBucket, APIUsageRaw, diff --git a/api/app_analytics/track.py b/api/app_analytics/track.py index fc5b697a7bfd..5403d204df11 100644 --- a/api/app_analytics/track.py +++ b/api/app_analytics/track.py @@ -6,9 +6,9 @@ from django.conf import settings from django.core.cache import caches from six.moves.urllib.parse import quote # python 2/3 compatible urllib import +from task_processor.decorators import register_task_handler from environments.models import Environment -from task_processor.decorators import register_task_handler from util.util import postpone logger = logging.getLogger(__name__) diff --git a/api/audit/tasks.py b/api/audit/tasks.py index 0863580a50e3..0a94efa436ef 100644 --- a/api/audit/tasks.py +++ b/api/audit/tasks.py @@ -4,14 +4,14 @@ from django.contrib.auth import get_user_model from django.utils import timezone +from task_processor.decorators import register_task_handler +from task_processor.models import TaskPriority from audit.constants import ( FEATURE_STATE_UPDATED_BY_CHANGE_REQUEST_MESSAGE, FEATURE_STATE_WENT_LIVE_MESSAGE, ) from audit.models import AuditLog, RelatedObjectType -from task_processor.decorators import register_task_handler -from task_processor.models import TaskPriority logger = logging.getLogger(__name__) diff --git a/api/conftest.py b/api/conftest.py index 543998df2656..a54679405478 100644 --- a/api/conftest.py +++ b/api/conftest.py @@ -16,6 +16,7 @@ from 
pytest_mock import MockerFixture from rest_framework.authtoken.models import Token from rest_framework.test import APIClient +from task_processor.task_run_method import TaskRunMethod from urllib3.connectionpool import HTTPConnectionPool from xdist import get_xdist_worker_id @@ -65,7 +66,6 @@ from projects.permissions import VIEW_PROJECT from projects.tags.models import Tag from segments.models import Condition, Segment, SegmentRule -from task_processor.task_run_method import TaskRunMethod from tests.test_helpers import fix_issue_3869 from tests.types import ( WithEnvironmentPermissionsCallable, diff --git a/api/core/signals.py b/api/core/signals.py index d88cb466ecbf..db3d92bdc09e 100644 --- a/api/core/signals.py +++ b/api/core/signals.py @@ -5,9 +5,9 @@ from django.core.exceptions import ObjectDoesNotExist from django.utils import timezone from simple_history.models import HistoricalRecords +from task_processor.task_run_method import TaskRunMethod from audit import tasks -from task_processor.task_run_method import TaskRunMethod from users.models import FFAdminUser logger = logging.getLogger(__name__) diff --git a/api/custom_auth/tasks.py b/api/custom_auth/tasks.py index 670d1bc6ae0a..0213b9e5474a 100644 --- a/api/custom_auth/tasks.py +++ b/api/custom_auth/tasks.py @@ -2,9 +2,9 @@ from django.conf import settings from django.utils import timezone +from task_processor.decorators import register_recurring_task from custom_auth.models import UserPasswordResetRequest -from task_processor.decorators import register_recurring_task @register_recurring_task( diff --git a/api/edge_api/identities/edge_request_forwarder.py b/api/edge_api/identities/edge_request_forwarder.py index a506a489a480..db8e7d850124 100644 --- a/api/edge_api/identities/edge_request_forwarder.py +++ b/api/edge_api/identities/edge_request_forwarder.py @@ -4,11 +4,11 @@ from core.constants import FLAGSMITH_SIGNATURE_HEADER from core.signing import sign_payload from django.conf import settings - -from 
environments.dynamodb.migrator import IdentityMigrator from task_processor.decorators import register_task_handler from task_processor.models import TaskPriority +from environments.dynamodb.migrator import IdentityMigrator + def _should_forward(project_id: int) -> bool: migrator = IdentityMigrator(project_id) diff --git a/api/edge_api/identities/tasks.py b/api/edge_api/identities/tasks.py index 48d21dcb7829..e97e7208f8a0 100644 --- a/api/edge_api/identities/tasks.py +++ b/api/edge_api/identities/tasks.py @@ -2,6 +2,8 @@ import typing from django.utils import timezone +from task_processor.decorators import register_task_handler +from task_processor.models import TaskPriority from audit.models import AuditLog from audit.related_object_type import RelatedObjectType @@ -9,8 +11,6 @@ from environments.dynamodb import DynamoEnvironmentV2Wrapper from environments.models import Environment, Webhook from features.models import Feature, FeatureState -from task_processor.decorators import register_task_handler -from task_processor.models import TaskPriority from users.models import FFAdminUser from util.mappers import map_identity_changeset_to_identity_override_changeset from webhooks.webhooks import WebhookEventType, call_environment_webhooks diff --git a/api/environments/tasks.py b/api/environments/tasks.py index bfc0450f250e..75f66df5f1d4 100644 --- a/api/environments/tasks.py +++ b/api/environments/tasks.py @@ -1,3 +1,6 @@ +from task_processor.decorators import register_task_handler +from task_processor.models import TaskPriority + from audit.models import AuditLog from environments.dynamodb import DynamoIdentityWrapper from environments.models import ( @@ -9,8 +12,6 @@ send_environment_update_message_for_environment, send_environment_update_message_for_project, ) -from task_processor.decorators import register_task_handler -from task_processor.models import TaskPriority @register_task_handler(priority=TaskPriority.HIGH) diff --git a/api/features/import_export/tasks.py 
b/api/features/import_export/tasks.py index f313dfd1718b..b92a133233fd 100644 --- a/api/features/import_export/tasks.py +++ b/api/features/import_export/tasks.py @@ -5,6 +5,10 @@ from django.conf import settings from django.db.models import Q from django.utils import timezone +from task_processor.decorators import ( + register_recurring_task, + register_task_handler, +) from environments.models import Environment from features.models import Feature, FeatureStateValue @@ -12,10 +16,6 @@ from features.value_types import BOOLEAN, INTEGER, STRING from features.versioning.versioning_service import get_environment_flags_list from projects.models import Project -from task_processor.decorators import ( - register_recurring_task, - register_task_handler, -) from .constants import FAILED, OVERWRITE_DESTRUCTIVE, PROCESSING, SKIP, SUCCESS from .models import ( diff --git a/api/features/tasks.py b/api/features/tasks.py index 95ceade3ed30..411bec70ea52 100644 --- a/api/features/tasks.py +++ b/api/features/tasks.py @@ -1,8 +1,9 @@ import logging +from task_processor.decorators import register_task_handler + from environments.models import Webhook from features.models import Feature, FeatureState -from task_processor.decorators import register_task_handler from webhooks.constants import WEBHOOK_DATETIME_FORMAT from webhooks.webhooks import ( WebhookEventType, diff --git a/api/features/versioning/tasks.py b/api/features/versioning/tasks.py index 97032ccdffd7..c3f408d0cd3f 100644 --- a/api/features/versioning/tasks.py +++ b/api/features/versioning/tasks.py @@ -2,6 +2,7 @@ import typing from django.utils import timezone +from task_processor.decorators import register_task_handler from audit.constants import ENVIRONMENT_FEATURE_VERSION_PUBLISHED_MESSAGE from audit.models import AuditLog @@ -14,7 +15,6 @@ from features.versioning.versioning_service import ( get_environment_flags_queryset, ) -from task_processor.decorators import register_task_handler from webhooks.webhooks import 
WebhookEventType, call_environment_webhooks if typing.TYPE_CHECKING: diff --git a/api/integrations/github/tasks.py b/api/integrations/github/tasks.py index 384bb88f8960..8e94c9007b5d 100644 --- a/api/integrations/github/tasks.py +++ b/api/integrations/github/tasks.py @@ -2,10 +2,11 @@ from typing import Any, List from urllib.parse import urlparse +from task_processor.decorators import register_task_handler + from features.models import Feature from integrations.github.client import post_comment_to_github from integrations.github.dataclasses import CallGithubData -from task_processor.decorators import register_task_handler from webhooks.webhooks import WebhookEventType logger = logging.getLogger(__name__) diff --git a/api/integrations/launch_darkly/tasks.py b/api/integrations/launch_darkly/tasks.py index 833754fd8794..5943aff64979 100644 --- a/api/integrations/launch_darkly/tasks.py +++ b/api/integrations/launch_darkly/tasks.py @@ -1,6 +1,7 @@ +from task_processor.decorators import register_task_handler + from integrations.launch_darkly.models import LaunchDarklyImportRequest from integrations.launch_darkly.services import process_import_request -from task_processor.decorators import register_task_handler @register_task_handler() diff --git a/api/integrations/lead_tracking/hubspot/tasks.py b/api/integrations/lead_tracking/hubspot/tasks.py index 34f63d898420..d52cc45cbbc1 100644 --- a/api/integrations/lead_tracking/hubspot/tasks.py +++ b/api/integrations/lead_tracking/hubspot/tasks.py @@ -1,5 +1,4 @@ from django.conf import settings - from task_processor.decorators import register_task_handler diff --git a/api/organisations/chargebee/tasks.py b/api/organisations/chargebee/tasks.py index 24348a4a6383..8e376c25001e 100644 --- a/api/organisations/chargebee/tasks.py +++ b/api/organisations/chargebee/tasks.py @@ -1,6 +1,7 @@ -from organisations.chargebee.cache import ChargebeeCache from task_processor.decorators import register_task_handler +from 
organisations.chargebee.cache import ChargebeeCache + @register_task_handler() def update_chargebee_cache(): diff --git a/api/organisations/tasks.py b/api/organisations/tasks.py index fd8e7f281746..2afbb96ec04c 100644 --- a/api/organisations/tasks.py +++ b/api/organisations/tasks.py @@ -9,6 +9,10 @@ from django.db.models import F, Max from django.template.loader import render_to_string from django.utils import timezone +from task_processor.decorators import ( + register_recurring_task, + register_task_handler, +) from integrations.flagsmith.client import get_client from organisations import subscription_info_cache @@ -28,10 +32,6 @@ from organisations.subscriptions.subscription_service import ( get_subscription_metadata, ) -from task_processor.decorators import ( - register_recurring_task, - register_task_handler, -) from users.models import FFAdminUser from .constants import ( diff --git a/api/poetry.lock b/api/poetry.lock index 2cedbc39dc9d..bd728d24778e 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
[[package]] name = "annotated-types" @@ -1415,9 +1415,9 @@ simplejson = "~3.19.1" [package.source] type = "git" -url = "https://git@github.com/Flagsmith/flagsmith-task-processor" -reference = "0.1.0.alpha5" -resolved_reference = "d3018fa2ebf288625e20474d74b84643c2592391" +url = "https://github.com/Flagsmith/flagsmith-task-processor" +reference = "v1.0.0" +resolved_reference = "5f43c67eab5919412a74a61589563342642ced72" [[package]] name = "flake8" @@ -2159,16 +2159,6 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, - {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -3350,7 +3340,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -3358,15 +3347,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -3383,7 +3365,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -3391,7 +3372,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - 
{file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -4061,13 +4041,13 @@ files = [] develop = false [package.dependencies] -flagsmith-task-processor = {git = "https://git@github.com/Flagsmith/flagsmith-task-processor", rev = "0.1.0.alpha5"} +flagsmith-task-processor = {git = "https://github.com/Flagsmith/flagsmith-task-processor", tag = "v1.0.0"} [package.source] type = "git" url = "https://github.com/flagsmith/flagsmith-workflows" -reference = "v2.3.6" -resolved_reference = "1cb875ae066d2fd7f0d3821c384a00eeef3d8114" +reference = "v2.3.7" +resolved_reference = "1f963178c57a2889e8f8277d828e1e8875d49809" [[package]] name = "wrapt" @@ -4186,4 +4166,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = ">=3.11, <3.13" -content-hash = "ae99d6a086452f252539814bb8a94f770a3636006df584b401e955c7876c7c11" +content-hash = "8950da8b0a6456eabce6f4d62e12a8c0df291099eafced2260188692f5697117" diff --git a/api/projects/tasks.py b/api/projects/tasks.py index 1c99990e660a..bbf54fa5b333 100644 --- a/api/projects/tasks.py +++ b/api/projects/tasks.py @@ -2,7 +2,6 @@ from django.conf import settings from django.db import transaction - from task_processor.decorators import register_task_handler diff --git a/api/pyproject.toml b/api/pyproject.toml index 85701b8a0e10..6509e65c0ee3 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -87,6 +87,7 @@ known_third_party = [ 'shortuuid', 'simple_history', 'six', + 'task_processor', 'telemetry', 'tests', 'trench', @@ -168,6 +169,7 @@ pygithub = "2.1.1" hubspot-api-client = 
"^8.2.1" djangorestframework-dataclasses = "^1.3.1" pyotp = "^2.9.0" +flagsmith-task-processor = { git = "https://github.com/Flagsmith/flagsmith-task-processor", tag = "v1.0.0" } [tool.poetry.group.auth-controller] optional = true @@ -191,7 +193,7 @@ flagsmith-ldap = { git = "https://github.com/flagsmith/flagsmith-ldap", tag = "v optional = true [tool.poetry.group.workflows.dependencies] -workflows-logic = { git = "https://github.com/flagsmith/flagsmith-workflows", tag = "v2.3.6" } +workflows-logic = { git = "https://github.com/flagsmith/flagsmith-workflows", tag = "v2.3.7" } [tool.poetry.group.dev.dependencies] django-test-migrations = "~1.2.0" diff --git a/api/segments/tasks.py b/api/segments/tasks.py index 50ba4901e7c7..bb572ceb8e2e 100644 --- a/api/segments/tasks.py +++ b/api/segments/tasks.py @@ -1,6 +1,7 @@ -from segments.models import Segment from task_processor.decorators import register_task_handler +from segments.models import Segment + @register_task_handler() def delete_segment(segment_id: int) -> None: diff --git a/api/sse/tasks.py b/api/sse/tasks.py index 937d281b71e5..c1763fabe79c 100644 --- a/api/sse/tasks.py +++ b/api/sse/tasks.py @@ -5,15 +5,15 @@ from app_analytics.influxdb_wrapper import influxdb_client from django.conf import settings from influxdb_client import Point, WriteOptions - -from environments.models import Environment -from projects.models import Project -from sse import sse_service from task_processor.decorators import ( register_recurring_task, register_task_handler, ) +from environments.models import Environment +from projects.models import Project +from sse import sse_service + from .exceptions import SSEAuthTokenNotSet logger = logging.getLogger(__name__) diff --git a/api/task_processor/__init__.py b/api/task_processor/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/api/task_processor/admin.py b/api/task_processor/admin.py deleted file mode 100644 index 6a32fc765cf6..000000000000 --- 
a/api/task_processor/admin.py +++ /dev/null @@ -1,22 +0,0 @@ -from typing import Optional - -from django.contrib import admin - -from task_processor.models import RecurringTask - - -@admin.register(RecurringTask) -class RecurringTaskAdmin(admin.ModelAdmin): - list_display = ( - "uuid", - "task_identifier", - "run_every", - "last_run_status", - "is_locked", - ) - readonly_fields = ("args", "kwargs") - - def last_run_status(self, instance: RecurringTask) -> Optional[str]: - if last_run := instance.task_runs.order_by("-started_at").first(): - return last_run.result - return None diff --git a/api/task_processor/apps.py b/api/task_processor/apps.py deleted file mode 100644 index 66e7e761717e..000000000000 --- a/api/task_processor/apps.py +++ /dev/null @@ -1,20 +0,0 @@ -from django.apps import AppConfig -from django.conf import settings -from health_check.plugins import plugin_dir - -from task_processor.task_run_method import TaskRunMethod - - -class TaskProcessorAppConfig(AppConfig): - name = "task_processor" - - def ready(self): - from . 
import tasks # noqa - - if ( - settings.ENABLE_TASK_PROCESSOR_HEALTH_CHECK - and settings.TASK_RUN_METHOD == TaskRunMethod.TASK_PROCESSOR - ): - from .health import TaskProcessorHealthCheckBackend - - plugin_dir.register(TaskProcessorHealthCheckBackend) diff --git a/api/task_processor/decorators.py b/api/task_processor/decorators.py deleted file mode 100644 index 1399b5344608..000000000000 --- a/api/task_processor/decorators.py +++ /dev/null @@ -1,196 +0,0 @@ -import logging -import os -import typing -from datetime import datetime, time, timedelta -from inspect import getmodule -from threading import Thread - -from django.conf import settings -from django.db.transaction import on_commit -from django.utils import timezone - -from task_processor.exceptions import InvalidArgumentsError, TaskQueueFullError -from task_processor.models import RecurringTask, Task, TaskPriority -from task_processor.task_registry import register_task -from task_processor.task_run_method import TaskRunMethod - -P = typing.ParamSpec("P") - -logger = logging.getLogger(__name__) - - -class TaskHandler(typing.Generic[P]): - __slots__ = ( - "unwrapped", - "queue_size", - "priority", - "transaction_on_commit", - "task_identifier", - ) - - unwrapped: typing.Callable[P, None] - - def __init__( - self, - f: typing.Callable[P, None], - *, - task_name: str | None = None, - queue_size: int | None = None, - priority: TaskPriority = TaskPriority.NORMAL, - transaction_on_commit: bool = True, - ) -> None: - self.unwrapped = f - self.queue_size = queue_size - self.priority = priority - self.transaction_on_commit = transaction_on_commit - - task_name = task_name or f.__name__ - task_module = getmodule(f).__name__.rsplit(".")[-1] - self.task_identifier = task_identifier = f"{task_module}.{task_name}" - register_task(task_identifier, f) - - def __call__(self, *args: P.args, **kwargs: P.kwargs) -> None: - _validate_inputs(*args, **kwargs) - return self.unwrapped(*args, **kwargs) - - def delay( - self, - *, - 
delay_until: datetime | None = None, - # TODO @khvn26 consider typing `args` and `kwargs` with `ParamSpec` - # (will require a change to the signature) - args: tuple[typing.Any, ...] = (), - kwargs: dict[str, typing.Any] | None = None, - ) -> Task | None: - logger.debug("Request to run task '%s' asynchronously.", self.task_identifier) - - kwargs = kwargs or {} - - if delay_until and settings.TASK_RUN_METHOD != TaskRunMethod.TASK_PROCESSOR: - logger.warning( - "Cannot schedule tasks to run in the future without task processor." - ) - return - - if settings.TASK_RUN_METHOD == TaskRunMethod.SYNCHRONOUSLY: - _validate_inputs(*args, **kwargs) - self.unwrapped(*args, **kwargs) - elif settings.TASK_RUN_METHOD == TaskRunMethod.SEPARATE_THREAD: - logger.debug("Running task '%s' in separate thread", self.task_identifier) - self.run_in_thread(args=args, kwargs=kwargs) - else: - logger.debug("Creating task for function '%s'...", self.task_identifier) - try: - task = Task.create( - task_identifier=self.task_identifier, - scheduled_for=delay_until or timezone.now(), - priority=self.priority, - queue_size=self.queue_size, - args=args, - kwargs=kwargs, - ) - except TaskQueueFullError as e: - logger.warning(e) - return - - task.save() - return task - - def run_in_thread( - self, - *, - args: tuple[typing.Any, ...] 
= (), - kwargs: dict[str, typing.Any] | None = None, - ) -> None: - kwargs = kwargs or {} - _validate_inputs(*args, **kwargs) - thread = Thread(target=self.unwrapped, args=args, kwargs=kwargs, daemon=True) - - def _start() -> None: - logger.info( - "Running function %s in unmanaged thread.", self.unwrapped.__name__ - ) - thread.start() - - if self.transaction_on_commit: - return on_commit(_start) - return _start() - - -def register_task_handler( # noqa: C901 - *, - task_name: str | None = None, - queue_size: int | None = None, - priority: TaskPriority = TaskPriority.NORMAL, - transaction_on_commit: bool = True, -) -> typing.Callable[[typing.Callable[P, None]], TaskHandler[P]]: - """ - Turn a function into an asynchronous task. - - :param str task_name: task name. Defaults to function name. - :param int queue_size: (`TASK_PROCESSOR` task run method only) - max queue size for the task. Task runs exceeding the max size get dropped by - the task processor Defaults to `None` (infinite). - :param TaskPriority priority: task priority. - :param bool transaction_on_commit: (`SEPARATE_THREAD` task run method only) - Whether to wrap the task call in `transanction.on_commit`. Defaults to `True`. - We need this for the task to be able to access data committed with the current - transaction. If the task is invoked outside of a transaction, it will start - immediately. - Pass `False` if you want the task to start immediately regardless of current - transaction. 
- :rtype: TaskHandler - """ - - def wrapper(f: typing.Callable[P, None]) -> TaskHandler[P]: - return TaskHandler( - f, - task_name=task_name, - queue_size=queue_size, - priority=priority, - transaction_on_commit=transaction_on_commit, - ) - - return wrapper - - -def register_recurring_task( - run_every: timedelta, - task_name: str | None = None, - args: tuple[typing.Any] = (), - kwargs: dict[str, typing.Any] | None = None, - first_run_time: time | None = None, -) -> typing.Callable[[typing.Callable[..., None]], RecurringTask]: - if not os.environ.get("RUN_BY_PROCESSOR"): - # Do not register recurring tasks if not invoked by task processor - return lambda f: f - - def decorator(f: typing.Callable[..., None]) -> RecurringTask: - nonlocal task_name - - task_name = task_name or f.__name__ - task_module = getmodule(f).__name__.rsplit(".")[-1] - task_identifier = f"{task_module}.{task_name}" - - register_task(task_identifier, f) - - task, _ = RecurringTask.objects.update_or_create( - task_identifier=task_identifier, - defaults={ - "serialized_args": RecurringTask.serialize_data(args or ()), - "serialized_kwargs": RecurringTask.serialize_data(kwargs or {}), - "run_every": run_every, - "first_run_time": first_run_time, - }, - ) - return task - - return decorator - - -def _validate_inputs(*args: typing.Any, **kwargs: typing.Any) -> None: - try: - Task.serialize_data(args or ()) - Task.serialize_data(kwargs or {}) - except TypeError as e: - raise InvalidArgumentsError("Inputs are not serializable.") from e diff --git a/api/task_processor/exceptions.py b/api/task_processor/exceptions.py deleted file mode 100644 index 7f697a6e7ba3..000000000000 --- a/api/task_processor/exceptions.py +++ /dev/null @@ -1,10 +0,0 @@ -class TaskProcessingError(Exception): - pass - - -class InvalidArgumentsError(TaskProcessingError): - pass - - -class TaskQueueFullError(Exception): - pass diff --git a/api/task_processor/health.py b/api/task_processor/health.py deleted file mode 100644 index 
2d57f5a2bb5e..000000000000 --- a/api/task_processor/health.py +++ /dev/null @@ -1,44 +0,0 @@ -import uuid - -import backoff -from health_check.backends import BaseHealthCheckBackend -from health_check.exceptions import HealthCheckException - -from task_processor.models import HealthCheckModel -from task_processor.tasks import create_health_check_model - - -def is_processor_healthy(max_tries: int = 5, factor: float = 0.1) -> bool: - health_check_model_uuid = str(uuid.uuid4()) - - create_health_check_model.delay(args=(health_check_model_uuid,)) - - @backoff.on_predicate( - backoff.expo, - lambda m: m is None, - max_tries=max_tries, - factor=factor, - jitter=None, - ) - def get_health_check_model(): - return HealthCheckModel.objects.filter(uuid=health_check_model_uuid).first() - - health_check_model = get_health_check_model() - if health_check_model: - health_check_model.delete() - return True - - return False - - -class TaskProcessorHealthCheckBackend(BaseHealthCheckBackend): - #: The status endpoints will respond with a 200 status code - #: even if the check errors. - critical_service = False - - def check_status(self): - if not is_processor_healthy(): - raise HealthCheckException("Task processor is unable to process tasks.") - - def identifier(self): - return self.__class__.__name__ # Display name on the endpoint. 
diff --git a/api/task_processor/management/__init__.py b/api/task_processor/management/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/api/task_processor/management/commands/__init__.py b/api/task_processor/management/commands/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/api/task_processor/management/commands/checktaskprocessorthreadhealth.py b/api/task_processor/management/commands/checktaskprocessorthreadhealth.py deleted file mode 100644 index f3eda619b484..000000000000 --- a/api/task_processor/management/commands/checktaskprocessorthreadhealth.py +++ /dev/null @@ -1,17 +0,0 @@ -import logging -import sys - -from django.core.management import BaseCommand - -from task_processor.thread_monitoring import get_unhealthy_thread_names - -logger = logging.getLogger(__name__) - - -class Command(BaseCommand): - def handle(self, *args, **options): - if get_unhealthy_thread_names(): - sys.exit("Task processor has unhealthy threads.") - - logger.info("Task processor has no unhealthy threads.") - sys.exit(0) diff --git a/api/task_processor/management/commands/runprocessor.py b/api/task_processor/management/commands/runprocessor.py deleted file mode 100644 index 6f46d788a742..000000000000 --- a/api/task_processor/management/commands/runprocessor.py +++ /dev/null @@ -1,115 +0,0 @@ -import logging -import signal -import time -import typing -from argparse import ArgumentParser -from datetime import timedelta - -from django.core.management import BaseCommand -from django.utils import timezone - -from task_processor.task_registry import registered_tasks -from task_processor.thread_monitoring import ( - clear_unhealthy_threads, - write_unhealthy_threads, -) -from task_processor.threads import TaskRunner - -logger = logging.getLogger(__name__) - - -class Command(BaseCommand): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - signal.signal(signal.SIGINT, self._exit_gracefully) - 
signal.signal(signal.SIGTERM, self._exit_gracefully) - - self._threads: typing.List[TaskRunner] = [] - self._monitor_threads = True - - def add_arguments(self, parser: ArgumentParser): - parser.add_argument( - "--numthreads", - type=int, - help="Number of worker threads to run.", - default=5, - ) - parser.add_argument( - "--sleepintervalms", - type=int, - help="Number of millis each worker waits before checking for new tasks", - default=2000, - ) - parser.add_argument( - "--graceperiodms", - type=int, - help="Number of millis before running task is considered 'stuck'.", - default=20000, - ) - parser.add_argument( - "--queuepopsize", - type=int, - help="Number of tasks each worker will pop from the queue on each cycle.", - default=10, - ) - - def handle(self, *args, **options): - num_threads = options["numthreads"] - sleep_interval_ms = options["sleepintervalms"] - grace_period_ms = options["graceperiodms"] - queue_pop_size = options["queuepopsize"] - - logger.debug( - "Running task processor with args: %s", - ",".join([f"{k}={v}" for k, v in options.items()]), - ) - - self._threads.extend( - [ - TaskRunner( - sleep_interval_millis=sleep_interval_ms, - queue_pop_size=queue_pop_size, - ) - for _ in range(num_threads) - ] - ) - - logger.info( - "Processor starting. 
Registered tasks are: %s", - list(registered_tasks.keys()), - ) - - for thread in self._threads: - thread.start() - - clear_unhealthy_threads() - while self._monitor_threads: - time.sleep(1) - unhealthy_threads = self._get_unhealthy_threads( - ms_before_unhealthy=grace_period_ms + sleep_interval_ms - ) - if unhealthy_threads: - write_unhealthy_threads(unhealthy_threads) - - [t.join() for t in self._threads] - - def _exit_gracefully(self, *args): - self._monitor_threads = False - for t in self._threads: - t.stop() - - def _get_unhealthy_threads( - self, ms_before_unhealthy: int - ) -> typing.List[TaskRunner]: - unhealthy_threads = [] - healthy_threshold = timezone.now() - timedelta(milliseconds=ms_before_unhealthy) - - for thread in self._threads: - if ( - not thread.is_alive() - or not thread.last_checked_for_tasks - or thread.last_checked_for_tasks < healthy_threshold - ): - unhealthy_threads.append(thread) - return unhealthy_threads diff --git a/api/task_processor/managers.py b/api/task_processor/managers.py deleted file mode 100644 index 2d2306cfddf6..000000000000 --- a/api/task_processor/managers.py +++ /dev/null @@ -1,11 +0,0 @@ -from django.db.models import Manager - - -class TaskManager(Manager): - def get_tasks_to_process(self, num_tasks): - return self.raw("SELECT * FROM get_tasks_to_process(%s)", [num_tasks]) - - -class RecurringTaskManager(Manager): - def get_tasks_to_process(self, num_tasks): - return self.raw("SELECT * FROM get_recurringtasks_to_process(%s)", [num_tasks]) diff --git a/api/task_processor/migrations/0001_initial.py b/api/task_processor/migrations/0001_initial.py deleted file mode 100644 index 83a31195bb1c..000000000000 --- a/api/task_processor/migrations/0001_initial.py +++ /dev/null @@ -1,44 +0,0 @@ -# Generated by Django 3.2.14 on 2022-08-02 11:25 - -from django.db import migrations, models -import django.db.models.deletion -import django.utils.timezone -import uuid - - -class Migration(migrations.Migration): - - initial = True - - 
dependencies = [ - ] - - operations = [ - migrations.CreateModel( - name='Task', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('uuid', models.UUIDField(default=uuid.uuid4, unique=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('scheduled_for', models.DateTimeField(blank=True, default=django.utils.timezone.now, null=True)), - ('task_identifier', models.CharField(max_length=200)), - ('serialized_args', models.TextField(blank=True, null=True)), - ('serialized_kwargs', models.TextField(blank=True, null=True)), - ('num_failures', models.IntegerField(default=0)), - ], - options={ - 'index_together': {('scheduled_for', 'num_failures')}, - }, - ), - migrations.CreateModel( - name='TaskRun', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('started_at', models.DateTimeField()), - ('finished_at', models.DateTimeField(blank=True, null=True)), - ('result', models.CharField(blank=True, choices=[('SUCCESS', 'Success'), ('FAILURE', 'Failure')], db_index=True, max_length=50, null=True)), - ('error_details', models.TextField(blank=True, null=True)), - ('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='task_runs', to='task_processor.task')), - ], - ), - ] diff --git a/api/task_processor/migrations/0002_healthcheckmodel.py b/api/task_processor/migrations/0002_healthcheckmodel.py deleted file mode 100644 index f3938a54f3f4..000000000000 --- a/api/task_processor/migrations/0002_healthcheckmodel.py +++ /dev/null @@ -1,21 +0,0 @@ -# Generated by Django 3.2.14 on 2022-08-12 11:39 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('task_processor', '0001_initial'), - ] - - operations = [ - migrations.CreateModel( - name='HealthCheckModel', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, 
verbose_name='ID')), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('uuid', models.UUIDField(unique=True)), - ], - ), - ] diff --git a/api/task_processor/migrations/0003_add_completed_to_task.py b/api/task_processor/migrations/0003_add_completed_to_task.py deleted file mode 100644 index 38255d2da28d..000000000000 --- a/api/task_processor/migrations/0003_add_completed_to_task.py +++ /dev/null @@ -1,22 +0,0 @@ -# Generated by Django 3.2.15 on 2022-08-24 13:53 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('task_processor', '0002_healthcheckmodel'), - ] - - operations = [ - migrations.AddField( - model_name='task', - name='completed', - field=models.BooleanField(default=False), - ), - migrations.AlterIndexTogether( - name='task', - index_together={('scheduled_for', 'num_failures', 'completed')}, - ), - ] diff --git a/api/task_processor/migrations/0004_recreate_task_indexes.py b/api/task_processor/migrations/0004_recreate_task_indexes.py deleted file mode 100644 index 008ce33376ea..000000000000 --- a/api/task_processor/migrations/0004_recreate_task_indexes.py +++ /dev/null @@ -1,43 +0,0 @@ -# Generated by Django 3.2.15 on 2022-10-07 09:53 - -from django.db import migrations, models - -from core.migration_helpers import PostgresOnlyRunSQL - - -class Migration(migrations.Migration): - - atomic = False - - dependencies = [ - ("task_processor", "0003_add_completed_to_task"), - ] - - operations = [ - migrations.SeparateDatabaseAndState( - state_operations=[ - migrations.AlterIndexTogether( - name="task", - index_together=set(), - ), - migrations.AddIndex( - model_name="task", - index=models.Index( - condition=models.Q(("completed", False)), - fields=["num_failures", "scheduled_for"], - name="incomplete_tasks_idx", - ), - ), - ], - database_operations=[ - PostgresOnlyRunSQL( - "DROP INDEX CONCURRENTLY task_processor_task_scheduled_for_num_failur_17d6dc77_idx;", - reverse_sql='CREATE INDEX 
"task_processor_task_scheduled_for_num_failur_17d6dc77_idx" ON "task_processor_task" ("scheduled_for", "num_failures", "completed");', - ), - PostgresOnlyRunSQL( - 'CREATE INDEX CONCURRENTLY "incomplete_tasks_idx" ON "task_processor_task" ("num_failures", "scheduled_for") WHERE NOT "completed";', - reverse_sql='DROP INDEX CONCURRENTLY "incomplete_tasks_idx";', - ), - ], - ), - ] diff --git a/api/task_processor/migrations/0005_update_conditional_index_conditions.py b/api/task_processor/migrations/0005_update_conditional_index_conditions.py deleted file mode 100644 index 493007255bdd..000000000000 --- a/api/task_processor/migrations/0005_update_conditional_index_conditions.py +++ /dev/null @@ -1,45 +0,0 @@ -# Generated by Django 3.2.15 on 2022-10-07 11:16 - -from django.db import migrations, models - -from core.migration_helpers import PostgresOnlyRunSQL - - -class Migration(migrations.Migration): - - atomic = False - - dependencies = [ - ("task_processor", "0004_recreate_task_indexes"), - ] - - operations = [ - migrations.SeparateDatabaseAndState( - state_operations=[ - migrations.RemoveIndex( - model_name="task", - name="incomplete_tasks_idx", - ), - migrations.AddIndex( - model_name="task", - index=models.Index( - condition=models.Q( - ("completed", False), ("num_failures__lt", 3) - ), - fields=["scheduled_for"], - name="incomplete_tasks_idx", - ), - ), - ], - database_operations=[ - PostgresOnlyRunSQL( - 'DROP INDEX CONCURRENTLY "incomplete_tasks_idx";', - reverse_sql='CREATE INDEX CONCURRENTLY "incomplete_tasks_idx" ON "task_processor_task" ("num_failures", "scheduled_for") WHERE NOT "completed";', - ), - PostgresOnlyRunSQL( - 'CREATE INDEX CONCURRENTLY "incomplete_tasks_idx" ON "task_processor_task" ("scheduled_for") WHERE (NOT "completed" and "num_failures" < 3);', - reverse_sql='DROP INDEX CONCURRENTLY "incomplete_tasks_idx";', - ), - ], - ) - ] diff --git a/api/task_processor/migrations/0006_auto_20230221_0802.py 
b/api/task_processor/migrations/0006_auto_20230221_0802.py deleted file mode 100644 index f453aab5059a..000000000000 --- a/api/task_processor/migrations/0006_auto_20230221_0802.py +++ /dev/null @@ -1,45 +0,0 @@ -# Generated by Django 3.2.16 on 2023-02-21 08:02 - -from django.db import migrations, models -import django.db.models.deletion -import uuid - - -class Migration(migrations.Migration): - - dependencies = [ - ('task_processor', '0005_update_conditional_index_conditions'), - ] - - operations = [ - migrations.CreateModel( - name='RecurringTask', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('uuid', models.UUIDField(default=uuid.uuid4, unique=True)), - ('created_at', models.DateTimeField(auto_now_add=True)), - ('task_identifier', models.CharField(max_length=200)), - ('serialized_args', models.TextField(blank=True, null=True)), - ('serialized_kwargs', models.TextField(blank=True, null=True)), - ('run_every', models.DurationField()), - ], - ), - migrations.CreateModel( - name='RecurringTaskRun', - fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('started_at', models.DateTimeField()), - ('finished_at', models.DateTimeField(blank=True, null=True)), - ('result', models.CharField(blank=True, choices=[('SUCCESS', 'Success'), ('FAILURE', 'Failure')], db_index=True, max_length=50, null=True)), - ('error_details', models.TextField(blank=True, null=True)), - ('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='task_runs', to='task_processor.recurringtask')), - ], - options={ - 'abstract': False, - }, - ), - migrations.AddConstraint( - model_name='recurringtask', - constraint=models.UniqueConstraint(fields=('task_identifier', 'run_every'), name='unique_run_every_tasks'), - ), - ] diff --git a/api/task_processor/migrations/0007_add_is_locked.py b/api/task_processor/migrations/0007_add_is_locked.py deleted file 
mode 100644 index 71b21a3ac543..000000000000 --- a/api/task_processor/migrations/0007_add_is_locked.py +++ /dev/null @@ -1,23 +0,0 @@ -# Generated by Django 3.2.18 on 2023-04-20 02:43 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('task_processor', '0006_auto_20230221_0802'), - ] - - operations = [ - migrations.AddField( - model_name='recurringtask', - name='is_locked', - field=models.BooleanField(default=False), - ), - migrations.AddField( - model_name='task', - name='is_locked', - field=models.BooleanField(default=False), - ), - ] diff --git a/api/task_processor/migrations/0008_add_get_task_to_process_function.py b/api/task_processor/migrations/0008_add_get_task_to_process_function.py deleted file mode 100644 index 49d047af124c..000000000000 --- a/api/task_processor/migrations/0008_add_get_task_to_process_function.py +++ /dev/null @@ -1,31 +0,0 @@ -# Generated by Django 3.2.18 on 2023-04-20 02:45 - -from django.db import migrations - -from core.migration_helpers import PostgresOnlyRunSQL -import os - - -class Migration(migrations.Migration): - dependencies = [ - ("task_processor", "0007_add_is_locked"), - ] - - operations = [ - PostgresOnlyRunSQL.from_sql_file( - os.path.join( - os.path.dirname(__file__), - "sql", - "0008_get_tasks_to_process.sql", - ), - reverse_sql="DROP FUNCTION IF EXISTS get_tasks_to_process", - ), - PostgresOnlyRunSQL.from_sql_file( - os.path.join( - os.path.dirname(__file__), - "sql", - "0008_get_recurring_tasks_to_process.sql", - ), - reverse_sql="DROP FUNCTION IF EXISTS get_recurringtasks_to_process", - ), - ] diff --git a/api/task_processor/migrations/0009_add_recurring_task_run_first_run_at.py b/api/task_processor/migrations/0009_add_recurring_task_run_first_run_at.py deleted file mode 100644 index c2d9147d8f53..000000000000 --- a/api/task_processor/migrations/0009_add_recurring_task_run_first_run_at.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 3.2.18 on 
2023-04-05 13:47 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - - dependencies = [ - ('task_processor', '0008_add_get_task_to_process_function'), - ] - - operations = [ - migrations.AddField( - model_name='recurringtask', - name='first_run_time', - field=models.TimeField(blank=True, null=True), - ), - ] diff --git a/api/task_processor/migrations/0010_task_priority.py b/api/task_processor/migrations/0010_task_priority.py deleted file mode 100644 index c1b41211aab6..000000000000 --- a/api/task_processor/migrations/0010_task_priority.py +++ /dev/null @@ -1,27 +0,0 @@ -# Generated by Django 3.2.20 on 2023-10-13 06:04 - -from django.db import migrations, models - - -class Migration(migrations.Migration): - dependencies = [ - ("task_processor", "0009_add_recurring_task_run_first_run_at"), - ] - - operations = [ - migrations.AddField( - model_name="task", - name="priority", - field=models.SmallIntegerField( - choices=[ - (100, "Lower"), - (75, "Low"), - (50, "Normal"), - (25, "High"), - (0, "Highest"), - ], - default=None, - null=True, - ), - ), - ] diff --git a/api/task_processor/migrations/0011_add_priority_to_get_tasks_to_process.py b/api/task_processor/migrations/0011_add_priority_to_get_tasks_to_process.py deleted file mode 100644 index 48f2ed8f6703..000000000000 --- a/api/task_processor/migrations/0011_add_priority_to_get_tasks_to_process.py +++ /dev/null @@ -1,27 +0,0 @@ -# Generated by Django 3.2.20 on 2023-10-13 04:44 - -from django.db import migrations - -from core.migration_helpers import PostgresOnlyRunSQL -import os - - -class Migration(migrations.Migration): - dependencies = [ - ("task_processor", "0010_task_priority"), - ] - - operations = [ - PostgresOnlyRunSQL.from_sql_file( - os.path.join( - os.path.dirname(__file__), - "sql", - "0011_get_tasks_to_process.sql", - ), - reverse_sql=os.path.join( - os.path.dirname(__file__), - "sql", - "0008_get_tasks_to_process.sql", - ), - ), - ] diff --git 
a/api/task_processor/migrations/__init__.py b/api/task_processor/migrations/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/api/task_processor/migrations/sql/0008_get_recurring_tasks_to_process.sql b/api/task_processor/migrations/sql/0008_get_recurring_tasks_to_process.sql deleted file mode 100644 index acb9e4d482af..000000000000 --- a/api/task_processor/migrations/sql/0008_get_recurring_tasks_to_process.sql +++ /dev/null @@ -1,30 +0,0 @@ -CREATE OR REPLACE FUNCTION get_recurringtasks_to_process(num_tasks integer) -RETURNS SETOF task_processor_recurringtask AS $$ -DECLARE - row_to_return task_processor_recurringtask; -BEGIN - -- Select the tasks that needs to be processed - FOR row_to_return IN - SELECT * - FROM task_processor_recurringtask - WHERE is_locked = FALSE - ORDER BY id - LIMIT num_tasks - -- Select for update to ensure that no other workers can select these tasks while in this transaction block - FOR UPDATE SKIP LOCKED - LOOP - -- Lock every selected task(by updating `is_locked` to true) - UPDATE task_processor_recurringtask - -- Lock this row by setting is_locked True, so that no other workers can select these tasks after this - -- transaction is complete (but the tasks are still being executed by the current worker) - SET is_locked = TRUE - WHERE id = row_to_return.id; - -- If we don't explicitly update the `is_locked` column here, the client will receive the row that is actually locked but has the `is_locked` value set to `False`. 
- row_to_return.is_locked := TRUE; - RETURN NEXT row_to_return; - END LOOP; - - RETURN; -END; -$$ LANGUAGE plpgsql - diff --git a/api/task_processor/migrations/sql/0008_get_tasks_to_process.sql b/api/task_processor/migrations/sql/0008_get_tasks_to_process.sql deleted file mode 100644 index ba1072d670fc..000000000000 --- a/api/task_processor/migrations/sql/0008_get_tasks_to_process.sql +++ /dev/null @@ -1,30 +0,0 @@ -CREATE OR REPLACE FUNCTION get_tasks_to_process(num_tasks integer) -RETURNS SETOF task_processor_task AS $$ -DECLARE - row_to_return task_processor_task; -BEGIN - -- Select the tasks that needs to be processed - FOR row_to_return IN - SELECT * - FROM task_processor_task - WHERE num_failures < 3 AND scheduled_for < NOW() AND completed = FALSE AND is_locked = FALSE - ORDER BY scheduled_for ASC, created_at ASC - LIMIT num_tasks - -- Select for update to ensure that no other workers can select these tasks while in this transaction block - FOR UPDATE SKIP LOCKED - LOOP - -- Lock every selected task(by updating `is_locked` to true) - UPDATE task_processor_task - -- Lock this row by setting is_locked True, so that no other workers can select these tasks after this - -- transaction is complete (but the tasks are still being executed by the current worker) - SET is_locked = TRUE - WHERE id = row_to_return.id; - -- If we don't explicitly update the `is_locked` column here, the client will receive the row that is actually locked but has the `is_locked` value set to `False`. 
- row_to_return.is_locked := TRUE; - RETURN NEXT row_to_return; - END LOOP; - - RETURN; -END; -$$ LANGUAGE plpgsql - diff --git a/api/task_processor/migrations/sql/0011_get_tasks_to_process.sql b/api/task_processor/migrations/sql/0011_get_tasks_to_process.sql deleted file mode 100644 index 2dc6d60a3673..000000000000 --- a/api/task_processor/migrations/sql/0011_get_tasks_to_process.sql +++ /dev/null @@ -1,30 +0,0 @@ -CREATE OR REPLACE FUNCTION get_tasks_to_process(num_tasks integer) -RETURNS SETOF task_processor_task AS $$ -DECLARE - row_to_return task_processor_task; -BEGIN - -- Select the tasks that needs to be processed - FOR row_to_return IN - SELECT * - FROM task_processor_task - WHERE num_failures < 3 AND scheduled_for < NOW() AND completed = FALSE AND is_locked = FALSE - ORDER BY priority ASC, scheduled_for ASC, created_at ASC - LIMIT num_tasks - -- Select for update to ensure that no other workers can select these tasks while in this transaction block - FOR UPDATE SKIP LOCKED - LOOP - -- Lock every selected task(by updating `is_locked` to true) - UPDATE task_processor_task - -- Lock this row by setting is_locked True, so that no other workers can select these tasks after this - -- transaction is complete (but the tasks are still being executed by the current worker) - SET is_locked = TRUE - WHERE id = row_to_return.id; - -- If we don't explicitly update the `is_locked` column here, the client will receive the row that is actually locked but has the `is_locked` value set to `False`. 
- row_to_return.is_locked := TRUE; - RETURN NEXT row_to_return; - END LOOP; - - RETURN; -END; -$$ LANGUAGE plpgsql - diff --git a/api/task_processor/models.py b/api/task_processor/models.py deleted file mode 100644 index 09ec1c584f68..000000000000 --- a/api/task_processor/models.py +++ /dev/null @@ -1,220 +0,0 @@ -import typing -import uuid -from datetime import datetime - -import simplejson as json -from django.core.serializers.json import DjangoJSONEncoder -from django.db import models -from django.utils import timezone - -from task_processor.exceptions import TaskProcessingError, TaskQueueFullError -from task_processor.managers import RecurringTaskManager, TaskManager -from task_processor.task_registry import registered_tasks - -_django_json_encoder_default = DjangoJSONEncoder().default - - -class TaskPriority(models.IntegerChoices): - LOWER = 100 - LOW = 75 - NORMAL = 50 - HIGH = 25 - HIGHEST = 0 - - -class AbstractBaseTask(models.Model): - uuid = models.UUIDField(unique=True, default=uuid.uuid4) - created_at = models.DateTimeField(auto_now_add=True) - task_identifier = models.CharField(max_length=200) - serialized_args = models.TextField(blank=True, null=True) - serialized_kwargs = models.TextField(blank=True, null=True) - is_locked = models.BooleanField(default=False) - - class Meta: - abstract = True - - @property - def args(self) -> typing.List[typing.Any]: - if self.serialized_args: - return self.deserialize_data(self.serialized_args) - return [] - - @property - def kwargs(self) -> typing.Dict[str, typing.Any]: - if self.serialized_kwargs: - return self.deserialize_data(self.serialized_kwargs) - return {} - - @staticmethod - def serialize_data(data: typing.Any): - return json.dumps(data, default=_django_json_encoder_default) - - @staticmethod - def deserialize_data(data: typing.Any): - return json.loads(data) - - def mark_failure(self): - self.unlock() - - def mark_success(self): - self.unlock() - - def unlock(self): - self.is_locked = False - - def 
run(self): - return self.callable(*self.args, **self.kwargs) - - @property - def callable(self) -> typing.Callable: - try: - return registered_tasks[self.task_identifier] - except KeyError as e: - raise TaskProcessingError( - "No task registered with identifier '%s'. Ensure your task is " - "decorated with @register_task_handler.", - self.task_identifier, - ) from e - - -class Task(AbstractBaseTask): - scheduled_for = models.DateTimeField(blank=True, null=True, default=timezone.now) - - # denormalise failures and completion so that we can use select_for_update - num_failures = models.IntegerField(default=0) - completed = models.BooleanField(default=False) - objects = TaskManager() - priority = models.SmallIntegerField( - default=None, null=True, choices=TaskPriority.choices - ) - - class Meta: - # We have customised the migration in 0004 to only apply this change to postgres databases - # TODO: work out how to index the taskprocessor_task table for Oracle and MySQL - indexes = [ - models.Index( - name="incomplete_tasks_idx", - fields=["scheduled_for"], - condition=models.Q(completed=False, num_failures__lt=3), - ) - ] - - @classmethod - def create( - cls, - task_identifier: str, - scheduled_for: datetime, - priority: TaskPriority = TaskPriority.NORMAL, - queue_size: int = None, - *, - args: typing.Tuple[typing.Any] = None, - kwargs: typing.Dict[str, typing.Any] = None, - ) -> "Task": - if queue_size and cls._is_queue_full(task_identifier, queue_size): - raise TaskQueueFullError( - f"Queue for task {task_identifier} is full. 
" - f"Max queue size is {queue_size}" - ) - return Task( - task_identifier=task_identifier, - scheduled_for=scheduled_for, - priority=priority, - serialized_args=cls.serialize_data(args or tuple()), - serialized_kwargs=cls.serialize_data(kwargs or dict()), - ) - - @classmethod - def _is_queue_full(cls, task_identifier: str, queue_size: int) -> bool: - return ( - cls.objects.filter( - task_identifier=task_identifier, - completed=False, - num_failures__lt=3, - ).count() - > queue_size - ) - - def mark_failure(self): - super().mark_failure() - self.num_failures += 1 - - def mark_success(self): - super().mark_success() - self.completed = True - - -class RecurringTask(AbstractBaseTask): - run_every = models.DurationField() - first_run_time = models.TimeField(blank=True, null=True) - - objects = RecurringTaskManager() - - class Meta: - constraints = [ - models.UniqueConstraint( - fields=["task_identifier", "run_every"], - name="unique_run_every_tasks", - ), - ] - - @property - def should_execute(self) -> bool: - now = timezone.now() - last_task_run = self.task_runs.order_by("-started_at").first() - - if not last_task_run: - # If we have never run this task, then we should execute it only if - # the time has passed after which we want to ensure this task runs. - # This allows us to control when intensive tasks should be run. 
- return not (self.first_run_time and self.first_run_time > now.time()) - - # if the last run was at t- run_every, then we should execute it - if (timezone.now() - last_task_run.started_at) >= self.run_every: - return True - - # if the last run was not a success and we do not have - # more than 3 failures in t- run_every, then we should execute it - if ( - last_task_run.result != TaskResult.SUCCESS.name - and self.task_runs.filter(started_at__gte=(now - self.run_every)).count() - <= 3 - ): - return True - # otherwise, we should not execute it - return False - - @property - def is_task_registered(self) -> bool: - return self.task_identifier in registered_tasks - - -class TaskResult(models.Choices): - SUCCESS = "SUCCESS" - FAILURE = "FAILURE" - - -class AbstractTaskRun(models.Model): - started_at = models.DateTimeField() - finished_at = models.DateTimeField(blank=True, null=True) - result = models.CharField( - max_length=50, choices=TaskResult.choices, blank=True, null=True, db_index=True - ) - error_details = models.TextField(blank=True, null=True) - - class Meta: - abstract = True - - -class TaskRun(AbstractTaskRun): - task = models.ForeignKey(Task, on_delete=models.CASCADE, related_name="task_runs") - - -class RecurringTaskRun(AbstractTaskRun): - task = models.ForeignKey( - RecurringTask, on_delete=models.CASCADE, related_name="task_runs" - ) - - -class HealthCheckModel(models.Model): - created_at = models.DateTimeField(auto_now_add=True) - uuid = models.UUIDField(unique=True, blank=False, null=False) diff --git a/api/task_processor/processor.py b/api/task_processor/processor.py deleted file mode 100644 index c150e68ccb3c..000000000000 --- a/api/task_processor/processor.py +++ /dev/null @@ -1,116 +0,0 @@ -import logging -import traceback -import typing -from datetime import timedelta - -from django.utils import timezone - -from task_processor.models import ( - RecurringTask, - RecurringTaskRun, - Task, - TaskResult, - TaskRun, -) - -logger = 
logging.getLogger(__name__) - -UNREGISTERED_RECURRING_TASK_GRACE_PERIOD = timedelta(minutes=30) - - -def run_tasks(num_tasks: int = 1) -> typing.List[TaskRun]: - if num_tasks < 1: - raise ValueError("Number of tasks to process must be at least one") - - tasks = Task.objects.get_tasks_to_process(num_tasks) - - if tasks: - executed_tasks = [] - task_runs = [] - - for task in tasks: - task, task_run = _run_task(task) - - executed_tasks.append(task) - task_runs.append(task_run) - - if executed_tasks: - Task.objects.bulk_update( - executed_tasks, fields=["completed", "num_failures", "is_locked"] - ) - - if task_runs: - TaskRun.objects.bulk_create(task_runs) - - return task_runs - - logger.debug("No tasks to process.") - return [] - - -def run_recurring_tasks(num_tasks: int = 1) -> typing.List[RecurringTaskRun]: - if num_tasks < 1: - raise ValueError("Number of tasks to process must be at least one") - - # NOTE: We will probably see a lot of delay in the execution of recurring tasks - # if the tasks take longer then `run_every` to execute. This is not - # a problem for now, but we should be mindful of this limitation - tasks = RecurringTask.objects.get_tasks_to_process(num_tasks) - if tasks: - task_runs = [] - - for task in tasks: - if not task.is_task_registered: - # This is necessary to ensure that old instances of the task processor, - # which may still be running during deployment, do not remove tasks added by new instances. 
- # Reference: https://github.com/Flagsmith/flagsmith/issues/2551 - if ( - timezone.now() - task.created_at - ) > UNREGISTERED_RECURRING_TASK_GRACE_PERIOD: - task.delete() - continue - - if task.should_execute: - task, task_run = _run_task(task) - task_runs.append(task_run) - else: - task.unlock() - - # update all tasks that were not deleted - to_update = [task for task in tasks if task.id] - RecurringTask.objects.bulk_update(to_update, fields=["is_locked"]) - - if task_runs: - RecurringTaskRun.objects.bulk_create(task_runs) - - return task_runs - - logger.debug("No tasks to process.") - return [] - - -def _run_task(task: typing.Union[Task, RecurringTask]) -> typing.Tuple[Task, TaskRun]: - task_run = task.task_runs.model(started_at=timezone.now(), task=task) - - try: - task.run() - task_run.result = TaskResult.SUCCESS - - task_run.finished_at = timezone.now() - task.mark_success() - except Exception as e: - logger.warning( - "Failed to execute task '%s'. Exception was: %s", - task.task_identifier, - str(e), - exc_info=True, - ) - logger.debug("args: %s", str(task.args)) - logger.debug("kwargs: %s", str(task.kwargs)) - - task.mark_failure() - - task_run.result = TaskResult.FAILURE - task_run.error_details = str(traceback.format_exc()) - - return task, task_run diff --git a/api/task_processor/serializers.py b/api/task_processor/serializers.py deleted file mode 100644 index c25c0a1e4cec..000000000000 --- a/api/task_processor/serializers.py +++ /dev/null @@ -1,5 +0,0 @@ -from rest_framework import serializers - - -class MonitoringSerializer(serializers.Serializer): - waiting = serializers.IntegerField(read_only=True) diff --git a/api/task_processor/task_registry.py b/api/task_processor/task_registry.py deleted file mode 100644 index 50ae62ef685f..000000000000 --- a/api/task_processor/task_registry.py +++ /dev/null @@ -1,23 +0,0 @@ -import logging -import typing - -logger = logging.getLogger(__name__) - -registered_tasks: typing.Dict[str, typing.Callable] = {} - - -def 
register_task(task_identifier: str, callable_: typing.Callable): - global registered_tasks - - logger.debug("Registering task '%s'", task_identifier) - - registered_tasks[task_identifier] = callable_ - - logger.debug( - "Registered tasks now has the following tasks registered: %s", - list(registered_tasks.keys()), - ) - - -def get_task(task_identifier: str) -> typing.Callable: - return registered_tasks[task_identifier] diff --git a/api/task_processor/task_run_method.py b/api/task_processor/task_run_method.py deleted file mode 100644 index e7d471ec668a..000000000000 --- a/api/task_processor/task_run_method.py +++ /dev/null @@ -1,7 +0,0 @@ -from enum import Enum - - -class TaskRunMethod(Enum): - SYNCHRONOUSLY = "SYNCHRONOUSLY" - SEPARATE_THREAD = "SEPARATE_THREAD" - TASK_PROCESSOR = "TASK_PROCESSOR" diff --git a/api/task_processor/tasks.py b/api/task_processor/tasks.py deleted file mode 100644 index 80de37e17ac2..000000000000 --- a/api/task_processor/tasks.py +++ /dev/null @@ -1,66 +0,0 @@ -import logging -from datetime import timedelta - -from django.conf import settings -from django.db.models import Q -from django.utils import timezone - -from task_processor.decorators import ( - register_recurring_task, - register_task_handler, -) -from task_processor.models import HealthCheckModel, RecurringTaskRun, Task - -logger = logging.getLogger(__name__) - - -@register_task_handler() -def create_health_check_model(health_check_model_uuid: str): - logger.info("Creating health check model.") - HealthCheckModel.objects.create(uuid=health_check_model_uuid) - - -@register_recurring_task( - run_every=settings.TASK_DELETE_RUN_EVERY, - first_run_time=settings.TASK_DELETE_RUN_TIME, -) -def clean_up_old_tasks(): - if not settings.ENABLE_CLEAN_UP_OLD_TASKS: - return - - now = timezone.now() - delete_before = now - timedelta(days=settings.TASK_DELETE_RETENTION_DAYS) - - # build the query - query = Q(completed=True) - if settings.TASK_DELETE_INCLUDE_FAILED_TASKS: - query = query | 
Q(num_failures__gte=3) - query = Q(scheduled_for__lt=delete_before) & query - - # We define the query in the loop and in the delete query to avoid having - # an infinite loop since we need to verify if there are records in the qs - # on each iteration of the loop. Defining the queryset outside of the - # loop condition leads to queryset.exists() always returning true resulting - # in an infinite loop. - # TODO: validate if deleting in batches is more / less impactful on the DB - while Task.objects.filter(query).exists(): - # delete in batches of settings.TASK_DELETE_BATCH_SIZE - Task.objects.filter( - pk__in=Task.objects.filter(query).values_list("id", flat=True)[ - 0 : settings.TASK_DELETE_BATCH_SIZE # noqa:E203 - ] - ).delete() - - -@register_recurring_task( - run_every=settings.TASK_DELETE_RUN_EVERY, - first_run_time=settings.TASK_DELETE_RUN_TIME, -) -def clean_up_old_recurring_task_runs(): - if not settings.ENABLE_CLEAN_UP_OLD_TASKS: - return - - now = timezone.now() - delete_before = now - timedelta(days=settings.RECURRING_TASK_RUN_RETENTION_DAYS) - - RecurringTaskRun.objects.filter(finished_at__lt=delete_before).delete() diff --git a/api/task_processor/thread_monitoring.py b/api/task_processor/thread_monitoring.py deleted file mode 100644 index d8c80789ff06..000000000000 --- a/api/task_processor/thread_monitoring.py +++ /dev/null @@ -1,34 +0,0 @@ -import json -import logging -import os -import typing -from threading import Thread - -UNHEALTHY_THREADS_FILE_PATH = "/tmp/task-processor-unhealthy-threads.json" - -logger = logging.getLogger(__name__) - - -def clear_unhealthy_threads(): - if _unhealthy_threads_file_exists(): - os.remove(UNHEALTHY_THREADS_FILE_PATH) - - -def write_unhealthy_threads(unhealthy_threads: typing.List[Thread]): - unhealthy_thread_names = [t.name for t in unhealthy_threads] - logger.warning("Writing unhealthy threads: %s", unhealthy_thread_names) - - with open(UNHEALTHY_THREADS_FILE_PATH, "w+") as f: - 
f.write(json.dumps(unhealthy_thread_names)) - - -def get_unhealthy_thread_names() -> typing.List[str]: - if not _unhealthy_threads_file_exists(): - return [] - - with open(UNHEALTHY_THREADS_FILE_PATH, "r") as f: - return json.loads(f.read()) - - -def _unhealthy_threads_file_exists(): - return os.path.exists(UNHEALTHY_THREADS_FILE_PATH) diff --git a/api/task_processor/threads.py b/api/task_processor/threads.py deleted file mode 100644 index b9a3a2d7241a..000000000000 --- a/api/task_processor/threads.py +++ /dev/null @@ -1,49 +0,0 @@ -import logging -import time -from threading import Thread - -from django.db import close_old_connections -from django.utils import timezone - -from task_processor.processor import run_recurring_tasks, run_tasks - -logger = logging.getLogger(__name__) - - -class TaskRunner(Thread): - def __init__( - self, - *args, - sleep_interval_millis: int = 2000, - queue_pop_size: int = 1, - **kwargs, - ): - super(TaskRunner, self).__init__(*args, **kwargs) - self.sleep_interval_millis = sleep_interval_millis - self.queue_pop_size = queue_pop_size - self.last_checked_for_tasks = None - - self._stopped = False - - def run(self) -> None: - while not self._stopped: - self.last_checked_for_tasks = timezone.now() - self.run_iteration() - time.sleep(self.sleep_interval_millis / 1000) - - def run_iteration(self) -> None: - try: - run_tasks(self.queue_pop_size) - run_recurring_tasks(self.queue_pop_size) - except Exception as e: - # To prevent task threads from dying if they get an error retrieving the tasks from the - # database this will allow the thread to continue trying to retrieve tasks if it can - # successfully re-establish a connection to the database. - # TODO: is this also what is causing tasks to get stuck as locked? Can we unlock - # tasks here? 
- - logger.error("Received error retrieving tasks: %s.", e, exc_info=e) - close_old_connections() - - def stop(self): - self._stopped = True diff --git a/api/task_processor/urls.py b/api/task_processor/urls.py deleted file mode 100644 index 508e10fdcf35..000000000000 --- a/api/task_processor/urls.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.urls import path - -from task_processor.views import monitoring - -urlpatterns = [path("monitoring/", monitoring)] diff --git a/api/task_processor/views.py b/api/task_processor/views.py deleted file mode 100644 index 791493bd5e04..000000000000 --- a/api/task_processor/views.py +++ /dev/null @@ -1,17 +0,0 @@ -from drf_yasg.utils import swagger_auto_schema -from rest_framework.decorators import api_view, permission_classes -from rest_framework.permissions import IsAdminUser, IsAuthenticated -from rest_framework.response import Response - -from task_processor.models import Task -from task_processor.serializers import MonitoringSerializer - - -@swagger_auto_schema(method="GET", responses={200: MonitoringSerializer()}) -@api_view(http_method_names=["GET"]) -@permission_classes([IsAuthenticated, IsAdminUser]) -def monitoring(request, **kwargs): - waiting_tasks = Task.objects.filter(num_failures__lt=3, completed=False).count() - return Response( - data={"waiting": waiting_tasks}, headers={"Content-Type": "application/json"} - ) diff --git a/api/tests/unit/integrations/lead_tracking/hubspot/test_unit_hubspot_lead_tracking.py b/api/tests/unit/integrations/lead_tracking/hubspot/test_unit_hubspot_lead_tracking.py index 680bb51ff659..66019231ec6d 100644 --- a/api/tests/unit/integrations/lead_tracking/hubspot/test_unit_hubspot_lead_tracking.py +++ b/api/tests/unit/integrations/lead_tracking/hubspot/test_unit_hubspot_lead_tracking.py @@ -2,13 +2,13 @@ from pytest_django.fixtures import SettingsWrapper from pytest_mock import MockerFixture +from task_processor.task_run_method import TaskRunMethod from organisations.models import ( 
HubspotOrganisation, Organisation, OrganisationRole, ) -from task_processor.task_run_method import TaskRunMethod from users.models import FFAdminUser, HubspotLead diff --git a/api/tests/unit/organisations/test_unit_organisations_subscription_info_cache.py b/api/tests/unit/organisations/test_unit_organisations_subscription_info_cache.py index 2cbe1daee0d8..bb59c1176b12 100644 --- a/api/tests/unit/organisations/test_unit_organisations_subscription_info_cache.py +++ b/api/tests/unit/organisations/test_unit_organisations_subscription_info_cache.py @@ -1,7 +1,8 @@ +from task_processor.task_run_method import TaskRunMethod + from organisations.chargebee.metadata import ChargebeeObjMetadata from organisations.subscription_info_cache import update_caches from organisations.subscriptions.constants import SubscriptionCacheEntity -from task_processor.task_run_method import TaskRunMethod def test_update_caches(mocker, organisation, chargebee_subscription, settings): diff --git a/api/tests/unit/projects/test_unit_projects_tasks.py b/api/tests/unit/projects/test_unit_projects_tasks.py index a983507bced3..cc29d43dbd97 100644 --- a/api/tests/unit/projects/test_unit_projects_tasks.py +++ b/api/tests/unit/projects/test_unit_projects_tasks.py @@ -3,6 +3,7 @@ import pytest from pytest_django.fixtures import SettingsWrapper from pytest_mock import MockerFixture +from task_processor.task_run_method import TaskRunMethod from environments.dynamodb.types import ( EdgeV2MigrationResult, @@ -16,7 +17,6 @@ migrate_project_environments_to_v2, ) from segments.models import Segment -from task_processor.task_run_method import TaskRunMethod @pytest.fixture diff --git a/api/tests/unit/projects/test_unit_projects_views.py b/api/tests/unit/projects/test_unit_projects_views.py index 77a8a9554824..8a313dffba14 100644 --- a/api/tests/unit/projects/test_unit_projects_views.py +++ b/api/tests/unit/projects/test_unit_projects_views.py @@ -9,6 +9,7 @@ from pytest_mock import MockerFixture from rest_framework 
import status from rest_framework.test import APIClient +from task_processor.task_run_method import TaskRunMethod from environments.dynamodb.types import ProjectIdentityMigrationStatus from environments.identities.models import Identity @@ -31,7 +32,6 @@ VIEW_PROJECT, ) from segments.models import Segment -from task_processor.task_run_method import TaskRunMethod from tests.types import WithProjectPermissionsCallable from users.models import FFAdminUser, UserPermissionGroup diff --git a/api/tests/unit/task_processor/__init__.py b/api/tests/unit/task_processor/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/api/tests/unit/task_processor/conftest.py b/api/tests/unit/task_processor/conftest.py deleted file mode 100644 index 6f38020ec571..000000000000 --- a/api/tests/unit/task_processor/conftest.py +++ /dev/null @@ -1,35 +0,0 @@ -import logging -import typing - -import pytest - - -@pytest.fixture -def run_by_processor(monkeypatch): - monkeypatch.setenv("RUN_BY_PROCESSOR", "True") - - -class GetTaskProcessorCaplog(typing.Protocol): - def __call__( - self, log_level: str | int = logging.INFO - ) -> pytest.LogCaptureFixture: ... - - -@pytest.fixture -def get_task_processor_caplog( - caplog: pytest.LogCaptureFixture, -) -> GetTaskProcessorCaplog: - # caplog doesn't allow you to capture logging outputs from loggers that don't - # propagate to root. Quick hack here to get the task_processor logger to - # propagate. - # TODO: look into using loguru. - - def _inner(log_level: str | int = logging.INFO) -> pytest.LogCaptureFixture: - task_processor_logger = logging.getLogger("task_processor") - task_processor_logger.propagate = True - # Assume required level for the logger. 
- task_processor_logger.setLevel(log_level) - caplog.set_level(log_level) - return caplog - - return _inner diff --git a/api/tests/unit/task_processor/test_unit_task_processor_decorators.py b/api/tests/unit/task_processor/test_unit_task_processor_decorators.py deleted file mode 100644 index f88046d38925..000000000000 --- a/api/tests/unit/task_processor/test_unit_task_processor_decorators.py +++ /dev/null @@ -1,214 +0,0 @@ -import json -from datetime import timedelta -from unittest.mock import MagicMock - -import pytest -from django_capture_on_commit_callbacks import capture_on_commit_callbacks -from pytest_django.fixtures import SettingsWrapper -from pytest_mock import MockerFixture - -from task_processor.decorators import ( - register_recurring_task, - register_task_handler, -) -from task_processor.exceptions import InvalidArgumentsError -from task_processor.models import RecurringTask, Task, TaskPriority -from task_processor.task_registry import get_task -from task_processor.task_run_method import TaskRunMethod -from tests.unit.task_processor.conftest import GetTaskProcessorCaplog - - -@pytest.fixture -def mock_thread_class( - mocker: MockerFixture, -) -> MagicMock: - mock_thread_class = mocker.patch( - "task_processor.decorators.Thread", - return_value=mocker.MagicMock(), - ) - return mock_thread_class - - -@pytest.mark.django_db -def test_register_task_handler_run_in_thread__transaction_commit__true__default( - get_task_processor_caplog: GetTaskProcessorCaplog, - mock_thread_class: MagicMock, -) -> None: - # Given - caplog = get_task_processor_caplog() - - @register_task_handler() - def my_function(*args: str, **kwargs: str) -> None: - pass - - mock_thread = mock_thread_class.return_value - - args = ("foo",) - kwargs = {"bar": "baz"} - - # When - # TODO Switch to pytest-django's django_capture_on_commit_callbacks - # fixture when migrating to Django 4 - with capture_on_commit_callbacks(execute=True): - my_function.run_in_thread(args=args, kwargs=kwargs) - - # 
Then - mock_thread_class.assert_called_once_with( - target=my_function.unwrapped, args=args, kwargs=kwargs, daemon=True - ) - mock_thread.start.assert_called_once() - - assert len(caplog.records) == 1 - assert ( - caplog.records[0].message == "Running function my_function in unmanaged thread." - ) - - -def test_register_task_handler_run_in_thread__transaction_commit__false( - get_task_processor_caplog: GetTaskProcessorCaplog, - mock_thread_class: MagicMock, -) -> None: - # Given - caplog = get_task_processor_caplog() - - @register_task_handler(transaction_on_commit=False) - def my_function(*args, **kwargs): - pass - - mock_thread = mock_thread_class.return_value - - args = ("foo",) - kwargs = {"bar": "baz"} - - # When - my_function.run_in_thread(args=args, kwargs=kwargs) - - # Then - mock_thread_class.assert_called_once_with( - target=my_function.unwrapped, args=args, kwargs=kwargs, daemon=True - ) - mock_thread.start.assert_called_once() - - assert len(caplog.records) == 1 - assert ( - caplog.records[0].message == "Running function my_function in unmanaged thread." 
- ) - - -def test_register_recurring_task(mocker, db, run_by_processor): - # Given - task_kwargs = {"first_arg": "foo", "second_arg": "bar"} - run_every = timedelta(minutes=10) - task_identifier = "test_unit_task_processor_decorators.a_function" - - # When - @register_recurring_task( - run_every=run_every, - kwargs=task_kwargs, - ) - def a_function(first_arg, second_arg): - return first_arg + second_arg - - # Then - task = RecurringTask.objects.get(task_identifier=task_identifier) - assert task.serialized_kwargs == json.dumps(task_kwargs) - assert task.run_every == run_every - - assert get_task(task_identifier) - assert task.run() == "foobar" - - -def test_register_recurring_task_does_nothing_if_not_run_by_processor(mocker, db): - # Given - - task_kwargs = {"first_arg": "foo", "second_arg": "bar"} - run_every = timedelta(minutes=10) - task_identifier = "test_unit_task_processor_decorators.some_function" - - # When - @register_recurring_task( - run_every=run_every, - kwargs=task_kwargs, - ) - def some_function(first_arg, second_arg): - return first_arg + second_arg - - # Then - assert not RecurringTask.objects.filter(task_identifier=task_identifier).exists() - with pytest.raises(KeyError): - assert get_task(task_identifier) - - -def test_register_task_handler_validates_inputs() -> None: - # Given - @register_task_handler() - def my_function(*args, **kwargs): - pass - - class NonSerializableObj: - pass - - # When - with pytest.raises(InvalidArgumentsError): - my_function(NonSerializableObj()) - - -@pytest.mark.parametrize( - "task_run_method", (TaskRunMethod.SEPARATE_THREAD, TaskRunMethod.SYNCHRONOUSLY) -) -def test_inputs_are_validated_when_run_without_task_processor( - settings: SettingsWrapper, task_run_method: TaskRunMethod -) -> None: - # Given - settings.TASK_RUN_METHOD = task_run_method - - @register_task_handler() - def my_function(*args, **kwargs): - pass - - class NonSerializableObj: - pass - - # When - with pytest.raises(InvalidArgumentsError): - 
my_function.delay(args=(NonSerializableObj(),)) - - -def test_delay_returns_none_if_task_queue_is_full(settings, db): - # Given - settings.TASK_RUN_METHOD = TaskRunMethod.TASK_PROCESSOR - - @register_task_handler(queue_size=1) - def my_function(*args, **kwargs): - pass - - for _ in range(10): - Task.objects.create( - task_identifier="test_unit_task_processor_decorators.my_function" - ) - - # When - task = my_function.delay() - - # Then - assert task is None - - -def test_can_create_task_with_priority(settings, db): - # Given - settings.TASK_RUN_METHOD = TaskRunMethod.TASK_PROCESSOR - - @register_task_handler(priority=TaskPriority.HIGH) - def my_function(*args, **kwargs): - pass - - for _ in range(10): - Task.objects.create( - task_identifier="test_unit_task_processor_decorators.my_function" - ) - - # When - task = my_function.delay() - - # Then - assert task.priority == TaskPriority.HIGH diff --git a/api/tests/unit/task_processor/test_unit_task_processor_health.py b/api/tests/unit/task_processor/test_unit_task_processor_health.py deleted file mode 100644 index b0929cb0a0b7..000000000000 --- a/api/tests/unit/task_processor/test_unit_task_processor_health.py +++ /dev/null @@ -1,35 +0,0 @@ -from task_processor.health import is_processor_healthy -from task_processor.models import HealthCheckModel -from task_processor.task_run_method import TaskRunMethod - - -def test_is_processor_healthy_returns_false_if_task_not_processed(mocker): - # Given - mocker.patch("task_processor.health.create_health_check_model") - mocked_health_check_model_class = mocker.patch( - "task_processor.health.HealthCheckModel" - ) - mocked_health_check_model_class.objects.filter.return_value.first.return_value = ( - None - ) - - # When - result = is_processor_healthy(max_tries=3) - - # Then - assert result is False - - -def test_is_processor_healthy_returns_true_if_task_processed(db, settings): - # Given - settings.TASK_RUN_METHOD = TaskRunMethod.SYNCHRONOUSLY - - # When - result = 
is_processor_healthy(max_tries=3) - - # Then - # the health is reported as success - assert result is True - - # but the health check model used to verify the health is deleted - assert not HealthCheckModel.objects.exists() diff --git a/api/tests/unit/task_processor/test_unit_task_processor_models.py b/api/tests/unit/task_processor/test_unit_task_processor_models.py deleted file mode 100644 index 8bdbaef813dc..000000000000 --- a/api/tests/unit/task_processor/test_unit_task_processor_models.py +++ /dev/null @@ -1,75 +0,0 @@ -from datetime import datetime, time, timedelta -from decimal import Decimal - -import pytest -from django.utils import timezone -from freezegun import freeze_time -from pytest_lazyfixture import lazy_fixture - -from task_processor.decorators import register_task_handler -from task_processor.models import RecurringTask, Task - - -@pytest.fixture -def current_datetime() -> datetime: - with freeze_time("2024-01-31T09:45:16.758115"): - yield timezone.now() - - -@pytest.fixture -def one_hour_ago(current_datetime: datetime) -> time: - return (current_datetime - timedelta(hours=1)).time() - - -@pytest.fixture -def one_hour_from_now(current_datetime: datetime) -> time: - return (current_datetime + timedelta(hours=1)).time() - - -@register_task_handler() -def my_callable(arg_one: str, arg_two: str = None): - """Example callable to use for tasks (needs to be global for registering to work)""" - return arg_one, arg_two - - -def test_task_run(): - # Given - args = ["foo"] - kwargs = {"arg_two": "bar"} - - task = Task.create( - my_callable.task_identifier, - scheduled_for=timezone.now(), - args=args, - kwargs=kwargs, - ) - - # When - result = task.run() - - # Then - assert result == my_callable(*args, **kwargs) - - -@pytest.mark.parametrize( - "input, expected_output", - ( - ({"value": Decimal("10")}, '{"value": 10}'), - ({"value": Decimal("10.12345")}, '{"value": 10.12345}'), - ), -) -def test_serialize_data_handles_decimal_objects(input, expected_output): 
- assert Task.serialize_data(input) == expected_output - - -@pytest.mark.parametrize( - "first_run_time, expected", - ((lazy_fixture("one_hour_ago"), True), (lazy_fixture("one_hour_from_now"), False)), -) -def test_recurring_task_run_should_execute_first_run_at(first_run_time, expected): - assert ( - RecurringTask( - first_run_time=first_run_time, run_every=timedelta(days=1) - ).should_execute - == expected - ) diff --git a/api/tests/unit/task_processor/test_unit_task_processor_processor.py b/api/tests/unit/task_processor/test_unit_task_processor_processor.py deleted file mode 100644 index a6c8dee30301..000000000000 --- a/api/tests/unit/task_processor/test_unit_task_processor_processor.py +++ /dev/null @@ -1,471 +0,0 @@ -import logging -import time -import uuid -from datetime import timedelta -from threading import Thread - -import pytest -from django.utils import timezone -from freezegun import freeze_time - -from organisations.models import Organisation -from task_processor.decorators import ( - register_recurring_task, - register_task_handler, -) -from task_processor.models import ( - RecurringTask, - RecurringTaskRun, - Task, - TaskPriority, - TaskResult, - TaskRun, -) -from task_processor.processor import ( - UNREGISTERED_RECURRING_TASK_GRACE_PERIOD, - run_recurring_tasks, - run_tasks, -) -from task_processor.task_registry import registered_tasks - - -def test_run_task_runs_task_and_creates_task_run_object_when_success(db): - # Given - organisation_name = f"test-org-{uuid.uuid4()}" - task = Task.create( - _create_organisation.task_identifier, - scheduled_for=timezone.now(), - args=(organisation_name,), - ) - task.save() - - # When - task_runs = run_tasks() - - # Then - assert Organisation.objects.filter(name=organisation_name).exists() - - assert len(task_runs) == TaskRun.objects.filter(task=task).count() == 1 - task_run = task_runs[0] - assert task_run.result == TaskResult.SUCCESS - assert task_run.started_at - assert task_run.finished_at - assert 
task_run.error_details is None - - task.refresh_from_db() - assert task.completed - - -def test_run_recurring_tasks_runs_task_and_creates_recurring_task_run_object_when_success( - db, - monkeypatch, -): - # Given - monkeypatch.setenv("RUN_BY_PROCESSOR", "True") - - organisation_name = f"test-org-{uuid.uuid4()}" - task_identifier = "test_unit_task_processor_processor._create_organisation" - - @register_recurring_task(run_every=timedelta(seconds=1), args=(organisation_name,)) - def _create_organisation(organisation_name): - Organisation.objects.create(name=organisation_name) - - task = RecurringTask.objects.get(task_identifier=task_identifier) - # When - task_runs = run_recurring_tasks() - - # Then - assert Organisation.objects.filter(name=organisation_name).count() == 1 - - assert len(task_runs) == RecurringTaskRun.objects.filter(task=task).count() == 1 - task_run = task_runs[0] - assert task_run.result == TaskResult.SUCCESS - assert task_run.started_at - assert task_run.finished_at - assert task_run.error_details is None - - -@pytest.mark.django_db(transaction=True) -def test_run_recurring_tasks_multiple_runs(db, run_by_processor): - # Given - organisation_name = "test-org" - task_identifier = "test_unit_task_processor_processor._create_organisation" - - @register_recurring_task( - run_every=timedelta(milliseconds=200), args=(organisation_name,) - ) - def _create_organisation(organisation_name): - Organisation.objects.create(name=organisation_name) - - task = RecurringTask.objects.get(task_identifier=task_identifier) - - # When - first_task_runs = run_recurring_tasks() - - # run the process again before the task is scheduled to run again to ensure - # that tasks are unlocked when they are picked up by the task processor but - # not executed. 
- no_task_runs = run_recurring_tasks() - - time.sleep(0.3) - - second_task_runs = run_recurring_tasks() - - # Then - assert len(first_task_runs) == 1 - assert len(no_task_runs) == 0 - assert len(second_task_runs) == 1 - - # we should still only have 2 organisations, despite executing the - # `run_recurring_tasks` function 3 times. - assert Organisation.objects.filter(name=organisation_name).count() == 2 - - all_task_runs = first_task_runs + second_task_runs - assert len(all_task_runs) == RecurringTaskRun.objects.filter(task=task).count() == 2 - for task_run in all_task_runs: - assert task_run.result == TaskResult.SUCCESS - assert task_run.started_at - assert task_run.finished_at - assert task_run.error_details is None - - -def test_run_recurring_tasks_only_executes_tasks_after_interval_set_by_run_every( - db, - run_by_processor, -): - # Given - organisation_name = "test-org" - task_identifier = "test_unit_task_processor_processor._create_organisation" - - @register_recurring_task( - run_every=timedelta(milliseconds=100), args=(organisation_name,) - ) - def _create_organisation(organisation_name): - Organisation.objects.create(name=organisation_name) - - task = RecurringTask.objects.get(task_identifier=task_identifier) - - # When - we call run_recurring_tasks twice - run_recurring_tasks() - run_recurring_tasks() - - # Then - we expect the task to have been run once - - assert Organisation.objects.filter(name=organisation_name).count() == 1 - - assert RecurringTaskRun.objects.filter(task=task).count() == 1 - - -def test_run_recurring_tasks_does_nothing_if_unregistered_task_is_new( - db: None, run_by_processor: None, caplog: pytest.LogCaptureFixture -) -> None: - # Given - task_processor_logger = logging.getLogger("task_processor") - task_processor_logger.propagate = True - - task_identifier = "test_unit_task_processor_processor._a_task" - - @register_recurring_task(run_every=timedelta(milliseconds=100)) - def _a_task(): - pass - - # now - remove the task from the 
registry - registered_tasks.pop(task_identifier) - - # When - task_runs = run_recurring_tasks() - - # Then - assert len(task_runs) == 0 - assert RecurringTask.objects.filter(task_identifier=task_identifier).exists() - - -def test_run_recurring_tasks_deletes_the_task_if_unregistered_task_is_old( - db: None, run_by_processor: None, caplog: pytest.LogCaptureFixture -) -> None: - # Given - task_processor_logger = logging.getLogger("task_processor") - task_processor_logger.propagate = True - - task_identifier = "test_unit_task_processor_processor._a_task" - - with freeze_time(timezone.now() - UNREGISTERED_RECURRING_TASK_GRACE_PERIOD): - - @register_recurring_task(run_every=timedelta(milliseconds=100)) - def _a_task(): - pass - - # now - remove the task from the registry - registered_tasks.pop(task_identifier) - - # When - task_runs = run_recurring_tasks() - - # Then - assert len(task_runs) == 0 - assert ( - RecurringTask.objects.filter(task_identifier=task_identifier).exists() is False - ) - - -def test_run_task_runs_task_and_creates_task_run_object_when_failure( - db: None, caplog: pytest.LogCaptureFixture -) -> None: - # Given - task_processor_logger = logging.getLogger("task_processor") - task_processor_logger.propagate = True - task_processor_logger.level = logging.DEBUG - - msg = "Error!" - task = Task.create( - _raise_exception.task_identifier, args=(msg,), scheduled_for=timezone.now() - ) - task.save() - - # When - task_runs = run_tasks() - - # Then - assert len(task_runs) == TaskRun.objects.filter(task=task).count() == 1 - task_run = task_runs[0] - assert task_run.result == TaskResult.FAILURE - assert task_run.started_at - assert task_run.finished_at is None - assert task_run.error_details is not None - - task.refresh_from_db() - assert not task.completed - - assert len(caplog.records) == 3 - - warning_log = caplog.records[0] - assert warning_log.levelname == "WARNING" - assert warning_log.message == ( - f"Failed to execute task '{task.task_identifier}'. 
Exception was: {msg}" - ) - - debug_log_args, debug_log_kwargs = caplog.records[1:] - assert debug_log_args.levelname == "DEBUG" - assert debug_log_args.message == f"args: ['{msg}']" - - assert debug_log_kwargs.levelname == "DEBUG" - assert debug_log_kwargs.message == "kwargs: {}" - - -def test_run_task_runs_failed_task_again(db): - # Given - task = Task.create(_raise_exception.task_identifier, scheduled_for=timezone.now()) - task.save() - - # When - first_task_runs = run_tasks() - - # Now, let's run the task again - second_task_runs = run_tasks() - - # Then - task_runs = first_task_runs + second_task_runs - assert len(task_runs) == TaskRun.objects.filter(task=task).count() == 2 - - # Then - for task_run in task_runs: - assert task_run.result == TaskResult.FAILURE - assert task_run.started_at - assert task_run.finished_at is None - assert task_run.error_details is not None - - task.refresh_from_db() - assert task.completed is False - assert task.is_locked is False - - -def test_run_recurring_task_runs_task_and_creates_recurring_task_run_object_when_failure( - db, - run_by_processor, -): - # Given - task_identifier = "test_unit_task_processor_processor._raise_exception" - - @register_recurring_task(run_every=timedelta(seconds=1)) - def _raise_exception(organisation_name): - raise RuntimeError("test exception") - - task = RecurringTask.objects.get(task_identifier=task_identifier) - - # When - task_runs = run_recurring_tasks() - - # Then - assert len(task_runs) == RecurringTaskRun.objects.filter(task=task).count() == 1 - task_run = task_runs[0] - assert task_run.result == TaskResult.FAILURE - assert task_run.started_at - assert task_run.finished_at is None - assert task_run.error_details is not None - - -def test_run_task_does_nothing_if_no_tasks(db): - # Given - no tasks - # When - result = run_tasks() - # Then - assert result == [] - assert not TaskRun.objects.exists() - - -@pytest.mark.django_db(transaction=True) -def test_run_task_runs_tasks_in_correct_priority(): 
- # Given - # 2 tasks - task_1 = Task.create( - _create_organisation.task_identifier, - scheduled_for=timezone.now(), - args=("task 1 organisation",), - priority=TaskPriority.HIGH, - ) - task_1.save() - - task_2 = Task.create( - _create_organisation.task_identifier, - scheduled_for=timezone.now(), - args=("task 2 organisation",), - priority=TaskPriority.HIGH, - ) - task_2.save() - - task_3 = Task.create( - _create_organisation.task_identifier, - scheduled_for=timezone.now(), - args=("task 3 organisation",), - priority=TaskPriority.HIGHEST, - ) - task_3.save() - - # When - task_runs_1 = run_tasks() - task_runs_2 = run_tasks() - task_runs_3 = run_tasks() - - # Then - assert task_runs_1[0].task == task_3 - assert task_runs_2[0].task == task_1 - assert task_runs_3[0].task == task_2 - - -@pytest.mark.django_db(transaction=True) -def test_run_tasks_skips_locked_tasks(): - """ - This test verifies that tasks are locked while being executed, and hence - new task runners are not able to pick up 'in progress' tasks. 
- """ - # Given - # 2 tasks - # One which is configured to just sleep for 3 seconds, to simulate a task - # being held for a short period of time - task_1 = Task.create( - _sleep.task_identifier, scheduled_for=timezone.now(), args=(3,) - ) - task_1.save() - - # and another which should create an organisation - task_2 = Task.create( - _create_organisation.task_identifier, - scheduled_for=timezone.now(), - args=("task 2 organisation",), - ) - task_2.save() - - # When - # we spawn a new thread to run the first task (configured to just sleep) - task_runner_thread = Thread(target=run_tasks) - task_runner_thread.start() - - # and subsequently attempt to run another task in the main thread - time.sleep(1) # wait for the thread to start and hold the task - task_runs = run_tasks() - - # Then - # the second task is run while the 1st task is held - assert task_runs[0].task == task_2 - - task_runner_thread.join() - - -def test_run_more_than_one_task(db): - # Given - num_tasks = 5 - - tasks = [] - for _ in range(num_tasks): - organisation_name = f"test-org-{uuid.uuid4()}" - tasks.append( - Task.create( - _create_organisation.task_identifier, - scheduled_for=timezone.now(), - args=(organisation_name,), - ) - ) - Task.objects.bulk_create(tasks) - - # When - task_runs = run_tasks(5) - - # Then - assert len(task_runs) == num_tasks - - for task_run in task_runs: - assert task_run.result == TaskResult.SUCCESS - assert task_run.started_at - assert task_run.finished_at - assert task_run.error_details is None - - for task in tasks: - task.refresh_from_db() - assert task.completed - - -def test_recurring_tasks_are_unlocked_if_picked_up_but_not_executed( - db, run_by_processor -): - # Given - @register_recurring_task(run_every=timedelta(days=1)) - def my_task(): - pass - - recurring_task = RecurringTask.objects.get( - task_identifier="test_unit_task_processor_processor.my_task" - ) - - # mimic the task having already been run so that it is next picked up, - # but not executed - now = 
timezone.now() - one_minute_ago = now - timedelta(minutes=1) - RecurringTaskRun.objects.create( - task=recurring_task, - started_at=one_minute_ago, - finished_at=now, - result=TaskResult.SUCCESS.name, - ) - - # When - run_recurring_tasks() - - # Then - recurring_task.refresh_from_db() - assert recurring_task.is_locked is False - - -@register_task_handler() -def _create_organisation(name: str): - """function used to test that task is being run successfully""" - Organisation.objects.create(name=name) - - -@register_task_handler() -def _raise_exception(msg: str): - raise Exception(msg) - - -@register_task_handler() -def _sleep(seconds: int): - time.sleep(seconds) diff --git a/api/tests/unit/task_processor/test_unit_task_processor_tasks.py b/api/tests/unit/task_processor/test_unit_task_processor_tasks.py deleted file mode 100644 index a0f7de8f09ab..000000000000 --- a/api/tests/unit/task_processor/test_unit_task_processor_tasks.py +++ /dev/null @@ -1,193 +0,0 @@ -from datetime import timedelta - -from django.utils import timezone -from pytest_django import DjangoAssertNumQueries -from pytest_django.fixtures import SettingsWrapper - -from task_processor.models import RecurringTask, RecurringTaskRun, Task -from task_processor.tasks import ( - clean_up_old_recurring_task_runs, - clean_up_old_tasks, -) - -now = timezone.now() -three_days_ago = now - timedelta(days=3) -one_day_ago = now - timedelta(days=1) -one_hour_from_now = now + timedelta(hours=1) -sixty_days_ago = now - timedelta(days=60) - - -def test_clean_up_old_tasks_does_nothing_when_no_tasks(db): - # Given - assert Task.objects.count() == 0 - - # When - clean_up_old_tasks() - - # Then - assert Task.objects.count() == 0 - - -def test_clean_up_old_recurring_task_runs_does_nothing_when_no_runs(db: None) -> None: - # Given - assert RecurringTaskRun.objects.count() == 0 - - # When - clean_up_old_recurring_task_runs() - - # Then - assert RecurringTaskRun.objects.count() == 0 - - -def test_clean_up_old_tasks( - settings: 
SettingsWrapper, - django_assert_num_queries: DjangoAssertNumQueries, - db: None, -) -> None: - # Given - settings.TASK_DELETE_RETENTION_DAYS = 2 - settings.TASK_DELETE_BATCH_SIZE = 1 - - # 2 completed tasks that were scheduled before retention period - for _ in range(2): - Task.objects.create( - task_identifier="some.identifier", - scheduled_for=three_days_ago, - completed=True, - ) - - # a task that has been completed but is within the retention period - task_in_retention_period = Task.objects.create( - task_identifier="some.identifier", scheduled_for=one_day_ago, completed=True - ) - - # and a task that has yet to be completed - future_task = Task.objects.create( - task_identifier="some.identifier", scheduled_for=one_hour_from_now - ) - - # and a task that failed - failed_task = Task.objects.create( - task_identifier="some.identifier", scheduled_for=three_days_ago, num_failures=3 - ) - - # When - with django_assert_num_queries(9): - # We expect 9 queries to be run here since we have set the delete batch size to 1 and there are 2 - # tasks we expect it to delete. Therefore, we have 2 loops, each consisting of 4 queries: - # 1. Check if any tasks matching the query exist - # 2. Grab the ids of any matching tasks - # 3. Delete all TaskRun objects for those task_id values - # 4. Delete all Task objects for those ids - # - # The final (9th) query is checking if any tasks exist again (which returns false). 
- clean_up_old_tasks() - - # Then - assert list(Task.objects.all()) == [ - task_in_retention_period, - future_task, - failed_task, - ] - - -def test_clean_up_old_recurring_task_runs( - settings: SettingsWrapper, - django_assert_num_queries: DjangoAssertNumQueries, - db: None, -) -> None: - # Given - settings.RECURRING_TASK_RUN_RETENTION_DAYS = 2 - settings.ENABLE_CLEAN_UP_OLD_TASKS = True - - recurring_task = RecurringTask.objects.create( - task_identifier="some_identifier", run_every=timedelta(seconds=1) - ) - - # 2 task runs finished before retention period - for _ in range(2): - RecurringTaskRun.objects.create( - started_at=three_days_ago, - task=recurring_task, - finished_at=three_days_ago, - ) - - # a task run that is within the retention period - task_in_retention_period = RecurringTaskRun.objects.create( - task=recurring_task, - started_at=one_day_ago, - finished_at=one_day_ago, - ) - - # When - with django_assert_num_queries(1): - clean_up_old_recurring_task_runs() - - # Then - assert list(RecurringTaskRun.objects.all()) == [task_in_retention_period] - - -def test_clean_up_old_tasks_include_failed_tasks( - settings: SettingsWrapper, - django_assert_num_queries: DjangoAssertNumQueries, - db: None, -) -> None: - # Given - settings.TASK_DELETE_RETENTION_DAYS = 2 - settings.TASK_DELETE_INCLUDE_FAILED_TASKS = True - - # a task that failed - Task.objects.create( - task_identifier="some.identifier", scheduled_for=three_days_ago, num_failures=3 - ) - - # When - clean_up_old_tasks() - - # Then - assert not Task.objects.exists() - - -def test_clean_up_old_tasks_does_not_run_if_disabled( - settings, django_assert_num_queries, db -): - # Given - settings.ENABLE_CLEAN_UP_OLD_TASKS = False - - task = Task.objects.create( - task_identifier="some.identifier", scheduled_for=sixty_days_ago - ) - - # When - with django_assert_num_queries(0): - clean_up_old_tasks() - - # Then - assert Task.objects.filter(id=task.id).exists() - - -def 
test_clean_up_old_recurring_task_runs_does_not_run_if_disabled( - settings: SettingsWrapper, - django_assert_num_queries: DjangoAssertNumQueries, - db: None, -) -> None: - # Given - settings.RECURRING_TASK_RUN_RETENTION_DAYS = 2 - settings.ENABLE_CLEAN_UP_OLD_TASKS = False - - recurring_task = RecurringTask.objects.create( - task_identifier="some_identifier", run_every=timedelta(seconds=1) - ) - - RecurringTaskRun.objects.create( - started_at=three_days_ago, - task=recurring_task, - finished_at=three_days_ago, - ) - - # When - with django_assert_num_queries(0): - clean_up_old_recurring_task_runs() - - # Then - assert RecurringTaskRun.objects.exists() diff --git a/api/tests/unit/task_processor/test_unit_task_processor_thread_monitoring.py b/api/tests/unit/task_processor/test_unit_task_processor_thread_monitoring.py deleted file mode 100644 index 2be1326384d8..000000000000 --- a/api/tests/unit/task_processor/test_unit_task_processor_thread_monitoring.py +++ /dev/null @@ -1,89 +0,0 @@ -import json -import logging -from threading import Thread -from unittest.mock import mock_open, patch - -from task_processor.thread_monitoring import ( - UNHEALTHY_THREADS_FILE_PATH, - clear_unhealthy_threads, - get_unhealthy_thread_names, - write_unhealthy_threads, -) - - -def test_clear_unhealthy_threads(mocker): - # Given - mocked_os = mocker.patch("task_processor.thread_monitoring.os") - - def os_path_side_effect(file_path): - return file_path == UNHEALTHY_THREADS_FILE_PATH - - mocked_os.path.exists.side_effect = os_path_side_effect - - # When - clear_unhealthy_threads() - - # Then - mocked_os.remove.assert_called_once_with(UNHEALTHY_THREADS_FILE_PATH) - - -def test_write_unhealthy_threads(caplog, settings): - # Given - # caplog doesn't allow you to capture logging outputs from loggers that don't - # propagate to root. Quick hack here to get the task_processor logger to - # propagate. - # TODO: look into using loguru. 
- task_processor_logger = logging.getLogger("task_processor") - task_processor_logger.propagate = True - - threads = [Thread(target=lambda: None)] - - # When - with patch("builtins.open", mock_open()) as mocked_open: - write_unhealthy_threads(threads) - - # Then - mocked_open.assert_called_once_with(UNHEALTHY_THREADS_FILE_PATH, "w+") - mocked_open.return_value.write.assert_called_once_with( - json.dumps([t.name for t in threads]) - ) - assert len(caplog.records) == 1 - assert caplog.record_tuples[0][1] == 30 # WARNING - assert caplog.record_tuples[0][2] == "Writing unhealthy threads: %s" % [ - t.name for t in threads - ] - - -def test_get_unhealthy_thread_names_returns_empty_list_if_file_does_not_exist(mocker): - # Given - mocked_os = mocker.patch("task_processor.thread_monitoring.os") - mocked_os.path.exists.return_value = False - - # When - unhealthy_thread_names = get_unhealthy_thread_names() - - # Then - assert unhealthy_thread_names == [] - - -def test_get_unhealthy_thread_names(mocker): - # Given - mocked_os = mocker.patch("task_processor.thread_monitoring.os") - - def os_path_side_effect(file_path): - return file_path == UNHEALTHY_THREADS_FILE_PATH - - mocked_os.path.exists.side_effect = os_path_side_effect - - expected_unhealthy_thread_names = ["Thread-1", "Thread-2"] - - # When - with patch( - "builtins.open", - mock_open(read_data=json.dumps(expected_unhealthy_thread_names)), - ) as mocked_open: - unhealthy_thread_names = get_unhealthy_thread_names() - - # Then - mocked_open.assert_called_once_with(UNHEALTHY_THREADS_FILE_PATH, "r") - assert unhealthy_thread_names == expected_unhealthy_thread_names diff --git a/api/tests/unit/task_processor/test_unit_task_processor_threads.py b/api/tests/unit/task_processor/test_unit_task_processor_threads.py deleted file mode 100644 index 09bc1f63ceef..000000000000 --- a/api/tests/unit/task_processor/test_unit_task_processor_threads.py +++ /dev/null @@ -1,42 +0,0 @@ -import logging -from typing import Type - -import 
pytest -from django.db import DatabaseError -from pytest_mock import MockerFixture - -from task_processor.threads import TaskRunner -from tests.unit.task_processor.conftest import GetTaskProcessorCaplog - - -@pytest.mark.parametrize( - "exception_class, exception_message", - [(DatabaseError, "Database error"), (Exception, "Generic error")], -) -def test_task_runner_is_resilient_to_errors( - db: None, - mocker: MockerFixture, - get_task_processor_caplog: GetTaskProcessorCaplog, - exception_class: Type[Exception], - exception_message: str, -) -> None: - # Given - caplog = get_task_processor_caplog(logging.DEBUG) - - task_runner = TaskRunner() - mocker.patch( - "task_processor.threads.run_tasks", - side_effect=exception_class(exception_message), - ) - - # When - task_runner.run_iteration() - - # Then - assert len(caplog.records) == 1 - - assert caplog.records[0].levelno == logging.ERROR - assert ( - caplog.records[0].message - == f"Received error retrieving tasks: {exception_message}." - ) diff --git a/api/users/tasks.py b/api/users/tasks.py index 38438ab35784..706dc85daaae 100644 --- a/api/users/tasks.py +++ b/api/users/tasks.py @@ -1,10 +1,10 @@ from django.core.mail import send_mail from django.template.loader import render_to_string +from task_processor.decorators import register_task_handler from integrations.lead_tracking.pipedrive.lead_tracker import ( PipedriveLeadTracker, ) -from task_processor.decorators import register_task_handler from users.models import FFAdminUser diff --git a/api/webhooks/webhooks.py b/api/webhooks/webhooks.py index 73e7a082d76d..75684e886614 100644 --- a/api/webhooks/webhooks.py +++ b/api/webhooks/webhooks.py @@ -13,12 +13,12 @@ from django.core.serializers.json import DjangoJSONEncoder from django.template.loader import get_template from django.utils import timezone +from task_processor.decorators import register_task_handler +from task_processor.task_run_method import TaskRunMethod from environments.models import Environment, 
Webhook from organisations.models import OrganisationWebhook from projects.models import Organisation -from task_processor.decorators import register_task_handler -from task_processor.task_run_method import TaskRunMethod from webhooks.sample_webhook_data import ( environment_webhook_data, organisation_webhook_data, From c04723373e4a5bfe236beae9a4c827fa819f6509 Mon Sep 17 00:00:00 2001 From: Matthew Elwell Date: Fri, 12 Jul 2024 14:53:57 +0100 Subject: [PATCH 019/247] fix: frontend fails to load when announcement flag isn't set (#4329) --- frontend/web/components/Announcement.tsx | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/frontend/web/components/Announcement.tsx b/frontend/web/components/Announcement.tsx index 291aebc53c6e..f5062eacc1d8 100644 --- a/frontend/web/components/Announcement.tsx +++ b/frontend/web/components/Announcement.tsx @@ -20,6 +20,11 @@ const Announcement: FC = () => { } const announcementValue = Utils.getFlagsmithJSONValue('announcement', null) + + if (!announcementValue) { + return null + } + const { buttonText, description, id, isClosable, title, url } = announcementValue as AnnouncementValueType const dismissed = flagsmith.getTrait('dismissed_announcement') From 1f9aeccff49ab5da37006924b5df1a0f307106d2 Mon Sep 17 00:00:00 2001 From: Kyle Johnson Date: Fri, 12 Jul 2024 15:00:47 +0100 Subject: [PATCH 020/247] fix: Fix "Create Project" button in the project selector not opening the project creation modal (#4294) --- frontend/web/components/ProjectManageWidget.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/web/components/ProjectManageWidget.tsx b/frontend/web/components/ProjectManageWidget.tsx index c1073de1cfd3..16bd6b05a74b 100644 --- a/frontend/web/components/ProjectManageWidget.tsx +++ b/frontend/web/components/ProjectManageWidget.tsx @@ -18,7 +18,7 @@ import CreateProjectModal from './modals/CreateProject' type SegmentsPageType = { router: RouterChildContext['router'] - organisationId: string | 
null + organisationId: number | null } const ProjectManageWidget: FC = ({ @@ -29,7 +29,7 @@ const ProjectManageWidget: FC = ({ const create = Utils.fromParam()?.create const { data: organisations } = useGetOrganisationsQuery({}) const organisation = useMemo( - () => organisations?.results?.find((v) => `${v.id}` === organisationId), + () => organisations?.results?.find((v) => v.id === organisationId), [organisations, organisationId], ) From 3a8ec0096c55f7dce085968b9d9d3a063991008d Mon Sep 17 00:00:00 2001 From: Flagsmith Bot <65724737+flagsmithdev@users.noreply.github.com> Date: Fri, 12 Jul 2024 16:10:25 +0100 Subject: [PATCH 021/247] chore(main): release 2.129.0 (#4321) --- .release-please-manifest.json | 2 +- CHANGELOG.md | 18 ++++++++++++++++++ version.txt | 2 +- 3 files changed, 20 insertions(+), 2 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 1a307fe4b767..40fe1f82a29d 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.128.0" + ".": "2.129.0" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index b82419774ff6..0a9ff48885ea 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,23 @@ # Changelog +## [2.129.0](https://github.com/Flagsmith/flagsmith/compare/v2.128.0...v2.129.0) (2024-07-12) + + +### Features + +* **docker:** Update entrypoint ([#4262](https://github.com/Flagsmith/flagsmith/issues/4262)) ([759e745](https://github.com/Flagsmith/flagsmith/commit/759e745098d2e7cb582c0097c18c042e32533012)) +* Open payment modal if a plan was preselected, add annual plans ([#4110](https://github.com/Flagsmith/flagsmith/issues/4110)) ([103a94f](https://github.com/Flagsmith/flagsmith/commit/103a94fe45c6e9ec677dd2aa14ab2009f5f0b44b)) + + +### Bug Fixes + +* annual plan ids and refreshing ([#4323](https://github.com/Flagsmith/flagsmith/issues/4323)) ([f5a7eed](https://github.com/Flagsmith/flagsmith/commit/f5a7eed20a24f5ef0c642e48030ed74b168dd41f)) 
+* **build:** Avoid E2E rate limiting by swithing to Postgres image hosted on GHCR ([#4328](https://github.com/Flagsmith/flagsmith/issues/4328)) ([249db14](https://github.com/Flagsmith/flagsmith/commit/249db141a9e3679d28cacbe40e35b67d82d245c3)) +* **e2e:** Pass `GITHUB_ACTION_URL` to Docker E2E test runs ([#4322](https://github.com/Flagsmith/flagsmith/issues/4322)) ([f8babe8](https://github.com/Flagsmith/flagsmith/commit/f8babe892a3a066b3dcb80d47fe994e78d4e8ef0)) +* Fix "Create Project" button in the project selector not opening the project creation modal ([#4294](https://github.com/Flagsmith/flagsmith/issues/4294)) ([1f9aecc](https://github.com/Flagsmith/flagsmith/commit/1f9aeccff49ab5da37006924b5df1a0f307106d2)) +* frontend fails to load when announcement flag isn't set ([#4329](https://github.com/Flagsmith/flagsmith/issues/4329)) ([c047233](https://github.com/Flagsmith/flagsmith/commit/c04723373e4a5bfe236beae9a4c827fa819f6509)) +* Prevent "Create Segment" button from disappearing when deleting the last segment ([#4314](https://github.com/Flagsmith/flagsmith/issues/4314)) ([cd121e8](https://github.com/Flagsmith/flagsmith/commit/cd121e8e01bce3ac036587d9741773cbd145b3e3)) + ## [2.128.0](https://github.com/Flagsmith/flagsmith/compare/v2.127.1...v2.128.0) (2024-07-10) diff --git a/version.txt b/version.txt index 0895236fd9de..fe893a7184a8 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -2.128.0 +2.129.0 From 1cd8e0f503240e38e553bc555b4aedd8686d0097 Mon Sep 17 00:00:00 2001 From: Novak Zaballa <41410593+novakzaballa@users.noreply.github.com> Date: Fri, 12 Jul 2024 11:15:07 -0400 Subject: [PATCH 022/247] fix: The organisation setting page is broken locally (#4330) --- frontend/env/project_local.js | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/frontend/env/project_local.js b/frontend/env/project_local.js index c3a86d7453ff..88e405e99e1d 100644 --- a/frontend/env/project_local.js +++ b/frontend/env/project_local.js @@ -16,6 +16,10 @@ module.exports = 
global.Project = { flagsmithClientEdgeAPI: 'https://edge.api.flagsmith.com/api/v1/', // This is used for Sentry tracking maintenance: false, + plans: { + scaleUp: { annual: 'scale-up-annual-v2', monthly: 'scale-up-v2' }, + startup: { annual: 'startup-annual-v2', monthly: 'startup-v2' }, + }, useSecureCookies: false, ...(globalThis.projectOverrides || {}), } From 737cd6557d95fd67d33cda9dbb814f59a20ea0ed Mon Sep 17 00:00:00 2001 From: Kyle Johnson Date: Fri, 12 Jul 2024 16:28:26 +0100 Subject: [PATCH 023/247] chore: Rename metadata to custom fields (#4298) --- frontend/common/constants.ts | 2 +- .../metadata/AddMetadataToEntity.tsx | 10 +++---- .../web/components/metadata/MetadataPage.tsx | 26 +++++++++---------- .../web/components/metadata/MetadataTitle.tsx | 2 +- .../components/modals/CreateMetadataField.tsx | 4 +-- .../web/components/modals/CreateSegment.tsx | 9 +++---- .../components/pages/CreateEnvironmentPage.js | 4 +-- .../pages/EnvironmentSettingsPage.js | 4 +-- .../components/pages/ProjectSettingsPage.js | 2 +- 9 files changed, 31 insertions(+), 32 deletions(-) diff --git a/frontend/common/constants.ts b/frontend/common/constants.ts index 12a7c62c0222..9ae54b5aa612 100644 --- a/frontend/common/constants.ts +++ b/frontend/common/constants.ts @@ -519,7 +519,7 @@ export default { TAGS_DESCRIPTION: 'Organise your flags with tags, tagging your features as "protected" will prevent them from accidentally being deleted.', TOOLTIP_METADATA_DESCRIPTION: (entity: string) => - `Add Metadata in your ${entity}, you can create the Metadata Fields in the project settings.`, + `Add Custom fields in your ${entity}, you can define the custom fields in the project settings.`, USER_PROPERTY_DESCRIPTION: 'The name of the user trait or custom property belonging to the user, e.g. 
firstName', WEBHOOKS_DESCRIPTION: diff --git a/frontend/web/components/metadata/AddMetadataToEntity.tsx b/frontend/web/components/metadata/AddMetadataToEntity.tsx index ca3191d06543..675504382384 100644 --- a/frontend/web/components/metadata/AddMetadataToEntity.tsx +++ b/frontend/web/components/metadata/AddMetadataToEntity.tsx @@ -217,7 +217,7 @@ const AddMetadataToEntity: FC = ({ className='mt-1 no-pad' header={ - Metadata + Field Value } @@ -247,7 +247,7 @@ const AddMetadataToEntity: FC = ({ }} renderNoResults={ - No metadata configured for {entity} entity. Add metadata field in + No custom fields configured for {entity} entity. Add custom fields in your{' '} = ({ if (res?.error) { toast(res?.error?.data.metadata[0], 'danger') } else { - toast('Environment Metadata Updated') + toast('Environment Field Updated') } }) }} > - Save Metadata + Save Custom Field
)} @@ -347,7 +347,7 @@ const MetadataRow: FC = ({ }} className='mr-2' style={{ width: '250px' }} - placeholder='Metadata Value' + placeholder='Field Value' isValid={Utils.validateMetadataType( metadata?.type, metadataValue, diff --git a/frontend/web/components/metadata/MetadataPage.tsx b/frontend/web/components/metadata/MetadataPage.tsx index 0e8485cf832d..279d60594d6d 100644 --- a/frontend/web/components/metadata/MetadataPage.tsx +++ b/frontend/web/components/metadata/MetadataPage.tsx @@ -53,12 +53,12 @@ const MetadataPage: FC = ({ organisationId, projectId }) => { }, [metadataFieldList, MetadataModelFieldList]) const metadataCreatedToast = () => { - toast('Metadata Field Created') + toast('Custom Field Created') closeModal() } const createMetadataField = () => { openModal( - `Create Metadata Field`, + `Create Custom Field`, = ({ organisationId, projectId }) => { const editMetadata = (id: string, contentTypeList: MetadataModelField[]) => { openModal( - `Edit Metadata Field`, + `Edit Custom Field`, { - toast('Metadata Field Updated') + toast('Custom Field Updated') }} projectId={projectId} organisationId={organisationId} @@ -92,13 +92,13 @@ const MetadataPage: FC = ({ organisationId, projectId }) => {
{'Are you sure you want to delete '} {name} - {' metadata field?'} + {' custom field?'}
), destructive: true, onYes: () => - deleteMetadata({ id }).then(() => toast('Metadata Field Deleted')), - title: 'Delete Metadata Field', + deleteMetadata({ id }).then(() => toast('Custom Field Deleted')), + title: 'Delete Custom Field', yesText: 'Confirm', }) } @@ -107,18 +107,18 @@ const MetadataPage: FC = ({ organisationId, projectId }) => {
-
Metadata Fields
+
Custom Fields

- Manage metadata fields for selected core identities in your project{' '} + Manage custom fields for selected core identities in your project{' '}

- {Utils.getFlagsmithHasFeature('enable_metadata') && + {Utils.getPlansPermission('METADATA') && + Utils.getFlagsmithHasFeature('enable_metadata') && envContentType?.id && ( diff --git a/frontend/web/components/pages/EnvironmentSettingsPage.js b/frontend/web/components/pages/EnvironmentSettingsPage.js index da8d8a1d5bea..26745fca1e6c 100644 --- a/frontend/web/components/pages/EnvironmentSettingsPage.js +++ b/frontend/web/components/pages/EnvironmentSettingsPage.js @@ -82,7 +82,10 @@ const EnvironmentSettingsPage = class extends Component { }) }) - if (Utils.getFlagsmithHasFeature('enable_metadata')) { + if ( + Utils.getPlansPermission('METADATA') && + Utils.getFlagsmithHasFeature('enable_metadata') + ) { getSupportedContentType(getStore(), { organisation_id: AccountStore.getOrganisation().id, }).then((res) => { @@ -260,7 +263,9 @@ const EnvironmentSettingsPage = class extends Component { }, } = this const has4EyesPermission = Utils.getPlansPermission('4_EYES') - const metadataEnable = Utils.getFlagsmithHasFeature('enable_metadata') + const metadataEnable = + Utils.getPlansPermission('METADATA') && + Utils.getFlagsmithHasFeature('enable_metadata') return (
diff --git a/frontend/web/components/pages/ProjectSettingsPage.js b/frontend/web/components/pages/ProjectSettingsPage.js index 618c303396ff..eb667e32f8ce 100644 --- a/frontend/web/components/pages/ProjectSettingsPage.js +++ b/frontend/web/components/pages/ProjectSettingsPage.js @@ -167,7 +167,9 @@ const ProjectSettingsPage = class extends Component { const { name, stale_flags_limit_days } = this.state const hasStaleFlagsPermission = Utils.getPlansPermission('STALE_FLAGS') - const metadataEnable = Utils.getFlagsmithHasFeature('enable_metadata') + const metadataEnable = + Utils.getPlansPermission('METADATA') && + Utils.getFlagsmithHasFeature('enable_metadata') return (
diff --git a/frontend/web/components/pages/SegmentsPage.tsx b/frontend/web/components/pages/SegmentsPage.tsx index babf1112f391..8d5a9d7c1f04 100644 --- a/frontend/web/components/pages/SegmentsPage.tsx +++ b/frontend/web/components/pages/SegmentsPage.tsx @@ -24,7 +24,7 @@ import PageTitle from 'components/PageTitle' import Switch from 'components/Switch' import { setModalTitle } from 'components/modals/base/ModalDefault' import classNames from 'classnames' -import InfoMessage from 'components/InfoMessage'; +import InfoMessage from 'components/InfoMessage' const CodeHelp = require('../../components/CodeHelp') type SegmentsPageType = { From 04e8bc2657d8b3657e9f12b54803911b74508123 Mon Sep 17 00:00:00 2001 From: Zach Aysan Date: Tue, 30 Jul 2024 13:51:54 -0400 Subject: [PATCH 078/247] fix: Handle zero case for API usage limit (#4428) --- api/organisations/models.py | 2 +- api/organisations/task_helpers.py | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/api/organisations/models.py b/api/organisations/models.py index 9a421ca5467d..3b818c85a463 100644 --- a/api/organisations/models.py +++ b/api/organisations/models.py @@ -452,7 +452,7 @@ class OrganisationSubscriptionInformationCache(LifecycleModelMixin, models.Model api_calls_30d = models.IntegerField(default=0) allowed_seats = models.IntegerField(default=1) - allowed_30d_api_calls = models.IntegerField(default=50000) + allowed_30d_api_calls = models.IntegerField(default=MAX_API_CALLS_IN_FREE_PLAN) allowed_projects = models.IntegerField(default=1, blank=True, null=True) chargebee_email = models.EmailField(blank=True, max_length=254, null=True) diff --git a/api/organisations/task_helpers.py b/api/organisations/task_helpers.py index 09d04602e2e5..96e99c671790 100644 --- a/api/organisations/task_helpers.py +++ b/api/organisations/task_helpers.py @@ -13,6 +13,7 @@ OrganisationAPIUsageNotification, OrganisationRole, ) +from organisations.subscriptions.constants import MAX_API_CALLS_IN_FREE_PLAN from 
users.models import FFAdminUser from .constants import API_USAGE_ALERT_THRESHOLDS @@ -114,6 +115,9 @@ def handle_api_usage_notification_for_organisation(organisation: Organisation) - api_usage = get_current_api_usage(organisation.id, f"-{days}d") + # For some reason the allowed API calls is set to 0 so default to the max free plan. + allowed_api_calls = allowed_api_calls or MAX_API_CALLS_IN_FREE_PLAN + api_usage_percent = int(100 * api_usage / allowed_api_calls) matched_threshold = None From 02f7df7a245ec6fb4fb9122840315ccfb1a3fa15 Mon Sep 17 00:00:00 2001 From: Zach Aysan Date: Tue, 30 Jul 2024 13:55:03 -0400 Subject: [PATCH 079/247] fix: Check API usage before restricting serving flags and admin (#4422) --- api/organisations/tasks.py | 15 +- .../test_unit_organisations_tasks.py | 128 ++++++++++++++++++ 2 files changed, 141 insertions(+), 2 deletions(-) diff --git a/api/organisations/tasks.py b/api/organisations/tasks.py index 3d43f10d2ea6..0771abefbcb9 100644 --- a/api/organisations/tasks.py +++ b/api/organisations/tasks.py @@ -243,7 +243,6 @@ def restrict_use_due_to_api_limit_grace_period_over() -> None: Since free plans don't have predefined subscription periods, we use a rolling thirty day period to filter them. 
""" - grace_period = timezone.now() - timedelta(days=API_USAGE_GRACE_PERIOD) month_start = timezone.now() - timedelta(30) queryset = ( @@ -259,13 +258,14 @@ def restrict_use_due_to_api_limit_grace_period_over() -> None: organisation_ids = [] for result in queryset: organisation_ids.append(result["organisation"]) + organisations = ( Organisation.objects.filter( id__in=organisation_ids, subscription__plan=FREE_PLAN_ID, api_limit_access_block__isnull=True, ) - .select_related("subscription") + .select_related("subscription", "subscription_information_cache") .exclude( stop_serving_flags=True, block_access_to_admin=True, @@ -290,6 +290,17 @@ def restrict_use_due_to_api_limit_grace_period_over() -> None: if not stop_serving and not block_access: continue + if not organisation.has_subscription_information_cache(): + continue + + subscription_cache = organisation.subscription_information_cache + api_usage = get_current_api_usage(organisation.id, "30d") + if api_usage / subscription_cache.allowed_30d_api_calls < 1.0: + logger.info( + f"API use for organisation {organisation.id} has fallen to below limit, so not restricting use." 
+ ) + continue + organisation.stop_serving_flags = stop_serving organisation.block_access_to_admin = block_access diff --git a/api/tests/unit/organisations/test_unit_organisations_tasks.py b/api/tests/unit/organisations/test_unit_organisations_tasks.py index a750c15306e9..3d75713df56d 100644 --- a/api/tests/unit/organisations/test_unit_organisations_tasks.py +++ b/api/tests/unit/organisations/test_unit_organisations_tasks.py @@ -1162,6 +1162,29 @@ def test_restrict_use_due_to_api_limit_grace_period_over( organisation4 = Organisation.objects.create(name="Org #4") organisation5 = Organisation.objects.create(name="Org #5") + for org in [ + organisation, + organisation2, + organisation3, + organisation4, + organisation5, + ]: + OrganisationSubscriptionInformationCache.objects.create( + organisation=org, + allowed_seats=10, + allowed_projects=3, + allowed_30d_api_calls=10_000, + chargebee_email="test@example.com", + ) + org.subscription.subscription_id = "fancy_sub_id23" + org.subscription.plan = FREE_PLAN_ID + org.subscription.save() + + mock_api_usage = mocker.patch( + "organisations.tasks.get_current_api_usage", + ) + mock_api_usage.return_value = 12_005 + # Add users to test email delivery for org in [organisation2, organisation3, organisation4, organisation5]: admin_user.add_organisation(org, role=OrganisationRole.ADMIN) @@ -1296,6 +1319,111 @@ def test_restrict_use_due_to_api_limit_grace_period_over( assert getattr(organisation, "api_limit_access_block", None) is None +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") +def test_restrict_use_due_to_api_limit_grace_period_over_missing_subscription_information_cache( + mocker: MockerFixture, + organisation: Organisation, + freezer: FrozenDateTimeFactory, + mailoutbox: list[EmailMultiAlternatives], +) -> None: + # Given + assert not organisation.has_subscription_information_cache() + + get_client_mock = mocker.patch("organisations.tasks.get_client") + client_mock = MagicMock() + get_client_mock.return_value 
= client_mock + client_mock.get_identity_flags.return_value.is_feature_enabled.return_value = True + + now = timezone.now() + organisation.subscription.subscription_id = "fancy_sub_id23" + organisation.subscription.plan = FREE_PLAN_ID + organisation.subscription.save() + + OrganisationAPIUsageNotification.objects.create( + notified_at=now, + organisation=organisation, + percent_usage=120, + ) + + now = now + timedelta(days=API_USAGE_GRACE_PERIOD + 1) + freezer.move_to(now) + + # When + restrict_use_due_to_api_limit_grace_period_over() + + # Then + organisation.refresh_from_db() + + # Organisations that missing the cache don't get blocked + assert organisation.stop_serving_flags is False + assert organisation.block_access_to_admin is False + assert not hasattr(organisation, "api_limit_access_block") + assert len(mailoutbox) == 0 + + +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") +def test_restrict_use_due_to_api_limit_grace_period_over_with_reduced_api_usage( + mocker: MockerFixture, + organisation: Organisation, + freezer: FrozenDateTimeFactory, + mailoutbox: list[EmailMultiAlternatives], + inspecting_handler: logging.Handler, +) -> None: + # Given + assert not organisation.has_subscription_information_cache() + + from organisations.tasks import logger + + logger.addHandler(inspecting_handler) + + get_client_mock = mocker.patch("organisations.tasks.get_client") + client_mock = MagicMock() + get_client_mock.return_value = client_mock + client_mock.get_identity_flags.return_value.is_feature_enabled.return_value = True + + now = timezone.now() + + OrganisationSubscriptionInformationCache.objects.create( + organisation=organisation, + allowed_seats=10, + allowed_projects=3, + allowed_30d_api_calls=10_000, + chargebee_email="test@example.com", + ) + organisation.subscription.subscription_id = "fancy_sub_id23" + organisation.subscription.plan = FREE_PLAN_ID + organisation.subscription.save() + + mock_api_usage = mocker.patch( + 
"organisations.tasks.get_current_api_usage", + ) + mock_api_usage.return_value = 8000 + + OrganisationAPIUsageNotification.objects.create( + notified_at=now, + organisation=organisation, + percent_usage=120, + ) + + now = now + timedelta(days=API_USAGE_GRACE_PERIOD + 1) + freezer.move_to(now) + + # When + restrict_use_due_to_api_limit_grace_period_over() + + # Then + organisation.refresh_from_db() + + # Organisations that missing the cache don't get blocked + assert organisation.stop_serving_flags is False + assert organisation.block_access_to_admin is False + assert not hasattr(organisation, "api_limit_access_block") + assert len(mailoutbox) == 0 + assert inspecting_handler.messages == [ + f"API use for organisation {organisation.id} has fallen to below limit, so not restricting use." + ] + + @pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") def test_unrestrict_after_api_limit_grace_period_is_stale( organisation: Organisation, From 104d66de60f29ff9b3d672fbb4b8bf36596c2833 Mon Sep 17 00:00:00 2001 From: Wesley Sum <77678835+sumtzehern@users.noreply.github.com> Date: Wed, 31 Jul 2024 05:04:47 -0400 Subject: [PATCH 080/247] feat: Surface password requirements on signup / dynamic validation (#4282) --- .../web/components/PasswordRequirements.js | 39 +++++++++++++++++++ frontend/web/components/pages/HomePage.js | 23 ++++++++++- 2 files changed, 61 insertions(+), 1 deletion(-) create mode 100644 frontend/web/components/PasswordRequirements.js diff --git a/frontend/web/components/PasswordRequirements.js b/frontend/web/components/PasswordRequirements.js new file mode 100644 index 000000000000..0e0454025cbf --- /dev/null +++ b/frontend/web/components/PasswordRequirements.js @@ -0,0 +1,39 @@ +import React, { useEffect } from 'react'; +import PropTypes from 'prop-types'; +import { close, checkmark } from 'ionicons/icons' +import { IonIcon } from '@ionic/react' + +const PasswordRequirements = ({ password, onRequirementsMet}) => { + const requirements = [ + { label: 
'At least 8 characters', test: password.length >= 8 }, + { label: 'Contains a number', test: /\d/.test(password) }, + { label: 'Contains a special character', test: /[!@#$%^&*(),.?":{}|<>[\]\\\/_+=-]/.test(password) }, + { label: 'Contains an uppercase letter', test: /[A-Z]/.test(password) }, + { label: 'Contains a lowercase letter', test: /[a-z]/.test(password) }, + ]; + + const allRequirementsMet = requirements.every(req => req.test); + + useEffect(() => { + onRequirementsMet(allRequirementsMet); + }, [allRequirementsMet, onRequirementsMet]); + + return ( +
+
    + {requirements.map((req, index) => ( +

    + {req.label} +

    + ))} +
+
+ ); +}; + +PasswordRequirements.propTypes = { + password: PropTypes.string.isRequired, + onRequirementsMet: PropTypes.func.isRequired, + }; + +export default PasswordRequirements; diff --git a/frontend/web/components/pages/HomePage.js b/frontend/web/components/pages/HomePage.js index e58a5bc88572..83b5eb739b86 100644 --- a/frontend/web/components/pages/HomePage.js +++ b/frontend/web/components/pages/HomePage.js @@ -10,6 +10,7 @@ import ConfigProvider from 'common/providers/ConfigProvider' import Constants from 'common/constants' import ErrorMessage from 'components/ErrorMessage' import Button from 'components/base/forms/Button' +import PasswordRequirements from 'components/PasswordRequirements' import { informationCircleOutline } from 'ionicons/icons' import { IonIcon } from '@ionic/react' import classNames from 'classnames' @@ -32,8 +33,16 @@ const HomePage = class extends React.Component { // can handle always setting the marketing consent. API.setCookie('marketing_consent_given', 'true') this.state = { + email: '', + first_name: '', + last_name: '', + password: '', marketing_consent_given: true, + allRequirementsMet: false, } + + this.handlePasswordChange = this.handlePasswordChange.bind(this); + this.handleRequirementsMet = this.handleRequirementsMet.bind(this); } addAlbacross() { @@ -131,6 +140,14 @@ const HomePage = class extends React.Component { } } + handlePasswordChange(e) { + this.setState({ password: e.target.value }); + } + + handleRequirementsMet(allRequirementsMet) { + this.setState({ allRequirementsMet }); + } + showForgotPassword = (e) => { e.preventDefault() openModal( @@ -607,11 +624,15 @@ const HomePage = class extends React.Component { name='password' id='password' /> +
- {!isVersioned && ( + {!isVersioned && !changeRequest?.committedAt && (
) )} - + {changeRequest.committed_at ? (
Committed at{' '} From 670ede96e496554f9fe6ff71d57da4c9fccb082c Mon Sep 17 00:00:00 2001 From: Matthew Elwell Date: Wed, 31 Jul 2024 15:46:02 +0100 Subject: [PATCH 085/247] fix: deleting change requests with change sets throws 500 error (#4439) --- api/features/workflows/core/models.py | 10 ++++- .../core/test_unit_workflows_models.py | 44 ++++++++++++++++++- 2 files changed, 51 insertions(+), 3 deletions(-) diff --git a/api/features/workflows/core/models.py b/api/features/workflows/core/models.py index 1e4c7d2ac9c1..cd26ccc000aa 100644 --- a/api/features/workflows/core/models.py +++ b/api/features/workflows/core/models.py @@ -263,14 +263,20 @@ def prevent_change_request_delete_if_committed(self) -> None: @property def live_from(self) -> datetime | None: + # Note: a change request can only have one of either + # feature_states, change_sets or environment_feature_versions + # First we check if there are feature states associated with the change request # and, if so, we return the live_from of the feature state with the earliest # live_from. if first_feature_state := self.feature_states.order_by("live_from").first(): return first_feature_state.live_from - # Then we do the same for environment feature versions. Note that a change request - # can not have feature states and environment feature versions. + # Then we check the change sets. + elif first_change_set := self.change_sets.order_by("live_from").first(): + return first_change_set.live_from + + # Finally, we do the same for environment feature versions. 
elif first_environment_feature_version := self.environment_feature_versions.order_by( "live_from" ).first(): diff --git a/api/tests/unit/features/workflows/core/test_unit_workflows_models.py b/api/tests/unit/features/workflows/core/test_unit_workflows_models.py index c7fa0febcdc1..e17cbbafdd6f 100644 --- a/api/tests/unit/features/workflows/core/test_unit_workflows_models.py +++ b/api/tests/unit/features/workflows/core/test_unit_workflows_models.py @@ -1,5 +1,7 @@ +import json from datetime import timedelta +import freezegun import pytest from django.contrib.sites.models import Site from django.utils import timezone @@ -16,7 +18,10 @@ from audit.related_object_type import RelatedObjectType from environments.models import Environment from features.models import Feature, FeatureState -from features.versioning.models import EnvironmentFeatureVersion +from features.versioning.models import ( + EnvironmentFeatureVersion, + VersionChangeSet, +) from features.versioning.versioning_service import get_environment_flags_list from features.workflows.core.exceptions import ( CannotApproveOwnChangeRequest, @@ -731,3 +736,40 @@ def test_committing_change_request_with_environment_feature_versions_creates_pub related_object_type=RelatedObjectType.EF_VERSION.name, log=ENVIRONMENT_FEATURE_VERSION_PUBLISHED_MESSAGE % feature.name, ).exists() + + +def test_change_request_live_from_for_change_request_with_change_set( + feature: Feature, + environment_v2_versioning: Environment, + admin_user: FFAdminUser, +) -> None: + # Given + change_request = ChangeRequest.objects.create( + title="Test CR", + environment=environment_v2_versioning, + user=admin_user, + ) + VersionChangeSet.objects.create( + change_request=change_request, + feature=feature, + feature_states_to_update=json.dumps( + [ + { + "feature_segment": None, + "enabled": True, + "feature_state_value": { + "type": "unicode", + "string_value": "updated", + }, + } + ] + ), + ) + + # When + now = timezone.now() + with 
freezegun.freeze_time(now): + change_request.commit(admin_user) + + # Then + assert change_request.live_from == now From 093e26a7f7131798f1c8426567a439a8dc17c781 Mon Sep 17 00:00:00 2001 From: Matthew Elwell Date: Wed, 31 Jul 2024 15:46:31 +0100 Subject: [PATCH 086/247] docs: correct kotlin docs (#4430) --- docs/docs/clients/client-side/android.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/docs/clients/client-side/android.md b/docs/docs/clients/client-side/android.md index ebe8aa13e46b..376f7fead8fc 100644 --- a/docs/docs/clients/client-side/android.md +++ b/docs/docs/clients/client-side/android.md @@ -16,7 +16,7 @@ In your project path `app/build.gradle` add a new dependence ```groovy //flagsmith -implementation 'com.github.Flagsmith:flagsmith-kotlin-android-client:1.5.0' +implementation("com.github.Flagsmith:flagsmith-kotlin-android-client:v1.6.1") ``` You should be able to find the latest version in the @@ -31,7 +31,7 @@ repositories { google() mavenCentral() - maven { url "https://jitpack.io" } + maven("https://jitpack.io") } ``` From 40bdd5466f1a2aa838ac0dc86b939d5e04cf6605 Mon Sep 17 00:00:00 2001 From: Kyle Johnson Date: Wed, 31 Jul 2024 16:08:09 +0100 Subject: [PATCH 087/247] chore: move conventional commit (#4441) --- .github/workflows/conventional-commit.yml | 47 +++++++++++++++++++++ .github/workflows/platform-pull-request.yml | 40 ------------------ 2 files changed, 47 insertions(+), 40 deletions(-) create mode 100644 .github/workflows/conventional-commit.yml diff --git a/.github/workflows/conventional-commit.yml b/.github/workflows/conventional-commit.yml new file mode 100644 index 000000000000..ab9dc2ae9eff --- /dev/null +++ b/.github/workflows/conventional-commit.yml @@ -0,0 +1,47 @@ +name: Conventional Commit + +on: + pull_request: + types: [opened, synchronize, reopened, ready_for_review] + branches-ignore: + - release-please-* +jobs: + conventional-commit: + name: Conventional Commit + runs-on: ubuntu-latest + 
permissions: + pull-requests: write + steps: + - name: Check PR Conventional Commit title + uses: amannn/action-semantic-pull-request@v5 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + types: | # mirrors changelog-sections in the /release-please-config.json + feat + fix + infra + ci + docs + deps + perf + refactor + test + chore + - name: Auto-label PR with Conventional Commit title + uses: kramen22/conventional-release-labels@v1 + with: + type_labels: | + { + "feat": "feature", + "fix": "fix", + "infra": "infrastructure", + "ci": "ci-cd", + "docs": "docs", + "deps": "dependencies", + "perf": "performance", + "refactor": "refactor", + "test": "testing", + "chore": "chore" + } + ignored_types: '[]' diff --git a/.github/workflows/platform-pull-request.yml b/.github/workflows/platform-pull-request.yml index bc81034092be..550e484a5355 100644 --- a/.github/workflows/platform-pull-request.yml +++ b/.github/workflows/platform-pull-request.yml @@ -10,46 +10,6 @@ on: - release-please-* jobs: - conventional-commit: - name: Conventional Commit - runs-on: ubuntu-latest - permissions: - pull-requests: write - steps: - - name: Check PR Conventional Commit title - uses: amannn/action-semantic-pull-request@v5 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - types: | # mirrors changelog-sections in the /release-please-config.json - feat - fix - infra - ci - docs - deps - perf - refactor - test - chore - - name: Auto-label PR with Conventional Commit title - uses: kramen22/conventional-release-labels@v1 - with: - type_labels: | - { - "feat": "feature", - "fix": "fix", - "infra": "infrastructure", - "ci": "ci-cd", - "docs": "docs", - "deps": "dependencies", - "perf": "performance", - "refactor": "refactor", - "test": "testing", - "chore": "chore" - } - ignored_types: '[]' - check-permissions: name: Check actor permissions runs-on: ubuntu-latest From 4da906964b0ee2b676c025b253e76158b7f02fb6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rodrigo=20L=C3=B3pez=20Dato?= 
Date: Wed, 31 Jul 2024 19:34:22 -0300 Subject: [PATCH 088/247] docs: Add missing deployment overview redirect (#4444) --- docs/vercel.json | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/vercel.json b/docs/vercel.json index b935bddc0a1f..363315994d92 100644 --- a/docs/vercel.json +++ b/docs/vercel.json @@ -34,7 +34,7 @@ }, { "source": "/deployment-overview/", - "destination": "/deployment/overview" + "destination": "/deployment" }, { "source": "/hosted-service/", @@ -323,6 +323,10 @@ { "source": "/basic-features/integrations", "destination": "/integrations" + }, + { + "source": "/deployment/overview", + "destination": "/deployment" } ] } From e2853d7494b6fdab513e5ad5abf232585d97a078 Mon Sep 17 00:00:00 2001 From: Zach Aysan Date: Thu, 1 Aug 2024 08:55:50 -0400 Subject: [PATCH 089/247] fix: Create a check for billing started at in API usage task helper (#4440) --- api/organisations/task_helpers.py | 8 +++++ .../test_unit_organisations_tasks.py | 36 +++++++++++++++++++ 2 files changed, 44 insertions(+) diff --git a/api/organisations/task_helpers.py b/api/organisations/task_helpers.py index 96e99c671790..509811cbf622 100644 --- a/api/organisations/task_helpers.py +++ b/api/organisations/task_helpers.py @@ -106,6 +106,14 @@ def handle_api_usage_notification_for_organisation(organisation: Organisation) - subscription_cache = organisation.subscription_information_cache billing_starts_at = subscription_cache.current_billing_term_starts_at + if billing_starts_at is None: + # Since the calling code is a list of many organisations + # log the error and return without raising an exception. + logger.error( + f"Paid organisation {organisation.id} is missing billing_starts_at datetime" + ) + return + # Truncate to the closest active month to get start of current period. 
month_delta = relativedelta(now, billing_starts_at).months period_starts_at = relativedelta(months=month_delta) + billing_starts_at diff --git a/api/tests/unit/organisations/test_unit_organisations_tasks.py b/api/tests/unit/organisations/test_unit_organisations_tasks.py index 61dab9fb774d..6c853f097c41 100644 --- a/api/tests/unit/organisations/test_unit_organisations_tasks.py +++ b/api/tests/unit/organisations/test_unit_organisations_tasks.py @@ -33,6 +33,9 @@ SCALE_UP, ) from organisations.subscriptions.xero.metadata import XeroSubscriptionMetadata +from organisations.task_helpers import ( + handle_api_usage_notification_for_organisation, +) from organisations.tasks import ( ALERT_EMAIL_MESSAGE, ALERT_EMAIL_SUBJECT, @@ -244,6 +247,39 @@ def test_send_org_subscription_cancelled_alert(db: None, mocker: MockerFixture) ) +def test_handle_api_usage_notification_for_organisation_when_billing_starts_at_is_none( + organisation: Organisation, + inspecting_handler: logging.Handler, + mocker: MockerFixture, +) -> None: + # Given + api_usage_mock = mocker.patch("organisations.task_helpers.get_current_api_usage") + organisation.subscription.plan = SCALE_UP + organisation.subscription.subscription_id = "fancy_id" + organisation.subscription.save() + OrganisationSubscriptionInformationCache.objects.create( + organisation=organisation, + allowed_seats=10, + allowed_projects=3, + allowed_30d_api_calls=100, + chargebee_email="test@example.com", + current_billing_term_starts_at=None, + current_billing_term_ends_at=None, + ) + from organisations.task_helpers import logger + + logger.addHandler(inspecting_handler) + + # When + handle_api_usage_notification_for_organisation(organisation) + + # Then + api_usage_mock.assert_not_called() + assert inspecting_handler.messages == [ + f"Paid organisation {organisation.id} is missing billing_starts_at datetime" + ] + + @pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") def test_handle_api_usage_notifications_when_feature_flag_is_off( 
mocker: MockerFixture, From 5cbdd7fd29c2ed1b1bcf2bbff34c1bc07bc9ab6b Mon Sep 17 00:00:00 2001 From: Kim Gustyr Date: Fri, 2 Aug 2024 19:55:24 +0100 Subject: [PATCH 090/247] ci: Enable E2E tests for external contributors (#4438) --- .dockerignore | 1 + .../workflows/.reusable-docker-e2e-tests.yml | 10 ++- .github/workflows/conventional-commit.yml | 22 +---- .github/workflows/platform-pull-request.yml | 83 ++++++++++++------- 4 files changed, 66 insertions(+), 50 deletions(-) diff --git a/.dockerignore b/.dockerignore index b39dd7f9c053..c32587f39ac0 100644 --- a/.dockerignore +++ b/.dockerignore @@ -12,3 +12,4 @@ Dockerfile .ebignore .ebextensions .direnv +.github \ No newline at end of file diff --git a/.github/workflows/.reusable-docker-e2e-tests.yml b/.github/workflows/.reusable-docker-e2e-tests.yml index 9634838d65f1..3078a8ceb47e 100644 --- a/.github/workflows/.reusable-docker-e2e-tests.yml +++ b/.github/workflows/.reusable-docker-e2e-tests.yml @@ -27,6 +27,10 @@ on: description: The runner label to use. Defaults to `ubuntu-latest` required: false default: ubuntu-latest + secrets: + gcr-token: + description: A token to use for logging into Github Container Registry. If not provided, login does not occur. 
+ required: false jobs: run-e2e: @@ -38,16 +42,20 @@ jobs: packages: read id-token: write + env: + GCR_TOKEN: ${{ secrets.gcr-token }} + steps: - name: Cloning repo uses: actions/checkout@v4 - name: Login to Github Container Registry + if: ${{ env.GCR_TOKEN }} uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} + password: ${{ env.GCR_TOKEN }} - name: Set up Depot CLI uses: depot/setup-action@v1 diff --git a/.github/workflows/conventional-commit.yml b/.github/workflows/conventional-commit.yml index ab9dc2ae9eff..f05c2a640d12 100644 --- a/.github/workflows/conventional-commit.yml +++ b/.github/workflows/conventional-commit.yml @@ -3,14 +3,11 @@ name: Conventional Commit on: pull_request: types: [opened, synchronize, reopened, ready_for_review] - branches-ignore: - - release-please-* + jobs: conventional-commit: name: Conventional Commit runs-on: ubuntu-latest - permissions: - pull-requests: write steps: - name: Check PR Conventional Commit title uses: amannn/action-semantic-pull-request@v5 @@ -28,20 +25,3 @@ jobs: refactor test chore - - name: Auto-label PR with Conventional Commit title - uses: kramen22/conventional-release-labels@v1 - with: - type_labels: | - { - "feat": "feature", - "fix": "fix", - "infra": "infrastructure", - "ci": "ci-cd", - "docs": "docs", - "deps": "dependencies", - "perf": "performance", - "refactor": "refactor", - "test": "testing", - "chore": "chore" - } - ignored_types: '[]' diff --git a/.github/workflows/platform-pull-request.yml b/.github/workflows/platform-pull-request.yml index 550e484a5355..13c171bf9c94 100644 --- a/.github/workflows/platform-pull-request.yml +++ b/.github/workflows/platform-pull-request.yml @@ -6,11 +6,9 @@ on: paths-ignore: - docs/** - infrastructure/** - branches-ignore: - - release-please-* jobs: - check-permissions: + permissions-check: name: Check actor permissions runs-on: ubuntu-latest outputs: @@ -21,10 +19,36 @@ jobs: with: require: 
write + conventional-commit-label: + if: needs.permissions-check.outputs.can-write == 'true' + name: Add Conventional Commit labels + needs: permissions-check + runs-on: ubuntu-latest + permissions: + pull-requests: write + steps: + - name: Auto-label PR with Conventional Commit title + uses: kramen22/conventional-release-labels@v1 + with: + type_labels: | + { + "feat": "feature", + "fix": "fix", + "infra": "infrastructure", + "ci": "ci-cd", + "docs": "docs", + "deps": "dependencies", + "perf": "performance", + "refactor": "refactor", + "test": "testing", + "chore": "chore" + } + ignored_types: '[]' + docker-prepare-report-comment: - if: needs.check-permissions.outputs.can-write == 'true' + if: github.event.pull_request.draft == false && needs.permissions-check.outputs.can-write == 'true' name: Prepare Docker report comment - needs: check-permissions + needs: permissions-check runs-on: ubuntu-latest permissions: pull-requests: write @@ -36,64 +60,65 @@ jobs: - uses: ./.github/actions/docker-build-report-to-pr docker-build-unified: - if: github.event.pull_request.draft == false - needs: [check-permissions, docker-prepare-report-comment] + if: github.event.pull_request.draft == false && !cancelled() + needs: [permissions-check, docker-prepare-report-comment] name: Build Unified Image uses: ./.github/workflows/.reusable-docker-build.yml with: - ephemeral: ${{ needs.check-permissions.outputs.can-write == 'false' }} target: oss-unified image-name: flagsmith - comment: ${{ needs.check-permissions.outputs.can-write == 'true' }} + ephemeral: ${{ needs.permissions-check.outputs.can-write == 'false' }} + comment: ${{ needs.docker-prepare-report-comment.result == 'success' }} docker-build-api: - if: github.event.pull_request.draft == false - needs: [check-permissions, docker-prepare-report-comment] + if: github.event.pull_request.draft == false && !cancelled() + needs: [permissions-check, docker-prepare-report-comment] name: Build API Image uses: 
./.github/workflows/.reusable-docker-build.yml with: - ephemeral: ${{ needs.check-permissions.outputs.can-write == 'false' }} target: oss-api image-name: flagsmith-api - comment: ${{ needs.check-permissions.outputs.can-write == 'true' }} + ephemeral: ${{ needs.permissions-check.outputs.can-write == 'false' }} + comment: ${{ needs.docker-prepare-report-comment.result == 'success' }} docker-build-frontend: - if: github.event.pull_request.draft == false - needs: [check-permissions, docker-prepare-report-comment] + if: github.event.pull_request.draft == false && !cancelled() + needs: [permissions-check, docker-prepare-report-comment] name: Build Frontend Image uses: ./.github/workflows/.reusable-docker-build.yml with: - ephemeral: ${{ needs.check-permissions.outputs.can-write == 'false' }} target: oss-frontend image-name: flagsmith-frontend - comment: ${{ needs.check-permissions.outputs.can-write == 'true' }} + ephemeral: ${{ needs.permissions-check.outputs.can-write == 'false' }} + comment: ${{ needs.docker-prepare-report-comment.result == 'success' }} docker-build-api-test: - if: github.event.pull_request.draft == false - needs: [check-permissions, docker-prepare-report-comment] + if: github.event.pull_request.draft == false && !cancelled() + needs: [permissions-check, docker-prepare-report-comment] name: Build API Test Image uses: ./.github/workflows/.reusable-docker-build.yml with: target: api-test image-name: flagsmith-api-test + ephemeral: ${{ needs.permissions-check.outputs.can-write == 'false' }} + comment: ${{ needs.docker-prepare-report-comment.result == 'success' }} scan: false - comment: ${{ needs.check-permissions.outputs.can-write == 'true' }} docker-build-e2e: - if: github.event.pull_request.draft == false - needs: [check-permissions, docker-prepare-report-comment] + if: github.event.pull_request.draft == false && !cancelled() + needs: [permissions-check, docker-prepare-report-comment] name: Build E2E Image uses: 
./.github/workflows/.reusable-docker-build.yml with: - ephemeral: ${{ needs.check-permissions.outputs.can-write == 'false' }} file: frontend/Dockerfile.e2e image-name: flagsmith-e2e + ephemeral: ${{ needs.permissions-check.outputs.can-write == 'false' }} + comment: ${{ needs.docker-prepare-report-comment.result == 'success' }} scan: false - comment: ${{ needs.check-permissions.outputs.can-write == 'true' }} docker-build-private-cloud: - if: github.event.pull_request.draft == false && needs.check-permissions.outputs.can-write == 'true' - needs: [check-permissions, docker-prepare-report-comment] + if: github.event.pull_request.draft == false && needs.permissions-check.outputs.can-write == 'true' + needs: [permissions-check, docker-prepare-report-comment] name: Build Private Cloud Image uses: ./.github/workflows/.reusable-docker-build.yml with: @@ -105,7 +130,8 @@ jobs: github_private_cloud_token=${{ secrets.GH_PRIVATE_ACCESS_TOKEN }} run-e2e-tests: - needs: [docker-build-api, docker-build-e2e] + if: '!cancelled()' + needs: [permissions-check, docker-build-api, docker-build-e2e] uses: ./.github/workflows/.reusable-docker-e2e-tests.yml with: runs-on: ${{ matrix.runs-on }} @@ -113,7 +139,8 @@ jobs: api-image: ${{ needs.docker-build-api.outputs.image }} concurrency: ${{ matrix.args.concurrency }} tests: ${{ matrix.args.tests }} - secrets: inherit + secrets: + gcr-token: ${{ needs.permissions-check.outputs.can-write == 'true' && secrets.GITHUB_TOKEN || '' }} strategy: matrix: From d659f4aeda70676ad135cfb3812b50a84002b995 Mon Sep 17 00:00:00 2001 From: Flagsmith Bot <65724737+flagsmithdev@users.noreply.github.com> Date: Tue, 6 Aug 2024 10:37:03 +0100 Subject: [PATCH 091/247] chore(main): release 2.134.0 (#4429) --- .release-please-manifest.json | 2 +- CHANGELOG.md | 20 ++++++++++++++++++++ version.txt | 2 +- 3 files changed, 22 insertions(+), 2 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 9ed0c10ee3a3..b900f8a162a6 100644 
--- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.133.1" + ".": "2.134.0" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 72fbfe2b0d9a..affdbff3ed21 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,25 @@ # Changelog +## [2.134.0](https://github.com/Flagsmith/flagsmith/compare/v2.133.1...v2.134.0) (2024-08-02) + + +### Features + +* Add command for Edge V2 migration ([#4415](https://github.com/Flagsmith/flagsmith/issues/4415)) ([035fe77](https://github.com/Flagsmith/flagsmith/commit/035fe77881c7ae73206979f420f9dd0ff8bc318e)) +* Surface password requirements on signup / dynamic validation ([#4282](https://github.com/Flagsmith/flagsmith/issues/4282)) ([104d66d](https://github.com/Flagsmith/flagsmith/commit/104d66de60f29ff9b3d672fbb4b8bf36596c2833)) + + +### Bug Fixes + +* Catch full exception instead of runtime error in API usage task ([#4426](https://github.com/Flagsmith/flagsmith/issues/4426)) ([f03b479](https://github.com/Flagsmith/flagsmith/commit/f03b47986218e1c0a90f38200bb5f254ef4dc3a3)) +* Check API usage before restricting serving flags and admin ([#4422](https://github.com/Flagsmith/flagsmith/issues/4422)) ([02f7df7](https://github.com/Flagsmith/flagsmith/commit/02f7df7a245ec6fb4fb9122840315ccfb1a3fa15)) +* Create a check for billing started at in API usage task helper ([#4440](https://github.com/Flagsmith/flagsmith/issues/4440)) ([e2853d7](https://github.com/Flagsmith/flagsmith/commit/e2853d7494b6fdab513e5ad5abf232585d97a078)) +* Delete scheduled change request ([#4437](https://github.com/Flagsmith/flagsmith/issues/4437)) ([233ce50](https://github.com/Flagsmith/flagsmith/commit/233ce509dea479a12f62feca8000400d86c16ecb)) +* deleting change requests with change sets throws 500 error ([#4439](https://github.com/Flagsmith/flagsmith/issues/4439)) ([670ede9](https://github.com/Flagsmith/flagsmith/commit/670ede96e496554f9fe6ff71d57da4c9fccb082c)) +* Handle zero case for API usage 
limit ([#4428](https://github.com/Flagsmith/flagsmith/issues/4428)) ([04e8bc2](https://github.com/Flagsmith/flagsmith/commit/04e8bc2657d8b3657e9f12b54803911b74508123)) +* Metadata UI improvements ([#4327](https://github.com/Flagsmith/flagsmith/issues/4327)) ([d4006c0](https://github.com/Flagsmith/flagsmith/commit/d4006c031436778227f64fd16cbc36f897769def)) +* **tests:** Strong password for E2E ([#4435](https://github.com/Flagsmith/flagsmith/issues/4435)) ([1afb3e5](https://github.com/Flagsmith/flagsmith/commit/1afb3e5f5ee6c1e924c934db4f37d4874d46cb9d)) + ## [2.133.1](https://github.com/Flagsmith/flagsmith/compare/v2.133.0...v2.133.1) (2024-07-30) diff --git a/version.txt b/version.txt index 999e4507871a..6be1c8e9ff6e 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -2.133.1 +2.134.0 From 3225c47043f9647a7426b7f05890bde29b681acc Mon Sep 17 00:00:00 2001 From: Zach Aysan Date: Tue, 6 Aug 2024 15:43:17 -0400 Subject: [PATCH 092/247] fix: Set grace period to a singular event (#4455) --- ...eate_organisation_breached_grace_period.py | 38 ++++++++++++ api/organisations/models.py | 8 +++ api/organisations/tasks.py | 24 ++++++-- .../test_unit_organisations_tasks.py | 59 +++++++++++++++++++ 4 files changed, 123 insertions(+), 6 deletions(-) create mode 100644 api/organisations/migrations/0056_create_organisation_breached_grace_period.py diff --git a/api/organisations/migrations/0056_create_organisation_breached_grace_period.py b/api/organisations/migrations/0056_create_organisation_breached_grace_period.py new file mode 100644 index 000000000000..cc51bf632745 --- /dev/null +++ b/api/organisations/migrations/0056_create_organisation_breached_grace_period.py @@ -0,0 +1,38 @@ +# Generated by Django 3.2.25 on 2024-08-06 17:46 + +import django.db.models.deletion +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ("organisations", "0055_alter_percent_usage"), + ] + + operations = [ + migrations.CreateModel( + 
name="OrganisationBreachedGracePeriod", + fields=[ + ( + "id", + models.AutoField( + auto_created=True, + primary_key=True, + serialize=False, + verbose_name="ID", + ), + ), + ("created_at", models.DateTimeField(auto_now_add=True, null=True)), + ("updated_at", models.DateTimeField(auto_now=True, null=True)), + ( + "organisation", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + related_name="breached_grace_period", + to="organisations.organisation", + ), + ), + ], + ), + ] diff --git a/api/organisations/models.py b/api/organisations/models.py index 3b818c85a463..1451cacc30b3 100644 --- a/api/organisations/models.py +++ b/api/organisations/models.py @@ -476,6 +476,14 @@ class OrganisationAPIUsageNotification(models.Model): updated_at = models.DateTimeField(null=True, auto_now=True) +class OrganisationBreachedGracePeriod(models.Model): + organisation = models.OneToOneField( + Organisation, on_delete=models.CASCADE, related_name="breached_grace_period" + ) + created_at = models.DateTimeField(null=True, auto_now_add=True) + updated_at = models.DateTimeField(null=True, auto_now=True) + + class APILimitAccessBlock(models.Model): organisation = models.OneToOneField( Organisation, on_delete=models.CASCADE, related_name="api_limit_access_block" diff --git a/api/organisations/tasks.py b/api/organisations/tasks.py index 97eae8072491..0ebdc1f5d6ca 100644 --- a/api/organisations/tasks.py +++ b/api/organisations/tasks.py @@ -4,7 +4,7 @@ from app_analytics.influxdb_wrapper import get_current_api_usage from django.conf import settings -from django.db.models import F, Max +from django.db.models import F, Max, Q from django.utils import timezone from task_processor.decorators import ( register_recurring_task, @@ -22,6 +22,7 @@ Organisation, OrganisationAPIBilling, OrganisationAPIUsageNotification, + OrganisationBreachedGracePeriod, Subscription, ) from organisations.subscriptions.constants import FREE_PLAN_ID @@ -243,13 +244,22 @@ def 
restrict_use_due_to_api_limit_grace_period_over() -> None: Since free plans don't have predefined subscription periods, we use a rolling thirty day period to filter them. """ - grace_period = timezone.now() - timedelta(days=API_USAGE_GRACE_PERIOD) - month_start = timezone.now() - timedelta(30) + now = timezone.now() + grace_period = now - timedelta(days=API_USAGE_GRACE_PERIOD) + month_start = now - timedelta(30) queryset = ( OrganisationAPIUsageNotification.objects.filter( - notified_at__gt=month_start, - notified_at__lt=grace_period, - percent_usage__gte=100, + Q( + notified_at__gte=month_start, + notified_at__lte=grace_period, + percent_usage__gte=100, + ) + | Q( + notified_at__gte=month_start, + notified_at__lte=now, + percent_usage__gte=100, + organisation__breached_grace_period__isnull=False, + ) ) .values("organisation") .annotate(max_value=Max("percent_usage")) @@ -293,6 +303,8 @@ def restrict_use_due_to_api_limit_grace_period_over() -> None: if not organisation.has_subscription_information_cache(): continue + OrganisationBreachedGracePeriod.objects.get_or_create(organisation=organisation) + subscription_cache = organisation.subscription_information_cache api_usage = get_current_api_usage(organisation.id, "30d") if api_usage / subscription_cache.allowed_30d_api_calls < 1.0: diff --git a/api/tests/unit/organisations/test_unit_organisations_tasks.py b/api/tests/unit/organisations/test_unit_organisations_tasks.py index 6c853f097c41..62708193a94a 100644 --- a/api/tests/unit/organisations/test_unit_organisations_tasks.py +++ b/api/tests/unit/organisations/test_unit_organisations_tasks.py @@ -21,6 +21,7 @@ Organisation, OrganisationAPIBilling, OrganisationAPIUsageNotification, + OrganisationBreachedGracePeriod, OrganisationRole, OrganisationSubscriptionInformationCache, UserOrganisation, @@ -1410,6 +1411,64 @@ def test_restrict_use_due_to_api_limit_grace_period_over( assert getattr(organisation, "api_limit_access_block", None) is None 
+@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") +def test_restrict_use_due_to_api_limit_grace_period_breached( + mocker: MockerFixture, + organisation: Organisation, + freezer: FrozenDateTimeFactory, + mailoutbox: list[EmailMultiAlternatives], + admin_user: FFAdminUser, + staff_user: FFAdminUser, +) -> None: + # Given + get_client_mock = mocker.patch("organisations.tasks.get_client") + client_mock = MagicMock() + get_client_mock.return_value = client_mock + client_mock.get_identity_flags.return_value.is_feature_enabled.return_value = True + + now = timezone.now() + + OrganisationBreachedGracePeriod.objects.create(organisation=organisation) + OrganisationSubscriptionInformationCache.objects.create( + organisation=organisation, + allowed_seats=10, + allowed_projects=3, + allowed_30d_api_calls=10_000, + chargebee_email="test@example.com", + ) + organisation.subscription.subscription_id = "fancy_sub_id23" + organisation.subscription.plan = FREE_PLAN_ID + organisation.subscription.save() + + mock_api_usage = mocker.patch( + "organisations.tasks.get_current_api_usage", + ) + mock_api_usage.return_value = 12_005 + + OrganisationAPIUsageNotification.objects.create( + notified_at=now, + organisation=organisation, + percent_usage=100, + ) + OrganisationAPIUsageNotification.objects.create( + notified_at=now, + organisation=organisation, + percent_usage=120, + ) + now = now + timedelta(days=API_USAGE_GRACE_PERIOD - 1) + freezer.move_to(now) + + # When + restrict_use_due_to_api_limit_grace_period_over() + + # Then + organisation.refresh_from_db() + + assert organisation.stop_serving_flags is True + assert organisation.block_access_to_admin is True + assert organisation.api_limit_access_block + + @pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") def test_restrict_use_due_to_api_limit_grace_period_over_missing_subscription_information_cache( mocker: MockerFixture, From fb27b5771a21312e6011f6dc9ef061c771325148 Mon Sep 17 00:00:00 2001 From: Kyle Johnson 
Date: Wed, 7 Aug 2024 10:54:43 +0100 Subject: [PATCH 093/247] chore: Add invite hash to signup endpoints (#4458) --- frontend/common/stores/account-store.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/frontend/common/stores/account-store.js b/frontend/common/stores/account-store.js index acfefa41ae85..bd847484fcd9 100644 --- a/frontend/common/stores/account-store.js +++ b/frontend/common/stores/account-store.js @@ -202,6 +202,7 @@ const controller = { : `${Project.api}auth/oauth/${type}/`, { ...(_data || {}), + invite_hash: API.getInvite() || undefined, sign_up_type: API.getInviteType(), }, ) @@ -245,6 +246,7 @@ const controller = { password, referrer: API.getReferrer() || '', sign_up_type: API.getInviteType(), + invite_hash: API.getInvite() || undefined, }) .then((res) => { data.setToken(res.key) From bee01c7f21cae19e7665ede3284f96989d33940f Mon Sep 17 00:00:00 2001 From: Matthew Elwell Date: Wed, 7 Aug 2024 11:54:51 +0100 Subject: [PATCH 094/247] fix: protect get environment document endpoint (#4459) --- api/environments/permissions/permissions.py | 2 ++ api/environments/views.py | 5 ++++- .../test_unit_environments_views.py | 21 +++++++++++++++++-- 3 files changed, 25 insertions(+), 3 deletions(-) diff --git a/api/environments/permissions/permissions.py b/api/environments/permissions/permissions.py index dc033d7249f1..020b7a065249 100644 --- a/api/environments/permissions/permissions.py +++ b/api/environments/permissions/permissions.py @@ -45,6 +45,8 @@ def has_permission(self, request, view): def has_object_permission(self, request, view, obj): if view.action == "clone": return request.user.has_project_permission(CREATE_ENVIRONMENT, obj.project) + elif view.action == "get_document": + return request.user.has_environment_permission(VIEW_ENVIRONMENT, obj) return request.user.is_environment_admin(obj) or view.action in [ "user_permissions" diff --git a/api/environments/views.py b/api/environments/views.py index 4afc0ab52567..085fa6853c10 100644 --- 
a/api/environments/views.py +++ b/api/environments/views.py @@ -225,7 +225,10 @@ def user_permissions(self, request, *args, **kwargs): @swagger_auto_schema(responses={200: SDKEnvironmentDocumentModel}) @action(detail=True, methods=["GET"], url_path="document") def get_document(self, request, api_key: str): - return Response(Environment.get_environment_document(api_key)) + environment = ( + self.get_object() + ) # use get_object to ensure permissions check is performed + return Response(Environment.get_environment_document(environment.api_key)) @swagger_auto_schema(request_body=no_body, responses={202: ""}) @action(detail=True, methods=["POST"], url_path="enable-v2-versioning") diff --git a/api/tests/unit/environments/test_unit_environments_views.py b/api/tests/unit/environments/test_unit_environments_views.py index 90760acec236..6329efae684b 100644 --- a/api/tests/unit/environments/test_unit_environments_views.py +++ b/api/tests/unit/environments/test_unit_environments_views.py @@ -766,11 +766,13 @@ def test_audit_log_entry_created_when_environment_updated( def test_get_document( environment: Environment, project: Project, - admin_client_new: APIClient, + staff_client: APIClient, feature: Feature, segment: Segment, + with_environment_permissions: WithEnvironmentPermissionsCallable, ) -> None: # Given + with_environment_permissions([VIEW_ENVIRONMENT]) # and some sample data to make sure we're testing all of the document segment_rule = SegmentRule.objects.create( @@ -786,13 +788,28 @@ def test_get_document( ) # When - response = admin_client_new.get(url) + response = staff_client.get(url) # Then assert response.status_code == status.HTTP_200_OK assert response.json() +def test_cannot_get_environment_document_without_permission( + staff_client: APIClient, environment: Environment +) -> None: + # Given + url = reverse( + "api-v1:environments:environment-get-document", args=[environment.api_key] + ) + + # When + response = staff_client.get(url) + + # Then + assert 
response.status_code == status.HTTP_403_FORBIDDEN + + def test_get_all_trait_keys_for_environment_only_returns_distinct_keys( identity: Identity, admin_client_new: APIClient, From 0e6deec6404c3e78edf5f36b36ea0f2dcef3dd06 Mon Sep 17 00:00:00 2001 From: Matthew Elwell Date: Wed, 7 Aug 2024 13:28:32 +0100 Subject: [PATCH 095/247] fix: don't allow bypassing `ALLOW_REGISTRATION_WITHOUT_INVITE` behaviour (#4454) --- api/custom_auth/oauth/serializers.py | 15 ++---- api/custom_auth/serializers.py | 39 +++++++++----- .../test_custom_auth_integration.py | 3 +- api/tests/unit/custom_auth/conftest.py | 9 ++++ .../oauth/test_unit_oauth_serializers.py | 45 +++++++++++++++- .../oauth/test_unit_oauth_views.py | 17 +++++- .../test_unit_custom_auth_serializer.py | 53 +++++++++++++++++++ 7 files changed, 155 insertions(+), 26 deletions(-) create mode 100644 api/tests/unit/custom_auth/conftest.py diff --git a/api/custom_auth/oauth/serializers.py b/api/custom_auth/oauth/serializers.py index 6a1e80ab90af..cf16c008191b 100644 --- a/api/custom_auth/oauth/serializers.py +++ b/api/custom_auth/oauth/serializers.py @@ -6,13 +6,11 @@ from django.db.models import F from rest_framework import serializers from rest_framework.authtoken.models import Token -from rest_framework.exceptions import PermissionDenied -from organisations.invites.models import Invite from users.auth_type import AuthType from users.models import SignUpType -from ..constants import USER_REGISTRATION_WITHOUT_INVITE_ERROR_MESSAGE +from ..serializers import InviteLinkValidationMixin from .github import GithubUser from .google import get_user_info @@ -20,7 +18,7 @@ UserModel = get_user_model() -class OAuthLoginSerializer(serializers.Serializer): +class OAuthLoginSerializer(InviteLinkValidationMixin, serializers.Serializer): access_token = serializers.CharField( required=True, help_text="Code or access token returned from the FE interaction with the third party login provider.", @@ -85,12 +83,9 @@ def _get_user(self, user_data: 
dict): if not existing_user: sign_up_type = self.validated_data.get("sign_up_type") - if not ( - settings.ALLOW_REGISTRATION_WITHOUT_INVITE - or sign_up_type == SignUpType.INVITE_LINK.value - or Invite.objects.filter(email=email).exists() - ): - raise PermissionDenied(USER_REGISTRATION_WITHOUT_INVITE_ERROR_MESSAGE) + self._validate_registration_invite( + email=email, sign_up_type=self.validated_data.get("sign_up_type") + ) return UserModel.objects.create( **user_data, email=email.lower(), sign_up_type=sign_up_type diff --git a/api/custom_auth/serializers.py b/api/custom_auth/serializers.py index 55bb43e595ae..11bd80828a6b 100644 --- a/api/custom_auth/serializers.py +++ b/api/custom_auth/serializers.py @@ -5,7 +5,7 @@ from rest_framework.exceptions import PermissionDenied from rest_framework.validators import UniqueValidator -from organisations.invites.models import Invite +from organisations.invites.models import Invite, InviteLink from users.auth_type import AuthType from users.constants import DEFAULT_DELETE_ORPHAN_ORGANISATIONS_VALUE from users.models import FFAdminUser, SignUpType @@ -23,7 +23,28 @@ class Meta: fields = ("key",) -class CustomUserCreateSerializer(UserCreateSerializer): +class InviteLinkValidationMixin: + invite_hash = serializers.CharField(required=False, write_only=True) + + def _validate_registration_invite(self, email: str, sign_up_type: str) -> None: + if settings.ALLOW_REGISTRATION_WITHOUT_INVITE: + return + + valid = False + + match sign_up_type: + case SignUpType.INVITE_LINK.value: + valid = InviteLink.objects.filter( + hash=self.initial_data.get("invite_hash") + ).exists() + case SignUpType.INVITE_EMAIL.value: + valid = Invite.objects.filter(email__iexact=email.lower()).exists() + + if not valid: + raise PermissionDenied(USER_REGISTRATION_WITHOUT_INVITE_ERROR_MESSAGE) + + +class CustomUserCreateSerializer(UserCreateSerializer, InviteLinkValidationMixin): key = serializers.SerializerMethodField() class Meta(UserCreateSerializer.Meta): @@ 
-58,6 +79,10 @@ def validate(self, attrs): self.context.get("request"), email=email, raise_exception=True ) + self._validate_registration_invite( + email=email, sign_up_type=attrs.get("sign_up_type") + ) + attrs["email"] = email.lower() return attrs @@ -66,16 +91,6 @@ def get_key(instance): token, _ = Token.objects.get_or_create(user=instance) return token.key - def save(self, **kwargs): - if not ( - settings.ALLOW_REGISTRATION_WITHOUT_INVITE - or self.validated_data.get("sign_up_type") == SignUpType.INVITE_LINK.value - or Invite.objects.filter(email=self.validated_data.get("email")) - ): - raise PermissionDenied(USER_REGISTRATION_WITHOUT_INVITE_ERROR_MESSAGE) - - return super(CustomUserCreateSerializer, self).save(**kwargs) - class CustomUserDelete(serializers.Serializer): current_password = serializers.CharField( diff --git a/api/tests/integration/custom_auth/end_to_end/test_custom_auth_integration.py b/api/tests/integration/custom_auth/end_to_end/test_custom_auth_integration.py index aec4b9b4327f..e274475119b5 100644 --- a/api/tests/integration/custom_auth/end_to_end/test_custom_auth_integration.py +++ b/api/tests/integration/custom_auth/end_to_end/test_custom_auth_integration.py @@ -12,7 +12,7 @@ from organisations.invites.models import Invite from organisations.models import Organisation -from users.models import FFAdminUser +from users.models import FFAdminUser, SignUpType def test_register_and_login_workflows(db: None, api_client: APIClient) -> None: @@ -124,6 +124,7 @@ def test_can_register_with_invite_if_registration_disabled_without_invite( "password": password, "first_name": "test", "last_name": "register", + "sign_up_type": SignUpType.INVITE_EMAIL.value, } Invite.objects.create(email=email, organisation=organisation) diff --git a/api/tests/unit/custom_auth/conftest.py b/api/tests/unit/custom_auth/conftest.py new file mode 100644 index 000000000000..17d5f760c4c1 --- /dev/null +++ b/api/tests/unit/custom_auth/conftest.py @@ -0,0 +1,9 @@ +import pytest + 
+from organisations.invites.models import InviteLink +from organisations.models import Organisation + + +@pytest.fixture() +def invite_link(organisation: Organisation) -> InviteLink: + return InviteLink.objects.create(organisation=organisation) diff --git a/api/tests/unit/custom_auth/oauth/test_unit_oauth_serializers.py b/api/tests/unit/custom_auth/oauth/test_unit_oauth_serializers.py index bd21e9fc5d08..11a0519e0b6f 100644 --- a/api/tests/unit/custom_auth/oauth/test_unit_oauth_serializers.py +++ b/api/tests/unit/custom_auth/oauth/test_unit_oauth_serializers.py @@ -1,5 +1,7 @@ +from typing import Type from unittest import mock +import pytest from django.test import RequestFactory from django.utils import timezone from pytest_django.fixtures import SettingsWrapper @@ -11,6 +13,7 @@ GoogleLoginSerializer, OAuthLoginSerializer, ) +from organisations.invites.models import InviteLink from users.models import FFAdminUser, SignUpType @@ -128,7 +131,11 @@ def test_OAuthLoginSerializer_calls_is_authentication_method_valid_correctly_if_ def test_OAuthLoginSerializer_allows_registration_if_sign_up_type_is_invite_link( - settings: SettingsWrapper, rf: RequestFactory, mocker: MockerFixture, db: None + settings: SettingsWrapper, + rf: RequestFactory, + mocker: MockerFixture, + db: None, + invite_link: InviteLink, ): # Given settings.ALLOW_REGISTRATION_WITHOUT_INVITE = False @@ -140,6 +147,7 @@ def test_OAuthLoginSerializer_allows_registration_if_sign_up_type_is_invite_link data={ "access_token": "some_token", "sign_up_type": SignUpType.INVITE_LINK.value, + "invite_hash": invite_link.hash, }, context={"request": request}, ) @@ -153,3 +161,38 @@ def test_OAuthLoginSerializer_allows_registration_if_sign_up_type_is_invite_link # Then assert user + + +@pytest.mark.parametrize( + "serializer_class", (GithubLoginSerializer, GithubLoginSerializer) +) +def test_OAuthLoginSerializer_allows_login_if_allow_registration_without_invite_is_false( + settings: SettingsWrapper, + rf: 
RequestFactory, + mocker: MockerFixture, + admin_user: FFAdminUser, + serializer_class: Type[OAuthLoginSerializer], +): + # Given + settings.ALLOW_REGISTRATION_WITHOUT_INVITE = False + + request = rf.post("/api/v1/auth/users/") + + serializer = serializer_class( + data={"access_token": "some_token"}, + context={"request": request}, + ) + # monkey patch the get_user_info method to return the mock user data + serializer.get_user_info = lambda: { + "email": admin_user.email, + "github_user_id": "abc123", + "google_user_id": "abc123", + } + + serializer.is_valid(raise_exception=True) + + # When + user = serializer.save() + + # Then + assert user diff --git a/api/tests/unit/custom_auth/oauth/test_unit_oauth_views.py b/api/tests/unit/custom_auth/oauth/test_unit_oauth_views.py index 0f742267b71b..99a451bab4eb 100644 --- a/api/tests/unit/custom_auth/oauth/test_unit_oauth_views.py +++ b/api/tests/unit/custom_auth/oauth/test_unit_oauth_views.py @@ -9,6 +9,7 @@ from organisations.invites.models import Invite from organisations.models import Organisation +from users.models import SignUpType @mock.patch("custom_auth.oauth.serializers.get_user_info") @@ -66,7 +67,13 @@ def test_can_register_with_google_with_invite_if_registration_disabled( Invite.objects.create(organisation=organisation, email=email) # When - response = client.post(url, data={"access_token": "some-token"}) + response = client.post( + url, + data={ + "access_token": "some-token", + "sign_up_type": SignUpType.INVITE_EMAIL.value, + }, + ) # Then assert response.status_code == status.HTTP_200_OK @@ -89,7 +96,13 @@ def test_can_register_with_github_with_invite_if_registration_disabled( Invite.objects.create(organisation=organisation, email=email) # When - response = client.post(url, data={"access_token": "some-token"}) + response = client.post( + url, + data={ + "access_token": "some-token", + "sign_up_type": SignUpType.INVITE_EMAIL.value, + }, + ) # Then assert response.status_code == status.HTTP_200_OK diff --git 
a/api/tests/unit/custom_auth/test_unit_custom_auth_serializer.py b/api/tests/unit/custom_auth/test_unit_custom_auth_serializer.py index 00f099e1ace6..010a861f30ab 100644 --- a/api/tests/unit/custom_auth/test_unit_custom_auth_serializer.py +++ b/api/tests/unit/custom_auth/test_unit_custom_auth_serializer.py @@ -1,7 +1,13 @@ +import pytest from django.test import RequestFactory from pytest_django.fixtures import SettingsWrapper +from rest_framework.exceptions import PermissionDenied +from custom_auth.constants import ( + USER_REGISTRATION_WITHOUT_INVITE_ERROR_MESSAGE, +) from custom_auth.serializers import CustomUserCreateSerializer +from organisations.invites.models import InviteLink from users.models import FFAdminUser, SignUpType user_dict = { @@ -70,6 +76,7 @@ def test_CustomUserCreateSerializer_calls_is_authentication_method_valid_correct def test_CustomUserCreateSerializer_allows_registration_if_sign_up_type_is_invite_link( + invite_link: InviteLink, db: None, settings: SettingsWrapper, rf: RequestFactory, @@ -80,6 +87,7 @@ def test_CustomUserCreateSerializer_allows_registration_if_sign_up_type_is_invit data = { **user_dict, "sign_up_type": SignUpType.INVITE_LINK.value, + "invite_hash": invite_link.hash, } serializer = CustomUserCreateSerializer( @@ -92,3 +100,48 @@ def test_CustomUserCreateSerializer_allows_registration_if_sign_up_type_is_invit # Then assert user + + +def test_invite_link_validation_mixin_validate_fails_if_invite_link_hash_not_provided( + settings: SettingsWrapper, + db: None, +) -> None: + # Given + settings.ALLOW_REGISTRATION_WITHOUT_INVITE = False + + serializer = CustomUserCreateSerializer( + data={ + **user_dict, + "sign_up_type": SignUpType.INVITE_LINK.value, + } + ) + + # When + with pytest.raises(PermissionDenied) as exc_info: + serializer.is_valid(raise_exception=True) + + # Then + assert exc_info.value.detail == USER_REGISTRATION_WITHOUT_INVITE_ERROR_MESSAGE + + +def 
test_invite_link_validation_mixin_validate_fails_if_invite_link_hash_not_valid( + invite_link: InviteLink, + settings: SettingsWrapper, +) -> None: + # Given + settings.ALLOW_REGISTRATION_WITHOUT_INVITE = False + + serializer = CustomUserCreateSerializer( + data={ + **user_dict, + "sign_up_type": SignUpType.INVITE_LINK.value, + "invite_hash": "invalid-hash", + } + ) + + # When + with pytest.raises(PermissionDenied) as exc_info: + serializer.is_valid(raise_exception=True) + + # Then + assert exc_info.value.detail == USER_REGISTRATION_WITHOUT_INVITE_ERROR_MESSAGE From 948a87aea8bb1f838731686be4cfce210470737c Mon Sep 17 00:00:00 2001 From: Zach Aysan Date: Wed, 7 Aug 2024 09:00:18 -0400 Subject: [PATCH 096/247] refactor: Use datetimes for InfluxDB queries (#4450) --- api/app_analytics/analytics_db_service.py | 29 ++-- api/app_analytics/influxdb_wrapper.py | 112 +++++++++++---- api/features/views.py | 11 +- api/organisations/subscription_info_cache.py | 16 ++- api/organisations/task_helpers.py | 3 +- api/organisations/tasks.py | 4 +- api/sales_dashboard/views.py | 20 ++- ...est_unit_app_analytics_influxdb_wrapper.py | 129 ++++++++++++++++-- .../test_unit_app_analytics_views.py | 15 +- .../unit/features/test_unit_features_views.py | 63 ++++++++- ...t_organisations_subscription_info_cache.py | 29 ++-- .../test_unit_organisations_tasks.py | 9 +- .../test_unit_organisations_views.py | 3 +- .../test_unit_sales_dashboard_views.py | 7 +- 14 files changed, 354 insertions(+), 96 deletions(-) diff --git a/api/app_analytics/analytics_db_service.py b/api/app_analytics/analytics_db_service.py index 5ccd612247db..59933d808966 100644 --- a/api/app_analytics/analytics_db_service.py +++ b/api/app_analytics/analytics_db_service.py @@ -35,8 +35,7 @@ def get_usage_data( ) -> list[UsageData]: now = timezone.now() - date_stop = date_start = None - period_starts_at = period_ends_at = None + date_start = date_stop = None match period: case constants.CURRENT_BILLING_PERIOD: @@ -47,10 +46,8 @@ 
def get_usage_data( days=30 ) month_delta = relativedelta(now, starts_at).months - period_starts_at = relativedelta(months=month_delta) + starts_at - period_ends_at = now - date_start = f"-{(now - period_starts_at).days}d" - date_stop = "now()" + date_start = relativedelta(months=month_delta) + starts_at + date_stop = now case constants.PREVIOUS_BILLING_PERIOD: if not getattr(organisation, "subscription_information_cache", None): @@ -61,16 +58,12 @@ def get_usage_data( ) month_delta = relativedelta(now, starts_at).months - 1 month_delta += relativedelta(now, starts_at).years * 12 - period_starts_at = relativedelta(months=month_delta) + starts_at - period_ends_at = relativedelta(months=month_delta + 1) + starts_at - date_start = f"-{(now - period_starts_at).days}d" - date_stop = f"-{(now - period_ends_at).days}d" + date_start = relativedelta(months=month_delta) + starts_at + date_stop = relativedelta(months=month_delta + 1) + starts_at case constants.NINETY_DAY_PERIOD: - period_starts_at = now - relativedelta(days=90) - period_ends_at = now - date_start = "-90d" - date_stop = "now()" + date_start = now - relativedelta(days=90) + date_stop = now if settings.USE_POSTGRES_FOR_ANALYTICS: kwargs = { @@ -79,10 +72,10 @@ def get_usage_data( "project_id": project_id, } - if period_starts_at: - assert period_ends_at - kwargs["date_start"] = period_starts_at - kwargs["date_stop"] = period_ends_at + if date_start: + assert date_stop + kwargs["date_start"] = date_start + kwargs["date_stop"] = date_stop return get_usage_data_from_local_db(**kwargs) diff --git a/api/app_analytics/influxdb_wrapper.py b/api/app_analytics/influxdb_wrapper.py index 4a88c596338b..4a61ece6ee17 100644 --- a/api/app_analytics/influxdb_wrapper.py +++ b/api/app_analytics/influxdb_wrapper.py @@ -1,8 +1,10 @@ import logging import typing from collections import defaultdict +from datetime import datetime, timedelta from django.conf import settings +from django.utils import timezone from influxdb_client import 
InfluxDBClient, Point from influxdb_client.client.exceptions import InfluxDBError from influxdb_client.client.write_api import SYNCHRONOUS @@ -20,11 +22,6 @@ influx_org = settings.INFLUXDB_ORG read_bucket = settings.INFLUXDB_BUCKET + "_downsampled_15m" -range_bucket_mappings = { - "-24h": settings.INFLUXDB_BUCKET + "_downsampled_15m", - "-7d": settings.INFLUXDB_BUCKET + "_downsampled_15m", - "-30d": settings.INFLUXDB_BUCKET + "_downsampled_1h", -} retries = Retry(connect=3, read=3, redirect=3) # Set a timeout to prevent threads being potentially stuck open due to network weirdness influxdb_client = InfluxDBClient( @@ -43,6 +40,13 @@ ) +def get_range_bucket_mappings(date_start: datetime) -> str: + now = timezone.now() + if (now - date_start).days > 10: + return settings.INFLUXDB_BUCKET + "_downsampled_1h" + return settings.INFLUXDB_BUCKET + "_downsampled_15m" + + class InfluxDBWrapper: def __init__(self, name): self.name = name @@ -76,15 +80,22 @@ def write(self): @staticmethod def influx_query_manager( - date_start: str = "-30d", - date_stop: str = "now()", + date_start: datetime | None = None, + date_stop: datetime | None = None, drop_columns: typing.Tuple[str, ...] = DEFAULT_DROP_COLUMNS, filters: str = "|> filter(fn:(r) => r._measurement == 'api_call')", extra: str = "", bucket: str = read_bucket, ): + now = timezone.now() + if date_start is None: + date_start = now - timedelta(days=30) + + if date_stop is None: + date_stop = now + # Influx throws an error for an empty range, so just return a list. 
- if date_start == "-0d" and date_stop == "now()": + if date_start == date_stop: return [] query_api = influxdb_client.query_api() @@ -92,7 +103,7 @@ def influx_query_manager( query = ( f'from(bucket:"{bucket}")' - f" |> range(start: {date_start}, stop: {date_stop})" + f" |> range(start: {date_start.isoformat()}, stop: {date_stop.isoformat()})" f" {filters}" f" |> drop(columns: {drop_columns_input})" f"{extra}" @@ -108,7 +119,9 @@ def influx_query_manager( def get_events_for_organisation( - organisation_id: id, date_start: str = "-30d", date_stop: str = "now()" + organisation_id: id, + date_start: datetime | None = None, + date_stop: datetime | None = None, ) -> int: """ Query influx db for usage for given organisation id @@ -116,6 +129,13 @@ def get_events_for_organisation( :param organisation_id: an id of the organisation to get usage for :return: a number of request counts for organisation """ + now = timezone.now() + if date_start is None: + date_start = now - timedelta(days=30) + + if date_stop is None: + date_stop = now + result = InfluxDBWrapper.influx_query_manager( filters=build_filter_string( [ @@ -145,7 +165,9 @@ def get_events_for_organisation( def get_event_list_for_organisation( - organisation_id: int, date_start: str = "-30d", date_stop: str = "now()" + organisation_id: int, + date_start: datetime | None = None, + date_stop: datetime | None = None, ) -> tuple[dict[str, list[int]], list[str]]: """ Query influx db for usage for given organisation id @@ -154,6 +176,13 @@ def get_event_list_for_organisation( :return: a number of request counts for organisation in chart.js scheme """ + now = timezone.now() + if date_start is None: + date_start = now - timedelta(days=30) + + if date_stop is None: + date_stop = now + results = InfluxDBWrapper.influx_query_manager( filters=f'|> filter(fn:(r) => r._measurement == "api_call") \ |> filter(fn: (r) => r["organisation_id"] == "{organisation_id}")', @@ -163,15 +192,12 @@ def get_event_list_for_organisation( ) 
dataset = defaultdict(list) labels = [] + + date_difference = date_stop - date_start + required_records = date_difference.days + 1 for result in results: for record in result.records: dataset[record["resource"]].append(record["_value"]) - if date_stop == "now()": - required_records = abs(int(date_start[:-1])) + 1 - else: - required_records = ( - abs(int(date_start[:-1])) - abs(int(date_stop[:-1])) + 1 - ) if len(labels) != required_records: labels.append(record.values["_time"].strftime("%Y-%m-%d")) return dataset, labels @@ -181,8 +207,8 @@ def get_multiple_event_list_for_organisation( organisation_id: int, project_id: int = None, environment_id: int = None, - date_start: str = "-30d", - date_stop: str = "now()", + date_start: datetime | None = None, + date_stop: datetime | None = None, ) -> list[UsageData]: """ Query influx db for usage for given organisation id @@ -193,6 +219,13 @@ def get_multiple_event_list_for_organisation( :return: a number of requests for flags, traits, identities, environment-document """ + now = timezone.now() + if date_start is None: + date_start = now - timedelta(days=30) + + if date_stop is None: + date_stop = now + filters = [ 'r._measurement == "api_call"', f'r["organisation_id"] == "{organisation_id}"', @@ -227,9 +260,16 @@ def get_usage_data( organisation_id: int, project_id: int | None = None, environment_id: int | None = None, - date_start: str = "-30d", - date_stop: str = "now()", + date_start: datetime | None = None, + date_stop: datetime | None = None, ) -> list[UsageData]: + now = timezone.now() + if date_start is None: + date_start = now - timedelta(days=30) + + if date_stop is None: + date_stop = now + events_list = get_multiple_event_list_for_organisation( organisation_id=organisation_id, project_id=project_id, @@ -243,7 +283,7 @@ def get_usage_data( def get_multiple_event_list_for_feature( environment_id: int, feature_name: str, - date_start: str = "-30d", + date_start: datetime | None = None, aggregate_every: str = "24h", 
) -> list[dict]: """ @@ -264,11 +304,14 @@ def get_multiple_event_list_for_feature( :param environment_id: an id of the environment to get usage for :param feature_name: the name of the feature to get usage for - :param date_start: the influx time period to filter on, e.g. -30d, -7d, etc. + :param date_start: the influx datetime period to filter on :param aggregate_every: the influx time period to aggregate the data by, e.g. 24h :return: a list of dicts with feature and request count in a specific environment """ + now = timezone.now() + if date_start is None: + date_start = now - timedelta(days=30) results = InfluxDBWrapper.influx_query_manager( date_start=date_start, @@ -297,15 +340,20 @@ def get_multiple_event_list_for_feature( def get_feature_evaluation_data( feature_name: str, environment_id: int, period: str = "30d" ) -> typing.List[FeatureEvaluationData]: + assert period.endswith("d") + days = int(period[:-1]) + date_start = timezone.now() - timedelta(days=days) data = get_multiple_event_list_for_feature( feature_name=feature_name, environment_id=environment_id, - date_start=f"-{period}", + date_start=date_start, ) return FeatureEvaluationDataSchema(many=True).load(data) -def get_top_organisations(date_start: str, limit: str = ""): +def get_top_organisations( + date_start: datetime | None = None, limit: str = "" +) -> dict[int, int]: """ Query influx db top used organisations @@ -315,10 +363,14 @@ def get_top_organisations(date_start: str, limit: str = ""): :return: top organisations in descending order based on api calls. 
""" + now = timezone.now() + if date_start is None: + date_start = now - timedelta(days=30) + if limit: limit = f"|> limit(n:{limit})" - bucket = range_bucket_mappings[date_start] + bucket = get_range_bucket_mappings(date_start) results = InfluxDBWrapper.influx_query_manager( date_start=date_start, bucket=bucket, @@ -333,6 +385,7 @@ def get_top_organisations(date_start: str, limit: str = ""): ) dataset = {} + for result in results: for record in result.records: try: @@ -347,7 +400,9 @@ def get_top_organisations(date_start: str, limit: str = ""): return dataset -def get_current_api_usage(organisation_id: int, date_start: str) -> int: +def get_current_api_usage( + organisation_id: int, date_start: datetime | None = None +) -> int: """ Query influx db for api usage @@ -356,6 +411,9 @@ def get_current_api_usage(organisation_id: int, date_start: str) -> int: :return: number of current api calls """ + now = timezone.now() + if date_start is None: + date_start = now - timedelta(days=30) bucket = read_bucket results = InfluxDBWrapper.influx_query_manager( diff --git a/api/features/views.py b/api/features/views.py index ec0aad480f73..b276bd8f4b98 100644 --- a/api/features/views.py +++ b/api/features/views.py @@ -1,5 +1,6 @@ import logging import typing +from datetime import timedelta from functools import reduce from app_analytics.analytics_db_service import get_feature_evaluation_data @@ -9,6 +10,7 @@ from django.conf import settings from django.core.cache import caches from django.db.models import Max, Q, QuerySet +from django.utils import timezone from django.utils.decorators import method_decorator from django.views.decorators.cache import cache_page from drf_yasg import openapi @@ -352,8 +354,15 @@ def get_influx_data(self, request, pk, project_pk): query_serializer = GetInfluxDataQuerySerializer(data=request.query_params) query_serializer.is_valid(raise_exception=True) + period = query_serializer.data["period"] + now = timezone.now() + if period.endswith("h"): + 
date_start = now - timedelta(hours=int(period[:-1])) + elif period.endswith("d"): + date_start = now - timedelta(days=int(period[:-1])) + else: + raise ValidationError("Malformed period supplied") - date_start = f"-{query_serializer.data['period']}" events_list = get_multiple_event_list_for_feature( feature_name=feature.name, date_start=date_start, diff --git a/api/organisations/subscription_info_cache.py b/api/organisations/subscription_info_cache.py index 9fc0819b1d84..a5ec5a017e94 100644 --- a/api/organisations/subscription_info_cache.py +++ b/api/organisations/subscription_info_cache.py @@ -1,7 +1,9 @@ import typing +from datetime import timedelta from app_analytics.influxdb_wrapper import get_top_organisations from django.conf import settings +from django.utils import timezone from .chargebee import get_subscription_metadata_from_id from .models import Organisation, OrganisationSubscriptionInformationCache @@ -70,9 +72,19 @@ def _update_caches_with_influx_data( if not settings.INFLUXDB_TOKEN: return - for date_start, limit in (("-30d", ""), ("-7d", ""), ("-24h", "100")): - key = f"api_calls_{date_start[1:]}" + for _date_start, limit in (("-30d", ""), ("-7d", ""), ("-24h", "100")): + key = f"api_calls_{_date_start[1:]}" + + now = timezone.now() + if _date_start.endswith("d"): + date_start = now - timedelta(days=int(_date_start[1:-1])) + elif _date_start.endswith("h"): + date_start = now - timedelta(hours=int(_date_start[1:-1])) + else: + assert False, "Expecting either days (d) or hours (h)" # pragma: no cover + org_calls = get_top_organisations(date_start, limit) + for org_id, calls in org_calls.items(): subscription_info_cache = organisation_info_cache_dict.get(org_id) if not subscription_info_cache: diff --git a/api/organisations/task_helpers.py b/api/organisations/task_helpers.py index 509811cbf622..f9dc52dc22bc 100644 --- a/api/organisations/task_helpers.py +++ b/api/organisations/task_helpers.py @@ -118,10 +118,9 @@ def 
handle_api_usage_notification_for_organisation(organisation: Organisation) - month_delta = relativedelta(now, billing_starts_at).months period_starts_at = relativedelta(months=month_delta) + billing_starts_at - days = relativedelta(now, period_starts_at).days allowed_api_calls = subscription_cache.allowed_30d_api_calls - api_usage = get_current_api_usage(organisation.id, f"-{days}d") + api_usage = get_current_api_usage(organisation.id, period_starts_at) # For some reason the allowed API calls is set to 0 so default to the max free plan. allowed_api_calls = allowed_api_calls or MAX_API_CALLS_IN_FREE_PLAN diff --git a/api/organisations/tasks.py b/api/organisations/tasks.py index 0ebdc1f5d6ca..f03a9fba3e81 100644 --- a/api/organisations/tasks.py +++ b/api/organisations/tasks.py @@ -192,7 +192,7 @@ def charge_for_api_call_count_overages(): continue subscription_cache = organisation.subscription_information_cache - api_usage = get_current_api_usage(organisation.id, "30d") + api_usage = get_current_api_usage(organisation.id) # Grace period for organisations < 200% of usage. if api_usage / subscription_cache.allowed_30d_api_calls < 2.0: @@ -306,7 +306,7 @@ def restrict_use_due_to_api_limit_grace_period_over() -> None: OrganisationBreachedGracePeriod.objects.get_or_create(organisation=organisation) subscription_cache = organisation.subscription_information_cache - api_usage = get_current_api_usage(organisation.id, "30d") + api_usage = get_current_api_usage(organisation.id) if api_usage / subscription_cache.allowed_30d_api_calls < 1.0: logger.info( f"API use for organisation {organisation.id} has fallen to below limit, so not restricting use." 
diff --git a/api/sales_dashboard/views.py b/api/sales_dashboard/views.py index 27861b5b7022..53513ab87d8b 100644 --- a/api/sales_dashboard/views.py +++ b/api/sales_dashboard/views.py @@ -1,4 +1,5 @@ import json +from datetime import timedelta import re2 as re from app_analytics.influxdb_wrapper import ( @@ -19,6 +20,7 @@ from django.shortcuts import get_object_or_404 from django.template import loader from django.urls import reverse, reverse_lazy +from django.utils import timezone from django.utils.safestring import mark_safe from django.views.generic import ListView from django.views.generic.edit import FormView @@ -163,10 +165,11 @@ def organisation_info(request: HttpRequest, organisation_id: int) -> HttpRespons date_range = request.GET.get("date_range", "180d") context["date_range"] = date_range - date_start = f"-{date_range}" - date_stop = "now()" + assert date_range.endswith("d") + now = timezone.now() + date_start = now - timedelta(days=int(date_range[:-1])) event_list, labels = get_event_list_for_organisation( - organisation_id, date_start, date_stop + organisation_id, date_start ) context["event_list"] = event_list context["traits"] = mark_safe(json.dumps(event_list["traits"])) @@ -176,13 +179,16 @@ def organisation_info(request: HttpRequest, organisation_id: int) -> HttpRespons json.dumps(event_list["environment-document"]) ) context["labels"] = mark_safe(json.dumps(labels)) + + date_starts = {} + date_starts["24h"] = now - timedelta(days=1) + date_starts["7d"] = now - timedelta(days=7) + date_starts["30d"] = now - timedelta(days=30) context["api_calls"] = { # TODO: this could probably be reduced to a single influx request # rather than 3 - range_: get_events_for_organisation( - organisation_id, date_start=f"-{range_}" - ) - for range_ in ("24h", "7d", "30d") + period: get_events_for_organisation(organisation_id, date_start=_date_start) + for period, _date_start in date_starts.items() } return HttpResponse(template.render(context, request)) diff --git 
a/api/tests/unit/app_analytics/test_unit_app_analytics_influxdb_wrapper.py b/api/tests/unit/app_analytics/test_unit_app_analytics_influxdb_wrapper.py index 85f8607d7947..d82b6f162978 100644 --- a/api/tests/unit/app_analytics/test_unit_app_analytics_influxdb_wrapper.py +++ b/api/tests/unit/app_analytics/test_unit_app_analytics_influxdb_wrapper.py @@ -1,4 +1,4 @@ -from datetime import date, timedelta +from datetime import date, datetime, timedelta from typing import Generator, Type from unittest import mock from unittest.mock import MagicMock @@ -9,11 +9,13 @@ from app_analytics.influxdb_wrapper import ( InfluxDBWrapper, build_filter_string, + get_current_api_usage, get_event_list_for_organisation, get_events_for_organisation, get_feature_evaluation_data, get_multiple_event_list_for_feature, get_multiple_event_list_for_organisation, + get_range_bucket_mappings, get_top_organisations, get_usage_data, ) @@ -21,6 +23,7 @@ from django.utils import timezone from influxdb_client.client.exceptions import InfluxDBError from influxdb_client.rest import ApiException +from pytest_django.fixtures import SettingsWrapper from pytest_mock import MockerFixture from urllib3.exceptions import HTTPError @@ -84,10 +87,12 @@ def test_write_handles_errors( # but the exception was not raised +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") def test_influx_db_query_when_get_events_then_query_api_called(monkeypatch): expected_query = ( ( - f'from(bucket:"{read_bucket}") |> range(start: -30d, stop: now()) ' + f'from(bucket:"{read_bucket}") |> range(start: 2022-12-20T09:09:47.325132+00:00, ' + "stop: 2023-01-19T09:09:47.325132+00:00) " f'|> filter(fn:(r) => r._measurement == "api_call") ' f'|> filter(fn: (r) => r["_field"] == "request_count") ' f'|> filter(fn: (r) => r["organisation_id"] == "{org_id}") ' @@ -117,10 +122,11 @@ def test_influx_db_query_when_get_events_then_query_api_called(monkeypatch): assert call[2]["query"].replace(" ", "").replace("\n", "") == expected_query 
+@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") def test_influx_db_query_when_get_events_list_then_query_api_called(monkeypatch): query = ( f'from(bucket:"{read_bucket}") ' - f"|> range(start: -30d, stop: now()) " + f"|> range(start: 2022-12-20T09:09:47.325132+00:00, stop: 2023-01-19T09:09:47.325132+00:00) " f'|> filter(fn:(r) => r._measurement == "api_call") ' f'|> filter(fn: (r) => r["organisation_id"] == "{org_id}") ' f'|> drop(columns: ["organisation", "organisation_id", "type", "project", ' @@ -180,6 +186,7 @@ def test_influx_db_query_when_get_events_list_then_query_api_called(monkeypatch) ), ), ) +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") def test_influx_db_query_when_get_multiple_events_for_organisation_then_query_api_called( monkeypatch, project_id, environment_id, expected_filters ): @@ -187,7 +194,7 @@ def test_influx_db_query_when_get_multiple_events_for_organisation_then_query_ap expected_query = ( ( f'from(bucket:"{read_bucket}") ' - "|> range(start: -30d, stop: now()) " + "|> range(start: 2022-12-20T09:09:47.325132+00:00, stop: 2023-01-19T09:09:47.325132+00:00) " f"{build_filter_string(expected_filters)}" '|> drop(columns: ["organisation", "organisation_id", "type", "project", ' '"project_id", "environment", "environment_id", "host"]) ' @@ -217,12 +224,13 @@ def test_influx_db_query_when_get_multiple_events_for_organisation_then_query_ap assert call[2]["query"].replace(" ", "").replace("\n", "") == expected_query +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") def test_influx_db_query_when_get_multiple_events_for_feature_then_query_api_called( monkeypatch, ): query = ( f'from(bucket:"{read_bucket}") ' - "|> range(start: -30d, stop: now()) " + "|> range(start: 2022-12-20T09:09:47.325132+00:00, stop: 2023-01-19T09:09:47.325132+00:00) " '|> filter(fn:(r) => r._measurement == "feature_evaluation") ' '|> filter(fn: (r) => r["_field"] == "request_count") ' f'|> filter(fn: (r) => r["environment_id"] == 
"{env_id}") ' @@ -248,6 +256,7 @@ def test_influx_db_query_when_get_multiple_events_for_feature_then_query_api_cal mock_query_api.query.assert_called_once_with(org=influx_org, query=query) +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") def test_get_usage_data(mocker): # Given influx_data = [ @@ -276,12 +285,14 @@ def test_get_usage_data(mocker): usage_data = get_usage_data(org_id) # Then + date_start = datetime.fromisoformat("2022-12-20T09:09:47.325132+00:00") + date_stop = datetime.fromisoformat("2023-01-19T09:09:47.325132+00:00") mocked_get_multiple_event_list_for_organisation.assert_called_once_with( organisation_id=org_id, environment_id=None, project_id=None, - date_start="-30d", - date_stop="now()", + date_start=date_start, + date_stop=date_stop, ) assert len(usage_data) == 2 @@ -299,6 +310,7 @@ def test_get_usage_data(mocker): assert usage_data[1].environment_document == 10 +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") def test_get_feature_evaluation_data(mocker): # Given influx_data = [ @@ -318,8 +330,9 @@ def test_get_feature_evaluation_data(mocker): ) # Then + date_start = datetime.fromisoformat("2022-12-20T09:09:47.325132+00:00") mocked_get_multiple_event_list_for_feature.assert_called_once_with( - feature_name=feature_name, environment_id=env_id, date_start="-30d" + feature_name=feature_name, environment_id=env_id, date_start=date_start ) assert len(feature_evaluation_data) == 2 @@ -331,17 +344,17 @@ def test_get_feature_evaluation_data(mocker): assert feature_evaluation_data[1].count == 200 -@pytest.mark.parametrize("date_stop", ["now()", "-5d"]) @pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") def test_get_event_list_for_organisation_with_date_stop_set_to_now_and_previously( - date_stop: str, mocker: MockerFixture, organisation: Organisation, ) -> None: # Given + now = timezone.now() one_day_ago = now - timedelta(days=1) two_days_ago = now - timedelta(days=2) + date_stop = now record_mock1 = mock.MagicMock() 
record_mock1.__getitem__.side_effect = lambda key: { @@ -377,6 +390,7 @@ def test_get_event_list_for_organisation_with_date_stop_set_to_now_and_previousl assert labels == ["2023-01-18", "2023-01-17"] +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") @pytest.mark.parametrize("limit", ["10", ""]) def test_get_top_organisations( limit: str, @@ -399,9 +413,11 @@ def test_get_top_organisations( ) influx_mock.return_value = [result] + now = timezone.now() + date_start = now - timedelta(days=30) # When - dataset = get_top_organisations(date_start="-30d", limit=limit) + dataset = get_top_organisations(date_start=date_start, limit=limit) # Then assert dataset == {123: 23, 456: 43} @@ -409,9 +425,10 @@ def test_get_top_organisations( influx_mock.assert_called_once() influx_query_call = influx_mock.call_args assert influx_query_call.kwargs["bucket"] == "test_bucket_downsampled_1h" - assert influx_query_call.kwargs["date_start"] == "-30d" + assert influx_query_call.kwargs["date_start"] == date_start +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") def test_get_top_organisations_value_error( mocker: MockerFixture, ) -> None: @@ -432,9 +449,11 @@ def test_get_top_organisations_value_error( ) influx_mock.return_value = [result] + now = timezone.now() + date_start = now - timedelta(days=30) # When - dataset = get_top_organisations(date_start="-30d") + dataset = get_top_organisations(date_start=date_start) # Then # The wrongly typed data does not stop the remaining data @@ -444,10 +463,90 @@ def test_get_top_organisations_value_error( def test_early_return_for_empty_range_for_influx_query_manager() -> None: # When + now = timezone.now() results = InfluxDBWrapper.influx_query_manager( - date_start="-0d", - date_stop="now()", + date_start=now, + date_stop=now, ) # Then assert results == [] + + +def test_get_range_bucket_mappings_when_less_than_10_days( + settings: SettingsWrapper, +) -> None: + # Given + two_days = timezone.now() - timedelta(days=2) + + # 
When + result = get_range_bucket_mappings(two_days) + + # Then + assert result == settings.INFLUXDB_BUCKET + "_downsampled_15m" + + +def test_get_range_bucket_mappings_when_more_than_10_days( + settings: SettingsWrapper, +) -> None: + # Given + twelve_days = timezone.now() - timedelta(days=12) + + # When + result = get_range_bucket_mappings(twelve_days) + + # Then + assert result == settings.INFLUXDB_BUCKET + "_downsampled_1h" + + +def test_influx_query_manager_when_date_start_is_set_to_none( + mocker: MockerFixture, +) -> None: + # Given + mock_client = mocker.patch("app_analytics.influxdb_wrapper.influxdb_client") + + # When + InfluxDBWrapper.influx_query_manager() + + # Then + mock_client.query_api.assert_called_once() + + +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") +def test_get_top_organisation_when_date_start_is_set_to_none( + mocker: MockerFixture, +) -> None: + # Given + influx_mock = mocker.patch( + "app_analytics.influxdb_wrapper.InfluxDBWrapper.influx_query_manager" + ) + now = timezone.now() + date_start = now - timedelta(days=30) + + # When + get_top_organisations() + + # Then + influx_query_call = influx_mock.call_args + assert influx_query_call.kwargs["bucket"] == "test_bucket_downsampled_1h" + assert influx_query_call.kwargs["date_start"] == date_start + + +def test_get_current_api_usage(mocker: MockerFixture) -> None: + # Given + influx_mock = mocker.patch( + "app_analytics.influxdb_wrapper.InfluxDBWrapper.influx_query_manager" + ) + record_mock = mock.MagicMock() + record_mock.values = {"organisation": "1-TestCorp"} + record_mock.get_value.return_value = 43 + + result = mock.MagicMock() + result.records = [record_mock] + influx_mock.return_value = [result] + + # When + result = get_current_api_usage(organisation_id=1) + + # Then + assert result == 43 diff --git a/api/tests/unit/app_analytics/test_unit_app_analytics_views.py b/api/tests/unit/app_analytics/test_unit_app_analytics_views.py index a9276fa868ea..15c374517714 100644 --- 
a/api/tests/unit/app_analytics/test_unit_app_analytics_views.py +++ b/api/tests/unit/app_analytics/test_unit_app_analytics_views.py @@ -168,8 +168,8 @@ def test_get_usage_data__current_billing_period( organisation_id=organisation.id, environment_id=None, project_id=None, - date_start="-28d", - date_stop="now()", + date_start=four_weeks_ago, + date_stop=now, ) @@ -195,6 +195,7 @@ def test_get_usage_data__previous_billing_period( now = timezone.now() week_from_now = now + timedelta(days=7) four_weeks_ago = now - timedelta(days=28) + target_start_at = now - timedelta(days=59) OrganisationSubscriptionInformationCache.objects.create( organisation=organisation, @@ -229,8 +230,8 @@ def test_get_usage_data__previous_billing_period( organisation_id=organisation.id, environment_id=None, project_id=None, - date_start="-59d", - date_stop="-28d", + date_start=target_start_at, + date_stop=four_weeks_ago, ) @@ -256,7 +257,7 @@ def test_get_usage_data__ninety_day_period( now = timezone.now() week_from_now = now + timedelta(days=7) four_weeks_ago = now - timedelta(days=28) - + ninety_days_ago = now - timedelta(days=90) OrganisationSubscriptionInformationCache.objects.create( organisation=organisation, current_billing_term_starts_at=four_weeks_ago, @@ -290,8 +291,8 @@ def test_get_usage_data__ninety_day_period( organisation_id=organisation.id, environment_id=None, project_id=None, - date_start="-90d", - date_stop="now()", + date_start=ninety_days_ago, + date_stop=now, ) diff --git a/api/tests/unit/features/test_unit_features_views.py b/api/tests/unit/features/test_unit_features_views.py index c0496b26f497..0226d4b1a615 100644 --- a/api/tests/unit/features/test_unit_features_views.py +++ b/api/tests/unit/features/test_unit_features_views.py @@ -425,6 +425,7 @@ def test_put_feature_does_not_update_feature_states( assert all(fs.enabled is False for fs in feature.feature_states.all()) +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") 
@mock.patch("features.views.get_multiple_event_list_for_feature") def test_get_project_features_influx_data( mock_get_event_list: mock.MagicMock, @@ -446,6 +447,7 @@ def test_get_project_features_influx_data( "datetime": datetime(2021, 2, 26, 12, 0, 0, tzinfo=pytz.UTC), } ] + one_day_ago = timezone.now() - timedelta(days=1) # When response = admin_client_new.get(url) @@ -455,11 +457,70 @@ def test_get_project_features_influx_data( mock_get_event_list.assert_called_once_with( feature_name=feature.name, environment_id=str(environment.id), # provided as a GET param - date_start="-24h", # this is the default but can be provided as a GET param + date_start=one_day_ago, # this is the default but can be provided as a GET param aggregate_every="24h", # this is the default but can be provided as a GET param ) +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") +@mock.patch("features.views.get_multiple_event_list_for_feature") +def test_get_project_features_influx_data_with_two_weeks_period( + mock_get_event_list: mock.MagicMock, + feature: Feature, + project: Project, + environment: Environment, + admin_client_new: APIClient, +) -> None: + # Given + base_url = reverse( + "api-v1:projects:project-features-get-influx-data", + args=[project.id, feature.id], + ) + url = f"{base_url}?environment_id={environment.id}&period=14d" + date_start = timezone.now() - timedelta(days=14) + + mock_get_event_list.return_value = [ + { + feature.name: 1, + "datetime": datetime(2021, 2, 26, 12, 0, 0, tzinfo=pytz.UTC), + } + ] + + # When + response = admin_client_new.get(url) + + # Then + assert response.status_code == status.HTTP_200_OK + mock_get_event_list.assert_called_once_with( + feature_name=feature.name, + environment_id=str(environment.id), + date_start=date_start, + aggregate_every="24h", + ) + + +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") +def test_get_project_features_influx_data_with_malformed_period( + feature: Feature, + project: Project, + environment: 
Environment, + admin_client_new: APIClient, +) -> None: + # Given + base_url = reverse( + "api-v1:projects:project-features-get-influx-data", + args=[project.id, feature.id], + ) + url = f"{base_url}?environment_id={environment.id}&period=baddata" + + # When + response = admin_client_new.get(url) + + # Then + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert response.data[0] == "Malformed period supplied" + + def test_regular_user_cannot_create_mv_options_when_creating_feature( staff_client: APIClient, with_project_permissions: WithProjectPermissionsCallable, diff --git a/api/tests/unit/organisations/test_unit_organisations_subscription_info_cache.py b/api/tests/unit/organisations/test_unit_organisations_subscription_info_cache.py index bb59c1176b12..12523040d262 100644 --- a/api/tests/unit/organisations/test_unit_organisations_subscription_info_cache.py +++ b/api/tests/unit/organisations/test_unit_organisations_subscription_info_cache.py @@ -1,3 +1,7 @@ +from datetime import timedelta + +import pytest +from django.utils import timezone from task_processor.task_run_method import TaskRunMethod from organisations.chargebee.metadata import ChargebeeObjMetadata @@ -5,18 +9,27 @@ from organisations.subscriptions.constants import SubscriptionCacheEntity +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") def test_update_caches(mocker, organisation, chargebee_subscription, settings): # Given settings.CHARGEBEE_API_KEY = "api-key" settings.INFLUXDB_TOKEN = "token" settings.TASK_RUN_METHOD = TaskRunMethod.SYNCHRONOUSLY - organisation_usage = {"24h": 25123, "7d": 182957, "30d": 804564} + now = timezone.now() + day_1 = now - timedelta(days=1) + day_7 = now - timedelta(days=7) + day_30 = now - timedelta(days=30) + organisation_usage = { + day_1: 25123, + day_7: 182957, + day_30: 804564, + } mocked_get_top_organisations = mocker.patch( "organisations.subscription_info_cache.get_top_organisations" ) mocked_get_top_organisations.side_effect = lambda t, 
_: { - organisation.id: organisation_usage.get(f"{t[1:]}") + organisation.id: organisation_usage[t] } chargebee_metadata = ChargebeeObjMetadata(seats=15, api_calls=1000000) @@ -35,15 +48,15 @@ def test_update_caches(mocker, organisation, chargebee_subscription, settings): # Then assert ( organisation.subscription_information_cache.api_calls_24h - == organisation_usage["24h"] + == organisation_usage[day_1] ) assert ( organisation.subscription_information_cache.api_calls_7d - == organisation_usage["7d"] + == organisation_usage[day_7] ) assert ( organisation.subscription_information_cache.api_calls_30d - == organisation_usage["30d"] + == organisation_usage[day_30] ) assert ( organisation.subscription_information_cache.allowed_seats @@ -60,7 +73,7 @@ def test_update_caches(mocker, organisation, chargebee_subscription, settings): assert mocked_get_top_organisations.call_count == 3 assert [call[0] for call in mocked_get_top_organisations.call_args_list] == [ - ("-30d", ""), - ("-7d", ""), - ("-24h", "100"), + (day_30, ""), + (day_7, ""), + (day_1, "100"), ] diff --git a/api/tests/unit/organisations/test_unit_organisations_tasks.py b/api/tests/unit/organisations/test_unit_organisations_tasks.py index 62708193a94a..f4038c8c1a3b 100644 --- a/api/tests/unit/organisations/test_unit_organisations_tasks.py +++ b/api/tests/unit/organisations/test_unit_organisations_tasks.py @@ -366,7 +366,7 @@ def test_handle_api_usage_notifications_below_100( handle_api_usage_notifications() # Then - mock_api_usage.assert_called_once_with(organisation.id, "-14d") + mock_api_usage.assert_called_once_with(organisation.id, now - timedelta(days=14)) assert len(mailoutbox) == 1 email = mailoutbox[0] @@ -452,7 +452,7 @@ def test_handle_api_usage_notifications_below_api_usage_alert_thresholds( handle_api_usage_notifications() # Then - mock_api_usage.assert_called_once_with(organisation.id, "-14d") + mock_api_usage.assert_called_once_with(organisation.id, now - timedelta(days=14)) assert 
len(mailoutbox) == 0 @@ -502,7 +502,7 @@ def test_handle_api_usage_notifications_above_100( handle_api_usage_notifications() # Then - mock_api_usage.assert_called_once_with(organisation.id, "-14d") + mock_api_usage.assert_called_once_with(organisation.id, now - timedelta(days=14)) assert len(mailoutbox) == 1 email = mailoutbox[0] @@ -612,6 +612,7 @@ def test_handle_api_usage_notifications_for_free_accounts( mailoutbox: list[EmailMultiAlternatives], ) -> None: # Given + now = timezone.now() assert organisation.is_paid is False assert organisation.subscription.is_free_plan is True assert organisation.subscription.max_api_calls == MAX_API_CALLS_IN_FREE_PLAN @@ -634,7 +635,7 @@ def test_handle_api_usage_notifications_for_free_accounts( handle_api_usage_notifications() # Then - mock_api_usage.assert_called_once_with(organisation.id, "-30d") + mock_api_usage.assert_called_once_with(organisation.id, now - timedelta(days=30)) assert len(mailoutbox) == 1 email = mailoutbox[0] diff --git a/api/tests/unit/organisations/test_unit_organisations_views.py b/api/tests/unit/organisations/test_unit_organisations_views.py index de129f3c9fc1..9ce266788f35 100644 --- a/api/tests/unit/organisations/test_unit_organisations_views.py +++ b/api/tests/unit/organisations/test_unit_organisations_views.py @@ -351,6 +351,7 @@ def test_user_can_get_projects_for_an_organisation( assert response.data[0]["name"] == project.name +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") @mock.patch("app_analytics.influxdb_wrapper.influxdb_client") def test_should_get_usage_for_organisation( mock_influxdb_client: MagicMock, @@ -365,7 +366,7 @@ def test_should_get_usage_for_organisation( expected_query = ( ( f'from(bucket:"{read_bucket}") ' - "|> range(start: -30d, stop: now()) " + "|> range(start: 2022-12-20T09:09:47.325132+00:00, stop: 2023-01-19T09:09:47.325132+00:00) " '|> filter(fn:(r) => r._measurement == "api_call") ' '|> filter(fn: (r) => r["_field"] == "request_count") ' f'|> filter(fn: (r) 
=> r["organisation_id"] == "{organisation.id}") ' diff --git a/api/tests/unit/sales_dashboard/test_unit_sales_dashboard_views.py b/api/tests/unit/sales_dashboard/test_unit_sales_dashboard_views.py index 1ab1dc97fcf0..2cbeacd723cf 100644 --- a/api/tests/unit/sales_dashboard/test_unit_sales_dashboard_views.py +++ b/api/tests/unit/sales_dashboard/test_unit_sales_dashboard_views.py @@ -1,6 +1,9 @@ +from datetime import timedelta + import pytest from django.test import Client, RequestFactory from django.urls import reverse +from django.utils import timezone from pytest_django.fixtures import SettingsWrapper from pytest_mock import MockerFixture from rest_framework.test import APIClient @@ -39,6 +42,7 @@ def test_organisation_subscription_get_api_call_overage( assert result.overage == expected_overage +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") def test_get_organisation_info__get_event_list_for_organisation( organisation: Organisation, superuser_client: APIClient, @@ -65,7 +69,8 @@ def test_get_organisation_info__get_event_list_for_organisation( # Then assert "label1" in str(response.content) assert "label2" in str(response.content) - event_list_mock.assert_called_once_with(organisation.id, "-180d", "now()") + date_start = timezone.now() - timedelta(days=180) + event_list_mock.assert_called_once_with(organisation.id, date_start) def test_list_organisations_search_by_name( From a3368fe44e13770132ffbaa6bfd300869d67970a Mon Sep 17 00:00:00 2001 From: Flagsmith Bot <65724737+flagsmithdev@users.noreply.github.com> Date: Wed, 7 Aug 2024 17:49:46 +0100 Subject: [PATCH 097/247] chore(main): release 2.134.1 (#4456) --- .release-please-manifest.json | 2 +- CHANGELOG.md | 9 +++++++++ version.txt | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index b900f8a162a6..8e5862421d27 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": 
"2.134.0" + ".": "2.134.1" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index affdbff3ed21..7c0e3712eb60 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,14 @@ # Changelog +## [2.134.1](https://github.com/Flagsmith/flagsmith/compare/v2.134.0...v2.134.1) (2024-08-07) + + +### Bug Fixes + +* don't allow bypassing `ALLOW_REGISTRATION_WITHOUT_INVITE` behaviour ([#4454](https://github.com/Flagsmith/flagsmith/issues/4454)) ([0e6deec](https://github.com/Flagsmith/flagsmith/commit/0e6deec6404c3e78edf5f36b36ea0f2dcef3dd06)) +* protect get environment document endpoint ([#4459](https://github.com/Flagsmith/flagsmith/issues/4459)) ([bee01c7](https://github.com/Flagsmith/flagsmith/commit/bee01c7f21cae19e7665ede3284f96989d33940f)) +* Set grace period to a singular event ([#4455](https://github.com/Flagsmith/flagsmith/issues/4455)) ([3225c47](https://github.com/Flagsmith/flagsmith/commit/3225c47043f9647a7426b7f05890bde29b681acc)) + ## [2.134.0](https://github.com/Flagsmith/flagsmith/compare/v2.133.1...v2.134.0) (2024-08-02) diff --git a/version.txt b/version.txt index 6be1c8e9ff6e..571e418a035f 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -2.134.0 +2.134.1 From 13ad7ef7e6613bdd640cdfca7ce99a892b3893be Mon Sep 17 00:00:00 2001 From: Matthew Elwell Date: Wed, 7 Aug 2024 18:08:36 +0100 Subject: [PATCH 098/247] fix: show correct SAML Frontend URL on edit (#4462) --- frontend/web/components/modals/CreateSAML.tsx | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/frontend/web/components/modals/CreateSAML.tsx b/frontend/web/components/modals/CreateSAML.tsx index b81e6d153334..2e83592ebc0b 100644 --- a/frontend/web/components/modals/CreateSAML.tsx +++ b/frontend/web/components/modals/CreateSAML.tsx @@ -69,7 +69,7 @@ const CreateSAML: FC = ({ organisationId, samlName }) => { return regularExpresion.test(name) } - const convetToXmlFile = (fileName: string, data: string) => { + const convertToXmlFile = (fileName: string, 
data: string) => { const blob = new Blob([data], { type: 'application/xml' }) const link = document.createElement('a') link.download = `${fileName}.xml` @@ -82,7 +82,7 @@ const CreateSAML: FC = ({ organisationId, samlName }) => { getSamlConfigurationMetadata(getStore(), { name: previousName }) .then((res) => { if (res.error) { - convetToXmlFile(previousName, res.error.data) + convertToXmlFile(previousName, res.error.data) } }) .finally(() => { @@ -92,7 +92,7 @@ const CreateSAML: FC = ({ organisationId, samlName }) => { const downloadIDPMetadata = () => { const name = data?.name || samlName - convetToXmlFile(`IDP metadata ${name!}`, data?.idp_metadata_xml || '') + convertToXmlFile(`IDP metadata ${name!}`, data?.idp_metadata_xml || '') } const Tab1 = ( @@ -123,7 +123,7 @@ const CreateSAML: FC = ({ organisationId, samlName }) => { data-test='frontend-url' tooltip='The base URL of the Flagsmith dashboard' tooltipPlace='right' - value={frontendUrl} + value={data?.frontend_url || frontendUrl} onChange={(event: React.ChangeEvent) => { setFrontendUrl(Utils.safeParseEventValue(event)) }} From 2dfbf99cdc8d8529aa487a4e471df46c0dbc6878 Mon Sep 17 00:00:00 2001 From: Gagan Date: Thu, 8 Aug 2024 14:26:19 +0530 Subject: [PATCH 099/247] feat(app_analytics): Add cache for feature evaluation (#4418) --- api/app/settings/common.py | 4 + api/app_analytics/cache.py | 51 +++++++++- api/app_analytics/views.py | 34 ++----- .../test_unit_app_analytics_cache.py | 96 ++++++++++++++++++- .../test_unit_app_analytics_views.py | 50 ++-------- 5 files changed, 164 insertions(+), 71 deletions(-) diff --git a/api/app/settings/common.py b/api/app/settings/common.py index b5705b60770a..9eec2514f3fd 100644 --- a/api/app/settings/common.py +++ b/api/app/settings/common.py @@ -331,6 +331,10 @@ USE_POSTGRES_FOR_ANALYTICS = env.bool("USE_POSTGRES_FOR_ANALYTICS", default=False) USE_CACHE_FOR_USAGE_DATA = env.bool("USE_CACHE_FOR_USAGE_DATA", default=False) +FEATURE_EVALUATION_CACHE_SECONDS = env.int( + 
"FEATURE_EVALUATION_CACHE_SECONDS", default=60 +) + ENABLE_API_USAGE_TRACKING = env.bool("ENABLE_API_USAGE_TRACKING", default=True) if ENABLE_API_USAGE_TRACKING: diff --git a/api/app_analytics/cache.py b/api/app_analytics/cache.py index aea7c84f7184..5e5da9e5b370 100644 --- a/api/app_analytics/cache.py +++ b/api/app_analytics/cache.py @@ -1,4 +1,8 @@ -from app_analytics.tasks import track_request +from collections import defaultdict + +from app_analytics.tasks import track_feature_evaluation, track_request +from app_analytics.track import track_feature_evaluation_influxdb +from django.conf import settings from django.utils import timezone CACHE_FLUSH_INTERVAL = 60 # seconds @@ -31,3 +35,48 @@ def track_request(self, resource: int, host: str, environment_key: str): self._cache[key] += 1 if (timezone.now() - self._last_flushed_at).seconds > CACHE_FLUSH_INTERVAL: self._flush() + + +class FeatureEvaluationCache: + def __init__(self): + self._cache = {} + self._last_flushed_at = timezone.now() + + def _flush(self): + evaluation_data = defaultdict(dict) + for (environment_id, feature_name), eval_count in self._cache.items(): + evaluation_data[environment_id][feature_name] = eval_count + + for environment_id, feature_evaluations in evaluation_data.items(): + if settings.USE_POSTGRES_FOR_ANALYTICS: + track_feature_evaluation.delay( + kwargs={ + "environment_id": environment_id, + "feature_evaluations": feature_evaluations, + } + ) + + elif settings.INFLUXDB_TOKEN: + track_feature_evaluation_influxdb.delay( + kwargs={ + "environment_id": environment_id, + "feature_evaluations": feature_evaluations, + } + ) + + self._cache = {} + self._last_flushed_at = timezone.now() + + def track_feature_evaluation( + self, environment_id: int, feature_name: str, evaluation_count: int + ): + key = (environment_id, feature_name) + if key not in self._cache: + self._cache[key] = evaluation_count + else: + self._cache[key] += evaluation_count + + if ( + timezone.now() - self._last_flushed_at 
+ ).seconds > settings.FEATURE_EVALUATION_CACHE_SECONDS: + self._flush() diff --git a/api/app_analytics/views.py b/api/app_analytics/views.py index d89b76e3fb97..1f71efc33a2a 100644 --- a/api/app_analytics/views.py +++ b/api/app_analytics/views.py @@ -4,14 +4,9 @@ get_total_events_count, get_usage_data, ) -from app_analytics.tasks import ( - track_feature_evaluation, - track_feature_evaluation_v2, -) -from app_analytics.track import ( - track_feature_evaluation_influxdb, - track_feature_evaluation_influxdb_v2, -) +from app_analytics.cache import FeatureEvaluationCache +from app_analytics.tasks import track_feature_evaluation_v2 +from app_analytics.track import track_feature_evaluation_influxdb_v2 from django.conf import settings from drf_yasg.utils import swagger_auto_schema from rest_framework import status @@ -38,6 +33,7 @@ ) logger = logging.getLogger(__name__) +feature_evaluation_cache = FeatureEvaluationCache() class SDKAnalyticsFlagsV2(CreateAPIView): @@ -141,26 +137,10 @@ def post(self, request, *args, **kwargs): content_type="application/json", status=status.HTTP_200_OK, ) - - if settings.USE_POSTGRES_FOR_ANALYTICS: - track_feature_evaluation.delay( - args=( - request.environment.id, - request.data, - ) + for feature_name, eval_count in self.request.data.items(): + feature_evaluation_cache.track_feature_evaluation( + request.environment.id, feature_name, eval_count ) - elif settings.INFLUXDB_TOKEN: - # Due to load issues on the task processor, we - # explicitly run this task in a separate thread. - # TODO: batch influx data to prevent large amounts - # of tasks. 
- track_feature_evaluation_influxdb.run_in_thread( - args=( - request.environment.id, - request.data, - ) - ) - return Response(status=status.HTTP_200_OK) def _is_data_valid(self) -> bool: diff --git a/api/tests/unit/app_analytics/test_unit_app_analytics_cache.py b/api/tests/unit/app_analytics/test_unit_app_analytics_cache.py index de5f9114d589..e6e6cde9b042 100644 --- a/api/tests/unit/app_analytics/test_unit_app_analytics_cache.py +++ b/api/tests/unit/app_analytics/test_unit_app_analytics_cache.py @@ -1,7 +1,12 @@ -from app_analytics.cache import CACHE_FLUSH_INTERVAL, APIUsageCache +from app_analytics.cache import ( + CACHE_FLUSH_INTERVAL, + APIUsageCache, + FeatureEvaluationCache, +) from app_analytics.models import Resource from django.utils import timezone from freezegun import freeze_time +from pytest_django.fixtures import SettingsWrapper from pytest_mock import MockerFixture @@ -71,3 +76,92 @@ def test_api_usage_cache(mocker: MockerFixture) -> None: # finally, make sure track_request task was not called assert not mocked_track_request_task.called + + +def test_feature_evaluation_cache( + mocker: MockerFixture, + settings: SettingsWrapper, +): + # Given + settings.FEATURE_EVALUATION_CACHE_SECONDS = 60 + settings.USE_POSTGRES_FOR_ANALYTICS = False + settings.INFLUXDB_TOKEN = "token" + + mocked_track_evaluation_task = mocker.patch( + "app_analytics.cache.track_feature_evaluation" + ) + mocked_track_feature_evaluation_influxdb_task = mocker.patch( + "app_analytics.cache.track_feature_evaluation_influxdb" + ) + environment_1_id = 1 + environment_2_id = 2 + feature_1_name = "feature_1_name" + feature_2_name = "feature_2_name" + + cache = FeatureEvaluationCache() + now = timezone.now() + + with freeze_time(now) as frozen_time: + # Track some feature evaluations + for _ in range(10): + cache.track_feature_evaluation(environment_1_id, feature_1_name, 1) + cache.track_feature_evaluation(environment_1_id, feature_2_name, 1) + 
cache.track_feature_evaluation(environment_2_id, feature_2_name, 1) + + # Make sure the internal tasks were not called + assert not mocked_track_evaluation_task.delay.called + assert not mocked_track_feature_evaluation_influxdb_task.delay.called + + # Now, let's move the time forward + frozen_time.tick(settings.FEATURE_EVALUATION_CACHE_SECONDS + 1) + + # track another evaluation(to trigger cache flush) + cache.track_feature_evaluation(environment_1_id, feature_1_name, 1) + + # Then + mocked_track_feature_evaluation_influxdb_task.delay.assert_has_calls( + [ + mocker.call( + kwargs={ + "environment_id": environment_1_id, + "feature_evaluations": { + feature_1_name: 11, + feature_2_name: 10, + }, + }, + ), + mocker.call( + kwargs={ + "environment_id": environment_2_id, + "feature_evaluations": {feature_2_name: 10}, + }, + ), + ] + ) + # task responsible for tracking evaluation using postgres was not called + assert not mocked_track_evaluation_task.delay.called + + # Next, let's enable postgres tracking + settings.USE_POSTGRES_FOR_ANALYTICS = True + + # rest the mock + mocked_track_feature_evaluation_influxdb_task.reset_mock() + + # Track another evaluation + cache.track_feature_evaluation(environment_1_id, feature_1_name, 1) + + # move time forward again + frozen_time.tick(settings.FEATURE_EVALUATION_CACHE_SECONDS + 1) + + # track another one(to trigger cache flush) + cache.track_feature_evaluation(environment_1_id, feature_1_name, 1) + + # Assert that the call was made with only the data tracked after the flush interval. 
+ mocked_track_evaluation_task.delay.assert_called_once_with( + kwargs={ + "environment_id": environment_1_id, + "feature_evaluations": {feature_1_name: 2}, + } + ) + # and the task for influx was not called + assert not mocked_track_feature_evaluation_influxdb_task.delay.called diff --git a/api/tests/unit/app_analytics/test_unit_app_analytics_views.py b/api/tests/unit/app_analytics/test_unit_app_analytics_views.py index 15c374517714..371f54bf329a 100644 --- a/api/tests/unit/app_analytics/test_unit_app_analytics_views.py +++ b/api/tests/unit/app_analytics/test_unit_app_analytics_views.py @@ -36,8 +36,8 @@ def test_sdk_analytics_does_not_allow_bad_data(mocker, settings, environment): view = SDKAnalyticsFlags(request=request) - mocked_track_feature_eval = mocker.patch( - "app_analytics.views.track_feature_evaluation_influxdb" + mocked_feature_eval_cache = mocker.patch( + "app_analytics.views.feature_evaluation_cache" ) # When @@ -45,34 +45,7 @@ def test_sdk_analytics_does_not_allow_bad_data(mocker, settings, environment): # Then assert response.status_code == status.HTTP_200_OK - mocked_track_feature_eval.assert_not_called() - - -def test_sdk_analytics_allows_valid_data(mocker, settings, environment, feature): - # Given - settings.INFLUXDB_TOKEN = "some-token" - - data = {feature.name: 12} - request = mocker.MagicMock( - data=data, - environment=environment, - query_params={}, - ) - - view = SDKAnalyticsFlags(request=request) - - mocked_track_feature_eval = mocker.patch( - "app_analytics.views.track_feature_evaluation_influxdb" - ) - - # When - response = view.post(request) - - # Then - assert response.status_code == status.HTTP_200_OK - mocked_track_feature_eval.run_in_thread.assert_called_once_with( - args=(environment.id, data) - ) + mocked_feature_eval_cache.track_feature_evaluation.assert_not_called() def test_get_usage_data(mocker, admin_client, organisation): @@ -433,24 +406,20 @@ def test_set_sdk_analytics_flags_without_identifier( assert 
feature_evaluation_raw.evaluation_count is feature_request_count -def test_set_sdk_analytics_flags_v1_to_influxdb( +def test_sdk_analytics_flags_v1( api_client: APIClient, environment: Environment, feature: Feature, - identity: Identity, - settings: SettingsWrapper, mocker: MockerFixture, ) -> None: # Given - settings.INFLUXDB_TOKEN = "some-token" - url = reverse("api-v1:analytics-flags") api_client.credentials(HTTP_X_ENVIRONMENT_KEY=environment.api_key) feature_request_count = 2 data = {feature.name: feature_request_count} - mocked_track_feature_eval = mocker.patch( - "app_analytics.views.track_feature_evaluation_influxdb" + mocked_feature_evaluation_cache = mocker.patch( + "app_analytics.views.feature_evaluation_cache" ) # When @@ -460,9 +429,6 @@ def test_set_sdk_analytics_flags_v1_to_influxdb( # Then assert response.status_code == status.HTTP_200_OK - mocked_track_feature_eval.run_in_thread.assert_called_once_with( - args=( - environment.id, - data, - ) + mocked_feature_evaluation_cache.track_feature_evaluation.assert_called_once_with( + environment.id, feature.name, feature_request_count ) From 2ab73edc7352bec8324eb808ba70d6508fe5eed6 Mon Sep 17 00:00:00 2001 From: Kim Gustyr Date: Thu, 8 Aug 2024 17:52:20 +0100 Subject: [PATCH 100/247] fix: Identity overrides are not deleted when deleting Edge identities (#4460) --- api/edge_api/identities/models.py | 57 +++++++++++++------ api/edge_api/identities/views.py | 8 ++- .../edge_api/identities/conftest.py | 32 +++++++++++ .../identities/test_edge_identity_viewset.py | 42 +++++++++----- 4 files changed, 107 insertions(+), 32 deletions(-) diff --git a/api/edge_api/identities/models.py b/api/edge_api/identities/models.py index d67bcb5831c8..1e062ecd005e 100644 --- a/api/edge_api/identities/models.py +++ b/api/edge_api/identities/models.py @@ -167,15 +167,52 @@ def remove_feature_override(self, feature_state: FeatureStateModel) -> None: def save(self, user: FFAdminUser = None, master_api_key: MasterAPIKey = None): 
self.dynamo_wrapper.put_item(self.to_document()) - changes = self._get_changes() - if changes["feature_overrides"]: + changeset = self._get_changes() + self._update_feature_overrides( + changeset=changeset, + user=user, + master_api_key=master_api_key, + ) + self._reset_initial_state() + + def delete( + self, user: FFAdminUser = None, master_api_key: MasterAPIKey = None + ) -> None: + self.dynamo_wrapper.delete_item(self._engine_identity_model.composite_key) + self._engine_identity_model.identity_features.clear() + changeset = self._get_changes() + self._update_feature_overrides( + changeset=changeset, + user=user, + master_api_key=master_api_key, + ) + self._reset_initial_state() + + def synchronise_features(self, valid_feature_names: typing.Collection[str]) -> None: + identity_feature_names = { + fs.feature.name for fs in self._engine_identity_model.identity_features + } + if not identity_feature_names.issubset(valid_feature_names): + self._engine_identity_model.prune_features(list(valid_feature_names)) + sync_identity_document_features.delay(args=(str(self.identity_uuid),)) + + def to_document(self) -> dict: + return map_engine_identity_to_identity_document(self._engine_identity_model) + + def _update_feature_overrides( + self, + changeset: IdentityChangeset, + user: FFAdminUser = None, + master_api_key: MasterAPIKey = None, + ) -> None: + if changeset["feature_overrides"]: # TODO: would this be simpler if we put a wrapper around FeatureStateModel instead? 
generate_audit_log_records.delay( kwargs={ "environment_api_key": self.environment_api_key, "identifier": self.identifier, "user_id": getattr(user, "id", None), - "changes": changes, + "changes": changeset, "identity_uuid": str(self.identity_uuid), "master_api_key_id": getattr(master_api_key, "id", None), } @@ -183,23 +220,11 @@ def save(self, user: FFAdminUser = None, master_api_key: MasterAPIKey = None): update_flagsmith_environments_v2_identity_overrides.delay( kwargs={ "environment_api_key": self.environment_api_key, - "changes": changes, + "changes": changeset, "identity_uuid": str(self.identity_uuid), "identifier": self.identifier, } ) - self._reset_initial_state() - - def synchronise_features(self, valid_feature_names: typing.Collection[str]) -> None: - identity_feature_names = { - fs.feature.name for fs in self._engine_identity_model.identity_features - } - if not identity_feature_names.issubset(valid_feature_names): - self._engine_identity_model.prune_features(list(valid_feature_names)) - sync_identity_document_features.delay(args=(str(self.identity_uuid),)) - - def to_document(self) -> dict: - return map_engine_identity_to_identity_document(self._engine_identity_model) def _get_changes(self) -> IdentityChangeset: previous_instance = self._initial_state diff --git a/api/edge_api/identities/views.py b/api/edge_api/identities/views.py index c8ac3c0e3d50..9065324457c5 100644 --- a/api/edge_api/identities/views.py +++ b/api/edge_api/identities/views.py @@ -160,7 +160,11 @@ def get_environment_from_request(self): ) def perform_destroy(self, instance): - EdgeIdentity.dynamo_wrapper.delete_item(instance["composite_key"]) + edge_identity = EdgeIdentity.from_identity_document(instance) + edge_identity.delete( + user=self.request.user, + master_api_key=getattr(self.request, "master_api_key", None), + ) @swagger_auto_schema( responses={200: EdgeIdentityTraitsSerializer(many=True)}, @@ -281,7 +285,7 @@ def list(self, request, *args, **kwargs): def 
perform_destroy(self, instance): self.identity.remove_feature_override(instance) self.identity.save( - user=self.request.user.id, + user=self.request.user, master_api_key=getattr(self.request, "master_api_key", None), ) diff --git a/api/tests/integration/edge_api/identities/conftest.py b/api/tests/integration/edge_api/identities/conftest.py index f22e02366dda..298bd6165ee6 100644 --- a/api/tests/integration/edge_api/identities/conftest.py +++ b/api/tests/integration/edge_api/identities/conftest.py @@ -1,4 +1,13 @@ +from typing import Any + import pytest +from boto3.dynamodb.conditions import Key +from flag_engine.identities.models import IdentityModel + +from edge_api.identities.models import EdgeIdentity +from environments.dynamodb.wrappers.environment_wrapper import ( + DynamoEnvironmentV2Wrapper, +) @pytest.fixture() @@ -6,3 +15,26 @@ def webhook_mock(mocker): return mocker.patch( "edge_api.identities.serializers.call_environment_webhook_for_feature_state_change" ) + + +@pytest.fixture() +def identity_overrides_v2( + dynamo_enabled_environment: int, + identity_document_without_fs: dict[str, Any], + identity_document: dict[str, Any], + dynamodb_wrapper_v2: DynamoEnvironmentV2Wrapper, +) -> list[str]: + edge_identity = EdgeIdentity.from_identity_document(identity_document_without_fs) + for feature_override in IdentityModel.model_validate( + identity_document + ).identity_features: + edge_identity.add_feature_override(feature_override) + edge_identity.save() + return [ + item["document_key"] + for item in dynamodb_wrapper_v2.query_get_all_items( + KeyConditionExpression=Key("environment_id").eq( + str(dynamo_enabled_environment) + ), + ) + ] diff --git a/api/tests/integration/edge_api/identities/test_edge_identity_viewset.py b/api/tests/integration/edge_api/identities/test_edge_identity_viewset.py index ad9a95385a93..ffdadfe5d6a1 100644 --- a/api/tests/integration/edge_api/identities/test_edge_identity_viewset.py +++ 
b/api/tests/integration/edge_api/identities/test_edge_identity_viewset.py @@ -1,11 +1,20 @@ import json import urllib +from typing import Any +from boto3.dynamodb.conditions import Key from django.urls import reverse from rest_framework import status from rest_framework.exceptions import NotFound +from rest_framework.test import APIClient from edge_api.identities.views import EdgeIdentityViewSet +from environments.dynamodb.wrappers.environment_wrapper import ( + DynamoEnvironmentV2Wrapper, +) +from environments.dynamodb.wrappers.identity_wrapper import ( + DynamoIdentityWrapper, +) def test_get_identities_returns_bad_request_if_dynamo_is_not_enabled( @@ -125,12 +134,15 @@ def test_create_identity_returns_400_if_identity_already_exists( def test_delete_identity( - admin_client, - dynamo_enabled_environment, - environment_api_key, - identity_document, - edge_identity_dynamo_wrapper_mock, -): + admin_client: APIClient, + dynamo_enabled_environment: int, + environment_api_key: str, + identity_document_without_fs: dict[str, Any], + identity_document: dict[str, Any], + dynamodb_identity_wrapper: DynamoIdentityWrapper, + dynamodb_wrapper_v2: DynamoEnvironmentV2Wrapper, + identity_overrides_v2: list[str], +) -> None: # Given identity_uuid = identity_document["identity_uuid"] url = reverse( @@ -138,20 +150,22 @@ def test_delete_identity( args=[environment_api_key, identity_uuid], ) - edge_identity_dynamo_wrapper_mock.get_item_from_uuid_or_404.return_value = ( - identity_document - ) # When response = admin_client.delete(url) # Then assert response.status_code == status.HTTP_204_NO_CONTENT - edge_identity_dynamo_wrapper_mock.get_item_from_uuid_or_404.assert_called_with( - identity_uuid - ) - edge_identity_dynamo_wrapper_mock.delete_item.assert_called_with( - identity_document["composite_key"] + assert not dynamodb_identity_wrapper.query_items( + IndexName="identity_uuid-index", + KeyConditionExpression=Key("identity_uuid").eq(identity_uuid), + )["Count"] + assert not list( + 
dynamodb_wrapper_v2.query_get_all_items( + KeyConditionExpression=Key("environment_id").eq( + str(dynamo_enabled_environment) + ) + ) ) From 0014a5b4312d1ee7d7dd7b914434f26408ee18b7 Mon Sep 17 00:00:00 2001 From: Kim Gustyr Date: Fri, 9 Aug 2024 12:13:28 +0100 Subject: [PATCH 101/247] feat: Support blank identifiers, assume transient (#4449) Co-authored-by: Matthew Elwell --- api/environments/identities/models.py | 10 +- api/environments/identities/serializers.py | 1 + api/environments/identities/traits/fields.py | 6 +- api/environments/identities/views.py | 1 + api/environments/sdk/serializers.py | 61 +++++---- api/environments/sdk/services.py | 120 ++++++++++++++++++ api/environments/sdk/types.py | 14 ++ .../identities/test_integration_identities.py | 87 ++++++++++++- .../identities/test_unit_identities_views.py | 85 ++++++++++++- 9 files changed, 347 insertions(+), 38 deletions(-) create mode 100644 api/environments/sdk/services.py create mode 100644 api/environments/sdk/types.py diff --git a/api/environments/identities/models.py b/api/environments/identities/models.py index 9fe99df13df9..60f36f8a4464 100644 --- a/api/environments/identities/models.py +++ b/api/environments/identities/models.py @@ -4,12 +4,12 @@ from django.db import models from django.db.models import Prefetch, Q from django.utils import timezone -from flag_engine.identities.traits.types import TraitValue from flag_engine.segments.evaluator import evaluate_identity_in_segment from environments.identities.managers import IdentityManager from environments.identities.traits.models import Trait from environments.models import Environment +from environments.sdk.types import SDKTraitData from features.models import FeatureState from features.multivariate.models import MultivariateFeatureStateValue from segments.models import Segment @@ -196,7 +196,11 @@ def get_all_user_traits(self): def __str__(self): return "Account %s" % self.identifier - def generate_traits(self, trait_data_items, persist=False): 
+ def generate_traits( + self, + trait_data_items: list[SDKTraitData], + persist: bool = False, + ) -> list[Trait]: """ Given a list of trait data items, validated by TraitSerializerFull, generate a list of TraitModel objects for the given identity. @@ -232,7 +236,7 @@ def generate_traits(self, trait_data_items, persist=False): def update_traits( self, - trait_data_items: list[dict[str, TraitValue]], + trait_data_items: list[SDKTraitData], ) -> list[Trait]: """ Given a list of traits, update any that already exist and create any new ones. diff --git a/api/environments/identities/serializers.py b/api/environments/identities/serializers.py index bd4b1ffaed1f..6c0d9df9229e 100644 --- a/api/environments/identities/serializers.py +++ b/api/environments/identities/serializers.py @@ -59,6 +59,7 @@ class _TraitSerializer(serializers.Serializer): help_text="Can be of type string, boolean, float or integer." ) + identifier = serializers.CharField() flags = serializers.ListField(child=SDKFeatureStateSerializer()) traits = serializers.ListSerializer(child=_TraitSerializer()) diff --git a/api/environments/identities/traits/fields.py b/api/environments/identities/traits/fields.py index fd5d3d335f9c..ceadc03b69c2 100644 --- a/api/environments/identities/traits/fields.py +++ b/api/environments/identities/traits/fields.py @@ -1,4 +1,5 @@ import logging +from typing import Any from rest_framework import serializers @@ -6,6 +7,7 @@ ACCEPTED_TRAIT_VALUE_TYPES, TRAIT_STRING_VALUE_MAX_LENGTH, ) +from environments.sdk.types import SDKTraitValueData from features.value_types import STRING logger = logging.getLogger(__name__) @@ -16,7 +18,7 @@ class TraitValueField(serializers.Field): Custom field to extract the type of the field on deserialization. 
""" - def to_internal_value(self, data): + def to_internal_value(self, data: Any) -> SDKTraitValueData: data_type = type(data).__name__ if data_type not in ACCEPTED_TRAIT_VALUE_TYPES: @@ -28,7 +30,7 @@ def to_internal_value(self, data): ) return {"type": data_type, "value": data} - def to_representation(self, value): + def to_representation(self, value: Any) -> Any: return_value = value.get("value") if isinstance(value, dict) else value if return_value is None: diff --git a/api/environments/identities/views.py b/api/environments/identities/views.py index f403b8001dc1..bbf9e2ce566d 100644 --- a/api/environments/identities/views.py +++ b/api/environments/identities/views.py @@ -309,6 +309,7 @@ def _get_all_feature_states_for_user_response( serializer = serializer_class( { "flags": all_feature_states, + "identifier": identity.identifier, "traits": identity.identity_traits.all(), }, context=self.get_serializer_context(), diff --git a/api/environments/sdk/serializers.py b/api/environments/sdk/serializers.py index 8ee5d254b94b..7029daa143b9 100644 --- a/api/environments/sdk/serializers.py +++ b/api/environments/sdk/serializers.py @@ -2,7 +2,6 @@ from collections import defaultdict from core.constants import BOOLEAN, FLOAT, INTEGER, STRING -from django.utils import timezone from rest_framework import serializers from environments.identities.models import Identity @@ -12,6 +11,12 @@ from environments.identities.traits.fields import TraitValueField from environments.identities.traits.models import Trait from environments.identities.traits.serializers import TraitSerializerBasic +from environments.sdk.services import ( + get_identified_transient_identity_and_traits, + get_persisted_identity_and_traits, + get_transient_identity_and_traits, +) +from environments.sdk.types import SDKTraitData from features.serializers import ( FeatureStateSerializerFull, SDKFeatureStateSerializer, @@ -125,7 +130,11 @@ def create(self, validated_data): class IdentifyWithTraitsSerializer( 
HideSensitiveFieldsSerializerMixin, serializers.Serializer ): - identifier = serializers.CharField(write_only=True, required=True) + identifier = serializers.CharField( + required=False, + allow_blank=True, + allow_null=True, + ) transient = serializers.BooleanField(write_only=True, default=False) traits = TraitSerializerBasic(required=False, many=True) flags = SDKFeatureStateSerializer(read_only=True, many=True) @@ -137,44 +146,46 @@ def save(self, **kwargs): Create the identity with the associated traits (optionally store traits if flag set on org) """ + identifier = self.validated_data.get("identifier") environment = self.context["environment"] - transient = self.validated_data["transient"] - trait_data_items = self.validated_data.get("traits", []) + sdk_trait_data: list[SDKTraitData] = self.validated_data.get("traits", []) - if transient: - identity = Identity( - created_date=timezone.now(), - identifier=self.validated_data["identifier"], + if not identifier: + # We have a fully transient identity that should never be persisted. + identity, traits = get_transient_identity_and_traits( environment=environment, + sdk_trait_data=sdk_trait_data, ) - trait_models = identity.generate_traits(trait_data_items, persist=False) - else: - identity, created = Identity.objects.get_or_create( - identifier=self.validated_data["identifier"], environment=environment + elif transient: + # Don't persist incoming data but load presently stored + # overrides and traits, if any. 
+ identity, traits = get_identified_transient_identity_and_traits( + environment=environment, + identifier=identifier, + sdk_trait_data=sdk_trait_data, ) - if not created and environment.project.organisation.persist_trait_data: - # if this is an update and we're persisting traits, then we need to - # partially update any traits and return the full list - trait_models = identity.update_traits(trait_data_items) - else: - # generate traits for the identity and store them if configured to do so - trait_models = identity.generate_traits( - trait_data_items, - persist=environment.project.organisation.persist_trait_data, - ) + else: + # Persist the identity in accordance with individual trait transiency + # and persistence settings outside of request context. + identity, traits = get_persisted_identity_and_traits( + environment=environment, + identifier=identifier, + sdk_trait_data=sdk_trait_data, + ) all_feature_states = identity.get_all_feature_states( - traits=trait_models, + traits=traits, additional_filters=self.context.get("feature_states_additional_filters"), ) - identify_integrations(identity, all_feature_states, trait_models) + identify_integrations(identity, all_feature_states, traits) return { "identity": identity, - "traits": trait_models, + "identifier": identity.identifier, + "traits": traits, "flags": all_feature_states, } diff --git a/api/environments/sdk/services.py b/api/environments/sdk/services.py new file mode 100644 index 000000000000..500f35ac8b92 --- /dev/null +++ b/api/environments/sdk/services.py @@ -0,0 +1,120 @@ +import hashlib +import uuid +from itertools import chain +from operator import itemgetter +from typing import TypeAlias + +from django.utils import timezone + +from environments.identities.models import Identity +from environments.identities.traits.models import Trait +from environments.models import Environment +from environments.sdk.types import SDKTraitData + +IdentityAndTraits: TypeAlias = tuple[Identity, list[Trait]] + + +def 
get_transient_identity_and_traits( + environment: Environment, + sdk_trait_data: list[SDKTraitData], +) -> IdentityAndTraits: + """ + Get a transient `Identity` instance with a randomly generated identifier. + All traits are marked as transient. + """ + return ( + ( + identity := _get_transient_identity( + environment=environment, + identifier=get_transient_identifier(sdk_trait_data), + ) + ), + identity.generate_traits(_ensure_transient(sdk_trait_data), persist=False), + ) + + +def get_identified_transient_identity_and_traits( + environment: Environment, + identifier: str, + sdk_trait_data: list[SDKTraitData], +) -> IdentityAndTraits: + """ + Get a transient `Identity` instance. + If present in storage, it's a previously persisted identity with its traits, + combined with incoming traits provided to `sdk_trait_data` argument. + All traits constructed from `sdk_trait_data` are marked as transient. + """ + sdk_trait_data = _ensure_transient(sdk_trait_data) + if identity := Identity.objects.filter( + environment=environment, + identifier=identifier, + ).first(): + return identity, identity.update_traits(sdk_trait_data) + return ( + identity := _get_transient_identity( + environment=environment, + identifier=identifier, + ) + ), identity.generate_traits(sdk_trait_data, persist=False) + + +def get_persisted_identity_and_traits( + environment: Environment, + identifier: str, + sdk_trait_data: list[SDKTraitData], +) -> IdentityAndTraits: + """ + Retrieve a previously persisted `Identity` instance or persist a new one. + Traits are persisted based on the organisation-level setting or a + `"transient"` attribute provided with each individual trait. 
+ """ + identity, created = Identity.objects.get_or_create( + environment=environment, + identifier=identifier, + ) + persist_trait_data = environment.project.organisation.persist_trait_data + if created: + return identity, identity.generate_traits( + sdk_trait_data, + persist=persist_trait_data, + ) + if persist_trait_data: + return identity, identity.update_traits(sdk_trait_data) + return identity, list( + { + trait.trait_key: trait + for trait in chain( + identity.identity_traits.all(), + identity.generate_traits(sdk_trait_data, persist=False), + ) + }.values() + ) + + +def get_transient_identifier(sdk_trait_data: list[SDKTraitData]) -> str: + if sdk_trait_data: + return hashlib.sha256( + "".join( + f'{trait["trait_key"]}{trait["trait_value"]["value"]}' + for trait in sorted(sdk_trait_data, key=itemgetter("trait_key")) + ).encode(), + usedforsecurity=False, + ).hexdigest() + return uuid.uuid4().hex + + +def _get_transient_identity( + environment: Environment, + identifier: str, +) -> Identity: + return Identity( + created_date=timezone.now(), + environment=environment, + identifier=identifier, + ) + + +def _ensure_transient(sdk_trait_data: list[SDKTraitData]) -> list[SDKTraitData]: + for sdk_trait_data_item in sdk_trait_data: + sdk_trait_data_item["transient"] = True + return sdk_trait_data diff --git a/api/environments/sdk/types.py b/api/environments/sdk/types.py new file mode 100644 index 000000000000..cf369f5a1907 --- /dev/null +++ b/api/environments/sdk/types.py @@ -0,0 +1,14 @@ +import typing + +from typing_extensions import NotRequired + + +class SDKTraitValueData(typing.TypedDict): + type: str + value: str + + +class SDKTraitData(typing.TypedDict): + trait_key: str + trait_value: SDKTraitValueData + transient: NotRequired[bool] diff --git a/api/tests/integration/environments/identities/test_integration_identities.py b/api/tests/integration/environments/identities/test_integration_identities.py index 445eedacfbb4..0537fb66f6ef 100644 --- 
a/api/tests/integration/environments/identities/test_integration_identities.py +++ b/api/tests/integration/environments/identities/test_integration_identities.py @@ -1,8 +1,11 @@ +import hashlib import json +from typing import Any, Generator from unittest import mock import pytest from django.urls import reverse +from pytest_lazyfixture import lazy_fixture from rest_framework import status from rest_framework.test import APIClient @@ -224,13 +227,64 @@ def test_get_feature_states_for_identity_only_makes_one_query_to_get_mv_feature_ assert len(second_identity_response_json["flags"]) == 3 -def test_get_feature_states_for_identity__transient_identity__segment_match_expected( +@pytest.fixture +def existing_identity_identifier_data( + identity_identifier: str, + identity: int, +) -> dict[str, Any]: + return {"identifier": identity_identifier} + + +@pytest.fixture +def transient_identifier( + segment_condition_property: str, + segment_condition_value: str, +) -> Generator[str, None, None]: + return hashlib.sha256( + f"avalue_a{segment_condition_property}{segment_condition_value}".encode() + ).hexdigest() + + +@pytest.mark.parametrize( + "transient_data", + [ + pytest.param({"transient": True}, id="with-transient-true"), + pytest.param({"transient": False}, id="with-transient-false"), + pytest.param({}, id="missing-transient"), + ], +) +@pytest.mark.parametrize( + "identifier_data,expected_identifier", + [ + pytest.param( + lazy_fixture("existing_identity_identifier_data"), + lazy_fixture("identity_identifier"), + id="existing-identifier", + ), + pytest.param({"identifier": "unseen"}, "unseen", id="new-identifier"), + pytest.param( + {"identifier": ""}, + lazy_fixture("transient_identifier"), + id="blank-identifier", + ), + pytest.param( + {"identifier": None}, + lazy_fixture("transient_identifier"), + id="null-identifier", + ), + pytest.param({}, lazy_fixture("transient_identifier"), id="missing-identifier"), + ], +) +def 
test_get_feature_states_for_identity__segment_match_expected( sdk_client: APIClient, feature: int, segment: int, segment_condition_property: str, segment_condition_value: str, segment_featurestate: int, + identifier_data: dict[str, Any], + transient_data: dict[str, Any], + expected_identifier: str, ) -> None: # Given url = reverse("api-v1:sdk-identities") @@ -242,14 +296,15 @@ def test_get_feature_states_for_identity__transient_identity__segment_match_expe url, data=json.dumps( { - "identifier": "unseen", + **identifier_data, + **transient_data, "traits": [ { "trait_key": segment_condition_property, "trait_value": segment_condition_value, - } + }, + {"trait_key": "a", "trait_value": "value_a"}, ], - "transient": True, } ), content_type="application/json", @@ -258,6 +313,7 @@ def test_get_feature_states_for_identity__transient_identity__segment_match_expe # Then assert response.status_code == status.HTTP_200_OK response_json = response.json() + assert response_json["identifier"] == expected_identifier assert ( flag_data := next( ( @@ -272,6 +328,29 @@ def test_get_feature_states_for_identity__transient_identity__segment_match_expe assert flag_data["feature_state_value"] == "segment override" +def test_get_feature_states_for_identity__empty_traits__random_identifier_expected( + sdk_client: APIClient, + environment: int, +) -> None: + # Given + url = reverse("api-v1:sdk-identities") + + # When + response_1 = sdk_client.post( + url, + data=json.dumps({"traits": []}), + content_type="application/json", + ) + response_2 = sdk_client.post( + url, + data=json.dumps({"traits": []}), + content_type="application/json", + ) + + # Then + assert response_1.json()["identifier"] != response_2.json()["identifier"] + + def test_get_feature_states_for_identity__transient_trait__segment_match_expected( sdk_client: APIClient, feature: int, diff --git a/api/tests/unit/environments/identities/test_unit_identities_views.py 
b/api/tests/unit/environments/identities/test_unit_identities_views.py index bf9e030058d0..b1e31cdb50b2 100644 --- a/api/tests/unit/environments/identities/test_unit_identities_views.py +++ b/api/tests/unit/environments/identities/test_unit_identities_views.py @@ -1,7 +1,9 @@ import json import urllib +from typing import Any from unittest import mock +import pytest from core.constants import FLAGSMITH_UPDATED_AT_HEADER, STRING from django.test import override_settings from django.urls import reverse @@ -1143,20 +1145,31 @@ def test_post_identities__server_key_only_feature__server_key_auth__return_expec assert response.json()["flags"] +@pytest.mark.parametrize( + "identity_data", + [ + pytest.param( + {"identifier": "transient", "transient": True}, + id="new-identifier-transient-true", + ), + pytest.param({"identifier": ""}, id="blank-identifier"), + pytest.param({"identifier": None}, id="null-identifier"), + pytest.param({}, id="missing_identifier"), + ], +) def test_post_identities__transient__no_persistence( environment: Environment, api_client: APIClient, + identity_data: dict[str, Any], ) -> None: # Given - identifier = "transient" trait_key = "trait_key" api_client.credentials(HTTP_X_ENVIRONMENT_KEY=environment.api_key) url = reverse("api-v1:sdk-identities") data = { - "identifier": identifier, + **identity_data, "traits": [{"trait_key": trait_key, "trait_value": "bar"}], - "transient": True, } # When @@ -1166,10 +1179,74 @@ def test_post_identities__transient__no_persistence( # Then assert response.status_code == status.HTTP_200_OK - assert not Identity.objects.filter(identifier=identifier).exists() + assert not Identity.objects.exists() assert not Trait.objects.filter(trait_key=trait_key).exists() +@pytest.mark.parametrize( + "trait_transiency_data", + [ + pytest.param({"transient": True}, id="trait-transient-true"), + pytest.param({"transient": False}, id="trait-transient-false"), + pytest.param({}, id="trait-default"), + ], +) +def 
test_post_identities__existing__transient__no_persistence( + environment: Environment, + identity: Identity, + trait: Trait, + identity_featurestate: FeatureState, + api_client: APIClient, + trait_transiency_data: dict[str, Any], +) -> None: + # Given + feature_state_value = "identity override" + identity_featurestate.feature_state_value.string_value = feature_state_value + identity_featurestate.feature_state_value.save() + + trait_key = "trait_key" + + api_client.credentials(HTTP_X_ENVIRONMENT_KEY=environment.api_key) + url = reverse("api-v1:sdk-identities") + data = { + "identifier": identity.identifier, + "transient": True, + "traits": [ + {"trait_key": trait_key, "trait_value": "bar", **trait_transiency_data} + ], + } + + # When + response = api_client.post( + url, data=json.dumps(data), content_type="application/json" + ) + + # Then + assert response.status_code == status.HTTP_200_OK + response_json = response.json() + + # identity overrides are correctly loaded + assert response_json["flags"][0]["feature_state_value"] == feature_state_value + + # previously persisted traits not provided in the request + # are not marked as transient in the response + assert response_json["traits"][0]["trait_key"] == trait.trait_key + assert not response_json["traits"][0].get("transient") + + # every trait provided in the request for a transient identity + # is marked as transient + assert response_json["traits"][1]["trait_key"] == trait_key + assert response_json["traits"][1]["transient"] + + assert ( + persisted_trait := Trait.objects.filter( + identity=identity, trait_key=trait.trait_key + ).first() + ) + assert persisted_trait.trait_value == trait.trait_value + assert not Trait.objects.filter(identity=identity, trait_key=trait_key).exists() + + def test_post_identities__transient_traits__no_persistence( environment: Environment, api_client: APIClient, From d096bff958e4b9537d5cf17954aa7d9371baa1b3 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 9 Aug 2024 13:33:40 +0100 Subject: [PATCH 102/247] ci: pre-commit autoupdate (#4416) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Matthew Elwell --- .pre-commit-config.yaml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 114dd8df44ab..b1219bd72fb5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,14 +6,14 @@ repos: name: isort (python) - repo: https://github.com/psf/black - rev: 24.4.2 + rev: 24.8.0 hooks: - id: black language_version: python3 exclude: migrations - repo: https://github.com/pycqa/flake8 - rev: 7.1.0 + rev: 7.1.1 hooks: - id: flake8 name: flake8 @@ -31,6 +31,8 @@ repos: hooks: - id: prettier exclude: ^(frontend/|CHANGELOG.md|.github/docker_build_comment_template.md) + additional_dependencies: + - prettier@3.3.3 # SEE: https://github.com/pre-commit/pre-commit/issues/3133 - repo: https://github.com/python-poetry/poetry rev: 1.8.0 From aa83fa142e22a76004cf1a9c8eedc007f109c7c0 Mon Sep 17 00:00:00 2001 From: Matthew Elwell Date: Fri, 9 Aug 2024 16:30:17 +0100 Subject: [PATCH 103/247] chore: add success message to mgmt command for adding influx data (#4470) --- api/app_analytics/management/commands/sendapiusagetoinflux.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/api/app_analytics/management/commands/sendapiusagetoinflux.py b/api/app_analytics/management/commands/sendapiusagetoinflux.py index 92ecafbe184a..a7e00979cda8 100644 --- a/api/app_analytics/management/commands/sendapiusagetoinflux.py +++ b/api/app_analytics/management/commands/sendapiusagetoinflux.py @@ -101,3 +101,5 @@ def handle( write_api = influxdb_client.write_api(write_options=SYNCHRONOUS) write_api.write(bucket=bucket_name, record=record) + + self.stdout.write(self.style.SUCCESS("Successfully sent data to InfluxDB")) From 
ce05a15c345d1b3f205d87d25970b9749092f770 Mon Sep 17 00:00:00 2001 From: Flagsmith Bot <65724737+flagsmithdev@users.noreply.github.com> Date: Mon, 12 Aug 2024 09:59:35 +0100 Subject: [PATCH 104/247] chore(main): release 2.135.0 (#4464) --- .release-please-manifest.json | 2 +- CHANGELOG.md | 14 ++++++++++++++ version.txt | 2 +- 3 files changed, 16 insertions(+), 2 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 8e5862421d27..276de525c9e5 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.134.1" + ".": "2.135.0" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 7c0e3712eb60..7a8aa05ae6ab 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## [2.135.0](https://github.com/Flagsmith/flagsmith/compare/v2.134.1...v2.135.0) (2024-08-09) + + +### Features + +* **app_analytics:** Add cache for feature evaluation ([#4418](https://github.com/Flagsmith/flagsmith/issues/4418)) ([2dfbf99](https://github.com/Flagsmith/flagsmith/commit/2dfbf99cdc8d8529aa487a4e471df46c0dbc6878)) +* Support blank identifiers, assume transient ([#4449](https://github.com/Flagsmith/flagsmith/issues/4449)) ([0014a5b](https://github.com/Flagsmith/flagsmith/commit/0014a5b4312d1ee7d7dd7b914434f26408ee18b7)) + + +### Bug Fixes + +* Identity overrides are not deleted when deleting Edge identities ([#4460](https://github.com/Flagsmith/flagsmith/issues/4460)) ([2ab73ed](https://github.com/Flagsmith/flagsmith/commit/2ab73edc7352bec8324eb808ba70d6508fe5eed6)) +* show correct SAML Frontend URL on edit ([#4462](https://github.com/Flagsmith/flagsmith/issues/4462)) ([13ad7ef](https://github.com/Flagsmith/flagsmith/commit/13ad7ef7e6613bdd640cdfca7ce99a892b3893be)) + ## [2.134.1](https://github.com/Flagsmith/flagsmith/compare/v2.134.0...v2.134.1) (2024-08-07) diff --git a/version.txt b/version.txt index 571e418a035f..fd8fa45c3fb5 100644 --- a/version.txt +++ 
b/version.txt @@ -1 +1 @@ -2.134.1 +2.135.0 From abbf24bf987e8f74cb2ecf3ec3d82456d9892654 Mon Sep 17 00:00:00 2001 From: Gagan Date: Mon, 12 Aug 2024 15:12:53 +0530 Subject: [PATCH 105/247] infra: bump feature evaluation cache to 300 (#4471) --- infrastructure/aws/production/ecs-task-definition-web.json | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/infrastructure/aws/production/ecs-task-definition-web.json b/infrastructure/aws/production/ecs-task-definition-web.json index 32fce8246103..ceed5987901c 100644 --- a/infrastructure/aws/production/ecs-task-definition-web.json +++ b/infrastructure/aws/production/ecs-task-definition-web.json @@ -155,6 +155,10 @@ "name": "CACHE_BAD_ENVIRONMENTS_SECONDS", "value": "60" }, + { + "name": "FEATURE_EVALUATION_CACHE_SECONDS", + "value": "300" + }, { "name": "EDGE_RELEASE_DATETIME", "value": "2022-06-07T10:00:00Z" From da627a3885340b39f4ee3fcde969853cd78ab426 Mon Sep 17 00:00:00 2001 From: Flagsmith Bot <65724737+flagsmithdev@users.noreply.github.com> Date: Mon, 12 Aug 2024 10:57:59 +0100 Subject: [PATCH 106/247] chore(main): release 2.135.1 (#4472) --- .release-please-manifest.json | 2 +- CHANGELOG.md | 7 +++++++ version.txt | 2 +- 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 276de525c9e5..72a40d6930da 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.135.0" + ".": "2.135.1" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 7a8aa05ae6ab..889c3b636fe5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [2.135.1](https://github.com/Flagsmith/flagsmith/compare/v2.135.0...v2.135.1) (2024-08-12) + + +### Infrastructure (Flagsmith SaaS Only) + +* bump feature evaluation cache to 300 ([#4471](https://github.com/Flagsmith/flagsmith/issues/4471)) 
([abbf24b](https://github.com/Flagsmith/flagsmith/commit/abbf24bf987e8f74cb2ecf3ec3d82456d9892654)) + ## [2.135.0](https://github.com/Flagsmith/flagsmith/compare/v2.134.1...v2.135.0) (2024-08-09) diff --git a/version.txt b/version.txt index fd8fa45c3fb5..42ee4cb9c629 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -2.135.0 +2.135.1 From 50fd9a836e74dc18ff3cd042b726aed836d220d5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rodrigo=20L=C3=B3pez=20Dato?= Date: Mon, 12 Aug 2024 10:30:46 -0300 Subject: [PATCH 107/247] docs: Add missing trailing slash to Edge Proxy example API call (#4477) --- docs/docs/deployment/hosting/locally-edge-proxy.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/docs/deployment/hosting/locally-edge-proxy.md b/docs/docs/deployment/hosting/locally-edge-proxy.md index 5423e39c9525..29fa55191166 100644 --- a/docs/docs/deployment/hosting/locally-edge-proxy.md +++ b/docs/docs/deployment/hosting/locally-edge-proxy.md @@ -294,7 +294,7 @@ domain name and you're good to go. 
For example, lets say you had your proxy runn above: ```bash -curl "http://localhost:8000/api/v1/flags" -H "x-environment-key: 95DybY5oJoRNhxPZYLrxk4" | jq +curl "http://localhost:8000/api/v1/flags/" -H "x-environment-key: 95DybY5oJoRNhxPZYLrxk4" | jq [ { From 7920e8e22e15fc2f91dbd56582679b1f3064e4a9 Mon Sep 17 00:00:00 2001 From: Novak Zaballa <41410593+novakzaballa@users.noreply.github.com> Date: Mon, 12 Aug 2024 09:33:48 -0400 Subject: [PATCH 108/247] feat: Add automatic tagging for github integration (#4028) --- api/conftest.py | 42 ++- .../feature_external_resources/models.py | 46 ++- .../feature_external_resources/views.py | 60 +++- api/features/models.py | 7 +- api/features/serializers.py | 4 +- api/features/versioning/serializers.py | 4 +- api/integrations/github/client.py | 50 ++++ api/integrations/github/constants.py | 48 ++- api/integrations/github/github.py | 80 ++++- .../0004_githubrepository_tagging_enabled.py | 18 ++ api/integrations/github/models.py | 29 +- api/integrations/github/serializers.py | 1 + api/integrations/github/tasks.py | 15 +- api/integrations/github/views.py | 35 ++- .../tags/migrations/0006_alter_tag_type.py | 18 ++ api/projects/tags/models.py | 1 + ...t_unit_feature_external_resources_views.py | 160 +++++++--- .../github/test_unit_github_views.py | 275 +++++++++++++----- api/webhooks/webhooks.py | 3 - 19 files changed, 726 insertions(+), 170 deletions(-) create mode 100644 api/integrations/github/migrations/0004_githubrepository_tagging_enabled.py create mode 100644 api/projects/tags/migrations/0006_alter_tag_type.py diff --git a/api/conftest.py b/api/conftest.py index ab0aa198e9e3..ff6732ee44d3 100644 --- a/api/conftest.py +++ b/api/conftest.py @@ -1018,14 +1018,20 @@ def flagsmith_environments_v2_table(dynamodb: DynamoDBServiceResource) -> Table: @pytest.fixture() -def feature_external_resource( - feature: Feature, post_request_mock: MagicMock, mocker: MockerFixture -) -> FeatureExternalResource: - mocker.patch( +def 
mock_github_client_generate_token(mocker: MockerFixture) -> MagicMock: + return mocker.patch( "integrations.github.client.generate_token", return_value="mocked_token", ) + +@pytest.fixture() +def feature_external_resource( + feature: Feature, + post_request_mock: MagicMock, + mocker: MockerFixture, + mock_github_client_generate_token: MagicMock, +) -> FeatureExternalResource: return FeatureExternalResource.objects.create( url="https://github.com/repositoryownertest/repositorynametest/issues/11", type="GITHUB_ISSUE", @@ -1035,16 +1041,26 @@ def feature_external_resource( @pytest.fixture() -def feature_with_value_external_resource( - feature_with_value: Feature, +def feature_external_resource_gh_pr( + feature: Feature, post_request_mock: MagicMock, mocker: MockerFixture, + mock_github_client_generate_token: MagicMock, ) -> FeatureExternalResource: - mocker.patch( - "integrations.github.client.generate_token", - return_value="mocked_token", + return FeatureExternalResource.objects.create( + url="https://github.com/repositoryownertest/repositorynametest/pull/1", + type="GITHUB_PR", + feature=feature, + metadata='{"status": "open"}', ) + +@pytest.fixture() +def feature_with_value_external_resource( + feature_with_value: Feature, + post_request_mock: MagicMock, + mock_github_client_generate_token: MagicMock, +) -> FeatureExternalResource: return FeatureExternalResource.objects.create( url="https://github.com/repositoryownertest/repositorynametest/issues/11", type="GITHUB_ISSUE", @@ -1069,6 +1085,7 @@ def github_repository( repository_owner="repositoryownertest", repository_name="repositorynametest", project=project, + tagging_enabled=True, ) @@ -1120,3 +1137,10 @@ def handle(self, record: logging.LogRecord) -> None: self.messages.append(self.format(record)) return InspectingHandler() + + +@pytest.fixture +def set_github_webhook_secret() -> None: + from django.conf import settings + + settings.GITHUB_WEBHOOK_SECRET = "secret-key" diff --git 
a/api/features/feature_external_resources/models.py b/api/features/feature_external_resources/models.py index 6dcfc4d99a7f..8df2428e3158 100644 --- a/api/features/feature_external_resources/models.py +++ b/api/features/feature_external_resources/models.py @@ -1,3 +1,4 @@ +import json import logging from django.db import models @@ -11,9 +12,11 @@ from environments.models import Environment from features.models import Feature, FeatureState +from integrations.github.constants import GitHubEventType, GitHubTag from integrations.github.github import call_github_task +from integrations.github.models import GithubRepository from organisations.models import Organisation -from webhooks.webhooks import WebhookEventType +from projects.tags.models import Tag, TagType logger = logging.getLogger(__name__) @@ -24,6 +27,20 @@ class ResourceType(models.TextChoices): GITHUB_PR = "GITHUB_PR", "GitHub PR" +tag_by_type_and_state = { + ResourceType.GITHUB_ISSUE.value: { + "open": GitHubTag.ISSUE_OPEN.value, + "closed": GitHubTag.ISSUE_CLOSED.value, + }, + ResourceType.GITHUB_PR.value: { + "open": GitHubTag.PR_OPEN.value, + "closed": GitHubTag.PR_CLOSED.value, + "merged": GitHubTag.PR_MERGED.value, + "draft": GitHubTag.PR_DRAFT.value, + }, +} + + class FeatureExternalResource(LifecycleModelMixin, models.Model): url = models.URLField() type = models.CharField(max_length=20, choices=ResourceType.choices) @@ -49,12 +66,33 @@ class Meta: @hook(AFTER_SAVE) def execute_after_save_actions(self): + # Tag the feature with the external resource type + metadata = json.loads(self.metadata) if self.metadata else {} + state = metadata.get("state", "open") + # Add a comment to GitHub Issue/PR when feature is linked to the GH external resource + # and tag the feature with the corresponding tag if tagging is enabled if ( - Organisation.objects.prefetch_related("github_config") + github_configuration := Organisation.objects.prefetch_related( + "github_config" + ) 
.get(id=self.feature.project.organisation_id) .github_config.first() ): + github_repo = GithubRepository.objects.get( + github_configuration=github_configuration.id, + project=self.feature.project, + ) + if github_repo.tagging_enabled: + github_tag = Tag.objects.get( + label=tag_by_type_and_state[self.type][state], + project=self.feature.project, + is_system_tag=True, + type=TagType.GITHUB.value, + ) + self.feature.tags.add(github_tag) + self.feature.save() + feature_states: list[FeatureState] = [] environments = Environment.objects.filter( @@ -74,7 +112,7 @@ def execute_after_save_actions(self): call_github_task( organisation_id=self.feature.project.organisation_id, - type=WebhookEventType.FEATURE_EXTERNAL_RESOURCE_ADDED.value, + type=GitHubEventType.FEATURE_EXTERNAL_RESOURCE_ADDED.value, feature=self.feature, segment_name=None, url=None, @@ -92,7 +130,7 @@ def execute_before_save_actions(self) -> None: call_github_task( organisation_id=self.feature.project.organisation_id, - type=WebhookEventType.FEATURE_EXTERNAL_RESOURCE_REMOVED.value, + type=GitHubEventType.FEATURE_EXTERNAL_RESOURCE_REMOVED.value, feature=self.feature, segment_name=None, url=self.url, diff --git a/api/features/feature_external_resources/views.py b/api/features/feature_external_resources/views.py index c9636bba1132..641074215fe6 100644 --- a/api/features/feature_external_resources/views.py +++ b/api/features/feature_external_resources/views.py @@ -1,10 +1,16 @@ +import re + from django.shortcuts import get_object_or_404 from rest_framework import status, viewsets from rest_framework.response import Response from features.models import Feature from features.permissions import FeatureExternalResourcePermissions -from integrations.github.client import get_github_issue_pr_title_and_state +from integrations.github.client import ( + get_github_issue_pr_title_and_state, + label_github_issue_pr, +) +from integrations.github.models import GithubRepository from organisations.models import Organisation 
from .models import FeatureExternalResource @@ -48,14 +54,13 @@ def create(self, request, *args, **kwargs): ), ) - if not ( - ( - Organisation.objects.prefetch_related("github_config") - .get(id=feature.project.organisation_id) - .github_config.first() - ) - or not hasattr(feature.project, "github_project") - ): + github_configuration = ( + Organisation.objects.prefetch_related("github_config") + .get(id=feature.project.organisation_id) + .github_config.first() + ) + + if not github_configuration or not hasattr(feature.project, "github_project"): return Response( data={ "detail": "This Project doesn't have a valid GitHub integration configuration" @@ -63,7 +68,42 @@ def create(self, request, *args, **kwargs): content_type="application/json", status=status.HTTP_400_BAD_REQUEST, ) - return super().create(request, *args, **kwargs) + + # Get repository owner and name, and issue/PR number from the external resource URL + url = request.data.get("url") + if request.data.get("type") == "GITHUB_PR": + pattern = r"github.com/([^/]+)/([^/]+)/pull/(\d+)$" + elif request.data.get("type") == "GITHUB_ISSUE": + pattern = r"github.com/([^/]+)/([^/]+)/issues/(\d+)$" + else: + return Response( + data={"detail": "Incorrect GitHub type"}, + content_type="application/json", + status=status.HTTP_400_BAD_REQUEST, + ) + + match = re.search(pattern, url) + if match: + owner, repo, issue = match.groups() + if GithubRepository.objects.get( + github_configuration=github_configuration, + repository_owner=owner, + repository_name=repo, + ).tagging_enabled: + label_github_issue_pr( + installation_id=github_configuration.installation_id, + owner=owner, + repo=repo, + issue=issue, + ) + response = super().create(request, *args, **kwargs) + return response + else: + return Response( + data={"detail": "Invalid GitHub Issue/PR URL"}, + content_type="application/json", + status=status.HTTP_400_BAD_REQUEST, + ) def perform_update(self, serializer): external_resource_id = int(self.kwargs["pk"]) diff 
--git a/api/features/models.py b/api/features/models.py index 6a80c5c90251..1493a54c4849 100644 --- a/api/features/models.py +++ b/api/features/models.py @@ -74,6 +74,7 @@ STRING, ) from features.versioning.models import EnvironmentFeatureVersion +from integrations.github.constants import GitHubEventType from metadata.models import Metadata from projects.models import Project from projects.tags.models import Tag @@ -139,7 +140,6 @@ class Meta: @hook(AFTER_SAVE) def create_github_comment(self) -> None: from integrations.github.github import call_github_task - from webhooks.webhooks import WebhookEventType if ( self.external_resources.exists() @@ -150,7 +150,7 @@ def create_github_comment(self) -> None: call_github_task( organisation_id=self.project.organisation_id, - type=WebhookEventType.FLAG_DELETED.value, + type=GitHubEventType.FLAG_DELETED.value, feature=self, segment_name=None, url=None, @@ -406,7 +406,6 @@ def _get_environment(self) -> "Environment": @hook(AFTER_DELETE) def create_github_comment(self) -> None: from integrations.github.github import call_github_task - from webhooks.webhooks import WebhookEventType if ( self.feature.external_resources.exists() @@ -416,7 +415,7 @@ def create_github_comment(self) -> None: call_github_task( self.feature.project.organisation_id, - WebhookEventType.SEGMENT_OVERRIDE_DELETED.value, + GitHubEventType.SEGMENT_OVERRIDE_DELETED.value, self.feature, self.segment.name, None, diff --git a/api/features/serializers.py b/api/features/serializers.py index a2a297238637..5d4de0005517 100644 --- a/api/features/serializers.py +++ b/api/features/serializers.py @@ -19,6 +19,7 @@ from environments.sdk.serializers_mixins import ( HideSensitiveFieldsSerializerMixin, ) +from integrations.github.constants import GitHubEventType from integrations.github.github import call_github_task from metadata.serializers import MetadataSerializer, SerializerWithMetadata from projects.models import Project @@ -30,7 +31,6 @@ from 
util.drf_writable_nested.serializers import ( DeleteBeforeUpdateWritableNestedModelSerializer, ) -from webhooks.webhooks import WebhookEventType from .constants import INTERSECTION, UNION from .feature_segments.serializers import ( @@ -478,7 +478,7 @@ def save(self, **kwargs): call_github_task( organisation_id=feature_state.feature.project.organisation_id, - type=WebhookEventType.FLAG_UPDATED.value, + type=GitHubEventType.FLAG_UPDATED.value, feature=feature_state.feature, segment_name=None, url=None, diff --git a/api/features/versioning/serializers.py b/api/features/versioning/serializers.py index ec21cdfae5ae..90295f222b8a 100644 --- a/api/features/versioning/serializers.py +++ b/api/features/versioning/serializers.py @@ -8,10 +8,10 @@ CustomCreateSegmentOverrideFeatureStateSerializer, ) from features.versioning.models import EnvironmentFeatureVersion +from integrations.github.constants import GitHubEventType from integrations.github.github import call_github_task from segments.models import Segment from users.models import FFAdminUser -from webhooks.webhooks import WebhookEventType class CustomEnvironmentFeatureVersionFeatureStateSerializer( @@ -36,7 +36,7 @@ def save(self, **kwargs): call_github_task( organisation_id=feature_state.environment.project.organisation_id, - type=WebhookEventType.FLAG_UPDATED.value, + type=GitHubEventType.FLAG_UPDATED.value, feature=feature_state.feature, segment_name=None, url=None, diff --git a/api/integrations/github/client.py b/api/integrations/github/client.py index b4307fd736f9..4d4293b30a48 100644 --- a/api/integrations/github/client.py +++ b/api/integrations/github/client.py @@ -1,3 +1,4 @@ +import json import logging from enum import Enum from typing import Any @@ -5,11 +6,15 @@ import requests from django.conf import settings from github import Auth, Github +from requests.exceptions import HTTPError from integrations.github.constants import ( GITHUB_API_CALLS_TIMEOUT, GITHUB_API_URL, GITHUB_API_VERSION, + 
GITHUB_FLAGSMITH_LABEL, + GITHUB_FLAGSMITH_LABEL_COLOR, + GITHUB_FLAGSMITH_LABEL_DESCRIPTION, ) from integrations.github.dataclasses import ( IssueQueryParams, @@ -159,6 +164,9 @@ def fetch_search_github_resource( "id": i["id"], "title": i["title"], "number": i["number"], + "state": i["state"], + "merged": i.get("merged", False), + "draft": i.get("draft", False), } for i in json_response["items"] ] @@ -244,3 +252,45 @@ def fetch_github_repo_contributors( ] return build_paginated_response(results, response) + + +def create_flagsmith_flag_label( + installation_id: str, owner: str, repo: str +) -> dict[str, Any]: + # Create "Flagsmith Flag" label in linked repo + url = f"{GITHUB_API_URL}repos/{owner}/{repo}/labels" + headers = build_request_headers(installation_id) + payload = { + "name": GITHUB_FLAGSMITH_LABEL, + "color": GITHUB_FLAGSMITH_LABEL_COLOR, + "description": GITHUB_FLAGSMITH_LABEL_DESCRIPTION, + } + try: + response = requests.post( + url, json=payload, headers=headers, timeout=GITHUB_API_CALLS_TIMEOUT + ) + response.raise_for_status() + return response.json() + + except HTTPError: + response_content = response.content.decode("utf-8") + error_data = json.loads(response_content) + if any( + error["code"] == "already_exists" for error in error_data.get("errors", []) + ): + logger.warning("Label already exists") + return {"message": "Label already exists"}, 200 + + +def label_github_issue_pr( + installation_id: str, owner: str, repo: str, issue: str +) -> dict[str, Any]: + # Label linked GitHub Issue or PR with the "Flagsmith Flag" label + url = f"{GITHUB_API_URL}repos/{owner}/{repo}/issues/{issue}/labels" + headers = build_request_headers(installation_id) + payload = [GITHUB_FLAGSMITH_LABEL] + response = requests.post( + url, json=payload, headers=headers, timeout=GITHUB_API_CALLS_TIMEOUT + ) + response.raise_for_status() + return response.json() diff --git a/api/integrations/github/constants.py b/api/integrations/github/constants.py index 
929ea690b87e..897b3a2c6725 100644 --- a/api/integrations/github/constants.py +++ b/api/integrations/github/constants.py @@ -1,3 +1,5 @@ +from enum import Enum + GITHUB_API_URL = "https://api.github.com/" GITHUB_API_VERSION = "2022-11-28" @@ -7,11 +9,49 @@ | :--- | :----- | :------ | :------ |\n""" FEATURE_TABLE_ROW = "| [%s](%s) | %s | %s | %s |\n" LINK_SEGMENT_TITLE = "Segment `%s` values:\n" -UNLINKED_FEATURE_TEXT = "### The feature flag `%s` was unlinked from the issue/PR" -UPDATED_FEATURE_TEXT = "Flagsmith Feature `%s` has been updated:\n" -DELETED_FEATURE_TEXT = "### The Feature Flag `%s` was deleted" +UNLINKED_FEATURE_TEXT = "**The feature flag `%s` was unlinked from the issue/PR**" +UPDATED_FEATURE_TEXT = "**Flagsmith Feature `%s` has been updated:**\n" +FEATURE_UPDATED_FROM_GHA_TEXT = ( + "**Flagsmith Feature `%s` has been updated from GHA:**\n" +) +DELETED_FEATURE_TEXT = "**The Feature Flag `%s` was deleted**" DELETED_SEGMENT_OVERRIDE_TEXT = ( - "### The Segment Override `%s` for Feature Flag `%s` was deleted" + "**The Segment Override `%s` for Feature Flag `%s` was deleted**" ) FEATURE_ENVIRONMENT_URL = "%s/project/%s/environment/%s/features?feature=%s&tab=%s" GITHUB_API_CALLS_TIMEOUT = 10 + +GITHUB_TAG_COLOR = "#838992" +GITHUB_FLAGSMITH_LABEL = "Flagsmith Flag" +GITHUB_FLAGSMITH_LABEL_DESCRIPTION = ( + "This GitHub Issue/PR is linked to a Flagsmith Feature Flag" +) +GITHUB_FLAGSMITH_LABEL_COLOR = "6633FF" + + +class GitHubEventType(Enum): + FLAG_UPDATED = "FLAG_UPDATED" + FLAG_DELETED = "FLAG_DELETED" + FLAG_UPDATED_FROM_GHA = "FLAG_UPDATED_FROM_GHA" + FEATURE_EXTERNAL_RESOURCE_ADDED = "FEATURE_EXTERNAL_RESOURCE_ADDED" + FEATURE_EXTERNAL_RESOURCE_REMOVED = "FEATURE_EXTERNAL_RESOURCE_REMOVED" + SEGMENT_OVERRIDE_DELETED = "SEGMENT_OVERRIDE_DELETED" + + +class GitHubTag(Enum): + PR_OPEN = "PR Open" + PR_MERGED = "PR Merged" + PR_CLOSED = "PR Closed" + PR_DRAFT = "PR Draft" + ISSUE_OPEN = "Issue Open" + ISSUE_CLOSED = "Issue Closed" + + 
+github_tag_description = { + GitHubTag.PR_OPEN.value: "This feature has a linked PR open", + GitHubTag.PR_MERGED.value: "This feature has a linked PR merged", + GitHubTag.PR_CLOSED.value: "This feature has a linked PR closed", + GitHubTag.PR_DRAFT.value: "This feature has a linked PR draft", + GitHubTag.ISSUE_OPEN.value: "This feature has a linked issue open", + GitHubTag.ISSUE_CLOSED.value: "This feature has a linked issue closed", +} diff --git a/api/integrations/github/github.py b/api/integrations/github/github.py index ddbbdcc04698..6b3573903dde 100644 --- a/api/integrations/github/github.py +++ b/api/integrations/github/github.py @@ -4,6 +4,7 @@ from typing import Any from core.helpers import get_current_site_url +from django.db.models import Q from django.utils.formats import get_format from features.models import Feature, FeatureState, FeatureStateValue @@ -17,14 +18,69 @@ LINK_SEGMENT_TITLE, UNLINKED_FEATURE_TEXT, UPDATED_FEATURE_TEXT, + GitHubEventType, + GitHubTag, ) from integrations.github.dataclasses import GithubData from integrations.github.models import GithubConfiguration from integrations.github.tasks import call_github_app_webhook_for_feature_state -from webhooks.webhooks import WebhookEventType +from projects.tags.models import Tag, TagType logger = logging.getLogger(__name__) +tag_by_event_type = { + "pull_request": { + "closed": GitHubTag.PR_CLOSED.value, + "converted_to_draft": GitHubTag.PR_DRAFT.value, + "opened": GitHubTag.PR_OPEN.value, + "reopened": GitHubTag.PR_OPEN.value, + "ready_for_review": GitHubTag.PR_OPEN.value, + "merged": GitHubTag.PR_MERGED.value, + }, + "issues": { + "closed": GitHubTag.ISSUE_CLOSED.value, + "opened": GitHubTag.ISSUE_OPEN.value, + "reopened": GitHubTag.ISSUE_OPEN.value, + }, +} + + +def tag_feature_per_github_event( + event_type: str, action: str, metadata: dict[str, Any] +) -> None: + + # Get Feature with external resource of type GITHUB and url matching the resource URL + feature = Feature.objects.filter( + 
Q(external_resources__type="GITHUB_PR") + | Q(external_resources__type="GITHUB_ISSUE"), + external_resources__url=metadata.get("html_url"), + ).first() + + if feature: + if ( + event_type == "pull_request" + and action == "closed" + and metadata.get("merged") + ): + action = "merged" + # Get corresponding project Tag to tag the feature + github_tag = Tag.objects.get( + label=tag_by_event_type[event_type][action], + project=feature.project_id, + is_system_tag=True, + type=TagType.GITHUB.value, + ) + tag_label_pattern = "Issue" if event_type == "issues" else "PR" + # Remove all GITHUB tags from the feature which label starts with issue or pr depending on event_type + feature.tags.remove( + *feature.tags.filter( + Q(type=TagType.GITHUB.value) & Q(label__startswith=tag_label_pattern) + ) + ) + + feature.tags.add(github_tag) + feature.save() + def handle_installation_deleted(payload: dict[str, Any]) -> None: installation_id = payload.get("installation", {}).get("id") @@ -42,6 +98,10 @@ def handle_installation_deleted(payload: dict[str, Any]) -> None: def handle_github_webhook_event(event_type: str, payload: dict[str, Any]) -> None: if event_type == "installation" and payload.get("action") == "deleted": handle_installation_deleted(payload) + elif event_type in tag_by_event_type: + action = str(payload.get("action")) + metadata = payload.get("issue", {}) or payload.get("pull_request", {}) + tag_feature_per_github_event(event_type, action, metadata) def generate_body_comment( @@ -53,13 +113,12 @@ def generate_body_comment( segment_name: str | None = None, ) -> str: - is_update = event_type == WebhookEventType.FLAG_UPDATED.value - is_removed = event_type == WebhookEventType.FEATURE_EXTERNAL_RESOURCE_REMOVED.value + is_removed = event_type == GitHubEventType.FEATURE_EXTERNAL_RESOURCE_REMOVED.value is_segment_override_deleted = ( - event_type == WebhookEventType.SEGMENT_OVERRIDE_DELETED.value + event_type == GitHubEventType.SEGMENT_OVERRIDE_DELETED.value ) - if event_type == 
WebhookEventType.FLAG_DELETED.value: + if event_type == GitHubEventType.FLAG_DELETED.value: return DELETED_FEATURE_TEXT % (name) if is_removed: @@ -68,7 +127,12 @@ def generate_body_comment( if is_segment_override_deleted and segment_name is not None: return DELETED_SEGMENT_OVERRIDE_TEXT % (segment_name, name) - result = UPDATED_FEATURE_TEXT % (name) if is_update else LINK_FEATURE_TITLE % (name) + result = "" + if event_type == GitHubEventType.FLAG_UPDATED.value: + result = UPDATED_FEATURE_TEXT % (name) + else: + result = LINK_FEATURE_TITLE % (name) + last_segment_name = "" if len(feature_states) > 0 and not feature_states[0].get("segment_name"): result += FEATURE_TABLE_HEADER @@ -125,7 +189,7 @@ def generate_data( if check_not_none(feature_state_value): feature_env_data["feature_state_value"] = feature_state_value - if type is not WebhookEventType.FEATURE_EXTERNAL_RESOURCE_REMOVED.value: + if type is not GitHubEventType.FEATURE_EXTERNAL_RESOURCE_REMOVED.value: feature_env_data["environment_name"] = feature_state.environment.name feature_env_data["enabled"] = feature_state.enabled feature_env_data["last_updated"] = feature_state.updated_at.strftime( @@ -150,7 +214,7 @@ def generate_data( type=type, url=( url - if type == WebhookEventType.FEATURE_EXTERNAL_RESOURCE_REMOVED.value + if type == GitHubEventType.FEATURE_EXTERNAL_RESOURCE_REMOVED.value else None ), feature_states=feature_states_list if feature_states else None, diff --git a/api/integrations/github/migrations/0004_githubrepository_tagging_enabled.py b/api/integrations/github/migrations/0004_githubrepository_tagging_enabled.py new file mode 100644 index 000000000000..a3ded07330d2 --- /dev/null +++ b/api/integrations/github/migrations/0004_githubrepository_tagging_enabled.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.25 on 2024-08-07 17:47 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('github', '0003_auto_20240528_0640'), + ] + + operations = 
[ + migrations.AddField( + model_name='githubrepository', + name='tagging_enabled', + field=models.BooleanField(default=False), + ), + ] diff --git a/api/integrations/github/models.py b/api/integrations/github/models.py index 532e0760b9ff..546b009d601e 100644 --- a/api/integrations/github/models.py +++ b/api/integrations/github/models.py @@ -3,8 +3,14 @@ from core.models import SoftDeleteExportableModel from django.db import models -from django_lifecycle import BEFORE_DELETE, LifecycleModelMixin, hook +from django_lifecycle import ( + AFTER_CREATE, + BEFORE_DELETE, + LifecycleModelMixin, + hook, +) +from integrations.github.constants import GITHUB_TAG_COLOR from organisations.models import Organisation logger: logging.Logger = logging.getLogger(name=__name__) @@ -48,6 +54,7 @@ class GithubRepository(LifecycleModelMixin, SoftDeleteExportableModel): null=False, on_delete=models.CASCADE, ) + tagging_enabled = models.BooleanField(default=False) class Meta: constraints = [ @@ -84,3 +91,23 @@ def delete_feature_external_resources( # Filter by url containing the repository owner and name url__regex=pattern, ).delete() + + @hook(AFTER_CREATE) + def create_github_tags( + self, + ) -> None: + from integrations.github.constants import ( + GitHubTag, + github_tag_description, + ) + from projects.tags.models import Tag, TagType + + for tag_label in GitHubTag: + tag, created = Tag.objects.get_or_create( + color=GITHUB_TAG_COLOR, + description=github_tag_description[tag_label.value], + label=tag_label.value, + project=self.project, + is_system_tag=True, + type=TagType.GITHUB.value, + ) diff --git a/api/integrations/github/serializers.py b/api/integrations/github/serializers.py index b3dc96c5263e..9d1cf3a81635 100644 --- a/api/integrations/github/serializers.py +++ b/api/integrations/github/serializers.py @@ -27,6 +27,7 @@ class Meta: "project", "repository_owner", "repository_name", + "tagging_enabled", ) read_only_fields = ( "id", diff --git a/api/integrations/github/tasks.py 
b/api/integrations/github/tasks.py index 8e94c9007b5d..970067daf0c4 100644 --- a/api/integrations/github/tasks.py +++ b/api/integrations/github/tasks.py @@ -6,8 +6,8 @@ from features.models import Feature from integrations.github.client import post_comment_to_github +from integrations.github.constants import GitHubEventType from integrations.github.dataclasses import CallGithubData -from webhooks.webhooks import WebhookEventType logger = logging.getLogger(__name__) @@ -29,8 +29,9 @@ def send_post_request(data: CallGithubData) -> None: ) if ( - event_type == WebhookEventType.FLAG_UPDATED.value - or event_type == WebhookEventType.FLAG_DELETED.value + event_type == GitHubEventType.FLAG_UPDATED.value + or event_type == GitHubEventType.FLAG_DELETED.value + or event_type == GitHubEventType.FLAG_UPDATED_FROM_GHA.value ): for resource in data.feature_external_resources: url = resource.get("url") @@ -40,7 +41,7 @@ def send_post_request(data: CallGithubData) -> None: installation_id, split_url[1], split_url[2], split_url[4], body ) - elif event_type == WebhookEventType.FEATURE_EXTERNAL_RESOURCE_REMOVED.value: + elif event_type == GitHubEventType.FEATURE_EXTERNAL_RESOURCE_REMOVED.value: url = data.github_data.url pathname = urlparse(url).path split_url = pathname.split("/") @@ -80,8 +81,8 @@ def generate_feature_external_resources( ] if ( - github_event_data.type == WebhookEventType.FLAG_DELETED.value - or github_event_data.type == WebhookEventType.SEGMENT_OVERRIDE_DELETED.value + github_event_data.type == GitHubEventType.FLAG_DELETED.value + or github_event_data.type == GitHubEventType.SEGMENT_OVERRIDE_DELETED.value ): feature_external_resources = generate_feature_external_resources( list( @@ -100,7 +101,7 @@ def generate_feature_external_resources( if ( github_event_data.type - == WebhookEventType.FEATURE_EXTERNAL_RESOURCE_REMOVED.value + == GitHubEventType.FEATURE_EXTERNAL_RESOURCE_REMOVED.value ): data = CallGithubData( event_type=github_event_data.type, diff --git 
a/api/integrations/github/views.py b/api/integrations/github/views.py index 4cc2f3efea57..8d48a17dfa84 100644 --- a/api/integrations/github/views.py +++ b/api/integrations/github/views.py @@ -15,13 +15,17 @@ from integrations.github.client import ( ResourceType, + create_flagsmith_flag_label, delete_github_installation, fetch_github_repo_contributors, fetch_github_repositories, fetch_search_github_resource, ) from integrations.github.exceptions import DuplicateGitHubIntegration -from integrations.github.github import handle_github_webhook_event +from integrations.github.github import ( + handle_github_webhook_event, + tag_by_event_type, +) from integrations.github.helpers import github_webhook_payload_is_valid from integrations.github.models import GithubConfiguration, GithubRepository from integrations.github.permissions import HasPermissionToGithubConfiguration @@ -136,10 +140,20 @@ def get_queryset(self): except ValueError: raise ValidationError({"github_pk": ["Must be an integer"]}) - def create(self, request, *args, **kwargs): + def create(self, request, *args, **kwargs) -> Response | None: try: - return super().create(request, *args, **kwargs) + response: Response = super().create(request, *args, **kwargs) + github_configuration: GithubConfiguration = GithubConfiguration.objects.get( + id=self.kwargs["github_pk"] + ) + if request.data.get("tagging_enabled", False): + create_flagsmith_flag_label( + installation_id=github_configuration.installation_id, + owner=request.data.get("repository_owner"), + repo=request.data.get("repository_name"), + ) + return response except IntegrityError as e: if re.search( @@ -150,6 +164,19 @@ def create(self, request, *args, **kwargs): detail="Duplication error. 
The GitHub repository already linked" ) + def update(self, request, *args, **kwargs) -> Response | None: + response: Response = super().update(request, *args, **kwargs) + github_configuration: GithubConfiguration = GithubConfiguration.objects.get( + id=self.kwargs["github_pk"] + ) + if request.data.get("tagging_enabled", False): + create_flagsmith_flag_label( + installation_id=github_configuration.installation_id, + owner=request.data.get("repository_owner"), + repo=request.data.get("repository_name"), + ) + return response + @api_view(["GET"]) @permission_classes([IsAuthenticated, HasPermissionToGithubConfiguration]) @@ -250,7 +277,7 @@ def github_webhook(request) -> Response: payload_body=payload, secret_token=secret, signature_header=signature ): data = json.loads(payload.decode("utf-8")) - if github_event == "installation": + if github_event == "installation" or github_event in tag_by_event_type: handle_github_webhook_event(event_type=github_event, payload=data) return Response({"detail": "Event processed"}, status=200) else: diff --git a/api/projects/tags/migrations/0006_alter_tag_type.py b/api/projects/tags/migrations/0006_alter_tag_type.py new file mode 100644 index 000000000000..84736c31bd3f --- /dev/null +++ b/api/projects/tags/migrations/0006_alter_tag_type.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.25 on 2024-05-27 15:03 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('tags', '0005_add_tag_fields_for_stale_flags_logic'), + ] + + operations = [ + migrations.AlterField( + model_name='tag', + name='type', + field=models.CharField(choices=[('NONE', 'None'), ('STALE', 'Stale'), ('GITHUB', 'Github')], default='NONE', help_text='Field used to provide a consistent identifier for the FE and API to use for business logic.', max_length=100), + ), + ] diff --git a/api/projects/tags/models.py b/api/projects/tags/models.py index 597bebdd0d9a..35c75eecfb7e 100644 --- a/api/projects/tags/models.py +++ 
b/api/projects/tags/models.py @@ -7,6 +7,7 @@ class TagType(models.Choices): NONE = "NONE" STALE = "STALE" + GITHUB = "GITHUB" class Tag(AbstractBaseExportableModel): diff --git a/api/tests/unit/features/test_unit_feature_external_resources_views.py b/api/tests/unit/features/test_unit_feature_external_resources_views.py index 901454b6add6..d69b05766c0e 100644 --- a/api/tests/unit/features/test_unit_feature_external_resources_views.py +++ b/api/tests/unit/features/test_unit_feature_external_resources_views.py @@ -75,17 +75,15 @@ def test_create_feature_external_resource( github_configuration: GithubConfiguration, github_repository: GithubRepository, post_request_mock: MagicMock, - mocker: MockerFixture, + mock_github_client_generate_token: MagicMock, ) -> None: # Given - mocker.patch( - "integrations.github.client.generate_token", - return_value="mocked_token", + repository_owner_name = ( + f"{github_repository.repository_owner}/{github_repository.repository_name}" ) - feature_external_resource_data = { "type": "GITHUB_ISSUE", - "url": "https://github.com/repoowner/repo-name/issues/35", + "url": f"https://github.com/{repository_owner_name}/issues/35", "feature": feature_with_value.id, "metadata": {"state": "open"}, } @@ -130,7 +128,7 @@ def test_create_feature_external_resource( ) ) post_request_mock.assert_called_with( - "https://api.github.com/repos/repoowner/repo-name/issues/35/comments", + f"https://api.github.com/repos/{repository_owner_name}/issues/35/comments", json={"body": f"{expected_comment_body}"}, headers={ "Accept": "application/vnd.github.v3+json", @@ -157,7 +155,7 @@ def test_create_feature_external_resource( # And When responses.add( method="GET", - url=f"{GITHUB_API_URL}repos/repoowner/repo-name/issues/35", + url=f"{GITHUB_API_URL}repos/{repository_owner_name}/issues/35", status=200, json={"title": "resource name", "state": "open"}, ) @@ -183,6 +181,66 @@ def test_create_feature_external_resource( ) +def 
test_cannot_create_feature_external_resource_with_an_invalid_gh_url( + admin_client_new: APIClient, + feature: Feature, + project: Project, + github_configuration: GithubConfiguration, + github_repository: GithubRepository, +) -> None: + # Given + feature_external_resource_data = { + "type": "GITHUB_ISSUE", + "url": "https://github.com/repoowner/repo-name/pull/1", + "feature": feature.id, + "metadata": {"state": "open"}, + } + + url = reverse( + "api-v1:projects:feature-external-resources-list", + kwargs={"project_pk": project.id, "feature_pk": feature.id}, + ) + + # When + response = admin_client_new.post( + url, data=feature_external_resource_data, format="json" + ) + + # Then + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert response.json()["detail"] == "Invalid GitHub Issue/PR URL" + + +def test_cannot_create_feature_external_resource_with_an_incorrect_gh_type( + admin_client_new: APIClient, + feature: Feature, + project: Project, + github_configuration: GithubConfiguration, + github_repository: GithubRepository, +) -> None: + # Given + feature_external_resource_data = { + "type": "GITHUB_INCORRECT_TYPE", + "url": "https://github.com/repoowner/repo-name/pull/1", + "feature": feature.id, + "metadata": {"state": "open"}, + } + + url = reverse( + "api-v1:projects:feature-external-resources-list", + kwargs={"project_pk": project.id, "feature_pk": feature.id}, + ) + + # When + response = admin_client_new.post( + url, data=feature_external_resource_data, format="json" + ) + + # Then + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert response.json()["detail"] == "Incorrect GitHub type" + + def test_cannot_create_feature_external_resource_when_doesnt_have_a_valid_github_integration( admin_client_new: APIClient, feature: Feature, @@ -297,11 +355,11 @@ def test_update_feature_external_resource( "integrations.github.client.generate_token", ) mock_generate_token.return_value = "mocked_token" - mock_generate_token.return_value = 
"mocked_token" feature_external_resource_data = { "type": "GITHUB_ISSUE", "url": "https://github.com/userexample/example-project-repo/issues/12", "feature": feature.id, + "metadata": '{"state": "open"}', } url = reverse( "api-v1:projects:feature-external-resources-detail", @@ -338,7 +396,7 @@ def test_delete_feature_external_resource( post_request_mock.assert_called_with( "https://api.github.com/repos/repositoryownertest/repositorynametest/issues/11/comments", json={ - "body": "### The feature flag `Test Feature1` was unlinked from the issue/PR" + "body": "**The feature flag `Test Feature1` was unlinked from the issue/PR**" }, headers={ "Accept": "application/vnd.github.v3+json", @@ -361,12 +419,9 @@ def test_get_feature_external_resources( github_configuration: GithubConfiguration, github_repository: GithubRepository, feature_external_resource: FeatureExternalResource, - mocker: MockerFixture, + mock_github_client_generate_token: MagicMock, ) -> None: # Given - mocker.patch( - "integrations.github.client.generate_token", - ) url = reverse( "api-v1:projects:feature-external-resources-list", kwargs={"project_pk": project.id, "feature_pk": feature.id}, @@ -446,7 +501,7 @@ def test_create_github_comment_on_feature_state_updated( ).updated_at.strftime(get_format("DATETIME_INPUT_FORMATS")[0]) expected_body_comment = ( - "Flagsmith Feature `Test Feature1` has been updated:\n" + "**Flagsmith Feature `Test Feature1` has been updated:**\n" + expected_default_body( project.id, environment.api_key, @@ -480,14 +535,9 @@ def test_create_github_comment_on_feature_was_deleted( github_repository: GithubRepository, feature_external_resource: FeatureExternalResource, post_request_mock: MagicMock, - mocker: MockerFixture, + mock_github_client_generate_token: MagicMock, ) -> None: # Given - mocker.patch( - "integrations.github.client.generate_token", - return_value="mocked_token", - ) - url = reverse( viewname="api-v1:projects:project-features-detail", kwargs={"project_pk": 
project.id, "pk": feature.id}, @@ -501,7 +551,7 @@ def test_create_github_comment_on_feature_was_deleted( post_request_mock.assert_called_with( "https://api.github.com/repos/repositoryownertest/repositorynametest/issues/11/comments", - json={"body": "### The Feature Flag `Test Feature1` was deleted"}, + json={"body": "**The Feature Flag `Test Feature1` was deleted**"}, headers={ "Accept": "application/vnd.github.v3+json", "X-GitHub-Api-Version": GITHUB_API_VERSION, @@ -518,18 +568,13 @@ def test_create_github_comment_on_segment_override_updated( github_configuration: GithubConfiguration, github_repository: GithubRepository, post_request_mock: MagicMock, - mocker: MockerFixture, environment: Environment, admin_client: APIClient, feature_with_value_external_resource: FeatureExternalResource, + mock_github_client_generate_token: MagicMock, ) -> None: # Given feature_state = segment_override_for_feature_with_value - mocker.patch( - "integrations.github.client.generate_token", - return_value="mocked_token", - ) - payload = dict(WritableNestedFeatureStateSerializer(instance=feature_state).data) payload["enabled"] = not feature_state.enabled @@ -549,7 +594,7 @@ def test_create_github_comment_on_segment_override_updated( ).updated_at.strftime(get_format("DATETIME_INPUT_FORMATS")[0]) expected_comment_body = ( - "Flagsmith Feature `feature_with_value` has been updated:\n" + "**Flagsmith Feature `feature_with_value` has been updated:**\n" + "\n" + expected_segment_comment_body( project.id, @@ -581,16 +626,11 @@ def test_create_github_comment_on_segment_override_deleted( github_configuration: GithubConfiguration, github_repository: GithubRepository, post_request_mock: MagicMock, - mocker: MockerFixture, admin_client_new: APIClient, feature_with_value_external_resource: FeatureExternalResource, + mock_github_client_generate_token: MagicMock, ) -> None: # Given - mocker.patch( - "integrations.github.client.generate_token", - return_value="mocked_token", - ) - url = reverse( 
viewname="api-v1:features:feature-segment-detail", kwargs={"pk": feature_with_value_segment.id}, @@ -606,7 +646,7 @@ def test_create_github_comment_on_segment_override_deleted( post_request_mock.assert_called_with( "https://api.github.com/repos/repositoryownertest/repositorynametest/issues/11/comments", json={ - "body": "### The Segment Override `segment` for Feature Flag `feature_with_value` was deleted" + "body": "**The Segment Override `segment` for Feature Flag `feature_with_value` was deleted**" }, headers={ "Accept": "application/vnd.github.v3+json", @@ -664,7 +704,7 @@ def test_create_github_comment_using_v2( response_data["updated_at"], format ).strftime(get_format("DATETIME_INPUT_FORMATS")[0]) expected_comment_body = ( - "Flagsmith Feature `Test Feature1` has been updated:\n" + "**Flagsmith Feature `Test Feature1` has been updated:**\n" + "\n" + expected_segment_comment_body( project.id, @@ -745,19 +785,17 @@ def test_create_feature_external_resource_on_environment_with_v2( segment_override_for_feature_with_value: FeatureState, environment_v2_versioning: Environment, post_request_mock: MagicMock, - mocker: MockerFixture, + mock_github_client_generate_token: MagicMock, ) -> None: # Given feature_id = segment_override_for_feature_with_value.feature_id - - mocker.patch( - "integrations.github.client.generate_token", - return_value="mocked_token", + repository_owner_name = ( + f"{github_repository.repository_owner}/{github_repository.repository_name}" ) feature_external_resource_data = { "type": "GITHUB_ISSUE", - "url": "https://github.com/repoowner/repo-name/issues/35", + "url": f"https://github.com/{repository_owner_name}/issues/35", "feature": feature_id, "metadata": {"state": "open"}, } @@ -804,7 +842,7 @@ def test_create_feature_external_resource_on_environment_with_v2( assert response.status_code == status.HTTP_201_CREATED post_request_mock.assert_called_with( - "https://api.github.com/repos/repoowner/repo-name/issues/35/comments", + 
f"https://api.github.com/repos/{repository_owner_name}/issues/35/comments", json={"body": f"{expected_comment_body}"}, headers={ "Accept": "application/vnd.github.v3+json", @@ -813,3 +851,37 @@ def test_create_feature_external_resource_on_environment_with_v2( }, timeout=10, ) + + +def test_cannot_create_feature_external_resource_for_the_same_feature_and_resource_uri( + admin_client_new: APIClient, + feature: Feature, + project: Project, + github_configuration: GithubConfiguration, + github_repository: GithubRepository, + feature_external_resource_gh_pr: FeatureExternalResource, +) -> None: + # Given + feature_external_resource_data = { + "type": "GITHUB_PR", + "url": "https://github.com/repositoryownertest/repositorynametest/pull/1", + "feature": feature.id, + "metadata": {"state": "open"}, + } + + url = reverse( + "api-v1:projects:feature-external-resources-list", + kwargs={"project_pk": project.id, "feature_pk": feature.id}, + ) + + # When + response = admin_client_new.post( + url, data=feature_external_resource_data, format="json" + ) + + # Then + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert ( + response.json()["non_field_errors"][0] + == "The fields feature, url must make a unique set." 
+ ) diff --git a/api/tests/unit/integrations/github/test_unit_github_views.py b/api/tests/unit/integrations/github/test_unit_github_views.py index 07d3e9660f14..a1d9add23f36 100644 --- a/api/tests/unit/integrations/github/test_unit_github_views.py +++ b/api/tests/unit/integrations/github/test_unit_github_views.py @@ -1,10 +1,10 @@ import json from typing import Any +from unittest.mock import MagicMock import pytest import requests import responses -from django.conf import settings from django.urls import reverse from pytest_lazyfixture import lazy_fixture from pytest_mock import MockerFixture @@ -12,7 +12,9 @@ from rest_framework.response import Response from rest_framework.test import APIClient +from environments.models import Environment from features.feature_external_resources.models import FeatureExternalResource +from features.models import Feature from integrations.github.constants import GITHUB_API_URL from integrations.github.models import GithubConfiguration, GithubRepository from integrations.github.views import ( @@ -29,6 +31,17 @@ WEBHOOK_PAYLOAD_WITHOUT_INSTALLATION_ID = json.dumps( {"installation": {"test": 765432}, "action": "deleted"} ) +WEBHOOK_PAYLOAD_MERGED = json.dumps( + { + "pull_request": { + "id": 1234567, + "html_url": "https://github.com/repositoryownertest/repositorynametest/issues/11", + "merged": True, + }, + "action": "closed", + } +) + WEBHOOK_SIGNATURE = "sha1=57a1426e19cdab55dd6d0c191743e2958e50ccaa" WEBHOOK_SIGNATURE_WITH_AN_INVALID_INSTALLATION_ID = ( "sha1=081eef49d04df27552587d5df1c6b76e0fe20d21" @@ -36,6 +49,7 @@ WEBHOOK_SIGNATURE_WITHOUT_INSTALLATION_ID = ( "sha1=f99796bd3cebb902864e87ed960c5cca8772ff67" ) +WEBHOOK_MERGED_ACTION_SIGNATURE = "sha1=712ec7a5db14aad99d900da40738ebb9508ecad2" WEBHOOK_SECRET = "secret-key" @@ -246,11 +260,14 @@ def test_cannot_get_github_repository_when_github_pk_in_not_a_number( assert response.json() == {"github_pk": ["Must be an integer"]} +@responses.activate def test_create_github_repository( 
admin_client_new: APIClient, organisation: Organisation, github_configuration: GithubConfiguration, project: Project, + mocker: MockerFixture, + mock_github_client_generate_token: MagicMock, ) -> None: # Given data = { @@ -258,8 +275,16 @@ def test_create_github_repository( "repository_owner": "repositoryowner", "repository_name": "repositoryname", "project": project.id, + "tagging_enabled": True, } + responses.add( + method="POST", + url=f"{GITHUB_API_URL}repos/repositoryowner/repositoryname/labels", + status=status.HTTP_200_OK, + json={}, + ) + url = reverse( "api-v1:organisations:repositories-list", args=[organisation.id, github_configuration.id], @@ -272,6 +297,53 @@ def test_create_github_repository( assert GithubRepository.objects.filter(repository_owner="repositoryowner").exists() +@responses.activate +def test_create_github_repository_and_label_already_Existe( + admin_client_new: APIClient, + organisation: Organisation, + github_configuration: GithubConfiguration, + project: Project, + mocker: MockerFixture, + mock_github_client_generate_token: MagicMock, +) -> None: + # Given + mocker_logger = mocker.patch("integrations.github.client.logger") + + data = { + "github_configuration": github_configuration.id, + "repository_owner": "repositoryowner", + "repository_name": "repositoryname", + "project": project.id, + "tagging_enabled": True, + } + + mock_response = { + "message": "Validation Failed", + "errors": [{"resource": "Label", "code": "already_exists", "field": "name"}], + "documentation_url": "https://docs.github.com/rest/issues/labels#create-a-label", + "status": "422", + } + + responses.add( + method="POST", + url=f"{GITHUB_API_URL}repos/repositoryowner/repositoryname/labels", + status=status.HTTP_422_UNPROCESSABLE_ENTITY, + json=mock_response, + ) + + url = reverse( + "api-v1:organisations:repositories-list", + args=[organisation.id, github_configuration.id], + ) + # When + response = admin_client_new.post(url, data) + + # Then + 
mocker_logger.warning.assert_called_once_with("Label already exists") + assert response.status_code == status.HTTP_201_CREATED + assert GithubRepository.objects.filter(repository_owner="repositoryowner").exists() + + def test_cannot_create_github_repository_when_does_not_have_permissions( test_user_client: APIClient, organisation: Organisation, @@ -334,13 +406,9 @@ def test_github_delete_repository( github_configuration: GithubConfiguration, github_repository: GithubRepository, feature_external_resource: FeatureExternalResource, - mocker: MockerFixture, + mock_github_client_generate_token: MagicMock, ) -> None: # Given - mock_generate_token = mocker.patch( - "integrations.github.client.generate_token", - ) - mock_generate_token.return_value = "mocked_token" url = reverse( "api-v1:organisations:repositories-detail", args=[organisation.id, github_configuration.id, github_repository.id], @@ -409,14 +477,10 @@ def test_fetch_pull_requests( organisation: Organisation, github_configuration: GithubConfiguration, github_repository: GithubRepository, + mock_github_client_generate_token: MagicMock, mocker: MockerFixture, ) -> None: - # Given - mock_generate_token = mocker.patch( - "integrations.github.client.generate_token", - ) - mock_generate_token.return_value = "mocked_token" github_request_mock = mocker.patch( "requests.get", side_effect=mocked_requests_get_issues_and_pull_requests ) @@ -448,13 +512,10 @@ def test_fetch_issues( organisation: Organisation, github_configuration: GithubConfiguration, github_repository: GithubRepository, + mock_github_client_generate_token: MagicMock, mocker: MockerFixture, ) -> None: # Given - mock_generate_token = mocker.patch( - "integrations.github.client.generate_token", - ) - mock_generate_token.return_value = "mocked_token" github_request_mock = mocker.patch( "requests.get", side_effect=mocked_requests_get_issues_and_pull_requests ) @@ -491,13 +552,10 @@ def test_fetch_issues_returns_error_on_bad_response_from_github( organisation: 
Organisation, github_configuration: GithubConfiguration, github_repository: GithubRepository, + mock_github_client_generate_token: MagicMock, mocker: MockerFixture, ) -> None: # Given - mock_generate_token = mocker.patch( - "integrations.github.client.generate_token", - ) - mock_generate_token.return_value = "mocked_token" mocker.patch("requests.get", side_effect=mocked_requests_get_error) url = reverse("api-v1:organisations:get-github-issues", args=[organisation.id]) data = {"repo_owner": "owner", "repo_name": "repo"} @@ -519,13 +577,9 @@ def test_fetch_repositories( organisation: Organisation, github_configuration: GithubConfiguration, github_repository: GithubRepository, - mocker: MockerFixture, + mock_github_client_generate_token: MagicMock, ) -> None: # Given - mock_generate_token = mocker.patch( - "integrations.github.client.generate_token", - ) - mock_generate_token.return_value = "mocked_token" responses.add( method="GET", url=f"{GITHUB_API_URL}installation/repositories", @@ -573,13 +627,11 @@ def test_fetch_repositories( ], ) def test_fetch_issues_and_pull_requests_fails_with_status_400_when_integration_not_configured( - client: APIClient, organisation: Organisation, reverse_url: str, mocker + client: APIClient, + organisation: Organisation, + reverse_url: str, + mock_github_client_generate_token: MagicMock, ) -> None: - # Given - mock_generate_token = mocker.patch( - "integrations.github.client.generate_token", - ) - mock_generate_token.generate_token.return_value = "mocked_token" # When url = reverse(reverse_url, args=[organisation.id]) response = client.get(url) @@ -600,15 +652,9 @@ def test_cannot_fetch_issues_or_prs_when_does_not_have_permissions( organisation: Organisation, github_configuration: GithubConfiguration, github_repository: GithubRepository, - mocker, + mock_github_client_generate_token: MagicMock, reverse_url: str, ) -> None: - # Given - mock_generate_token = mocker.patch( - "integrations.github.client.generate_token", - ) - 
mock_generate_token.generate_token.return_value = "mocked_token" - # When url = reverse(reverse_url, args=[organisation.id]) response = test_user_client.get(url) @@ -656,9 +702,9 @@ def test_verify_github_webhook_payload_returns_false_on_no_signature_header() -> def test_github_webhook_delete_installation( api_client: APIClient, github_configuration: GithubConfiguration, + set_github_webhook_secret, ) -> None: # Given - settings.GITHUB_WEBHOOK_SECRET = WEBHOOK_SECRET url = reverse("api-v1:github-webhook") # When @@ -675,63 +721,88 @@ def test_github_webhook_delete_installation( assert not GithubConfiguration.objects.filter(installation_id=1234567).exists() -def test_github_webhook_with_non_existing_installation( +def test_github_webhook_merged_a_pull_request( api_client: APIClient, + feature: Feature, github_configuration: GithubConfiguration, + github_repository: GithubRepository, + feature_external_resource: FeatureExternalResource, + set_github_webhook_secret, +) -> None: + # Given + url = reverse("api-v1:github-webhook") + + # When + response = api_client.post( + path=url, + data=WEBHOOK_PAYLOAD_MERGED, + content_type="application/json", + HTTP_X_HUB_SIGNATURE=WEBHOOK_MERGED_ACTION_SIGNATURE, + HTTP_X_GITHUB_EVENT="pull_request", + ) + + # Then + feature.refresh_from_db() + assert response.status_code == status.HTTP_200_OK + assert feature.tags.first().label == "PR Merged" + + +def test_github_webhook_without_installation_id( + api_client: APIClient, mocker: MockerFixture, + set_github_webhook_secret, ) -> None: # Given - settings.GITHUB_WEBHOOK_SECRET = WEBHOOK_SECRET url = reverse("api-v1:github-webhook") mocker_logger = mocker.patch("integrations.github.github.logger") # When response = api_client.post( path=url, - data=WEBHOOK_PAYLOAD_WITH_AN_INVALID_INSTALLATION_ID, + data=WEBHOOK_PAYLOAD_WITHOUT_INSTALLATION_ID, content_type="application/json", - HTTP_X_HUB_SIGNATURE=WEBHOOK_SIGNATURE_WITH_AN_INVALID_INSTALLATION_ID, + 
HTTP_X_HUB_SIGNATURE=WEBHOOK_SIGNATURE_WITHOUT_INSTALLATION_ID, HTTP_X_GITHUB_EVENT="installation", ) # Then mocker_logger.error.assert_called_once_with( - "GitHub Configuration with installation_id 765432 does not exist" + "The installation_id is not present in the payload: {'installation': {'test': 765432}, 'action': 'deleted'}" ) assert response.status_code == status.HTTP_200_OK -def test_github_webhook_without_installation_id( +def test_github_webhook_with_non_existing_installation( api_client: APIClient, github_configuration: GithubConfiguration, mocker: MockerFixture, + set_github_webhook_secret, ) -> None: # Given - settings.GITHUB_WEBHOOK_SECRET = WEBHOOK_SECRET url = reverse("api-v1:github-webhook") mocker_logger = mocker.patch("integrations.github.github.logger") # When response = api_client.post( path=url, - data=WEBHOOK_PAYLOAD_WITHOUT_INSTALLATION_ID, + data=WEBHOOK_PAYLOAD_WITH_AN_INVALID_INSTALLATION_ID, content_type="application/json", - HTTP_X_HUB_SIGNATURE=WEBHOOK_SIGNATURE_WITHOUT_INSTALLATION_ID, + HTTP_X_HUB_SIGNATURE=WEBHOOK_SIGNATURE_WITH_AN_INVALID_INSTALLATION_ID, HTTP_X_GITHUB_EVENT="installation", ) # Then mocker_logger.error.assert_called_once_with( - "The installation_id is not present in the payload: {'installation': {'test': 765432}, 'action': 'deleted'}" + "GitHub Configuration with installation_id 765432 does not exist" ) assert response.status_code == status.HTTP_200_OK def test_github_webhook_fails_on_signature_header_missing( github_configuration: GithubConfiguration, + set_github_webhook_secret, ) -> None: # Given - settings.GITHUB_WEBHOOK_SECRET = WEBHOOK_SECRET url = reverse("api-v1:github-webhook") # When @@ -751,9 +822,9 @@ def test_github_webhook_fails_on_signature_header_missing( def test_github_webhook_fails_on_bad_signature_header_missing( github_configuration: GithubConfiguration, + set_github_webhook_secret, ) -> None: # Given - settings.GITHUB_WEBHOOK_SECRET = WEBHOOK_SECRET url = reverse("api-v1:github-webhook") # 
When @@ -774,9 +845,9 @@ def test_github_webhook_fails_on_bad_signature_header_missing( def test_github_webhook_bypass_event( github_configuration: GithubConfiguration, + set_github_webhook_secret, ) -> None: # Given - settings.GITHUB_WEBHOOK_SECRET = WEBHOOK_SECRET url = reverse("api-v1:github-webhook") # When @@ -800,15 +871,10 @@ def test_cannot_fetch_pull_requests_when_github_request_call_failed( organisation: Organisation, github_configuration: GithubConfiguration, github_repository: GithubRepository, - mocker, + mock_github_client_generate_token: MagicMock, ) -> None: - # Given data = {"repo_owner": "owner", "repo_name": "repo"} - mock_generate_token = mocker.patch( - "integrations.github.client.generate_token", - ) - mock_generate_token.return_value = "mocked_token" responses.add( method="GET", url=f"{GITHUB_API_URL}repos/{data['repo_owner']}/{data['repo_name']}/pulls", @@ -833,14 +899,10 @@ def test_cannot_fetch_pulls_when_the_github_response_was_invalid( organisation: Organisation, github_configuration: GithubConfiguration, github_repository: GithubRepository, - mocker, + mock_github_client_generate_token: MagicMock, ) -> None: # Given data = {"repo_owner": "owner", "repo_name": "repo"} - mock_generate_token = mocker.patch( - "integrations.github.client.generate_token", - ) - mock_generate_token.return_value = "mocked_token" responses.add( method="GET", url=f"{GITHUB_API_URL}repos/{data['repo_owner']}/{data['repo_name']}/pulls", @@ -877,7 +939,7 @@ def test_fetch_github_repo_contributors( organisation: Organisation, github_configuration: GithubConfiguration, github_repository: GithubRepository, - mocker: MockerFixture, + mock_github_client_generate_token: MagicMock, ) -> None: # Given url = reverse( @@ -905,11 +967,6 @@ def test_fetch_github_repo_contributors( expected_response = {"results": mocked_github_response} - mock_generate_token = mocker.patch( - "integrations.github.client.generate_token", - ) - mock_generate_token.return_value = "mocked_token" - 
# Add response for endpoint being tested responses.add( method=responses.GET, @@ -1063,3 +1120,85 @@ def test_send_the_invalid_type_page_or_page_size_param_returns_400( assert response.status_code == status.HTTP_400_BAD_REQUEST response_json = response.json() assert response_json == error_response + + +@responses.activate +def test_label_and_tags_no_added_when_tagging_is_disabled( + admin_client_new: APIClient, + project: Project, + environment: Environment, + github_repository: GithubRepository, + feature_with_value: Feature, + mock_github_client_generate_token: MagicMock, + post_request_mock: MagicMock, +) -> None: + # Given + github_repository.tagging_enabled = False + github_repository.save() + repository_owner_name = ( + f"{github_repository.repository_owner}/{github_repository.repository_name}" + ) + + feature_external_resource_data = { + "type": "GITHUB_ISSUE", + "url": f"https://github.com/{repository_owner_name}/issues/35", + "feature": feature_with_value.id, + "metadata": {"state": "open"}, + } + + url = reverse( + "api-v1:projects:feature-external-resources-list", + kwargs={"project_pk": project.id, "feature_pk": feature_with_value.id}, + ) + + # When + response = admin_client_new.post( + url, data=feature_external_resource_data, format="json" + ) + + # Then + assert response.status_code == status.HTTP_201_CREATED + assert feature_with_value.tags.count() == 0 + + +@responses.activate +def test_update_github_repository( + admin_client_new: APIClient, + organisation: Organisation, + github_configuration: GithubConfiguration, + github_repository: GithubRepository, + project: Project, + mocker: MockerFixture, + mock_github_client_generate_token: MagicMock, +) -> None: + # Given + github_repository.tagging_enabled = False + github_repository.save() + data = { + "github_configuration": github_configuration.id, + "repository_owner": "repositoryowner", + "repository_name": "repositoryname", + "project": project.id, + "tagging_enabled": True, + } + + 
responses.add( + method="POST", + url=f"{GITHUB_API_URL}repos/repositoryowner/repositoryname/labels", + status=status.HTTP_200_OK, + json={}, + ) + + url = reverse( + "api-v1:organisations:repositories-detail", + args=[organisation.id, github_configuration.id, github_repository.id], + ) + # When + response = admin_client_new.put(url, data) + + # Then + assert response.status_code == status.HTTP_200_OK + assert GithubRepository.objects.filter(repository_owner="repositoryowner").exists() + assert GithubRepository.objects.get( + repository_owner="repositoryowner" + ).tagging_enabled diff --git a/api/webhooks/webhooks.py b/api/webhooks/webhooks.py index 75684e886614..e314b5976f9f 100644 --- a/api/webhooks/webhooks.py +++ b/api/webhooks/webhooks.py @@ -39,9 +39,6 @@ class WebhookEventType(enum.Enum): FLAG_DELETED = "FLAG_DELETED" AUDIT_LOG_CREATED = "AUDIT_LOG_CREATED" NEW_VERSION_PUBLISHED = "NEW_VERSION_PUBLISHED" - FEATURE_EXTERNAL_RESOURCE_ADDED = "FEATURE_EXTERNAL_RESOURCE_ADDED" - FEATURE_EXTERNAL_RESOURCE_REMOVED = "FEATURE_EXTERNAL_RESOURCE_REMOVED" - SEGMENT_OVERRIDE_DELETED = "SEGMENT_OVERRIDE_DELETED" class WebhookType(enum.Enum): From 3c46a31f6060f7a12206fa27409073da946130d8 Mon Sep 17 00:00:00 2001 From: Novak Zaballa <41410593+novakzaballa@users.noreply.github.com> Date: Mon, 12 Aug 2024 09:34:18 -0400 Subject: [PATCH 109/247] feat: Add tags for GitHub integration FE (#4035) --- frontend/common/constants.ts | 1 - .../common/services/useGithubRepository.ts | 2 +- frontend/common/types/requests.ts | 6 + frontend/common/types/responses.ts | 9 +- frontend/common/useInfiniteScroll.ts | 8 +- .../components/ExternalResourcesLinkTab.tsx | 61 +++++--- .../GitHubResourceSelectProvider.tsx | 11 +- .../web/components/GitHubResourcesSelect.tsx | 40 +++-- .../components/GithubRepositoriesTable.tsx | 148 ++++++++++++------ frontend/web/components/Icon.tsx | 91 +++++++++++ frontend/web/components/modals/CreateFlag.js | 1 + frontend/web/components/tags/TagContent.tsx | 55 
+++++-- 12 files changed, 327 insertions(+), 106 deletions(-) diff --git a/frontend/common/constants.ts b/frontend/common/constants.ts index 9ae54b5aa612..3e39d72ac042 100644 --- a/frontend/common/constants.ts +++ b/frontend/common/constants.ts @@ -228,7 +228,6 @@ export default { ), 'iOS': require('./code-help/traits/traits-ios')(envId, keywords, userId), }), - keys: { 'Java': 'java', 'JavaScript': 'javascript', diff --git a/frontend/common/services/useGithubRepository.ts b/frontend/common/services/useGithubRepository.ts index 40589d4c6af1..618c4adb4d79 100644 --- a/frontend/common/services/useGithubRepository.ts +++ b/frontend/common/services/useGithubRepository.ts @@ -43,7 +43,7 @@ export const githubRepositoryService = service >({ invalidatesTags: [{ id: 'LIST', type: 'GithubRepository' }], query: (query: Req['updateGithubRepository']) => ({ - body: query, + body: query.body, method: 'PUT', url: `organisations/${query.organisation_id}/integrations/github/${query.github_id}/repositories/${query.id}/`, }), diff --git a/frontend/common/types/requests.ts b/frontend/common/types/requests.ts index 6c1dd38321ea..b97b023de8c4 100644 --- a/frontend/common/types/requests.ts +++ b/frontend/common/types/requests.ts @@ -397,6 +397,12 @@ export type Req = { organisation_id: string github_id: string id: string + body: { + project: number + repository_name: string + repository_owner: string + tagging_enabled: boolean + } } deleteGithubRepository: { organisation_id: string diff --git a/frontend/common/types/responses.ts b/frontend/common/types/responses.ts index 15159fee182c..487c55ada915 100644 --- a/frontend/common/types/responses.ts +++ b/frontend/common/types/responses.ts @@ -115,7 +115,7 @@ export type ExternalResource = { url: string type: string project?: number - metadata?: { state?: string; title?: string } + metadata?: { [key: string]: string | number | boolean } feature: number } @@ -160,12 +160,14 @@ export type LaunchDarklyProjectImport = { project: number } 
-export type GithubResources = { +export type GithubResource = { html_url: string id: number number: number title: string state: string + merged: boolean + draft: boolean } export type GithubPaginatedRepos = { @@ -187,6 +189,7 @@ export type GithubRepository = { project: number repository_owner: string repository_name: string + tagging_enabled: boolean } export type githubIntegration = { @@ -685,7 +688,7 @@ export type Res = { externalResource: PagedResponse githubIntegrations: PagedResponse githubRepository: PagedResponse - githubResources: GitHubPagedResponse + githubResources: GitHubPagedResponse githubRepos: GithubPaginatedRepos segmentPriorities: {} featureSegment: FeatureState['feature_segment'] diff --git a/frontend/common/useInfiniteScroll.ts b/frontend/common/useInfiniteScroll.ts index 1dc1e3d975df..a10967cf4395 100644 --- a/frontend/common/useInfiniteScroll.ts +++ b/frontend/common/useInfiniteScroll.ts @@ -55,8 +55,11 @@ const useInfiniteScroll = < }, throttle) const refresh = useCallback(() => { - setLocalPage(1) - }, []) + queryResponse.refetch().then((newData) => { + setCombinedData(newData as unknown as RES) + setLocalPage(1) + }) + }, [queryResponse]) const loadMore = () => { if (queryResponse?.data?.next) { @@ -70,6 +73,7 @@ const useInfiniteScroll = < isLoading: queryResponse.isLoading, loadMore, loadingCombinedData: loadingCombinedData && queryResponse.isFetching, + // refetchData, refresh, response: queryResponse, searchItems, diff --git a/frontend/web/components/ExternalResourcesLinkTab.tsx b/frontend/web/components/ExternalResourcesLinkTab.tsx index 9af7a391042c..f3deb304f4f1 100644 --- a/frontend/web/components/ExternalResourcesLinkTab.tsx +++ b/frontend/web/components/ExternalResourcesLinkTab.tsx @@ -3,23 +3,26 @@ import MyRepositoriesSelect from './MyRepositoriesSelect' import ExternalResourcesTable, { ExternalResourcesTableBase, } from './ExternalResourcesTable' -import { ExternalResource } from 'common/types/responses' +import { 
ExternalResource, GithubResource } from 'common/types/responses' import { GitHubResourceSelectProvider } from './GitHubResourceSelectProvider' import { useCreateExternalResourceMutation } from 'common/services/useExternalResource' import Constants from 'common/constants' import Button from './base/forms/Button' import GitHubResourcesSelect from './GitHubResourcesSelect' import _ from 'lodash' +import AppActions from 'common/dispatcher/app-actions' type ExternalResourcesLinkTabType = { githubId: string organisationId: string featureId: string projectId: string + environmentId: string } type AddExternalResourceRowType = ExternalResourcesTableBase & { linkedExternalResources?: ExternalResource[] + environmentId: string } type GitHubStatusType = { @@ -28,6 +31,7 @@ type GitHubStatusType = { } const AddExternalResourceRow: FC = ({ + environmentId, featureId, linkedExternalResources, organisationId, @@ -36,8 +40,9 @@ const AddExternalResourceRow: FC = ({ repoOwner, }) => { const [externalResourceType, setExternalResourceType] = useState('') - const [featureExternalResource, setFeatureExternalResource] = - useState('') + const [featureExternalResource, setFeatureExternalResource] = useState< + GithubResource | undefined + >(undefined) const [lastSavedResource, setLastSavedResource] = useState< string | undefined >(undefined) @@ -68,11 +73,13 @@ const AddExternalResourceRow: FC = ({ repoOwner={repoOwner} repoName={repoName} githubResource={ - ( - _.find(_.values(Constants.resourceTypes), { - label: externalResourceType!, - }) as any - ).resourceType || '' + (externalResourceType && + ( + _.find(_.values(Constants.resourceTypes), { + label: externalResourceType!, + }) as any + ).resourceType) || + '' } > = ({ key as keyof typeof Constants.resourceTypes ].label === externalResourceType, ) - createExternalResource({ - body: { - feature: parseInt(featureId), - metadata: {}, - type: type!, - url: featureExternalResource, - }, - feature_id: featureId, - project_id: projectId, - 
}).then(() => { - toast('External Resource Added') - setLastSavedResource(featureExternalResource) - }) + if (type && featureExternalResource) { + createExternalResource({ + body: { + feature: parseInt(featureId), + metadata: { + 'draft': featureExternalResource.draft, + 'merged': featureExternalResource.merged, + 'state': featureExternalResource.state, + 'title': featureExternalResource.title, + }, + type: type, + url: featureExternalResource.html_url, + }, + feature_id: featureId, + project_id: projectId, + }).then(() => { + toast('External Resource Added') + setLastSavedResource(featureExternalResource.html_url) + AppActions.refreshFeatures(parseInt(projectId), environmentId) + }) + } else { + throw new Error('Invalid External Resource Data') + } }} > Save @@ -118,6 +135,7 @@ const AddExternalResourceRow: FC = ({ } const ExternalResourcesLinkTab: FC = ({ + environmentId, featureId, githubId, organisationId, @@ -157,6 +175,7 @@ const ExternalResourcesLinkTab: FC = ({ /> {repoName && repoOwner && ( void loadingCombinedData: boolean nextPage?: string searchItems: (search: string) => void + refresh: () => void } const GitHubResourceSelectContext = createContext< @@ -34,7 +35,7 @@ export const GitHubResourceSelectProvider: FC< GitHubResourceSelectProviderType > = ({ children, ...props }) => { const [externalResourcesSelect, setExternalResourcesSelect] = - useState() + useState() const throttleDelay = 300 @@ -44,6 +45,7 @@ export const GitHubResourceSelectProvider: FC< isLoading, loadMore, loadingCombinedData, + refresh, searchItems, } = useInfiniteScroll( useGetGithubResourcesQuery, @@ -62,7 +64,7 @@ export const GitHubResourceSelectProvider: FC< useEffect(() => { if (results && props.linkedExternalResources) { setExternalResourcesSelect( - results.filter((i: GithubResources) => { + results.filter((i: GithubResource) => { const same = props.linkedExternalResources?.some( (r) => i.html_url === r.url, ) @@ -84,6 +86,7 @@ export const GitHubResourceSelectProvider: FC< 
loadMore, loadingCombinedData, nextPage: next, + refresh, searchItems, }} > diff --git a/frontend/web/components/GitHubResourcesSelect.tsx b/frontend/web/components/GitHubResourcesSelect.tsx index cc708fb7a901..8b814ec62fb3 100644 --- a/frontend/web/components/GitHubResourcesSelect.tsx +++ b/frontend/web/components/GitHubResourcesSelect.tsx @@ -1,9 +1,13 @@ import React, { FC, useEffect, useRef, useState } from 'react' -import { GithubResources } from 'common/types/responses' +import { GithubResource } from 'common/types/responses' import Utils from 'common/utils/utils' import { FixedSizeList } from 'react-window' import InfiniteLoader from 'react-window-infinite-loader' import { useGitHubResourceSelectProvider } from './GitHubResourceSelectProvider' +import { components } from 'react-select' +import Button from './base/forms/Button' +import Icon from './Icon' +import Select from 'react-select' type MenuListType = { children: React.ReactNode @@ -106,6 +110,23 @@ const MenuList: FC = ({ ) } +const CustomControl = ({ + children, + ...props +}: { + children: React.ReactNode +}) => { + const { refresh } = useGitHubResourceSelectProvider() + return ( + + {children} + + + ) +} + export type GitHubResourcesSelectType = { onChange: (value: string) => void lastSavedResource: string | undefined @@ -119,15 +140,8 @@ const GitHubResourcesSelect: FC = ({ lastSavedResource, onChange, }) => { - const { - githubResources, - isFetching, - isLoading, - loadMore, - loadingCombinedData, - nextPage, - searchItems, - } = useGitHubResourceSelectProvider() + const { githubResources, isFetching, isLoading, searchItems } = + useGitHubResourceSelectProvider() const [selectedOption, setSelectedOption] = useState(null) const [searchText, setSearchText] = React.useState('') @@ -152,11 +166,10 @@ const GitHubResourcesSelect: FC = ({ onChange(v?.value) }} isClearable={true} - options={githubResources?.map((i: GithubResources) => { + options={githubResources?.map((i: GithubResource) => { return { 
label: `${i.title} #${i.number}`, - status: i.state, - value: i.html_url, + value: i, } })} noOptionsMessage={() => @@ -171,6 +184,7 @@ const GitHubResourcesSelect: FC = ({ searchItems(Utils.safeParseEventValue(e)) }} components={{ + Control: CustomControl, MenuList, }} data={{ searchText }} diff --git a/frontend/web/components/GithubRepositoriesTable.tsx b/frontend/web/components/GithubRepositoriesTable.tsx index d8fdc7f4eaf0..1898d5bb607c 100644 --- a/frontend/web/components/GithubRepositoriesTable.tsx +++ b/frontend/web/components/GithubRepositoriesTable.tsx @@ -1,9 +1,14 @@ import React, { FC, useEffect } from 'react' -import { useDeleteGithubRepositoryMutation } from 'common/services/useGithubRepository' +import { + useDeleteGithubRepositoryMutation, + useUpdateGithubRepositoryMutation, +} from 'common/services/useGithubRepository' import Button from './base/forms/Button' import Icon from './Icon' import PanelSearch from './PanelSearch' import { GithubRepository } from 'common/types/responses' +import Switch from './Switch' +import Tooltip from './Tooltip' export type GithubRepositoriesTableType = { repos: GithubRepository[] | undefined @@ -16,15 +21,6 @@ const GithubRepositoriesTable: FC = ({ organisationId, repos, }) => { - const [deleteGithubRepository, { isSuccess: isDeleted }] = - useDeleteGithubRepositoryMutation() - - useEffect(() => { - if (isDeleted) { - toast('Repository unlinked to Project') - } - }, [isDeleted]) - return (
= ({ header={ Repository +
+ {' '} + + {'Add Tags and Labels'} + +
+ } + place='top' + > + { + 'If enabled, features will be tagged with the GitHub resource type, and the Issue/PR will have the label "Flagsmith flag"' + } + +
Remove
} renderRow={(repo: GithubRepository) => ( - - -
{`${repo.repository_owner} - ${repo.repository_name}`}
-
-
- -
-
+ )} />
) } +const TableRow: FC<{ + githubId: string + organisationId: string + repo: GithubRepository +}> = ({ githubId, organisationId, repo }) => { + const [deleteGithubRepository, { isSuccess: isDeleted }] = + useDeleteGithubRepositoryMutation() + + useEffect(() => { + if (isDeleted) { + toast('Repository unlinked to Project') + } + }, [isDeleted]) + + const [updateGithubRepository] = useUpdateGithubRepositoryMutation() + const [taggingenEnabled, setTaggingEnabled] = React.useState( + repo.tagging_enabled || false, + ) + return ( + + +
{`${repo.repository_owner} - ${repo.repository_name}`}
+
+
+ { + updateGithubRepository({ + body: { + project: repo.project, + repository_name: repo.repository_name, + repository_owner: repo.repository_owner, + tagging_enabled: !repo.tagging_enabled || false, + }, + github_id: githubId, + id: `${repo.id}`, + organisation_id: organisationId, + }).then(() => { + setTaggingEnabled(!taggingenEnabled) + }) + }} + /> +
+
+ +
+
+ ) +} + export default GithubRepositoriesTable diff --git a/frontend/web/components/Icon.tsx b/frontend/web/components/Icon.tsx index dd4bdd56969d..6f23a68bfd4e 100644 --- a/frontend/web/components/Icon.tsx +++ b/frontend/web/components/Icon.tsx @@ -55,6 +55,12 @@ export type IconName = | 'required' | 'more-vertical' | 'open-external-link' + | 'issue-closed' + | 'issue-linked' + | 'pr-merged' + | 'pr-draft' + | 'pr-linked' + | 'pr-closed' export type IconType = React.DetailedHTMLProps< React.HTMLAttributes, @@ -1232,6 +1238,91 @@ const Icon: FC = ({ fill, fill2, height, name, width, ...rest }) => { ) } + case 'pr-merged': { + return ( + + + + ) + } + case 'issue-closed': { + return ( + + + + + ) + } + case 'issue-linked': { + return ( + + + + + ) + } + case 'pr-linked': { + return ( + + + + ) + } + case 'pr-closed': { + return ( + + + + ) + } + case 'pr-draft': { + return ( + + + + ) + } default: return null } diff --git a/frontend/web/components/modals/CreateFlag.js b/frontend/web/components/modals/CreateFlag.js index 9dd51992ddb5..f24b7ea3fbe9 100644 --- a/frontend/web/components/modals/CreateFlag.js +++ b/frontend/web/components/modals/CreateFlag.js @@ -1757,6 +1757,7 @@ const CreateFlag = class extends Component { } featureId={projectFlag.id} projectId={`${this.props.projectId}`} + environmentId={this.props.environmentId} /> )} diff --git a/frontend/web/components/tags/TagContent.tsx b/frontend/web/components/tags/TagContent.tsx index 3652d6ae9e08..106b56a25568 100644 --- a/frontend/web/components/tags/TagContent.tsx +++ b/frontend/web/components/tags/TagContent.tsx @@ -9,6 +9,7 @@ import { getTagColor } from './Tag' import OrganisationStore from 'common/stores/organisation-store' import Utils from 'common/utils/utils' import classNames from 'classnames' +import Icon from 'components/Icon' type TagContent = { tag: Partial } @@ -19,6 +20,44 @@ function escapeHTML(unsafe: string) { ) } +const renderIcon = (tagType: string, tagColor: string, tagLabel: string) => 
{ + switch (tagType) { + case 'STALE': + return ( + + ) + case 'GITHUB': + switch (tagLabel) { + case 'PR Open': + return + case 'PR Merged': + return + case 'PR Closed': + return + case 'PR Draft': + return + case 'Issue Open': + return + case 'Issue Closed': + return + default: + return + } + default: + return ( + + ) + } +} + const getTooltip = (tag: TTag | undefined) => { if (!tag) { return null @@ -86,21 +125,7 @@ const TagContent: FC = ({ tag }) => { })} > {tagLabel} - {tag.type === 'STALE' ? ( - - ) : ( - tag.is_permanent && ( - - ) - )} + {renderIcon(tag.type!, tag.color!, tag.label!)} } > From b1d49a63b5d7c1fe319185586f486829626840cd Mon Sep 17 00:00:00 2001 From: Kyle Johnson Date: Mon, 12 Aug 2024 14:55:17 +0100 Subject: [PATCH 110/247] feat: Use environment feature state instead of fetching feature states (#4188) --- frontend/common/stores/feature-list-store.ts | 45 +++++++------------- 1 file changed, 15 insertions(+), 30 deletions(-) diff --git a/frontend/common/stores/feature-list-store.ts b/frontend/common/stores/feature-list-store.ts index d833f865aa9a..b2067cf5695b 100644 --- a/frontend/common/stores/feature-list-store.ts +++ b/frontend/common/stores/feature-list-store.ts @@ -36,23 +36,6 @@ import { getFeatureStates } from 'common/services/useFeatureState' import { getSegments } from 'common/services/useSegment' let createdFirstFeature = false const PAGE_SIZE = 50 -function recursivePageGet(url, parentRes) { - return data.get(url).then((res) => { - let response - if (parentRes) { - response = { - ...parentRes, - results: parentRes.results.concat(res.results), - } - } else { - response = res - } - if (res.next) { - return recursivePageGet(res.next, response) - } - return Promise.resolve(response) - }) -} const convertSegmentOverrideToFeatureState = ( override, @@ -129,15 +112,15 @@ const controller = { .then(() => Promise.all([ data.get(`${Project.api}projects/${projectId}/features/`), - data.get( - 
`${Project.api}environments/${environmentId}/featurestates/`, - ), - ]).then(([features, environmentFeatures]) => { + ]).then(([features]) => { + const environmentFeatures = features.results.map((v) => ({ + ...v.environment_feature_state, + feature: v.id, + })) store.model = { features: features.results, keyedEnvironmentFeatures: - environmentFeatures && - _.keyBy(environmentFeatures.results, 'feature'), + environmentFeatures && _.keyBy(environmentFeatures, 'feature'), } store.model.lastSaved = new Date().valueOf() store.saved({ createdFlag: flag.name }) @@ -863,16 +846,17 @@ const controller = { return Promise.all([ data.get(featuresEndpoint), - recursivePageGet( - `${Project.api}environments/${environmentId}/featurestates/?page_size=${PAGE_SIZE}`, - ), feature ? data.get( `${Project.api}projects/${projectId}/features/${feature}/`, ) : Promise.resolve(), ]) - .then(([features, environmentFeatures, feature]) => { + .then(([features, feature]) => { + const environmentFeatures = features.results.map((v) => ({ + ...v.environment_feature_state, + feature: v.id, + })) if (store.filter !== filter) { //The filter has been changed since, ignore the api response. This will be resolved when moving to RTK. 
return @@ -904,9 +888,10 @@ const controller = { store.model = { features: features.results.map(controller.parseFlag), - keyedEnvironmentFeatures: - environmentFeatures.results && - _.keyBy(environmentFeatures.results, 'feature'), + keyedEnvironmentFeatures: _.keyBy( + environmentFeatures, + 'feature', + ), } store.loaded() }) From 3b47ae07848c1330210d18bd8bb4194fa5d9262e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rodrigo=20L=C3=B3pez=20Dato?= Date: Mon, 12 Aug 2024 11:06:18 -0300 Subject: [PATCH 111/247] feat: Support Aptible deployments (#4340) --- api/app/urls.py | 3 + api/core/management/commands/waitfordb.py | 1 + api/scripts/run-docker.sh | 14 +++-- docs/docs/deployment/hosting/aptible.md | 75 +++++++++++++++++++++++ 4 files changed, 89 insertions(+), 4 deletions(-) create mode 100644 docs/docs/deployment/hosting/aptible.md diff --git a/api/app/urls.py b/api/app/urls.py index a110c7d111c0..c58409d0f3f3 100644 --- a/api/app/urls.py +++ b/api/app/urls.py @@ -15,6 +15,9 @@ re_path(r"^api/v2/", include("api.urls.v2", namespace="api-v2")), re_path(r"^admin/", admin.site.urls), re_path(r"^health", include("health_check.urls", namespace="health")), + # Aptible health checks must be on /healthcheck and cannot redirect + # see https://www.aptible.com/docs/core-concepts/apps/connecting-to-apps/app-endpoints/https-endpoints/health-checks + path("healthcheck", include("health_check.urls", namespace="health")), re_path(r"^version", views.version_info, name="version-info"), re_path( r"^sales-dashboard/", diff --git a/api/core/management/commands/waitfordb.py b/api/core/management/commands/waitfordb.py index 91c37468bc26..811af752994e 100644 --- a/api/core/management/commands/waitfordb.py +++ b/api/core/management/commands/waitfordb.py @@ -45,6 +45,7 @@ def handle( database: str, **options: Any, ) -> None: + start = time.monotonic() wait_between_checks = 0.25 diff --git a/api/scripts/run-docker.sh b/api/scripts/run-docker.sh index 939a339b504b..88ae045c98f5 100755 --- 
a/api/scripts/run-docker.sh +++ b/api/scripts/run-docker.sh @@ -1,8 +1,14 @@ #!/bin/sh set -e +function waitfordb() { + if [ -z "${SKIP_WAIT_FOR_DB}" ]; then + python manage.py waitfordb "$@" + fi +} + function migrate () { - python manage.py waitfordb && python manage.py migrate && python manage.py createcachetable + waitfordb && python manage.py migrate && python manage.py createcachetable } function serve() { # configuration parameters for statsd. Docs can be found here: @@ -10,7 +16,7 @@ function serve() { export STATSD_PORT=${STATSD_PORT:-8125} export STATSD_PREFIX=${STATSD_PREFIX:-flagsmith.api} - python manage.py waitfordb + waitfordb exec gunicorn --bind 0.0.0.0:8000 \ --worker-tmp-dir /dev/shm \ @@ -26,9 +32,9 @@ function serve() { app.wsgi } function run_task_processor() { - python manage.py waitfordb --waitfor 30 --migrations + waitfordb --waitfor 30 --migrations if [[ -n "$ANALYTICS_DATABASE_URL" || -n "$DJANGO_DB_NAME_ANALYTICS" ]]; then - python manage.py waitfordb --waitfor 30 --migrations --database analytics + waitfordb --waitfor 30 --migrations --database analytics fi RUN_BY_PROCESSOR=1 exec python manage.py runprocessor \ --sleepintervalms ${TASK_PROCESSOR_SLEEP_INTERVAL:-500} \ diff --git a/docs/docs/deployment/hosting/aptible.md b/docs/docs/deployment/hosting/aptible.md new file mode 100644 index 000000000000..92c0c135dd52 --- /dev/null +++ b/docs/docs/deployment/hosting/aptible.md @@ -0,0 +1,75 @@ +--- +title: Aptible +--- + +## Prerequisites + +The options and health check routes described in this document are available from Flagsmith 2.130.0. + +## Configuration + +Running Flagsmith on Aptible requires some configuration tweaks because of how Aptible's application lifecycle works: + +- Don't wait for the database to be available before the Flagsmith API starts. You can do this by setting the + `SKIP_WAIT_FOR_DB` environment variable. 
+- Add `containers` as an allowed host to comply with Aptible's + [strict health checks](https://www.aptible.com/docs/core-concepts/apps/connecting-to-apps/app-endpoints/https-endpoints/health-checks#strict-health-checks). +- Use the `before_release` tasks from `.aptible.yml` to run database migrations +- Use a Procfile to only start the API and not perform database migrations on startup + +This configuration can be applied by adding the Procfile and `.aptible.yml` configuration files to a +[Docker image](https://www.aptible.com/docs/core-concepts/apps/deploying-apps/image/deploying-with-docker-image/overview#how-do-i-deploy-from-docker-image) +that you build starting from a Flagsmith base image: + +```text title="Procfile" +cmd: serve +``` + +```yaml title=".aptible.yml" +before_release: + - migrate + - bootstrap +``` + +```dockerfile title="Dockerfile" +# Use flagsmith/flagsmith-private-cloud for the Enterprise image +FROM --platform=linux/amd64 flagsmith/flagsmith + +# Don't wait for the database to be available during startup for health checks to succeed +ENV SKIP_WAIT_FOR_DB=1 + +# Use root user to add Aptible files to the container +USER root +RUN mkdir /.aptible/ +ADD Procfile /.aptible/Procfile +ADD .aptible.yml /.aptible/.aptible.yml + +# Use non-root user at runtime +USER nobody +``` + +Before deploying, set the environment variables for your database URL and allowed hosts from the Aptible dashboard, or +using the Aptible CLI: + +```shell +aptible config:set --app flagsmith \ + DATABASE_URL=postgresql://aptible:...@...:23532/db \ + DJANGO_ALLOWED_HOSTS='containers,YOUR_APTIBLE_HOSTNAME' +``` + +## Deployment + +After your image is built and pushed to a container registry that Aptible can access, you can deploy it using the +Aptible CLI as you would any other application: + +```shell +aptible deploy --app flagsmith --docker-image example/my-flagsmith-aptible-image +``` + +Once Flagsmith is running in Aptible, make sure to create the first admin user by 
visiting `/api/v1/users/config/init/`. + +## Limitations + +The steps described in this document do not deploy the +[asynchronous task processor](/deployment/configuration/task-processor), which may affect performance in production +workloads. From 6ef7a742f0f56aef2335da380770dc7f307d53c5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rodrigo=20L=C3=B3pez=20Dato?= Date: Mon, 12 Aug 2024 11:30:43 -0300 Subject: [PATCH 112/247] fix: Remove warning about non-unique health namespace (#4479) --- api/app/urls.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/app/urls.py b/api/app/urls.py index c58409d0f3f3..65caa58df8ba 100644 --- a/api/app/urls.py +++ b/api/app/urls.py @@ -17,7 +17,7 @@ re_path(r"^health", include("health_check.urls", namespace="health")), # Aptible health checks must be on /healthcheck and cannot redirect # see https://www.aptible.com/docs/core-concepts/apps/connecting-to-apps/app-endpoints/https-endpoints/health-checks - path("healthcheck", include("health_check.urls", namespace="health")), + path("healthcheck", include("health_check.urls", namespace="aptible")), re_path(r"^version", views.version_info, name="version-info"), re_path( r"^sales-dashboard/", From 6660af56047e8d7083486b2dab20df44d0fc9303 Mon Sep 17 00:00:00 2001 From: Matthew Elwell Date: Mon, 12 Aug 2024 16:32:54 +0100 Subject: [PATCH 113/247] fix: ensure that usage notification logic is independent of other organisations notifications (#4480) --- api/organisations/task_helpers.py | 1 + .../test_unit_organisations_tasks.py | 26 +++++++++++++++++-- 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/api/organisations/task_helpers.py b/api/organisations/task_helpers.py index f9dc52dc22bc..daacfaf3d13a 100644 --- a/api/organisations/task_helpers.py +++ b/api/organisations/task_helpers.py @@ -139,6 +139,7 @@ def handle_api_usage_notification_for_organisation(organisation: Organisation) - return if OrganisationAPIUsageNotification.objects.filter( + 
organisation_id=organisation.id, notified_at__gt=period_starts_at, percent_usage__gte=matched_threshold, ).exists(): diff --git a/api/tests/unit/organisations/test_unit_organisations_tasks.py b/api/tests/unit/organisations/test_unit_organisations_tasks.py index f4038c8c1a3b..83d1bf006c97 100644 --- a/api/tests/unit/organisations/test_unit_organisations_tasks.py +++ b/api/tests/unit/organisations/test_unit_organisations_tasks.py @@ -362,11 +362,28 @@ def test_handle_api_usage_notifications_below_100( organisation=organisation, ).exists() + # Create an OrganisationApiUsageNotification object for another organisation + # to verify that only the correct organisation's notifications are taken into + # account. + another_organisation = Organisation.objects.create(name="Another Organisation") + OrganisationAPIUsageNotification.objects.create( + organisation=another_organisation, + percent_usage=100, + notified_at=now - timedelta(days=1), + ) + # When handle_api_usage_notifications() # Then - mock_api_usage.assert_called_once_with(organisation.id, now - timedelta(days=14)) + assert len(mock_api_usage.call_args_list) == 2 + + # We only care about the call for the main organisation, + # not the call for 'another_organisation' + assert mock_api_usage.call_args_list[0].args == ( + organisation.id, + now - timedelta(days=14), + ) assert len(mailoutbox) == 1 email = mailoutbox[0] @@ -410,7 +427,12 @@ def test_handle_api_usage_notifications_below_100( ).count() == 1 ) - assert OrganisationAPIUsageNotification.objects.first() == api_usage_notification + assert ( + OrganisationAPIUsageNotification.objects.filter( + organisation=organisation + ).first() + == api_usage_notification + ) @pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") From 4349149700ad92e824c115c93f1912c7009c9aa2 Mon Sep 17 00:00:00 2001 From: Gagan Date: Tue, 13 Aug 2024 14:17:10 +0530 Subject: [PATCH 114/247] infra: reduce task retention days to 7 (#4484) --- 
.../aws/production/ecs-task-definition-task-processor.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/infrastructure/aws/production/ecs-task-definition-task-processor.json b/infrastructure/aws/production/ecs-task-definition-task-processor.json index 769e2764e6e4..781de824f235 100644 --- a/infrastructure/aws/production/ecs-task-definition-task-processor.json +++ b/infrastructure/aws/production/ecs-task-definition-task-processor.json @@ -139,7 +139,7 @@ }, { "name": "TASK_DELETE_RETENTION_DAYS", - "value": "44" + "value": "7" }, { "name": "TASK_DELETE_BATCH_SIZE", From 16881a63e67e7be6baaad11ba78f4871b3c4d5bd Mon Sep 17 00:00:00 2001 From: Flagsmith Bot <65724737+flagsmithdev@users.noreply.github.com> Date: Tue, 13 Aug 2024 10:38:52 +0100 Subject: [PATCH 115/247] chore(main): release 2.136.0 (#4478) --- .release-please-manifest.json | 2 +- CHANGELOG.md | 21 +++++++++++++++++++++ version.txt | 2 +- 3 files changed, 23 insertions(+), 2 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 72a40d6930da..04bca1732ea1 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.135.1" + ".": "2.136.0" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 889c3b636fe5..3669c6f0b6c5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,26 @@ # Changelog +## [2.136.0](https://github.com/Flagsmith/flagsmith/compare/v2.135.1...v2.136.0) (2024-08-13) + + +### Features + +* Add automatic tagging for github integration ([#4028](https://github.com/Flagsmith/flagsmith/issues/4028)) ([7920e8e](https://github.com/Flagsmith/flagsmith/commit/7920e8e22e15fc2f91dbd56582679b1f3064e4a9)) +* Add tags for GitHub integration FE ([#4035](https://github.com/Flagsmith/flagsmith/issues/4035)) ([3c46a31](https://github.com/Flagsmith/flagsmith/commit/3c46a31f6060f7a12206fa27409073da946130d8)) +* Support Aptible deployments 
([#4340](https://github.com/Flagsmith/flagsmith/issues/4340)) ([3b47ae0](https://github.com/Flagsmith/flagsmith/commit/3b47ae07848c1330210d18bd8bb4194fa5d9262e)) +* Use environment feature state instead of fetching feature states ([#4188](https://github.com/Flagsmith/flagsmith/issues/4188)) ([b1d49a6](https://github.com/Flagsmith/flagsmith/commit/b1d49a63b5d7c1fe319185586f486829626840cd)) + + +### Bug Fixes + +* ensure that usage notification logic is independent of other organisations notifications ([#4480](https://github.com/Flagsmith/flagsmith/issues/4480)) ([6660af5](https://github.com/Flagsmith/flagsmith/commit/6660af56047e8d7083486b2dab20df44d0fc9303)) +* Remove warning about non-unique health namespace ([#4479](https://github.com/Flagsmith/flagsmith/issues/4479)) ([6ef7a74](https://github.com/Flagsmith/flagsmith/commit/6ef7a742f0f56aef2335da380770dc7f307d53c5)) + + +### Infrastructure (Flagsmith SaaS Only) + +* reduce task retention days to 7 ([#4484](https://github.com/Flagsmith/flagsmith/issues/4484)) ([4349149](https://github.com/Flagsmith/flagsmith/commit/4349149700ad92e824c115c93f1912c7009c9aa2)) + ## [2.135.1](https://github.com/Flagsmith/flagsmith/compare/v2.135.0...v2.135.1) (2024-08-12) diff --git a/version.txt b/version.txt index 42ee4cb9c629..ac8faeddc8a7 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -2.135.1 +2.136.0 From cd4fbe7bbf27b17a7d6bd1161c9f7c2431ae9a2f Mon Sep 17 00:00:00 2001 From: Gagan Date: Tue, 13 Aug 2024 15:37:56 +0530 Subject: [PATCH 116/247] feat: make pg usage cache timeout configurable (#4485) --- api/app/settings/common.py | 1 + api/app_analytics/cache.py | 6 +++--- .../test_unit_app_analytics_cache.py | 15 ++++++++------- 3 files changed, 12 insertions(+), 10 deletions(-) diff --git a/api/app/settings/common.py b/api/app/settings/common.py index 9eec2514f3fd..c925ef9344fd 100644 --- a/api/app/settings/common.py +++ b/api/app/settings/common.py @@ -330,6 +330,7 @@ USE_POSTGRES_FOR_ANALYTICS = 
env.bool("USE_POSTGRES_FOR_ANALYTICS", default=False) USE_CACHE_FOR_USAGE_DATA = env.bool("USE_CACHE_FOR_USAGE_DATA", default=False) +PG_API_USAGE_CACHE_SECONDS = env.int("PG_API_USAGE_CACHE_SECONDS", default=60) FEATURE_EVALUATION_CACHE_SECONDS = env.int( "FEATURE_EVALUATION_CACHE_SECONDS", default=60 diff --git a/api/app_analytics/cache.py b/api/app_analytics/cache.py index 5e5da9e5b370..f3f2a74416d7 100644 --- a/api/app_analytics/cache.py +++ b/api/app_analytics/cache.py @@ -5,8 +5,6 @@ from django.conf import settings from django.utils import timezone -CACHE_FLUSH_INTERVAL = 60 # seconds - class APIUsageCache: def __init__(self): @@ -33,7 +31,9 @@ def track_request(self, resource: int, host: str, environment_key: str): self._cache[key] = 1 else: self._cache[key] += 1 - if (timezone.now() - self._last_flushed_at).seconds > CACHE_FLUSH_INTERVAL: + if ( + timezone.now() - self._last_flushed_at + ).seconds > settings.PG_API_USAGE_CACHE_SECONDS: self._flush() diff --git a/api/tests/unit/app_analytics/test_unit_app_analytics_cache.py b/api/tests/unit/app_analytics/test_unit_app_analytics_cache.py index e6e6cde9b042..88ccc0cbe852 100644 --- a/api/tests/unit/app_analytics/test_unit_app_analytics_cache.py +++ b/api/tests/unit/app_analytics/test_unit_app_analytics_cache.py @@ -1,8 +1,4 @@ -from app_analytics.cache import ( - CACHE_FLUSH_INTERVAL, - APIUsageCache, - FeatureEvaluationCache, -) +from app_analytics.cache import APIUsageCache, FeatureEvaluationCache from app_analytics.models import Resource from django.utils import timezone from freezegun import freeze_time @@ -10,8 +6,13 @@ from pytest_mock import MockerFixture -def test_api_usage_cache(mocker: MockerFixture) -> None: +def test_api_usage_cache( + mocker: MockerFixture, + settings: SettingsWrapper, +) -> None: # Given + settings.PG_API_USAGE_CACHE_SECONDS = 60 + cache = APIUsageCache() now = timezone.now() mocked_track_request_task = mocker.patch("app_analytics.cache.track_request") @@ -30,7 +31,7 @@ def 
test_api_usage_cache(mocker: MockerFixture) -> None: assert not mocked_track_request_task.called # Now, let's move the time forward - frozen_time.tick(CACHE_FLUSH_INTERVAL + 1) + frozen_time.tick(settings.PG_API_USAGE_CACHE_SECONDS + 1) # let's track another request(to trigger flush) cache.track_request( From 36e634ca057e6aa55ffed41686a05df0883a1062 Mon Sep 17 00:00:00 2001 From: Zach Aysan Date: Tue, 13 Aug 2024 06:10:10 -0400 Subject: [PATCH 117/247] feat: Tweak email wording for grace periods (#4482) --- api/organisations/task_helpers.py | 1 + .../organisations/api_usage_notification.html | 4 ++-- .../organisations/api_usage_notification.txt | 4 ++-- .../organisations/api_usage_notification_limit.html | 4 ++-- .../organisations/api_usage_notification_limit.txt | 6 +++--- .../organisations/test_unit_organisations_tasks.py | 12 ++++++++++-- 6 files changed, 20 insertions(+), 11 deletions(-) diff --git a/api/organisations/task_helpers.py b/api/organisations/task_helpers.py index daacfaf3d13a..65d382358615 100644 --- a/api/organisations/task_helpers.py +++ b/api/organisations/task_helpers.py @@ -69,6 +69,7 @@ def _send_api_usage_notification( context = { "organisation": organisation, "matched_threshold": matched_threshold, + "grace_period": not hasattr(organisation, "breached_grace_period"), } send_mail( diff --git a/api/organisations/templates/organisations/api_usage_notification.html b/api/organisations/templates/organisations/api_usage_notification.html index 19b14d13b1b1..74deaac7fc7d 100644 --- a/api/organisations/templates/organisations/api_usage_notification.html +++ b/api/organisations/templates/organisations/api_usage_notification.html @@ -19,8 +19,8 @@ for overages after our first grace period of 30 days. {% else %} Please note that once 100% use has been breached, the serving of feature flags and admin access may - be disabled after a 7-day grace period. Please reach out to support@flagsmith.com in order to upgrade - your account. 
+ be disabled{% if grace_period %} after a 7-day grace period{% endif %}. Please reach out to + support@flagsmith.com in order to upgrade your account. {% endif %} diff --git a/api/organisations/templates/organisations/api_usage_notification.txt b/api/organisations/templates/organisations/api_usage_notification.txt index f5646e87abcd..e02fe3f967c5 100644 --- a/api/organisations/templates/organisations/api_usage_notification.txt +++ b/api/organisations/templates/organisations/api_usage_notification.txt @@ -8,8 +8,8 @@ If this is expected, no action is required. If you are expecting to go over, you limits by reaching out to support@flagsmith.com. We will automatically charge for overages after our first grace period of 30 days. {% else %} -Please note that once 100% use has been breached, the serving of feature flags and admin access may be disabled after a -7-day grace period. Please reach out to support@flagsmith.com in order to upgrade your account. +Please note that once 100% use has been breached, the serving of feature flags and admin access may be disabled{% if grace_period %} +after a 7-day grace period{% endif %}. Please reach out to support@flagsmith.com in order to upgrade your account. {% endif %} Thank you! diff --git a/api/organisations/templates/organisations/api_usage_notification_limit.html b/api/organisations/templates/organisations/api_usage_notification_limit.html index 15f4bf9ea3e5..fa79e1196f96 100644 --- a/api/organisations/templates/organisations/api_usage_notification_limit.html +++ b/api/organisations/templates/organisations/api_usage_notification_limit.html @@ -18,8 +18,8 @@ more information. You can reach out to support@flagsmith.com if you’d like to take advantage of better contracted rates. {% else %} - Please note that the serving of feature flags and admin access will be disabled after a 7 day grace - period until the next subscription period. 
If you’d like to continue service you can upgrade your + Please note that the serving of feature flags and admin access will be disabled{% if grace_period %} after a 7 day grace + period{% endif %} until the next subscription period. If you’d like to continue service you can upgrade your organisation’s account (see pricing page). {% endif %} diff --git a/api/organisations/templates/organisations/api_usage_notification_limit.txt b/api/organisations/templates/organisations/api_usage_notification_limit.txt index faf301b74c22..65c8d1eaa48c 100644 --- a/api/organisations/templates/organisations/api_usage_notification_limit.txt +++ b/api/organisations/templates/organisations/api_usage_notification_limit.txt @@ -7,9 +7,9 @@ has reached {{ matched_threshold }}% of your API usage within the current subscr We will charge for overages after our first grace period of 30 days. Please see the pricing page for more information. You can reach out to support@flagsmith.com if you’d like to take advantage of better contracted rates. {% else %} -Please note that the serving of feature flags and admin access will be disabled after a 7 day grace period until the -next subscription period. If you’d like to continue service you can upgrade your organisation’s account (see pricing -page). +Please note that the serving of feature flags and admin access will be disabled{% if grace_period %} after a 7 day +grace period{% endif %} until the next subscription period. If you’d like to continue service you can upgrade your +organisation’s account (see pricing page). {% endif %} Thank you! 
diff --git a/api/tests/unit/organisations/test_unit_organisations_tasks.py b/api/tests/unit/organisations/test_unit_organisations_tasks.py index 83d1bf006c97..4d1a38dd9520 100644 --- a/api/tests/unit/organisations/test_unit_organisations_tasks.py +++ b/api/tests/unit/organisations/test_unit_organisations_tasks.py @@ -664,7 +664,11 @@ def test_handle_api_usage_notifications_for_free_accounts( assert email.subject == "Flagsmith API use has reached 100%" assert email.body == render_to_string( "organisations/api_usage_notification_limit.txt", - context={"organisation": organisation, "matched_threshold": 100}, + context={ + "organisation": organisation, + "matched_threshold": 100, + "grace_period": True, + }, ) assert len(email.alternatives) == 1 @@ -673,7 +677,11 @@ def test_handle_api_usage_notifications_for_free_accounts( assert email.alternatives[0][0] == render_to_string( "organisations/api_usage_notification_limit.html", - context={"organisation": organisation, "matched_threshold": 100}, + context={ + "organisation": organisation, + "matched_threshold": 100, + "grace_period": True, + }, ) assert email.from_email == "noreply@flagsmith.com" From 19d5af9d1f2566fb80bc659bb4961791541063c7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rodrigo=20L=C3=B3pez=20Dato?= Date: Tue, 13 Aug 2024 10:22:35 -0300 Subject: [PATCH 118/247] docs: Deprecate group-level permissions. 
Rewrite RBAC docs and add Mermaid diagrams (#4469) --- docs/docs/system-administration/rbac.md | 195 +- docs/docusaurus.config.js | 5 + docs/package-lock.json | 5469 +++++++++++++------ docs/package.json | 4 +- frontend/web/components/PermissionsTabs.tsx | 186 +- 5 files changed, 3972 insertions(+), 1887 deletions(-) diff --git a/docs/docs/system-administration/rbac.md b/docs/docs/system-administration/rbac.md index f838fcf84355..b57fcc3d6e85 100644 --- a/docs/docs/system-administration/rbac.md +++ b/docs/docs/system-administration/rbac.md @@ -1,100 +1,173 @@ --- -title: Role Based Access Control +title: Role-based access control --- -Flagsmith provides fine-grained permissions to help larger teams manage access and roles across organisations, projects -and environments. - :::info -The Permissions/Role Based Access features of Flagsmith are _not_ part of the Open Source version. If you want to use -these features as part of a self hosted/on premise solution, please [get in touch](https://flagsmith.com/contact-us/). +Role-based access control requires an [Enterprise subscription](https://www.flagsmith.com/pricing). ::: -## Users, Groups, and Roles +Role-based access control (RBAC) provides fine-grained access management of Flagsmith resources. Using RBAC, you can +ensure users only have the access they need within your Flagsmith organisation. -Permissions can be assigned to Flagsmith individual users, groups, or roles. +For example, RBAC allows you to achieve the following scenarios: -### Users +- Only allow certain users to modify your production environments. +- Grant a default set of permissions to all users that join your Flagsmith organisation. +- Lock down an [Admin API](/clients/rest/#private-admin-api-endpoints) key to a specific set of permissions. +- Provide Flagsmith permissions based on your enterprise identity provider's groups when using + [SAML single sign-on](/system-administration/authentication/SAML/). 
-Flagsmith Users can be defined as Organisation Administrators or Users. Organisation Administrator is effectively a -super-user role, and gives full read/write access to every Project, Environment, Flag, Remote Config and Segment within -that Organisation. +To add users to your Flagsmith organisation or to manage user permissions, click on your organisation name in the top +left and open the **Users and Permissions** tab. -Users that are not Organisation Administrators must have permissions assigned to them manually at the relevant levels. +## Core concepts -### Groups +The diagram below shows an overview of how permissions are assigned within your Flagsmith organisation: -Groups are a convenient way to manage permissions for multiple Flagsmith users. Groups can contain any number of -Flagsmith users. You can create groups with the Organisation Settings page. +
+```mermaid +graph LR; + R[Custom roles] -->|Assigned to| G[Groups]; + B[Built-in role] -->|Assigned to| U[Users]; + R -->|Assigned to| U; + R -->|Assigned to| A[Admin API keys]; + G -->|Contains many| U; +``` -Members of a group can be designated as an admin for that group. As a group admin, users can manage the membership for -that group, but not the permissions the group has on other entities. +
### Roles -A _Role_ is an entity to which you can attach a set of permissions. Permissions can allow privileges at Organization, -Project, and Environment levels. You can assign a role, along with its associated permissions, to a User or Group. You -will also be able to assign API keys to a Role in future versions. +A role is a set of permissions that, when assigned, allows performing specific actions on your organisation, projects or +project environments. + +**Built-in roles** are predefined by Flagsmith and cannot be modified. All users in your organisation have one of the +following built-in roles: + +- _Organisation Administrator_ grants full access to everything in your Flagsmith organisation. +- _User_ grants no access and requires you to assign permissions using custom roles and/or groups. + +**Custom roles** can be assigned to users, groups or [Admin API](/clients/rest/#private-admin-api-endpoints) keys. Any +number of custom roles can be created and assigned. + +Creating, modifying or assigning roles requires organisation administrator permissions. + +### Groups + +A group is a collection of users. If a custom role is assigned to a group, the role's permissions will be granted to all +group members. Users can belong to any number of groups. + +Creating or modifying existing groups requires organisation administrator permissions. + +Permissions to add or remove users from groups can be granted in two ways: + +- The _manage group membership_ permission allows modifying any group's membership +- A _group admin_ can manage membership only for that group + +## Add users to your organisation + +You can add users to your organisation by sending them an invitation email from Flagsmith, or by sharing an invitation +link directly with them. Both options require organisation administrator permissions, and are available from **Users and +Permissions > Members**. 
+ +Users can also join your organisation directly by logging in to Flagsmith using +[single sign-on](/system-administration/authentication/SAML/). + +### Email invites + +:::info + +If you are self-hosting Flagsmith, you must +[configure an email provider](/deployment/hosting/locally-api#email-environment-variables) before using email invites. + +::: + +To send invitation emails to specific users, click on **Invite members**. Then, fill in the email address and built-in +role of each user you want to invite. + +When a user accepts their email invitation, they will be prompted to sign up for a Flagsmith account, or they can choose +to log in if they already have an account with the same email address. + +Users who have not yet accepted their invitations are listed in the "Pending invites" section at the bottom of this +page. From here you can also resend or revoke any pending invitations. + +### Invitation links + +:::warning + +Anyone with an invitation link can join your Flagsmith organisation at any time. Share these links with caution and +regenerate them if they are compromised. + +::: -#### Creating a Role +Direct links to join your organisation can be found in the **Team Members** section of this page. One direct link is +available for each built-in role that users will have when joining your organisation. -You can create a Role in the Organisation Settings page. +## Provision permissions -#### Add Permissions to a Role +If a user joins your organisation with the built-in _User_ role, they will not have any permissions to view or change +anything in your Flagsmith organisation. You can provide default fine-grained permissions to users with any of these +options: -Once the role is created you can assign the corresponding permissions. +- Add users by default to a group. When creating or editing a group, select the **Add new users by default** option. 
+ When a user logs in for the first time to your organisation, they will automatically be added to all groups that have + this option enabled. +- [Use existing groups from your enterprise identity provider](/system-administration/authentication/SAML/#using-groups-from-your-saml-idp). + Any time a user logs in using single sign-on, they will be made a member of any groups with matching external IDs. -**E.g. Add Project permission:** +## Deprecated features -- Choose a Role. -- Go to the Projects tab. -- Select a Project and enable the relevant permissions. +Groups can grant permissions directly to their members in the same way that roles do. This functionality was deprecated +in Flagsmith 2.137.0. To grant permissions to all members of a group, create a role with the desired permissions and +assign it to the group instead. -### Assign Role to Users or Groups +Assigning roles to groups has several benefits over assigning permissions directly to a group: -After creating the Role, you can assign it to Users or Groups. +- Roles can be assigned to Admin API keys, but Admin API keys cannot belong to groups. +- If you need multiple groups or users with similar permissions, the common permissions can be defined in a role and + assigned to multiple groups or users instead of being duplicated. +- Having roles as the single place where permissions are defined makes auditing permissions easier. -**E.g. Assign role to a user:** +## Permissions reference -- Choose a role. -- Go to the Members tab. -- Select the Users tab. -- Click assign role to user button and select a user. +Permissions can be assigned at four levels: user group, organisation, project, and environment. -## Permissions +### User group -Permissions can be assigned at 3 levels: Organisation, Project, and Environment. +| Permission | Ability | +| ----------- | ------------------------------------------------ | +| Group Admin | Allows adding or removing users from this group. 
| ### Organisation -| **Permission** | **Ability** | -| ------------------ | --------------------------------------------------------------------------- | -| Create Project | Allows the user to create Projects in the given Organisation | -| Manage User Groups | Allows the user to manage the Groups in the Organisation and their members. | +| Permission | Ability | +| ------------------ | ------------------------------------------------------------------------------------------------------------------------------------ | +| Create Project | Allows creating projects in the organisation. Users are automatically granted Administrator permissions on any projects they create. | +| Manage User Groups | Allows adding or removing users from any group. | ### Project -| **Permission** | **Ability** | -| ------------------ | ------------------------------------------------------------------------------------------ | -| Administrator | Full Read/Write over all Environments, Feature Flag, Remote Config, Segment and Tag values | -| View Project | Can view the Project within their account | -| Create Environment | Can create new Environments within the Project | -| Create Feature | Can create a new Feature / Remote Config | -| Delete Feature | Can remove an existing Feature / Remote Config entirely from the Project | -| Manage Segments | Can create, delete and edit Segments within the Project | -| View audit log | Allows the user to view the audit logs for this Project. | +| Permission | Ability | +| ------------------ | -------------------------------------------------------------------------------------------------------------------------------------------- | +| Administrator | Grants full read and write access to all environments, features and segments. | +| View Project | Allows viewing this project. The project is hidden from users without this permission. | +| Create Environment | Allows creating new environments in this project. 
Users are automatically granted Administrator permissions on any environments they create. |
+| Create Feature     | Allows creating new features in all environments.                                                                                             |
+| Delete Feature     | Allows deleting features from all environments.                                                                                               |
+| Manage Segments    | Grants write access to segments in this project.                                                                                              |
+| View audit log     | Allows viewing all audit log entries for this project.                                                                                        |
 
 ### Environment
 
-| **Permission**           | **Ability**                                                     |
-| ------------------------ | --------------------------------------------------------------- |
-| Administrator            | Can modify Feature Flag, Remote Config and Segment values       |
-| View Environment         | Can see the Environment within their account                    |
-| Update Feature State     | Update the state or value for a given feature                   |
-| Manage Identities        | View and update Identities                                      |
-| Manage Segment Overrides | Permission to manage segment overrides in the given environment |
-| Create Change Request    | Creating a new Change Request                                   |
-| Approve Change Request   | Approving or denying existing Change Requests                   |
-| View Identities          | Viewing Identities                                              |
+| Permission               | Ability                                                                                                                  |
+| ------------------------ | ------------------------------------------------------------------------------------------------------------------------ |
+| Administrator            | Grants full read and write access to all feature states, overrides, identities and change requests in this environment.  |
+| View Environment         | Allows viewing this environment. The environment is hidden from users without this permission.                           |
+| Update Feature State     | Allows updating any feature state or values in this environment.                                                         |
+| Manage Identities        | Grants read and write access to identities in this environment.                                                          |
+| Manage Segment Overrides | Grants write access to segment overrides in this environment.                                                            |
+| Create Change Request    | Allows creating change requests for features in this environment.                                                        |
+| Approve Change Request   | Allows approving or denying change requests in this environment.
| +| View Identities | Grants read-only access to identities in this environment. | diff --git a/docs/docusaurus.config.js b/docs/docusaurus.config.js index fc921b527bc0..69bbabd26d13 100644 --- a/docs/docusaurus.config.js +++ b/docs/docusaurus.config.js @@ -12,6 +12,11 @@ const config = { onBrokenLinks: 'throw', onBrokenMarkdownLinks: 'warn', + markdown: { + mermaid: true, + }, + themes: ['@docusaurus/theme-mermaid'], + themeConfig: /** @type {import('@docusaurus/preset-classic').ThemeConfig} */ ({ diff --git a/docs/package-lock.json b/docs/package-lock.json index 2dd9af1290ce..85cf48e9f44f 100644 --- a/docs/package-lock.json +++ b/docs/package-lock.json @@ -11,6 +11,7 @@ "@docusaurus/core": "^3.4.0", "@docusaurus/plugin-google-tag-manager": "^3.4.0", "@docusaurus/preset-classic": "^3.4.0", + "@docusaurus/theme-mermaid": "^3.4.0", "@mdx-js/react": "^3.0.0", "clsx": "^2.0.0", "prism-react-renderer": "^2.3.0", @@ -2182,6 +2183,12 @@ "node": ">=6.9.0" } }, + "node_modules/@braintree/sanitize-url": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-6.0.4.tgz", + "integrity": "sha512-s3jaWicZd0pkP0jf5ysyHUI/RE7MHos6qlToFcGWXVp+ykHOy77OUMrfbgJ9it2C5bow7OIQwYYaHjk9XlBQ2A==", + "license": "MIT" + }, "node_modules/@colors/colors": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", @@ -2678,6 +2685,28 @@ "react-dom": "^18.0.0" } }, + "node_modules/@docusaurus/theme-mermaid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/@docusaurus/theme-mermaid/-/theme-mermaid-3.4.0.tgz", + "integrity": "sha512-3w5QW0HEZ2O6x2w6lU3ZvOe1gNXP2HIoKDMJBil1VmLBc9PmpAG17VmfhI/p3L2etNmOiVs5GgniUqvn8AFEGQ==", + "license": "MIT", + "dependencies": { + "@docusaurus/core": "3.4.0", + "@docusaurus/module-type-aliases": "3.4.0", + "@docusaurus/theme-common": "3.4.0", + "@docusaurus/types": "3.4.0", + "@docusaurus/utils-validation": "3.4.0", + "mermaid": "^10.4.0", + "tslib": "^2.6.0" 
+ }, + "engines": { + "node": ">=18.0" + }, + "peerDependencies": { + "react": "^18.0.0", + "react-dom": "^18.0.0" + } + }, "node_modules/@docusaurus/theme-search-algolia": { "version": "3.4.0", "resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.4.0.tgz", @@ -3475,6 +3504,27 @@ "@types/node": "*" } }, + "node_modules/@types/d3-scale": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.8.tgz", + "integrity": "sha512-gkK1VVTr5iNiYJ7vWDI+yUFFlszhNMtVeneJ6lUTKPjprsvLLI9/tgEGiXJOnlINJA8FyA88gfnQsHbybVZrYQ==", + "license": "MIT", + "dependencies": { + "@types/d3-time": "*" + } + }, + "node_modules/@types/d3-scale-chromatic": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/d3-scale-chromatic/-/d3-scale-chromatic-3.0.3.tgz", + "integrity": "sha512-laXM4+1o5ImZv3RpFAsTRn3TEkzqkytiOY0Dz0sq5cnd1dtNlk6sHLon4OvqaiJb28T0S/TdsBI3Sjsy+keJrw==", + "license": "MIT" + }, + "node_modules/@types/d3-time": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.3.tgz", + "integrity": "sha512-2p6olUZ4w3s+07q3Tm2dbiMZy5pCDfYwtLXXHUnVzXgQlZ/OyPtUz6OL382BkOuGlLXqfT+wqv8Fw2v8/0geBw==", + "license": "MIT" + }, "node_modules/@types/debug": { "version": "4.1.12", "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", @@ -5248,6 +5298,15 @@ "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" }, + "node_modules/cose-base": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-1.0.3.tgz", + "integrity": "sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg==", + "license": "MIT", + "dependencies": { + "layout-base": "^1.0.0" + } + }, "node_modules/cosmiconfig": { "version": "8.3.6", "resolved": 
"https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz", @@ -5587,514 +5646,561 @@ "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.2.tgz", "integrity": "sha512-I7K1Uu0MBPzaFKg4nI5Q7Vs2t+3gWWW648spaF+Rg7pI9ds18Ugn+lvg4SHczUdKlHI5LWBXyqfS8+DufyBsgQ==" }, - "node_modules/debounce": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/debounce/-/debounce-1.2.1.tgz", - "integrity": "sha512-XRRe6Glud4rd/ZGQfiV1ruXSfbvfJedlV9Y6zOlP+2K04vBYiJEte6stfFkCP03aMnY5tsipamumUjL14fofug==" + "node_modules/cytoscape": { + "version": "3.30.2", + "resolved": "https://registry.npmjs.org/cytoscape/-/cytoscape-3.30.2.tgz", + "integrity": "sha512-oICxQsjW8uSaRmn4UK/jkczKOqTrVqt5/1WL0POiJUT2EKNc9STM4hYFHv917yu55aTBMFNRzymlJhVAiWPCxw==", + "license": "MIT", + "engines": { + "node": ">=0.10" + } }, - "node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "node_modules/cytoscape-cose-bilkent": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/cytoscape-cose-bilkent/-/cytoscape-cose-bilkent-4.1.0.tgz", + "integrity": "sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ==", + "license": "MIT", "dependencies": { - "ms": "2.1.2" + "cose-base": "^1.0.0" + }, + "peerDependencies": { + "cytoscape": "^3.2.0" + } + }, + "node_modules/d3": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/d3/-/d3-7.9.0.tgz", + "integrity": "sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==", + "license": "ISC", + "dependencies": { + "d3-array": "3", + "d3-axis": "3", + "d3-brush": "3", + "d3-chord": "3", + "d3-color": "3", + "d3-contour": "4", + "d3-delaunay": "6", + "d3-dispatch": "3", + "d3-drag": "3", + "d3-dsv": "3", + "d3-ease": "3", + "d3-fetch": "3", + "d3-force": "3", + "d3-format": 
"3", + "d3-geo": "3", + "d3-hierarchy": "3", + "d3-interpolate": "3", + "d3-path": "3", + "d3-polygon": "3", + "d3-quadtree": "3", + "d3-random": "3", + "d3-scale": "4", + "d3-scale-chromatic": "3", + "d3-selection": "3", + "d3-shape": "3", + "d3-time": "3", + "d3-time-format": "4", + "d3-timer": "3", + "d3-transition": "3", + "d3-zoom": "3" }, "engines": { - "node": ">=6.0" + "node": ">=12" + } + }, + "node_modules/d3-array": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz", + "integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==", + "license": "ISC", + "dependencies": { + "internmap": "1 - 2" }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } + "engines": { + "node": ">=12" } }, - "node_modules/decko": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/decko/-/decko-1.2.0.tgz", - "integrity": "sha512-m8FnyHXV1QX+S1cl+KPFDIl6NMkxtKsy6+U/aYyjrOqWMuwAwYWu7ePqrsUHtDR5Y8Yk2pi/KIDSgF+vT4cPOQ==" + "node_modules/d3-axis": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-axis/-/d3-axis-3.0.0.tgz", + "integrity": "sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==", + "license": "ISC", + "engines": { + "node": ">=12" + } }, - "node_modules/decode-named-character-reference": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.0.2.tgz", - "integrity": "sha512-O8x12RzrUF8xyVcY0KJowWsmaJxQbmy0/EtnNtHRpsOcT7dFk5W598coHqBVpmWo1oQQfsCqfCmkZN5DJrZVdg==", + "node_modules/d3-brush": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-brush/-/d3-brush-3.0.0.tgz", + "integrity": "sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==", + "license": "ISC", "dependencies": { - "character-entities": "^2.0.0" + "d3-dispatch": "1 - 3", + 
"d3-drag": "2 - 3", + "d3-interpolate": "1 - 3", + "d3-selection": "3", + "d3-transition": "3" }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" + "engines": { + "node": ">=12" } }, - "node_modules/decompress-response": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", - "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", + "node_modules/d3-chord": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-chord/-/d3-chord-3.0.1.tgz", + "integrity": "sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==", + "license": "ISC", "dependencies": { - "mimic-response": "^3.1.0" + "d3-path": "1 - 3" }, "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">=12" } }, - "node_modules/decompress-response/node_modules/mimic-response": { + "node_modules/d3-color": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", - "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", + "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", + "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==", + "license": "ISC", "engines": { - "node": ">=10" + "node": ">=12" + } + }, + "node_modules/d3-contour": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.2.tgz", + "integrity": "sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==", + "license": "ISC", + "dependencies": { + "d3-array": "^3.2.0" }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "engines": { + "node": ">=12" } }, - "node_modules/deep-extend": { - "version": "0.6.0", - 
"resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", - "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", + "node_modules/d3-delaunay": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/d3-delaunay/-/d3-delaunay-6.0.4.tgz", + "integrity": "sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A==", + "license": "ISC", + "dependencies": { + "delaunator": "5" + }, "engines": { - "node": ">=4.0.0" + "node": ">=12" } }, - "node_modules/deepmerge": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", - "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "node_modules/d3-dispatch": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz", + "integrity": "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==", + "license": "ISC", "engines": { - "node": ">=0.10.0" + "node": ">=12" } }, - "node_modules/default-gateway": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/default-gateway/-/default-gateway-6.0.3.tgz", - "integrity": "sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg==", + "node_modules/d3-drag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-drag/-/d3-drag-3.0.0.tgz", + "integrity": "sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==", + "license": "ISC", "dependencies": { - "execa": "^5.0.0" + "d3-dispatch": "1 - 3", + "d3-selection": "3" }, "engines": { - "node": ">= 10" + "node": ">=12" } }, - "node_modules/defer-to-connect": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz", - "integrity": 
"sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==", + "node_modules/d3-dsv": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-dsv/-/d3-dsv-3.0.1.tgz", + "integrity": "sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==", + "license": "ISC", + "dependencies": { + "commander": "7", + "iconv-lite": "0.6", + "rw": "1" + }, + "bin": { + "csv2json": "bin/dsv2json.js", + "csv2tsv": "bin/dsv2dsv.js", + "dsv2dsv": "bin/dsv2dsv.js", + "dsv2json": "bin/dsv2json.js", + "json2csv": "bin/json2dsv.js", + "json2dsv": "bin/json2dsv.js", + "json2tsv": "bin/json2dsv.js", + "tsv2csv": "bin/dsv2dsv.js", + "tsv2json": "bin/dsv2json.js" + }, "engines": { - "node": ">=10" + "node": ">=12" } }, - "node_modules/define-data-property": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", - "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "node_modules/d3-dsv/node_modules/commander": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", + "license": "MIT", + "engines": { + "node": ">= 10" + } + }, + "node_modules/d3-dsv/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "license": "MIT", "dependencies": { - "es-define-property": "^1.0.0", - "es-errors": "^1.3.0", - "gopd": "^1.0.1" + "safer-buffer": ">= 2.1.2 < 3.0.0" }, "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node": ">=0.10.0" } }, - "node_modules/define-lazy-prop": { - "version": "2.0.0", - 
"resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", - "integrity": "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==", + "node_modules/d3-ease": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", + "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==", + "license": "BSD-3-Clause", "engines": { - "node": ">=8" + "node": ">=12" } }, - "node_modules/define-properties": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", - "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", + "node_modules/d3-fetch": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-fetch/-/d3-fetch-3.0.1.tgz", + "integrity": "sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==", + "license": "ISC", "dependencies": { - "define-data-property": "^1.0.1", - "has-property-descriptors": "^1.0.0", - "object-keys": "^1.1.1" + "d3-dsv": "1 - 3" }, "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node": ">=12" } }, - "node_modules/del": { - "version": "6.1.1", - "resolved": "https://registry.npmjs.org/del/-/del-6.1.1.tgz", - "integrity": "sha512-ua8BhapfP0JUJKC/zV9yHHDW/rDoDxP4Zhn3AkA6/xT6gY7jYXJiaeyBZznYVujhZZET+UgcbZiQ7sN3WqcImg==", + "node_modules/d3-force": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-force/-/d3-force-3.0.0.tgz", + "integrity": "sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==", + "license": "ISC", "dependencies": { - "globby": "^11.0.1", - "graceful-fs": "^4.2.4", - "is-glob": "^4.0.1", - "is-path-cwd": "^2.2.0", - "is-path-inside": "^3.0.2", - "p-map": "^4.0.0", - "rimraf": "^3.0.2", - "slash": "^3.0.0" 
+ "d3-dispatch": "1 - 3", + "d3-quadtree": "1 - 3", + "d3-timer": "1 - 3" }, "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">=12" } }, - "node_modules/depd": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", - "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "node_modules/d3-format": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.0.tgz", + "integrity": "sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==", + "license": "ISC", "engines": { - "node": ">= 0.8" + "node": ">=12" } }, - "node_modules/dequal": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", - "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "node_modules/d3-geo": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/d3-geo/-/d3-geo-3.1.1.tgz", + "integrity": "sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q==", + "license": "ISC", + "dependencies": { + "d3-array": "2.5.0 - 3" + }, "engines": { - "node": ">=6" + "node": ">=12" } }, - "node_modules/destroy": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", - "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", + "node_modules/d3-hierarchy": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz", + "integrity": "sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==", + "license": "ISC", "engines": { - "node": ">= 0.8", - "npm": "1.2.8000 || >= 1.4.16" - } - }, - "node_modules/detect-node": { - "version": "2.1.0", - "resolved": 
"https://registry.npmjs.org/detect-node/-/detect-node-2.1.0.tgz", - "integrity": "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==" - }, - "node_modules/detect-port": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/detect-port/-/detect-port-1.5.1.tgz", - "integrity": "sha512-aBzdj76lueB6uUst5iAs7+0H/oOjqI5D16XUWxlWMIMROhcM0rfsNVk93zTngq1dDNpoXRr++Sus7ETAExppAQ==", - "dependencies": { - "address": "^1.0.1", - "debug": "4" - }, - "bin": { - "detect": "bin/detect-port.js", - "detect-port": "bin/detect-port.js" + "node": ">=12" } }, - "node_modules/detect-port-alt": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/detect-port-alt/-/detect-port-alt-1.1.6.tgz", - "integrity": "sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q==", + "node_modules/d3-interpolate": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", + "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", + "license": "ISC", "dependencies": { - "address": "^1.0.1", - "debug": "^2.6.0" - }, - "bin": { - "detect": "bin/detect-port", - "detect-port": "bin/detect-port" + "d3-color": "1 - 3" }, "engines": { - "node": ">= 4.2.1" + "node": ">=12" } }, - "node_modules/detect-port-alt/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dependencies": { - "ms": "2.0.0" + "node_modules/d3-path": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz", + "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==", + "license": "ISC", + "engines": { + "node": ">=12" } }, - "node_modules/detect-port-alt/node_modules/ms": 
{ - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" - }, - "node_modules/devlop": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz", - "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==", - "dependencies": { - "dequal": "^2.0.0" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" + "node_modules/d3-polygon": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-polygon/-/d3-polygon-3.0.1.tgz", + "integrity": "sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==", + "license": "ISC", + "engines": { + "node": ">=12" } }, - "node_modules/dir-glob": { + "node_modules/d3-quadtree": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", - "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", - "dependencies": { - "path-type": "^4.0.0" - }, + "resolved": "https://registry.npmjs.org/d3-quadtree/-/d3-quadtree-3.0.1.tgz", + "integrity": "sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==", + "license": "ISC", "engines": { - "node": ">=8" + "node": ">=12" } }, - "node_modules/dns-equal": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/dns-equal/-/dns-equal-1.0.0.tgz", - "integrity": "sha512-z+paD6YUQsk+AbGCEM4PrOXSss5gd66QfcVBFTKR/HpFL9jCqikS94HYwKww6fQyO7IxrIIyUu+g0Ka9tUS2Cg==" + "node_modules/d3-random": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-random/-/d3-random-3.0.1.tgz", + "integrity": "sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==", + "license": "ISC", + "engines": { + "node": ">=12" + } }, - 
"node_modules/dns-packet": { - "version": "5.6.1", - "resolved": "https://registry.npmjs.org/dns-packet/-/dns-packet-5.6.1.tgz", - "integrity": "sha512-l4gcSouhcgIKRvyy99RNVOgxXiicE+2jZoNmaNmZ6JXiGajBOJAesk1OBlJuM5k2c+eudGdLxDqXuPCKIj6kpw==", + "node_modules/d3-sankey": { + "version": "0.12.3", + "resolved": "https://registry.npmjs.org/d3-sankey/-/d3-sankey-0.12.3.tgz", + "integrity": "sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ==", + "license": "BSD-3-Clause", "dependencies": { - "@leichtgewicht/ip-codec": "^2.0.1" - }, - "engines": { - "node": ">=6" + "d3-array": "1 - 2", + "d3-shape": "^1.2.0" } }, - "node_modules/dom-converter": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz", - "integrity": "sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA==", + "node_modules/d3-sankey/node_modules/d3-array": { + "version": "2.12.1", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-2.12.1.tgz", + "integrity": "sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ==", + "license": "BSD-3-Clause", "dependencies": { - "utila": "~0.4" + "internmap": "^1.0.0" } }, - "node_modules/dom-serializer": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", - "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", + "node_modules/d3-sankey/node_modules/d3-path": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-1.0.9.tgz", + "integrity": "sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg==", + "license": "BSD-3-Clause" + }, + "node_modules/d3-sankey/node_modules/d3-shape": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-1.3.7.tgz", + "integrity": 
"sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw==", + "license": "BSD-3-Clause", "dependencies": { - "domelementtype": "^2.3.0", - "domhandler": "^5.0.2", - "entities": "^4.2.0" - }, - "funding": { - "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" + "d3-path": "1" } }, - "node_modules/domelementtype": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", - "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/fb55" - } - ] + "node_modules/d3-sankey/node_modules/internmap": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-1.0.1.tgz", + "integrity": "sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==", + "license": "ISC" }, - "node_modules/domhandler": { - "version": "5.0.3", - "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", - "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", + "node_modules/d3-scale": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz", + "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==", + "license": "ISC", "dependencies": { - "domelementtype": "^2.3.0" + "d3-array": "2.10.0 - 3", + "d3-format": "1 - 3", + "d3-interpolate": "1.2.0 - 3", + "d3-time": "2.1.1 - 3", + "d3-time-format": "2 - 4" }, "engines": { - "node": ">= 4" - }, - "funding": { - "url": "https://github.com/fb55/domhandler?sponsor=1" + "node": ">=12" } }, - "node_modules/dompurify": { - "version": "2.4.7", - "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-2.4.7.tgz", - "integrity": 
"sha512-kxxKlPEDa6Nc5WJi+qRgPbOAbgTpSULL+vI3NUXsZMlkJxTqYI9wg5ZTay2sFrdZRWHPWNi+EdAhcJf81WtoMQ==" - }, - "node_modules/domutils": { + "node_modules/d3-scale-chromatic": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz", - "integrity": "sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==", + "resolved": "https://registry.npmjs.org/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz", + "integrity": "sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ==", + "license": "ISC", "dependencies": { - "dom-serializer": "^2.0.0", - "domelementtype": "^2.3.0", - "domhandler": "^5.0.3" + "d3-color": "1 - 3", + "d3-interpolate": "1 - 3" }, - "funding": { - "url": "https://github.com/fb55/domutils?sponsor=1" + "engines": { + "node": ">=12" } }, - "node_modules/dot-case": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz", - "integrity": "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==", - "dependencies": { - "no-case": "^3.0.4", - "tslib": "^2.0.3" + "node_modules/d3-selection": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", + "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==", + "license": "ISC", + "engines": { + "node": ">=12" } }, - "node_modules/dot-prop": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-6.0.1.tgz", - "integrity": "sha512-tE7ztYzXHIeyvc7N+hR3oi7FIbf/NIjVP9hmAt3yMXzrQ072/fpjGLx2GxNxGxUl5V73MEqYzioOMoVhGMJ5cA==", + "node_modules/d3-shape": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz", + "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==", + "license": "ISC", "dependencies": { - 
"is-obj": "^2.0.0" + "d3-path": "^3.1.0" }, "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">=12" } }, - "node_modules/dot-prop/node_modules/is-obj": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", - "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", + "node_modules/d3-time": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz", + "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==", + "license": "ISC", + "dependencies": { + "d3-array": "2 - 3" + }, "engines": { - "node": ">=8" + "node": ">=12" } }, - "node_modules/duplexer": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz", - "integrity": "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==" - }, - "node_modules/eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" - }, - "node_modules/ee-first": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" - }, - "node_modules/electron-to-chromium": { - "version": "1.4.722", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.722.tgz", - "integrity": "sha512-5nLE0TWFFpZ80Crhtp4pIp8LXCztjYX41yUcV6b+bKR2PqzjskTMOOlBi1VjBHlvHwS+4gar7kNKOrsbsewEZQ==" - }, - "node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": 
"sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" - }, - "node_modules/emojilib": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/emojilib/-/emojilib-2.4.0.tgz", - "integrity": "sha512-5U0rVMU5Y2n2+ykNLQqMoqklN9ICBT/KsvC1Gz6vqHbz2AXXGkG+Pm5rMWk/8Vjrr/mY9985Hi8DYzn1F09Nyw==" - }, - "node_modules/emojis-list": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-3.0.0.tgz", - "integrity": "sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==", + "node_modules/d3-time-format": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz", + "integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==", + "license": "ISC", + "dependencies": { + "d3-time": "1 - 3" + }, "engines": { - "node": ">= 4" + "node": ">=12" } }, - "node_modules/emoticon": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/emoticon/-/emoticon-4.0.1.tgz", - "integrity": "sha512-dqx7eA9YaqyvYtUhJwT4rC1HIp82j5ybS1/vQ42ur+jBe17dJMwZE4+gvL1XadSFfxaPFFGt3Xsw+Y8akThDlw==", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" + "node_modules/d3-timer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", + "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==", + "license": "ISC", + "engines": { + "node": ">=12" } }, - "node_modules/encodeurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "node_modules/d3-transition": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-3.0.1.tgz", + "integrity": 
"sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==", + "license": "ISC", + "dependencies": { + "d3-color": "1 - 3", + "d3-dispatch": "1 - 3", + "d3-ease": "1 - 3", + "d3-interpolate": "1 - 3", + "d3-timer": "1 - 3" + }, "engines": { - "node": ">= 0.8" + "node": ">=12" + }, + "peerDependencies": { + "d3-selection": "2 - 3" } }, - "node_modules/enhanced-resolve": { - "version": "5.15.0", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz", - "integrity": "sha512-LXYT42KJ7lpIKECr2mAXIaMldcNCh/7E0KBKOu4KSfkHmP+mZmSs+8V5gBAqisWBy0OO4W5Oyys0GO1Y8KtdKg==", + "node_modules/d3-zoom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-3.0.0.tgz", + "integrity": "sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==", + "license": "ISC", "dependencies": { - "graceful-fs": "^4.2.4", - "tapable": "^2.2.0" + "d3-dispatch": "1 - 3", + "d3-drag": "2 - 3", + "d3-interpolate": "1 - 3", + "d3-selection": "2 - 3", + "d3-transition": "2 - 3" }, "engines": { - "node": ">=10.13.0" + "node": ">=12" } }, - "node_modules/entities": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", - "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "node_modules/dagre-d3-es": { + "version": "7.0.10", + "resolved": "https://registry.npmjs.org/dagre-d3-es/-/dagre-d3-es-7.0.10.tgz", + "integrity": "sha512-qTCQmEhcynucuaZgY5/+ti3X/rnszKZhEQH/ZdWdtP1tA/y3VoHJzcVrO9pjjJCNpigfscAtoUB5ONcd2wNn0A==", + "license": "MIT", + "dependencies": { + "d3": "^7.8.2", + "lodash-es": "^4.17.21" + } + }, + "node_modules/dayjs": { + "version": "1.11.12", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.12.tgz", + "integrity": "sha512-Rt2g+nTbLlDWZTwwrIXjy9MeiZmSDI375FvZs72ngxx8PDC6YXOeR3q5LAuPzjZQxhiWdRKac7RKV+YyQYfYIg==", + "license": "MIT" + }, + 
"node_modules/debounce": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/debounce/-/debounce-1.2.1.tgz", + "integrity": "sha512-XRRe6Glud4rd/ZGQfiV1ruXSfbvfJedlV9Y6zOlP+2K04vBYiJEte6stfFkCP03aMnY5tsipamumUjL14fofug==" + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, "engines": { - "node": ">=0.12" + "node": ">=6.0" }, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } } }, - "node_modules/error-ex": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", - "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "node_modules/decko": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decko/-/decko-1.2.0.tgz", + "integrity": "sha512-m8FnyHXV1QX+S1cl+KPFDIl6NMkxtKsy6+U/aYyjrOqWMuwAwYWu7ePqrsUHtDR5Y8Yk2pi/KIDSgF+vT4cPOQ==" + }, + "node_modules/decode-named-character-reference": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.0.2.tgz", + "integrity": "sha512-O8x12RzrUF8xyVcY0KJowWsmaJxQbmy0/EtnNtHRpsOcT7dFk5W598coHqBVpmWo1oQQfsCqfCmkZN5DJrZVdg==", "dependencies": { - "is-arrayish": "^0.2.1" + "character-entities": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/es-define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", - "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "node_modules/decompress-response": { + "version": "6.0.0", + 
"resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", + "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", "dependencies": { - "get-intrinsic": "^1.2.4" + "mimic-response": "^3.1.0" }, "engines": { - "node": ">= 0.4" + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/es-errors": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", - "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-module-lexer": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.4.1.tgz", - "integrity": "sha512-cXLGjP0c4T3flZJKQSuziYoq7MlT+rnvfZjfp7h+I7K9BNX54kP9nyWvdbwjQ4u1iWbOL4u96fgeZLToQlZC7w==" - }, - "node_modules/es6-promise": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.3.1.tgz", - "integrity": "sha512-SOp9Phqvqn7jtEUxPWdWfWoLmyt2VaJ6MpvP9Comy1MceMXqE6bxvaTu4iaxpYYPzhny28Lc+M87/c2cPK6lDg==" - }, - "node_modules/escalade": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", - "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", - "engines": { - "node": ">=6" - } - }, - "node_modules/escape-goat": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-goat/-/escape-goat-4.0.0.tgz", - "integrity": "sha512-2Sd4ShcWxbx6OY1IHyla/CVNwvg7XwZVoXZHcSu9w9SReNP1EzzD5T8NWKIR38fIqEns9kDWKUQTXXAmlDrdPg==", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/escape-html": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - 
"integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" - }, - "node_modules/escape-string-regexp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", - "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "node_modules/decompress-response/node_modules/mimic-response": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", + "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", "engines": { "node": ">=10" }, @@ -6102,272 +6208,171 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/eslint-scope": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", - "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", - "dependencies": { - "esrecurse": "^4.3.0", - "estraverse": "^4.1.1" - }, + "node_modules/deep-extend": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", "engines": { - "node": ">=8.0.0" + "node": ">=4.0.0" } }, - "node_modules/esprima": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", - "bin": { - "esparse": "bin/esparse.js", - "esvalidate": "bin/esvalidate.js" - }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", "engines": { 
- "node": ">=4" + "node": ">=0.10.0" } }, - "node_modules/esrecurse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", - "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "node_modules/default-gateway": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/default-gateway/-/default-gateway-6.0.3.tgz", + "integrity": "sha512-fwSOJsbbNzZ/CUFpqFBqYfYNLj1NbMPm8MMCIzHjC83iSJRBEGmDUxU+WP661BaBQImeC2yHwXtz+P/O9o+XEg==", "dependencies": { - "estraverse": "^5.2.0" + "execa": "^5.0.0" }, "engines": { - "node": ">=4.0" - } - }, - "node_modules/esrecurse/node_modules/estraverse": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", - "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", - "engines": { - "node": ">=4.0" + "node": ">= 10" } }, - "node_modules/estraverse": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", - "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "node_modules/defer-to-connect": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz", + "integrity": "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==", "engines": { - "node": ">=4.0" + "node": ">=10" } }, - "node_modules/estree-util-attach-comments": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/estree-util-attach-comments/-/estree-util-attach-comments-3.0.0.tgz", - "integrity": "sha512-cKUwm/HUcTDsYh/9FgnuFqpfquUbwIqwKM26BVCGDPVgvaCl/nDCCjUfiLlx6lsEZ3Z4RFxNbOQ60pkaEwFxGw==", + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + 
"integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", "dependencies": { - "@types/estree": "^1.0.0" + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/estree-util-build-jsx": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/estree-util-build-jsx/-/estree-util-build-jsx-3.0.1.tgz", - "integrity": "sha512-8U5eiL6BTrPxp/CHbs2yMgP8ftMhR5ww1eIKoWRMlqvltHF8fZn5LRDvTKuxD3DUn+shRbLGqXemcP51oFCsGQ==", - "dependencies": { - "@types/estree-jsx": "^1.0.0", - "devlop": "^1.0.0", - "estree-util-is-identifier-name": "^3.0.0", - "estree-walker": "^3.0.0" + "engines": { + "node": ">= 0.4" }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" - } - }, - "node_modules/estree-util-is-identifier-name": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/estree-util-is-identifier-name/-/estree-util-is-identifier-name-3.0.0.tgz", - "integrity": "sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg==", - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/estree-util-to-js": { + "node_modules/define-lazy-prop": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/estree-util-to-js/-/estree-util-to-js-2.0.0.tgz", - "integrity": "sha512-WDF+xj5rRWmD5tj6bIqRi6CkLIXbbNQUcxQHzGysQzvHmdYG2G7p/Tf0J0gpxGgkeMZNTIjT/AoSvC9Xehcgdg==", - "dependencies": { - "@types/estree-jsx": "^1.0.0", - "astring": "^1.8.0", - "source-map": "^0.7.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", + "integrity": 
"sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==", + "engines": { + "node": ">=8" } }, - "node_modules/estree-util-value-to-estree": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/estree-util-value-to-estree/-/estree-util-value-to-estree-3.1.1.tgz", - "integrity": "sha512-5mvUrF2suuv5f5cGDnDphIy4/gW86z82kl5qG6mM9z04SEQI4FB5Apmaw/TGEf3l55nLtMs5s51dmhUzvAHQCA==", + "node_modules/define-properties": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz", + "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==", "dependencies": { - "@types/estree": "^1.0.0", - "is-plain-obj": "^4.0.0" + "define-data-property": "^1.0.1", + "has-property-descriptors": "^1.0.0", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { - "url": "https://github.com/sponsors/remcohaszing" + "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/estree-util-visit": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/estree-util-visit/-/estree-util-visit-2.0.0.tgz", - "integrity": "sha512-m5KgiH85xAhhW8Wta0vShLcUvOsh3LLPI2YVwcbio1l7E09NTLL1EyMZFM1OyWowoH0skScNbhOPl4kcBgzTww==", + "node_modules/del": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/del/-/del-6.1.1.tgz", + "integrity": "sha512-ua8BhapfP0JUJKC/zV9yHHDW/rDoDxP4Zhn3AkA6/xT6gY7jYXJiaeyBZznYVujhZZET+UgcbZiQ7sN3WqcImg==", "dependencies": { - "@types/estree-jsx": "^1.0.0", - "@types/unist": "^3.0.0" + "globby": "^11.0.1", + "graceful-fs": "^4.2.4", + "is-glob": "^4.0.1", + "is-path-cwd": "^2.2.0", + "is-path-inside": "^3.0.2", + "p-map": "^4.0.0", + "rimraf": "^3.0.2", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "url": "https://github.com/sponsors/sindresorhus" } }, - 
"node_modules/estree-walker": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", - "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "node_modules/delaunator": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/delaunator/-/delaunator-5.0.1.tgz", + "integrity": "sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==", + "license": "ISC", "dependencies": { - "@types/estree": "^1.0.0" + "robust-predicates": "^3.0.2" } }, - "node_modules/esutils": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", - "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "node_modules/depd": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", + "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", "engines": { - "node": ">=0.10.0" + "node": ">= 0.8" } }, - "node_modules/eta": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/eta/-/eta-2.2.0.tgz", - "integrity": "sha512-UVQ72Rqjy/ZKQalzV5dCCJP80GrmPrMxh6NlNf+erV6ObL0ZFkhCstWRawS85z3smdr3d2wXPsZEY7rDPfGd2g==", + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", "engines": { - "node": ">=6.0.0" - }, - "funding": { - "url": "https://github.com/eta-dev/eta?sponsor=1" + "node": ">=6" } }, - "node_modules/etag": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", - "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "node_modules/destroy": { + "version": "1.2.0", + "resolved": 
"https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", + "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", "engines": { - "node": ">= 0.6" + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" } }, - "node_modules/eval": { - "version": "0.1.8", - "resolved": "https://registry.npmjs.org/eval/-/eval-0.1.8.tgz", - "integrity": "sha512-EzV94NYKoO09GLXGjXj9JIlXijVck4ONSr5wiCWDvhsvj5jxSrzTmRU/9C1DyB6uToszLs8aifA6NQ7lEQdvFw==", - "dependencies": { - "@types/node": "*", - "require-like": ">= 0.1.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/eventemitter3": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", - "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==" - }, - "node_modules/events": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", - "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", - "engines": { - "node": ">=0.8.x" - } + "node_modules/detect-node": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/detect-node/-/detect-node-2.1.0.tgz", + "integrity": "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g==" }, - "node_modules/execa": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", - "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "node_modules/detect-port": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/detect-port/-/detect-port-1.5.1.tgz", + "integrity": "sha512-aBzdj76lueB6uUst5iAs7+0H/oOjqI5D16XUWxlWMIMROhcM0rfsNVk93zTngq1dDNpoXRr++Sus7ETAExppAQ==", "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.0", - "human-signals": "^2.1.0", - "is-stream": "^2.0.0", - 
"merge-stream": "^2.0.0", - "npm-run-path": "^4.0.1", - "onetime": "^5.1.2", - "signal-exit": "^3.0.3", - "strip-final-newline": "^2.0.0" - }, - "engines": { - "node": ">=10" + "address": "^1.0.1", + "debug": "4" }, - "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" + "bin": { + "detect": "bin/detect-port.js", + "detect-port": "bin/detect-port.js" } }, - "node_modules/express": { - "version": "4.19.2", - "resolved": "https://registry.npmjs.org/express/-/express-4.19.2.tgz", - "integrity": "sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==", + "node_modules/detect-port-alt": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/detect-port-alt/-/detect-port-alt-1.1.6.tgz", + "integrity": "sha512-5tQykt+LqfJFBEYaDITx7S7cR7mJ/zQmLXZ2qt5w04ainYZw6tBf9dBunMjVeVOdYVRUzUOE4HkY5J7+uttb5Q==", "dependencies": { - "accepts": "~1.3.8", - "array-flatten": "1.1.1", - "body-parser": "1.20.2", - "content-disposition": "0.5.4", - "content-type": "~1.0.4", - "cookie": "0.6.0", - "cookie-signature": "1.0.6", - "debug": "2.6.9", - "depd": "2.0.0", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "finalhandler": "1.2.0", - "fresh": "0.5.2", - "http-errors": "2.0.0", - "merge-descriptors": "1.0.1", - "methods": "~1.1.2", - "on-finished": "2.4.1", - "parseurl": "~1.3.3", - "path-to-regexp": "0.1.7", - "proxy-addr": "~2.0.7", - "qs": "6.11.0", - "range-parser": "~1.2.1", - "safe-buffer": "5.2.1", - "send": "0.18.0", - "serve-static": "1.15.0", - "setprototypeof": "1.2.0", - "statuses": "2.0.1", - "type-is": "~1.6.18", - "utils-merge": "1.0.1", - "vary": "~1.1.2" + "address": "^1.0.1", + "debug": "^2.6.0" }, - "engines": { - "node": ">= 0.10.0" - } - }, - "node_modules/express/node_modules/array-flatten": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", - "integrity": 
"sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" - }, - "node_modules/express/node_modules/content-disposition": { - "version": "0.5.4", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", - "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", - "dependencies": { - "safe-buffer": "5.2.1" + "bin": { + "detect": "bin/detect-port", + "detect-port": "bin/detect-port" }, "engines": { - "node": ">= 0.6" + "node": ">= 4.2.1" } }, - "node_modules/express/node_modules/debug": { + "node_modules/detect-port-alt/node_modules/debug": { "version": "2.6.9", "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", @@ -6375,834 +6380,1411 @@ "ms": "2.0.0" } }, - "node_modules/express/node_modules/ms": { + "node_modules/detect-port-alt/node_modules/ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" }, - "node_modules/express/node_modules/path-to-regexp": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", - "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" + "node_modules/devlop": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz", + "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==", + "dependencies": { + "dequal": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } }, - "node_modules/express/node_modules/range-parser": { - "version": "1.2.1", - "resolved": 
"https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", - "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "node_modules/diff": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz", + "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==", + "license": "BSD-3-Clause", "engines": { - "node": ">= 0.6" + "node": ">=0.3.1" } }, - "node_modules/extend": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", - "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" - }, - "node_modules/extend-shallow": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", - "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", "dependencies": { - "is-extendable": "^0.1.0" + "path-type": "^4.0.0" }, "engines": { - "node": ">=0.10.0" + "node": ">=8" } }, - "node_modules/fast-deep-equal": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", - "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + "node_modules/dns-equal": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/dns-equal/-/dns-equal-1.0.0.tgz", + "integrity": "sha512-z+paD6YUQsk+AbGCEM4PrOXSss5gd66QfcVBFTKR/HpFL9jCqikS94HYwKww6fQyO7IxrIIyUu+g0Ka9tUS2Cg==" }, - "node_modules/fast-glob": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", - 
"integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", + "node_modules/dns-packet": { + "version": "5.6.1", + "resolved": "https://registry.npmjs.org/dns-packet/-/dns-packet-5.6.1.tgz", + "integrity": "sha512-l4gcSouhcgIKRvyy99RNVOgxXiicE+2jZoNmaNmZ6JXiGajBOJAesk1OBlJuM5k2c+eudGdLxDqXuPCKIj6kpw==", "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.4" + "@leichtgewicht/ip-codec": "^2.0.1" }, "engines": { - "node": ">=8.6.0" + "node": ">=6" } }, - "node_modules/fast-json-stable-stringify": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", - "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" - }, - "node_modules/fast-safe-stringify": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", - "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" - }, - "node_modules/fast-url-parser": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/fast-url-parser/-/fast-url-parser-1.1.3.tgz", - "integrity": "sha512-5jOCVXADYNuRkKFzNJ0dCCewsZiYo0dz8QNYljkOpFC6r2U4OBmKtvm/Tsuh4w1YYdDqDb31a8TVhBJ2OJKdqQ==", + "node_modules/dom-converter": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/dom-converter/-/dom-converter-0.2.0.tgz", + "integrity": "sha512-gd3ypIPfOMr9h5jIKq8E3sHOTCjeirnl0WK5ZdS1AW0Odt0b1PaWaHdJ4Qk4klv+YB9aJBS7mESXjFoDQPu6DA==", "dependencies": { - "punycode": "^1.3.2" + "utila": "~0.4" } }, - "node_modules/fastq": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz", - "integrity": "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==", + 
"node_modules/dom-serializer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz", + "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==", "dependencies": { - "reusify": "^1.0.4" + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "entities": "^4.2.0" + }, + "funding": { + "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" } }, - "node_modules/fault": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/fault/-/fault-2.0.1.tgz", - "integrity": "sha512-WtySTkS4OKev5JtpHXnib4Gxiurzh5NCGvWrFaZ34m6JehfTUhKZvn9njTfw48t6JumVQOmrKqpmGcdwxnhqBQ==", + "node_modules/domelementtype": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz", + "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fb55" + } + ] + }, + "node_modules/domhandler": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz", + "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==", "dependencies": { - "format": "^0.2.0" + "domelementtype": "^2.3.0" + }, + "engines": { + "node": ">= 4" }, "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" + "url": "https://github.com/fb55/domhandler?sponsor=1" } }, - "node_modules/faye-websocket": { - "version": "0.11.4", - "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.4.tgz", - "integrity": "sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g==", + "node_modules/dompurify": { + "version": "2.4.7", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-2.4.7.tgz", + "integrity": 
"sha512-kxxKlPEDa6Nc5WJi+qRgPbOAbgTpSULL+vI3NUXsZMlkJxTqYI9wg5ZTay2sFrdZRWHPWNi+EdAhcJf81WtoMQ==" + }, + "node_modules/domutils": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.1.0.tgz", + "integrity": "sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==", "dependencies": { - "websocket-driver": ">=0.5.1" + "dom-serializer": "^2.0.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3" }, - "engines": { - "node": ">=0.8.0" + "funding": { + "url": "https://github.com/fb55/domutils?sponsor=1" } }, - "node_modules/feed": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/feed/-/feed-4.2.2.tgz", - "integrity": "sha512-u5/sxGfiMfZNtJ3OvQpXcvotFpYkL0n9u9mM2vkui2nGo8b4wvDkJ8gAkYqbA8QpGyFCv3RK0Z+Iv+9veCS9bQ==", + "node_modules/dot-case": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz", + "integrity": "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==", "dependencies": { - "xml-js": "^1.6.11" - }, - "engines": { - "node": ">=0.4.0" + "no-case": "^3.0.4", + "tslib": "^2.0.3" } }, - "node_modules/file-loader": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/file-loader/-/file-loader-6.2.0.tgz", - "integrity": "sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw==", + "node_modules/dot-prop": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-6.0.1.tgz", + "integrity": "sha512-tE7ztYzXHIeyvc7N+hR3oi7FIbf/NIjVP9hmAt3yMXzrQ072/fpjGLx2GxNxGxUl5V73MEqYzioOMoVhGMJ5cA==", "dependencies": { - "loader-utils": "^2.0.0", - "schema-utils": "^3.0.0" + "is-obj": "^2.0.0" }, "engines": { - "node": ">= 10.13.0" + "node": ">=10" }, "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^4.0.0 || ^5.0.0" + "url": 
"https://github.com/sponsors/sindresorhus" } }, - "node_modules/file-loader/node_modules/ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - }, + "node_modules/dot-prop/node_modules/is-obj": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", + "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", + "engines": { + "node": ">=8" + } + }, + "node_modules/duplexer": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz", + "integrity": "sha512-jtD6YG370ZCIi/9GTaJKQxWTZD045+4R4hTk/x1UyoqadyJ9x9CgSi1RlVDQF8U2sxLLSnFkCaMihqljHIWgMg==" + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + }, + "node_modules/electron-to-chromium": { + "version": "1.4.722", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.722.tgz", + "integrity": "sha512-5nLE0TWFFpZ80Crhtp4pIp8LXCztjYX41yUcV6b+bKR2PqzjskTMOOlBi1VjBHlvHwS+4gar7kNKOrsbsewEZQ==" + }, + "node_modules/elkjs": { + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/elkjs/-/elkjs-0.9.3.tgz", + "integrity": 
"sha512-f/ZeWvW/BCXbhGEf1Ujp29EASo/lk1FDnETgNKwJrsVvGZhUWCZyg3xLJjAsxfOmt8KjswHmI5EwCQcPMpOYhQ==", + "license": "EPL-2.0" + }, + "node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==" + }, + "node_modules/emojilib": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/emojilib/-/emojilib-2.4.0.tgz", + "integrity": "sha512-5U0rVMU5Y2n2+ykNLQqMoqklN9ICBT/KsvC1Gz6vqHbz2AXXGkG+Pm5rMWk/8Vjrr/mY9985Hi8DYzn1F09Nyw==" + }, + "node_modules/emojis-list": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/emojis-list/-/emojis-list-3.0.0.tgz", + "integrity": "sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q==", + "engines": { + "node": ">= 4" + } + }, + "node_modules/emoticon": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/emoticon/-/emoticon-4.0.1.tgz", + "integrity": "sha512-dqx7eA9YaqyvYtUhJwT4rC1HIp82j5ybS1/vQ42ur+jBe17dJMwZE4+gvL1XadSFfxaPFFGt3Xsw+Y8akThDlw==", "funding": { "type": "github", - "url": "https://github.com/sponsors/epoberezkin" + "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/file-loader/node_modules/ajv-keywords": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", - "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", - "peerDependencies": { - "ajv": "^6.9.1" + "node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "engines": { + "node": ">= 0.8" } }, - "node_modules/file-loader/node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": 
"https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" - }, - "node_modules/file-loader/node_modules/schema-utils": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz", - "integrity": "sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==", + "node_modules/enhanced-resolve": { + "version": "5.15.0", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.15.0.tgz", + "integrity": "sha512-LXYT42KJ7lpIKECr2mAXIaMldcNCh/7E0KBKOu4KSfkHmP+mZmSs+8V5gBAqisWBy0OO4W5Oyys0GO1Y8KtdKg==", "dependencies": { - "@types/json-schema": "^7.0.8", - "ajv": "^6.12.5", - "ajv-keywords": "^3.5.2" + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" }, "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" + "node": ">=10.13.0" } }, - "node_modules/filesize": { - "version": "8.0.7", - "resolved": "https://registry.npmjs.org/filesize/-/filesize-8.0.7.tgz", - "integrity": "sha512-pjmC+bkIF8XI7fWaH8KxHcZL3DPybs1roSKP4rKDvy20tAWwIObE4+JIseG2byfGKhud5ZnM4YSGKBz7Sh0ndQ==", + "node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", "engines": { - "node": ">= 0.4.0" + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" } }, - "node_modules/fill-range": { - "version": "7.1.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", - "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": 
"https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" + "is-arrayish": "^0.2.1" } }, - "node_modules/finalhandler": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", - "integrity": "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==", + "node_modules/es-define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", + "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", "dependencies": { - "debug": "2.6.9", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "on-finished": "2.4.1", - "parseurl": "~1.3.3", - "statuses": "2.0.1", - "unpipe": "~1.0.0" + "get-intrinsic": "^1.2.4" }, "engines": { - "node": ">= 0.8" + "node": ">= 0.4" } }, - "node_modules/finalhandler/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dependencies": { - "ms": "2.0.0" + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "engines": { + "node": ">= 0.4" } }, - "node_modules/finalhandler/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + "node_modules/es-module-lexer": { + "version": "1.4.1", + "resolved": 
"https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-1.4.1.tgz", + "integrity": "sha512-cXLGjP0c4T3flZJKQSuziYoq7MlT+rnvfZjfp7h+I7K9BNX54kP9nyWvdbwjQ4u1iWbOL4u96fgeZLToQlZC7w==" }, - "node_modules/find-cache-dir": { + "node_modules/es6-promise": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-3.3.1.tgz", + "integrity": "sha512-SOp9Phqvqn7jtEUxPWdWfWoLmyt2VaJ6MpvP9Comy1MceMXqE6bxvaTu4iaxpYYPzhny28Lc+M87/c2cPK6lDg==" + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-goat": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-4.0.0.tgz", - "integrity": "sha512-9ZonPT4ZAK4a+1pUPVPZJapbi7O5qbbJPdYw/NOQWZZbVLdDTYM3A4R9z/DpAM08IDaFGsvPgiGZ82WEwUDWjg==", - "dependencies": { - "common-path-prefix": "^3.0.0", - "pkg-dir": "^7.0.0" - }, + "resolved": "https://registry.npmjs.org/escape-goat/-/escape-goat-4.0.0.tgz", + "integrity": "sha512-2Sd4ShcWxbx6OY1IHyla/CVNwvg7XwZVoXZHcSu9w9SReNP1EzzD5T8NWKIR38fIqEns9kDWKUQTXXAmlDrdPg==", "engines": { - "node": ">=14.16" + "node": ">=12" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/find-up": { - "version": "6.3.0", - "resolved": "https://registry.npmjs.org/find-up/-/find-up-6.3.0.tgz", - "integrity": "sha512-v2ZsoEuVHYy8ZIlYqwPe/39Cy+cFDzp4dXPaxNvkEuouymu+2Jbz0PxpKarJHYJTmv2HWT3O382qY8l4jMWthw==", - "dependencies": { - "locate-path": "^7.1.0", - "path-exists": "^5.0.0" - }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" + }, + 
"node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + "node": ">=10" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/flat": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", - "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", - "bin": { - "flat": "cli.js" + "node_modules/eslint-scope": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^4.1.1" + }, + "engines": { + "node": ">=8.0.0" } }, - "node_modules/follow-redirects": { - "version": "1.15.6", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", - "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", - "funding": [ - { - "type": "individual", - "url": "https://github.com/sponsors/RubenVerborgh" - } - ], - "engines": { - "node": ">=4.0" + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" }, - "peerDependenciesMeta": { - "debug": { - "optional": true - } + "engines": { + "node": ">=4" } }, - "node_modules/foreach": { - "version": "2.0.6", - "resolved": "https://registry.npmjs.org/foreach/-/foreach-2.0.6.tgz", - "integrity": 
"sha512-k6GAGDyqLe9JaebCsFCoudPPWfihKu8pylYXRlqP1J7ms39iPoTtk2fviNglIeQEwdh0bQeKJ01ZPyuyQvKzwg==" - }, - "node_modules/fork-ts-checker-webpack-plugin": { - "version": "6.5.3", - "resolved": "https://registry.npmjs.org/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.3.tgz", - "integrity": "sha512-SbH/l9ikmMWycd5puHJKTkZJKddF4iRLyW3DeZ08HTI7NGyLS38MXd/KGgeWumQO7YNQbW2u/NtPT2YowbPaGQ==", + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", "dependencies": { - "@babel/code-frame": "^7.8.3", - "@types/json-schema": "^7.0.5", - "chalk": "^4.1.0", - "chokidar": "^3.4.2", - "cosmiconfig": "^6.0.0", - "deepmerge": "^4.2.2", - "fs-extra": "^9.0.0", - "glob": "^7.1.6", - "memfs": "^3.1.2", - "minimatch": "^3.0.4", - "schema-utils": "2.7.0", - "semver": "^7.3.2", - "tapable": "^1.0.0" + "estraverse": "^5.2.0" }, "engines": { - "node": ">=10", - "yarn": ">=1.0.0" - }, - "peerDependencies": { - "eslint": ">= 6", - "typescript": ">= 2.7", - "vue-template-compiler": "*", - "webpack": ">= 4" - }, - "peerDependenciesMeta": { - "eslint": { - "optional": true - }, - "vue-template-compiler": { - "optional": true - } + "node": ">=4.0" } }, - "node_modules/fork-ts-checker-webpack-plugin/node_modules/ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" + "node_modules/esrecurse/node_modules/estraverse": { + "version": "5.3.0", + "resolved": 
"https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "engines": { + "node": ">=4.0" } }, - "node_modules/fork-ts-checker-webpack-plugin/node_modules/ajv-keywords": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", - "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", - "peerDependencies": { - "ajv": "^6.9.1" + "node_modules/estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "engines": { + "node": ">=4.0" } }, - "node_modules/fork-ts-checker-webpack-plugin/node_modules/cosmiconfig": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-6.0.0.tgz", - "integrity": "sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg==", + "node_modules/estree-util-attach-comments": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/estree-util-attach-comments/-/estree-util-attach-comments-3.0.0.tgz", + "integrity": "sha512-cKUwm/HUcTDsYh/9FgnuFqpfquUbwIqwKM26BVCGDPVgvaCl/nDCCjUfiLlx6lsEZ3Z4RFxNbOQ60pkaEwFxGw==", "dependencies": { - "@types/parse-json": "^4.0.0", - "import-fresh": "^3.1.0", - "parse-json": "^5.0.0", - "path-type": "^4.0.0", - "yaml": "^1.7.2" + "@types/estree": "^1.0.0" }, - "engines": { - "node": ">=8" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/fork-ts-checker-webpack-plugin/node_modules/fs-extra": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", - "integrity": 
"sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", + "node_modules/estree-util-build-jsx": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/estree-util-build-jsx/-/estree-util-build-jsx-3.0.1.tgz", + "integrity": "sha512-8U5eiL6BTrPxp/CHbs2yMgP8ftMhR5ww1eIKoWRMlqvltHF8fZn5LRDvTKuxD3DUn+shRbLGqXemcP51oFCsGQ==", "dependencies": { - "at-least-node": "^1.0.0", - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" + "@types/estree-jsx": "^1.0.0", + "devlop": "^1.0.0", + "estree-util-is-identifier-name": "^3.0.0", + "estree-walker": "^3.0.0" }, - "engines": { - "node": ">=10" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/fork-ts-checker-webpack-plugin/node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + "node_modules/estree-util-is-identifier-name": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/estree-util-is-identifier-name/-/estree-util-is-identifier-name-3.0.0.tgz", + "integrity": "sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg==", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } }, - "node_modules/fork-ts-checker-webpack-plugin/node_modules/schema-utils": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.0.tgz", - "integrity": "sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A==", + "node_modules/estree-util-to-js": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/estree-util-to-js/-/estree-util-to-js-2.0.0.tgz", + "integrity": 
"sha512-WDF+xj5rRWmD5tj6bIqRi6CkLIXbbNQUcxQHzGysQzvHmdYG2G7p/Tf0J0gpxGgkeMZNTIjT/AoSvC9Xehcgdg==", "dependencies": { - "@types/json-schema": "^7.0.4", - "ajv": "^6.12.2", - "ajv-keywords": "^3.4.1" - }, - "engines": { - "node": ">= 8.9.0" + "@types/estree-jsx": "^1.0.0", + "astring": "^1.8.0", + "source-map": "^0.7.0" }, "funding": { "type": "opencollective", - "url": "https://opencollective.com/webpack" + "url": "https://opencollective.com/unified" } }, - "node_modules/fork-ts-checker-webpack-plugin/node_modules/tapable": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/tapable/-/tapable-1.1.3.tgz", - "integrity": "sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA==", - "engines": { - "node": ">=6" + "node_modules/estree-util-value-to-estree": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/estree-util-value-to-estree/-/estree-util-value-to-estree-3.1.1.tgz", + "integrity": "sha512-5mvUrF2suuv5f5cGDnDphIy4/gW86z82kl5qG6mM9z04SEQI4FB5Apmaw/TGEf3l55nLtMs5s51dmhUzvAHQCA==", + "dependencies": { + "@types/estree": "^1.0.0", + "is-plain-obj": "^4.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/remcohaszing" } }, - "node_modules/form-data-encoder": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-2.1.4.tgz", - "integrity": "sha512-yDYSgNMraqvnxiEXO4hi88+YZxaHC6QKzb5N84iRCTDeRO7ZALpir/lVmf/uXUhnwUr2O4HU8s/n6x+yNjQkHw==", - "engines": { - "node": ">= 14.17" + "node_modules/estree-util-visit": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/estree-util-visit/-/estree-util-visit-2.0.0.tgz", + "integrity": "sha512-m5KgiH85xAhhW8Wta0vShLcUvOsh3LLPI2YVwcbio1l7E09NTLL1EyMZFM1OyWowoH0skScNbhOPl4kcBgzTww==", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/unist": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/format": { - 
"version": "0.2.2", - "resolved": "https://registry.npmjs.org/format/-/format-0.2.2.tgz", - "integrity": "sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==", - "engines": { - "node": ">=0.4.x" + "node_modules/estree-walker": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", + "integrity": "sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==", + "dependencies": { + "@types/estree": "^1.0.0" } }, - "node_modules/forwarded": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", - "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", "engines": { - "node": ">= 0.6" + "node": ">=0.10.0" } }, - "node_modules/fraction.js": { - "version": "4.3.7", - "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz", - "integrity": "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==", + "node_modules/eta": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/eta/-/eta-2.2.0.tgz", + "integrity": "sha512-UVQ72Rqjy/ZKQalzV5dCCJP80GrmPrMxh6NlNf+erV6ObL0ZFkhCstWRawS85z3smdr3d2wXPsZEY7rDPfGd2g==", "engines": { - "node": "*" + "node": ">=6.0.0" }, "funding": { - "type": "patreon", - "url": "https://github.com/sponsors/rawify" + "url": "https://github.com/eta-dev/eta?sponsor=1" } }, - "node_modules/fresh": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", - "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", + "node_modules/etag": { + 
"version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", "engines": { "node": ">= 0.6" } }, - "node_modules/fs-extra": { - "version": "11.2.0", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.2.0.tgz", - "integrity": "sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==", + "node_modules/eval": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/eval/-/eval-0.1.8.tgz", + "integrity": "sha512-EzV94NYKoO09GLXGjXj9JIlXijVck4ONSr5wiCWDvhsvj5jxSrzTmRU/9C1DyB6uToszLs8aifA6NQ7lEQdvFw==", "dependencies": { - "graceful-fs": "^4.2.0", - "jsonfile": "^6.0.1", - "universalify": "^2.0.0" + "@types/node": "*", + "require-like": ">= 0.1.1" }, "engines": { - "node": ">=14.14" - } - }, - "node_modules/fs-monkey": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/fs-monkey/-/fs-monkey-1.0.5.tgz", - "integrity": "sha512-8uMbBjrhzW76TYgEV27Y5E//W2f/lTFmx78P2w19FZSxarhI/798APGQyuGCwmkNxgwGRhrLfvWyLBvNtuOmew==" - }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" - }, - "node_modules/fsevents": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "hasInstallScript": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/function-bind": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", - "integrity": 
"sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node": ">= 0.8" } }, - "node_modules/gensync": { - "version": "1.0.0-beta.2", - "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", - "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", - "engines": { - "node": ">=6.9.0" - } + "node_modules/eventemitter3": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", + "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==" }, - "node_modules/get-caller-file": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", - "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", "engines": { - "node": "6.* || 8.* || >= 10.*" + "node": ">=0.8.x" } }, - "node_modules/get-intrinsic": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", - "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", + "node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", "dependencies": { - "es-errors": "^1.3.0", - "function-bind": "^1.1.2", - "has-proto": "^1.0.1", - "has-symbols": "^1.0.3", - "hasown": "^2.0.0" - }, - "engines": { - "node": ">= 0.4" + "cross-spawn": "^7.0.3", + 
"get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/get-own-enumerable-property-symbols": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz", - "integrity": "sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==" - }, - "node_modules/get-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", - "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", "engines": { "node": ">=10" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "url": "https://github.com/sindresorhus/execa?sponsor=1" } }, - "node_modules/github-slugger": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/github-slugger/-/github-slugger-1.5.0.tgz", - "integrity": "sha512-wIh+gKBI9Nshz2o46B0B3f5k/W+WI9ZAv6y5Dn5WJ5SK1t0TnDimB4WE5rmTD05ZAIn8HALCZVmCsvj0w0v0lw==" - }, - "node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "node_modules/express": { + "version": "4.19.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.19.2.tgz", + "integrity": "sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==", "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" + "accepts": "~1.3.8", + "array-flatten": "1.1.1", + "body-parser": "1.20.2", + 
"content-disposition": "0.5.4", + "content-type": "~1.0.4", + "cookie": "0.6.0", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "2.0.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "1.2.0", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "merge-descriptors": "1.0.1", + "methods": "~1.1.2", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.7", + "proxy-addr": "~2.0.7", + "qs": "6.11.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.2.1", + "send": "0.18.0", + "serve-static": "1.15.0", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" }, "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" + "node": ">= 0.10.0" } }, - "node_modules/glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "node_modules/express/node_modules/array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" + }, + "node_modules/express/node_modules/content-disposition": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", + "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", "dependencies": { - "is-glob": "^4.0.1" + "safe-buffer": "5.2.1" }, "engines": { - "node": ">= 6" + "node": ">= 0.6" } }, - "node_modules/glob-to-regexp": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", - "integrity": 
"sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==" - }, - "node_modules/global-dirs": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.1.tgz", - "integrity": "sha512-NBcGGFbBA9s1VzD41QXDG+3++t9Mn5t1FpLdhESY6oKY4gYTFpX4wO3sqGUa0Srjtbfj3szX0RnemmrVRUdULA==", + "node_modules/express/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", "dependencies": { - "ini": "2.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "ms": "2.0.0" } }, - "node_modules/global-dirs/node_modules/ini": { + "node_modules/express/node_modules/ms": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz", - "integrity": "sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/express/node_modules/path-to-regexp": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", + "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" + }, + "node_modules/express/node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", "engines": { - "node": ">=10" + "node": ">= 0.6" } }, - "node_modules/global-modules": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/global-modules/-/global-modules-2.0.0.tgz", - 
"integrity": "sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A==", + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" + }, + "node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==", "dependencies": { - "global-prefix": "^3.0.0" + "is-extendable": "^0.1.0" }, "engines": { - "node": ">=6" + "node": ">=0.10.0" } }, - "node_modules/global-prefix": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/global-prefix/-/global-prefix-3.0.0.tgz", - "integrity": "sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg==", + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + }, + "node_modules/fast-glob": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", + "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", "dependencies": { - "ini": "^1.3.5", - "kind-of": "^6.0.2", - "which": "^1.3.1" + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" }, "engines": { - "node": ">=6" + "node": ">=8.6.0" } }, - "node_modules/global-prefix/node_modules/which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": 
"sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "which": "bin/which" + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" + }, + "node_modules/fast-safe-stringify": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz", + "integrity": "sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA==" + }, + "node_modules/fast-url-parser": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/fast-url-parser/-/fast-url-parser-1.1.3.tgz", + "integrity": "sha512-5jOCVXADYNuRkKFzNJ0dCCewsZiYo0dz8QNYljkOpFC6r2U4OBmKtvm/Tsuh4w1YYdDqDb31a8TVhBJ2OJKdqQ==", + "dependencies": { + "punycode": "^1.3.2" } }, - "node_modules/globals": { - "version": "11.12.0", - "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", - "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", - "engines": { - "node": ">=4" + "node_modules/fastq": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.15.0.tgz", + "integrity": "sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==", + "dependencies": { + "reusify": "^1.0.4" } }, - "node_modules/globby": { - "version": "11.1.0", - "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", - "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "node_modules/fault": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/fault/-/fault-2.0.1.tgz", + "integrity": 
"sha512-WtySTkS4OKev5JtpHXnib4Gxiurzh5NCGvWrFaZ34m6JehfTUhKZvn9njTfw48t6JumVQOmrKqpmGcdwxnhqBQ==", "dependencies": { - "array-union": "^2.1.0", - "dir-glob": "^3.0.1", - "fast-glob": "^3.2.9", - "ignore": "^5.2.0", - "merge2": "^1.4.1", - "slash": "^3.0.0" - }, - "engines": { - "node": ">=10" + "format": "^0.2.0" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "type": "github", + "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/gopd": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", - "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "node_modules/faye-websocket": { + "version": "0.11.4", + "resolved": "https://registry.npmjs.org/faye-websocket/-/faye-websocket-0.11.4.tgz", + "integrity": "sha512-CzbClwlXAuiRQAlUyfqPgvPoNKTckTPGfwZV4ZdAhVcP2lh9KUxJg2b5GkE7XbjKQ3YJnQ9z6D9ntLAlB+tP8g==", "dependencies": { - "get-intrinsic": "^1.1.3" + "websocket-driver": ">=0.5.1" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "engines": { + "node": ">=0.8.0" } }, - "node_modules/got": { - "version": "12.6.1", - "resolved": "https://registry.npmjs.org/got/-/got-12.6.1.tgz", - "integrity": "sha512-mThBblvlAF1d4O5oqyvN+ZxLAYwIJK7bpMxgYqPD9okW0C3qm5FFn7k811QrcuEBwaogR3ngOFoCfs6mRv7teQ==", + "node_modules/feed": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/feed/-/feed-4.2.2.tgz", + "integrity": "sha512-u5/sxGfiMfZNtJ3OvQpXcvotFpYkL0n9u9mM2vkui2nGo8b4wvDkJ8gAkYqbA8QpGyFCv3RK0Z+Iv+9veCS9bQ==", "dependencies": { - "@sindresorhus/is": "^5.2.0", - "@szmarczak/http-timer": "^5.0.1", - "cacheable-lookup": "^7.0.0", - "cacheable-request": "^10.2.8", - "decompress-response": "^6.0.0", - "form-data-encoder": "^2.1.2", - "get-stream": "^6.0.1", - "http2-wrapper": "^2.1.10", - "lowercase-keys": "^3.0.0", - "p-cancelable": "^3.0.0", - "responselike": "^3.0.0" + "xml-js": "^1.6.11" }, "engines": { - "node": ">=14.16" - }, 
- "funding": { - "url": "https://github.com/sindresorhus/got?sponsor=1" + "node": ">=0.4.0" } }, - "node_modules/got/node_modules/@sindresorhus/is": { - "version": "5.6.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-5.6.0.tgz", - "integrity": "sha512-TV7t8GKYaJWsn00tFDqBw8+Uqmr8A0fRU1tvTQhyZzGv0sJCGRQL3JGMI3ucuKo3XIZdUP+Lx7/gh2t3lewy7g==", + "node_modules/file-loader": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/file-loader/-/file-loader-6.2.0.tgz", + "integrity": "sha512-qo3glqyTa61Ytg4u73GultjHGjdRyig3tG6lPtyX/jOEJvHif9uB0/OCI2Kif6ctF3caQTW2G5gym21oAsI4pw==", + "dependencies": { + "loader-utils": "^2.0.0", + "schema-utils": "^3.0.0" + }, "engines": { - "node": ">=14.16" + "node": ">= 10.13.0" }, "funding": { - "url": "https://github.com/sindresorhus/is?sponsor=1" + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^4.0.0 || ^5.0.0" } }, - "node_modules/graceful-fs": { - "version": "4.2.11", - "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" - }, - "node_modules/gray-matter": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/gray-matter/-/gray-matter-4.0.3.tgz", - "integrity": "sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==", + "node_modules/file-loader/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", "dependencies": { - "js-yaml": "^3.13.1", - "kind-of": "^6.0.2", - "section-matter": "^1.0.0", - "strip-bom-string": "^1.0.0" + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" }, - "engines": { - "node": 
">=6.0" + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" } }, - "node_modules/gray-matter/node_modules/argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", - "dependencies": { - "sprintf-js": "~1.0.2" + "node_modules/file-loader/node_modules/ajv-keywords": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", + "peerDependencies": { + "ajv": "^6.9.1" } }, - "node_modules/gray-matter/node_modules/js-yaml": { - "version": "3.14.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", - "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", - "dependencies": { - "argparse": "^1.0.7", - "esprima": "^4.0.0" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } + "node_modules/file-loader/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" }, - "node_modules/gzip-size": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/gzip-size/-/gzip-size-6.0.0.tgz", - "integrity": "sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q==", + "node_modules/file-loader/node_modules/schema-utils": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz", + "integrity": "sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==", "dependencies": { - "duplexer": "^0.1.2" + "@types/json-schema": 
"^7.0.8", + "ajv": "^6.12.5", + "ajv-keywords": "^3.5.2" }, "engines": { - "node": ">=10" + "node": ">= 10.13.0" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "type": "opencollective", + "url": "https://opencollective.com/webpack" } }, - "node_modules/handle-thing": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/handle-thing/-/handle-thing-2.0.1.tgz", - "integrity": "sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg==" - }, - "node_modules/has-flag": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", - "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "node_modules/filesize": { + "version": "8.0.7", + "resolved": "https://registry.npmjs.org/filesize/-/filesize-8.0.7.tgz", + "integrity": "sha512-pjmC+bkIF8XI7fWaH8KxHcZL3DPybs1roSKP4rKDvy20tAWwIObE4+JIseG2byfGKhud5ZnM4YSGKBz7Sh0ndQ==", "engines": { - "node": ">=8" + "node": ">= 0.4.0" } }, - "node_modules/has-property-descriptors": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", - "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", "dependencies": { - "es-define-property": "^1.0.0" + "to-regex-range": "^5.0.1" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", - "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==", "engines": { - "node": 
">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "node": ">=8" } }, - "node_modules/has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", - "engines": { - "node": ">= 0.4" + "node_modules/finalhandler": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", + "integrity": "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==", + "dependencies": { + "debug": "2.6.9", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" }, - "funding": { - "url": "https://github.com/sponsors/ljharb" + "engines": { + "node": ">= 0.8" } }, - "node_modules/has-yarn": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/has-yarn/-/has-yarn-3.0.0.tgz", - "integrity": "sha512-IrsVwUHhEULx3R8f/aA8AHuEzAorplsab/v8HBzEiIukwq5i/EC+xmOW+HfP1OaDP+2JkgT1yILHN2O3UFIbcA==", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node_modules/finalhandler/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" } }, - "node_modules/hasown": { + "node_modules/finalhandler/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + }, + "node_modules/find-cache-dir": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-4.0.0.tgz", + "integrity": "sha512-9ZonPT4ZAK4a+1pUPVPZJapbi7O5qbbJPdYw/NOQWZZbVLdDTYM3A4R9z/DpAM08IDaFGsvPgiGZ82WEwUDWjg==", + "dependencies": { + "common-path-prefix": "^3.0.0", + "pkg-dir": "^7.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/find-up": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-6.3.0.tgz", + "integrity": "sha512-v2ZsoEuVHYy8ZIlYqwPe/39Cy+cFDzp4dXPaxNvkEuouymu+2Jbz0PxpKarJHYJTmv2HWT3O382qY8l4jMWthw==", + "dependencies": { + "locate-path": "^7.1.0", + "path-exists": "^5.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "bin": { + "flat": "cli.js" + } + }, + "node_modules/follow-redirects": { + "version": "1.15.6", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", + "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/foreach": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/foreach/-/foreach-2.0.6.tgz", + "integrity": "sha512-k6GAGDyqLe9JaebCsFCoudPPWfihKu8pylYXRlqP1J7ms39iPoTtk2fviNglIeQEwdh0bQeKJ01ZPyuyQvKzwg==" + }, + "node_modules/fork-ts-checker-webpack-plugin": { + "version": "6.5.3", + "resolved": 
"https://registry.npmjs.org/fork-ts-checker-webpack-plugin/-/fork-ts-checker-webpack-plugin-6.5.3.tgz", + "integrity": "sha512-SbH/l9ikmMWycd5puHJKTkZJKddF4iRLyW3DeZ08HTI7NGyLS38MXd/KGgeWumQO7YNQbW2u/NtPT2YowbPaGQ==", + "dependencies": { + "@babel/code-frame": "^7.8.3", + "@types/json-schema": "^7.0.5", + "chalk": "^4.1.0", + "chokidar": "^3.4.2", + "cosmiconfig": "^6.0.0", + "deepmerge": "^4.2.2", + "fs-extra": "^9.0.0", + "glob": "^7.1.6", + "memfs": "^3.1.2", + "minimatch": "^3.0.4", + "schema-utils": "2.7.0", + "semver": "^7.3.2", + "tapable": "^1.0.0" + }, + "engines": { + "node": ">=10", + "yarn": ">=1.0.0" + }, + "peerDependencies": { + "eslint": ">= 6", + "typescript": ">= 2.7", + "vue-template-compiler": "*", + "webpack": ">= 4" + }, + "peerDependenciesMeta": { + "eslint": { + "optional": true + }, + "vue-template-compiler": { + "optional": true + } + } + }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/ajv-keywords": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", + "peerDependencies": { + "ajv": "^6.9.1" + } + }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/cosmiconfig": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-6.0.0.tgz", + "integrity": 
"sha512-xb3ZL6+L8b9JLLCx3ZdoZy4+2ECphCMo2PwqgP1tlfVq6M6YReyzBJtvWWtbDSpNr9hn96pkCiZqUcFEc+54Qg==", + "dependencies": { + "@types/parse-json": "^4.0.0", + "import-fresh": "^3.1.0", + "parse-json": "^5.0.0", + "path-type": "^4.0.0", + "yaml": "^1.7.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/fs-extra": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", + "integrity": "sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ==", + "dependencies": { + "at-least-node": "^1.0.0", + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" + }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/schema-utils": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-2.7.0.tgz", + "integrity": "sha512-0ilKFI6QQF5nxDZLFn2dMjvc4hjg/Wkg7rHd3jK6/A4a1Hl9VFdQWvgB1UMGoU94pad1P/8N7fMcEnLnSiju8A==", + "dependencies": { + "@types/json-schema": "^7.0.4", + "ajv": "^6.12.2", + "ajv-keywords": "^3.4.1" + }, + "engines": { + "node": ">= 8.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/fork-ts-checker-webpack-plugin/node_modules/tapable": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-1.1.3.tgz", + "integrity": "sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA==", + "engines": { + "node": ">=6" + } + }, + "node_modules/form-data-encoder": { + "version": "2.1.4", + 
"resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-2.1.4.tgz", + "integrity": "sha512-yDYSgNMraqvnxiEXO4hi88+YZxaHC6QKzb5N84iRCTDeRO7ZALpir/lVmf/uXUhnwUr2O4HU8s/n6x+yNjQkHw==", + "engines": { + "node": ">= 14.17" + } + }, + "node_modules/format": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/format/-/format-0.2.2.tgz", + "integrity": "sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==", + "engines": { + "node": ">=0.4.x" + } + }, + "node_modules/forwarded": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", + "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fraction.js": { + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz", + "integrity": "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==", + "engines": { + "node": "*" + }, + "funding": { + "type": "patreon", + "url": "https://github.com/sponsors/rawify" + } + }, + "node_modules/fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fs-extra": { + "version": "11.2.0", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.2.0.tgz", + "integrity": "sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^6.0.1", + "universalify": "^2.0.0" + }, + "engines": { + "node": ">=14.14" + } + }, + "node_modules/fs-monkey": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/fs-monkey/-/fs-monkey-1.0.5.tgz", + "integrity": 
"sha512-8uMbBjrhzW76TYgEV27Y5E//W2f/lTFmx78P2w19FZSxarhI/798APGQyuGCwmkNxgwGRhrLfvWyLBvNtuOmew==" + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", + "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", 
+ "has-proto": "^1.0.1", + "has-symbols": "^1.0.3", + "hasown": "^2.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-own-enumerable-property-symbols": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz", + "integrity": "sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==" + }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/github-slugger": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/github-slugger/-/github-slugger-1.5.0.tgz", + "integrity": "sha512-wIh+gKBI9Nshz2o46B0B3f5k/W+WI9ZAv6y5Dn5WJ5SK1t0TnDimB4WE5rmTD05ZAIn8HALCZVmCsvj0w0v0lw==" + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + 
"node_modules/glob-to-regexp": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", + "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==" + }, + "node_modules/global-dirs": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.1.tgz", + "integrity": "sha512-NBcGGFbBA9s1VzD41QXDG+3++t9Mn5t1FpLdhESY6oKY4gYTFpX4wO3sqGUa0Srjtbfj3szX0RnemmrVRUdULA==", + "dependencies": { + "ini": "2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/global-dirs/node_modules/ini": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz", + "integrity": "sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==", + "engines": { + "node": ">=10" + } + }, + "node_modules/global-modules": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/global-modules/-/global-modules-2.0.0.tgz", + "integrity": "sha512-NGbfmJBp9x8IxyJSd1P+otYK8vonoJactOogrVfFRIAEY1ukil8RSKDz2Yo7wh1oihl51l/r6W4epkeKJHqL8A==", + "dependencies": { + "global-prefix": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/global-prefix": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/global-prefix/-/global-prefix-3.0.0.tgz", + "integrity": "sha512-awConJSVCHVGND6x3tmMaKcQvwXLhjdkmomy2W+Goaui8YPgYgXJZewhg3fWC+DlfqqQuWg8AwqjGTD2nAPVWg==", + "dependencies": { + "ini": "^1.3.5", + "kind-of": "^6.0.2", + "which": "^1.3.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/global-prefix/node_modules/which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + 
"which": "bin/which" + } + }, + "node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/gopd": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", + "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "dependencies": { + "get-intrinsic": "^1.1.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/got": { + "version": "12.6.1", + "resolved": "https://registry.npmjs.org/got/-/got-12.6.1.tgz", + "integrity": "sha512-mThBblvlAF1d4O5oqyvN+ZxLAYwIJK7bpMxgYqPD9okW0C3qm5FFn7k811QrcuEBwaogR3ngOFoCfs6mRv7teQ==", + "dependencies": { + "@sindresorhus/is": "^5.2.0", + "@szmarczak/http-timer": "^5.0.1", + "cacheable-lookup": "^7.0.0", + "cacheable-request": "^10.2.8", + "decompress-response": "^6.0.0", + "form-data-encoder": "^2.1.2", + "get-stream": "^6.0.1", + "http2-wrapper": "^2.1.10", + "lowercase-keys": "^3.0.0", + "p-cancelable": "^3.0.0", + "responselike": "^3.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sindresorhus/got?sponsor=1" + } + }, + "node_modules/got/node_modules/@sindresorhus/is": { + "version": "5.6.0", + "resolved": 
"https://registry.npmjs.org/@sindresorhus/is/-/is-5.6.0.tgz", + "integrity": "sha512-TV7t8GKYaJWsn00tFDqBw8+Uqmr8A0fRU1tvTQhyZzGv0sJCGRQL3JGMI3ucuKo3XIZdUP+Lx7/gh2t3lewy7g==", + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sindresorhus/is?sponsor=1" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" + }, + "node_modules/gray-matter": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/gray-matter/-/gray-matter-4.0.3.tgz", + "integrity": "sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==", + "dependencies": { + "js-yaml": "^3.13.1", + "kind-of": "^6.0.2", + "section-matter": "^1.0.0", + "strip-bom-string": "^1.0.0" + }, + "engines": { + "node": ">=6.0" + } + }, + "node_modules/gray-matter/node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/gray-matter/node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/gzip-size": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/gzip-size/-/gzip-size-6.0.0.tgz", + "integrity": "sha512-ax7ZYomf6jqPTQ4+XCpUGyXKHk5WweS+e05MBO4/y3WJ5RkmPXNKvX+bx1behVILVwr6JSQvZAku021CHPXG3Q==", + "dependencies": { + "duplexer": "^0.1.2" + }, + "engines": { + "node": ">=10" 
+ }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/handle-thing": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/handle-thing/-/handle-thing-2.0.1.tgz", + "integrity": "sha512-9Qn4yBxelxoh2Ow62nP+Ka/kMnOXRi8BXnRaUwezLNhqelnN49xKz4F/dPP8OYLxLxq6JDtZb2i9XznUQbNPTg==" + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "dependencies": { + "es-define-property": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", + "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", + "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-yarn": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-yarn/-/has-yarn-3.0.0.tgz", + "integrity": "sha512-IrsVwUHhEULx3R8f/aA8AHuEzAorplsab/v8HBzEiIukwq5i/EC+xmOW+HfP1OaDP+2JkgT1yILHN2O3UFIbcA==", + "engines": { + 
"node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/hasown": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz", "integrity": "sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==", @@ -7814,6 +8396,15 @@ "resolved": "https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.1.1.tgz", "integrity": "sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q==" }, + "node_modules/internmap": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", + "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, "node_modules/interpret": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz", @@ -8263,6 +8854,31 @@ "graceful-fs": "^4.1.6" } }, + "node_modules/katex": { + "version": "0.16.11", + "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.11.tgz", + "integrity": "sha512-RQrI8rlHY92OLf3rho/Ts8i/XvjgguEjOkO1BEXcU3N8BqPpSzBNwV/G0Ukr+P/l3ivvJUE/Fa/CwbS6HesGNQ==", + "funding": [ + "https://opencollective.com/katex", + "https://github.com/sponsors/katex" + ], + "license": "MIT", + "dependencies": { + "commander": "^8.3.0" + }, + "bin": { + "katex": "cli.js" + } + }, + "node_modules/katex/node_modules/commander": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", + "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", + "license": "MIT", + "engines": { + "node": ">= 12" + } + }, "node_modules/keyv": { "version": "4.5.4", "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", @@ -8271,6 +8887,11 @@ "json-buffer": "3.0.1" } }, + 
"node_modules/khroma": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/khroma/-/khroma-2.1.0.tgz", + "integrity": "sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw==" + }, "node_modules/kind-of": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", @@ -8287,355 +8908,733 @@ "node": ">=6" } }, - "node_modules/latest-version": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/latest-version/-/latest-version-7.0.0.tgz", - "integrity": "sha512-KvNT4XqAMzdcL6ka6Tl3i2lYeFDgXNCuIX+xNx6ZMVR1dFq+idXd9FLKNMOIx0t9mJ9/HudyX4oZWXZQ0UJHeg==", + "node_modules/latest-version": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/latest-version/-/latest-version-7.0.0.tgz", + "integrity": "sha512-KvNT4XqAMzdcL6ka6Tl3i2lYeFDgXNCuIX+xNx6ZMVR1dFq+idXd9FLKNMOIx0t9mJ9/HudyX4oZWXZQ0UJHeg==", + "dependencies": { + "package-json": "^8.1.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/launch-editor": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/launch-editor/-/launch-editor-2.6.1.tgz", + "integrity": "sha512-eB/uXmFVpY4zezmGp5XtU21kwo7GBbKB+EQ+UZeWtGb9yAM5xt/Evk+lYH3eRNAtId+ej4u7TYPFZ07w4s7rRw==", + "dependencies": { + "picocolors": "^1.0.0", + "shell-quote": "^1.8.1" + } + }, + "node_modules/layout-base": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-1.0.2.tgz", + "integrity": "sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==", + "license": "MIT" + }, + "node_modules/leven": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "engines": { + "node": ">=6" + } + }, + "node_modules/lilconfig": { + "version": 
"3.1.1", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.1.tgz", + "integrity": "sha512-O18pf7nyvHTckunPWCV1XUNXU1piu01y2b7ATJ0ppkUkk8ocqVWBrYjJBCwHDjD/ZWcfyrA0P4gKhzWGi5EINQ==", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==" + }, + "node_modules/load-script": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/load-script/-/load-script-1.0.0.tgz", + "integrity": "sha512-kPEjMFtZvwL9TaZo0uZ2ml+Ye9HUMmPwbYRJ324qF9tqMejwykJ5ggTyvzmrbBeapCAbk98BSbTeovHEEP1uCA==" + }, + "node_modules/loader-runner": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-4.3.0.tgz", + "integrity": "sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg==", + "engines": { + "node": ">=6.11.5" + } + }, + "node_modules/loader-utils": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.4.tgz", + "integrity": "sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw==", + "dependencies": { + "big.js": "^5.2.2", + "emojis-list": "^3.0.0", + "json5": "^2.1.2" + }, + "engines": { + "node": ">=8.9.0" + } + }, + "node_modules/locate-path": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-7.2.0.tgz", + "integrity": "sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA==", + "dependencies": { + "p-locate": "^6.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash": { + "version": 
"4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + }, + "node_modules/lodash-es": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", + "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==", + "license": "MIT" + }, + "node_modules/lodash.debounce": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", + "integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==" + }, + "node_modules/lodash.isequal": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz", + "integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==" + }, + "node_modules/lodash.memoize": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", + "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==" + }, + "node_modules/lodash.uniq": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz", + "integrity": "sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==" + }, + "node_modules/longest-streak": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz", + "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + 
"integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/lower-case": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", + "integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==", + "dependencies": { + "tslib": "^2.0.3" + } + }, + "node_modules/lowercase-keys": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-3.0.0.tgz", + "integrity": "sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ==", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/lunr": { + "version": "2.3.9", + "resolved": "https://registry.npmjs.org/lunr/-/lunr-2.3.9.tgz", + "integrity": "sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow==" + }, + "node_modules/mark.js": { + "version": "8.11.1", + "resolved": "https://registry.npmjs.org/mark.js/-/mark.js-8.11.1.tgz", + "integrity": "sha512-1I+1qpDt4idfgLQG+BNWmrqku+7/2bi5nLf4YwF8y8zXvmfiTBY3PV3ZibfrjBueCByROpuBjLLFCajqkgYoLQ==" + }, + "node_modules/markdown-extensions": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/markdown-extensions/-/markdown-extensions-2.0.0.tgz", + "integrity": "sha512-o5vL7aDWatOTX8LzaS1WMoaoxIiLRQJuIKKe2wAw6IeULDHaqbiqiggmx+pKvZDb1Sj+pE46Sn1T7lCqfFtg1Q==", + "engines": { + "node": ">=16" + }, + "funding": { 
+ "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/markdown-table": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.3.tgz", + "integrity": "sha512-Z1NL3Tb1M9wH4XESsCDEksWoKTdlUafKc4pt0GRwjUyXaCFZ+dc3g2erqB6zm3szA2IUSi7VnPI+o/9jnxh9hw==", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/marked": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/marked/-/marked-4.3.0.tgz", + "integrity": "sha512-PRsaiG84bK+AMvxziE/lCFss8juXjNaWzVbN5tXAm4XjeaS9NAHhop+PjQxz2A9h8Q4M/xGmzP8vqNwy6JeK0A==", + "bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 12" + } + }, + "node_modules/mdast-util-directive": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-directive/-/mdast-util-directive-3.0.0.tgz", + "integrity": "sha512-JUpYOqKI4mM3sZcNxmF/ox04XYFFkNwr0CFlrQIkCwbvH0xzMCqkMqAde9wRd80VAhaUrwFwKm2nxretdT1h7Q==", "dependencies": { - "package-json": "^8.1.0" - }, - "engines": { - "node": ">=14.16" + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0", + "parse-entities": "^4.0.0", + "stringify-entities": "^4.0.0", + "unist-util-visit-parents": "^6.0.0" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/launch-editor": { - "version": "2.6.1", - "resolved": "https://registry.npmjs.org/launch-editor/-/launch-editor-2.6.1.tgz", - "integrity": "sha512-eB/uXmFVpY4zezmGp5XtU21kwo7GBbKB+EQ+UZeWtGb9yAM5xt/Evk+lYH3eRNAtId+ej4u7TYPFZ07w4s7rRw==", + "node_modules/mdast-util-find-and-replace": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.1.tgz", + "integrity": 
"sha512-SG21kZHGC3XRTSUhtofZkBzZTJNM5ecCi0SK2IMKmSXR8vO3peL+kb1O0z7Zl83jKtutG4k5Wv/W7V3/YHvzPA==", "dependencies": { - "picocolors": "^1.0.0", - "shell-quote": "^1.8.1" + "@types/mdast": "^4.0.0", + "escape-string-regexp": "^5.0.0", + "unist-util-is": "^6.0.0", + "unist-util-visit-parents": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/leven": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", - "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "node_modules/mdast-util-find-and-replace/node_modules/escape-string-regexp": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", + "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", "engines": { - "node": ">=6" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/lilconfig": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.1.tgz", - "integrity": "sha512-O18pf7nyvHTckunPWCV1XUNXU1piu01y2b7ATJ0ppkUkk8ocqVWBrYjJBCwHDjD/ZWcfyrA0P4gKhzWGi5EINQ==", - "engines": { - "node": ">=14" + "node_modules/mdast-util-from-markdown": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.1.tgz", + "integrity": "sha512-aJEUyzZ6TzlsX2s5B4Of7lN7EQtAxvtradMMglCQDyaTFgse6CmtmdJ15ElnVRlCg1vpNyVtbem0PWzlNieZsA==", + "dependencies": { + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "decode-named-character-reference": "^1.0.0", + "devlop": "^1.0.0", + "mdast-util-to-string": "^4.0.0", + "micromark": "^4.0.0", + "micromark-util-decode-numeric-character-reference": "^2.0.0", + "micromark-util-decode-string": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0", + 
"micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0", + "unist-util-stringify-position": "^4.0.0" }, "funding": { - "url": "https://github.com/sponsors/antonk52" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/lines-and-columns": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", - "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==" + "node_modules/mdast-util-from-markdown/node_modules/micromark-util-symbol": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.0.tgz", + "integrity": "sha512-8JZt9ElZ5kyTnO94muPxIGS8oyElRJaiJO8EzV6ZSyGQ1Is8xwl4Q45qU5UOg+bGH4AikWziz0iN4sFLWs8PGw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] }, - "node_modules/load-script": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/load-script/-/load-script-1.0.0.tgz", - "integrity": "sha512-kPEjMFtZvwL9TaZo0uZ2ml+Ye9HUMmPwbYRJ324qF9tqMejwykJ5ggTyvzmrbBeapCAbk98BSbTeovHEEP1uCA==" + "node_modules/mdast-util-frontmatter": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-frontmatter/-/mdast-util-frontmatter-2.0.1.tgz", + "integrity": "sha512-LRqI9+wdgC25P0URIJY9vwocIzCcksduHQ9OF2joxQoyTNVduwLAFUzjoopuRJbJAReaKrNQKAZKL3uCMugWJA==", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "escape-string-regexp": "^5.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0", + "micromark-extension-frontmatter": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } }, - "node_modules/loader-runner": { - "version": "4.3.0", - "resolved": 
"https://registry.npmjs.org/loader-runner/-/loader-runner-4.3.0.tgz", - "integrity": "sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg==", + "node_modules/mdast-util-frontmatter/node_modules/escape-string-regexp": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", + "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", "engines": { - "node": ">=6.11.5" + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/loader-utils": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.4.tgz", - "integrity": "sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw==", + "node_modules/mdast-util-gfm": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-3.0.0.tgz", + "integrity": "sha512-dgQEX5Amaq+DuUqf26jJqSK9qgixgd6rYDHAv4aTBuA92cTknZlKpPfa86Z/s8Dj8xsAQpFfBmPUHWJBWqS4Bw==", "dependencies": { - "big.js": "^5.2.2", - "emojis-list": "^3.0.0", - "json5": "^2.1.2" + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-gfm-autolink-literal": "^2.0.0", + "mdast-util-gfm-footnote": "^2.0.0", + "mdast-util-gfm-strikethrough": "^2.0.0", + "mdast-util-gfm-table": "^2.0.0", + "mdast-util-gfm-task-list-item": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" }, - "engines": { - "node": ">=8.9.0" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/locate-path": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-7.2.0.tgz", - "integrity": "sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA==", + "node_modules/mdast-util-gfm-autolink-literal": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-2.0.0.tgz", + "integrity": "sha512-FyzMsduZZHSc3i0Px3PQcBT4WJY/X/RCtEJKuybiC6sjPqLv7h1yqAkmILZtuxMSsUyaLUWNp71+vQH2zqp5cg==", "dependencies": { - "p-locate": "^6.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + "@types/mdast": "^4.0.0", + "ccount": "^2.0.0", + "devlop": "^1.0.0", + "mdast-util-find-and-replace": "^3.0.0", + "micromark-util-character": "^2.0.0" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" - }, - "node_modules/lodash.debounce": { - "version": "4.0.8", - "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", - "integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==" - }, - "node_modules/lodash.isequal": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/lodash.isequal/-/lodash.isequal-4.5.0.tgz", - "integrity": "sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==" - }, - "node_modules/lodash.memoize": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", - "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==" - }, - "node_modules/lodash.uniq": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz", - "integrity": "sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==" - }, - "node_modules/longest-streak": { - "version": "3.1.0", - "resolved": 
"https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz", - "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==", - "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" + "node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-character": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", + "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^2.0.0", + "micromark-util-types": "^2.0.0" } }, - "node_modules/loose-envify": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", - "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-symbol": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.0.tgz", + "integrity": "sha512-8JZt9ElZ5kyTnO94muPxIGS8oyElRJaiJO8EzV6ZSyGQ1Is8xwl4Q45qU5UOg+bGH4AikWziz0iN4sFLWs8PGw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/mdast-util-gfm-footnote": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.0.0.tgz", + "integrity": "sha512-5jOT2boTSVkMnQ7LTrd6n/18kqwjmuYqo7JUPe+tRCY6O7dAuTFMtTPauYYrMPpox9hlN0uOx/FL8XvEfG9/mQ==", "dependencies": { - "js-tokens": "^3.0.0 || ^4.0.0" + 
"@types/mdast": "^4.0.0", + "devlop": "^1.1.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0", + "micromark-util-normalize-identifier": "^2.0.0" }, - "bin": { - "loose-envify": "cli.js" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/lower-case": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", - "integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==", + "node_modules/mdast-util-gfm-strikethrough": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-2.0.0.tgz", + "integrity": "sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==", "dependencies": { - "tslib": "^2.0.3" - } - }, - "node_modules/lowercase-keys": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-3.0.0.tgz", - "integrity": "sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ==", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + "@types/mdast": "^4.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/lru-cache": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", - "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "node_modules/mdast-util-gfm-table": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-2.0.0.tgz", + "integrity": "sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg==", 
"dependencies": { - "yallist": "^3.0.2" + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "markdown-table": "^3.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/lunr": { - "version": "2.3.9", - "resolved": "https://registry.npmjs.org/lunr/-/lunr-2.3.9.tgz", - "integrity": "sha512-zTU3DaZaF3Rt9rhN3uBMGQD3dD2/vFQqnvZCDv4dl5iOzq2IZQqTxu90r4E5J+nP70J3ilqVCrbho2eWaeW8Ow==" - }, - "node_modules/mark.js": { - "version": "8.11.1", - "resolved": "https://registry.npmjs.org/mark.js/-/mark.js-8.11.1.tgz", - "integrity": "sha512-1I+1qpDt4idfgLQG+BNWmrqku+7/2bi5nLf4YwF8y8zXvmfiTBY3PV3ZibfrjBueCByROpuBjLLFCajqkgYoLQ==" - }, - "node_modules/markdown-extensions": { + "node_modules/mdast-util-gfm-task-list-item": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/markdown-extensions/-/markdown-extensions-2.0.0.tgz", - "integrity": "sha512-o5vL7aDWatOTX8LzaS1WMoaoxIiLRQJuIKKe2wAw6IeULDHaqbiqiggmx+pKvZDb1Sj+pE46Sn1T7lCqfFtg1Q==", - "engines": { - "node": ">=16" + "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-2.0.0.tgz", + "integrity": "sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==", + "dependencies": { + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/markdown-table": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.3.tgz", - "integrity": "sha512-Z1NL3Tb1M9wH4XESsCDEksWoKTdlUafKc4pt0GRwjUyXaCFZ+dc3g2erqB6zm3szA2IUSi7VnPI+o/9jnxh9hw==", + "node_modules/mdast-util-mdx": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/mdast-util-mdx/-/mdast-util-mdx-3.0.0.tgz", + "integrity": "sha512-JfbYLAW7XnYTTbUsmpu0kdBUVe+yKVJZBItEjwyYJiDJuZ9w4eeaqks4HQO+R7objWgS2ymV60GYpI14Ug554w==", + "dependencies": { + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-mdx-expression": "^2.0.0", + "mdast-util-mdx-jsx": "^3.0.0", + "mdast-util-mdxjs-esm": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" + }, "funding": { - "type": "github", - "url": "https://github.com/sponsors/wooorm" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/marked": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/marked/-/marked-4.3.0.tgz", - "integrity": "sha512-PRsaiG84bK+AMvxziE/lCFss8juXjNaWzVbN5tXAm4XjeaS9NAHhop+PjQxz2A9h8Q4M/xGmzP8vqNwy6JeK0A==", - "bin": { - "marked": "bin/marked.js" + "node_modules/mdast-util-mdx-expression": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-2.0.0.tgz", + "integrity": "sha512-fGCu8eWdKUKNu5mohVGkhBXCXGnOTLuFqOvGMvdikr+J1w7lDJgxThOKpwRWzzbyXAU2hhSwsmssOY4yTokluw==", + "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", + "@types/mdast": "^4.0.0", + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" }, - "engines": { - "node": ">= 12" + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/mdast-util-directive": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-directive/-/mdast-util-directive-3.0.0.tgz", - "integrity": "sha512-JUpYOqKI4mM3sZcNxmF/ox04XYFFkNwr0CFlrQIkCwbvH0xzMCqkMqAde9wRd80VAhaUrwFwKm2nxretdT1h7Q==", + "node_modules/mdast-util-mdx-jsx": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-3.1.2.tgz", + "integrity": "sha512-eKMQDeywY2wlHc97k5eD8VC+9ASMjN8ItEZQNGwJ6E0XWKiW/Z0V5/H8pvoXUf+y+Mj0VIgeRRbujBmFn4FTyA==", 
"dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", "@types/unist": "^3.0.0", - "devlop": "^1.0.0", + "ccount": "^2.0.0", + "devlop": "^1.1.0", "mdast-util-from-markdown": "^2.0.0", "mdast-util-to-markdown": "^2.0.0", "parse-entities": "^4.0.0", "stringify-entities": "^4.0.0", - "unist-util-visit-parents": "^6.0.0" + "unist-util-remove-position": "^5.0.0", + "unist-util-stringify-position": "^4.0.0", + "vfile-message": "^4.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - "node_modules/mdast-util-find-and-replace": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.1.tgz", - "integrity": "sha512-SG21kZHGC3XRTSUhtofZkBzZTJNM5ecCi0SK2IMKmSXR8vO3peL+kb1O0z7Zl83jKtutG4k5Wv/W7V3/YHvzPA==", + "node_modules/mdast-util-mdxjs-esm": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-2.0.1.tgz", + "integrity": "sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg==", "dependencies": { + "@types/estree-jsx": "^1.0.0", + "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", - "escape-string-regexp": "^5.0.0", - "unist-util-is": "^6.0.0", - "unist-util-visit-parents": "^6.0.0" + "devlop": "^1.0.0", + "mdast-util-from-markdown": "^2.0.0", + "mdast-util-to-markdown": "^2.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - "node_modules/mdast-util-find-and-replace/node_modules/escape-string-regexp": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", - "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", - "engines": { - "node": ">=12" + "node_modules/mdast-util-phrasing": { + "version": "4.1.0", + "resolved": 
"https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz", + "integrity": "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==", + "dependencies": { + "@types/mdast": "^4.0.0", + "unist-util-is": "^6.0.0" }, "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "type": "opencollective", + "url": "https://opencollective.com/unified" } }, - "node_modules/mdast-util-from-markdown": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.1.tgz", - "integrity": "sha512-aJEUyzZ6TzlsX2s5B4Of7lN7EQtAxvtradMMglCQDyaTFgse6CmtmdJ15ElnVRlCg1vpNyVtbem0PWzlNieZsA==", + "node_modules/mdast-util-to-hast": { + "version": "13.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.0.tgz", + "integrity": "sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==", "dependencies": { + "@types/hast": "^3.0.0", "@types/mdast": "^4.0.0", - "@types/unist": "^3.0.0", - "decode-named-character-reference": "^1.0.0", + "@ungap/structured-clone": "^1.0.0", "devlop": "^1.0.0", + "micromark-util-sanitize-uri": "^2.0.0", + "trim-lines": "^3.0.0", + "unist-util-position": "^5.0.0", + "unist-util-visit": "^5.0.0", + "vfile": "^6.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-markdown": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.0.tgz", + "integrity": "sha512-SR2VnIEdVNCJbP6y7kVTJgPLifdr8WEU440fQec7qHoHOUz/oJ2jmNRqdDQ3rbiStOXb2mCDGTuwsK5OPUgYlQ==", + "dependencies": { + "@types/mdast": "^4.0.0", + "@types/unist": "^3.0.0", + "longest-streak": "^3.0.0", + "mdast-util-phrasing": "^4.0.0", "mdast-util-to-string": "^4.0.0", - "micromark": "^4.0.0", - "micromark-util-decode-numeric-character-reference": "^2.0.0", 
"micromark-util-decode-string": "^2.0.0", - "micromark-util-normalize-identifier": "^2.0.0", - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0", - "unist-util-stringify-position": "^4.0.0" + "unist-util-visit": "^5.0.0", + "zwitch": "^2.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - "node_modules/mdast-util-from-markdown/node_modules/micromark-util-symbol": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.0.tgz", - "integrity": "sha512-8JZt9ElZ5kyTnO94muPxIGS8oyElRJaiJO8EzV6ZSyGQ1Is8xwl4Q45qU5UOg+bGH4AikWziz0iN4sFLWs8PGw==", - "funding": [ - { - "type": "GitHub Sponsors", - "url": "https://github.com/sponsors/unifiedjs" - }, - { - "type": "OpenCollective", - "url": "https://opencollective.com/unified" - } - ] - }, - "node_modules/mdast-util-frontmatter": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/mdast-util-frontmatter/-/mdast-util-frontmatter-2.0.1.tgz", - "integrity": "sha512-LRqI9+wdgC25P0URIJY9vwocIzCcksduHQ9OF2joxQoyTNVduwLAFUzjoopuRJbJAReaKrNQKAZKL3uCMugWJA==", + "node_modules/mdast-util-to-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz", + "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==", "dependencies": { - "@types/mdast": "^4.0.0", - "devlop": "^1.0.0", - "escape-string-regexp": "^5.0.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0", - "micromark-extension-frontmatter": "^2.0.0" + "@types/mdast": "^4.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - "node_modules/mdast-util-frontmatter/node_modules/escape-string-regexp": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", - "integrity": 
"sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", + "node_modules/mdn-data": { + "version": "2.0.30", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.30.tgz", + "integrity": "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==" + }, + "node_modules/media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/memfs": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/memfs/-/memfs-3.5.3.tgz", + "integrity": "sha512-UERzLsxzllchadvbPs5aolHh65ISpKpM+ccLbOJ8/vvpBKmAWf+la7dXFy7Mr0ySHbdHrFv5kGFCUHHe6GFEmw==", + "dependencies": { + "fs-monkey": "^1.0.4" + }, + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/memoize-one": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/memoize-one/-/memoize-one-5.2.1.tgz", + "integrity": "sha512-zYiwtZUcYyXKo/np96AGZAckk+FWWsUdJ3cHGGmld7+AhvcWmQyGCYUh1hc4Q/pkOhb65dQR/pqCyK0cOaHz4Q==" + }, + "node_modules/merge-descriptors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", + "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==" + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", "engines": { - "node": ">=12" 
- }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" + "node": ">= 8" } }, - "node_modules/mdast-util-gfm": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-3.0.0.tgz", - "integrity": "sha512-dgQEX5Amaq+DuUqf26jJqSK9qgixgd6rYDHAv4aTBuA92cTknZlKpPfa86Z/s8Dj8xsAQpFfBmPUHWJBWqS4Bw==", + "node_modules/mermaid": { + "version": "10.9.1", + "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.9.1.tgz", + "integrity": "sha512-Mx45Obds5W1UkW1nv/7dHRsbfMM1aOKA2+Pxs/IGHNonygDHwmng8xTHyS9z4KWVi0rbko8gjiBmuwwXQ7tiNA==", + "license": "MIT", + "dependencies": { + "@braintree/sanitize-url": "^6.0.1", + "@types/d3-scale": "^4.0.3", + "@types/d3-scale-chromatic": "^3.0.0", + "cytoscape": "^3.28.1", + "cytoscape-cose-bilkent": "^4.1.0", + "d3": "^7.4.0", + "d3-sankey": "^0.12.3", + "dagre-d3-es": "7.0.10", + "dayjs": "^1.11.7", + "dompurify": "^3.0.5", + "elkjs": "^0.9.0", + "katex": "^0.16.9", + "khroma": "^2.0.0", + "lodash-es": "^4.17.21", + "mdast-util-from-markdown": "^1.3.0", + "non-layered-tidy-tree-layout": "^2.0.2", + "stylis": "^4.1.3", + "ts-dedent": "^2.2.0", + "uuid": "^9.0.0", + "web-worker": "^1.2.0" + } + }, + "node_modules/mermaid/node_modules/@types/mdast": { + "version": "3.0.15", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.15.tgz", + "integrity": "sha512-LnwD+mUEfxWMa1QpDraczIn6k0Ee3SMicuYSSzS6ZYl2gKS09EClnJYGd8Du6rfc5r/GZEk5o1mRb8TaTj03sQ==", + "license": "MIT", + "dependencies": { + "@types/unist": "^2" + } + }, + "node_modules/mermaid/node_modules/@types/unist": { + "version": "2.0.10", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.10.tgz", + "integrity": "sha512-IfYcSBWE3hLpBg8+X2SEa8LVkJdJEkT2Ese2aaLs3ptGdVtABxndrMaxuFlQ1qdFf9Q5rDvDpxI3WwgvKFAsQA==", + "license": "MIT" + }, + "node_modules/mermaid/node_modules/dompurify": { + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.1.6.tgz", + "integrity": 
"sha512-cTOAhc36AalkjtBpfG6O8JimdTMWNXjiePT2xQH/ppBGi/4uIpmj8eKyIkMJErXWARyINV/sB38yf8JCLF5pbQ==", + "license": "(MPL-2.0 OR Apache-2.0)" + }, + "node_modules/mermaid/node_modules/mdast-util-from-markdown": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-1.3.1.tgz", + "integrity": "sha512-4xTO/M8c82qBcnQc1tgpNtubGUW/Y1tBQ1B0i5CtSoelOLKFYlElIr3bvgREYYO5iRqbMY1YuqZng0GVOI8Qww==", + "license": "MIT", "dependencies": { - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-gfm-autolink-literal": "^2.0.0", - "mdast-util-gfm-footnote": "^2.0.0", - "mdast-util-gfm-strikethrough": "^2.0.0", - "mdast-util-gfm-table": "^2.0.0", - "mdast-util-gfm-task-list-item": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0" + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "decode-named-character-reference": "^1.0.0", + "mdast-util-to-string": "^3.1.0", + "micromark": "^3.0.0", + "micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-decode-string": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "unist-util-stringify-position": "^3.0.0", + "uvu": "^0.5.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - "node_modules/mdast-util-gfm-autolink-literal": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-2.0.0.tgz", - "integrity": "sha512-FyzMsduZZHSc3i0Px3PQcBT4WJY/X/RCtEJKuybiC6sjPqLv7h1yqAkmILZtuxMSsUyaLUWNp71+vQH2zqp5cg==", + "node_modules/mermaid/node_modules/mdast-util-to-string": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz", + "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==", + "license": "MIT", "dependencies": { - "@types/mdast": "^4.0.0", 
- "ccount": "^2.0.0", - "devlop": "^1.0.0", - "mdast-util-find-and-replace": "^3.0.0", - "micromark-util-character": "^2.0.0" + "@types/mdast": "^3.0.0" }, "funding": { "type": "opencollective", "url": "https://opencollective.com/unified" } }, - "node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-character": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.0.tgz", - "integrity": "sha512-KvOVV+X1yLBfs9dCBSopq/+G1PcgT3lAK07mC4BzXi5E7ahzMAF8oIupDDJ6mievI6F+lAATkbQQlQixJfT3aQ==", + "node_modules/mermaid/node_modules/micromark": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-3.2.0.tgz", + "integrity": "sha512-uD66tJj54JLYq0De10AhWycZWGQNUvDI55xPgk2sQM5kn1JYlhbCMTtEeT27+vAhW2FBQxLlOmS3pmA7/2z4aA==", "funding": [ { "type": "GitHub Sponsors", @@ -8646,15 +9645,31 @@ "url": "https://opencollective.com/unified" } ], + "license": "MIT", "dependencies": { - "micromark-util-symbol": "^2.0.0", - "micromark-util-types": "^2.0.0" + "@types/debug": "^4.0.0", + "debug": "^4.0.0", + "decode-named-character-reference": "^1.0.0", + "micromark-core-commonmark": "^1.0.1", + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-chunked": "^1.0.0", + "micromark-util-combine-extensions": "^1.0.0", + "micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-encode": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-resolve-all": "^1.0.0", + "micromark-util-sanitize-uri": "^1.0.0", + "micromark-util-subtokenize": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.1", + "uvu": "^0.5.0" } }, - "node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-symbol": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.0.tgz", - "integrity": 
"sha512-8JZt9ElZ5kyTnO94muPxIGS8oyElRJaiJO8EzV6ZSyGQ1Is8xwl4Q45qU5UOg+bGH4AikWziz0iN4sFLWs8PGw==", + "node_modules/mermaid/node_modules/micromark-core-commonmark": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-1.1.0.tgz", + "integrity": "sha512-BgHO1aRbolh2hcrzL2d1La37V0Aoz73ymF8rAcKnohLy93titmv62E0gP8Hrx9PKcKrqCZ1BbLGbP3bEhoXYlw==", "funding": [ { "type": "GitHub Sponsors", @@ -8664,252 +9679,368 @@ "type": "OpenCollective", "url": "https://opencollective.com/unified" } - ] - }, - "node_modules/mdast-util-gfm-footnote": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.0.0.tgz", - "integrity": "sha512-5jOT2boTSVkMnQ7LTrd6n/18kqwjmuYqo7JUPe+tRCY6O7dAuTFMtTPauYYrMPpox9hlN0uOx/FL8XvEfG9/mQ==", + ], + "license": "MIT", "dependencies": { - "@types/mdast": "^4.0.0", - "devlop": "^1.1.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0", - "micromark-util-normalize-identifier": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "decode-named-character-reference": "^1.0.0", + "micromark-factory-destination": "^1.0.0", + "micromark-factory-label": "^1.0.0", + "micromark-factory-space": "^1.0.0", + "micromark-factory-title": "^1.0.0", + "micromark-factory-whitespace": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-chunked": "^1.0.0", + "micromark-util-classify-character": "^1.0.0", + "micromark-util-html-tag-name": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-resolve-all": "^1.0.0", + "micromark-util-subtokenize": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.1", + "uvu": "^0.5.0" } }, - "node_modules/mdast-util-gfm-strikethrough": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-2.0.0.tgz", 
- "integrity": "sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==", + "node_modules/mermaid/node_modules/micromark-factory-destination": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-1.1.0.tgz", + "integrity": "sha512-XaNDROBgx9SgSChd69pjiGKbV+nfHGDPVYFs5dOoDd7ZnMAE+Cuu91BCpsY8RT2NP9vo/B8pds2VQNCLiu0zhg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", "dependencies": { - "@types/mdast": "^4.0.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" } }, - "node_modules/mdast-util-gfm-table": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-2.0.0.tgz", - "integrity": "sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg==", + "node_modules/mermaid/node_modules/micromark-factory-label": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-1.1.0.tgz", + "integrity": "sha512-OLtyez4vZo/1NjxGhcpDSbHQ+m0IIGnT8BoPamh+7jVlzLJBH98zzuCoUeMxvM6WsNeh8wx8cKvqLiPHEACn0w==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", "dependencies": { - "@types/mdast": "^4.0.0", - "devlop": "^1.0.0", - "markdown-table": "^3.0.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0" - }, - "funding": { - "type": "opencollective", - 
"url": "https://opencollective.com/unified" + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "uvu": "^0.5.0" } }, - "node_modules/mdast-util-gfm-task-list-item": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-2.0.0.tgz", - "integrity": "sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==", + "node_modules/mermaid/node_modules/micromark-factory-title": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-1.1.0.tgz", + "integrity": "sha512-J7n9R3vMmgjDOCY8NPw55jiyaQnH5kBdV2/UXCtZIpnHH3P6nHUKaH7XXEYuWwx/xUJcawa8plLBEjMPU24HzQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", "dependencies": { - "@types/mdast": "^4.0.0", - "devlop": "^1.0.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" } }, - "node_modules/mdast-util-mdx": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-mdx/-/mdast-util-mdx-3.0.0.tgz", - "integrity": "sha512-JfbYLAW7XnYTTbUsmpu0kdBUVe+yKVJZBItEjwyYJiDJuZ9w4eeaqks4HQO+R7objWgS2ymV60GYpI14Ug554w==", + "node_modules/mermaid/node_modules/micromark-factory-whitespace": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-1.1.0.tgz", + "integrity": "sha512-v2WlmiymVSp5oMg+1Q0N1Lxmt6pMhIHD457whWM7/GUlEks1hI9xj5w3zbc4uuMKXGisksZk8DzP2UyGbGqNsQ==", + "funding": [ + { + "type": 
"GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", "dependencies": { - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-mdx-expression": "^2.0.0", - "mdast-util-mdx-jsx": "^3.0.0", - "mdast-util-mdxjs-esm": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" } }, - "node_modules/mdast-util-mdx-expression": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-2.0.0.tgz", - "integrity": "sha512-fGCu8eWdKUKNu5mohVGkhBXCXGnOTLuFqOvGMvdikr+J1w7lDJgxThOKpwRWzzbyXAU2hhSwsmssOY4yTokluw==", + "node_modules/mermaid/node_modules/micromark-util-chunked": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-1.1.0.tgz", + "integrity": "sha512-Ye01HXpkZPNcV6FiyoW2fGZDUw4Yc7vT0E9Sad83+bEDiCJ1uXu0S3mr8WLpsz3HaG3x2q0HM6CTuPdcZcluFQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", "dependencies": { - "@types/estree-jsx": "^1.0.0", - "@types/hast": "^3.0.0", - "@types/mdast": "^4.0.0", - "devlop": "^1.0.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "micromark-util-symbol": "^1.0.0" } }, - "node_modules/mdast-util-mdx-jsx": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/mdast-util-mdx-jsx/-/mdast-util-mdx-jsx-3.1.2.tgz", - "integrity": 
"sha512-eKMQDeywY2wlHc97k5eD8VC+9ASMjN8ItEZQNGwJ6E0XWKiW/Z0V5/H8pvoXUf+y+Mj0VIgeRRbujBmFn4FTyA==", - "dependencies": { - "@types/estree-jsx": "^1.0.0", - "@types/hast": "^3.0.0", - "@types/mdast": "^4.0.0", - "@types/unist": "^3.0.0", - "ccount": "^2.0.0", - "devlop": "^1.1.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0", - "parse-entities": "^4.0.0", - "stringify-entities": "^4.0.0", - "unist-util-remove-position": "^5.0.0", - "unist-util-stringify-position": "^4.0.0", - "vfile-message": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "node_modules/mermaid/node_modules/micromark-util-classify-character": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-1.1.0.tgz", + "integrity": "sha512-SL0wLxtKSnklKSUplok1WQFoGhUdWYKggKUiqhX+Swala+BtptGCu5iPRc+xvzJ4PXE/hwM3FNXsfEVgoZsWbw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" } }, - "node_modules/mdast-util-mdxjs-esm": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/mdast-util-mdxjs-esm/-/mdast-util-mdxjs-esm-2.0.1.tgz", - "integrity": "sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg==", + "node_modules/mermaid/node_modules/micromark-util-combine-extensions": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-1.1.0.tgz", + "integrity": "sha512-Q20sp4mfNf9yEqDL50WwuWZHUrCO4fEyeDCnMGmG5Pr0Cz15Uo7KBs6jq+dq0EgX4DPwwrh9m0X+zPV1ypFvUA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": 
"https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", "dependencies": { - "@types/estree-jsx": "^1.0.0", - "@types/hast": "^3.0.0", - "@types/mdast": "^4.0.0", - "devlop": "^1.0.0", - "mdast-util-from-markdown": "^2.0.0", - "mdast-util-to-markdown": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "micromark-util-chunked": "^1.0.0", + "micromark-util-types": "^1.0.0" } }, - "node_modules/mdast-util-phrasing": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz", - "integrity": "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==", + "node_modules/mermaid/node_modules/micromark-util-decode-numeric-character-reference": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-1.1.0.tgz", + "integrity": "sha512-m9V0ExGv0jB1OT21mrWcuf4QhP46pH1KkfWy9ZEezqHKAxkj4mPCy3nIH1rkbdMlChLHX531eOrymlwyZIf2iw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", "dependencies": { - "@types/mdast": "^4.0.0", - "unist-util-is": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "micromark-util-symbol": "^1.0.0" } }, - "node_modules/mdast-util-to-hast": { - "version": "13.2.0", - "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.0.tgz", - "integrity": "sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==", + "node_modules/mermaid/node_modules/micromark-util-decode-string": { + "version": "1.1.0", + "resolved": 
"https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-1.1.0.tgz", + "integrity": "sha512-YphLGCK8gM1tG1bd54azwyrQRjCFcmgj2S2GoJDNnh4vYtnL38JS8M4gpxzOPNyHdNEpheyWXCTnnTDY3N+NVQ==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", "dependencies": { - "@types/hast": "^3.0.0", - "@types/mdast": "^4.0.0", - "@ungap/structured-clone": "^1.0.0", - "devlop": "^1.0.0", - "micromark-util-sanitize-uri": "^2.0.0", - "trim-lines": "^3.0.0", - "unist-util-position": "^5.0.0", - "unist-util-visit": "^5.0.0", - "vfile": "^6.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "decode-named-character-reference": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-symbol": "^1.0.0" } }, - "node_modules/mdast-util-to-markdown": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.0.tgz", - "integrity": "sha512-SR2VnIEdVNCJbP6y7kVTJgPLifdr8WEU440fQec7qHoHOUz/oJ2jmNRqdDQ3rbiStOXb2mCDGTuwsK5OPUgYlQ==", + "node_modules/mermaid/node_modules/micromark-util-encode": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-1.1.0.tgz", + "integrity": "sha512-EuEzTWSTAj9PA5GOAs992GzNh2dGQO52UvAbtSOMvXTxv3Criqb6IOzJUBCmEqrrXSblJIJBbFFv6zPxpreiJw==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/mermaid/node_modules/micromark-util-html-tag-name": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-1.2.0.tgz", 
+ "integrity": "sha512-VTQzcuQgFUD7yYztuQFKXT49KghjtETQ+Wv/zUjGSGBioZnkA4P1XXZPT1FHeJA6RwRXSF47yvJ1tsJdoxwO+Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" + }, + "node_modules/mermaid/node_modules/micromark-util-normalize-identifier": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-1.1.0.tgz", + "integrity": "sha512-N+w5vhqrBihhjdpM8+5Xsxy71QWqGn7HYNUvch71iV2PM7+E3uWGox1Qp90loa1ephtCxG2ftRV/Conitc6P2Q==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", "dependencies": { - "@types/mdast": "^4.0.0", - "@types/unist": "^3.0.0", - "longest-streak": "^3.0.0", - "mdast-util-phrasing": "^4.0.0", - "mdast-util-to-string": "^4.0.0", - "micromark-util-decode-string": "^2.0.0", - "unist-util-visit": "^5.0.0", - "zwitch": "^2.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "micromark-util-symbol": "^1.0.0" } }, - "node_modules/mdast-util-to-string": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz", - "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==", + "node_modules/mermaid/node_modules/micromark-util-resolve-all": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-1.1.0.tgz", + "integrity": "sha512-b/G6BTMSg+bX+xVCshPTPyAu2tmA0E4X98NSR7eIbeC6ycCqCeE7wjfDIgzEbkzdEVJXRtOG4FbEm/uGbCRouA==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": 
"OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", "dependencies": { - "@types/mdast": "^4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/unified" + "micromark-util-types": "^1.0.0" } }, - "node_modules/mdn-data": { - "version": "2.0.30", - "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.30.tgz", - "integrity": "sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==" - }, - "node_modules/media-typer": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", - "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", - "engines": { - "node": ">= 0.6" + "node_modules/mermaid/node_modules/micromark-util-sanitize-uri": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-1.2.0.tgz", + "integrity": "sha512-QO4GXv0XZfWey4pYFndLUKEAktKkG5kZTdUNaTAkzbuJxn2tNBOr+QtxR2XpWaMhbImT2dPzyLrPXLlPhph34A==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-encode": "^1.0.0", + "micromark-util-symbol": "^1.0.0" } }, - "node_modules/memfs": { - "version": "3.5.3", - "resolved": "https://registry.npmjs.org/memfs/-/memfs-3.5.3.tgz", - "integrity": "sha512-UERzLsxzllchadvbPs5aolHh65ISpKpM+ccLbOJ8/vvpBKmAWf+la7dXFy7Mr0ySHbdHrFv5kGFCUHHe6GFEmw==", + "node_modules/mermaid/node_modules/micromark-util-subtokenize": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-1.1.0.tgz", + "integrity": 
"sha512-kUQHyzRoxvZO2PuLzMt2P/dwVsTiivCK8icYTeR+3WgbuPqfHgPPy7nFKbeqRivBvn/3N3GBiNC+JRTMSxEC7A==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT", "dependencies": { - "fs-monkey": "^1.0.4" - }, - "engines": { - "node": ">= 4.0.0" + "micromark-util-chunked": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "uvu": "^0.5.0" } }, - "node_modules/memoize-one": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/memoize-one/-/memoize-one-5.2.1.tgz", - "integrity": "sha512-zYiwtZUcYyXKo/np96AGZAckk+FWWsUdJ3cHGGmld7+AhvcWmQyGCYUh1hc4Q/pkOhb65dQR/pqCyK0cOaHz4Q==" - }, - "node_modules/merge-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", - "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" + "node_modules/mermaid/node_modules/micromark-util-types": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-1.1.0.tgz", + "integrity": "sha512-ukRBgie8TIAcacscVHSiddHjO4k/q3pnedmzMQ4iwDcK0FtFCohKOlFbaOL/mPgfnPsL3C1ZyxJa4sbWrBl3jg==", + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "license": "MIT" }, - "node_modules/merge-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", - "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==" + "node_modules/mermaid/node_modules/unist-util-stringify-position": { + "version": "3.0.3", + "resolved": 
"https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.3.tgz", + "integrity": "sha512-k5GzIBZ/QatR8N5X2y+drfpWG8IDBzdnVj6OInRNWm1oXrzydiaAT2OQiA8DPRRZyAKb9b6I2a6PxYklZD0gKg==", + "license": "MIT", + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } }, - "node_modules/merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "engines": { - "node": ">= 8" + "node_modules/mermaid/node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" } }, "node_modules/methods": { @@ -10749,6 +11880,15 @@ } } }, + "node_modules/mri": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz", + "integrity": "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==", + "license": "MIT", + "engines": { + "node": ">=4" + } + }, "node_modules/mrmime": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-1.0.1.tgz", @@ -10878,6 +12018,12 @@ "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz", "integrity": "sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==" }, + "node_modules/non-layered-tidy-tree-layout": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/non-layered-tidy-tree-layout/-/non-layered-tidy-tree-layout-2.0.2.tgz", + "integrity": 
"sha512-gkXMxRzUH+PB0ax9dUN0yYF0S25BqeAYqhgMaLUFmpXLEk7Fcu8f4emJuOAY0V8kjDICxROIKsTAKsV/v355xw==", + "license": "MIT" + }, "node_modules/normalize-path": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", @@ -13105,6 +14251,12 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/robust-predicates": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.2.tgz", + "integrity": "sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==", + "license": "Unlicense" + }, "node_modules/rtl-detect": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/rtl-detect/-/rtl-detect-1.1.2.tgz", @@ -13146,7 +14298,25 @@ } ], "dependencies": { - "queue-microtask": "^1.2.2" + "queue-microtask": "^1.2.2" + } + }, + "node_modules/rw": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/rw/-/rw-1.3.3.tgz", + "integrity": "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==", + "license": "BSD-3-Clause" + }, + "node_modules/sade": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/sade/-/sade-1.8.1.tgz", + "integrity": "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==", + "license": "MIT", + "dependencies": { + "mri": "^1.1.0" + }, + "engines": { + "node": ">=6" } }, "node_modules/safe-buffer": { @@ -13960,8 +15130,7 @@ "node_modules/stylis": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.3.0.tgz", - "integrity": "sha512-E87pIogpwUsUwXw7dNyU4QDjdgVMy52m+XEOPEKUn161cCzWjjhPSQhByfd1CcNvrOLnXQ6OnnZDwnJrz/Z4YQ==", - "peer": true + "integrity": "sha512-E87pIogpwUsUwXw7dNyU4QDjdgVMy52m+XEOPEKUn161cCzWjjhPSQhByfd1CcNvrOLnXQ6OnnZDwnJrz/Z4YQ==" }, "node_modules/supports-color": { "version": "7.2.0", @@ -14261,6 +15430,15 @@ "url": "https://github.com/sponsors/wooorm" } }, + 
"node_modules/ts-dedent": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/ts-dedent/-/ts-dedent-2.2.0.tgz", + "integrity": "sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==", + "license": "MIT", + "engines": { + "node": ">=6.10" + } + }, "node_modules/tslib": { "version": "2.6.2", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", @@ -14758,6 +15936,33 @@ "uuid": "dist/bin/uuid" } }, + "node_modules/uvu": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/uvu/-/uvu-0.5.6.tgz", + "integrity": "sha512-+g8ENReyr8YsOc6fv/NVJs2vFdHBnBNdfE49rshrTzDWOlUx4Gq7KOS2GD8eqhy2j+Ejq29+SbKH8yjkAqXqoA==", + "license": "MIT", + "dependencies": { + "dequal": "^2.0.0", + "diff": "^5.0.0", + "kleur": "^4.0.3", + "sade": "^1.7.3" + }, + "bin": { + "uvu": "bin.js" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/uvu/node_modules/kleur": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz", + "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/value-equal": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/value-equal/-/value-equal-1.0.1.tgz", @@ -14840,6 +16045,12 @@ "url": "https://github.com/sponsors/wooorm" } }, + "node_modules/web-worker": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/web-worker/-/web-worker-1.3.0.tgz", + "integrity": "sha512-BSR9wyRsy/KOValMgd5kMyr3JzpdeoR9KVId8u5GVlTTAtNChlsE4yTxeY7zMdNSyOmoKBv8NH2qeRY9Tg+IaA==", + "license": "Apache-2.0" + }, "node_modules/webidl-conversions": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", @@ -16928,6 +18139,11 @@ "to-fast-properties": "^2.0.0" } }, + "@braintree/sanitize-url": { + "version": "6.0.4", + "resolved": 
"https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-6.0.4.tgz", + "integrity": "sha512-s3jaWicZd0pkP0jf5ysyHUI/RE7MHos6qlToFcGWXVp+ykHOy77OUMrfbgJ9it2C5bow7OIQwYYaHjk9XlBQ2A==" + }, "@colors/colors": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz", @@ -17294,6 +18510,20 @@ "utility-types": "^3.10.0" } }, + "@docusaurus/theme-mermaid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/@docusaurus/theme-mermaid/-/theme-mermaid-3.4.0.tgz", + "integrity": "sha512-3w5QW0HEZ2O6x2w6lU3ZvOe1gNXP2HIoKDMJBil1VmLBc9PmpAG17VmfhI/p3L2etNmOiVs5GgniUqvn8AFEGQ==", + "requires": { + "@docusaurus/core": "3.4.0", + "@docusaurus/module-type-aliases": "3.4.0", + "@docusaurus/theme-common": "3.4.0", + "@docusaurus/types": "3.4.0", + "@docusaurus/utils-validation": "3.4.0", + "mermaid": "^10.4.0", + "tslib": "^2.6.0" + } + }, "@docusaurus/theme-search-algolia": { "version": "3.4.0", "resolved": "https://registry.npmjs.org/@docusaurus/theme-search-algolia/-/theme-search-algolia-3.4.0.tgz", @@ -17865,6 +19095,24 @@ "@types/node": "*" } }, + "@types/d3-scale": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.8.tgz", + "integrity": "sha512-gkK1VVTr5iNiYJ7vWDI+yUFFlszhNMtVeneJ6lUTKPjprsvLLI9/tgEGiXJOnlINJA8FyA88gfnQsHbybVZrYQ==", + "requires": { + "@types/d3-time": "*" + } + }, + "@types/d3-scale-chromatic": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/d3-scale-chromatic/-/d3-scale-chromatic-3.0.3.tgz", + "integrity": "sha512-laXM4+1o5ImZv3RpFAsTRn3TEkzqkytiOY0Dz0sq5cnd1dtNlk6sHLon4OvqaiJb28T0S/TdsBI3Sjsy+keJrw==" + }, + "@types/d3-time": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.3.tgz", + "integrity": "sha512-2p6olUZ4w3s+07q3Tm2dbiMZy5pCDfYwtLXXHUnVzXgQlZ/OyPtUz6OL382BkOuGlLXqfT+wqv8Fw2v8/0geBw==" + }, "@types/debug": { "version": "4.1.12", "resolved": 
"https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", @@ -19254,6 +20502,14 @@ "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" }, + "cose-base": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-1.0.3.tgz", + "integrity": "sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg==", + "requires": { + "layout-base": "^1.0.0" + } + }, "cosmiconfig": { "version": "8.3.6", "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-8.3.6.tgz", @@ -19342,130 +20598,472 @@ "nth-check": "^2.0.1" } }, - "css-to-react-native": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/css-to-react-native/-/css-to-react-native-3.2.0.tgz", - "integrity": "sha512-e8RKaLXMOFii+02mOlqwjbD00KSEKqblnpO9e++1aXS1fPQOpS1YoqdVHBqPjHNoxeF2mimzVqawm2KCbEdtHQ==", - "peer": true, + "css-to-react-native": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/css-to-react-native/-/css-to-react-native-3.2.0.tgz", + "integrity": "sha512-e8RKaLXMOFii+02mOlqwjbD00KSEKqblnpO9e++1aXS1fPQOpS1YoqdVHBqPjHNoxeF2mimzVqawm2KCbEdtHQ==", + "peer": true, + "requires": { + "camelize": "^1.0.0", + "css-color-keywords": "^1.0.0", + "postcss-value-parser": "^4.0.2" + } + }, + "css-tree": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.3.1.tgz", + "integrity": "sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==", + "requires": { + "mdn-data": "2.0.30", + "source-map-js": "^1.0.1" + } + }, + "css-what": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz", + "integrity": "sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==" + }, + "cssesc": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==" + }, + "cssnano": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/cssnano/-/cssnano-6.1.2.tgz", + "integrity": "sha512-rYk5UeX7VAM/u0lNqewCdasdtPK81CgX8wJFLEIXHbV2oldWRgJAsZrdhRXkV1NJzA2g850KiFm9mMU2HxNxMA==", + "requires": { + "cssnano-preset-default": "^6.1.2", + "lilconfig": "^3.1.1" + } + }, + "cssnano-preset-advanced": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/cssnano-preset-advanced/-/cssnano-preset-advanced-6.1.2.tgz", + "integrity": "sha512-Nhao7eD8ph2DoHolEzQs5CfRpiEP0xa1HBdnFZ82kvqdmbwVBUr2r1QuQ4t1pi+D1ZpqpcO4T+wy/7RxzJ/WPQ==", + "requires": { + "autoprefixer": "^10.4.19", + "browserslist": "^4.23.0", + "cssnano-preset-default": "^6.1.2", + "postcss-discard-unused": "^6.0.5", + "postcss-merge-idents": "^6.0.3", + "postcss-reduce-idents": "^6.0.3", + "postcss-zindex": "^6.0.2" + } + }, + "cssnano-preset-default": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-6.1.2.tgz", + "integrity": "sha512-1C0C+eNaeN8OcHQa193aRgYexyJtU8XwbdieEjClw+J9d94E41LwT6ivKH0WT+fYwYWB0Zp3I3IZ7tI/BbUbrg==", + "requires": { + "browserslist": "^4.23.0", + "css-declaration-sorter": "^7.2.0", + "cssnano-utils": "^4.0.2", + "postcss-calc": "^9.0.1", + "postcss-colormin": "^6.1.0", + "postcss-convert-values": "^6.1.0", + "postcss-discard-comments": "^6.0.2", + "postcss-discard-duplicates": "^6.0.3", + "postcss-discard-empty": "^6.0.3", + "postcss-discard-overridden": "^6.0.2", + "postcss-merge-longhand": "^6.0.5", + "postcss-merge-rules": "^6.1.1", + "postcss-minify-font-values": "^6.1.0", + "postcss-minify-gradients": "^6.0.3", + "postcss-minify-params": "^6.1.0", + "postcss-minify-selectors": "^6.0.4", + "postcss-normalize-charset": "^6.0.2", + "postcss-normalize-display-values": "^6.0.2", + 
"postcss-normalize-positions": "^6.0.2", + "postcss-normalize-repeat-style": "^6.0.2", + "postcss-normalize-string": "^6.0.2", + "postcss-normalize-timing-functions": "^6.0.2", + "postcss-normalize-unicode": "^6.1.0", + "postcss-normalize-url": "^6.0.2", + "postcss-normalize-whitespace": "^6.0.2", + "postcss-ordered-values": "^6.0.2", + "postcss-reduce-initial": "^6.1.0", + "postcss-reduce-transforms": "^6.0.2", + "postcss-svgo": "^6.0.3", + "postcss-unique-selectors": "^6.0.4" + } + }, + "cssnano-utils": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-4.0.2.tgz", + "integrity": "sha512-ZR1jHg+wZ8o4c3zqf1SIUSTIvm/9mU343FMR6Obe/unskbvpGhZOo1J6d/r8D1pzkRQYuwbcH3hToOuoA2G7oQ==", + "requires": {} + }, + "csso": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/csso/-/csso-5.0.5.tgz", + "integrity": "sha512-0LrrStPOdJj+SPCCrGhzryycLjwcgUSHBtxNA8aIDxf0GLsRh1cKYhB00Gd1lDOS4yGH69+SNn13+TWbVHETFQ==", + "requires": { + "css-tree": "~2.2.0" + }, + "dependencies": { + "css-tree": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.2.1.tgz", + "integrity": "sha512-OA0mILzGc1kCOCSJerOeqDxDQ4HOh+G8NbOJFOTgOCzpw7fCBubk0fEyxp8AgOL/jvLgYA/uV0cMbe43ElF1JA==", + "requires": { + "mdn-data": "2.0.28", + "source-map-js": "^1.0.1" + } + }, + "mdn-data": { + "version": "2.0.28", + "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.28.tgz", + "integrity": "sha512-aylIc7Z9y4yzHYAJNuESG3hfhC+0Ibp/MAMiaOZgNv4pmEdFyfZhhhny4MNiAfWdBQ1RQ2mfDWmM1x8SvGyp8g==" + } + } + }, + "csstype": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.2.tgz", + "integrity": "sha512-I7K1Uu0MBPzaFKg4nI5Q7Vs2t+3gWWW648spaF+Rg7pI9ds18Ugn+lvg4SHczUdKlHI5LWBXyqfS8+DufyBsgQ==" + }, + "cytoscape": { + "version": "3.30.2", + "resolved": "https://registry.npmjs.org/cytoscape/-/cytoscape-3.30.2.tgz", + "integrity": 
"sha512-oICxQsjW8uSaRmn4UK/jkczKOqTrVqt5/1WL0POiJUT2EKNc9STM4hYFHv917yu55aTBMFNRzymlJhVAiWPCxw==" + }, + "cytoscape-cose-bilkent": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/cytoscape-cose-bilkent/-/cytoscape-cose-bilkent-4.1.0.tgz", + "integrity": "sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ==", + "requires": { + "cose-base": "^1.0.0" + } + }, + "d3": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/d3/-/d3-7.9.0.tgz", + "integrity": "sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==", + "requires": { + "d3-array": "3", + "d3-axis": "3", + "d3-brush": "3", + "d3-chord": "3", + "d3-color": "3", + "d3-contour": "4", + "d3-delaunay": "6", + "d3-dispatch": "3", + "d3-drag": "3", + "d3-dsv": "3", + "d3-ease": "3", + "d3-fetch": "3", + "d3-force": "3", + "d3-format": "3", + "d3-geo": "3", + "d3-hierarchy": "3", + "d3-interpolate": "3", + "d3-path": "3", + "d3-polygon": "3", + "d3-quadtree": "3", + "d3-random": "3", + "d3-scale": "4", + "d3-scale-chromatic": "3", + "d3-selection": "3", + "d3-shape": "3", + "d3-time": "3", + "d3-time-format": "4", + "d3-timer": "3", + "d3-transition": "3", + "d3-zoom": "3" + } + }, + "d3-array": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz", + "integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==", + "requires": { + "internmap": "1 - 2" + } + }, + "d3-axis": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-axis/-/d3-axis-3.0.0.tgz", + "integrity": "sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==" + }, + "d3-brush": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-brush/-/d3-brush-3.0.0.tgz", + "integrity": "sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==", + 
"requires": { + "d3-dispatch": "1 - 3", + "d3-drag": "2 - 3", + "d3-interpolate": "1 - 3", + "d3-selection": "3", + "d3-transition": "3" + } + }, + "d3-chord": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-chord/-/d3-chord-3.0.1.tgz", + "integrity": "sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==", + "requires": { + "d3-path": "1 - 3" + } + }, + "d3-color": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", + "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==" + }, + "d3-contour": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.2.tgz", + "integrity": "sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==", + "requires": { + "d3-array": "^3.2.0" + } + }, + "d3-delaunay": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/d3-delaunay/-/d3-delaunay-6.0.4.tgz", + "integrity": "sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A==", + "requires": { + "delaunator": "5" + } + }, + "d3-dispatch": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz", + "integrity": "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==" + }, + "d3-drag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-drag/-/d3-drag-3.0.0.tgz", + "integrity": "sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==", + "requires": { + "d3-dispatch": "1 - 3", + "d3-selection": "3" + } + }, + "d3-dsv": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-dsv/-/d3-dsv-3.0.1.tgz", + "integrity": "sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==", + "requires": { + "commander": "7", 
+ "iconv-lite": "0.6", + "rw": "1" + }, + "dependencies": { + "commander": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==" + }, + "iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "requires": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + } + } + } + }, + "d3-ease": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", + "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==" + }, + "d3-fetch": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-fetch/-/d3-fetch-3.0.1.tgz", + "integrity": "sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==", + "requires": { + "d3-dsv": "1 - 3" + } + }, + "d3-force": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-force/-/d3-force-3.0.0.tgz", + "integrity": "sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==", + "requires": { + "d3-dispatch": "1 - 3", + "d3-quadtree": "1 - 3", + "d3-timer": "1 - 3" + } + }, + "d3-format": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.0.tgz", + "integrity": "sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==" + }, + "d3-geo": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/d3-geo/-/d3-geo-3.1.1.tgz", + "integrity": "sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q==", + "requires": { + "d3-array": "2.5.0 - 3" + } + }, + "d3-hierarchy": { + "version": "3.1.2", + "resolved": 
"https://registry.npmjs.org/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz", + "integrity": "sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==" + }, + "d3-interpolate": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", + "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", + "requires": { + "d3-color": "1 - 3" + } + }, + "d3-path": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz", + "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==" + }, + "d3-polygon": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-polygon/-/d3-polygon-3.0.1.tgz", + "integrity": "sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==" + }, + "d3-quadtree": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-quadtree/-/d3-quadtree-3.0.1.tgz", + "integrity": "sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==" + }, + "d3-random": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-random/-/d3-random-3.0.1.tgz", + "integrity": "sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==" + }, + "d3-sankey": { + "version": "0.12.3", + "resolved": "https://registry.npmjs.org/d3-sankey/-/d3-sankey-0.12.3.tgz", + "integrity": "sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ==", + "requires": { + "d3-array": "1 - 2", + "d3-shape": "^1.2.0" + }, + "dependencies": { + "d3-array": { + "version": "2.12.1", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-2.12.1.tgz", + "integrity": "sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ==", + "requires": { + "internmap": "^1.0.0" + 
} + }, + "d3-path": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-1.0.9.tgz", + "integrity": "sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg==" + }, + "d3-shape": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-1.3.7.tgz", + "integrity": "sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw==", + "requires": { + "d3-path": "1" + } + }, + "internmap": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-1.0.1.tgz", + "integrity": "sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==" + } + } + }, + "d3-scale": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz", + "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==", "requires": { - "camelize": "^1.0.0", - "css-color-keywords": "^1.0.0", - "postcss-value-parser": "^4.0.2" + "d3-array": "2.10.0 - 3", + "d3-format": "1 - 3", + "d3-interpolate": "1.2.0 - 3", + "d3-time": "2.1.1 - 3", + "d3-time-format": "2 - 4" } }, - "css-tree": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.3.1.tgz", - "integrity": "sha512-6Fv1DV/TYw//QF5IzQdqsNDjx/wc8TrMBZsqjL9eW01tWb7R7k/mq+/VXfJCl7SoD5emsJop9cOByJZfs8hYIw==", + "d3-scale-chromatic": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz", + "integrity": "sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ==", "requires": { - "mdn-data": "2.0.30", - "source-map-js": "^1.0.1" + "d3-color": "1 - 3", + "d3-interpolate": "1 - 3" } }, - "css-what": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.1.0.tgz", - "integrity": 
"sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==" - }, - "cssesc": { + "d3-selection": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", - "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==" + "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", + "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==" }, - "cssnano": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/cssnano/-/cssnano-6.1.2.tgz", - "integrity": "sha512-rYk5UeX7VAM/u0lNqewCdasdtPK81CgX8wJFLEIXHbV2oldWRgJAsZrdhRXkV1NJzA2g850KiFm9mMU2HxNxMA==", + "d3-shape": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz", + "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==", "requires": { - "cssnano-preset-default": "^6.1.2", - "lilconfig": "^3.1.1" + "d3-path": "^3.1.0" } }, - "cssnano-preset-advanced": { - "version": "6.1.2", - "resolved": "https://registry.npmjs.org/cssnano-preset-advanced/-/cssnano-preset-advanced-6.1.2.tgz", - "integrity": "sha512-Nhao7eD8ph2DoHolEzQs5CfRpiEP0xa1HBdnFZ82kvqdmbwVBUr2r1QuQ4t1pi+D1ZpqpcO4T+wy/7RxzJ/WPQ==", + "d3-time": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz", + "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==", "requires": { - "autoprefixer": "^10.4.19", - "browserslist": "^4.23.0", - "cssnano-preset-default": "^6.1.2", - "postcss-discard-unused": "^6.0.5", - "postcss-merge-idents": "^6.0.3", - "postcss-reduce-idents": "^6.0.3", - "postcss-zindex": "^6.0.2" + "d3-array": "2 - 3" } }, - "cssnano-preset-default": { - "version": "6.1.2", - "resolved": 
"https://registry.npmjs.org/cssnano-preset-default/-/cssnano-preset-default-6.1.2.tgz", - "integrity": "sha512-1C0C+eNaeN8OcHQa193aRgYexyJtU8XwbdieEjClw+J9d94E41LwT6ivKH0WT+fYwYWB0Zp3I3IZ7tI/BbUbrg==", + "d3-time-format": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz", + "integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==", "requires": { - "browserslist": "^4.23.0", - "css-declaration-sorter": "^7.2.0", - "cssnano-utils": "^4.0.2", - "postcss-calc": "^9.0.1", - "postcss-colormin": "^6.1.0", - "postcss-convert-values": "^6.1.0", - "postcss-discard-comments": "^6.0.2", - "postcss-discard-duplicates": "^6.0.3", - "postcss-discard-empty": "^6.0.3", - "postcss-discard-overridden": "^6.0.2", - "postcss-merge-longhand": "^6.0.5", - "postcss-merge-rules": "^6.1.1", - "postcss-minify-font-values": "^6.1.0", - "postcss-minify-gradients": "^6.0.3", - "postcss-minify-params": "^6.1.0", - "postcss-minify-selectors": "^6.0.4", - "postcss-normalize-charset": "^6.0.2", - "postcss-normalize-display-values": "^6.0.2", - "postcss-normalize-positions": "^6.0.2", - "postcss-normalize-repeat-style": "^6.0.2", - "postcss-normalize-string": "^6.0.2", - "postcss-normalize-timing-functions": "^6.0.2", - "postcss-normalize-unicode": "^6.1.0", - "postcss-normalize-url": "^6.0.2", - "postcss-normalize-whitespace": "^6.0.2", - "postcss-ordered-values": "^6.0.2", - "postcss-reduce-initial": "^6.1.0", - "postcss-reduce-transforms": "^6.0.2", - "postcss-svgo": "^6.0.3", - "postcss-unique-selectors": "^6.0.4" + "d3-time": "1 - 3" } }, - "cssnano-utils": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/cssnano-utils/-/cssnano-utils-4.0.2.tgz", - "integrity": "sha512-ZR1jHg+wZ8o4c3zqf1SIUSTIvm/9mU343FMR6Obe/unskbvpGhZOo1J6d/r8D1pzkRQYuwbcH3hToOuoA2G7oQ==", - "requires": {} + "d3-timer": { + "version": "3.0.1", + "resolved": 
"https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", + "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==" }, - "csso": { - "version": "5.0.5", - "resolved": "https://registry.npmjs.org/csso/-/csso-5.0.5.tgz", - "integrity": "sha512-0LrrStPOdJj+SPCCrGhzryycLjwcgUSHBtxNA8aIDxf0GLsRh1cKYhB00Gd1lDOS4yGH69+SNn13+TWbVHETFQ==", + "d3-transition": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-3.0.1.tgz", + "integrity": "sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==", "requires": { - "css-tree": "~2.2.0" - }, - "dependencies": { - "css-tree": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-2.2.1.tgz", - "integrity": "sha512-OA0mILzGc1kCOCSJerOeqDxDQ4HOh+G8NbOJFOTgOCzpw7fCBubk0fEyxp8AgOL/jvLgYA/uV0cMbe43ElF1JA==", - "requires": { - "mdn-data": "2.0.28", - "source-map-js": "^1.0.1" - } - }, - "mdn-data": { - "version": "2.0.28", - "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.0.28.tgz", - "integrity": "sha512-aylIc7Z9y4yzHYAJNuESG3hfhC+0Ibp/MAMiaOZgNv4pmEdFyfZhhhny4MNiAfWdBQ1RQ2mfDWmM1x8SvGyp8g==" - } + "d3-color": "1 - 3", + "d3-dispatch": "1 - 3", + "d3-ease": "1 - 3", + "d3-interpolate": "1 - 3", + "d3-timer": "1 - 3" } }, - "csstype": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.2.tgz", - "integrity": "sha512-I7K1Uu0MBPzaFKg4nI5Q7Vs2t+3gWWW648spaF+Rg7pI9ds18Ugn+lvg4SHczUdKlHI5LWBXyqfS8+DufyBsgQ==" + "d3-zoom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-3.0.0.tgz", + "integrity": "sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==", + "requires": { + "d3-dispatch": "1 - 3", + "d3-drag": "2 - 3", + "d3-interpolate": "1 - 3", + "d3-selection": "2 - 3", + "d3-transition": "2 - 3" + } + }, + "dagre-d3-es": { + "version": "7.0.10", + 
"resolved": "https://registry.npmjs.org/dagre-d3-es/-/dagre-d3-es-7.0.10.tgz", + "integrity": "sha512-qTCQmEhcynucuaZgY5/+ti3X/rnszKZhEQH/ZdWdtP1tA/y3VoHJzcVrO9pjjJCNpigfscAtoUB5ONcd2wNn0A==", + "requires": { + "d3": "^7.8.2", + "lodash-es": "^4.17.21" + } + }, + "dayjs": { + "version": "1.11.12", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.12.tgz", + "integrity": "sha512-Rt2g+nTbLlDWZTwwrIXjy9MeiZmSDI375FvZs72ngxx8PDC6YXOeR3q5LAuPzjZQxhiWdRKac7RKV+YyQYfYIg==" }, "debounce": { "version": "1.2.1", @@ -19571,6 +21169,14 @@ "slash": "^3.0.0" } }, + "delaunator": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/delaunator/-/delaunator-5.0.1.tgz", + "integrity": "sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==", + "requires": { + "robust-predicates": "^3.0.2" + } + }, "depd": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", @@ -19632,6 +21238,11 @@ "dequal": "^2.0.0" } }, + "diff": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz", + "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==" + }, "dir-glob": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", @@ -19743,6 +21354,11 @@ "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.722.tgz", "integrity": "sha512-5nLE0TWFFpZ80Crhtp4pIp8LXCztjYX41yUcV6b+bKR2PqzjskTMOOlBi1VjBHlvHwS+4gar7kNKOrsbsewEZQ==" }, + "elkjs": { + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/elkjs/-/elkjs-0.9.3.tgz", + "integrity": "sha512-f/ZeWvW/BCXbhGEf1Ujp29EASo/lk1FDnETgNKwJrsVvGZhUWCZyg3xLJjAsxfOmt8KjswHmI5EwCQcPMpOYhQ==" + }, "emoji-regex": { "version": "9.2.2", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", @@ -21083,6 +22699,11 @@ "resolved": 
"https://registry.npmjs.org/inline-style-parser/-/inline-style-parser-0.1.1.tgz", "integrity": "sha512-7NXolsK4CAS5+xvdj5OMMbI962hU/wvwoxk+LWR9Ek9bVtyuuYScDN6eS0rUm6TxApFpw7CX1o4uJzcd4AyD3Q==" }, + "internmap": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", + "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==" + }, "interpret": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz", @@ -21393,6 +23014,21 @@ "universalify": "^2.0.0" } }, + "katex": { + "version": "0.16.11", + "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.11.tgz", + "integrity": "sha512-RQrI8rlHY92OLf3rho/Ts8i/XvjgguEjOkO1BEXcU3N8BqPpSzBNwV/G0Ukr+P/l3ivvJUE/Fa/CwbS6HesGNQ==", + "requires": { + "commander": "^8.3.0" + }, + "dependencies": { + "commander": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", + "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==" + } + } + }, "keyv": { "version": "4.5.4", "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", @@ -21401,6 +23037,11 @@ "json-buffer": "3.0.1" } }, + "khroma": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/khroma/-/khroma-2.1.0.tgz", + "integrity": "sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw==" + }, "kind-of": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", @@ -21428,6 +23069,11 @@ "shell-quote": "^1.8.1" } }, + "layout-base": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-1.0.2.tgz", + "integrity": "sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==" + }, "leven": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", @@ -21476,6 
+23122,11 @@ "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" }, + "lodash-es": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", + "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==" + }, "lodash.debounce": { "version": "4.0.8", "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", @@ -21865,6 +23516,281 @@ "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==" }, + "mermaid": { + "version": "10.9.1", + "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.9.1.tgz", + "integrity": "sha512-Mx45Obds5W1UkW1nv/7dHRsbfMM1aOKA2+Pxs/IGHNonygDHwmng8xTHyS9z4KWVi0rbko8gjiBmuwwXQ7tiNA==", + "requires": { + "@braintree/sanitize-url": "^6.0.1", + "@types/d3-scale": "^4.0.3", + "@types/d3-scale-chromatic": "^3.0.0", + "cytoscape": "^3.28.1", + "cytoscape-cose-bilkent": "^4.1.0", + "d3": "^7.4.0", + "d3-sankey": "^0.12.3", + "dagre-d3-es": "7.0.10", + "dayjs": "^1.11.7", + "dompurify": "^3.0.5", + "elkjs": "^0.9.0", + "katex": "^0.16.9", + "khroma": "^2.0.0", + "lodash-es": "^4.17.21", + "mdast-util-from-markdown": "^1.3.0", + "non-layered-tidy-tree-layout": "^2.0.2", + "stylis": "^4.1.3", + "ts-dedent": "^2.2.0", + "uuid": "^9.0.0", + "web-worker": "^1.2.0" + }, + "dependencies": { + "@types/mdast": { + "version": "3.0.15", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.15.tgz", + "integrity": "sha512-LnwD+mUEfxWMa1QpDraczIn6k0Ee3SMicuYSSzS6ZYl2gKS09EClnJYGd8Du6rfc5r/GZEk5o1mRb8TaTj03sQ==", + "requires": { + "@types/unist": "^2" + } + }, + "@types/unist": { + "version": "2.0.10", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.10.tgz", + 
"integrity": "sha512-IfYcSBWE3hLpBg8+X2SEa8LVkJdJEkT2Ese2aaLs3ptGdVtABxndrMaxuFlQ1qdFf9Q5rDvDpxI3WwgvKFAsQA==" + }, + "dompurify": { + "version": "3.1.6", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.1.6.tgz", + "integrity": "sha512-cTOAhc36AalkjtBpfG6O8JimdTMWNXjiePT2xQH/ppBGi/4uIpmj8eKyIkMJErXWARyINV/sB38yf8JCLF5pbQ==" + }, + "mdast-util-from-markdown": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-1.3.1.tgz", + "integrity": "sha512-4xTO/M8c82qBcnQc1tgpNtubGUW/Y1tBQ1B0i5CtSoelOLKFYlElIr3bvgREYYO5iRqbMY1YuqZng0GVOI8Qww==", + "requires": { + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "decode-named-character-reference": "^1.0.0", + "mdast-util-to-string": "^3.1.0", + "micromark": "^3.0.0", + "micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-decode-string": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "unist-util-stringify-position": "^3.0.0", + "uvu": "^0.5.0" + } + }, + "mdast-util-to-string": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz", + "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==", + "requires": { + "@types/mdast": "^3.0.0" + } + }, + "micromark": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-3.2.0.tgz", + "integrity": "sha512-uD66tJj54JLYq0De10AhWycZWGQNUvDI55xPgk2sQM5kn1JYlhbCMTtEeT27+vAhW2FBQxLlOmS3pmA7/2z4aA==", + "requires": { + "@types/debug": "^4.0.0", + "debug": "^4.0.0", + "decode-named-character-reference": "^1.0.0", + "micromark-core-commonmark": "^1.0.1", + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-chunked": "^1.0.0", + "micromark-util-combine-extensions": "^1.0.0", + 
"micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-encode": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-resolve-all": "^1.0.0", + "micromark-util-sanitize-uri": "^1.0.0", + "micromark-util-subtokenize": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.1", + "uvu": "^0.5.0" + } + }, + "micromark-core-commonmark": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-1.1.0.tgz", + "integrity": "sha512-BgHO1aRbolh2hcrzL2d1La37V0Aoz73ymF8rAcKnohLy93titmv62E0gP8Hrx9PKcKrqCZ1BbLGbP3bEhoXYlw==", + "requires": { + "decode-named-character-reference": "^1.0.0", + "micromark-factory-destination": "^1.0.0", + "micromark-factory-label": "^1.0.0", + "micromark-factory-space": "^1.0.0", + "micromark-factory-title": "^1.0.0", + "micromark-factory-whitespace": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-chunked": "^1.0.0", + "micromark-util-classify-character": "^1.0.0", + "micromark-util-html-tag-name": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-resolve-all": "^1.0.0", + "micromark-util-subtokenize": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.1", + "uvu": "^0.5.0" + } + }, + "micromark-factory-destination": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-1.1.0.tgz", + "integrity": "sha512-XaNDROBgx9SgSChd69pjiGKbV+nfHGDPVYFs5dOoDd7ZnMAE+Cuu91BCpsY8RT2NP9vo/B8pds2VQNCLiu0zhg==", + "requires": { + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "micromark-factory-label": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-1.1.0.tgz", + "integrity": 
"sha512-OLtyez4vZo/1NjxGhcpDSbHQ+m0IIGnT8BoPamh+7jVlzLJBH98zzuCoUeMxvM6WsNeh8wx8cKvqLiPHEACn0w==", + "requires": { + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "uvu": "^0.5.0" + } + }, + "micromark-factory-title": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-1.1.0.tgz", + "integrity": "sha512-J7n9R3vMmgjDOCY8NPw55jiyaQnH5kBdV2/UXCtZIpnHH3P6nHUKaH7XXEYuWwx/xUJcawa8plLBEjMPU24HzQ==", + "requires": { + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "micromark-factory-whitespace": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-1.1.0.tgz", + "integrity": "sha512-v2WlmiymVSp5oMg+1Q0N1Lxmt6pMhIHD457whWM7/GUlEks1hI9xj5w3zbc4uuMKXGisksZk8DzP2UyGbGqNsQ==", + "requires": { + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "micromark-util-chunked": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-1.1.0.tgz", + "integrity": "sha512-Ye01HXpkZPNcV6FiyoW2fGZDUw4Yc7vT0E9Sad83+bEDiCJ1uXu0S3mr8WLpsz3HaG3x2q0HM6CTuPdcZcluFQ==", + "requires": { + "micromark-util-symbol": "^1.0.0" + } + }, + "micromark-util-classify-character": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-1.1.0.tgz", + "integrity": "sha512-SL0wLxtKSnklKSUplok1WQFoGhUdWYKggKUiqhX+Swala+BtptGCu5iPRc+xvzJ4PXE/hwM3FNXsfEVgoZsWbw==", + "requires": { + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "micromark-util-combine-extensions": { + "version": "1.1.0", + 
"resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-1.1.0.tgz", + "integrity": "sha512-Q20sp4mfNf9yEqDL50WwuWZHUrCO4fEyeDCnMGmG5Pr0Cz15Uo7KBs6jq+dq0EgX4DPwwrh9m0X+zPV1ypFvUA==", + "requires": { + "micromark-util-chunked": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "micromark-util-decode-numeric-character-reference": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-1.1.0.tgz", + "integrity": "sha512-m9V0ExGv0jB1OT21mrWcuf4QhP46pH1KkfWy9ZEezqHKAxkj4mPCy3nIH1rkbdMlChLHX531eOrymlwyZIf2iw==", + "requires": { + "micromark-util-symbol": "^1.0.0" + } + }, + "micromark-util-decode-string": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-1.1.0.tgz", + "integrity": "sha512-YphLGCK8gM1tG1bd54azwyrQRjCFcmgj2S2GoJDNnh4vYtnL38JS8M4gpxzOPNyHdNEpheyWXCTnnTDY3N+NVQ==", + "requires": { + "decode-named-character-reference": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-symbol": "^1.0.0" + } + }, + "micromark-util-encode": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-1.1.0.tgz", + "integrity": "sha512-EuEzTWSTAj9PA5GOAs992GzNh2dGQO52UvAbtSOMvXTxv3Criqb6IOzJUBCmEqrrXSblJIJBbFFv6zPxpreiJw==" + }, + "micromark-util-html-tag-name": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-1.2.0.tgz", + "integrity": "sha512-VTQzcuQgFUD7yYztuQFKXT49KghjtETQ+Wv/zUjGSGBioZnkA4P1XXZPT1FHeJA6RwRXSF47yvJ1tsJdoxwO+Q==" + }, + "micromark-util-normalize-identifier": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-1.1.0.tgz", + 
"integrity": "sha512-N+w5vhqrBihhjdpM8+5Xsxy71QWqGn7HYNUvch71iV2PM7+E3uWGox1Qp90loa1ephtCxG2ftRV/Conitc6P2Q==", + "requires": { + "micromark-util-symbol": "^1.0.0" + } + }, + "micromark-util-resolve-all": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-1.1.0.tgz", + "integrity": "sha512-b/G6BTMSg+bX+xVCshPTPyAu2tmA0E4X98NSR7eIbeC6ycCqCeE7wjfDIgzEbkzdEVJXRtOG4FbEm/uGbCRouA==", + "requires": { + "micromark-util-types": "^1.0.0" + } + }, + "micromark-util-sanitize-uri": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-1.2.0.tgz", + "integrity": "sha512-QO4GXv0XZfWey4pYFndLUKEAktKkG5kZTdUNaTAkzbuJxn2tNBOr+QtxR2XpWaMhbImT2dPzyLrPXLlPhph34A==", + "requires": { + "micromark-util-character": "^1.0.0", + "micromark-util-encode": "^1.0.0", + "micromark-util-symbol": "^1.0.0" + } + }, + "micromark-util-subtokenize": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-1.1.0.tgz", + "integrity": "sha512-kUQHyzRoxvZO2PuLzMt2P/dwVsTiivCK8icYTeR+3WgbuPqfHgPPy7nFKbeqRivBvn/3N3GBiNC+JRTMSxEC7A==", + "requires": { + "micromark-util-chunked": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "uvu": "^0.5.0" + } + }, + "micromark-util-types": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-1.1.0.tgz", + "integrity": "sha512-ukRBgie8TIAcacscVHSiddHjO4k/q3pnedmzMQ4iwDcK0FtFCohKOlFbaOL/mPgfnPsL3C1ZyxJa4sbWrBl3jg==" + }, + "unist-util-stringify-position": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.3.tgz", + "integrity": "sha512-k5GzIBZ/QatR8N5X2y+drfpWG8IDBzdnVj6OInRNWm1oXrzydiaAT2OQiA8DPRRZyAKb9b6I2a6PxYklZD0gKg==", + "requires": { + "@types/unist": "^2.0.0" + } + }, + "uuid": { + "version": 
"9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==" + } + } + }, "methods": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", @@ -22826,6 +24752,11 @@ "integrity": "sha512-NkJREyFTSUXR772Qaai51BnE1voWx56LOL80xG7qkZr6vo8vEaLF3sz1JNUVh+rxmUzxYaqOhfuxTfqUh0FXUg==", "requires": {} }, + "mri": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz", + "integrity": "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==" + }, "mrmime": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/mrmime/-/mrmime-1.0.1.tgz", @@ -22914,6 +24845,11 @@ "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.14.tgz", "integrity": "sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==" }, + "non-layered-tidy-tree-layout": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/non-layered-tidy-tree-layout/-/non-layered-tidy-tree-layout-2.0.2.tgz", + "integrity": "sha512-gkXMxRzUH+PB0ax9dUN0yYF0S25BqeAYqhgMaLUFmpXLEk7Fcu8f4emJuOAY0V8kjDICxROIKsTAKsV/v355xw==" + }, "normalize-path": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", @@ -24450,6 +26386,11 @@ "glob": "^7.1.3" } }, + "robust-predicates": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.2.tgz", + "integrity": "sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==" + }, "rtl-detect": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/rtl-detect/-/rtl-detect-1.1.2.tgz", @@ -24474,6 +26415,19 @@ "queue-microtask": "^1.2.2" } }, + "rw": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/rw/-/rw-1.3.3.tgz", + "integrity": 
"sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==" + }, + "sade": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/sade/-/sade-1.8.1.tgz", + "integrity": "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==", + "requires": { + "mri": "^1.1.0" + } + }, "safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -25107,8 +27061,7 @@ "stylis": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.3.0.tgz", - "integrity": "sha512-E87pIogpwUsUwXw7dNyU4QDjdgVMy52m+XEOPEKUn161cCzWjjhPSQhByfd1CcNvrOLnXQ6OnnZDwnJrz/Z4YQ==", - "peer": true + "integrity": "sha512-E87pIogpwUsUwXw7dNyU4QDjdgVMy52m+XEOPEKUn161cCzWjjhPSQhByfd1CcNvrOLnXQ6OnnZDwnJrz/Z4YQ==" }, "supports-color": { "version": "7.2.0", @@ -25312,6 +27265,11 @@ "resolved": "https://registry.npmjs.org/trough/-/trough-2.2.0.tgz", "integrity": "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==" }, + "ts-dedent": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/ts-dedent/-/ts-dedent-2.2.0.tgz", + "integrity": "sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==" + }, "tslib": { "version": "2.6.2", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", @@ -25645,6 +27603,24 @@ "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" }, + "uvu": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/uvu/-/uvu-0.5.6.tgz", + "integrity": "sha512-+g8ENReyr8YsOc6fv/NVJs2vFdHBnBNdfE49rshrTzDWOlUx4Gq7KOS2GD8eqhy2j+Ejq29+SbKH8yjkAqXqoA==", + "requires": { + "dequal": "^2.0.0", + "diff": "^5.0.0", + "kleur": "^4.0.3", + "sade": "^1.7.3" + }, + "dependencies": { + "kleur": { + "version": "4.1.5", + 
"resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz", + "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==" + } + } + }, "value-equal": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/value-equal/-/value-equal-1.0.1.tgz", @@ -25705,6 +27681,11 @@ "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.1.tgz", "integrity": "sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==" }, + "web-worker": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/web-worker/-/web-worker-1.3.0.tgz", + "integrity": "sha512-BSR9wyRsy/KOValMgd5kMyr3JzpdeoR9KVId8u5GVlTTAtNChlsE4yTxeY7zMdNSyOmoKBv8NH2qeRY9Tg+IaA==" + }, "webidl-conversions": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", diff --git a/docs/package.json b/docs/package.json index 845743eb6158..cb1a64c4956b 100644 --- a/docs/package.json +++ b/docs/package.json @@ -13,11 +13,13 @@ "write-translations": "docusaurus write-translations", "write-heading-ids": "docusaurus write-heading-ids", "prettier": "npx prettier --check docs", - "prettier:fix": "npx prettier --write --check docs"}, + "prettier:fix": "npx prettier --write --check docs" + }, "dependencies": { "@docusaurus/core": "^3.4.0", "@docusaurus/plugin-google-tag-manager": "^3.4.0", "@docusaurus/preset-classic": "^3.4.0", + "@docusaurus/theme-mermaid": "^3.4.0", "@mdx-js/react": "^3.0.0", "clsx": "^2.0.0", "prism-react-renderer": "^2.3.0", diff --git a/frontend/web/components/PermissionsTabs.tsx b/frontend/web/components/PermissionsTabs.tsx index 7a3bad391131..2a7bae84487d 100644 --- a/frontend/web/components/PermissionsTabs.tsx +++ b/frontend/web/components/PermissionsTabs.tsx @@ -14,6 +14,7 @@ import Utils from 'common/utils/utils' import RolePermissionsList from './RolePermissionsList' import ProjectFilter from './ProjectFilter' import 
OrganisationStore from 'common/stores/organisation-store' +import WarningMessage from './WarningMessage' type PermissionsTabsType = { orgId?: number @@ -55,96 +56,119 @@ const PermissionsTabs: FC = ({ return } - return ( - - Organisation} + const deprecationMessage = ( +
+ ) + + return ( + <> + {!!group && } + - -
Permissions
- - setSearchEnv(Utils.safeParseEventValue(e)) - } - size='small' - placeholder='Search' - search - /> -
-
- Organisation} + > + -
- {environments.length > 0 && ( + + Project} + > + +
Permissions
+ + setSearchProject(Utils.safeParseEventValue(e)) + } + size='small' + placeholder='Search' + search + /> +
{ - return { - id: role ? v.id : v.api_key, - name: v.name, - } - })} + orgId={orgId} + filter={searchProject} + mainItems={projectData} role={role} - level={'environment'} + level={'project'} ref={tabRef} /> - )} -
-
+ + Environment} + > + +
Permissions
+ + setSearchEnv(Utils.safeParseEventValue(e)) + } + size='small' + placeholder='Search' + search + /> +
+
+ +
+ {environments.length > 0 && ( + { + return { + id: role ? v.id : v.api_key, + name: v.name, + } + })} + role={role} + level={'environment'} + ref={tabRef} + /> + )} +
+ + ) } From bf82b9d64976e16cc7d598e6f5cd124cafbe30ca Mon Sep 17 00:00:00 2001 From: Novak Zaballa <41410593+novakzaballa@users.noreply.github.com> Date: Tue, 13 Aug 2024 10:36:37 -0400 Subject: [PATCH 119/247] fix: Rename match variable in external feature resources (#4490) --- api/features/feature_external_resources/views.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/api/features/feature_external_resources/views.py b/api/features/feature_external_resources/views.py index 641074215fe6..002a8e5da89a 100644 --- a/api/features/feature_external_resources/views.py +++ b/api/features/feature_external_resources/views.py @@ -82,9 +82,9 @@ def create(self, request, *args, **kwargs): status=status.HTTP_400_BAD_REQUEST, ) - match = re.search(pattern, url) - if match: - owner, repo, issue = match.groups() + url_match = re.search(pattern, url) + if url_match: + owner, repo, issue = url_match.groups() if GithubRepository.objects.get( github_configuration=github_configuration, repository_owner=owner, From a6a0f918394d0f044994d159d4e440c3a9ebcdef Mon Sep 17 00:00:00 2001 From: Matthew Elwell Date: Tue, 13 Aug 2024 16:26:06 +0100 Subject: [PATCH 120/247] fix: add reverse sql to versioning migration (#4491) --- .../0023_add_versioning_to_segments.py | 20 +- ...023_add_versioning_to_segments_reverse.sql | 3 + api/tests/unit/segments/test_migrations.py | 104 -------- .../segments/test_unit_segments_migrations.py | 239 ++++++++++++++++++ 4 files changed, 253 insertions(+), 113 deletions(-) create mode 100644 api/segments/migrations/sql/0023_add_versioning_to_segments_reverse.sql delete mode 100644 api/tests/unit/segments/test_migrations.py create mode 100644 api/tests/unit/segments/test_unit_segments_migrations.py diff --git a/api/segments/migrations/0023_add_versioning_to_segments.py b/api/segments/migrations/0023_add_versioning_to_segments.py index 0e7c8f0d5a03..16473f1cf587 100644 --- a/api/segments/migrations/0023_add_versioning_to_segments.py +++ 
b/api/segments/migrations/0023_add_versioning_to_segments.py @@ -1,9 +1,17 @@ # Generated by Django 3.2.25 on 2024-06-10 15:31 -import os +from pathlib import Path import django.db.models.deletion from django.db import migrations, models +parent_dir = Path(__file__).parent.resolve() + +with open(parent_dir / "sql/0023_add_versioning_to_segments.sql") as f: + segment_versioning_sql_forwards = f.read() + +with open(parent_dir / "sql/0023_add_versioning_to_segments_reverse.sql") as f: + segment_versioning_sql_reverse = f.read() + class Migration(migrations.Migration): @@ -46,13 +54,7 @@ class Migration(migrations.Migration): ), ), migrations.RunSQL( - sql=open( - os.path.join( - os.path.dirname(__file__), - "sql", - "0023_add_versioning_to_segments.sql", - ) - ).read(), - reverse_sql=migrations.RunSQL.noop, + sql=segment_versioning_sql_forwards, + reverse_sql=segment_versioning_sql_reverse, ), ] diff --git a/api/segments/migrations/sql/0023_add_versioning_to_segments_reverse.sql b/api/segments/migrations/sql/0023_add_versioning_to_segments_reverse.sql new file mode 100644 index 000000000000..7d78f144d9ec --- /dev/null +++ b/api/segments/migrations/sql/0023_add_versioning_to_segments_reverse.sql @@ -0,0 +1,3 @@ +UPDATE segments_segment +SET deleted_at = now() +WHERE version_of_id <> id; diff --git a/api/tests/unit/segments/test_migrations.py b/api/tests/unit/segments/test_migrations.py deleted file mode 100644 index f2b181a81165..000000000000 --- a/api/tests/unit/segments/test_migrations.py +++ /dev/null @@ -1,104 +0,0 @@ -import pytest -from django.conf import settings as test_settings -from django_test_migrations.migrator import Migrator -from flag_engine.segments import constants -from pytest_django.fixtures import SettingsWrapper - - -@pytest.mark.skipif( - test_settings.SKIP_MIGRATION_TESTS is True, - reason="Skip migration tests to speed up tests where necessary", -) -def test_create_whitelisted_segments_migration( - migrator: Migrator, - settings: 
SettingsWrapper, -) -> None: - # Given - The migration state is at 0020 (before the migration we want to test). - old_state = migrator.apply_initial_migration( - ("segments", "0020_detach_segment_from_project_cascade_delete") - ) - - Organisation = old_state.apps.get_model("organisations", "Organisation") - Project = old_state.apps.get_model("projects", "Project") - SegmentRule = old_state.apps.get_model("segments", "SegmentRule") - Segment = old_state.apps.get_model("segments", "Segment") - Condition = old_state.apps.get_model("segments", "Condition") - - # Set the limit lower to allow for a faster test. - settings.SEGMENT_RULES_CONDITIONS_LIMIT = 3 - - # Next, create the setup data. - organisation = Organisation.objects.create(name="Big Corp Incorporated") - project = Project.objects.create(name="Huge Project", organisation=organisation) - - segment_1 = Segment.objects.create(name="Segment1", project=project) - segment_2 = Segment.objects.create(name="Segment1", project=project) - segment_rule_1 = SegmentRule.objects.create( - segment=segment_1, - type="ALL", - ) - - # Subnested segment rules. - segment_rule_2 = SegmentRule.objects.create( - rule=segment_rule_1, - type="ALL", - ) - segment_rule_3 = SegmentRule.objects.create( - rule=segment_rule_1, - type="ALL", - ) - - # Lonely segment rules for pass criteria for segment_2. 
- segment_rule_4 = SegmentRule.objects.create( - segment=segment_2, - type="ALL", - ) - segment_rule_5 = SegmentRule.objects.create( - rule=segment_rule_4, - type="ALL", - ) - - Condition.objects.create( - operator=constants.EQUAL, - property="age", - value="21", - rule=segment_rule_2, - ) - Condition.objects.create( - operator=constants.GREATER_THAN, - property="height", - value="210", - rule=segment_rule_2, - ) - Condition.objects.create( - operator=constants.GREATER_THAN, - property="waist", - value="36", - rule=segment_rule_3, - ) - Condition.objects.create( - operator=constants.LESS_THAN, - property="shoes", - value="12", - rule=segment_rule_3, - ) - - # Sole criteria for segment_2 conditions. - Condition.objects.create( - operator=constants.LESS_THAN, - property="toy_count", - value="7", - rule=segment_rule_5, - ) - - # When we run the migration. - new_state = migrator.apply_tested_migration( - ("segments", "0021_create_whitelisted_segments") - ) - - # Then the first segment is in the whitelist while the second is not. 
- NewSegment = new_state.apps.get_model("segments", "Segment") - new_segment_1 = NewSegment.objects.get(id=segment_1.id) - new_segment_2 = NewSegment.objects.get(id=segment_2.id) - assert new_segment_1.whitelisted_segment - assert getattr(new_segment_2, "whitelisted_segment", None) is None diff --git a/api/tests/unit/segments/test_unit_segments_migrations.py b/api/tests/unit/segments/test_unit_segments_migrations.py new file mode 100644 index 000000000000..b6f85808bba5 --- /dev/null +++ b/api/tests/unit/segments/test_unit_segments_migrations.py @@ -0,0 +1,239 @@ +import uuid + +import pytest +from django.conf import settings as test_settings +from django_test_migrations.migrator import Migrator +from flag_engine.segments import constants +from pytest_django.fixtures import SettingsWrapper + + +@pytest.mark.skipif( + test_settings.SKIP_MIGRATION_TESTS is True, + reason="Skip migration tests to speed up tests where necessary", +) +def test_create_whitelisted_segments_migration( + migrator: Migrator, + settings: SettingsWrapper, +) -> None: + # Given - The migration state is at 0020 (before the migration we want to test). + old_state = migrator.apply_initial_migration( + ("segments", "0020_detach_segment_from_project_cascade_delete") + ) + + Organisation = old_state.apps.get_model("organisations", "Organisation") + Project = old_state.apps.get_model("projects", "Project") + SegmentRule = old_state.apps.get_model("segments", "SegmentRule") + Segment = old_state.apps.get_model("segments", "Segment") + Condition = old_state.apps.get_model("segments", "Condition") + + # Set the limit lower to allow for a faster test. + settings.SEGMENT_RULES_CONDITIONS_LIMIT = 3 + + # Next, create the setup data. 
+ organisation = Organisation.objects.create(name="Big Corp Incorporated") + project = Project.objects.create(name="Huge Project", organisation=organisation) + + segment_1 = Segment.objects.create(name="Segment1", project=project) + segment_2 = Segment.objects.create(name="Segment1", project=project) + segment_rule_1 = SegmentRule.objects.create( + segment=segment_1, + type="ALL", + ) + + # Subnested segment rules. + segment_rule_2 = SegmentRule.objects.create( + rule=segment_rule_1, + type="ALL", + ) + segment_rule_3 = SegmentRule.objects.create( + rule=segment_rule_1, + type="ALL", + ) + + # Lonely segment rules for pass criteria for segment_2. + segment_rule_4 = SegmentRule.objects.create( + segment=segment_2, + type="ALL", + ) + segment_rule_5 = SegmentRule.objects.create( + rule=segment_rule_4, + type="ALL", + ) + + Condition.objects.create( + operator=constants.EQUAL, + property="age", + value="21", + rule=segment_rule_2, + ) + Condition.objects.create( + operator=constants.GREATER_THAN, + property="height", + value="210", + rule=segment_rule_2, + ) + Condition.objects.create( + operator=constants.GREATER_THAN, + property="waist", + value="36", + rule=segment_rule_3, + ) + Condition.objects.create( + operator=constants.LESS_THAN, + property="shoes", + value="12", + rule=segment_rule_3, + ) + + # Sole criteria for segment_2 conditions. + Condition.objects.create( + operator=constants.LESS_THAN, + property="toy_count", + value="7", + rule=segment_rule_5, + ) + + # When we run the migration. + new_state = migrator.apply_tested_migration( + ("segments", "0021_create_whitelisted_segments") + ) + + # Then the first segment is in the whitelist while the second is not. 
+ NewSegment = new_state.apps.get_model("segments", "Segment") + new_segment_1 = NewSegment.objects.get(id=segment_1.id) + new_segment_2 = NewSegment.objects.get(id=segment_2.id) + assert new_segment_1.whitelisted_segment + assert getattr(new_segment_2, "whitelisted_segment", None) is None + + +@pytest.mark.skipif( + test_settings.SKIP_MIGRATION_TESTS is True, + reason="Skip migration tests to speed up tests where necessary", +) +def test_add_versioning_to_segments_forwards(migrator: Migrator) -> None: + # Given - The migration state is at 0021 (before the migration we want to test). + old_state = migrator.apply_initial_migration( + ("segments", "0022_add_soft_delete_to_segment_rules_and_conditions") + ) + + Organisation = old_state.apps.get_model("organisations", "Organisation") + Project = old_state.apps.get_model("projects", "Project") + SegmentRule = old_state.apps.get_model("segments", "SegmentRule") + Segment = old_state.apps.get_model("segments", "Segment") + Condition = old_state.apps.get_model("segments", "Condition") + + # Next, create the setup data. + organisation = Organisation.objects.create(name="Test Org") + project = Project.objects.create(name="Test Project", organisation_id=organisation.id) + + segment = Segment.objects.create(name="Segment1", project_id=project.id) + segment_rule_1 = SegmentRule.objects.create( + segment_id=segment.id, + type="ALL", + ) + + # Subnested segment rules. + segment_rule_2 = SegmentRule.objects.create( + rule_id=segment_rule_1.id, + type="ALL", + ) + + Condition.objects.create( + operator=constants.EQUAL, + property="age", + value="21", + rule_id=segment_rule_2.id, + ) + + # When we run the migration. + new_state = migrator.apply_tested_migration( + ("segments", "0023_add_versioning_to_segments") + ) + + # Then the version_of attribute is correctly set. 
+ NewSegment = new_state.apps.get_model("segments", "Segment") + new_segment = NewSegment.objects.get(id=segment.id) + assert new_segment.version_of == new_segment + + +@pytest.mark.skipif( + test_settings.SKIP_MIGRATION_TESTS is True, + reason="Skip migration tests to speed up tests where necessary", +) +def test_add_versioning_to_segments_reverse(migrator: Migrator) -> None: + # Given - The migration state is at 0023 (after the migration we want to test). + old_state = migrator.apply_initial_migration( + ("segments", "0023_add_versioning_to_segments") + ) + + Organisation = old_state.apps.get_model("organisations", "Organisation") + Project = old_state.apps.get_model("projects", "Project") + SegmentRule = old_state.apps.get_model("segments", "SegmentRule") + Segment = old_state.apps.get_model("segments", "Segment") + Condition = old_state.apps.get_model("segments", "Condition") + + # Next, create the setup data. + organisation = Organisation.objects.create(name="Test Org") + project = Project.objects.create(name="Test Project", organisation=organisation) + + # Set the version manually since this is normally done via a lifecycle hook + # that doesn't run for models created in a migration state. + segment = Segment.objects.create(name="Segment1", project=project, version=1) + segment_rule_1 = SegmentRule.objects.create( + segment=segment, + type="ALL", + ) + + # We ideally want to call Segment.deep_clone but that's not + # possible when working in a migration state. As such, we + # do the basic amount necessary from that method to allow + # us to test the migration behaviour. 
+ def _deep_clone(segment: Segment) -> Segment: + cloned_segment = Segment.objects.create( + name=segment.name, + project_id=segment.project_id, + description=segment.description, + feature=segment.feature, + uuid=uuid.uuid4(), + version_of_id=segment.id, + ) + + segment.version += 1 + segment.save() + + return cloned_segment + + version_1 = _deep_clone(segment) + version_2 = _deep_clone(segment) + + version_3 = segment + + # Subnested segment rules. + segment_rule_2 = SegmentRule.objects.create( + rule=segment_rule_1, + type="ALL", + ) + + Condition.objects.create( + operator=constants.EQUAL, + property="age", + value="21", + rule=segment_rule_2, + ) + + # When we run the migration in reverse. + new_state = migrator.apply_tested_migration( + ("segments", "0022_add_soft_delete_to_segment_rules_and_conditions") + ) + + # Then any historical versions of the segment are deleted. + NewSegment = new_state.apps.get_model("segments", "Segment") + + new_segment_v1 = NewSegment.objects.get(id=version_1.id) + assert new_segment_v1.deleted_at is not None + + new_segment_v2 = NewSegment.objects.get(id=version_2.id) + assert new_segment_v2.deleted_at is not None + + new_segment_v3 = NewSegment.objects.get(id=version_3.id) + assert new_segment_v3.deleted_at is None From 2517e9dfd42a58adb69a52050f4e3fc1663bc127 Mon Sep 17 00:00:00 2001 From: Kyle Johnson Date: Tue, 13 Aug 2024 16:31:19 +0100 Subject: [PATCH 121/247] fix: save feature error handling (#4058) --- frontend/common/stores/feature-list-store.ts | 254 ++++++++++--------- frontend/web/components/ErrorMessage.js | 4 +- frontend/web/project/api.js | 5 + 3 files changed, 143 insertions(+), 120 deletions(-) diff --git a/frontend/common/stores/feature-list-store.ts b/frontend/common/stores/feature-list-store.ts index b2067cf5695b..a47c9ffe27d5 100644 --- a/frontend/common/stores/feature-list-store.ts +++ b/frontend/common/stores/feature-list-store.ts @@ -90,8 +90,11 @@ const controller = { }), project_id: projectId, }) - 
.then((res) => - Promise.all( + .then((res) => { + if (res.error) { + throw res.error?.error || res.error + } + return Promise.all( (flag.multivariate_options || []).map((v) => data .post( @@ -107,8 +110,8 @@ const controller = { data.get( `${Project.api}projects/${projectId}/features/${res.data.id}/`, ), - ), - ) + ) + }) .then(() => Promise.all([ data.get(`${Project.api}projects/${projectId}/features/`), @@ -322,123 +325,132 @@ const controller = { store.saving() API.trackEvent(Constants.events.EDIT_FEATURE) - segmentOverridesProm.then(() => { - if (mode !== 'VALUE') { - prom = Promise.resolve() - } else if (environmentFlag) { - prom = data - .get( - `${Project.api}environments/${environmentId}/featurestates/${environmentFlag.id}/`, - ) - .then((environmentFeatureStates) => { - const multivariate_feature_state_values = - environmentFeatureStates.multivariate_feature_state_values && - environmentFeatureStates.multivariate_feature_state_values.map( - (v) => { - const matching = - environmentFlag.multivariate_feature_state_values.find( - (m) => m.id === v.multivariate_feature_option, - ) || {} - return { - ...v, - percentage_allocation: - matching.default_percentage_allocation, - } - }, - ) - environmentFlag.multivariate_feature_state_values = - multivariate_feature_state_values - return data.put( + segmentOverridesProm + .then(() => { + if (mode !== 'VALUE') { + prom = Promise.resolve() + } else if (environmentFlag) { + prom = data + .get( `${Project.api}environments/${environmentId}/featurestates/${environmentFlag.id}/`, - Object.assign({}, environmentFlag, { - enabled: flag.default_enabled, - feature_state_value: Utils.getTypedValue( - flag.initial_value, - undefined, - true, - ), - }), ) - }) - } else { - prom = data.post( - `${Project.api}environments/${environmentId}/featurestates/`, - Object.assign({}, flag, { - enabled: false, - environment: environmentId, - feature: projectFlag, - }), - ) - } - - const segmentOverridesRequest = - mode === 'SEGMENT' && 
segmentOverrides - ? (segmentOverrides.length - ? updateSegmentPriorities( - getStore(), - segmentOverrides.map((override, index) => ({ - id: override.id, - priority: index, - })), + .then((environmentFeatureStates) => { + const multivariate_feature_state_values = + environmentFeatureStates.multivariate_feature_state_values && + environmentFeatureStates.multivariate_feature_state_values.map( + (v) => { + const matching = + environmentFlag.multivariate_feature_state_values.find( + (m) => m.id === v.multivariate_feature_option, + ) || {} + return { + ...v, + percentage_allocation: + matching.default_percentage_allocation, + } + }, ) - : Promise.resolve([]) - ).then(() => - Promise.all( - segmentOverrides.map((override) => - data.put( - `${Project.api}features/featurestates/${override.feature_segment_value.id}/`, - { - ...override.feature_segment_value, - enabled: override.enabled, - feature_state_value: Utils.valueToFeatureState( - override.value, - ), - multivariate_feature_state_values: - override.multivariate_options && - override.multivariate_options.map((o) => { - if (o.multivariate_feature_option) return o - return { - multivariate_feature_option: - environmentFlag.multivariate_feature_state_values[ - o.multivariate_feature_option_index - ].multivariate_feature_option, - percentage_allocation: o.percentage_allocation, - } - }), - }, + environmentFlag.multivariate_feature_state_values = + multivariate_feature_state_values + return data.put( + `${Project.api}environments/${environmentId}/featurestates/${environmentFlag.id}/`, + Object.assign({}, environmentFlag, { + enabled: flag.default_enabled, + feature_state_value: Utils.getTypedValue( + flag.initial_value, + undefined, + true, ), - ), - ), - ) - : Promise.resolve() - - Promise.all([prom, segmentOverridesRequest]).then(([res, segmentRes]) => { - if (store.model) { - store.model.keyedEnvironmentFeatures[projectFlag.id] = res - if (segmentRes) { - const feature = _.find( - store.model.features, - (f) => f.id 
=== projectFlag.id, - ) - if (feature) { - feature.feature_segments = _.map( - segmentRes.feature_segments, - (segment) => ({ - ...segment, - segment: segment.segment.id, }), ) - } - } + }) + } else { + prom = data.post( + `${Project.api}environments/${environmentId}/featurestates/`, + Object.assign({}, flag, { + enabled: false, + environment: environmentId, + feature: projectFlag, + }), + ) } - if (store.model) { - store.model.lastSaved = new Date().valueOf() - } - onComplete && onComplete() - store.saved({}) + const segmentOverridesRequest = + mode === 'SEGMENT' && segmentOverrides + ? (segmentOverrides.length + ? updateSegmentPriorities( + getStore(), + segmentOverrides.map((override, index) => ({ + id: override.id, + priority: index, + })), + ) + : Promise.resolve([]) + ).then(() => + Promise.all( + segmentOverrides.map((override) => + data.put( + `${Project.api}features/featurestates/${override.feature_segment_value.id}/`, + { + ...override.feature_segment_value, + enabled: override.enabled, + feature_state_value: Utils.valueToFeatureState( + override.value, + ), + multivariate_feature_state_values: + override.multivariate_options && + override.multivariate_options.map((o) => { + if (o.multivariate_feature_option) return o + return { + multivariate_feature_option: + environmentFlag + .multivariate_feature_state_values[ + o.multivariate_feature_option_index + ].multivariate_feature_option, + percentage_allocation: o.percentage_allocation, + } + }), + }, + ), + ), + ), + ) + : Promise.resolve() + + Promise.all([prom, segmentOverridesRequest]) + .then(([res, segmentRes]) => { + if (store.model) { + store.model.keyedEnvironmentFeatures[projectFlag.id] = res + if (segmentRes) { + const feature = _.find( + store.model.features, + (f) => f.id === projectFlag.id, + ) + if (feature) { + feature.feature_segments = _.map( + segmentRes.feature_segments, + (segment) => ({ + ...segment, + segment: segment.segment.id, + }), + ) + } + } + } + + if (store.model) { + 
store.model.lastSaved = new Date().valueOf() + } + onComplete && onComplete() + store.saved({}) + }) + .catch((e) => { + API.ajaxHandler(store, e) + }) + }) + .catch((e) => { + API.ajaxHandler(store, e) }) - }) }, editFeatureStateChangeRequest: async ( projectId: string, @@ -770,13 +782,17 @@ const controller = { ) } - prom.then((res) => { - if (store.model) { - store.model.lastSaved = new Date().valueOf() - } - onComplete && onComplete() - store.saved({}) - }) + prom + .then((res) => { + if (store.model) { + store.model.lastSaved = new Date().valueOf() + } + onComplete && onComplete() + store.saved({}) + }) + .catch((e) => { + API.ajaxHandler(store, e) + }) }, getFeatureUsage(projectId, environmentId, flag, period) { data diff --git a/frontend/web/components/ErrorMessage.js b/frontend/web/components/ErrorMessage.js index cbf1832f6c7e..b50f644e6093 100644 --- a/frontend/web/components/ErrorMessage.js +++ b/frontend/web/components/ErrorMessage.js @@ -28,7 +28,9 @@ export default class ErrorMessage extends PureComponent { - {typeof error === 'object' ? ( + {error instanceof Error ? ( + error.message + ) : typeof error === 'object' ? (
Date: Tue, 13 Aug 2024 13:43:25 -0400 Subject: [PATCH 122/247] test: seed data for permission and roles e2e tests (#4173) Co-authored-by: Matthew Elwell --- api/app/settings/common.py | 16 +++ api/e2etests/e2e_seed_data.py | 132 ++++++++++++++++-- api/e2etests/middleware.py | 16 +++ api/e2etests/permissions.py | 13 +- api/projects/permissions.py | 2 + .../end_to_end/test_integration_e2e_tests.py | 9 +- .../test_unit_projects_permissions.py | 26 ++++ 7 files changed, 194 insertions(+), 20 deletions(-) create mode 100644 api/e2etests/middleware.py diff --git a/api/app/settings/common.py b/api/app/settings/common.py index c925ef9344fd..c3fdc55384c7 100644 --- a/api/app/settings/common.py +++ b/api/app/settings/common.py @@ -517,6 +517,10 @@ LOGOUT_URL = "/admin/logout/" # Enable E2E tests +E2E_TEST_AUTH_TOKEN = env.str("E2E_TEST_AUTH_TOKEN", default=None) +if E2E_TEST_AUTH_TOKEN is not None: + MIDDLEWARE.append("e2etests.middleware.E2ETestMiddleware") + ENABLE_FE_E2E = env.bool("ENABLE_FE_E2E", default=False) # Email associated with user that is used by front end for end to end testing purposes E2E_TEST_EMAIL_DOMAIN = "flagsmithe2etestdomain.io" @@ -526,6 +530,18 @@ E2E_CHANGE_EMAIL_USER = f"e2e_change_email@{E2E_TEST_EMAIL_DOMAIN}" # User email address used for the rest of the E2E tests E2E_USER = f"e2e_user@{E2E_TEST_EMAIL_DOMAIN}" +E2E_NON_ADMIN_USER_WITH_ORG_PERMISSIONS = ( + f"e2e_non_admin_user_with_org_permissions@{E2E_TEST_EMAIL_DOMAIN}" +) +E2E_NON_ADMIN_USER_WITH_PROJECT_PERMISSIONS = ( + f"e2e_non_admin_user_with_project_permissions@{E2E_TEST_EMAIL_DOMAIN}" +) +E2E_NON_ADMIN_USER_WITH_ENV_PERMISSIONS = ( + f"e2e_non_admin_user_with_env_permissions@{E2E_TEST_EMAIL_DOMAIN}" +) +E2E_NON_ADMIN_USER_WITH_A_ROLE = ( + f"e2e_non_admin_user_with_a_role@{E2E_TEST_EMAIL_DOMAIN}" +) # Identity for E2E segment tests E2E_IDENTITY = "test-identity" diff --git a/api/e2etests/e2e_seed_data.py b/api/e2etests/e2e_seed_data.py index d21d0054031b..eba97bd65cb8 100644 --- 
a/api/e2etests/e2e_seed_data.py +++ b/api/e2etests/e2e_seed_data.py @@ -4,13 +4,34 @@ from edge_api.identities.models import EdgeIdentity from environments.identities.models import Identity from environments.models import Environment +from environments.permissions.constants import ( + UPDATE_FEATURE_STATE, + VIEW_ENVIRONMENT, + VIEW_IDENTITIES, +) +from environments.permissions.models import UserEnvironmentPermission from organisations.models import Organisation, OrganisationRole, Subscription -from projects.models import Project -from users.models import FFAdminUser +from organisations.permissions.models import UserOrganisationPermission +from organisations.permissions.permissions import ( + CREATE_PROJECT, + MANAGE_USER_GROUPS, +) +from organisations.subscriptions.constants import SCALE_UP +from projects.models import Project, UserProjectPermission +from projects.permissions import ( + CREATE_ENVIRONMENT, + CREATE_FEATURE, + VIEW_AUDIT_LOG, + VIEW_PROJECT, +) +from users.models import FFAdminUser, UserPermissionGroup # Password used by all the test users PASSWORD = "Str0ngp4ssw0rd!" 
+PROJECT_PERMISSION_PROJECT = "My Test Project 5 Project Permission" +ENV_PERMISSION_PROJECT = "My Test Project 6 Env Permission" + def delete_user_and_its_organisations(user_email: str) -> None: user: FFAdminUser | None = FFAdminUser.objects.filter(email=user_email).first() @@ -25,6 +46,18 @@ def teardown() -> None: delete_user_and_its_organisations(user_email=settings.E2E_SIGNUP_USER) delete_user_and_its_organisations(user_email=settings.E2E_USER) delete_user_and_its_organisations(user_email=settings.E2E_CHANGE_EMAIL_USER) + delete_user_and_its_organisations( + user_email=settings.E2E_NON_ADMIN_USER_WITH_ORG_PERMISSIONS + ) + delete_user_and_its_organisations( + user_email=settings.E2E_NON_ADMIN_USER_WITH_PROJECT_PERMISSIONS + ) + delete_user_and_its_organisations( + user_email=settings.E2E_NON_ADMIN_USER_WITH_ENV_PERMISSIONS + ) + delete_user_and_its_organisations( + user_email=settings.E2E_NON_ADMIN_USER_WITH_A_ROLE + ) def seed_data() -> None: @@ -36,6 +69,44 @@ def seed_data() -> None: username=settings.E2E_USER, ) org_admin.add_organisation(organisation, OrganisationRole.ADMIN) + non_admin_user_with_org_permissions: FFAdminUser = FFAdminUser.objects.create_user( + email=settings.E2E_NON_ADMIN_USER_WITH_ORG_PERMISSIONS, + password=PASSWORD, + ) + non_admin_user_with_project_permissions: FFAdminUser = ( + FFAdminUser.objects.create_user( + email=settings.E2E_NON_ADMIN_USER_WITH_PROJECT_PERMISSIONS, + password=PASSWORD, + ) + ) + non_admin_user_with_env_permissions: FFAdminUser = FFAdminUser.objects.create_user( + email=settings.E2E_NON_ADMIN_USER_WITH_ENV_PERMISSIONS, + password=PASSWORD, + ) + non_admin_user_with_a_role: FFAdminUser = FFAdminUser.objects.create_user( + email=settings.E2E_NON_ADMIN_USER_WITH_A_ROLE, + password=PASSWORD, + ) + non_admin_user_with_org_permissions.add_organisation( + organisation, + ) + non_admin_user_with_project_permissions.add_organisation( + organisation, + ) + non_admin_user_with_env_permissions.add_organisation( + 
organisation, + ) + non_admin_user_with_a_role.add_organisation( + organisation, + ) + + # Add permissions to the non-admin user with org permissions + user_org_permission = UserOrganisationPermission.objects.create( + user=non_admin_user_with_org_permissions, organisation=organisation + ) + user_org_permission.add_permission(CREATE_PROJECT) + user_org_permission.add_permission(MANAGE_USER_GROUPS) + UserPermissionGroup.objects.create(name="TestGroup", organisation=organisation) # We add different projects and environments to give each e2e test its own isolated context. project_test_data = [ @@ -49,7 +120,17 @@ def seed_data() -> None: {"name": "My Test Project 2", "environments": ["Development"]}, {"name": "My Test Project 3", "environments": ["Development"]}, {"name": "My Test Project 4", "environments": ["Development"]}, + { + "name": PROJECT_PERMISSION_PROJECT, + "environments": ["Development"], + }, + {"name": ENV_PERMISSION_PROJECT, "environments": ["Development"]}, + {"name": "My Test Project 7 Role", "environments": ["Development"]}, ] + # Upgrade organisation seats + Subscription.objects.filter(organisation__in=org_admin.organisations.all()).update( + max_seats=8, plan=SCALE_UP, subscription_id="test_subscription_id" + ) # Create projects and environments projects = [] @@ -58,19 +139,59 @@ def seed_data() -> None: project = Project.objects.create( name=project_info["name"], organisation=organisation ) + if project_info["name"] == PROJECT_PERMISSION_PROJECT: + # Add permissions to the non-admin user with project permissions + user_proj_permission: UserProjectPermission = ( + UserProjectPermission.objects.create( + user=non_admin_user_with_project_permissions, project=project + ) + ) + [ + user_proj_permission.add_permission(permission_key) + for permission_key in [ + VIEW_PROJECT, + CREATE_ENVIRONMENT, + CREATE_FEATURE, + VIEW_AUDIT_LOG, + ] + ] projects.append(project) for env_name in project_info["environments"]: environment = 
Environment.objects.create(name=env_name, project=project) + + if project_info["name"] == ENV_PERMISSION_PROJECT: + # Add permissions to the non-admin user with env permissions + user_env_permission = UserEnvironmentPermission.objects.create( + user=non_admin_user_with_env_permissions, environment=environment + ) + user_env_proj_permission: UserProjectPermission = ( + UserProjectPermission.objects.create( + user=non_admin_user_with_env_permissions, project=project + ) + ) + user_env_proj_permission.add_permission(VIEW_PROJECT) + user_env_proj_permission.add_permission(CREATE_FEATURE) + [ + user_env_permission.add_permission(permission_key) + for permission_key in [ + VIEW_ENVIRONMENT, + UPDATE_FEATURE_STATE, + VIEW_IDENTITIES, + ] + ] environments.append(environment) - # We're only creating identities for 3 of the 5 environments because + # We're only creating identities for 6 of the 7 environments because # they are necessary for the environments created above and to keep # the e2e tests isolated." 
identities_test_data = [ {"identifier": settings.E2E_IDENTITY, "environment": environments[2]}, {"identifier": settings.E2E_IDENTITY, "environment": environments[3]}, {"identifier": settings.E2E_IDENTITY, "environment": environments[4]}, + {"identifier": settings.E2E_IDENTITY, "environment": environments[5]}, + {"identifier": settings.E2E_IDENTITY, "environment": environments[6]}, + {"identifier": settings.E2E_IDENTITY, "environment": environments[7]}, ] for identity_info in identities_test_data: @@ -82,8 +203,3 @@ def seed_data() -> None: EdgeIdentity(engine_identity).save() else: Identity.objects.create(**identity_info) - - # Upgrade organisation seats - Subscription.objects.filter(organisation__in=org_admin.organisations.all()).update( - max_seats=2 - ) diff --git a/api/e2etests/middleware.py b/api/e2etests/middleware.py new file mode 100644 index 000000000000..c57c3aeef4ec --- /dev/null +++ b/api/e2etests/middleware.py @@ -0,0 +1,16 @@ +from django.conf import settings + + +class E2ETestMiddleware: + def __init__(self, get_response): + self.get_response = get_response + + def __call__(self, request): + request.is_e2e = False + if ( + request.META.get("HTTP_X_E2E_TEST_AUTH_TOKEN") + == settings.E2E_TEST_AUTH_TOKEN + ): + request.is_e2e = True + + return self.get_response(request) diff --git a/api/e2etests/permissions.py b/api/e2etests/permissions.py index 43a2b5f1a46f..c7d977261190 100644 --- a/api/e2etests/permissions.py +++ b/api/e2etests/permissions.py @@ -1,13 +1,8 @@ -import os - +from django.views import View from rest_framework.permissions import BasePermission +from rest_framework.request import Request class E2ETestPermission(BasePermission): - def has_permission(self, request, view): - if "E2E_TEST_AUTH_TOKEN" not in os.environ: - return False - return ( - request.META.get("HTTP_X_E2E_TEST_AUTH_TOKEN") - == os.environ["E2E_TEST_AUTH_TOKEN"] - ) + def has_permission(self, request: Request, view: View) -> bool: + return getattr(request, "is_e2e", False) 
is True diff --git a/api/projects/permissions.py b/api/projects/permissions.py index 1b4bddc24a45..2df8503cc1d1 100644 --- a/api/projects/permissions.py +++ b/api/projects/permissions.py @@ -48,12 +48,14 @@ def has_permission(self, request, view): subscription_metadata = ( organisation.subscription.get_subscription_metadata() ) + total_projects_created = Project.objects.filter( organisation=organisation ).count() if ( subscription_metadata.projects and total_projects_created >= subscription_metadata.projects + and not getattr(request, "is_e2e", False) is True ): return False if organisation.restrict_project_create_to_admin: diff --git a/api/tests/integration/e2etests/end_to_end/test_integration_e2e_tests.py b/api/tests/integration/e2etests/end_to_end/test_integration_e2e_tests.py index c2d187007839..731ffe28d3a2 100644 --- a/api/tests/integration/e2etests/end_to_end/test_integration_e2e_tests.py +++ b/api/tests/integration/e2etests/end_to_end/test_integration_e2e_tests.py @@ -5,6 +5,7 @@ from rest_framework.test import APIClient from organisations.models import Subscription +from organisations.subscriptions.constants import SCALE_UP from users.models import FFAdminUser @@ -14,8 +15,8 @@ def test_e2e_teardown(settings, db) -> None: token = "test-token" register_url = "/api/v1/auth/users/" settings.ENABLE_FE_E2E = True - - os.environ["E2E_TEST_AUTH_TOKEN"] = token + settings.E2E_TEST_AUTH_TOKEN = token + settings.MIDDLEWARE.append("e2etests.middleware.E2ETestMiddleware") client = APIClient(HTTP_X_E2E_TEST_AUTH_TOKEN=token) @@ -41,7 +42,9 @@ def test_e2e_teardown(settings, db) -> None: for subscription in Subscription.objects.filter( organisation__in=e2e_user.organisations.all() ): - assert subscription.max_seats == 2 + assert subscription.max_seats == 8 + assert subscription.plan == SCALE_UP + assert subscription.subscription_id == "test_subscription_id" def test_e2e_teardown_with_incorrect_token(settings, db): diff --git 
a/api/tests/unit/projects/test_unit_projects_permissions.py b/api/tests/unit/projects/test_unit_projects_permissions.py index 8662d58bbd5f..f7b874ab0b81 100644 --- a/api/tests/unit/projects/test_unit_projects_permissions.py +++ b/api/tests/unit/projects/test_unit_projects_permissions.py @@ -1,3 +1,4 @@ +import os from unittest import mock import pytest @@ -58,6 +59,31 @@ def test_create_project_has_permission( assert response is True +def test_create_project_has_permission_with_e2e_test_auth_token( + staff_user: FFAdminUser, + organisation: Organisation, + with_organisation_permissions: WithOrganisationPermissionsCallable, +) -> None: + # Given + with_organisation_permissions([CREATE_PROJECT]) + mock_request = mock.MagicMock( + user=staff_user, data={"name": "Test", "organisation": organisation.id} + ) + token = "test-token" + settings.ENABLE_FE_E2E = True + os.environ["E2E_TEST_AUTH_TOKEN"] = token + + mock_request.META = {"E2E_TEST_AUTH_TOKEN": token} + mock_view = mock.MagicMock(action="create", detail=False) + project_permissions = ProjectPermissions() + + # When + response = project_permissions.has_permission(mock_request, mock_view) + + # Then + assert response is True + + def test_admin_can_update_project_has_permission( organisation: Organisation, staff_user: FFAdminUser, From a5197d1ff5ad9f50fe680dc054dcfeb7a4bca289 Mon Sep 17 00:00:00 2001 From: Gagan Date: Wed, 14 Aug 2024 12:58:08 +0530 Subject: [PATCH 123/247] refactor(get_permitted_environments): get rid of .distinct() (#4486) --- api/permissions/permission_service.py | 9 +++++---- .../unit/environments/test_unit_environments_views.py | 2 +- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/api/permissions/permission_service.py b/api/permissions/permission_service.py index aa938401f228..9cff1f13e486 100644 --- a/api/permissions/permission_service.py +++ b/api/permissions/permission_service.py @@ -134,12 +134,13 @@ def get_permitted_environments_for_user( return 
queryset.prefetch_related("metadata") return queryset - base_filter = get_base_permission_filter( + environment_ids_from_base_filter = get_object_id_from_base_permission_filter( user, Environment, permission_key, tag_ids=tag_ids ) - filter_ = base_filter & Q(project=project) + queryset = Environment.objects.filter( + id__in=environment_ids_from_base_filter, project=project + ) - queryset = Environment.objects.filter(filter_) if prefetch_metadata: queryset = queryset.prefetch_related("metadata") @@ -148,7 +149,7 @@ def get_permitted_environments_for_user( # the select parameters. This leads to an N+1 query for # lists of environments when description is included, as # each environment object re-queries the DB seperately. - return queryset.distinct().defer("description") + return queryset.defer("description") def get_permitted_environments_for_master_api_key( diff --git a/api/tests/unit/environments/test_unit_environments_views.py b/api/tests/unit/environments/test_unit_environments_views.py index 6329efae684b..d5fec31fe8de 100644 --- a/api/tests/unit/environments/test_unit_environments_views.py +++ b/api/tests/unit/environments/test_unit_environments_views.py @@ -569,7 +569,7 @@ def test_view_environment_with_staff__query_count_is_expected( url = reverse("api-v1:environments:environment-list") data = {"project": project.id} - expected_query_count = 7 + expected_query_count = 9 # When with django_assert_num_queries(expected_query_count): response = staff_client.get(url, data=data, content_type="application/json") From 9e25990aaeaad8e13e7bc508aba8d08173f45e29 Mon Sep 17 00:00:00 2001 From: Kyle Johnson Date: Wed, 14 Aug 2024 09:48:38 +0100 Subject: [PATCH 124/247] chore: flagsmith on flagsmith sdk urls (#4436) --- .../code-help/create-user/create-user-curl.js | 2 +- .../code-help/create-user/create-user-flutter.js | 13 +++++++++++-- .../code-help/create-user/create-user-ios.js | 7 ++++++- .../code-help/create-user/create-user-java.js | 9 ++++++++- 
.../code-help/create-user/create-user-js.js | 7 ++++++- .../code-help/create-user/create-user-next.js | 13 +++++++++++-- .../code-help/create-user/create-user-node.js | 14 ++++++++++---- .../code-help/create-user/create-user-php.js | 6 +++++- .../code-help/create-user/create-user-python.js | 7 ++++++- .../code-help/create-user/create-user-react.js | 16 +++++++++++----- .../code-help/create-user/create-user-ruby.js | 8 ++++++-- .../code-help/create-user/create-user-rust.js | 7 ++++++- frontend/common/code-help/init/init-curl.js | 2 +- frontend/common/code-help/init/init-flutter.js | 13 +++++++++++-- frontend/common/code-help/init/init-go.js | 7 ++++++- frontend/common/code-help/init/init-ios.js | 7 ++++++- frontend/common/code-help/init/init-java.js | 8 +++++++- frontend/common/code-help/init/init-js.js | 3 ++- frontend/common/code-help/init/init-next.js | 13 +++++++++++-- frontend/common/code-help/init/init-node.js | 7 ++++++- frontend/common/code-help/init/init-php.js | 5 ++++- frontend/common/code-help/init/init-python.js | 7 ++++++- frontend/common/code-help/init/init-react.js | 12 +++++++++--- frontend/common/code-help/init/init-ruby.js | 8 ++++++-- frontend/common/code-help/init/init-rust.js | 7 ++++++- .../offline_client/offline-client-curl.js | 2 +- .../offline_server/offline-server-curl.js | 2 +- frontend/common/code-help/traits/traits-curl.js | 2 +- frontend/common/code-help/traits/traits-java.js | 9 ++++++++- frontend/common/code-help/traits/traits-js.js | 8 +++++++- frontend/common/code-help/traits/traits-next.js | 13 +++++++++++-- frontend/common/code-help/traits/traits-node.js | 15 ++++++++++----- frontend/common/code-help/traits/traits-php.js | 5 ++++- .../common/code-help/traits/traits-python.js | 7 ++++++- frontend/common/code-help/traits/traits-react.js | 16 +++++++++++----- frontend/common/code-help/traits/traits-ruby.js | 8 ++++++-- frontend/common/code-help/traits/traits-rust.js | 7 ++++++- frontend/common/constants.ts | 6 ++++-- 38 files 
changed, 244 insertions(+), 64 deletions(-) diff --git a/frontend/common/code-help/create-user/create-user-curl.js b/frontend/common/code-help/create-user/create-user-curl.js index 5f5c3883317d..a13d8edc5224 100644 --- a/frontend/common/code-help/create-user/create-user-curl.js +++ b/frontend/common/code-help/create-user/create-user-curl.js @@ -1,6 +1,6 @@ module.exports = (envId, { USER_ID }, userId) => `// Identify/create user -curl -i 'https://edge.api.flagsmith.com/api/v1/identities/?identifier=${ +curl -i '${Project.flagsmithClientAPI}identities/?identifier=${ userId || USER_ID }' \\ -H 'x-environment-key: ${envId}' diff --git a/frontend/common/code-help/create-user/create-user-flutter.js b/frontend/common/code-help/create-user/create-user-flutter.js index a6f5f5aae9e7..c639c79ed466 100644 --- a/frontend/common/code-help/create-user/create-user-flutter.js +++ b/frontend/common/code-help/create-user/create-user-flutter.js @@ -1,9 +1,14 @@ +import Constants from 'common/constants' module.exports = ( envId, { FEATURE_NAME, FEATURE_NAME_ALT, USER_ID }, userId, ) => `final flagsmithClient = FlagsmithClient( - apiKey: '${envId}' + apiKey: '${envId}',${ + Constants.isCustomFlagsmithUrl + ? `\n baseURI: '${Project.flagsmithClientAPI}',` + : '' +} config: config, seeds: [ Flag.seed('feature', enabled: true), @@ -12,7 +17,11 @@ module.exports = ( //if you prefer async initialization then you should use //final flagsmithClient = await FlagsmithClient.init( -// apiKey: 'YOUR_ENV_API_KEY', +// apiKey: '${envId}',${ + Constants.isCustomFlagsmithUrl + ? 
`\n// baseURI: '${Project.flagsmithClientAPI}',` + : '' +} // config: config, // seeds: [ // Flag.seed('feature', enabled: true), diff --git a/frontend/common/code-help/create-user/create-user-ios.js b/frontend/common/code-help/create-user/create-user-ios.js index 8fb6add2bd03..c1fbe2958494 100644 --- a/frontend/common/code-help/create-user/create-user-ios.js +++ b/frontend/common/code-help/create-user/create-user-ios.js @@ -1,3 +1,5 @@ +import Constants from 'common/constants' + module.exports = ( envId, { FEATURE_NAME, FEATURE_NAME_ALT, USER_ID }, @@ -8,7 +10,10 @@ func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool { - Flagsmith.shared.apiKey = "${envId}" + Flagsmith.shared.apiKey = "${envId}"${ + Constants.isCustomFlagsmithUrl && + `\n Flagsmith.shared.baseURL = "${Project.flagsmithClientAPI}"\n` +} // This will create a user in the dashboard if they don't already exist // Check for a feature diff --git a/frontend/common/code-help/create-user/create-user-java.js b/frontend/common/code-help/create-user/create-user-java.js index f92a94e3a69d..40809e0af308 100644 --- a/frontend/common/code-help/create-user/create-user-java.js +++ b/frontend/common/code-help/create-user/create-user-java.js @@ -1,10 +1,17 @@ +import Constants from 'common/constants' + module.exports = ( envId, { FEATURE_NAME, FEATURE_NAME_ALT, LIB_NAME, LIB_NAME_JAVA, USER_ID }, userId, ) => `${LIB_NAME_JAVA} ${LIB_NAME} = ${LIB_NAME_JAVA} .newBuilder() - .setApiKey("${envId}") + .setApiKey("${envId}")${ + Constants.isCustomFlagsmithUrl && + `\n .withConfiguration(FlagsmithConfig.builder() + .baseUri("${Project.flagsmithClientAPI}") + .build())` +} .build(); // Identify the user diff --git a/frontend/common/code-help/create-user/create-user-js.js b/frontend/common/code-help/create-user/create-user-js.js index cf1ae6d72b25..eaa8aad92bf1 100644 --- a/frontend/common/code-help/create-user/create-user-js.js +++ 
b/frontend/common/code-help/create-user/create-user-js.js @@ -1,3 +1,4 @@ +import Constants from 'common/constants' module.exports = ( envId, { @@ -13,7 +14,11 @@ module.exports = ( // Option 1: initialise with an identity and traits ${LIB_NAME}.init({ - environmentID: "${envId}", + environmentID: "${envId}",${ + Constants.isCustomFlagsmithUrl + ? `\n api: "${Project.flagsmithClientAPI}",` + : '' +} identity: "${userId || USER_ID}", traits: { "${TRAIT_NAME}": 21 }, onChange: (oldFlags, params) => { /* ... */ }, diff --git a/frontend/common/code-help/create-user/create-user-next.js b/frontend/common/code-help/create-user/create-user-next.js index 36ee38e9313c..796ace1b04c1 100644 --- a/frontend/common/code-help/create-user/create-user-next.js +++ b/frontend/common/code-help/create-user/create-user-next.js @@ -1,3 +1,4 @@ +import Constants from 'common/constants' module.exports = ( envId, { FEATURE_NAME, FEATURE_NAME_ALT, LIB_NAME, TRAIT_NAME, USER_ID }, @@ -25,7 +26,11 @@ export default function App({ Component, pageProps, flagsmithState } { <FlagsmithProvider serverState={flagsmithState} options={{ - environmentID: '${envId}', + environmentID: "${envId}",${ + Constants.isCustomFlagsmithUrl + ? `\n api: "${Project.flagsmithClientAPI}",` + : '' +} }} flagsmith={flagsmith}> <Component {...pageProps} /> @@ -36,7 +41,11 @@ export default function App({ Component, pageProps, flagsmithState } { MyApp.getInitialProps = async () => { // calls page's \`getInitialProps\` and fills \`appProps.pageProps\` await flagsmith.init({ // fetches flags on the server - environmentID, + environmentID: "${envId}",${ + Constants.isCustomFlagsmithUrl + ? 
`\n api: "${Project.flagsmithClientAPI}",` + : '' +} preventFetch: true }); await flagsmith.identify('${ diff --git a/frontend/common/code-help/create-user/create-user-node.js b/frontend/common/code-help/create-user/create-user-node.js index ae39aa855eba..46e393cb1a1b 100644 --- a/frontend/common/code-help/create-user/create-user-node.js +++ b/frontend/common/code-help/create-user/create-user-node.js @@ -1,12 +1,18 @@ +import Constants from 'common/constants' + module.exports = ( envId, - { FEATURE_NAME, FEATURE_NAME_ALT, USER_ID }, + { FEATURE_NAME, FEATURE_NAME_ALT, LIB_NAME, NPM_NODE_CLIENT, USER_ID }, userId, -) => `const Flagsmith = require('flagsmith-nodejs'); +) => `import Flagsmith from "${NPM_NODE_CLIENT}"; // Add this line if you're using ${LIB_NAME} via npm -const flagsmith = new Flagsmith( +const ${LIB_NAME} = new Flagsmith({${ + Constants.isCustomFlagsmithUrl && + `\n apiUrl: '${Project.flagsmithClientAPI}',` +} environmentKey: '${envId}' -); +}); + // Identify the user const flags = await flagsmith.getIdentityFlags('${ diff --git a/frontend/common/code-help/create-user/create-user-php.js b/frontend/common/code-help/create-user/create-user-php.js index 7eea0fe838e9..1995d959d0df 100644 --- a/frontend/common/code-help/create-user/create-user-php.js +++ b/frontend/common/code-help/create-user/create-user-php.js @@ -1,10 +1,14 @@ +import Constants from 'common/constants' + module.exports = ( envId, { FEATURE_NAME, FEATURE_NAME_ALT, USER_ID }, userId, ) => `use Flagsmith\\Flagsmith; -$flagsmith = new Flagsmith('${envId}'); +$flagsmith = new Flagsmith('${envId}'${ + Constants.isCustomFlagsmithUrl && `,\n '${Project.flagsmithClientAPI}'\n` +}); // Identify the user $flags = $flagsmith->getIdentityFlags('${userId}', $traits); diff --git a/frontend/common/code-help/create-user/create-user-python.js b/frontend/common/code-help/create-user/create-user-python.js index d8618f97b0a4..82c8bbf9e42a 100644 --- 
a/frontend/common/code-help/create-user/create-user-python.js +++ b/frontend/common/code-help/create-user/create-user-python.js @@ -1,10 +1,15 @@ +import Constants from 'common/constants' + module.exports = ( envId, { FEATURE_NAME, FEATURE_NAME_ALT, USER_ID }, userId, ) => `from flagsmith import Flagsmith -flagsmith = Flagsmith(environment_key="${envId}") +flagsmith = Flagsmith(environment_key="${envId}"${ + Constants.isCustomFlagsmithUrl && + `,\n api_url="${Project.flagsmithClientAPI}"\n` +}) # Identify the user identity_flags = flagsmith.get_identity_flags(identifier="${ diff --git a/frontend/common/code-help/create-user/create-user-react.js b/frontend/common/code-help/create-user/create-user-react.js index e83fea32b3a4..2b17dd249c76 100644 --- a/frontend/common/code-help/create-user/create-user-react.js +++ b/frontend/common/code-help/create-user/create-user-react.js @@ -1,16 +1,22 @@ +import Constants from 'common/constants' + module.exports = ( envId, - { FEATURE_NAME, FEATURE_NAME_ALT, LIB_NAME, TRAIT_NAME, USER_ID }, + { FEATURE_NAME, FEATURE_NAME_ALT, NPM_CLIENT, TRAIT_NAME, USER_ID }, userId, ) => ` // Option 1: Initialise with an identity -import { FlagsmithProvider } from 'flagsmith/react'; +import { FlagsmithProvider } from '${NPM_CLIENT}/react'; export default function App() { return ( <FlagsmithProvider options={{ - environmentID: '${envId}', + environmentID: '${envId}',${ + Constants.isCustomFlagsmithUrl + ? 
`\n api: '${Project.flagsmithClientAPI}',` + : '' +} identity: '${userId || USER_ID}', traits: {${TRAIT_NAME}: 21}, }} @@ -22,8 +28,8 @@ export default function App() { // Option 2: Identify after initialising -import flagsmith from '${LIB_NAME}'; -import { useFlags, useFlagsmith } from 'flagsmith/react'; +import flagsmith from '${NPM_CLIENT}'; +import { useFlags, useFlagsmith } from '${NPM_CLIENT}/react'; export default function HomePage() { const flags = useFlags(['${FEATURE_NAME}','${FEATURE_NAME_ALT}']); // only causes re-render if specified flag values / traits change diff --git a/frontend/common/code-help/create-user/create-user-ruby.js b/frontend/common/code-help/create-user/create-user-ruby.js index 65d89dbc09c9..5da2f36aeedd 100644 --- a/frontend/common/code-help/create-user/create-user-ruby.js +++ b/frontend/common/code-help/create-user/create-user-ruby.js @@ -1,3 +1,5 @@ +import Constants from 'common/constants' + module.exports = ( envId, { FEATURE_NAME, FEATURE_NAME_ALT, USER_ID }, @@ -5,8 +7,10 @@ module.exports = ( ) => `require "flagsmith" $flagsmith = Flagsmith::Client.new( - environment_key: '${envId}' -) + environment_key="${envId}"${ + Constants.isCustomFlagsmithUrl && + `,\n api_url="${Project.flagsmithClientAPI}"\n` +}) // Identify the user $flags = $flagsmith.get_identity_flags('${userId || USER_ID}') diff --git a/frontend/common/code-help/create-user/create-user-rust.js b/frontend/common/code-help/create-user/create-user-rust.js index 492951a843eb..93fdd7818f79 100644 --- a/frontend/common/code-help/create-user/create-user-rust.js +++ b/frontend/common/code-help/create-user/create-user-rust.js @@ -1,3 +1,5 @@ +import Constants from 'common/constants' + module.exports = ( envId, { FEATURE_NAME, FEATURE_NAME_ALT, USER_ID }, @@ -5,7 +7,10 @@ module.exports = ( ) => ` use flagsmith::{Flag, Flagsmith, FlagsmithOptions}; -let options = FlagsmithOptions {..Default::default()}; +let options = FlagsmithOptions {${ + Constants.isCustomFlagsmithUrl && 
+ `api_url: "${Project.flagsmithClientAPI}".to_string(),\n` +}..Default::default()}; let flagsmith = Flagsmith::new( "${envId}".to_string(), options, diff --git a/frontend/common/code-help/init/init-curl.js b/frontend/common/code-help/init/init-curl.js index a45093e6d404..5f847440ad67 100644 --- a/frontend/common/code-help/init/init-curl.js +++ b/frontend/common/code-help/init/init-curl.js @@ -1,4 +1,4 @@ module.exports = (envId) => ` -curl -i 'https://edge.api.flagsmith.com/api/v1/flags/' \\ +curl -i '${Project.flagsmithClientAPI}flags/' \\ -H 'x-environment-key: ${envId}' ` diff --git a/frontend/common/code-help/init/init-flutter.js b/frontend/common/code-help/init/init-flutter.js index 295807e415de..bcd3c531b0df 100644 --- a/frontend/common/code-help/init/init-flutter.js +++ b/frontend/common/code-help/init/init-flutter.js @@ -1,10 +1,15 @@ +import Constants from 'common/constants' module.exports = ( envId, { FEATURE_NAME, FEATURE_NAME_ALT }, ) => `//In your application, initialise the Flagsmith client with your API key: final flagsmithClient = FlagsmithClient( - apiKey: '${envId}' + apiKey: '${envId}',${ + Constants.isCustomFlagsmithUrl + ? `\n baseURI: '${Project.flagsmithClientAPI}',` + : '' +} config: config, seeds: [ Flag.seed('feature', enabled: true), @@ -13,7 +18,11 @@ final flagsmithClient = FlagsmithClient( //if you prefer async initialization then you should use //final flagsmithClient = await FlagsmithClient.init( -// apiKey: 'YOUR_ENV_API_KEY', +// apiKey: '${envId}',${ + Constants.isCustomFlagsmithUrl + ? 
`\n// baseURI: '${Project.flagsmithClientAPI}',` + : '' +} // config: config, // seeds: [ // Flag.seed('feature', enabled: true), diff --git a/frontend/common/code-help/init/init-go.js b/frontend/common/code-help/init/init-go.js index 10cf678bad23..3441e307a04b 100644 --- a/frontend/common/code-help/init/init-go.js +++ b/frontend/common/code-help/init/init-go.js @@ -1,9 +1,14 @@ +import Constants from 'common/constants' + module.exports = (envId, { FEATURE_NAME, FEATURE_NAME_ALT }, customFeature) => ` ctx, cancel := context.WithCancel(context.Background()) defer cancel() // Initialise the Flagsmith client -client := flagsmith.NewClient('${envId}', flagsmith.WithContext(ctx)) +client := flagsmith.NewClient('${envId}',${ + Constants.isCustomFlagsmithUrl && + `\nflagsmith.WithBaseURL("${Project.flagsmithClientAPI}"),\n` +}flagsmith.WithContext(ctx)) // The method below triggers a network request flags, _ := client.GetEnvironmentFlags() diff --git a/frontend/common/code-help/init/init-ios.js b/frontend/common/code-help/init/init-ios.js index cd4252154c2f..e983bd0a1e5c 100644 --- a/frontend/common/code-help/init/init-ios.js +++ b/frontend/common/code-help/init/init-ios.js @@ -1,3 +1,5 @@ +import Constants from 'common/constants' + module.exports = ( envId, { FEATURE_NAME, FEATURE_NAME_ALT }, @@ -6,7 +8,10 @@ module.exports = ( func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) 
-> Bool { - Flagsmith.shared.apiKey = "${envId}" + Flagsmith.shared.apiKey = "${envId}"${ + Constants.isCustomFlagsmithUrl && + `\n Flagsmith.shared.baseURL = "${Project.flagsmithClientAPI}"\n` +} // Check for a feature Flagsmith.shared .hasFeatureFlag(withID: "${FEATURE_NAME}", forIdentity: nil) { (result) in diff --git a/frontend/common/code-help/init/init-java.js b/frontend/common/code-help/init/init-java.js index f2d44607a5ac..57954e91d348 100644 --- a/frontend/common/code-help/init/init-java.js +++ b/frontend/common/code-help/init/init-java.js @@ -1,10 +1,16 @@ +import Constants from 'common/constants' module.exports = ( envId, { FEATURE_NAME, FEATURE_NAME_ALT, LIB_NAME, LIB_NAME_JAVA }, customFeature, ) => `${LIB_NAME_JAVA} ${LIB_NAME} = ${LIB_NAME_JAVA} .newBuilder() - .setApiKey("${envId}") + .setApiKey("${envId}")${ + Constants.isCustomFlagsmithUrl && + `\n .withConfiguration(FlagsmithConfig.builder() + .baseUri("${Project.flagsmithClientAPI}") + .build())` +} .build(); Flags flags = flagsmith.getEnvironmentFlags(); diff --git a/frontend/common/code-help/init/init-js.js b/frontend/common/code-help/init/init-js.js index 7dbfa8b11008..def8e9a2867e 100644 --- a/frontend/common/code-help/init/init-js.js +++ b/frontend/common/code-help/init/init-js.js @@ -1,3 +1,4 @@ +import Constants from 'common/constants' module.exports = ( envId, { FEATURE_FUNCTION, FEATURE_NAME, FEATURE_NAME_ALT, LIB_NAME, NPM_CLIENT }, @@ -5,7 +6,7 @@ module.exports = ( ) => `import ${LIB_NAME} from "${NPM_CLIENT}"; // Add this line if you're using ${LIB_NAME} via npm ${LIB_NAME}.init({ - environmentID: "${envId}", + environmentID: "${envId}",${Constants.isCustomFlagsmithUrl ? 
`\n api: "${Project.flagsmithClientAPI}",` : ''} onChange: (oldFlags, params) => { // Occurs whenever flags are changed // Determines if the update came from the server or local cached storage const { isFromServer } = params; diff --git a/frontend/common/code-help/init/init-next.js b/frontend/common/code-help/init/init-next.js index b5c679c899f9..07e0047ad0e9 100644 --- a/frontend/common/code-help/init/init-next.js +++ b/frontend/common/code-help/init/init-next.js @@ -1,3 +1,4 @@ +import Constants from 'common/constants' module.exports = ( envId, { FEATURE_NAME, FEATURE_NAME_ALT, LIB_NAME, NPM_CLIENT }, @@ -10,7 +11,11 @@ export default function App({ Component, pageProps, flagsmithState } { <FlagsmithProvider serverState={flagsmithState} options={{ - environmentID: '${envId}', + environmentID: "${envId}",${ + Constants.isCustomFlagsmithUrl + ? `\n api: "${Project.flagsmithClientAPI}",` + : '' +} }} flagsmith={flagsmith}> <Component {...pageProps} /> @@ -20,7 +25,11 @@ export default function App({ Component, pageProps, flagsmithState } { App.getInitialProps = async () => { await flagsmith.init({ // fetches flags on the server and passes them to the App - environmentID, + environmentID: "${envId}",${ + Constants.isCustomFlagsmithUrl + ? 
`\n api: "${Project.flagsmithClientAPI}",` + : '' +} }); return { flagsmithState: flagsmith.getState() } } diff --git a/frontend/common/code-help/init/init-node.js b/frontend/common/code-help/init/init-node.js index cc81b604a1de..4c49926e9a06 100644 --- a/frontend/common/code-help/init/init-node.js +++ b/frontend/common/code-help/init/init-node.js @@ -1,10 +1,15 @@ +import Constants from 'common/constants' + module.exports = ( envId, { FEATURE_NAME, FEATURE_NAME_ALT, LIB_NAME, NPM_NODE_CLIENT }, customFeature, ) => `import Flagsmith from "${NPM_NODE_CLIENT}"; // Add this line if you're using ${LIB_NAME} via npm -const ${LIB_NAME} = new Flagsmith({ +const ${LIB_NAME} = new Flagsmith({${ + Constants.isCustomFlagsmithUrl && + `\n apiUrl: '${Project.flagsmithClientAPI}',` +} environmentKey: '${envId}' }); diff --git a/frontend/common/code-help/init/init-php.js b/frontend/common/code-help/init/init-php.js index 9c7168f3effd..9d27c31711a4 100644 --- a/frontend/common/code-help/init/init-php.js +++ b/frontend/common/code-help/init/init-php.js @@ -1,9 +1,12 @@ +import Constants from 'common/constants' module.exports = ( envId, { FEATURE_NAME, FEATURE_NAME_ALT }, ) => `use Flagsmith\\Flagsmith; -$flagsmith = new Flagsmith('${envId}'); +$flagsmith = new Flagsmith('${envId}'${ + Constants.isCustomFlagsmithUrl && `,\n '${Project.flagsmithClientAPI}'\n` +}); // Check for a feature $${FEATURE_NAME} = $flags->isFeatureEnabled('${FEATURE_NAME}'); diff --git a/frontend/common/code-help/init/init-python.js b/frontend/common/code-help/init/init-python.js index 2f596b26b410..088300c490e9 100644 --- a/frontend/common/code-help/init/init-python.js +++ b/frontend/common/code-help/init/init-python.js @@ -1,9 +1,14 @@ +import Constants from 'common/constants' + module.exports = ( envId, { FEATURE_NAME, FEATURE_NAME_ALT }, ) => `from flagsmith import Flagsmith -flagsmith = Flagsmith(environment_key="${envId}") +flagsmith = Flagsmith(environment_key="${envId}"${ + 
Constants.isCustomFlagsmithUrl && + `,\n api_url="${Project.flagsmithClientAPI}"\n` +}) # The method below triggers a network request flags = flagsmith.get_environment_flags() diff --git a/frontend/common/code-help/init/init-react.js b/frontend/common/code-help/init/init-react.js index 41b6ffa79685..f04e0233719f 100644 --- a/frontend/common/code-help/init/init-react.js +++ b/frontend/common/code-help/init/init-react.js @@ -1,3 +1,5 @@ +import Constants from 'common/constants' + module.exports = ( envId, { FEATURE_NAME, FEATURE_NAME_ALT, LIB_NAME, NPM_CLIENT }, @@ -9,7 +11,11 @@ export default function App() { return ( <FlagsmithProvider options={{ - environmentID: '${envId}', + environmentID: '${envId}',${ + Constants.isCustomFlagsmithUrl + ? `\n api: '${Project.flagsmithClientAPI}',` + : '' +} }} flagsmith={flagsmith}> {...Your app} @@ -18,8 +24,8 @@ export default function App() { } // Home Page -import flagsmith from 'flagsmith'; -import { useFlags, useFlagsmith } from 'flagsmith/react'; +import ${LIB_NAME} from '${NPM_CLIENT}'; +import { useFlags, useFlagsmith } from '${NPM_CLIENT}/react'; export default function HomePage() { const flags = useFlags(['${FEATURE_NAME}','${FEATURE_NAME_ALT}']); // only causes re-render if specified flag values / traits change diff --git a/frontend/common/code-help/init/init-ruby.js b/frontend/common/code-help/init/init-ruby.js index 605411d88fb4..2d91a0006fc3 100644 --- a/frontend/common/code-help/init/init-ruby.js +++ b/frontend/common/code-help/init/init-ruby.js @@ -1,3 +1,5 @@ +import Constants from 'common/constants' + module.exports = ( envId, { FEATURE_NAME, FEATURE_NAME_ALT }, @@ -5,8 +7,10 @@ module.exports = ( ) => `require "flagsmith" $flagsmith = Flagsmith::Client.new( - environment_key: '${envId}' -) + environment_key="${envId}"${ + Constants.isCustomFlagsmithUrl && + `,\n api_url="${Project.flagsmithClientAPI}"\n` +}) // Load the environment's flags locally $flags = $flagsmith.get_environment_flags diff --git 
a/frontend/common/code-help/init/init-rust.js b/frontend/common/code-help/init/init-rust.js index 1ae1d72dcc3e..f547961eded4 100644 --- a/frontend/common/code-help/init/init-rust.js +++ b/frontend/common/code-help/init/init-rust.js @@ -1,7 +1,12 @@ +import Constants from 'common/constants' + module.exports = (envId, { FEATURE_NAME, FEATURE_NAME_ALT }) => ` use flagsmith::{Flag, Flagsmith, FlagsmithOptions}; -let options = FlagsmithOptions {..Default::default()}; +let options = FlagsmithOptions {${ + Constants.isCustomFlagsmithUrl && + `api_url: "${Project.flagsmithClientAPI}".to_string(),\n` +}..Default::default()}; let flagsmith = Flagsmith::new( "${envId}".to_string(), options, diff --git a/frontend/common/code-help/offline_client/offline-client-curl.js b/frontend/common/code-help/offline_client/offline-client-curl.js index e244733554f5..ceee3d366008 100644 --- a/frontend/common/code-help/offline_client/offline-client-curl.js +++ b/frontend/common/code-help/offline_client/offline-client-curl.js @@ -1,4 +1,4 @@ module.exports = (envId) => ` -curl -i 'https://edge.api.flagsmith.com/api/v1/flags/' \\ +curl -i '${Project.flagsmithClientAPI}flags/' \\ -H 'x-environment-key: ${envId}' | tee flagsmith.json ` diff --git a/frontend/common/code-help/offline_server/offline-server-curl.js b/frontend/common/code-help/offline_server/offline-server-curl.js index 36c4b202b4b7..44ceb1f56f61 100644 --- a/frontend/common/code-help/offline_server/offline-server-curl.js +++ b/frontend/common/code-help/offline_server/offline-server-curl.js @@ -1,4 +1,4 @@ module.exports = (serversideEnvironmentKey) => ` -curl -i 'https://edge.api.flagsmith.com/api/v1/environment-document/' \\ +curl -i '${Project.flagsmithClientAPI}environment-document/' \\ -H 'x-environment-key: ${serversideEnvironmentKey}' | tee flagsmith.json ` diff --git a/frontend/common/code-help/traits/traits-curl.js b/frontend/common/code-help/traits/traits-curl.js index c7fecc02b9e8..4837e58c34bb 100644 --- 
a/frontend/common/code-help/traits/traits-curl.js +++ b/frontend/common/code-help/traits/traits-curl.js @@ -2,7 +2,7 @@ module.exports = ( envId, { TRAIT_NAME, USER_ID }, userId, -) => `curl -i -X POST 'https://edge.api.flagsmith.com/api/v1/identities/' \\ +) => `curl -i -X POST '${Project.flagsmithClientAPI}identities/' \\ -H 'x-environment-key: ${envId}' \\ -H 'Content-Type: application/json; charset=utf-8' \\ -d $'{ diff --git a/frontend/common/code-help/traits/traits-java.js b/frontend/common/code-help/traits/traits-java.js index dc4bd8654e2b..6b4db8cb1815 100644 --- a/frontend/common/code-help/traits/traits-java.js +++ b/frontend/common/code-help/traits/traits-java.js @@ -1,10 +1,17 @@ +import Constants from 'common/constants' + module.exports = ( envId, { LIB_NAME, LIB_NAME_JAVA, TRAIT_NAME }, userId, ) => `${LIB_NAME_JAVA} ${LIB_NAME} = ${LIB_NAME_JAVA} .newBuilder() -.setApiKey("${envId}") +.setApiKey("${envId}")${ + Constants.isCustomFlagsmithUrl && + `\n .withConfiguration(FlagsmithConfig.builder() + .baseUri("${Project.flagsmithClientAPI}") + .build())` +} .build(); Map traits = new HashMap(); diff --git a/frontend/common/code-help/traits/traits-js.js b/frontend/common/code-help/traits/traits-js.js index fb3a03819957..11f8708cf5a3 100644 --- a/frontend/common/code-help/traits/traits-js.js +++ b/frontend/common/code-help/traits/traits-js.js @@ -1,10 +1,16 @@ +import Constants from 'common/constants' + module.exports = ( envId, { LIB_NAME, TRAIT_NAME, USER_ID }, userId, ) => `// Option 1: initialise with an identity and traits ${LIB_NAME}.init({ - environmentID: "${envId}", + environmentID: "${envId}",${ + Constants.isCustomFlagsmithUrl + ? `\n api: "${Project.flagsmithClientAPI}",` + : '' +} identity: "${userId || USER_ID}", traits: { "${TRAIT_NAME}": 21 }, onChange: (oldFlags, params) => { /* ... 
*/ }, diff --git a/frontend/common/code-help/traits/traits-next.js b/frontend/common/code-help/traits/traits-next.js index 9db7062b7884..11618d3b1752 100644 --- a/frontend/common/code-help/traits/traits-next.js +++ b/frontend/common/code-help/traits/traits-next.js @@ -1,3 +1,4 @@ +import Constants from 'common/constants' module.exports = ( envId, { FEATURE_NAME, FEATURE_NAME_ALT, LIB_NAME, TRAIT_NAME }, @@ -33,7 +34,11 @@ export default function App({ Component, pageProps, flagsmithState } { <FlagsmithProvider serverState={flagsmithState} options={{ - environmentID: '${envId}', + environmentID: "${envId}",${ + Constants.isCustomFlagsmithUrl + ? `\n api: "${Project.flagsmithClientAPI}",` + : '' +} }} flagsmith={flagsmith}> <Component {...pageProps} /> @@ -44,7 +49,11 @@ export default function App({ Component, pageProps, flagsmithState } { MyApp.getInitialProps = async () => { // calls page's \`getInitialProps\` and fills \`appProps.pageProps\` await flagsmith.init({ // fetches flags on the server - environmentID, + environmentID: "${envId}",${ + Constants.isCustomFlagsmithUrl + ? 
`\n api: "${Project.flagsmithClientAPI}",` + : '' +} preventFetch: true }); await flagsmith.identify('${USER_ID}', {${TRAIT_NAME}: 21}); // Will hydrate the app with the user's flags diff --git a/frontend/common/code-help/traits/traits-node.js b/frontend/common/code-help/traits/traits-node.js index f3a0cd8854c6..0692cdd2a963 100644 --- a/frontend/common/code-help/traits/traits-node.js +++ b/frontend/common/code-help/traits/traits-node.js @@ -1,12 +1,17 @@ +import Constants from 'common/constants' + module.exports = ( envId, - { TRAIT_NAME, USER_ID }, -) => `const Flagsmith = require('flagsmith-nodejs'); + { FEATURE_NAME, TRAIT_NAME, LIB_NAME, NPM_NODE_CLIENT, USER_ID }, + userId, +) => `import Flagsmith from "${NPM_NODE_CLIENT}"; // Add this line if you're using ${LIB_NAME} via npm -const flagsmith = new Flagsmith( +const ${LIB_NAME} = new Flagsmith({${ + Constants.isCustomFlagsmithUrl && + `\n apiUrl: '${Project.flagsmithClientAPI}',` +} environmentKey: '${envId}' -); - +}); // Identify a user, set their traits and retrieve the flags const traits = { ${TRAIT_NAME}: 'robin_reliant' }; const flags = await flagsmith.getIdentityFlags('${USER_ID}', traits); diff --git a/frontend/common/code-help/traits/traits-php.js b/frontend/common/code-help/traits/traits-php.js index c433b2e1952d..e889ea288e43 100644 --- a/frontend/common/code-help/traits/traits-php.js +++ b/frontend/common/code-help/traits/traits-php.js @@ -1,6 +1,9 @@ +import Constants from 'common/constants' module.exports = (envId, { TRAIT_NAME }, userId) => `use Flagsmith\\Flagsmith; -$flagsmith = new Flagsmith('${envId}'); +$flagsmith = new Flagsmith('${envId}'${ + Constants.isCustomFlagsmithUrl && `,\n '${Project.flagsmithClientAPI}'\n` +}); $traits = (object) [ '${TRAIT_NAME}' => 42 ]; diff --git a/frontend/common/code-help/traits/traits-python.js b/frontend/common/code-help/traits/traits-python.js index 940338035f52..45f65e79c72f 100644 --- a/frontend/common/code-help/traits/traits-python.js +++ 
b/frontend/common/code-help/traits/traits-python.js @@ -1,10 +1,15 @@ +import Constants from 'common/constants' + module.exports = ( envId, { TRAIT_NAME }, userId, ) => `from flagsmith import Flagsmith -flagsmith = Flagsmith(environment_key="${envId}") +flagsmith = Flagsmith(environment_key="${envId}"${ + Constants.isCustomFlagsmithUrl && + `,\n api_url="${Project.flagsmithClientAPI}"\n` +}) traits = {"${TRAIT_NAME}": 42} diff --git a/frontend/common/code-help/traits/traits-react.js b/frontend/common/code-help/traits/traits-react.js index b698f68d8dca..dd80794752c0 100644 --- a/frontend/common/code-help/traits/traits-react.js +++ b/frontend/common/code-help/traits/traits-react.js @@ -1,16 +1,22 @@ +import Constants from 'common/constants' + module.exports = ( envId, - { FEATURE_NAME, FEATURE_NAME_ALT, LIB_NAME, TRAIT_NAME, USER_ID }, + { FEATURE_NAME, FEATURE_NAME_ALT, LIB_NAME, NPM_CLIENT, TRAIT_NAME, USER_ID }, userId, ) => ` // Option 1: Initialise with an identity and traits -import { FlagsmithProvider } from 'flagsmith/react'; +import { FlagsmithProvider } from '${NPM_CLIENT}/react'; export default function App() { return ( <FlagsmithProvider options={{ - environmentID: '${envId}', + environmentID: '${envId}',${ + Constants.isCustomFlagsmithUrl + ? 
`\n api: '${Project.flagsmithClientAPI}',` + : '' +} identity: '${userId || USER_ID}', traits: {${TRAIT_NAME}: 21}, }} @@ -21,8 +27,8 @@ export default function App() { } // Option 2: Set traits / identify after initialising -import flagsmith from '${LIB_NAME}'; -import { useFlags, useFlagsmith } from 'flagsmith/react'; +import flagsmith from '${NPM_CLIENT}'; +import { useFlags, useFlagsmith } from '${NPM_CLIENT}/react'; export default function HomePage() { const flags = useFlags(['${FEATURE_NAME}','${FEATURE_NAME_ALT}']); // only causes re-render if specified flag values / traits change diff --git a/frontend/common/code-help/traits/traits-ruby.js b/frontend/common/code-help/traits/traits-ruby.js index dfb745ddde6f..bde960f914e3 100644 --- a/frontend/common/code-help/traits/traits-ruby.js +++ b/frontend/common/code-help/traits/traits-ruby.js @@ -1,8 +1,12 @@ +import Constants from 'common/constants' + module.exports = (envId, { TRAIT_NAME }, userId) => `require "flagsmith" $flagsmith = Flagsmith::Client.new( - environment_key: '${envId}' -) + environment_key="${envId}"${ + Constants.isCustomFlagsmithUrl && + `,\n api_url="${Project.flagsmithClientAPI}"\n` +}) traits = {"${TRAIT_NAME}": 42} diff --git a/frontend/common/code-help/traits/traits-rust.js b/frontend/common/code-help/traits/traits-rust.js index c563a9857b4a..f2ea797712e7 100644 --- a/frontend/common/code-help/traits/traits-rust.js +++ b/frontend/common/code-help/traits/traits-rust.js @@ -1,9 +1,14 @@ +import Constants from 'common/constants' + module.exports = (envId, { USER_ID }, userId) => ` use flagsmith::{Flag, Flagsmith, FlagsmithOptions}; use flagsmith_flag_engine::types::{FlagsmithValue, FlagsmithValueType}; use flagsmith_flag_engine::identities::Trait; -let options = FlagsmithOptions {..Default::default()}; +let options = FlagsmithOptions {${ + Constants.isCustomFlagsmithUrl && + `api_url: "${Project.flagsmithClientAPI}".to_string(),\n` +}..Default::default()}; let flagsmith = Flagsmith::new( 
"${envId}".to_string(), options, diff --git a/frontend/common/constants.ts b/frontend/common/constants.ts index 3e39d72ac042..4ee96ed23665 100644 --- a/frontend/common/constants.ts +++ b/frontend/common/constants.ts @@ -1,6 +1,6 @@ import { OAuthType } from './types/requests' import { SegmentCondition } from './types/responses' - +import Project from './project' const keywords = { FEATURE_FUNCTION: 'myCoolFeature', FEATURE_NAME: 'my_cool_feature', @@ -113,7 +113,7 @@ export default { 'PHP': require('./code-help/init/init-php')(envId, keywords), 'Python': require('./code-help/init/init-python')(envId, keywords), 'React': require('./code-help/init/init-react')(envId, keywords), - 'React Native': require('./code-help/init/init-js')( + 'React Native': require('./code-help/init/init-react')( envId, keywordsReactNative, ), @@ -438,6 +438,8 @@ export default { githubIssue: 'GitHub Issue', githubPR: 'Github PR', }, + isCustomFlagsmithUrl: + Project.flagsmithClientAPI !== 'https://edge.api.flagsmith.com/api/v1/', modals: { 'PAYMENT': 'Payment Modal', }, From a395a470924628f6d239fb72f964a282b61a2e6b Mon Sep 17 00:00:00 2001 From: Zach Aysan Date: Wed, 14 Aug 2024 09:28:59 -0400 Subject: [PATCH 125/247] fix: Add decorator for running task every hour (#4481) --- api/organisations/tasks.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/api/organisations/tasks.py b/api/organisations/tasks.py index f03a9fba3e81..fea2b492cf23 100644 --- a/api/organisations/tasks.py +++ b/api/organisations/tasks.py @@ -79,6 +79,9 @@ def send_org_subscription_cancelled_alert( ) +@register_recurring_task( + run_every=timedelta(hours=6), +) @register_task_handler() def update_organisation_subscription_information_influx_cache(): subscription_info_cache.update_caches((SubscriptionCacheEntity.INFLUX,)) From f9cc1b0f1ca81182377efa5a755bc18d6087a595 Mon Sep 17 00:00:00 2001 From: Matthew Elwell Date: Wed, 14 Aug 2024 17:25:14 +0100 Subject: [PATCH 126/247] chore(admin): better token search in admin 
(#4497) --- api/custom_auth/admin.py | 11 +++++++++++ 1 file changed, 11 insertions(+) create mode 100644 api/custom_auth/admin.py diff --git a/api/custom_auth/admin.py b/api/custom_auth/admin.py new file mode 100644 index 000000000000..4eb8673c63fb --- /dev/null +++ b/api/custom_auth/admin.py @@ -0,0 +1,11 @@ +from django.contrib import admin +from rest_framework.authtoken.admin import TokenAdmin +from rest_framework.authtoken.models import TokenProxy + + +class CustomTokenAdmin(TokenAdmin): + search_fields = ("user__email",) + + +admin.site.unregister(TokenProxy) +admin.site.register(TokenProxy, CustomTokenAdmin) From 6ba44f844bdb5c5295edb71da1d9396c8ec5755c Mon Sep 17 00:00:00 2001 From: Kyle Johnson Date: Wed, 14 Aug 2024 21:51:50 +0100 Subject: [PATCH 127/247] chore: Plan based access UI (#4281) --- frontend/common/constants.ts | 16 +- frontend/common/utils/utils.tsx | 153 +++--- frontend/web/components/App.js | 9 +- frontend/web/components/AuditLog.tsx | 25 +- frontend/web/components/ButterBar.tsx | 2 +- frontend/web/components/EditPermissions.tsx | 475 ++++++++---------- frontend/web/components/ErrorMessage.js | 2 +- frontend/web/components/FlagOwnerGroups.js | 44 +- frontend/web/components/FlagOwners.js | 58 +-- frontend/web/components/PermissionsTabs.tsx | 5 +- frontend/web/components/PlanBasedAccess.tsx | 231 +++++++++ frontend/web/components/SamlTab.tsx | 5 +- frontend/web/components/Setting.tsx | 64 +++ frontend/web/components/SettingsButton.tsx | 39 +- .../web/components/SimpleTwoFactor/index.js | 2 +- frontend/web/components/Tooltip.tsx | 9 +- frontend/web/components/base/forms/Button.tsx | 20 +- .../web/components/metadata/MetadataPage.tsx | 7 +- frontend/web/components/modals/CreateFlag.js | 95 ++-- .../web/components/modals/CreateProject.js | 41 +- frontend/web/components/modals/InviteUsers.js | 24 +- frontend/web/components/modals/Payment.js | 30 ++ .../components/pages/AccountSettingsPage.js | 48 +- .../web/components/pages/AuditLogPage.tsx | 27 
+- .../components/pages/ChangeRequestsPage.js | 36 +- .../pages/EnvironmentSettingsPage.js | 353 ++++++------- .../components/pages/FeatureHistoryPage.tsx | 19 +- .../pages/OrganisationSettingsPage.js | 46 +- .../components/pages/ProjectSettingsPage.js | 171 +++---- .../components/pages/ScheduledChangesPage.js | 187 +++---- .../pages/UsersAndPermissionsPage.tsx | 71 ++- frontend/web/styles/_variables.scss | 16 +- frontend/web/styles/project/_buttons.scss | 27 +- frontend/web/styles/project/_tooltips.scss | 3 + frontend/web/styles/project/_utils.scss | 7 + 35 files changed, 1296 insertions(+), 1071 deletions(-) create mode 100644 frontend/web/components/PlanBasedAccess.tsx create mode 100644 frontend/web/components/Setting.tsx diff --git a/frontend/common/constants.ts b/frontend/common/constants.ts index 4ee96ed23665..410fcbab3414 100644 --- a/frontend/common/constants.ts +++ b/frontend/common/constants.ts @@ -1,5 +1,7 @@ import { OAuthType } from './types/requests' import { SegmentCondition } from './types/responses' +import Utils from './utils/utils' + import Project from './project' const keywords = { FEATURE_FUNCTION: 'myCoolFeature', @@ -348,6 +350,12 @@ export default { } }, 'VIEW_FEATURE': { 'category': 'Features', 'event': 'Feature viewed' }, + VIEW_LOCKED_FEATURE: (feature: string) => { + return { + 'category': 'Locked Feature', + 'event': `View Locked Feature ${feature}`, + } + }, 'VIEW_SEGMENT': { 'category': 'Segment', 'event': 'Segment viewed' }, 'VIEW_USER_FEATURE': { 'category': 'User Features', @@ -434,6 +442,13 @@ export default { 'TRAITS_ID': 150, }, }, + getUpgradeUrl: (feature?: string) => { + return Utils.isSaas() + ? '/organisation-settings?tab=billing' + : `https://www.flagsmith.com/pricing${ + feature ? 
`utm_source=${feature}` : '' + }` + }, githubType: { githubIssue: 'GitHub Issue', githubPR: 'Github PR', @@ -549,5 +564,4 @@ export default { '#DE3163', ], untaggedTag: { color: '#dedede', label: 'Untagged' }, - upgradeURL: '/organisation-settings?tab=billing', } diff --git a/frontend/common/utils/utils.tsx b/frontend/common/utils/utils.tsx index d4e6e6e54676..ef8f56367aed 100644 --- a/frontend/common/utils/utils.tsx +++ b/frontend/common/utils/utils.tsx @@ -25,6 +25,25 @@ import Format from './format' const semver = require('semver') +export type PaidFeature = + | 'FLAG_OWNERS' + | 'RBAC' + | 'AUDIT' + | 'FORCE_2FA' + | '4_EYES' + | 'STALE_FLAGS' + | 'VERSIONING' + | 'AUTO_SEATS' + | 'METADATA' + | 'REALTIME' + | 'SAML' + | 'SCHEDULE_FLAGS' + | 'CREATE_ADDITIONAL_PROJECT' + | '2FA' + +// Define a type for plan categories +type Plan = 'start-up' | 'scale-up' | 'enterprise' | null + export const planNames = { enterprise: 'Enterprise', free: 'Free', @@ -268,6 +287,20 @@ const Utils = Object.assign({}, require('./base/_utils'), { getManageUserPermissionDescription() { return 'Manage Identities' }, + getNextPlan: (skipFree?: boolean) => { + const currentPlan = Utils.getPlanName(AccountStore.getActiveOrgPlan()) + switch (currentPlan) { + case planNames.free: { + return skipFree ? 
planNames.startup : planNames.scaleUp + } + case planNames.startup: { + return planNames.startup + } + default: { + return planNames.enterprise + } + } + }, getOrganisationHomePage(id?: string) { const orgId = id || AccountStore.getOrganisation()?.id if (!orgId) { @@ -275,6 +308,7 @@ const Utils = Object.assign({}, require('./base/_utils'), { } return `/organisation/${orgId}/projects` }, + getPermissionList( isAdmin: boolean, permissions: string[] | undefined | null, @@ -305,8 +339,10 @@ const Utils = Object.assign({}, require('./base/_utils'), { .map((item) => `${Format.enumeration.get(item)}`), } }, - getPlanName: (plan: string) => { + if (plan && plan.includes('free')) { + return planNames.free + } if (plan && plan.includes('scale-up')) { return planNames.scaleUp } @@ -324,77 +360,27 @@ const Utils = Object.assign({}, require('./base/_utils'), { } return planNames.free }, - getPlanPermission: (plan: string, permission: string) => { - let valid = true + getPlanPermission: (plan: string, feature: PaidFeature) => { const planName = Utils.getPlanName(plan) - if (!plan || planName === planNames.free) { return false } const isScaleupOrGreater = planName !== planNames.startup const isEnterprise = planName === planNames.enterprise - const isSaas = Utils.isSaas() - switch (permission) { - case 'FLAG_OWNERS': { - valid = isScaleupOrGreater - break - } - case 'CREATE_ADDITIONAL_PROJECT': { - valid = true // startup or greater - break - } - case '2FA': { - valid = true // startup or greater - break - } - case 'RBAC': { - valid = isScaleupOrGreater - break - } - case 'AUDIT': { - valid = isScaleupOrGreater - break - } - case 'AUTO_SEATS': { - valid = isScaleupOrGreater && !isEnterprise - break - } - case 'FORCE_2FA': { - valid = isScaleupOrGreater - break - } - case 'SCHEDULE_FLAGS': { - valid = true // startup or greater - break - } - case '4_EYES': { - valid = isScaleupOrGreater - break - } - case 'REALTIME': { - valid = isEnterprise && isSaas - break - } - case 
'STALE_FLAGS': { - valid = isEnterprise - break - } - case 'SAML': { - valid = isEnterprise - break - } - case 'METADATA': { - valid = isEnterprise - break - } - default: - valid = true - break + if (feature === 'AUTO_SEATS') { + return isScaleupOrGreater && !isEnterprise + } + + const requiredPlan = Utils.getRequiredPlan(feature) + if (requiredPlan === 'enterprise') { + return isEnterprise + } else if (requiredPlan === 'scale-up') { + return isScaleupOrGreater } - return valid + return true }, - getPlansPermission: (permission: string) => { - const isOrgPermission = permission !== '2FA' + getPlansPermission: (feature: PaidFeature) => { + const isOrgPermission = feature !== '2FA' const plans = isOrgPermission ? AccountStore.getActiveOrgPlan() ? [AccountStore.getActiveOrgPlan()] @@ -405,16 +391,51 @@ const Utils = Object.assign({}, require('./base/_utils'), { return false } const found = _.find( - plans.map((plan: string) => Utils.getPlanPermission(plan, permission)), + plans.map((plan: string) => Utils.getPlanPermission(plan, feature)), (perm) => !!perm, ) return !!found }, - getProjectColour(index: number) { return Constants.projectColors[index % (Constants.projectColors.length - 1)] }, + getRequiredPlan: (feature: PaidFeature) => { + let plan + switch (feature) { + case 'FLAG_OWNERS': + case 'RBAC': + case 'AUDIT': + case 'FORCE_2FA': + case '4_EYES': { + plan = 'scale-up' + break + } + case 'STALE_FLAGS': + case 'REALTIME': + case 'METADATA': + case 'SAML': { + plan = 'enterprise' + break + } + + case 'SCHEDULE_FLAGS': + case 'CREATE_ADDITIONAL_PROJECT': + case '2FA': { + plan = 'start-up' // startup or greater + break + } + default: { + plan = null + break + } + } + if (plan && !Utils.isSaas()) { + plan = 'enterprise' + } + return plan as Plan + }, + getSDKEndpoint(_project: ProjectType) { const project = _project || ProjectStore.model diff --git a/frontend/web/components/App.js b/frontend/web/components/App.js index 5df9a8ed880f..2c23469c543a 100644 --- 
a/frontend/web/components/App.js +++ b/frontend/web/components/App.js @@ -578,15 +578,8 @@ const App = class extends Component { id={projectId} > {({ permission }) => - permission && - Utils.getPlansPermission('RBAC') && ( + permission && ( } id='audit-log-link' to={`/project/${projectId}/audit-log`} diff --git a/frontend/web/components/AuditLog.tsx b/frontend/web/components/AuditLog.tsx index 8242acff616b..1cb2ad201807 100644 --- a/frontend/web/components/AuditLog.tsx +++ b/frontend/web/components/AuditLog.tsx @@ -10,6 +10,7 @@ import Tag from './tags/Tag' import PanelSearch from './PanelSearch' import JSONReference from './JSONReference' import moment from 'moment' +import PlanBasedBanner from './PlanBasedAccess' type AuditLogType = { environmentId: string @@ -159,18 +160,6 @@ const AuditLog: FC = (props) => { const { env: envFilter } = Utils.fromParam() - const hasRbacPermission = Utils.getPlansPermission('AUDIT') - if (!hasRbacPermission) { - return ( -
-
- To access this feature please upgrade your account to scaleup or - higher. -
-
- ) - } - return ( = (props) => { ) } -export default withRouter(AuditLog as any) +type AuditLogWrapperType = AuditLogType + +const AuditLogWrapper: FC = (props) => { + return ( + + + + ) +} + +export default withRouter(AuditLogWrapper as any) diff --git a/frontend/web/components/ButterBar.tsx b/frontend/web/components/ButterBar.tsx index c7ff78ec0349..2dc99c4ccf26 100644 --- a/frontend/web/components/ButterBar.tsx +++ b/frontend/web/components/ButterBar.tsx @@ -97,7 +97,7 @@ const ButterBar: React.FC = ({ billingStatus, projectId }) => { {Utils.getFlagsmithHasFeature('read_only_mode') && (
Your organisation is over its usage limit, please{' '} - upgrade your plan. + upgrade your plan.
)} {Utils.getFlagsmithHasFeature('show_dunning_banner') && diff --git a/frontend/web/components/EditPermissions.tsx b/frontend/web/components/EditPermissions.tsx index 84478b3bdb95..957e83f7e065 100644 --- a/frontend/web/components/EditPermissions.tsx +++ b/frontend/web/components/EditPermissions.tsx @@ -61,6 +61,8 @@ import InputGroup from './base/forms/InputGroup' import classNames from 'classnames' import OrganisationProvider from 'common/providers/OrganisationProvider' import { useHasPermission } from 'common/providers/Permission' +import PlanBasedAccess from './PlanBasedAccess' + const Project = require('common/project') type EditPermissionModalType = { @@ -667,7 +669,6 @@ const _EditPermissionsModal: FC = withAdminPermissions( const rolesAdded = getRoles(roles, rolesSelected || []) const isAdmin = admin() - const hasRbacPermission = Utils.getPlansPermission('RBAC') const [search, setSearch] = useState() @@ -676,195 +677,169 @@ const _EditPermissionsModal: FC = withAdminPermissions(
) : ( -
-
- {level !== 'organisation' && ( -
- - -
- Administrator -
-
- {hasRbacPermission ? ( - `Full View and Write permissions for the given ${Format.camelCase( - level, - )}.` - ) : ( - - Role-based access is not available on our Free Plan. - Please visit{' '} - - our Pricing Page - {' '} - for more information on our licensing options. - - )} -
-
- { - toggleAdmin() - setValueChanged(true) - }} - checked={isAdmin} - /> -
-
- )} - {!hasRbacPermission && ( - - Role-based access is not available on our Free Plan. Please visit{' '} - - our Pricing Page - {' '} - for more information on our licensing options. - - )} - { - const name = Format.enumeration.get(item.key).toLowerCase() - return name.includes(search?.toLowerCase() || '') - }} - title='Permissions' - className='no-pad mb-2' - items={permissions} - renderRow={(p: AvailablePermission) => { - const levelUpperCase = level.toUpperCase() - const disabled = - level !== 'organisation' && - p.key !== `VIEW_${levelUpperCase}` && - !hasPermission(`VIEW_${levelUpperCase}`) - return ( - - - - {Format.enumeration.get(p.key)} -
{p.description}
-
- { - setValueChanged(true) - togglePermission(p.key) - }} - disabled={ - disabled || admin() || !hasRbacPermission || saving - } - checked={!disabled && hasPermission(p.key)} - /> -
+ +
+
+ {level !== 'organisation' && ( +
+ + +
+ Administrator +
+
+ { + toggleAdmin() + setValueChanged(true) + }} + checked={isAdmin} + />
- ) - }} - /> +
+ )} + { + const name = Format.enumeration.get(item.key).toLowerCase() + return name.includes(search?.toLowerCase() || '') + }} + title='Permissions' + className='no-pad mb-2' + items={permissions} + renderRow={(p: AvailablePermission) => { + const levelUpperCase = level.toUpperCase() + const disabled = + level !== 'organisation' && + p.key !== `VIEW_${levelUpperCase}` && + !hasPermission(`VIEW_${levelUpperCase}`) + return ( + + + + {Format.enumeration.get(p.key)} +
+ {p.description} +
+
+ { + setValueChanged(true) + togglePermission(p.key) + }} + disabled={disabled || admin() || saving} + checked={!disabled && hasPermission(p.key)} + /> +
+
+ ) + }} + /> -

- This will edit the permissions for{' '} - - {isGroup ? ( - `the ${group?.name || ''} group` - ) : user ? ( - <> - {user.first_name || ''} {user.last_name || ''} - - ) : role ? ( - ` ${role.name}` - ) : ( - ` ${name}` - )} - - . -

+

+ This will edit the permissions for{' '} + + {isGroup ? ( + `the ${group?.name || ''} group` + ) : user ? ( + <> + {user.first_name || ''} {user.last_name || ''} + + ) : role ? ( + ` ${role.name}` + ) : ( + ` ${name}` + )} + + . +

- {parentError && !role && ( -
- - The selected {isGroup ? 'group' : 'user'} does not have explicit - user permissions to view this {parentLevel}. If the user does - not belong to any groups with this permissions, you may have to - adjust their permissions in{' '} - { - if (parentSettingsLink) { - push(parentSettingsLink) - } - closeModal() - }} - > - {parentLevel} settings - - . - + {parentError && !role && ( +
+ + The selected {isGroup ? 'group' : 'user'} does not have + explicit user permissions to view this {parentLevel}. If the + user does not belong to any groups with this permissions, you + may have to adjust their permissions in{' '} + { + if (parentSettingsLink) { + push(parentSettingsLink) + } + closeModal() + }} + > + {parentLevel} settings + + . + +
+ )} +
+ {roles && level === 'organisation' && ( + + + + Roles: + {rolesAdded?.map((r) => ( + removeOwner(r.id)} + className='chip' + style={{ marginBottom: 4, marginTop: 4 }} + > + {r.name} + + + + + ))} + + +
+ } + type='text' + title='Assign roles' + tooltip='Assigns what role the user/group will have' + inputProps={{ + className: 'full-width', + style: { minHeight: 80 }, + }} + className='full-width' + placeholder='Add an optional description...' + /> + + )} + {level !== 'environment' && level !== 'project' && ( +
+ v.role)} + onAdd={addRole} + onRemove={removeOwner} + isOpen={showRoles} + onToggle={() => setShowRoles(!showRoles)} + />
)}
- {roles && level === 'organisation' && ( - - - - Roles: - {rolesAdded?.map((r) => ( - removeOwner(r.id)} - className='chip' - style={{ marginBottom: 4, marginTop: 4 }} - > - {r.name} - - - - - ))} - - -
- } - type='text' - title='Assign roles' - tooltip='Assigns what role the user/group will have' - inputProps={{ - className: 'full-width', - style: { minHeight: 80 }, - }} - className='full-width' - placeholder='Add an optional description...' - /> - - )} - {level !== 'environment' && level !== 'project' && ( -
- v.role)} - onAdd={addRole} - onRemove={removeOwner} - isOpen={showRoles} - onToggle={() => setShowRoles(!showRoles)} - /> -
- )} -
+ ) }), ) @@ -940,7 +915,6 @@ const EditPermissions: FC = (props) => { 'p-0 side-modal', ) } - const hasRbacPermission = Utils.getPlansPermission('RBAC') return (
@@ -1103,80 +1077,71 @@ const EditPermissions: FC = (props) => { - {hasRbacPermission ? ( - <> - -
{roleTabTitle}
-
- -
- Roles -
-
- Description -
+ + +
{roleTabTitle}
+
+ +
+ Roles +
+
+ Description +
+
+ } + renderRow={(role: Role) => ( + + editRolePermissions(role)} + className='table-column px-3' + style={{ + width: rolesWidths[0], + }} + > + {role.name} - } - renderRow={(role: Role) => ( editRolePermissions(role)} + style={{ + width: rolesWidths[1], + }} > - editRolePermissions(role)} - className='table-column px-3' - style={{ - width: rolesWidths[0], - }} - > - {role.name} - - editRolePermissions(role)} - style={{ - width: rolesWidths[1], - }} - > - {role.description} - + {role.description} - )} - renderNoResults={ - -
- - {`You currently have no roles with ${level} permissions.`} - -
-
- } - isLoading={false} - /> - - ) : ( -
- - To use role features you have to upgrade your - plan. - -
- )} +
+ )} + renderNoResults={ + +
+ + {`You currently have no roles with ${level} permissions.`} + +
+
+ } + isLoading={false} + /> +
diff --git a/frontend/web/components/ErrorMessage.js b/frontend/web/components/ErrorMessage.js index b50f644e6093..b3961117ef3f 100644 --- a/frontend/web/components/ErrorMessage.js +++ b/frontend/web/components/ErrorMessage.js @@ -50,7 +50,7 @@ export default class ErrorMessage extends PureComponent { + + ) : ( + + )} +
+ ) + + if (theme === 'badge') { + if (hasPlan) { + return null + } + return ( + + ) + } else if (theme === 'description') { + if (hasPlan) { + return ( +

+ {featureDescriptions[feature].description} +

+ ) + } + return ( +
+
+
{featureDescriptions[feature].description}
+ {ctas} +
+
+ ) + } + if (hasPlan) { + return <>{children} + } + return ( +
+

+ {featureDescriptions[feature].title} + +

+ +
+ ) +} + +export default PlanBasedBanner + +export const getPlanBasedOption = function ( + data: { label: string; value: any }, + feature: PaidFeature, +) { + return { + ...data, + isDisabled: !Utils.getPlansPermission(feature), + label: ( +
+ {data.label} + +
+ ), + } +} diff --git a/frontend/web/components/SamlTab.tsx b/frontend/web/components/SamlTab.tsx index bb8f47bcbbe0..a456f77585a1 100644 --- a/frontend/web/components/SamlTab.tsx +++ b/frontend/web/components/SamlTab.tsx @@ -12,6 +12,7 @@ import { import CreateSAML from './modals/CreateSAML' import Switch from './Switch' import { SAMLConfiguration } from 'common/types/responses' +import PlanBasedBanner from './PlanBasedAccess' export type SamlTabType = { organisationId: number @@ -34,7 +35,7 @@ const SamlTab: FC = ({ organisationId }) => { } return ( -
+ = ({ organisationId }) => { )} /> -
+ ) } diff --git a/frontend/web/components/Setting.tsx b/frontend/web/components/Setting.tsx new file mode 100644 index 000000000000..a093fffaa021 --- /dev/null +++ b/frontend/web/components/Setting.tsx @@ -0,0 +1,64 @@ +import React, { FC, ReactNode } from 'react' +import Utils, { PaidFeature } from 'common/utils/utils' +import Switch from './Switch' +import classNames from 'classnames' +import PlanBasedBanner, { featureDescriptions } from './PlanBasedAccess' + +type PlanBasedAccessSettingType = { + feature?: PaidFeature + disabled?: boolean + checked?: boolean + onChange?: (newValue: boolean) => void + title?: ReactNode + description?: ReactNode + component?: ReactNode + 'data-test'?: string +} + +const Setting: FC = ({ + checked, + component, + description, + disabled, + feature, + onChange, + title, + ...props +}) => { + const hasPlan = !feature || Utils.getPlansPermission(feature) + + return ( + <> + + {!component && ( +
+ +
+ )} +
+ + {feature ? featureDescriptions[feature].title : title} + + {!!feature && } +
+
+ {feature ? ( + + ) : ( +

{description}

+ )} + {!!feature && !!hasPlan && component} + + ) +} + +export default Setting diff --git a/frontend/web/components/SettingsButton.tsx b/frontend/web/components/SettingsButton.tsx index 5379c194e7b5..c32540433752 100644 --- a/frontend/web/components/SettingsButton.tsx +++ b/frontend/web/components/SettingsButton.tsx @@ -1,20 +1,45 @@ import React, { FC, ReactNode } from 'react' import Icon from './Icon' +import Utils, { PaidFeature } from 'common/utils/utils' +import classNames from 'classnames' +import PlanBasedBanner from './PlanBasedAccess' type SettingsButtonType = { onClick: () => void children: ReactNode + content?: ReactNode + feature?: PaidFeature } -const SettingsButton: FC = ({ children, onClick }) => { +const SettingsButton: FC = ({ + children, + content, + feature, + onClick, +}) => { + const hasPlan = !feature || Utils.getPlansPermission(feature) return ( - - - + <> + + + + {!!feature && } + - + {content} + ) } diff --git a/frontend/web/components/SimpleTwoFactor/index.js b/frontend/web/components/SimpleTwoFactor/index.js index 79b2d04fcaba..2527f1bd8f3a 100644 --- a/frontend/web/components/SimpleTwoFactor/index.js +++ b/frontend/web/components/SimpleTwoFactor/index.js @@ -42,7 +42,7 @@ export default class TheComponent extends Component {
{!hasEnabled && (
-
+
diff --git a/frontend/web/components/Tooltip.tsx b/frontend/web/components/Tooltip.tsx index 842cb314f1dd..a55b52ca8672 100644 --- a/frontend/web/components/Tooltip.tsx +++ b/frontend/web/components/Tooltip.tsx @@ -1,6 +1,7 @@ import React, { FC, ReactElement, ReactNode, useRef } from 'react' import ReactTooltip, { TooltipProps as _TooltipProps } from 'react-tooltip' import Utils from 'common/utils/utils' +import classNames from 'classnames' export type TooltipProps = { title: ReactNode @@ -8,6 +9,7 @@ export type TooltipProps = { place?: _TooltipProps['place'] plainText?: boolean titleClassName?: string + tooltipClassName?: string } const Tooltip: FC = ({ @@ -16,6 +18,7 @@ const Tooltip: FC = ({ plainText, title, titleClassName, + tooltipClassName, }) => { const id = Utils.GUID() @@ -31,7 +34,11 @@ const Tooltip: FC = ({ )} {!!children && ( - + {plainText ? ( `${children}` ) : ( diff --git a/frontend/web/components/base/forms/Button.tsx b/frontend/web/components/base/forms/Button.tsx index 92a2642beb4d..b6d501566370 100644 --- a/frontend/web/components/base/forms/Button.tsx +++ b/frontend/web/components/base/forms/Button.tsx @@ -2,6 +2,8 @@ import cn from 'classnames' import { ButtonHTMLAttributes, FC, HTMLAttributeAnchorTarget } from 'react' import Icon, { IconName } from 'components/Icon' import Constants from 'common/constants' +import Utils, { PaidFeature } from 'common/utils/utils' +import PlanBasedBanner from 'components/PlanBasedAccess' export const themeClassNames = { danger: 'btn btn-danger', @@ -27,6 +29,7 @@ export type ButtonType = ButtonHTMLAttributes & { iconLeftColour?: keyof typeof Constants.colours iconLeft?: IconName href?: string + feature?: PaidFeature target?: HTMLAttributeAnchorTarget theme?: keyof typeof themeClassNames size?: keyof typeof sizeClassNames @@ -35,6 +38,7 @@ export type ButtonType = ButtonHTMLAttributes & { export const Button: FC = ({ children, className, + feature, href, iconLeft, iconLeftColour, @@ -47,11 +51,13 @@ export 
const Button: FC = ({ type = 'button', ...rest }) => { - return href ? ( + const hasPlan = feature ? Utils.getPlansPermission(feature) : true + return href || !hasPlan ? ( {!!iconLeft && ( @@ -62,6 +68,15 @@ export const Button: FC = ({ /> )} {children} + {!!iconRight && ( + + )} ) : (
+ ) } diff --git a/frontend/web/components/modals/CreateFlag.js b/frontend/web/components/modals/CreateFlag.js index f24b7ea3fbe9..8e938fe6d690 100644 --- a/frontend/web/components/modals/CreateFlag.js +++ b/frontend/web/components/modals/CreateFlag.js @@ -41,6 +41,7 @@ import { getGithubIntegration } from 'common/services/useGithubIntegration' import { removeUserOverride } from 'components/RemoveUserOverride' import ExternalResourcesLinkTab from 'components/ExternalResourcesLinkTab' import { saveFeatureWithValidation } from 'components/saveFeatureWithValidation' +import PlanBasedBanner from 'components/PlanBasedAccess' const CreateFlag = class extends Component { static displayName = 'CreateFlag' @@ -541,7 +542,6 @@ const CreateFlag = class extends Component { const is4Eyes = !!environment && Utils.changeRequestsEnabled(environment.minimum_change_request_approvals) - const canSchedule = Utils.getPlansPermission('SCHEDULE_FLAGS') const project = ProjectStore.model const caseSensitive = project?.only_allow_lower_case_feature_names const regex = project?.feature_name_regex @@ -565,7 +565,7 @@ const CreateFlag = class extends Component { const Settings = (projectAdmin, createFeature, featureContentType) => ( <> {!identity && this.state.tags && ( - + permission && ( <> - + - + + ) } )} - + {!identity && ( - + + {!is4Eyes && ( <> - {canSchedule ? ( - - ) : ( - - {isSaving - ? existingChangeRequest - ? 'Updating Change Request' - : 'Scheduling Update' - : existingChangeRequest - ? 
'Update Change Request' - : 'Schedule Update'} - - } - > - { - 'This feature is available on our start-up plan' - } - - )} + )} diff --git a/frontend/web/components/modals/CreateProject.js b/frontend/web/components/modals/CreateProject.js index 36fe18883264..945229cb1575 100644 --- a/frontend/web/components/modals/CreateProject.js +++ b/frontend/web/components/modals/CreateProject.js @@ -4,6 +4,7 @@ import ErrorMessage from 'components/ErrorMessage' import Button from 'components/base/forms/Button' import Constants from 'common/constants' import { setInterceptClose } from './base/ModalDefault' +import PlanBasedAccess from 'components/PlanBasedAccess' const CreateProject = class extends Component { static displayName = 'CreateProject' @@ -52,12 +53,11 @@ const CreateProject = class extends Component { {({ createProject, error, isSaving, projects }) => { const hasProject = !!projects && !!projects.length - const canCreate = !!Utils.getPlansPermission( - 'CREATE_ADDITIONAL_PROJECT', - ) - const disableCreate = !canCreate && hasProject - - return ( + const canCreate = + !hasProject || + !!Utils.getPlansPermission('CREATE_ADDITIONAL_PROJECT') + const disableCreate = !canCreate + const inner = (
- {disableCreate && ( - - View and manage multiple projects in your organisation with - the{' '} - { - document.location.replace(Constants.upgradeURL) - }} - > - Startup plan - - - )} (this.input = e)} data-test='projectName' @@ -108,7 +94,7 @@ const CreateProject = class extends Component { type='submit' data-test='create-project-btn' id='create-project-btn' - disabled={isSaving || !name} + disabled={!canCreate || isSaving || !name} className='text-right' > {isSaving ? 'Creating' : 'Create Project'} @@ -117,6 +103,19 @@ const CreateProject = class extends Component {
) + if (hasProject) { + return ( + <> + + {inner} + + ) + } + return inner }}
) diff --git a/frontend/web/components/modals/InviteUsers.js b/frontend/web/components/modals/InviteUsers.js index 20d51780c804..19b7aa258d9a 100644 --- a/frontend/web/components/modals/InviteUsers.js +++ b/frontend/web/components/modals/InviteUsers.js @@ -5,6 +5,7 @@ import Constants from 'common/constants' import Icon from 'components/Icon' import { add } from 'ionicons/icons' import { IonIcon } from '@ionic/react' +import { getPlanBasedOption } from 'components/PlanBasedAccess' const InviteUsers = class extends Component { static displayName = 'InviteUsers' @@ -63,7 +64,6 @@ const InviteUsers = class extends Component { render() { const { invites } = this.state - const hasRbacPermission = Utils.getPlansPermission('RBAC') return ( @@ -114,14 +114,20 @@ const InviteUsers = class extends Component { value={invite.role} onChange={(role) => this.onChange(index, 'role', role)} className='pl-2 react-select' - options={_.map(Constants.roles, (label, value) => ({ - isDisabled: value !== 'ADMIN' && !hasRbacPermission, - label: - value !== 'ADMIN' && !hasRbacPermission - ? `${label} - Please upgrade for role based access` - : label, - value, - }))} + options={_.map(Constants.roles, (label, value) => + value === 'ADMIN' + ? { + label, + value, + } + : getPlanBasedOption( + { + label, + value, + }, + 'RBAC', + ), + )} /> {invites.length > 1 ? ( diff --git a/frontend/web/components/modals/Payment.js b/frontend/web/components/modals/Payment.js index 3503c1c42959..d6e44ffbde97 100644 --- a/frontend/web/components/modals/Payment.js +++ b/frontend/web/components/modals/Payment.js @@ -286,6 +286,16 @@ const Payment = class extends Component {
Scheduled Flags
+
  • + + + + +
    + Feature version history (1 per feature) +
    +
    +
  • @@ -510,6 +520,16 @@ const Payment = class extends Component {
    Scheduled Flags
  • +
  • + + + + +
    + Feature version history (1 per feature) +
    +
    +
  • @@ -718,6 +738,16 @@ const Payment = class extends Component {
    Scheduled Flags
  • +
  • + + + + +
    + Unlimited feature version history +
    +
    +
  • diff --git a/frontend/web/components/pages/AccountSettingsPage.js b/frontend/web/components/pages/AccountSettingsPage.js index 920fb67fb8f9..b1e0540af67e 100644 --- a/frontend/web/components/pages/AccountSettingsPage.js +++ b/frontend/web/components/pages/AccountSettingsPage.js @@ -18,6 +18,7 @@ import PageTitle from 'components/PageTitle' import { Link } from 'react-router-dom' import InfoMessage from 'components/InfoMessage' import Constants from 'common/constants' +import Setting from 'components/Setting' class TheComponent extends Component { static displayName = 'TheComponent' @@ -152,7 +153,6 @@ class TheComponent extends Component { {({}) => { const { isSaving } = this.state const forced2Factor = AccountStore.forced2Factor() - const has2fPermission = Utils.getPlansPermission('2FA') return forced2Factor ? (
    @@ -279,22 +279,16 @@ class TheComponent extends Component {

    - - { - flagsmith.setTrait('json_inspect', v).then(() => { - toast('Updated') - }) - }} - checked={flagsmith.getTrait('json_inspect')} - className='mr-3' - /> -
    Show JSON References
    -
    -

    - Enabling this will allow you to inspect the JSON of - entities such as features within the platform. -

    + { + flagsmith.setTrait('json_inspect', v).then(() => { + toast('Updated') + }) + }} + checked={flagsmith.getTrait('json_inspect')} + title={'Show JSON References'} + description={`Enabling this will allow you to inspect the JSON of entities such as features within the platform.`} + />

    @@ -448,25 +442,7 @@ class TheComponent extends Component { )}
    -
    Two-Factor Authentication
    -

    - Increase your account's security by enabling Two-Factor - Authentication (2FA). -

    -
    -
    - {has2fPermission ? ( - - ) : ( -
    - - Manage payment plan - -
    - )} + } feature='2FA' />
    diff --git a/frontend/web/components/pages/AuditLogPage.tsx b/frontend/web/components/pages/AuditLogPage.tsx index 6fe804f59c79..6b90de97f5cb 100644 --- a/frontend/web/components/pages/AuditLogPage.tsx +++ b/frontend/web/components/pages/AuditLogPage.tsx @@ -1,6 +1,5 @@ import React, { FC, useEffect, useState } from 'react' // we need this to make JSX compile import ConfigProvider from 'common/providers/ConfigProvider' -import ToggleChip from 'components/ToggleChip' import Utils from 'common/utils/utils' import { Project } from 'common/types/responses' import { RouterChildContext } from 'react-router' @@ -8,8 +7,7 @@ import AuditLog from 'components/AuditLog' import ProjectProvider from 'common/providers/ProjectProvider' import PageTitle from 'components/PageTitle' import Tag from 'components/tags/Tag' -import { Link } from 'react-router-dom' -import Constants from 'common/constants' +import { featureDescriptions } from 'components/PlanBasedAccess' type AuditLogType = { router: RouterChildContext['router'] @@ -36,26 +34,13 @@ const AuditLogPage: FC = (props) => { ) } }, [environment]) - const hasRbacPermission = Utils.getPlansPermission('AUDIT') - if (!hasRbacPermission) { - return ( -
    -
    - To access this feature please{' '} - - upgrade your account to scaleup - {' '} - or higher. -
    -
    - ) - } return (
    - - View all activity that occured generically across the project and - specific to this environment. - + {Utils.getPlansPermission('AUDIT') && ( + + {featureDescriptions.AUDIT.description} + + )}
    diff --git a/frontend/web/components/pages/ChangeRequestsPage.js b/frontend/web/components/pages/ChangeRequestsPage.js index f9dddf7ac471..f4a39db037c1 100644 --- a/frontend/web/components/pages/ChangeRequestsPage.js +++ b/frontend/web/components/pages/ChangeRequestsPage.js @@ -13,7 +13,7 @@ import PageTitle from 'components/PageTitle' import { timeOutline } from 'ionicons/icons' import { IonIcon } from '@ionic/react' import Utils from 'common/utils/utils' -import Constants from 'common/constants' +import PlanBasedAccess, { featureDescriptions } from 'components/PlanBasedAccess'; const ChangeRequestsPage = class extends Component { static displayName = 'ChangeRequestsPage' @@ -69,35 +69,19 @@ const ChangeRequestsPage = class extends Component { const environment = ProjectStore.getEnvironment(environmentId) - const has4EyesPermission = Utils.getPlansPermission('4_EYES') return (
    - - View and manage proposed feature state changes. - - - {!has4EyesPermission ? ( -
    - - View and manage your feature changes with a Change Request flow - with our Scale-up plan. - Find out more{' '} - - . - -
    - ) : ( + {!!Utils.getPlansPermission('4_EYES') && ( + + {featureDescriptions['4_EYES'].description} + + )} + +

    {environment && @@ -301,8 +285,8 @@ const ChangeRequestsPage = class extends Component {

    - )} -
    +
    +
    ) } diff --git a/frontend/web/components/pages/EnvironmentSettingsPage.js b/frontend/web/components/pages/EnvironmentSettingsPage.js index 26745fca1e6c..5275b4cf9d38 100644 --- a/frontend/web/components/pages/EnvironmentSettingsPage.js +++ b/frontend/web/components/pages/EnvironmentSettingsPage.js @@ -25,6 +25,7 @@ import AddMetadataToEntity from 'components/metadata/AddMetadataToEntity' import { getSupportedContentType } from 'common/services/useSupportedContentType' import EnvironmentVersioningListener from 'components/EnvironmentVersioningListener' import Format from 'common/utils/format' +import Setting from 'components/Setting' const showDisabledFlagOptions = [ { label: 'Inherit from Project', value: null }, @@ -336,7 +337,7 @@ const EnvironmentSettingsPage = class extends Component {
    General Settings
    -
    +
    (this.input = e)} @@ -393,30 +394,30 @@ const EnvironmentSettingsPage = class extends Component {

    -
    - - - this.setState( - { - banner_text: value - ? `${env.name} Environment` - : null, - }, - this.saveEnv, - ) - } - checked={ - typeof this.state.banner_text === 'string' - } - /> -
    Environment Banner
    -
    -

    - This will show a banner whenever you view its pages, - this is generally used to warn people that they are - viewing and editing a sensitive environment. -

    +
    + + this.setState( + { + banner_text: value + ? `${env.name} Environment` + : null, + }, + this.saveEnv, + ) + } + checked={typeof this.state.banner_text === 'string'} + title={'Environment Banner'} + description={ +
    + This will show a banner whenever you view its + pages. +
    + This is generally used to warn people that they + are viewing and editing a sensitive environment. +
    + } + /> {typeof this.state.banner_text === 'string' && ( {Utils.getFlagsmithHasFeature('feature_versioning') && (
    -
    +
    - - -
    - Feature versioning -
    -
    - -

    - Allows you to attach versions to updating - feature values and segment overrides. This - setting may take up to a minute to take affect. -
    - - Warning! Enabling this is irreversable - -

    + + Allows you to attach versions to updating + feature values and segment overrides. +
    + This setting may take up to a minute to take + affect. +
    +
    + Enabling this is irreversible. +
    +
    + } + disabled={ + use_v2_feature_versioning || + this.state.enabledFeatureVersioning + } + data-test={ + use_v2_feature_versioning + ? 'feature-versioning-enabled' + : 'enable-versioning' + } + checked={use_v2_feature_versioning} + onChange={onEnableVersioning} + />
    )} -
    - - { - this.confirmToggle( - 'Confirm Environment Setting', - 'hide_sensitive_data', - hide_sensitive_data, - ) - }} - /> -
    Hide sensitive data
    -
    -

    - Exclude sensitive data from endpoints returning - flags and identity information to the SDKs or via - our REST API. For full information on the excluded - fields see documentation{' '} - -

    - Warning! Enabling this feature will change the - response from the API and could break your - existing code. -
    -

    +
    + { + this.confirmToggle( + 'Confirm Environment Setting', + 'hide_sensitive_data', + hide_sensitive_data, + ) + }} + description={ +
    + Exclude sensitive data from endpoints returning + flags and identity information to the SDKs or + via our REST API. +
    + For full information on the excluded fields see + documentation{' '} + +
    + Enabling this feature will change the response + from the API and could break your existing + code. +
    +
    + } + />
    - - - - this.setState( - { - minimum_change_request_approvals: v - ? 0 - : null, - }, - this.saveEnv, - ) - } - /> -
    Change Requests
    -
    - {!has4EyesPermission ? ( -

    - View and manage your feature changes with a Change - Request flow with our{' '} - - Scale-up plan - - . Find out more{' '} - - . -

    - ) : ( -

    - Require a minimum number of people to approve - changes to features.{' '} - -

    - )} - + + + this.setState( + { + minimum_change_request_approvals: v + ? 0 + : null, + }, + this.saveEnv, + ) + } + /> {Utils.changeRequestsEnabled( this.state.minimum_change_request_approvals, ) && @@ -630,7 +592,7 @@ const EnvironmentSettingsPage = class extends Component { )}
    - +
    Delete Environment
    @@ -674,7 +636,7 @@ const EnvironmentSettingsPage = class extends Component { json={env} className='mb-4' /> -
    +
    @@ -714,60 +676,49 @@ const EnvironmentSettingsPage = class extends Component {

    - - { - this.setState( - { allow_client_traits: v }, - this.saveEnv, - ) - }} - /> -
    - Allow client SDKs to set user traits -
    -
    -

    - Disabling this option will prevent client SDKs - from using the client key from setting traits. -

    + { + this.setState( + { allow_client_traits: v }, + this.saveEnv, + ) + }} + />
    - - { - this.setState( - { - use_identity_composite_key_for_hashing: - v, - }, - this.saveEnv, - ) - }} - /> -
    - Use Consistent Hashing -
    -
    -

    - Enabling this setting will ensure that - multivariate and percentage split evaluations - made by the API are consistent with those made - by local evaluation mode in our server side - SDKs. -

    - Warning: Toggling this setting will mean that - some users will start receiving different - values for multivariate flags and flags with a - percentage split segment override via the API - / remote evaluation. Values received in local - evaluation mode will not change. -
    -

    + { + this.setState( + { + use_identity_composite_key_for_hashing: v, + }, + this.saveEnv, + ) + }} + title={`Use Consistent Hashing`} + description={ +
    + Enabling this setting will ensure that + multivariate and percentage split + evaluations made by the API are consistent + with those made by local evaluation mode in + our server side SDKs. +
    + Toggling this setting will mean that some + users will start receiving different + values for multivariate flags and flags + with a percentage split segment override + via the API / remote evaluation. Values + received in local evaluation mode will not + change. +
    +
    + } + />
    diff --git a/frontend/web/components/pages/FeatureHistoryPage.tsx b/frontend/web/components/pages/FeatureHistoryPage.tsx index 751667bc37d3..d6a8abb319d3 100644 --- a/frontend/web/components/pages/FeatureHistoryPage.tsx +++ b/frontend/web/components/pages/FeatureHistoryPage.tsx @@ -20,6 +20,8 @@ import TableFilterItem from 'components/tables/TableFilterItem' import moment from 'moment' import { Link } from 'react-router-dom' import DateList from 'components/DateList' +import classNames from 'classnames' +import PlanBasedBanner from 'components/PlanBasedAccess' const widths = [250, 150] type FeatureHistoryPageType = { @@ -59,8 +61,8 @@ const FeatureHistoryPage: FC = ({ match, router }) => { const [selected, setSelected] = useState(null) const live = data?.results?.[0] const [compareToLive, setCompareToLive] = useState(false) - const [diff, setDiff] = useState(null) + const versionLimit = 3 return (
    @@ -89,6 +91,14 @@ const FeatureHistoryPage: FC = ({ match, router }) => {
    + {!!versionLimit && ( + + )} items={data} isLoading={isLoading} @@ -96,10 +106,15 @@ const FeatureHistoryPage: FC = ({ match, router }) => { prevPage={() => setPage(page + 1)} goToPage={setPage} renderRow={(v: TFeatureVersion, i: number) => { + const isOverLimit = !!versionLimit && i + 1 > versionLimit const user = users?.find((user) => v.published_by === user.id) return ( - +
    @@ -232,18 +233,13 @@ const OrganisationSettingsPage = class extends Component { AccountStore.getUser() && AccountStore.getOrganisationRole() === 'ADMIN' ) { - const showSaml = - Utils.getPlanPermission( - organisation.subscription?.plan, - 'SAML', - ) && Utils.getFlagsmithHasFeature('saml_configuration') displayedTabs.push( ...[ SettingsTab.General, paymentsEnabled && !isAWS ? SettingsTab.Billing : null, SettingsTab.Keys, SettingsTab.Webhooks, - showSaml ? SettingsTab.SAML : null, + SettingsTab.SAML, ].filter((v) => !!v), ) } else { @@ -307,37 +303,17 @@ const OrganisationSettingsPage = class extends Component {

    -
    - - {!force2faPermission ? ( - - } - > - To access this feature please upgrade - your account to scaleup or higher." - - ) : ( - - )} -
    Enforce 2FA
    -
    -

    - Enabling this setting forces users within - the organisation to setup 2 factor security. -

    +
    +
    {Utils.getFlagsmithHasFeature( 'restrict_project_create_to_admin', ) && ( - +
    Admin Settings

    - +
    Delete Organisation
    diff --git a/frontend/web/components/pages/ProjectSettingsPage.js b/frontend/web/components/pages/ProjectSettingsPage.js index eb667e32f8ce..4d2bd372c31b 100644 --- a/frontend/web/components/pages/ProjectSettingsPage.js +++ b/frontend/web/components/pages/ProjectSettingsPage.js @@ -23,6 +23,9 @@ import ProjectUsage from 'components/ProjectUsage' import ProjectStore from 'common/stores/project-store' import Tooltip from 'components/Tooltip' import { Link } from 'react-router-dom' +import Setting from 'components/Setting' +import PlanBasedBanner from 'components/PlanBasedAccess' +import classNames from 'classnames' const ProjectSettingsPage = class extends Component { static displayName = 'ProjectSettingsPage' @@ -167,9 +170,7 @@ const ProjectSettingsPage = class extends Component { const { name, stale_flags_limit_days } = this.state const hasStaleFlagsPermission = Utils.getPlansPermission('STALE_FLAGS') - const metadataEnable = - Utils.getPlansPermission('METADATA') && - Utils.getFlagsmithHasFeature('enable_metadata') + const metadataEnable = Utils.getFlagsmithHasFeature('enable_metadata') return (
    @@ -229,7 +230,7 @@ const ProjectSettingsPage = class extends Component { /> -
    + {!!hasVersioning && ( - - -
    - (this.input = e)} - value={ - this.state.stale_flags_limit_days - } - onChange={(e) => - this.setState({ - stale_flags_limit_days: parseInt( - Utils.safeParseEventValue(e), - ), - }) - } - isValid={!!stale_flags_limit_days} - type='number' - placeholder='Number of Days' - /> -
    + <> +
    + + + + + } + > + {`If no changes have been made to a feature in any environment within this threshold the feature will be tagged as stale. You will need to enable feature versioning in your environments for stale features to be detected.`} + + +
    +
    + +
    + (this.input = e)} + value={this.state.stale_flags_limit_days} + onChange={(e) => + this.setState({ + stale_flags_limit_days: parseInt( + Utils.safeParseEventValue(e), + ), + }) + } + isValid={!!stale_flags_limit_days} + type='number' + placeholder='Number of Days' + />
    - } - > - {`${ - !hasStaleFlagsPermission - ? 'This feature is available with our enterprise plan. ' - : '' - }If no changes have been made to a feature in any environment within this threshold the feature will be tagged as stale. You will need to enable feature versioning in your environments for stale features to be detected.`} - + +
    + {!hasStaleFlagsPermission && ( + + )} + )}

    - + - + - + {!Utils.getIsEdge() && !!Utils.isSaas() && ( - +
    Global Edge API Opt in @@ -484,7 +510,7 @@ const ProjectSettingsPage = class extends Component { )}
    - +
    Delete Project
    @@ -518,57 +544,22 @@ const ProjectSettingsPage = class extends Component { tabLabel='SDK Settings' > {Utils.isSaas() && - Utils.getFlagsmithHasFeature('realtime_setting') && ( - - - - this.toggleRealtimeUpdates( - project, - editProject, - ) - } - checked={project.enable_realtime_updates} - /> -
    - Enable Realtime Updates -
    -
    - -

    - Pushes realtime updates to client-side SDKs when - features and segment overrides are adjusted in the - dashboard. - {!Utils.getPlansPermission('REALTIME') && ( - <> - This feature is available with our{' '} - - enterprise plan - - . - - )}{' '} - Find out more{' '} - - here - - . -

    + Utils.getFlagsmithHasFeature('realtime_setting') && + Utils.isSaas() && ( + + + this.toggleRealtimeUpdates(project, editProject) + } + checked={project.enable_realtime_updates} + /> )}
    - + - - Manage feature state changes that have been scheduled to go live. - - - { -
    - {!hasSchedulePlan ? ( -
    - - Schedule feature state changes with a Change Request flow - with our{' '} - Start-up plan. Find - out more{' '} - - . - -
    - ) : ( - <> - ( - - )} - paging={dataScheduledPaging} - nextPage={() => - AppActions.getChangeRequests( - this.props.match.params.environmentId, - { live_from_after: this.state.live_after }, - dataPaging.next, - ) - } - prevPage={() => - AppActions.getChangeRequests( - this.props.match.params.environmentId, - { live_from_after: this.state.live_after }, - dataPaging.previous, - ) - } - goToPage={(page) => - AppActions.getChangeRequests( - this.props.match.params.environmentId, - { live_from_after: this.state.live_after }, - `${Project.api}environments/${environmentId}/list-change-requests/?page=${page}`, - ) - } - renderRow={({ created_at, id, title, user: _user }) => { - const user = - OrganisationStore.model && - OrganisationStore.model.users.find( - (v) => v.id === _user, - ) - return ( - - -
    {title}
    -
    - Created{' '} - {moment(created_at).format('Do MMM YYYY HH:mma')}{' '} - by {user && user.first_name}{' '} - {user && user.last_name} -
    -
    -
    - + {!!Utils.getPlansPermission('SCHEDULE_FLAGS') && ( + + {featureDescriptions.SCHEDULE_FLAGS.description} + + )} + + + { +
    + ( + + )} + paging={dataScheduledPaging} + nextPage={() => + AppActions.getChangeRequests( + this.props.match.params.environmentId, + { live_from_after: this.state.live_after }, + dataPaging.next, + ) + } + prevPage={() => + AppActions.getChangeRequests( + this.props.match.params.environmentId, + { live_from_after: this.state.live_after }, + dataPaging.previous, + ) + } + goToPage={(page) => + AppActions.getChangeRequests( + this.props.match.params.environmentId, + { live_from_after: this.state.live_after }, + `${Project.api}environments/${environmentId}/list-change-requests/?page=${page}`, + ) + } + renderRow={({ created_at, id, title, user: _user }) => { + const user = + OrganisationStore.model && + OrganisationStore.model.users.find((v) => v.id === _user) + return ( + + +
    {title}
    +
    + Created{' '} + {moment(created_at).format('Do MMM YYYY HH:mma')} by{' '} + {user && user.first_name} {user && user.last_name}
    - - ) - }} - /> - - )} -
    - } -
    + +
    + +
    + + ) + }} + /> +
    + } + +
    ) } diff --git a/frontend/web/components/pages/UsersAndPermissionsPage.tsx b/frontend/web/components/pages/UsersAndPermissionsPage.tsx index 11280578f81b..ebe8da49ea02 100644 --- a/frontend/web/components/pages/UsersAndPermissionsPage.tsx +++ b/frontend/web/components/pages/UsersAndPermissionsPage.tsx @@ -36,6 +36,7 @@ import sortBy from 'lodash/sortBy' import UserAction from 'components/UserAction' import Icon from 'components/Icon' import RolesTable from 'components/RolesTable' +import PlanBasedBanner, { getPlanBasedOption } from 'components/PlanBasedAccess' type UsersAndPermissionsPageType = { router: RouterChildContext['router'] @@ -92,7 +93,6 @@ const UsersAndPermissionsInner: FC = ({ 'side-modal', ) } - const hasRbacPermission = Utils.getPlansPermission('RBAC') const meta = subscriptionMeta || organisation.subscription || { max_seats: 1 } const max_seats = meta.max_seats || 1 const isAWS = AccountStore.getPaymentMethod() === 'AWS_MARKETPLACE' @@ -252,7 +252,7 @@ const UsersAndPermissionsInner: FC = ({ . ) : needsUpgradeForAdditionalSeats ? ( - +
    If you wish to invite any additional members, please{' '} { @@ -260,7 +260,7 @@ const UsersAndPermissionsInner: FC = ({ href='#' onClick={() => { router.history.replace( - Constants.upgradeURL, + Constants.getUpgradeUrl(), ) }} > @@ -268,7 +268,7 @@ const UsersAndPermissionsInner: FC = ({ } . - +
    ) : ( You will automatically be charged @@ -308,13 +308,13 @@ const UsersAndPermissionsInner: FC = ({ label: 'Organisation Administrator', value: 'ADMIN', }, - { - isDisabled: !hasRbacPermission, - label: hasRbacPermission - ? 'User' - : 'User - Please upgrade for role based access', - value: 'USER', - }, + getPlanBasedOption( + { + label: 'User', + value: 'USER', + }, + 'RBAC', + ), ]} className='react-select select-sm' /> @@ -512,17 +512,19 @@ const UsersAndPermissionsInner: FC = ({ } options={map( Constants.roles, - (label, value) => ({ - isDisabled: - value !== 'ADMIN' && - !hasRbacPermission, - label: - value !== 'ADMIN' && - !hasRbacPermission - ? `${label} - Please upgrade for role based access` - : label, - value, - }), + (label, value) => + value === 'ADMIN' + ? { + label, + value, + } + : getPlanBasedOption( + { + label, + value, + }, + 'RBAC', + ), )} menuPortalTarget={document.body} menuPosition='absolute' @@ -725,21 +727,16 @@ const UsersAndPermissionsInner: FC = ({
    - {hasRbacPermission ? ( - <> - - - ) : ( -
    - - To use role features you have to - upgrade your plan. - -
    - )} + + +
    diff --git a/frontend/web/styles/_variables.scss b/frontend/web/styles/_variables.scss index 6ab9a92da052..cbcbe8a24175 100644 --- a/frontend/web/styles/_variables.scss +++ b/frontend/web/styles/_variables.scss @@ -42,6 +42,7 @@ $primary: #6837fc; $primary400: #906af6; $primary600: #4e25db; $primary700: #3919b7; +$primary800: #2a2054; $primary900: #1d113e; $danger: #ef4d56; @@ -55,8 +56,9 @@ $info: #0aaddf; $warning: #ff9f43; $secondary400: #fae392; -$secondary500: #f7d56e; -$secondary600: #d4b050; +$secondary500: #F7D56E; +$secondary600: #e5c55f; +$secondary700: #d4b050; $secondary: #fae392; // Links and Anchors @@ -130,6 +132,12 @@ $btn-secondary-hover-bg: $basic-alpha-16; $btn-secondary-hover-bg-dark: $white-alpha-16; $btn-secondary-active-bg: $basic-alpha-24; $btn-secondary-active-bg-dark: $white-alpha-24; +$btn-tertiary-bg: $secondary500; +$btn-tertiary-bg-dark: $secondary600; +$btn-tertiary-hover-bg: $secondary600; +$btn-tertiary-hover-bg-dark: $secondary700; +$btn-tertiary-active-bg: $secondary700; +$btn-tertiary-active-bg-dark: $secondary700; $btn-outline-hover-bg: $primary-alfa-8; $btn-outline-hover-bg-dark: $basic-alpha-8; $btn-outline-focus-bg-dark: $primary-alfa-8; @@ -345,6 +353,7 @@ $scrollbar-thumb-dark-hover: $bg-dark300; $theme-colors: ( light200: $bg-light200, primary: $primary, + primary800: $primary800, danger: $danger, success: $success, info: $info, @@ -356,6 +365,9 @@ $theme-colors: ( .bg-light200 { background-color: $body-bg-dark !important; } + .bg-primary800 { + background-color: $body-bg-dark !important; + } } .bg-faint { background-color: #fafafa; diff --git a/frontend/web/styles/project/_buttons.scss b/frontend/web/styles/project/_buttons.scss index b3240bd2fafc..4eec54c0df1a 100644 --- a/frontend/web/styles/project/_buttons.scss +++ b/frontend/web/styles/project/_buttons.scss @@ -84,16 +84,31 @@ button.btn { &-secondary { color: $body-color; - background-color: $btn-secondary-bg; + background-color: $btn-secondary-bg !important; 
&:hover, &:focus { - background-color: $btn-secondary-hover-bg; + background-color: $btn-secondary-hover-bg !important; color: $body-color; } &.btn:active { - background-color: $btn-secondary-active-bg; + background-color: $btn-secondary-active-bg !important; + color: $body-color; + } + } + &-tertiary { + color: $primary900; + background-color: $btn-tertiary-bg; + box-shadow: 0 10px 20px rgba(247, 213, 110, .2); + &:hover, + &:focus { + background-color: $btn-tertiary-hover-bg; + color: $body-color; + } + + &.btn:active { + background-color: $btn-tertiary-active-bg; color: $body-color; } } @@ -405,14 +420,14 @@ $add-btn-size: 34px; } &.btn-secondary { color: white; - background-color: $btn-secondary-bg-dark; + background-color: $btn-secondary-bg-dark !important; &:hover, &:focus { - background-color: $btn-secondary-hover-bg-dark; + background-color: $btn-secondary-hover-bg-dark !important; } &:active { color: white; - background-color: $btn-secondary-active-bg-dark; + background-color: $btn-secondary-active-bg-dark !important; } } &.btn-success { diff --git a/frontend/web/styles/project/_tooltips.scss b/frontend/web/styles/project/_tooltips.scss index 13057eb0abfe..3723bf7ca98f 100644 --- a/frontend/web/styles/project/_tooltips.scss +++ b/frontend/web/styles/project/_tooltips.scss @@ -13,6 +13,9 @@ $shadow-dark: 0 4px 4px 0 #00000029; opacity: 1; } + &.tooltip-lg.show { + max-width: 900px; + } i { font-style: normal; color: $primary; diff --git a/frontend/web/styles/project/_utils.scss b/frontend/web/styles/project/_utils.scss index f8028c1840ee..a602ced544c3 100644 --- a/frontend/web/styles/project/_utils.scss +++ b/frontend/web/styles/project/_utils.scss @@ -174,6 +174,9 @@ .bg-primary { background: $primary !important; + &.bg-opacity-10 { + background: rgba($primary,0.1) !important; + } } .dark { .text-body { @@ -215,3 +218,7 @@ min-width: 0; } } + +.blur { + filter: blur(5px); +} From 9bae21cdcba0f4d37c8d4838137f8376e3749215 Mon Sep 17 00:00:00 2001 From: Zach 
Aysan Date: Thu, 15 Aug 2024 04:39:26 -0400 Subject: [PATCH 128/247] fix: Remove grace period where necessary from blocked notification (#4496) Co-authored-by: Matthew Elwell --- api/organisations/task_helpers.py | 5 +- .../api_flags_blocked_notification.html | 2 +- .../api_flags_blocked_notification.txt | 2 +- .../test_unit_organisations_tasks.py | 47 +++++++++++++++++-- 4 files changed, 50 insertions(+), 6 deletions(-) diff --git a/api/organisations/task_helpers.py b/api/organisations/task_helpers.py index 65d382358615..2466e3c4d377 100644 --- a/api/organisations/task_helpers.py +++ b/api/organisations/task_helpers.py @@ -26,7 +26,10 @@ def send_api_flags_blocked_notification(organisation: Organisation) -> None: userorganisation__organisation=organisation, ) - context = {"organisation": organisation} + context = { + "organisation": organisation, + "grace_period": not hasattr(organisation, "breached_grace_period"), + } message = "organisations/api_flags_blocked_notification.txt" html_message = "organisations/api_flags_blocked_notification.html" diff --git a/api/organisations/templates/organisations/api_flags_blocked_notification.html b/api/organisations/templates/organisations/api_flags_blocked_notification.html index 48dfac9de004..03fdfaa16897 100644 --- a/api/organisations/templates/organisations/api_flags_blocked_notification.html +++ b/api/organisations/templates/organisations/api_flags_blocked_notification.html @@ -9,7 +9,7 @@ - This is a system generated notification related to your Flagsmith API Usage. As per previous warnings, we have had to block your company {{ organisation.name }} after the 7 day grace period. Flags are not currently being served for your organization, and will continue to be blocked until your billing period resets or you upgrade your account. You can upgrade your account at app.flagsmith.com. + This is a system generated notification related to your Flagsmith API Usage. 
As per previous warnings, we have had to block your company {{ organisation.name }}{% if grace_period %} after the 7 day grace period{% endif %}. Flags are not currently being served for your organization, and will continue to be blocked until your billing period resets or you upgrade your account. You can upgrade your account at app.flagsmith.com. diff --git a/api/organisations/templates/organisations/api_flags_blocked_notification.txt b/api/organisations/templates/organisations/api_flags_blocked_notification.txt index 200c009ae1ff..12d4b624efba 100644 --- a/api/organisations/templates/organisations/api_flags_blocked_notification.txt +++ b/api/organisations/templates/organisations/api_flags_blocked_notification.txt @@ -1,6 +1,6 @@ Hi there, -This is a system generated notification related to your Flagsmith API Usage. As per previous warnings, we have had to block your company {{ organisation.name }} after the 7 day grace period. Flags are not currently being served for your organization, and will continue to be blocked until your billing period resets or you upgrade your account. You can upgrade your account at app.flagsmith.com. +This is a system generated notification related to your Flagsmith API Usage. As per previous warnings, we have had to block your company {{ organisation.name }}{% if grace_period %} after the 7 day grace period{% endif %}. Flags are not currently being served for your organization, and will continue to be blocked until your billing period resets or you upgrade your account. You can upgrade your account at app.flagsmith.com. Thank you! 
diff --git a/api/tests/unit/organisations/test_unit_organisations_tasks.py b/api/tests/unit/organisations/test_unit_organisations_tasks.py index 4d1a38dd9520..b59e4633144f 100644 --- a/api/tests/unit/organisations/test_unit_organisations_tasks.py +++ b/api/tests/unit/organisations/test_unit_organisations_tasks.py @@ -1284,6 +1284,7 @@ def test_restrict_use_due_to_api_limit_grace_period_over( organisation3 = Organisation.objects.create(name="Org #3") organisation4 = Organisation.objects.create(name="Org #4") organisation5 = Organisation.objects.create(name="Org #5") + organisation6 = Organisation.objects.create(name="Org #6") for org in [ organisation, @@ -1291,6 +1292,7 @@ def test_restrict_use_due_to_api_limit_grace_period_over( organisation3, organisation4, organisation5, + organisation6, ]: OrganisationSubscriptionInformationCache.objects.create( organisation=org, @@ -1309,7 +1311,13 @@ def test_restrict_use_due_to_api_limit_grace_period_over( mock_api_usage.return_value = 12_005 # Add users to test email delivery - for org in [organisation2, organisation3, organisation4, organisation5]: + for org in [ + organisation2, + organisation3, + organisation4, + organisation5, + organisation6, + ]: admin_user.add_organisation(org, role=OrganisationRole.ADMIN) staff_user.add_organisation(org, role=OrganisationRole.USER) @@ -1358,6 +1366,15 @@ def test_restrict_use_due_to_api_limit_grace_period_over( percent_usage=120, ) + # Should be immediately blocked because they've previously breached the grace + # period + OrganisationAPIUsageNotification.objects.create( + notified_at=now, + organisation=organisation6, + percent_usage=120, + ) + OrganisationBreachedGracePeriod.objects.create(organisation=organisation6) + # When restrict_use_due_to_api_limit_grace_period_over() @@ -1367,6 +1384,7 @@ def test_restrict_use_due_to_api_limit_grace_period_over( organisation3.refresh_from_db() organisation4.refresh_from_db() organisation5.refresh_from_db() + organisation6.refresh_from_db() 
# Organisation without breaching 100 percent usage is ok. assert organisation3.stop_serving_flags is False @@ -1390,6 +1408,9 @@ def test_restrict_use_due_to_api_limit_grace_period_over( assert organisation2.stop_serving_flags is True assert organisation2.block_access_to_admin is True assert organisation2.api_limit_access_block + assert organisation6.stop_serving_flags is True + assert organisation6.block_access_to_admin is True + assert organisation6.api_limit_access_block client_mock.get_identity_flags.call_args_list == [ call( @@ -1406,9 +1427,16 @@ def test_restrict_use_due_to_api_limit_grace_period_over( "subscription.plan": organisation2.subscription.plan, }, ), + call( + f"org.{organisation6.id}", + traits={ + "organisation_id": organisation6.id, + "subscription.plan": organisation6.subscription.plan, + }, + ), ] - assert len(mailoutbox) == 2 + assert len(mailoutbox) == 3 email1 = mailoutbox[0] assert email1.subject == "Flagsmith API use has been blocked due to overuse" assert email1.body == render_to_string( @@ -1428,11 +1456,24 @@ def test_restrict_use_due_to_api_limit_grace_period_over( assert email2.alternatives[0][0] == render_to_string( "organisations/api_flags_blocked_notification.html", - context={"organisation": organisation2}, + context={"organisation": organisation2, "grace_period": False}, ) assert email2.from_email == "noreply@flagsmith.com" assert email2.to == ["admin@example.com", "staff@example.com"] + email3 = mailoutbox[2] + assert email3.subject == "Flagsmith API use has been blocked due to overuse" + assert len(email3.alternatives) == 1 + assert len(email3.alternatives[0]) == 2 + assert email3.alternatives[0][1] == "text/html" + + assert email3.alternatives[0][0] == render_to_string( + "organisations/api_flags_blocked_notification.html", + context={"organisation": organisation6, "grace_period": False}, + ) + assert email3.from_email == "noreply@flagsmith.com" + assert email3.to == ["admin@example.com", "staff@example.com"] + # 
Organisations that change their subscription are unblocked. organisation.subscription.plan = "scale-up-v2" organisation.subscription.save() From cb8472d669f50d2dfc3d9837d6a7049840b08a7a Mon Sep 17 00:00:00 2001 From: Zach Aysan Date: Thu, 15 Aug 2024 06:28:55 -0400 Subject: [PATCH 129/247] fix: Make influx cache task recurring (#4495) Co-authored-by: Matthew Elwell --- api/organisations/tasks.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/api/organisations/tasks.py b/api/organisations/tasks.py index fea2b492cf23..951924fa13ab 100644 --- a/api/organisations/tasks.py +++ b/api/organisations/tasks.py @@ -82,6 +82,14 @@ def send_org_subscription_cancelled_alert( @register_recurring_task( run_every=timedelta(hours=6), ) +def update_organisation_subscription_information_influx_cache_recurring(): + """ + We're redefining the task function here to register a recurring task + since the decorators don't stack correctly. (TODO) + """ + update_organisation_subscription_information_influx_cache() # pragma: no cover + + @register_task_handler() def update_organisation_subscription_information_influx_cache(): subscription_info_cache.update_caches((SubscriptionCacheEntity.INFLUX,)) From 174d437a4a654e9ea34645d86f515fa65eb85660 Mon Sep 17 00:00:00 2001 From: Gagan Date: Fri, 16 Aug 2024 14:45:01 +0530 Subject: [PATCH 130/247] fix(delete-feature-via-role): bump rbac (#4508) --- api/Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/Makefile b/api/Makefile index f989f9caa617..e3090e8c6815 100644 --- a/api/Makefile +++ b/api/Makefile @@ -12,7 +12,7 @@ POETRY_VERSION ?= 1.8.3 GUNICORN_LOGGER_CLASS ?= util.logging.GunicornJsonCapableLogger SAML_REVISION ?= v1.6.3 -RBAC_REVISION ?= v0.7.0 +RBAC_REVISION ?= v0.8.0 -include .env-local -include $(DOTENV_OVERRIDE_FILE) From 1e3888aae3f4429089643c478300b8d94e856caf Mon Sep 17 00:00:00 2001 From: Matthew Elwell Date: Mon, 19 Aug 2024 13:52:05 +0100 Subject: [PATCH 131/247] fix: allow unknown attrs from 
cb json meta (#4509) --- api/organisations/subscriptions/metadata.py | 1 + .../unit/organisations/chargebee/test_unit_chargebee_cache.py | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/api/organisations/subscriptions/metadata.py b/api/organisations/subscriptions/metadata.py index f897a3a01880..c6308892be2e 100644 --- a/api/organisations/subscriptions/metadata.py +++ b/api/organisations/subscriptions/metadata.py @@ -10,6 +10,7 @@ def __init__( api_calls: int = 0, projects: typing.Optional[int] = None, chargebee_email=None, + **kwargs, # allows for extra unknown attrs from CB json metadata ): self.seats = seats self.api_calls = api_calls diff --git a/api/tests/unit/organisations/chargebee/test_unit_chargebee_cache.py b/api/tests/unit/organisations/chargebee/test_unit_chargebee_cache.py index 333a22940472..cc1f726ff7ee 100644 --- a/api/tests/unit/organisations/chargebee/test_unit_chargebee_cache.py +++ b/api/tests/unit/organisations/chargebee/test_unit_chargebee_cache.py @@ -57,6 +57,7 @@ def test_chargebee_cache(mocker, db): "seats": 10, "api_calls": 100, "projects": 10, + "some_unknown_key": 1, # should be ignored } plan_id = "plan_id" plan_items = [ @@ -69,6 +70,7 @@ def test_chargebee_cache(mocker, db): "seats": 1, "api_calls": 10, "projects": 1, + "some_unknown_key": 1, # should be ignored } addon_id = "addon_id" addon_items = [ @@ -90,8 +92,10 @@ def test_chargebee_cache(mocker, db): assert cache.plans[plan_id].seats == plan_metadata["seats"] assert cache.plans[plan_id].api_calls == plan_metadata["api_calls"] assert cache.plans[plan_id].projects == plan_metadata["projects"] + assert not hasattr(cache.plans[plan_id], "some_unknown_key") assert len(cache.addons) == 1 assert cache.addons[addon_id].seats == addon_metadata["seats"] assert cache.addons[addon_id].api_calls == addon_metadata["api_calls"] assert cache.addons[addon_id].projects == addon_metadata["projects"] + assert not hasattr(cache.addons[addon_id], "some_unknown_key") From 
7034fa4fbe0f16e0253f11affe68e059fde88a6a Mon Sep 17 00:00:00 2001 From: Gagan Date: Mon, 19 Aug 2024 19:21:27 +0530 Subject: [PATCH 132/247] fix(views/features): use get_environment_flags_list (#4511) --- api/features/views.py | 8 ++++---- .../unit/features/test_unit_features_views.py | 20 +++++++++++-------- 2 files changed, 16 insertions(+), 12 deletions(-) diff --git a/api/features/views.py b/api/features/views.py index b276bd8f4b98..b62973aceca1 100644 --- a/api/features/views.py +++ b/api/features/views.py @@ -172,11 +172,11 @@ def get_queryset(self): identity__isnull=True, feature_segment__isnull=True, ) - feature_states = FeatureState.objects.get_live_feature_states( - self.environment, + feature_states = get_environment_flags_list( + environment=self.environment, additional_filters=q, - ).select_related("feature_state_value", "feature") - + additional_select_related_args=["feature_state_value", "feature"], + ) self._feature_states = {fs.feature_id: fs for fs in feature_states} return queryset diff --git a/api/tests/unit/features/test_unit_features_views.py b/api/tests/unit/features/test_unit_features_views.py index 0226d4b1a615..8874dbe91a50 100644 --- a/api/tests/unit/features/test_unit_features_views.py +++ b/api/tests/unit/features/test_unit_features_views.py @@ -2814,18 +2814,22 @@ def test_list_features_with_feature_state( project=project, ) + # This should be ignored due to versioning. feature_state1 = feature.feature_states.filter(environment=environment).first() feature_state1.enabled = True feature_state1.version = 1 feature_state1.save() - feature_state_value1 = feature_state1.feature_state_value - feature_state_value1.string_value = None - feature_state_value1.integer_value = 1945 - feature_state_value1.type = INTEGER - feature_state_value1.save() - - # This should be ignored due to versioning. + # This should be ignored due to less recent live_from compared to the next feature state + # event though it has a higher version. 
+ FeatureState.objects.create( + feature=feature, + environment=environment, + live_from=two_hours_ago, + enabled=True, + version=101, + ) + # This should be returned feature_state_versioned = FeatureState.objects.create( feature=feature, environment=environment, @@ -2896,8 +2900,8 @@ def test_list_features_with_feature_state( assert len(response.data["results"]) == 3 results = response.data["results"] - assert results[0]["environment_feature_state"]["enabled"] is True + assert results[0]["environment_feature_state"]["id"] == feature_state_versioned.id assert results[0]["environment_feature_state"]["feature_state_value"] == 2005 assert results[0]["name"] == feature.name assert results[1]["environment_feature_state"]["enabled"] is True From 19bc58ed8b1f8e689842b3181b6bf266f1a507aa Mon Sep 17 00:00:00 2001 From: Novak Zaballa <41410593+novakzaballa@users.noreply.github.com> Date: Mon, 19 Aug 2024 12:12:15 -0400 Subject: [PATCH 133/247] fix: Solve API GitHub integration issues (#4502) --- api/conftest.py | 6 +- .../feature_external_resources/models.py | 15 ++- .../feature_external_resources/views.py | 4 +- api/integrations/github/client.py | 20 +++- api/integrations/github/models.py | 2 +- api/integrations/github/serializers.py | 4 +- api/integrations/github/views.py | 6 +- ...t_unit_feature_external_resources_views.py | 36 +++---- .../github/test_unit_github_views.py | 96 ++++++++++++++----- 9 files changed, 130 insertions(+), 59 deletions(-) diff --git a/api/conftest.py b/api/conftest.py index ff6732ee44d3..4476ea624855 100644 --- a/api/conftest.py +++ b/api/conftest.py @@ -40,7 +40,7 @@ from features.value_types import STRING from features.versioning.tasks import enable_v2_versioning from features.workflows.core.models import ChangeRequest -from integrations.github.models import GithubConfiguration, GithubRepository +from integrations.github.models import GithubConfiguration, GitHubRepository from metadata.models import ( Metadata, MetadataField, @@ -1079,8 +1079,8 
@@ def github_configuration(organisation: Organisation) -> GithubConfiguration: def github_repository( github_configuration: GithubConfiguration, project: Project, -) -> GithubRepository: - return GithubRepository.objects.create( +) -> GitHubRepository: + return GitHubRepository.objects.create( github_configuration=github_configuration, repository_owner="repositoryownertest", repository_name="repositorynametest", diff --git a/api/features/feature_external_resources/models.py b/api/features/feature_external_resources/models.py index 8df2428e3158..3fcf60b86eb0 100644 --- a/api/features/feature_external_resources/models.py +++ b/api/features/feature_external_resources/models.py @@ -1,5 +1,6 @@ import json import logging +import re from django.db import models from django.db.models import Q @@ -14,7 +15,7 @@ from features.models import Feature, FeatureState from integrations.github.constants import GitHubEventType, GitHubTag from integrations.github.github import call_github_task -from integrations.github.models import GithubRepository +from integrations.github.models import GitHubRepository from organisations.models import Organisation from projects.tags.models import Tag, TagType @@ -79,9 +80,19 @@ def execute_after_save_actions(self): .get(id=self.feature.project.organisation_id) .github_config.first() ): - github_repo = GithubRepository.objects.get( + if self.type == "GITHUB_PR": + pattern = r"github.com/([^/]+)/([^/]+)/pull/\d+$" + elif self.type == "GITHUB_ISSUE": + pattern = r"github.com/([^/]+)/([^/]+)/issues/\d+$" + + url_match = re.search(pattern, self.url) + owner, repo = url_match.groups() + + github_repo = GitHubRepository.objects.get( github_configuration=github_configuration.id, project=self.feature.project, + repository_owner=owner, + repository_name=repo, ) if github_repo.tagging_enabled: github_tag = Tag.objects.get( diff --git a/api/features/feature_external_resources/views.py b/api/features/feature_external_resources/views.py index 
002a8e5da89a..f0b6117b70c1 100644 --- a/api/features/feature_external_resources/views.py +++ b/api/features/feature_external_resources/views.py @@ -10,7 +10,7 @@ get_github_issue_pr_title_and_state, label_github_issue_pr, ) -from integrations.github.models import GithubRepository +from integrations.github.models import GitHubRepository from organisations.models import Organisation from .models import FeatureExternalResource @@ -85,7 +85,7 @@ def create(self, request, *args, **kwargs): url_match = re.search(pattern, url) if url_match: owner, repo, issue = url_match.groups() - if GithubRepository.objects.get( + if GitHubRepository.objects.get( github_configuration=github_configuration, repository_owner=owner, repository_name=repo, diff --git a/api/integrations/github/client.py b/api/integrations/github/client.py index 4d4293b30a48..caf6ca13deba 100644 --- a/api/integrations/github/client.py +++ b/api/integrations/github/client.py @@ -155,9 +155,23 @@ def fetch_search_github_resource( headers: dict[str, str] = build_request_headers( github_configuration.installation_id ) - response = requests.get(url, headers=headers, timeout=GITHUB_API_CALLS_TIMEOUT) - response.raise_for_status() - json_response = response.json() + try: + response = requests.get(url, headers=headers, timeout=GITHUB_API_CALLS_TIMEOUT) + response.raise_for_status() + json_response = response.json() + + except HTTPError: + response_content = response.content.decode("utf-8") + error_message = ( + "The resources do not exist or you do not have permission to view them" + ) + error_data = json.loads(response_content) + if error_data.get("message", "") == "Validation Failed" and any( + error.get("code", "") == "invalid" for error in error_data.get("errors", []) + ): + logger.warning(error_message) + raise ValueError(error_message) + results = [ { "html_url": i["html_url"], diff --git a/api/integrations/github/models.py b/api/integrations/github/models.py index 546b009d601e..9b621d4c9b3f 100644 --- 
a/api/integrations/github/models.py +++ b/api/integrations/github/models.py @@ -41,7 +41,7 @@ class Meta: ordering = ("id",) -class GithubRepository(LifecycleModelMixin, SoftDeleteExportableModel): +class GitHubRepository(LifecycleModelMixin, SoftDeleteExportableModel): github_configuration = models.ForeignKey( GithubConfiguration, related_name="repository_config", on_delete=models.CASCADE ) diff --git a/api/integrations/github/serializers.py b/api/integrations/github/serializers.py index 9d1cf3a81635..eb0a6b3dd927 100644 --- a/api/integrations/github/serializers.py +++ b/api/integrations/github/serializers.py @@ -7,7 +7,7 @@ PaginatedQueryParams, RepoQueryParams, ) -from integrations.github.models import GithubConfiguration, GithubRepository +from integrations.github.models import GithubConfiguration, GitHubRepository class GithubConfigurationSerializer(ModelSerializer): @@ -19,7 +19,7 @@ class Meta: class GithubRepositorySerializer(ModelSerializer): class Meta: - model = GithubRepository + model = GitHubRepository optional_fields = ("search_text", "page") fields = ( "id", diff --git a/api/integrations/github/views.py b/api/integrations/github/views.py index 8d48a17dfa84..eac8239511f2 100644 --- a/api/integrations/github/views.py +++ b/api/integrations/github/views.py @@ -27,7 +27,7 @@ tag_by_event_type, ) from integrations.github.helpers import github_webhook_payload_is_valid -from integrations.github.models import GithubConfiguration, GithubRepository +from integrations.github.models import GithubConfiguration, GitHubRepository from integrations.github.permissions import HasPermissionToGithubConfiguration from integrations.github.serializers import ( GithubConfigurationSerializer, @@ -126,7 +126,7 @@ class GithubRepositoryViewSet(viewsets.ModelViewSet): GithubIsAdminOrganisation, ) serializer_class = GithubRepositorySerializer - model_class = GithubRepository + model_class = GitHubRepository def perform_create(self, serializer): github_configuration_id = 
self.kwargs["github_pk"] @@ -136,7 +136,7 @@ def get_queryset(self): try: if github_pk := self.kwargs.get("github_pk"): int(github_pk) - return GithubRepository.objects.filter(github_configuration=github_pk) + return GitHubRepository.objects.filter(github_configuration=github_pk) except ValueError: raise ValidationError({"github_pk": ["Must be an integer"]}) diff --git a/api/tests/unit/features/test_unit_feature_external_resources_views.py b/api/tests/unit/features/test_unit_feature_external_resources_views.py index d69b05766c0e..d6ba8c501b14 100644 --- a/api/tests/unit/features/test_unit_feature_external_resources_views.py +++ b/api/tests/unit/features/test_unit_feature_external_resources_views.py @@ -21,7 +21,7 @@ ) from features.versioning.models import EnvironmentFeatureVersion from integrations.github.constants import GITHUB_API_URL, GITHUB_API_VERSION -from integrations.github.models import GithubConfiguration, GithubRepository +from integrations.github.models import GithubConfiguration, GitHubRepository from projects.models import Project from segments.models import Segment from tests.types import WithEnvironmentPermissionsCallable @@ -73,7 +73,7 @@ def test_create_feature_external_resource( environment: Environment, project: Project, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, post_request_mock: MagicMock, mock_github_client_generate_token: MagicMock, ) -> None: @@ -186,7 +186,7 @@ def test_cannot_create_feature_external_resource_with_an_invalid_gh_url( feature: Feature, project: Project, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, ) -> None: # Given feature_external_resource_data = { @@ -216,7 +216,7 @@ def test_cannot_create_feature_external_resource_with_an_incorrect_gh_type( feature: Feature, project: Project, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + 
github_repository: GitHubRepository, ) -> None: # Given feature_external_resource_data = { @@ -316,7 +316,7 @@ def test_cannot_create_feature_external_resource_due_to_unique_constraint( feature: Feature, project: Project, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, feature_external_resource: FeatureExternalResource, ) -> None: # Given @@ -346,7 +346,7 @@ def test_update_feature_external_resource( feature_external_resource: FeatureExternalResource, project: Project, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, post_request_mock: MagicMock, mocker: MockerFixture, ) -> None: @@ -357,7 +357,7 @@ def test_update_feature_external_resource( mock_generate_token.return_value = "mocked_token" feature_external_resource_data = { "type": "GITHUB_ISSUE", - "url": "https://github.com/userexample/example-project-repo/issues/12", + "url": f"https://github.com/{github_repository.repository_owner}/{github_repository.repository_name}/issues/12", "feature": feature.id, "metadata": '{"state": "open"}', } @@ -378,7 +378,7 @@ def test_delete_feature_external_resource( feature: Feature, project: Project, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, feature_external_resource: FeatureExternalResource, post_request_mock: MagicMock, mocker: MockerFixture, @@ -417,7 +417,7 @@ def test_get_feature_external_resources( feature: Feature, project: Project, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, feature_external_resource: FeatureExternalResource, mock_github_client_generate_token: MagicMock, ) -> None: @@ -446,7 +446,7 @@ def test_get_feature_external_resource( feature: Feature, project: Project, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + 
github_repository: GitHubRepository, feature_external_resource: FeatureExternalResource, ) -> None: # Given @@ -472,7 +472,7 @@ def test_create_github_comment_on_feature_state_updated( feature: Feature, project: Project, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, post_request_mock: MagicMock, mocker: MockerFixture, environment: Environment, @@ -532,7 +532,7 @@ def test_create_github_comment_on_feature_was_deleted( feature: Feature, project: Project, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, feature_external_resource: FeatureExternalResource, post_request_mock: MagicMock, mock_github_client_generate_token: MagicMock, @@ -566,7 +566,7 @@ def test_create_github_comment_on_segment_override_updated( segment_override_for_feature_with_value: FeatureState, project: Project, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, post_request_mock: MagicMock, environment: Environment, admin_client: APIClient, @@ -624,7 +624,7 @@ def test_create_github_comment_on_segment_override_deleted( segment_override_for_feature_with_value: FeatureState, feature_with_value_segment: FeatureSegment, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, post_request_mock: MagicMock, admin_client_new: APIClient, feature_with_value_external_resource: FeatureExternalResource, @@ -665,7 +665,7 @@ def test_create_github_comment_using_v2( environment: Environment, project: Project, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, feature_external_resource: FeatureExternalResource, post_request_mock: MagicMock, mocker: MockerFixture, @@ -738,7 +738,7 @@ def test_create_github_comment_using_v2_fails_on_wrong_params( environment: Environment, 
project: Project, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, feature_external_resource: FeatureExternalResource, post_request_mock: MagicMock, mocker: MockerFixture, @@ -781,7 +781,7 @@ def test_create_feature_external_resource_on_environment_with_v2( admin_client_new: APIClient, project: Project, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, segment_override_for_feature_with_value: FeatureState, environment_v2_versioning: Environment, post_request_mock: MagicMock, @@ -858,7 +858,7 @@ def test_cannot_create_feature_external_resource_for_the_same_feature_and_resour feature: Feature, project: Project, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, feature_external_resource_gh_pr: FeatureExternalResource, ) -> None: # Given diff --git a/api/tests/unit/integrations/github/test_unit_github_views.py b/api/tests/unit/integrations/github/test_unit_github_views.py index a1d9add23f36..ede44b4b6ddf 100644 --- a/api/tests/unit/integrations/github/test_unit_github_views.py +++ b/api/tests/unit/integrations/github/test_unit_github_views.py @@ -16,7 +16,7 @@ from features.feature_external_resources.models import FeatureExternalResource from features.models import Feature from integrations.github.constants import GITHUB_API_URL -from integrations.github.models import GithubConfiguration, GithubRepository +from integrations.github.models import GithubConfiguration, GitHubRepository from integrations.github.views import ( github_api_call_error_handler, github_webhook_payload_is_valid, @@ -158,7 +158,7 @@ def test_delete_github_configuration( admin_client_new: APIClient, organisation: Organisation, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, mocker: MockerFixture, ) -> None: # Given @@ 
-193,7 +193,7 @@ def test_cannot_delete_github_configuration_when_delete_github_installation_resp admin_client_new: APIClient, organisation: Organisation, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, mocker: MockerFixture, ) -> None: # Given @@ -294,7 +294,7 @@ def test_create_github_repository( # Then assert response.status_code == status.HTTP_201_CREATED - assert GithubRepository.objects.filter(repository_owner="repositoryowner").exists() + assert GitHubRepository.objects.filter(repository_owner="repositoryowner").exists() @responses.activate @@ -341,7 +341,7 @@ def test_create_github_repository_and_label_already_Existe( # Then mocker_logger.warning.assert_called_once_with("Label already exists") assert response.status_code == status.HTTP_201_CREATED - assert GithubRepository.objects.filter(repository_owner="repositoryowner").exists() + assert GitHubRepository.objects.filter(repository_owner="repositoryowner").exists() def test_cannot_create_github_repository_when_does_not_have_permissions( @@ -374,7 +374,7 @@ def test_cannot_create_github_repository_due_to_unique_constraint( organisation: Organisation, github_configuration: GithubConfiguration, project: Project, - github_repository: GithubRepository, + github_repository: GitHubRepository, ) -> None: # Given data = { @@ -404,7 +404,7 @@ def test_github_delete_repository( admin_client_new: APIClient, organisation: Organisation, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, feature_external_resource: FeatureExternalResource, mock_github_client_generate_token: MagicMock, ) -> None: @@ -476,7 +476,7 @@ def test_fetch_pull_requests( admin_client_new: APIClient, organisation: Organisation, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, mock_github_client_generate_token: MagicMock, mocker: 
MockerFixture, ) -> None: @@ -511,7 +511,7 @@ def test_fetch_issues( admin_client_new: APIClient, organisation: Organisation, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, mock_github_client_generate_token: MagicMock, mocker: MockerFixture, ) -> None: @@ -547,16 +547,30 @@ def test_fetch_issues( ) +@responses.activate def test_fetch_issues_returns_error_on_bad_response_from_github( admin_client_new: APIClient, organisation: Organisation, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, mock_github_client_generate_token: MagicMock, mocker: MockerFixture, ) -> None: # Given - mocker.patch("requests.get", side_effect=mocked_requests_get_error) + + mock_response = { + "message": "Validation Failed", + "errors": [{"message": "Error", "code": "not_found"}], + "documentation_url": "https://docs.github.com/v3/search/", + "status": "404", + } + + responses.add( + method="GET", + url="https://api.github.com/search/issues?q=%20repo:repo/repo%20is:issue%20is:open%20in:title%20in:body&per_page=100&page=1", # noqa: E501 + status=status.HTTP_404_NOT_FOUND, + json=mock_response, + ) url = reverse("api-v1:organisations:get-github-issues", args=[organisation.id]) data = {"repo_owner": "owner", "repo_name": "repo"} # When @@ -564,9 +578,41 @@ def test_fetch_issues_returns_error_on_bad_response_from_github( # Then assert response.status_code == status.HTTP_502_BAD_GATEWAY + assert "Failed to retrieve GitHub issues." 
in response.json()["detail"] + + +@responses.activate +def test_search_issues_returns_error_on_bad_search_params( + admin_client_new: APIClient, + organisation: Organisation, + github_configuration: GithubConfiguration, + github_repository: GitHubRepository, + mock_github_client_generate_token: MagicMock, + mocker: MockerFixture, +) -> None: + # Given + mock_response = { + "message": "Validation Failed", + "errors": [{"message": "Error", "code": "invalid"}], + "documentation_url": "https://docs.github.com/v3/search/", + "status": "422", + } + responses.add( + method="GET", + url="https://api.github.com/search/issues?q=%20repo:owner/repo%20is:issue%20is:open%20in:title%20in:body&per_page=100&page=1", # noqa: E501 + status=status.HTTP_422_UNPROCESSABLE_ENTITY, + json=mock_response, + ) + url = reverse("api-v1:organisations:get-github-issues", args=[organisation.id]) + data = {"repo_owner": "owner", "repo_name": "repo"} + # When + response = admin_client_new.get(url, data=data) + + # Then + assert response.status_code == status.HTTP_400_BAD_REQUEST response_json = response.json() assert ( - "Failed to retrieve GitHub issues. Error: HTTP Error 404" + "Failed to retrieve GitHub issues. 
Error: The resources do not exist or you do not have permission to view them" in response_json["detail"] ) @@ -576,7 +622,7 @@ def test_fetch_repositories( admin_client_new: APIClient, organisation: Organisation, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, mock_github_client_generate_token: MagicMock, ) -> None: # Given @@ -651,7 +697,7 @@ def test_cannot_fetch_issues_or_prs_when_does_not_have_permissions( test_user_client: APIClient, organisation: Organisation, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, mock_github_client_generate_token: MagicMock, reverse_url: str, ) -> None: @@ -725,7 +771,7 @@ def test_github_webhook_merged_a_pull_request( api_client: APIClient, feature: Feature, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, feature_external_resource: FeatureExternalResource, set_github_webhook_secret, ) -> None: @@ -870,7 +916,7 @@ def test_cannot_fetch_pull_requests_when_github_request_call_failed( admin_client_new: APIClient, organisation: Organisation, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, mock_github_client_generate_token: MagicMock, ) -> None: # Given @@ -898,7 +944,7 @@ def test_cannot_fetch_pulls_when_the_github_response_was_invalid( admin_client_new: APIClient, organisation: Organisation, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, mock_github_client_generate_token: MagicMock, ) -> None: # Given @@ -938,7 +984,7 @@ def test_fetch_github_repo_contributors( admin_client_new: APIClient, organisation: Organisation, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, mock_github_client_generate_token: 
MagicMock, ) -> None: # Given @@ -996,7 +1042,7 @@ def test_fetch_github_repo_contributors_with_invalid_query_params( admin_client_new: APIClient, organisation: Organisation, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, ) -> None: # Given url = reverse( @@ -1055,7 +1101,7 @@ def test_send_the_invalid_number_page_or_page_size_param_returns_400( admin_client: APIClient, organisation: Organisation, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, page: int, page_size: int, error_detail: str, @@ -1098,7 +1144,7 @@ def test_send_the_invalid_type_page_or_page_size_param_returns_400( admin_client: APIClient, organisation: Organisation, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, page: int, page_size: int, error_response: dict[str, Any], @@ -1127,7 +1173,7 @@ def test_label_and_tags_no_added_when_tagging_is_disabled( admin_client_new: APIClient, project: Project, environment: Environment, - github_repository: GithubRepository, + github_repository: GitHubRepository, feature_with_value: Feature, mock_github_client_generate_token: MagicMock, post_request_mock: MagicMock, @@ -1166,7 +1212,7 @@ def test_update_github_repository( admin_client_new: APIClient, organisation: Organisation, github_configuration: GithubConfiguration, - github_repository: GithubRepository, + github_repository: GitHubRepository, project: Project, mocker: MockerFixture, mock_github_client_generate_token: MagicMock, @@ -1198,7 +1244,7 @@ def test_update_github_repository( # Then assert response.status_code == status.HTTP_200_OK - assert GithubRepository.objects.filter(repository_owner="repositoryowner").exists() - assert GithubRepository.objects.get( + assert GitHubRepository.objects.filter(repository_owner="repositoryowner").exists() + assert GitHubRepository.objects.get( 
repository_owner="repositoryowner" ).tagging_enabled From ba8ae60d6e3e0b7f5b84501f2c6c47763267e8be Mon Sep 17 00:00:00 2001 From: Zach Aysan Date: Mon, 19 Aug 2024 12:33:44 -0400 Subject: [PATCH 134/247] fix: Add logic to handle grace period breached for paid accounts (#4512) Co-authored-by: Matthew Elwell --- api/organisations/tasks.py | 11 +++- .../test_unit_organisations_tasks.py | 59 +++++++++++++++++++ 2 files changed, 69 insertions(+), 1 deletion(-) diff --git a/api/organisations/tasks.py b/api/organisations/tasks.py index 951924fa13ab..14f7c7331daa 100644 --- a/api/organisations/tasks.py +++ b/api/organisations/tasks.py @@ -206,8 +206,17 @@ def charge_for_api_call_count_overages(): api_usage = get_current_api_usage(organisation.id) # Grace period for organisations < 200% of usage. - if api_usage / subscription_cache.allowed_30d_api_calls < 2.0: + if ( + not hasattr(organisation, "breached_grace_period") + and api_usage / subscription_cache.allowed_30d_api_calls < 2.0 + ): logger.info("API Usage below normal usage or grace period.") + + # Set organisation grace period breach for following months. 
+ if api_usage / subscription_cache.allowed_30d_api_calls > 1.0: + OrganisationBreachedGracePeriod.objects.get_or_create( + organisation=organisation + ) continue api_billings = OrganisationAPIBilling.objects.filter( diff --git a/api/tests/unit/organisations/test_unit_organisations_tasks.py b/api/tests/unit/organisations/test_unit_organisations_tasks.py index b59e4633144f..6ab939aca3f1 100644 --- a/api/tests/unit/organisations/test_unit_organisations_tasks.py +++ b/api/tests/unit/organisations/test_unit_organisations_tasks.py @@ -928,6 +928,65 @@ def test_charge_for_api_call_count_overages_grace_period( # Then mock_chargebee_update.assert_not_called() assert OrganisationAPIBilling.objects.count() == 0 + assert organisation.breached_grace_period + + +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") +def test_charge_for_api_call_count_overages_grace_period_over( + organisation: Organisation, + mocker: MockerFixture, +) -> None: + # Given + now = timezone.now() + OrganisationSubscriptionInformationCache.objects.create( + organisation=organisation, + allowed_seats=10, + allowed_projects=3, + allowed_30d_api_calls=100_000, + chargebee_email="test@example.com", + current_billing_term_starts_at=now - timedelta(days=30), + current_billing_term_ends_at=now + timedelta(minutes=30), + ) + organisation.subscription.subscription_id = "fancy_sub_id23" + organisation.subscription.plan = "scale-up-v2" + organisation.subscription.save() + OrganisationAPIUsageNotification.objects.create( + organisation=organisation, + percent_usage=100, + notified_at=now, + ) + + OrganisationBreachedGracePeriod.objects.create(organisation=organisation) + get_client_mock = mocker.patch("organisations.tasks.get_client") + client_mock = MagicMock() + get_client_mock.return_value = client_mock + client_mock.get_identity_flags.return_value.is_feature_enabled.return_value = True + + mock_chargebee_update = mocker.patch( + "organisations.chargebee.chargebee.chargebee.Subscription.update" + ) + 
mock_api_usage = mocker.patch( + "organisations.tasks.get_current_api_usage", + ) + # Set the return value to something less than 200% of base rate + mock_api_usage.return_value = 115_000 + assert OrganisationAPIBilling.objects.count() == 0 + + # When + charge_for_api_call_count_overages() + + # Then + # Since the OrganisationBreachedGracePeriod was created already + # the charges go through. + mock_chargebee_update.assert_called_once_with( + "fancy_sub_id23", + { + "addons": [{"id": "additional-api-scale-up-monthly", "quantity": 1}], + "prorate": False, + "invoice_immediately": False, + }, + ) + assert OrganisationAPIBilling.objects.count() == 1 @pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") From d273679b3236c3fceccfeb71c401df5a2d69ad27 Mon Sep 17 00:00:00 2001 From: Matthew Elwell Date: Mon, 19 Aug 2024 18:56:44 +0100 Subject: [PATCH 135/247] fix: subscription info cache race condition (#4518) --- api/organisations/models.py | 26 ++++++++++++------ .../test_unit_organisations_tasks.py | 27 ++++++++++++++++++- 2 files changed, 44 insertions(+), 9 deletions(-) diff --git a/api/organisations/models.py b/api/organisations/models.py index 1451cacc30b3..4665ad668742 100644 --- a/api/organisations/models.py +++ b/api/organisations/models.py @@ -303,18 +303,14 @@ def save_as_free_subscription(self): if not getattr(self.organisation, "subscription_information_cache", None): return - # There is a weird bug where the cache is present, but the id is unset. - # See here for more: https://flagsmith.sentry.io/issues/4945988284/ - if not self.organisation.subscription_information_cache.id: - return - - self.organisation.subscription_information_cache.delete() + self.organisation.subscription_information_cache.reset_to_defaults() + self.organisation.subscription_information_cache.save() def prepare_for_cancel( self, cancellation_date=timezone.now(), update_chargebee=True ) -> None: """ - This method get's a subscription ready for cancelation. 
+ This method gets a subscription ready for cancellation. If cancellation_date is in the future some aspects are reserved for a task after the date has passed. @@ -451,7 +447,7 @@ class OrganisationSubscriptionInformationCache(LifecycleModelMixin, models.Model api_calls_7d = models.IntegerField(default=0) api_calls_30d = models.IntegerField(default=0) - allowed_seats = models.IntegerField(default=1) + allowed_seats = models.IntegerField(default=MAX_SEATS_IN_FREE_PLAN) allowed_30d_api_calls = models.IntegerField(default=MAX_API_CALLS_IN_FREE_PLAN) allowed_projects = models.IntegerField(default=1, blank=True, null=True) @@ -461,6 +457,20 @@ class OrganisationSubscriptionInformationCache(LifecycleModelMixin, models.Model def erase_api_notifications(self): self.organisation.api_usage_notifications.all().delete() + def reset_to_defaults(self): + """ + Resets all limits and CB related data to the defaults, leaving the + usage data intact. + """ + self.current_billing_term_starts_at = None + self.current_billing_term_ends_at = None + + self.allowed_seats = MAX_SEATS_IN_FREE_PLAN + self.allowed_30d_api_calls = MAX_API_CALLS_IN_FREE_PLAN + self.allowed_projects = 1 + + self.chargebee_email = None + class OrganisationAPIUsageNotification(models.Model): organisation = models.ForeignKey( diff --git a/api/tests/unit/organisations/test_unit_organisations_tasks.py b/api/tests/unit/organisations/test_unit_organisations_tasks.py index 6ab939aca3f1..d97261447a2f 100644 --- a/api/tests/unit/organisations/test_unit_organisations_tasks.py +++ b/api/tests/unit/organisations/test_unit_organisations_tasks.py @@ -109,6 +109,13 @@ def test_subscription_cancellation(db: None) -> None: organisation = Organisation.objects.create() OrganisationSubscriptionInformationCache.objects.create( organisation=organisation, + allowed_seats=5, + allowed_30d_api_calls=1_000_000, + allowed_projects=None, + api_calls_24h=30_000, + api_calls_7d=210_000, + api_calls_30d=900_000, + 
chargebee_email="foo@example.com", ) UserOrganisation.objects.create( organisation=organisation, @@ -138,7 +145,8 @@ def test_subscription_cancellation(db: None) -> None: # Then organisation.refresh_from_db() subscription.refresh_from_db() - assert getattr(organisation, "subscription_information_cache", None) is None + organisation.subscription_information_cache.refresh_from_db() + assert subscription.subscription_id is None assert subscription.subscription_date is None assert subscription.plan == FREE_PLAN_ID @@ -151,6 +159,23 @@ def test_subscription_cancellation(db: None) -> None: assert subscription.cancellation_date is None assert subscription.notes == notes + # The CB / limit data on the subscription information cache object is reset + assert organisation.subscription_information_cache.chargebee_email is None + assert ( + organisation.subscription_information_cache.allowed_30d_api_calls + == MAX_API_CALLS_IN_FREE_PLAN + ) + assert organisation.subscription_information_cache.allowed_projects == 1 + assert ( + organisation.subscription_information_cache.allowed_seats + == MAX_SEATS_IN_FREE_PLAN + ) + + # But the usage data isn't + assert organisation.subscription_information_cache.api_calls_24h == 30_000 + assert organisation.subscription_information_cache.api_calls_7d == 210_000 + assert organisation.subscription_information_cache.api_calls_30d == 900_000 + @pytest.mark.freeze_time("2023-01-19T09:12:34+00:00") def test_finish_subscription_cancellation(db: None, mocker: MockerFixture) -> None: From 33074f349e24ea06cede711e797d941c0bc042c4 Mon Sep 17 00:00:00 2001 From: Zach Aysan Date: Mon, 19 Aug 2024 14:46:05 -0400 Subject: [PATCH 136/247] fix: Catch API billing errors (#4514) --- api/organisations/tasks.py | 32 +++-- .../test_unit_organisations_tasks.py | 118 ++++++++++++++++++ 2 files changed, 138 insertions(+), 12 deletions(-) diff --git a/api/organisations/tasks.py b/api/organisations/tasks.py index 14f7c7331daa..0b3b9267f7c2 100644 --- 
a/api/organisations/tasks.py +++ b/api/organisations/tasks.py @@ -230,19 +230,27 @@ def charge_for_api_call_count_overages(): logger.info("API Usage below current API limit.") continue - if organisation.subscription.plan in {SCALE_UP, SCALE_UP_V2}: - add_100k_api_calls_scale_up( - organisation.subscription.subscription_id, - math.ceil(api_overage / 100_000), - ) - elif organisation.subscription.plan in {STARTUP, STARTUP_V2}: - add_100k_api_calls_start_up( - organisation.subscription.subscription_id, - math.ceil(api_overage / 100_000), - ) - else: + try: + if organisation.subscription.plan in {SCALE_UP, SCALE_UP_V2}: + add_100k_api_calls_scale_up( + organisation.subscription.subscription_id, + math.ceil(api_overage / 100_000), + ) + elif organisation.subscription.plan in {STARTUP, STARTUP_V2}: + add_100k_api_calls_start_up( + organisation.subscription.subscription_id, + math.ceil(api_overage / 100_000), + ) + else: + logger.error( + f"Unable to bill for API overages for plan `{organisation.subscription.plan}` " + f"for organisation {organisation.id}" + ) + continue + except Exception: logger.error( - f"Unable to bill for API overages for plan `{organisation.subscription.plan}`" + f"Unable to charge organisation {organisation.id} due to billing error", + exc_info=True, ) continue diff --git a/api/tests/unit/organisations/test_unit_organisations_tasks.py b/api/tests/unit/organisations/test_unit_organisations_tasks.py index d97261447a2f..6b0d6231c747 100644 --- a/api/tests/unit/organisations/test_unit_organisations_tasks.py +++ b/api/tests/unit/organisations/test_unit_organisations_tasks.py @@ -1189,6 +1189,124 @@ def test_charge_for_api_call_count_overages_start_up( calls_mock.assert_not_called() +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") +def test_charge_for_api_call_count_overages_non_standard( + organisation: Organisation, + mocker: MockerFixture, + inspecting_handler: logging.Handler, +) -> None: + # Given + now = timezone.now() + + from 
organisations.tasks import logger + + logger.addHandler(inspecting_handler) + + OrganisationSubscriptionInformationCache.objects.create( + organisation=organisation, + allowed_seats=10, + allowed_projects=3, + allowed_30d_api_calls=100_000, + chargebee_email="test@example.com", + current_billing_term_starts_at=now - timedelta(days=30), + current_billing_term_ends_at=now + timedelta(minutes=30), + ) + organisation.subscription.subscription_id = "fancy_sub_id23" + organisation.subscription.plan = "nonstandard-v2" + organisation.subscription.save() + OrganisationAPIUsageNotification.objects.create( + organisation=organisation, + percent_usage=100, + notified_at=now, + ) + + get_client_mock = mocker.patch("organisations.tasks.get_client") + client_mock = MagicMock() + get_client_mock.return_value = client_mock + client_mock.get_identity_flags.return_value.is_feature_enabled.return_value = True + mocker.patch("organisations.chargebee.chargebee.chargebee.Subscription.retrieve") + mock_chargebee_update = mocker.patch( + "organisations.chargebee.chargebee.chargebee.Subscription.update" + ) + + mock_api_usage = mocker.patch( + "organisations.tasks.get_current_api_usage", + ) + mock_api_usage.return_value = 202_005 + + # When + charge_for_api_call_count_overages() + + # Then + mock_chargebee_update.assert_not_called() + assert inspecting_handler.messages == [ + f"Unable to bill for API overages for plan `{organisation.subscription.plan}` " + f"for organisation {organisation.id}" + ] + + assert OrganisationAPIBilling.objects.count() == 0 + + +@pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") +def test_charge_for_api_call_count_overages_with_exception( + organisation: Organisation, + mocker: MockerFixture, + inspecting_handler: logging.Handler, +) -> None: + # Given + now = timezone.now() + + from organisations.tasks import logger + + logger.addHandler(inspecting_handler) + + OrganisationSubscriptionInformationCache.objects.create( + organisation=organisation, + 
allowed_seats=10, + allowed_projects=3, + allowed_30d_api_calls=100_000, + chargebee_email="test@example.com", + current_billing_term_starts_at=now - timedelta(days=30), + current_billing_term_ends_at=now + timedelta(minutes=30), + ) + organisation.subscription.subscription_id = "fancy_sub_id23" + organisation.subscription.plan = "startup-v2" + organisation.subscription.save() + OrganisationAPIUsageNotification.objects.create( + organisation=organisation, + percent_usage=100, + notified_at=now, + ) + + get_client_mock = mocker.patch("organisations.tasks.get_client") + client_mock = MagicMock() + get_client_mock.return_value = client_mock + client_mock.get_identity_flags.return_value.is_feature_enabled.return_value = True + mocker.patch("organisations.chargebee.chargebee.chargebee.Subscription.retrieve") + mock_chargebee_update = mocker.patch( + "organisations.chargebee.chargebee.chargebee.Subscription.update" + ) + + mock_api_usage = mocker.patch( + "organisations.tasks.get_current_api_usage", + ) + mock_api_usage.return_value = 202_005 + mocker.patch( + "organisations.tasks.add_100k_api_calls_start_up", + side_effect=ValueError("An error occurred"), + ) + + # When + charge_for_api_call_count_overages() + + # Then + assert inspecting_handler.messages[0].startswith( + f"Unable to charge organisation {organisation.id} due to billing error" + ) + mock_chargebee_update.assert_not_called() + assert OrganisationAPIBilling.objects.count() == 0 + + @pytest.mark.freeze_time("2023-01-19T09:09:47.325132+00:00") def test_charge_for_api_call_count_overages_start_up_with_api_billing( organisation: Organisation, From 79bae8d41beb60b961ae288998fea66cecb98787 Mon Sep 17 00:00:00 2001 From: Gagan Date: Tue, 20 Aug 2024 14:54:45 +0530 Subject: [PATCH 137/247] deps: bump task processor and workflow (#4519) --- api/poetry.lock | 11 +++++------ api/pyproject.toml | 4 ++-- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git a/api/poetry.lock b/api/poetry.lock index 
c1aea3b00353..87f841237dcc 100644 --- a/api/poetry.lock +++ b/api/poetry.lock @@ -1393,8 +1393,8 @@ simplejson = "~3.19.1" [package.source] type = "git" url = "https://github.com/Flagsmith/flagsmith-task-processor" -reference = "v1.0.1" -resolved_reference = "a61b8212fd579bf08bc9c5201f4a58c3f3888d24" +reference = "v1.0.2" +resolved_reference = "1424d95cbadd18dfc57debbdf0ea2d3d13ea1446" [[package]] name = "freezegun" @@ -3898,13 +3898,12 @@ develop = false [package.dependencies] flagsmith-common = {git = "https://github.com/Flagsmith/flagsmith-common", tag = "v1.0.0"} -flagsmith-task-processor = {git = "https://github.com/Flagsmith/flagsmith-task-processor", tag = "v1.0.1"} [package.source] type = "git" url = "https://github.com/flagsmith/flagsmith-workflows" -reference = "v2.4.2" -resolved_reference = "eae1b033a36992a4423898564dca775e14e73eb8" +reference = "v2.4.3" +resolved_reference = "4416374a8a4f8fc779c6e20ecfb76ee009ed33db" [[package]] name = "wrapt" @@ -4023,4 +4022,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = ">=3.11, <3.13" -content-hash = "3942cbbe67d99afaa19eda8c405ad810bd9d4b6bde880afd32beac70edf6e12f" +content-hash = "a5f910d13738afb4bd290ddd38c61ef315aa3f9940ebe2e69460ba0dc8806343" diff --git a/api/pyproject.toml b/api/pyproject.toml index 749dcdb87de5..a7a798a28454 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -169,7 +169,7 @@ pygithub = "2.1.1" hubspot-api-client = "^8.2.1" djangorestframework-dataclasses = "^1.3.1" pyotp = "^2.9.0" -flagsmith-task-processor = { git = "https://github.com/Flagsmith/flagsmith-task-processor", tag = "v1.0.1" } +flagsmith-task-processor = { git = "https://github.com/Flagsmith/flagsmith-task-processor", tag = "v1.0.2" } flagsmith-common = { git = "https://github.com/Flagsmith/flagsmith-common", tag = "v1.0.0" } [tool.poetry.group.auth-controller] @@ -194,7 +194,7 @@ flagsmith-ldap = { git = "https://github.com/flagsmith/flagsmith-ldap", tag = "v optional = true 
[tool.poetry.group.workflows.dependencies] -workflows-logic = { git = "https://github.com/flagsmith/flagsmith-workflows", tag = "v2.4.2" } +workflows-logic = { git = "https://github.com/flagsmith/flagsmith-workflows", tag = "v2.4.3" } [tool.poetry.group.dev.dependencies] django-test-migrations = "~1.2.0" From 80e6cc5188535caad17c005e33fd7f8d96517264 Mon Sep 17 00:00:00 2001 From: Flagsmith Bot <65724737+flagsmithdev@users.noreply.github.com> Date: Tue, 20 Aug 2024 10:44:13 +0100 Subject: [PATCH 138/247] chore(main): release 2.137.0 (#4487) --- .release-please-manifest.json | 2 +- CHANGELOG.md | 25 +++++++++++++++++++++++++ version.txt | 2 +- 3 files changed, 27 insertions(+), 2 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 04bca1732ea1..9d85dbbb7591 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "2.136.0" + ".": "2.137.0" } \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 3669c6f0b6c5..fa4f31dd2c9c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,30 @@ # Changelog +## [2.137.0](https://github.com/Flagsmith/flagsmith/compare/v2.136.0...v2.137.0) (2024-08-20) + + +### Features + +* make pg usage cache timeout configurable ([#4485](https://github.com/Flagsmith/flagsmith/issues/4485)) ([cd4fbe7](https://github.com/Flagsmith/flagsmith/commit/cd4fbe7bbf27b17a7d6bd1161c9f7c2431ae9a2f)) +* Tweak email wording for grace periods ([#4482](https://github.com/Flagsmith/flagsmith/issues/4482)) ([36e634c](https://github.com/Flagsmith/flagsmith/commit/36e634ca057e6aa55ffed41686a05df0883a1062)) + + +### Bug Fixes + +* Add decorator for running task every hour ([#4481](https://github.com/Flagsmith/flagsmith/issues/4481)) ([a395a47](https://github.com/Flagsmith/flagsmith/commit/a395a470924628f6d239fb72f964a282b61a2e6b)) +* Add logic to handle grace period breached for paid accounts ([#4512](https://github.com/Flagsmith/flagsmith/issues/4512)) 
([ba8ae60](https://github.com/Flagsmith/flagsmith/commit/ba8ae60d6e3e0b7f5b84501f2c6c47763267e8be)) +* add reverse sql to versioning migration ([#4491](https://github.com/Flagsmith/flagsmith/issues/4491)) ([a6a0f91](https://github.com/Flagsmith/flagsmith/commit/a6a0f918394d0f044994d159d4e440c3a9ebcdef)) +* allow unknown attrs from cb json meta ([#4509](https://github.com/Flagsmith/flagsmith/issues/4509)) ([1e3888a](https://github.com/Flagsmith/flagsmith/commit/1e3888aae3f4429089643c478300b8d94e856caf)) +* Catch API billing errors ([#4514](https://github.com/Flagsmith/flagsmith/issues/4514)) ([33074f3](https://github.com/Flagsmith/flagsmith/commit/33074f349e24ea06cede711e797d941c0bc042c4)) +* **delete-feature-via-role:** bump rbac ([#4508](https://github.com/Flagsmith/flagsmith/issues/4508)) ([174d437](https://github.com/Flagsmith/flagsmith/commit/174d437a4a654e9ea34645d86f515fa65eb85660)) +* Make influx cache task recurring ([#4495](https://github.com/Flagsmith/flagsmith/issues/4495)) ([cb8472d](https://github.com/Flagsmith/flagsmith/commit/cb8472d669f50d2dfc3d9837d6a7049840b08a7a)) +* Remove grace period where necessary from blocked notification ([#4496](https://github.com/Flagsmith/flagsmith/issues/4496)) ([9bae21c](https://github.com/Flagsmith/flagsmith/commit/9bae21cdcba0f4d37c8d4838137f8376e3749215)) +* Rename match variable in external feature resources ([#4490](https://github.com/Flagsmith/flagsmith/issues/4490)) ([bf82b9d](https://github.com/Flagsmith/flagsmith/commit/bf82b9d64976e16cc7d598e6f5cd124cafbe30ca)) +* save feature error handling ([#4058](https://github.com/Flagsmith/flagsmith/issues/4058)) ([2517e9d](https://github.com/Flagsmith/flagsmith/commit/2517e9dfd42a58adb69a52050f4e3fc1663bc127)) +* Solve API GitHub integration issues ([#4502](https://github.com/Flagsmith/flagsmith/issues/4502)) ([19bc58e](https://github.com/Flagsmith/flagsmith/commit/19bc58ed8b1f8e689842b3181b6bf266f1a507aa)) +* subscription info cache race condition 
([#4518](https://github.com/Flagsmith/flagsmith/issues/4518)) ([d273679](https://github.com/Flagsmith/flagsmith/commit/d273679b3236c3fceccfeb71c401df5a2d69ad27)) +* **views/features:** use get_environment_flags_list ([#4511](https://github.com/Flagsmith/flagsmith/issues/4511)) ([7034fa4](https://github.com/Flagsmith/flagsmith/commit/7034fa4fbe0f16e0253f11affe68e059fde88a6a)) + ## [2.136.0](https://github.com/Flagsmith/flagsmith/compare/v2.135.1...v2.136.0) (2024-08-13) diff --git a/version.txt b/version.txt index ac8faeddc8a7..2b97695c4831 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -2.136.0 +2.137.0 From 6744f2a8fcfa19678e9085bfae5a979670cc4d8e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 20 Aug 2024 11:15:07 +0100 Subject: [PATCH 139/247] chore(deps): bump axios from 1.6.2 to 1.7.4 in /frontend (#4520) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- frontend/package-lock.json | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 96585659a6f1..607baafb7b2b 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -5398,11 +5398,11 @@ } }, "node_modules/axios": { - "version": "1.6.2", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.2.tgz", - "integrity": "sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A==", + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.4.tgz", + "integrity": "sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw==", "dependencies": { - "follow-redirects": "^1.15.0", + "follow-redirects": "^1.15.6", "form-data": "^4.0.0", "proxy-from-env": "^1.1.0" } @@ -23856,11 +23856,11 @@ "dev": true }, "axios": { - "version": "1.6.2", - "resolved": 
"https://registry.npmjs.org/axios/-/axios-1.6.2.tgz", - "integrity": "sha512-7i24Ri4pmDRfJTR7LDBhsOTtcm+9kjX5WiY1X3wIisx6G9So3pfMkEiU7emUBe46oceVImccTEM3k6C5dbVW8A==", + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.4.tgz", + "integrity": "sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw==", "requires": { - "follow-redirects": "^1.15.0", + "follow-redirects": "^1.15.6", "form-data": "^4.0.0", "proxy-from-env": "^1.1.0" }, From 5cfdababfe043e8767e3147876dd42ce3f79c030 Mon Sep 17 00:00:00 2001 From: Zach Aysan Date: Tue, 20 Aug 2024 06:22:47 -0400 Subject: [PATCH 140/247] fix: Update email wording for paid customers with API usage notifications (#4517) --- .../templates/organisations/api_usage_notification.html | 5 ++--- .../templates/organisations/api_usage_notification.txt | 3 +-- .../organisations/api_usage_notification_limit.html | 5 ++--- .../templates/organisations/api_usage_notification_limit.txt | 2 +- 4 files changed, 6 insertions(+), 9 deletions(-) diff --git a/api/organisations/templates/organisations/api_usage_notification.html b/api/organisations/templates/organisations/api_usage_notification.html index 74deaac7fc7d..bf19da6ae883 100644 --- a/api/organisations/templates/organisations/api_usage_notification.html +++ b/api/organisations/templates/organisations/api_usage_notification.html @@ -14,9 +14,8 @@ current subscription period. {% if organisation.is_paid %} - If this is expected, no action is required. If you are expecting to go over, you can upgrade your - organisation’s account limits by reaching out to support@flagsmith.com. We will automatically charge - for overages after our first grace period of 30 days. + If this is expected, no action is required. If you are expecting to go over, you can upgrade your organisation’s account + limits by reaching out to support@flagsmith.com. We will automatically charge for overages at the end of the pay period. 
For more information check the Terms of Service. {% else %} Please note that once 100% use has been breached, the serving of feature flags and admin access may be disabled{% if grace_period %} after a 7-day grace period{% endif %}. Please reach out to diff --git a/api/organisations/templates/organisations/api_usage_notification.txt b/api/organisations/templates/organisations/api_usage_notification.txt index e02fe3f967c5..714b65893e82 100644 --- a/api/organisations/templates/organisations/api_usage_notification.txt +++ b/api/organisations/templates/organisations/api_usage_notification.txt @@ -5,8 +5,7 @@ reached {{ matched_threshold }}% of your API usage within the current subscripti {% if organisation.is_paid %} If this is expected, no action is required. If you are expecting to go over, you can upgrade your organisation’s account -limits by reaching out to support@flagsmith.com. We will automatically charge for overages after our first grace period -of 30 days. +limits by reaching out to support@flagsmith.com. We will automatically charge for overages at the end of the pay period. For more information check the Terms of Service at https://www.flagsmith.com/terms-of-service. {% else %} Please note that once 100% use has been breached, the serving of feature flags and admin access may be disabled{% if grace_period %} after a 7-day grace period{% endif %}. Please reach out to support@flagsmith.com in order to upgrade your account. diff --git a/api/organisations/templates/organisations/api_usage_notification_limit.html b/api/organisations/templates/organisations/api_usage_notification_limit.html index fa79e1196f96..328256d94c71 100644 --- a/api/organisations/templates/organisations/api_usage_notification_limit.html +++ b/api/organisations/templates/organisations/api_usage_notification_limit.html @@ -14,9 +14,8 @@ current subscription period. {% if organisation.is_paid %} - We will charge for overages after our first grace period of 30 days. 
Please see the pricing page for - more information. You can reach out to support@flagsmith.com if you’d like to take advantage of better - contracted rates. + We will charge for overages at the end of the pay period. Please see the pricing page for more information or check the Terms of Service. + You can reach out to support@flagsmith.com if you’d like to take advantage of better contracted rates. {% else %} Please note that the serving of feature flags and admin access will be disabled{% if grace_period %} after a 7 day grace period{% endif %} until the next subscription period. If you’d like to continue service you can upgrade your diff --git a/api/organisations/templates/organisations/api_usage_notification_limit.txt b/api/organisations/templates/organisations/api_usage_notification_limit.txt index 65c8d1eaa48c..e530f217bea4 100644 --- a/api/organisations/templates/organisations/api_usage_notification_limit.txt +++ b/api/organisations/templates/organisations/api_usage_notification_limit.txt @@ -4,7 +4,7 @@ This is a system generated notification related to your Flagsmith API Usage. You has reached {{ matched_threshold }}% of your API usage within the current subscription period. {% if organisation.is_paid %} -We will charge for overages after our first grace period of 30 days. Please see the pricing page for more information. +We will charge for overages at the end of the pay period. Please see the pricing page for more information or check the Terms of Service at https://www.flagsmith.com/terms-of-service. You can reach out to support@flagsmith.com if you’d like to take advantage of better contracted rates. 
{% else %} Please note that the serving of feature flags and admin access will be disabled{% if grace_period %} after a 7 day From 3f561ee4591349381c63d82c2e67b92fe6cabc40 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rodrigo=20L=C3=B3pez=20Dato?= Date: Tue, 20 Aug 2024 10:30:50 -0300 Subject: [PATCH 141/247] feat: Copy ACS URL for SAML configurations to clipboard. Disable editing SAML configuration names (#4494) Co-authored-by: kyle-ssg --- .../authentication/01-SAML/index.md | 24 ++++---- .../components/SAMLAttributeMappingTable.tsx | 4 +- frontend/web/components/SamlTab.tsx | 30 ++++++---- frontend/web/components/modals/CreateSAML.tsx | 59 +++++++++++++++---- 4 files changed, 83 insertions(+), 34 deletions(-) diff --git a/docs/docs/system-administration/authentication/01-SAML/index.md b/docs/docs/system-administration/authentication/01-SAML/index.md index 5cc0105014ab..334391eb575f 100644 --- a/docs/docs/system-administration/authentication/01-SAML/index.md +++ b/docs/docs/system-administration/authentication/01-SAML/index.md @@ -15,19 +15,22 @@ SAML tab, you'll be able to configure it. In the UI, you will be able to configure the following fields. -**Name:** (**Required**) A short name for the organisation, used as the input when clicking "Single Sign-on" at login -(note this is unique across all tenants and will form part of the URL so should only be alphanumeric + '-,\_'). +**Name:** (**Required**) A short name for the organisation, used as the input when clicking "Single Sign-On" at login. +This name must be unique across all Flagsmith organisations and forms part of the URL that your identity provider will +post SAML messages to during authentication. -**Frontend URL**: (**Required**) This should be the base URL of the Flagsmith dashboard. +**Frontend URL**: (**Required**) This should be the base URL of the Flagsmith dashboard. Users will be redirected here +after authenticating successfully. 
-**Allow IdP initiated**: This field determines whether logins can be initiated from the IdP. +**Allow IdP-initiated**: If enabled, users will be able to log in directly from your identity provider without needing +to visit the Flagsmith login page. -**IdP metadata xml**: The metadata from the IdP. +**IdP metadata XML**: The metadata from your identity provider. Once you have configured your identity provider, you can download the service provider metadata XML document with the button "Download Service Provider Metadata". -### Assertion Consumer Service URL +### Assertion consumer service URL The assertion consumer service (ACS) URL, also known as single sign-on URL, for this SAML configuration will be at the following path, replacing `flagsmith.example.com` with your Flagsmith API's domain: @@ -66,12 +69,11 @@ Flagsmith also maps user attributes from the following claims in the SAML assert | Flagsmith attribute | IdP claims | | ------------------- | ---------------------------------------------------- | -| `email` | `mail`, `email` or `emailAddress` | -| `first_name` | `gn`, `givenName` or the first part of `displayName` | -| `last_name` | `sn`, `surname` or the second part of `displayName` | +| Email | `mail`, `email` or `emailAddress` | +| First name | `gn`, `givenName` or the first part of `displayName` | +| Last name | `sn`, `surname` or the second part of `displayName` | -You can override these mappings by adding the corresponding IdP attribute names to your SAML configuration from the -Django admin interface. +To add custom attribute mappings, edit your SAML configuration and open the Attribute Mappings tab. 
## Permissions for SAML users diff --git a/frontend/web/components/SAMLAttributeMappingTable.tsx b/frontend/web/components/SAMLAttributeMappingTable.tsx index 14151eb89caf..6d13ee477b30 100644 --- a/frontend/web/components/SAMLAttributeMappingTable.tsx +++ b/frontend/web/components/SAMLAttributeMappingTable.tsx @@ -37,11 +37,11 @@ const SAMLAttributeMappingTable: FC = ({ header={ -
    SAML Attribute Name
    +
    SAML attribute name
    - IDP Attribute Name + IdP attribute name
    diff --git a/frontend/web/components/SamlTab.tsx b/frontend/web/components/SamlTab.tsx index a456f77585a1..2d29e228afd8 100644 --- a/frontend/web/components/SamlTab.tsx +++ b/frontend/web/components/SamlTab.tsx @@ -13,10 +13,11 @@ import CreateSAML from './modals/CreateSAML' import Switch from './Switch' import { SAMLConfiguration } from 'common/types/responses' import PlanBasedBanner from './PlanBasedAccess' - + export type SamlTabType = { organisationId: number } + const SamlTab: FC = ({ organisationId }) => { const { data } = useGetSamlConfigurationsQuery({ organisation_id: organisationId, @@ -37,7 +38,7 @@ const SamlTab: FC = ({ organisationId }) => { return ( = ({ organisationId }) => { openCreateSAML('Create SAML configuration', organisationId) }} > - {'Create a SAML Configuration'} + {'Create a SAML configuration'} } /> @@ -62,11 +63,17 @@ const SamlTab: FC = ({ organisationId }) => { } header={ - -
    SAML Name
    + +
    Configuration name
    -
    - Allow IDP Initiated +
    + Allow IdP-initiated +
    +
    + Action
    } @@ -81,16 +88,19 @@ const SamlTab: FC = ({ organisationId }) => { ) }} space - className='list-item clickable cursor-pointer' + className='list-item py-2 py-md-0 clickable cursor-pointer' key={samlConf.name} >
    {samlConf.name}
    -
    +
    -
    +
    +
    +
    + )}
    {isEdit && ( {% endfor %} diff --git a/api/tests/unit/custom_auth/test_unit_custom_auth_views.py b/api/tests/unit/custom_auth/test_unit_custom_auth_views.py new file mode 100644 index 000000000000..09b6d45de632 --- /dev/null +++ b/api/tests/unit/custom_auth/test_unit_custom_auth_views.py @@ -0,0 +1,22 @@ +from django.urls import reverse +from rest_framework import status +from rest_framework.test import APIClient + +from users.models import FFAdminUser + + +def test_get_current_user(staff_user: FFAdminUser, staff_client: APIClient) -> None: + # Given + url = reverse("api-v1:custom_auth:ffadminuser-me") + + # When + response = staff_client.get(url) + + # Then + assert response.status_code == status.HTTP_200_OK + + response_json = response.json() + assert response_json["email"] == staff_user.email + assert response_json["first_name"] == staff_user.first_name + assert response_json["last_name"] == staff_user.last_name + assert response_json["uuid"] == str(staff_user.uuid) diff --git a/api/tests/unit/features/test_unit_features_views.py b/api/tests/unit/features/test_unit_features_views.py index b5b631efe007..092d9b7c08a1 100644 --- a/api/tests/unit/features/test_unit_features_views.py +++ b/api/tests/unit/features/test_unit_features_views.py @@ -140,6 +140,7 @@ def test_remove_owners_only_remove_specified_owners( "first_name": user_3.first_name, "last_name": user_3.last_name, "last_login": None, + "uuid": mock.ANY, } @@ -1565,6 +1566,7 @@ def test_add_owners_adds_owner( "first_name": staff_user.first_name, "last_name": staff_user.last_name, "last_login": None, + "uuid": mock.ANY, } assert json_response["owners"][1] == { "id": admin_user.id, @@ -1572,6 +1574,7 @@ def test_add_owners_adds_owner( "first_name": admin_user.first_name, "last_name": admin_user.last_name, "last_login": None, + "uuid": mock.ANY, } diff --git a/api/users/admin.py b/api/users/admin.py index 85663231e8fd..4ce68597736a 100644 --- a/api/users/admin.py +++ b/api/users/admin.py @@ -44,6 +44,7 @@ 
class CustomUserAdmin(UserAdmin): "is_staff", "is_active", "date_joined", + "uuid", ) list_filter = ( @@ -57,6 +58,7 @@ class CustomUserAdmin(UserAdmin): "username", "first_name", "last_name", + "uuid", ) inlines = [UserOrganisationInline] diff --git a/api/users/migrations/0037_add_uuid_field_to_user_model.py b/api/users/migrations/0037_add_uuid_field_to_user_model.py new file mode 100644 index 000000000000..f5eb30f549e8 --- /dev/null +++ b/api/users/migrations/0037_add_uuid_field_to_user_model.py @@ -0,0 +1,37 @@ +# Generated by Django 3.2.25 on 2024-08-12 14:21 +from django.apps.registry import Apps +from django.db import migrations, models +import uuid + +from django.db.backends.base.schema import BaseDatabaseSchemaEditor + + +def set_default_uuids(apps: Apps, schema_editor: BaseDatabaseSchemaEditor) -> None: + user_model = apps.get_model("users", "FFAdminUser") + + users = list(user_model.objects.all()) + for user in users: + user.uuid = uuid.uuid4() + + user_model.objects.bulk_update(users, fields=["uuid"]) + + +class Migration(migrations.Migration): + + dependencies = [ + ('users', '0036_create_hubspot_lead'), + ] + + operations = [ + migrations.AddField( + model_name='ffadminuser', + name='uuid', + field=models.UUIDField(default=uuid.uuid4), + ), + migrations.RunPython(set_default_uuids, reverse_code=migrations.RunPython.noop), + migrations.AlterField( + model_name='ffadminuser', + name='uuid', + field=models.UUIDField(default=uuid.uuid4, editable=False, unique=True), + ), + ] diff --git a/api/users/models.py b/api/users/models.py index e2e272c3ba3a..3c458e3b0e94 100644 --- a/api/users/models.py +++ b/api/users/models.py @@ -1,5 +1,6 @@ import logging import typing +import uuid from datetime import timedelta from django.conf import settings @@ -112,6 +113,8 @@ class FFAdminUser(LifecycleModel, AbstractUser): choices=SignUpType.choices, max_length=100, blank=True, null=True ) + uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True) + 
USERNAME_FIELD = "email" REQUIRED_FIELDS = ["first_name", "last_name", "sign_up_type"] diff --git a/api/users/serializers.py b/api/users/serializers.py index d7fef9200201..858ae491a2ba 100644 --- a/api/users/serializers.py +++ b/api/users/serializers.py @@ -44,7 +44,7 @@ class UserFullSerializer(serializers.ModelSerializer): class Meta: model = FFAdminUser - fields = ("id", "email", "first_name", "last_name", "organisations") + fields = ("id", "email", "first_name", "last_name", "organisations", "uuid") class UserLoginSerializer(serializers.ModelSerializer): @@ -57,7 +57,7 @@ class UserListSerializer(serializers.ModelSerializer): role = serializers.SerializerMethodField(read_only=True) join_date = serializers.SerializerMethodField(read_only=True) - default_fields = ("id", "email", "first_name", "last_name", "last_login") + default_fields = ("id", "email", "first_name", "last_name", "last_login", "uuid") organisation_users_fields = ( "role", "date_joined", @@ -134,12 +134,14 @@ class UserPermissionGroupSerializerDetail(UserPermissionGroupSerializer): class CustomCurrentUserSerializer(DjoserUserSerializer): auth_type = serializers.CharField(read_only=True) is_superuser = serializers.BooleanField(read_only=True) + uuid = serializers.UUIDField(read_only=True) class Meta(DjoserUserSerializer.Meta): fields = DjoserUserSerializer.Meta.fields + ( "auth_type", "is_superuser", "date_joined", + "uuid", ) From 910b3ed11f5ffe3767d967228664018578be4ed7 Mon Sep 17 00:00:00 2001 From: Matthew Elwell Date: Wed, 21 Aug 2024 18:01:51 +0100 Subject: [PATCH 147/247] fix: usage and analytics data duplicates the current day (#4529) --- api/app_analytics/influxdb_wrapper.py | 6 +++--- api/organisations/views.py | 4 ++-- .../test_unit_app_analytics_influxdb_wrapper.py | 6 +++--- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/api/app_analytics/influxdb_wrapper.py b/api/app_analytics/influxdb_wrapper.py index 4a61ece6ee17..9ae505e4d2c8 100644 --- 
a/api/app_analytics/influxdb_wrapper.py +++ b/api/app_analytics/influxdb_wrapper.py @@ -186,7 +186,7 @@ def get_event_list_for_organisation( results = InfluxDBWrapper.influx_query_manager( filters=f'|> filter(fn:(r) => r._measurement == "api_call") \ |> filter(fn: (r) => r["organisation_id"] == "{organisation_id}")', - extra="|> aggregateWindow(every: 24h, fn: sum)", + extra='|> aggregateWindow(every: 24h, fn: sum, timeSrc: "_start")', date_start=date_start, date_stop=date_stop, ) @@ -241,7 +241,7 @@ def get_multiple_event_list_for_organisation( date_start=date_start, date_stop=date_stop, filters=build_filter_string(filters), - extra="|> aggregateWindow(every: 24h, fn: sum)", + extra='|> aggregateWindow(every: 24h, fn: sum, timeSrc: "_start")', ) if not results: return results @@ -319,7 +319,7 @@ def get_multiple_event_list_for_feature( |> filter(fn: (r) => r["_field"] == "request_count") \ |> filter(fn: (r) => r["environment_id"] == "{environment_id}") \ |> filter(fn: (r) => r["feature_id"] == "{feature_name}")', - extra=f'|> aggregateWindow(every: {aggregate_every}, fn: sum, createEmpty: false) \ + extra=f'|> aggregateWindow(every: {aggregate_every}, fn: sum, createEmpty: false, timeSrc: "_start") \ |> yield(name: "sum")', ) if not results: diff --git a/api/organisations/views.py b/api/organisations/views.py index 5fb5ca85a4fd..aeb63359881f 100644 --- a/api/organisations/views.py +++ b/api/organisations/views.py @@ -146,7 +146,7 @@ def remove_users(self, request, pk): @swagger_auto_schema( deprecated=True, - operation_description="Please use ​​/api​/v1​/organisations​/{organisation_pk}​/usage-data​/total-count​/", + operation_description="Please use /api/v1/organisations/{organisation_pk}/usage-data/total-count/", ) @action( detail=True, @@ -221,7 +221,7 @@ def get_hosted_page_url_for_subscription_upgrade(self, request, pk): @swagger_auto_schema( deprecated=True, - operation_description="Please use ​​/api​/v1​/organisations​/{organisation_pk}​/usage-data​/", + 
operation_description="Please use /api/v1/organisations/{organisation_pk}/usage-data/", query_serializer=InfluxDataQuerySerializer(), ) @action(detail=True, methods=["GET"], url_path="influx-data") diff --git a/api/tests/unit/app_analytics/test_unit_app_analytics_influxdb_wrapper.py b/api/tests/unit/app_analytics/test_unit_app_analytics_influxdb_wrapper.py index d82b6f162978..48d6ea04b534 100644 --- a/api/tests/unit/app_analytics/test_unit_app_analytics_influxdb_wrapper.py +++ b/api/tests/unit/app_analytics/test_unit_app_analytics_influxdb_wrapper.py @@ -131,7 +131,7 @@ def test_influx_db_query_when_get_events_list_then_query_api_called(monkeypatch) f'|> filter(fn: (r) => r["organisation_id"] == "{org_id}") ' f'|> drop(columns: ["organisation", "organisation_id", "type", "project", ' f'"project_id", "environment", "environment_id", "host"])' - f"|> aggregateWindow(every: 24h, fn: sum)" + f'|> aggregateWindow(every: 24h, fn: sum, timeSrc: "_start")' ) mock_influxdb_client = mock.MagicMock() monkeypatch.setattr( @@ -198,7 +198,7 @@ def test_influx_db_query_when_get_multiple_events_for_organisation_then_query_ap f"{build_filter_string(expected_filters)}" '|> drop(columns: ["organisation", "organisation_id", "type", "project", ' '"project_id", "environment", "environment_id", "host"]) ' - "|> aggregateWindow(every: 24h, fn: sum)" + '|> aggregateWindow(every: 24h, fn: sum, timeSrc: "_start")' ) .replace(" ", "") .replace("\n", "") @@ -237,7 +237,7 @@ def test_influx_db_query_when_get_multiple_events_for_feature_then_query_api_cal f'|> filter(fn: (r) => r["feature_id"] == "{feature_name}") ' '|> drop(columns: ["organisation", "organisation_id", "type", "project", ' '"project_id", "environment", "environment_id", "host"])' - "|> aggregateWindow(every: 24h, fn: sum, createEmpty: false) " + '|> aggregateWindow(every: 24h, fn: sum, createEmpty: false, timeSrc: "_start") ' '|> yield(name: "sum")' ) From 968b894f779f72c1a227acd660a3dfb06735a935 Mon Sep 17 00:00:00 2001 From: 
Kyle Johnson Date: Wed, 21 Aug 2024 18:02:38 +0100 Subject: [PATCH 148/247] feat: usage period filter (#4526) --- frontend/common/types/requests.ts | 16 ++ frontend/web/components/App.js | 20 --- frontend/web/components/OrganisationUsage.tsx | 154 +++++++++++++----- 3 files changed, 131 insertions(+), 59 deletions(-) diff --git a/frontend/common/types/requests.ts b/frontend/common/types/requests.ts index b97b023de8c4..c4e5935b0d21 100644 --- a/frontend/common/types/requests.ts +++ b/frontend/common/types/requests.ts @@ -22,6 +22,22 @@ export type PagedRequest = T & { } export type OAuthType = 'github' | 'saml' | 'google' export type PermissionLevel = 'organisation' | 'project' | 'environment' +export const billingPeriods = [ + { + label: 'Current billing period', + value: 'current_billing_period', + }, + { + label: 'Previous billing period', + value: 'previous_billing_period', + }, + { label: 'Last 90 days', value: '90_day_period' }, + { label: 'Last 30 days', value: undefined }, +] +export const freePeriods = [ + { label: 'Last 90 days', value: '90_day_period' }, + { label: 'Last 30 days', value: undefined }, +] export type CreateVersionFeatureState = { environmentId: number featureId: number diff --git a/frontend/web/components/App.js b/frontend/web/components/App.js index 26e352925e67..39e550569a38 100644 --- a/frontend/web/components/App.js +++ b/frontend/web/components/App.js @@ -16,11 +16,9 @@ import { Provider } from 'react-redux' import { getStore } from 'common/store' import { resolveAuthFlow } from '@datadog/ui-extensions-sdk' import ConfigProvider from 'common/providers/ConfigProvider' -import { getOrganisationUsage } from 'common/services/useOrganisationUsage' import Button from './base/forms/Button' import Icon from './Icon' import AccountStore from 'common/stores/account-store' -import InfoMessage from './InfoMessage' import OrganisationLimit from './OrganisationLimit' import GithubStar from './GithubStar' import Tooltip from './Tooltip' @@ -49,7 
+47,6 @@ const App = class extends Component { } state = { - activeOrganisation: 0, asideIsVisible: !isMobile, lastEnvironmentId: '', lastProjectId: '', @@ -102,11 +99,9 @@ const App = class extends Component { } this.listenTo(OrganisationStore, 'change', () => this.forceUpdate()) this.listenTo(ProjectStore, 'change', () => this.forceUpdate()) - this.listenTo(AccountStore, 'change', this.getOrganisationUsage) if (AccountStore.model) { this.onLogin() } - this.getOrganisationUsage() window.addEventListener('scroll', this.handleScroll) const updateLastViewed = () => { AsyncStorage.getItem('lastEnv').then((res) => { @@ -123,21 +118,6 @@ const App = class extends Component { updateLastViewed() } - getOrganisationUsage = () => { - if ( - AccountStore.getOrganisation()?.id && - this.state.activeOrganisation !== AccountStore.getOrganisation().id - ) { - getOrganisationUsage(getStore(), { - organisationId: AccountStore.getOrganisation()?.id, - }).then((res) => { - this.setState({ - activeOrganisation: AccountStore.getOrganisation().id, - }) - }) - } - } - toggleDarkMode = () => { const newValue = !Utils.getFlagsmithHasFeature('dark_mode') flagsmith.setTrait('dark_mode', newValue) diff --git a/frontend/web/components/OrganisationUsage.tsx b/frontend/web/components/OrganisationUsage.tsx index 6f07a65937bd..10f3af853344 100644 --- a/frontend/web/components/OrganisationUsage.tsx +++ b/frontend/web/components/OrganisationUsage.tsx @@ -1,4 +1,4 @@ -import Utils from 'common/utils/utils' +import Utils, { planNames } from 'common/utils/utils' import React, { FC, useState } from 'react' import { Bar, @@ -19,6 +19,10 @@ import { ValueType, } from 'recharts/types/component/DefaultTooltipContent' import InfoMessage from './InfoMessage' +import { IonIcon } from '@ionic/react' +import { checkmarkSharp } from 'ionicons/icons' +import AccountStore from 'common/stores/account-store' +import { billingPeriods, freePeriods, Req } from 'common/types/requests' type OrganisationUsageType = { 
organisationId: string @@ -26,19 +30,28 @@ type OrganisationUsageType = { type LegendItemType = { title: string value: number + selection: string[] + onChange: (v: string) => void colour?: string } -const LegendItem: FC = ({ colour, title, value }) => { +const LegendItem: FC = ({ + colour, + onChange, + selection, + title, + value, +}) => { if (!value) { return null } return (

    {Utils.numberWithCommas(value)}

    - + onChange(title)}> {!!colour && ( = ({ colour, title, value }) => { height: 16, width: 16, }} - /> + > + {selection.includes(title) && ( + + )} + )} {title} @@ -57,9 +74,14 @@ const LegendItem: FC = ({ colour, title, value }) => { const OrganisationUsage: FC = ({ organisationId }) => { const [project, setProject] = useState() const [environment, setEnvironment] = useState() + const currentPlan = Utils.getPlanName(AccountStore.getActiveOrgPlan()) + const [billingPeriod, setBillingPeriod] = useState< + Req['getOrganisationUsage']['billing_period'] + >(currentPlan === planNames.free ? '90_day_period' : 'current_billing_period') const { data } = useGetOrganisationUsageQuery( { + billing_period: billingPeriod, environmentId: environment, organisationId, projectId: project, @@ -67,10 +89,33 @@ const OrganisationUsage: FC = ({ organisationId }) => { { skip: !organisationId }, ) const colours = ['#0AADDF', '#27AB95', '#FF9F43', '#EF4D56'] + const [selection, setSelection] = useState([ + 'Flags', + 'Identities', + 'Environment Document', + 'Traits', + ]) + const updateSelection = (key) => { + if (selection.includes(key)) { + setSelection(selection.filter((v) => v !== key)) + } else { + setSelection(selection.concat([key])) + } + } return data?.totals ? ( -
    -
    +
    +
    + + setExternalResourceType(v.label)} - options={githubTypes.map((e) => { - return { label: e.label, value: e.id } - })} - /> - - - - {externalResourceType && ( - setFeatureExternalResource(v)} - repoOwner={repoOwner} - repoName={repoName} - githubResource={ - (externalResourceType && - ( - _.find(_.values(Constants.resourceTypes), { - label: externalResourceType!, - }) as any - ).resourceType) || - '' - } - > - setFeatureExternalResource(v)} - lastSavedResource={lastSavedResource} - /> - - )} - -
    - -
    -
    -
    - ) -} -const ExternalResourcesLinkTab: FC = ({ - environmentId, - featureId, - githubId, - organisationId, - projectId, -}) => { - const [repoName, setRepoName] = useState('') - const [repoOwner, setRepoOwner] = useState('') + const [createExternalResource] = useCreateExternalResourceMutation() + const [resourceType, setResourceType] = useState(githubTypes[0].resourceType) const [selectedResources, setSelectedResources] = useState() + const addResource = (featureExternalResource: GithubResource) => { + const type = Object.keys(Constants.resourceTypes).find( + (key: string) => + Constants.resourceTypes[key as keyof typeof Constants.resourceTypes] + .resourceType === resourceType, + ) + createExternalResource({ + body: { + feature: parseInt(featureId), + metadata: { + 'draft': featureExternalResource.draft, + 'merged': featureExternalResource.merged, + 'state': featureExternalResource.state, + 'title': featureExternalResource.title, + }, + type: type, + url: featureExternalResource.html_url, + }, + feature_id: featureId, + project_id: projectId, + }).then(() => { + toast('External Resource Added') + AppActions.refreshFeatures(parseInt(projectId), environmentId) + }) + } return ( <> -
    GitHub Issues and Pull Requests linked
    + v.url!)} + orgId={organisationId} + /> setSelectedResources(r) } /> - - - { - const repoData = v.split('/') - setRepoName(repoData[0]) - setRepoOwner(repoData[1]) - }} - /> - {repoName && repoOwner && ( - - )} - ) } diff --git a/frontend/web/components/ExternalResourcesTable.tsx b/frontend/web/components/ExternalResourcesTable.tsx index e5b9b5ab9cff..9ea9964bc320 100644 --- a/frontend/web/components/ExternalResourcesTable.tsx +++ b/frontend/web/components/ExternalResourcesTable.tsx @@ -14,8 +14,6 @@ export type ExternalResourcesTableBase = { featureId: string projectId: string organisationId: string - repoName: string - repoOwner: string } export type ExternalResourcesTableType = ExternalResourcesTableBase & { @@ -177,7 +175,7 @@ const ExternalResourcesTable: FC = ({ /> )} renderNoResults={ - + You have no external resources linked for this feature. } diff --git a/frontend/web/components/GitHubResourceSelectProvider.tsx b/frontend/web/components/GitHubResourceSelectProvider.tsx deleted file mode 100644 index 9bf3109abb58..000000000000 --- a/frontend/web/components/GitHubResourceSelectProvider.tsx +++ /dev/null @@ -1,106 +0,0 @@ -import { createContext, useContext, FC, useEffect, useState } from 'react' -import { useGetGithubResourcesQuery } from 'common/services/useGithub' -import { ExternalResource, GithubResource, Res } from 'common/types/responses' -import useInfiniteScroll from 'common/useInfiniteScroll' -import { Req } from 'common/types/requests' - -type GitHubResourceSelectProviderType = { - children: React.ReactNode - githubResource: string - lastSavedResource: string | undefined - linkedExternalResources: ExternalResource[] - onChange: (v: string) => void - orgId: string - repoOwner: string - repoName: string -} - -type GitHubResourceSelectContextType = { - count: number - githubResources?: GithubResource[] - isFetching: boolean - isLoading: boolean - loadMore: () => void - loadingCombinedData: boolean - nextPage?: string - searchItems: (search: 
string) => void - refresh: () => void -} - -const GitHubResourceSelectContext = createContext< - GitHubResourceSelectContextType | undefined ->(undefined) - -export const GitHubResourceSelectProvider: FC< - GitHubResourceSelectProviderType -> = ({ children, ...props }) => { - const [externalResourcesSelect, setExternalResourcesSelect] = - useState() - - const throttleDelay = 300 - - const { - data, - isFetching, - isLoading, - loadMore, - loadingCombinedData, - refresh, - searchItems, - } = useInfiniteScroll( - useGetGithubResourcesQuery, - { - github_resource: props.githubResource, - organisation_id: props.orgId, - page_size: 100, - repo_name: props.repoName, - repo_owner: props.repoOwner, - }, - throttleDelay, - ) - - const { count, next, results } = data || { results: [] } - - useEffect(() => { - if (results && props.linkedExternalResources) { - setExternalResourcesSelect( - results.filter((i: GithubResource) => { - const same = props.linkedExternalResources?.some( - (r) => i.html_url === r.url, - ) - return !same - }), - ) - } - // eslint-disable-next-line react-hooks/exhaustive-deps - }, [data, props.linkedExternalResources]) - - return ( - - {children} - - ) -} - -export const useGitHubResourceSelectProvider = () => { - const context = useContext(GitHubResourceSelectContext) - if (!context) { - throw new Error( - 'useGitHubResourceSelect must be used within a GitHubResourceSelectProvider', - ) - } - return context -} diff --git a/frontend/web/components/GitHubResourcesSelect.tsx b/frontend/web/components/GitHubResourcesSelect.tsx index 8b814ec62fb3..1ad4dc921f63 100644 --- a/frontend/web/components/GitHubResourcesSelect.tsx +++ b/frontend/web/components/GitHubResourcesSelect.tsx @@ -1,195 +1,136 @@ -import React, { FC, useEffect, useRef, useState } from 'react' -import { GithubResource } from 'common/types/responses' +import React, { FC, useEffect, useMemo, useState } from 'react' +import { ExternalResource, GithubResource, Res } from 'common/types/responses' 
import Utils from 'common/utils/utils' -import { FixedSizeList } from 'react-window' -import InfiniteLoader from 'react-window-infinite-loader' -import { useGitHubResourceSelectProvider } from './GitHubResourceSelectProvider' -import { components } from 'react-select' -import Button from './base/forms/Button' -import Icon from './Icon' -import Select from 'react-select' - -type MenuListType = { - children: React.ReactNode - searchText: (v: string) => void - selectProps: any - width: number - [key: string]: any -} - -const MenuList: FC = ({ - children, - searchText, - selectProps, - width, - ...rest -}) => { - const infiniteLoaderRef = useRef(null) - const hasMountedRef = useRef(false) - const childrenArray = React.Children.toArray(children) - const { - isFetching, - isLoading: parentIsLoading, - loadMore, - loadingCombinedData, - nextPage, - } = useGitHubResourceSelectProvider() - const [isLoading, setIsLoading] = useState(parentIsLoading) - const loadMoreItems = - isFetching || isLoading || !nextPage ? () => {} : loadMore - const isItemLoaded = (index: number) => childrenArray[index] !== undefined - const itemCount = childrenArray.length + (nextPage ? 1 : 0) - const itemSize = 60 - - const moreItems = async ( - startIndex: number, - stopIndex: number, - ): Promise => { - return loadMoreItems() - } - - useEffect(() => { - setIsLoading(parentIsLoading || loadingCombinedData) - }, [parentIsLoading, loadingCombinedData]) - - useEffect(() => { - // Reset cached items when "searchText" changes. - if (hasMountedRef.current) { - if (infiniteLoaderRef.current) { - if (loadingCombinedData) { - ;( - infiniteLoaderRef.current as InfiniteLoader - ).resetloadMoreItemsCache() - } - } - } - hasMountedRef.current = true - }, [searchText, loadingCombinedData]) - - return isLoading ? ( -
    Loading...
    - ) : !itemCount ? ( -
    No results found
    - ) : ( - - {({ onItemsRendered, ref }) => ( - - {({ index, isScrolling, style, ...rest }) => { - const child = childrenArray[index] - return ( -
    - {isItemLoaded(index) ? ( - child - ) : ( -
    - -
    - )} -
    - ) - }} -
    - )} -
    - ) -} - -const CustomControl = ({ - children, - ...props -}: { - children: React.ReactNode -}) => { - const { refresh } = useGitHubResourceSelectProvider() - return ( - - {children} - - - ) -} +import useInfiniteScroll from 'common/useInfiniteScroll' +import { Req } from 'common/types/requests' +import { useGetGithubResourcesQuery } from 'common/services/useGithub' +import Constants from 'common/constants' +import MyRepositoriesSelect from './MyRepositoriesSelect' export type GitHubResourcesSelectType = { onChange: (value: string) => void - lastSavedResource: string | undefined + linkedExternalResources: ExternalResource[] | undefined + orgId: string + githubId: string + resourceType: string + value: string[] | undefined // an array of resource URLs + setResourceType: (value: string) => void } type GitHubResourcesValueType = { value: string } - const GitHubResourcesSelect: FC = ({ - lastSavedResource, + githubId, onChange, + orgId, + resourceType, + setResourceType, + value, }) => { - const { githubResources, isFetching, isLoading, searchItems } = - useGitHubResourceSelectProvider() + const githubTypes = Object.values(Constants.resourceTypes).filter( + (v) => v.type === 'GITHUB', + ) + const [repo, setRepo] = useState('') + const { repoName, repoOwner } = useMemo(() => { + if (!repo) { + return {} + } + const [repoName, repoOwner] = repo.split('/') + return { repoName, repoOwner } + }, [repo]) + + const { data, isFetching, isLoading, searchItems } = useInfiniteScroll< + Req['getGithubResources'], + Res['githubResources'] + >( + useGetGithubResourcesQuery, + { + github_resource: resourceType, + organisation_id: orgId, + page_size: 100, + repo_name: `${repoName}`, + repo_owner: `${repoOwner}`, + }, + 100, + { skip: !resourceType || !orgId || !repoName || !repoOwner }, + ) + const [selectedOption, setSelectedOption] = useState(null) const [searchText, setSearchText] = React.useState('') - useEffect(() => { - if (selectedOption && selectedOption.value === 
lastSavedResource) { - setSelectedOption(null) - } - }, [lastSavedResource, selectedOption]) - return ( -
    - v.resourceType === resourceType)} + onChange={(v: { resourceType: string }) => + setResourceType(v.resourceType) + } + options={githubTypes.map((e) => { + return { + label: e.label, + resourceType: e.resourceType, + value: e.id, + } + })} + /> +
    +
    + {!!repoName && !!repoOwner && ( +
    + onChange(v?.value)} - options={repositories?.map((i: GithubRepository) => { - return { - label: `${i.repository_name} - ${i.repository_owner}`, - value: `${i.repository_name}/${i.repository_owner}`, - } - })} - /> -
    + - {!v.id || v.is_feature_specific ? ( - - ) : ( - - )} + } else { + window.open( + `${document.location.origin}/project/${this.props.projectId}/segments?id=${v.segment}`, + '_blank', + ) + } + }} + canEdit={permission} + /> , ) } - {!readOnly && ( - - )}
    - -
    +
    +
    +
    + + Enabled +
    + } + > + Controls whether the feature is enabled for users belonging to + this segment. + +
    + { + if (!readOnly) { + this.setState({ changed: true }) + toggle(v) + } + }} + /> +
    {showValue ? ( <> - - { - this.setState({ changed: true }) - setValue( - Utils.getTypedValue(Utils.safeParseEventValue(e)), - ) - } - } - placeholder="Value e.g. 'big' " - /> +
    + + { + this.setState({ changed: true }) + setValue( + Utils.getTypedValue(Utils.safeParseEventValue(e)), + ) + } + } + placeholder="Value e.g. 'big' " + /> +
    ) : ( - <> +
    - - )} - {!!controlValue && - (!multivariateOptions || !multivariateOptions.length) && ( -
    - -
    - )} - - {!!multivariateOptions?.length && ( -
    - - { - const foundMv = - v.multivariate_options && - v.multivariate_options.find( - (v) => v.multivariate_feature_option === mv.id, - ) - if (foundMv) { - return { - ...mv, - default_percentage_allocation: - foundMv.percentage_allocation, - } - } - return { - ...mv, - default_percentage_allocation: 0, - } - })} - setVariations={(i, e, variationOverrides) => { - setVariations(i, e, variationOverrides) - this.setState({ changed: true }) - }} - setValue={(i, e, variationOverrides) => { - setVariations(i, e, variationOverrides) - this.setState({ changed: true }) - }} - updateVariation={(i, e, variationOverrides) => { - setVariations(i, e, variationOverrides) - this.setState({ changed: true }) - }} - weightTitle='Override Weight %' - /> -
    )}
    + {!!multivariateOptions?.length && ( +
    + { + const foundMv = + v.multivariate_options && + v.multivariate_options.find( + (v) => v.multivariate_feature_option === mv.id, + ) + if (foundMv) { + return { + ...mv, + default_percentage_allocation: + foundMv.percentage_allocation, + } + } + return { + ...mv, + default_percentage_allocation: 0, + } + })} + setVariations={(i, e, variationOverrides) => { + setVariations(i, e, variationOverrides) + this.setState({ changed: true }) + }} + setValue={(i, e, variationOverrides) => { + setVariations(i, e, variationOverrides) + this.setState({ changed: true }) + }} + updateVariation={(i, e, variationOverrides) => { + setVariations(i, e, variationOverrides) + this.setState({ changed: true }) + }} + weightTitle='Override Weight %' + /> +
    + )}
    ) } @@ -581,164 +564,160 @@ class TheComponent extends Component { segmentOverrideLimitAlert.percentage >= 100 return (
    - - {({ permission: manageSegments }) => { - return ( -
    - {!this.props.id && - !this.props.disableCreate && - !this.props.showCreateSegment && - !this.props.readOnly && ( - - - this.setState({ selectedSegment }, this.addItem) - } - /> - - )} - {!this.props.showCreateSegment && - !this.props.readOnly && - !this.props.disableCreate && ( -
    - -
    - )} - {this.props.showCreateSegment && !this.state.segmentEditId && ( - )} - {this.props.showCreateSegment && this.state.segmentEditId && ( - { - this.setState({ - segmentEditId: undefined, - }) - this.props.setShowCreateSegment(false) - }} - onCancel={() => { - this.setState({ segmentEditId: undefined }) - this.props.setShowCreateSegment(false) - }} - environmentId={this.props.environmentId} - projectId={this.props.projectId} - /> - )} - {visibleValues && - !!visibleValues.length && - !this.props.showCreateSegment && ( -
    - {!this.props.id && ( -
    - - Segment overrides override the environment defaults, - prioritise them by dragging it to the top of the - list. Segment overrides will only apply when you - identify via the SDK, any identity overrides will - take priority.{' '} - - Check the Docs for more details - - . - - -
    - )} - {value && ( - <> - ({ - ...v, - }))} - setSegmentEditId={this.setSegmentEditId} - onSortEnd={this.onSortEnd} - projectFlag={this.props.projectFlag} - /> -
    - -
    - - )} + {value && ( + <> + { + const tagName = e.target.tagName.toLowerCase() + + // Check if the clicked element is a button, input, or textarea + if ( + tagName === 'input' || + tagName === 'textarea' || + tagName === 'button' + ) { + return true // Cancel sorting for inputs, buttons, etc. + } + + // Cancel if the clicked element has class 'feature-action__list' + if ( + e.target.closest('.feature-action__item') || // Checks for parent elements with the class + e.target.closest('.hljs') + ) { + return true // Cancel sorting if the target or parent has these classes + } + + // Otherwise, allow sorting + return false + }} // Here we pass the function to prevent sorting in certain cases + disabled={this.props.readOnly} + id={this.props.id} + name={this.props.name} + controlValue={this.props.controlValue} + multivariateOptions={multivariateOptions} + confirmRemove={this.confirmRemove} + setVariations={this.setVariations} + toggle={this.toggle} + setValue={this.setValue} + readOnly={this.props.readOnly} + showEditSegment={this.props.showEditSegment} + environmentId={this.props.environmentId} + projectId={this.props.projectId} + setShowCreateSegment={this.props.setShowCreateSegment} + items={value.map((v) => ({ + ...v, + }))} + setSegmentEditId={this.setSegmentEditId} + onSortEnd={this.onSortEnd} + projectFlag={this.props.projectFlag} + /> +
    +
    - )} + + )}
    - ) - }} - + )} +
    ) } diff --git a/frontend/web/components/modals/CreateFlag.js b/frontend/web/components/modals/CreateFlag.js index 046d4aead47f..1c1e0ad8b19e 100644 --- a/frontend/web/components/modals/CreateFlag.js +++ b/frontend/web/components/modals/CreateFlag.js @@ -1016,17 +1016,13 @@ const CreateFlag = class extends Component { const onCreateFeature = saveFeatureWithValidation(() => { this.save(createFlag, isSaving) }) + const isLimitReached = false const featureLimitAlert = Utils.calculateRemainingLimitsPercentage( project.total_features, project.max_features_allowed, ) - const showIdentityOverrides = - !identity && - isEdit && - !existingChangeRequest && - !hideIdentityOverridesTab return (
    - - - Segment Overrides{' '} - -
    + +
    + + Segment Overrides{' '} + + + } + place='top' + > + { + Constants.strings + .SEGMENT_OVERRIDES_DESCRIPTION + } + +
    + - { - Constants.strings - .SEGMENT_OVERRIDES_DESCRIPTION + {({ + permission: + manageSegmentOverrides, + }) => + !this.state.showCreateSegment && + !!manageSegmentOverrides && + !this.props.disableCreate && ( +
    + +
    + ) } -
    + {!this.state.showCreateSegment && !noPermissions && ( + {error} + + ) +} + +export default EditIdentity diff --git a/frontend/web/components/InfoMessage.tsx b/frontend/web/components/InfoMessage.tsx index 5d8c305dc294..0d59637289fd 100644 --- a/frontend/web/components/InfoMessage.tsx +++ b/frontend/web/components/InfoMessage.tsx @@ -3,6 +3,7 @@ import Icon, { IconName } from './Icon' import { chevronForward, close as closeIcon, chevronDown } from 'ionicons/icons' import { IonIcon } from '@ionic/react' import { FC } from 'react' +import Button from 'components/base/forms/Button'; type InfoMessageType = { buttonText?: string @@ -79,14 +80,14 @@ const InfoMessage: FC = ({
    {!isCollapsed && ( - <> -
    {children}
    +
    +
    {children}
    {url && buttonText && ( - + )} - +
    )}
    {isClosable && ( diff --git a/frontend/web/components/PanelSearch.js b/frontend/web/components/PanelSearch.js index d413ae78b8d4..8d5bd17747f1 100644 --- a/frontend/web/components/PanelSearch.js +++ b/frontend/web/components/PanelSearch.js @@ -229,6 +229,7 @@ const PanelSearch = class extends Component { placeholder='Search' search /> + {this.props.filterRowContent} )} diff --git a/frontend/web/components/TryIt.js b/frontend/web/components/TryIt.js index bbf72ba83428..cc7e4fc59cfc 100644 --- a/frontend/web/components/TryIt.js +++ b/frontend/web/components/TryIt.js @@ -103,6 +103,4 @@ const TryIt = class extends Component { } } -TryIt.propTypes = {} - -module.exports = ConfigProvider(TryIt) +export default ConfigProvider(TryIt) diff --git a/frontend/web/components/base/forms/Button.tsx b/frontend/web/components/base/forms/Button.tsx index b6d501566370..194268535c0f 100644 --- a/frontend/web/components/base/forms/Button.tsx +++ b/frontend/web/components/base/forms/Button.tsx @@ -33,6 +33,7 @@ export type ButtonType = ButtonHTMLAttributes & { target?: HTMLAttributeAnchorTarget theme?: keyof typeof themeClassNames size?: keyof typeof sizeClassNames + iconSize?: number } export const Button: FC = ({ @@ -44,6 +45,7 @@ export const Button: FC = ({ iconLeftColour, iconRight, iconRightColour, + iconSize = 24, onMouseUp, size = 'default', target, @@ -65,6 +67,7 @@ export const Button: FC = ({ fill={iconLeftColour ? Constants.colours[iconLeftColour] : undefined} className='me-2' name={iconLeft} + width={iconSize} /> )} {children} @@ -75,6 +78,7 @@ export const Button: FC = ({ } className='ml-2' name={iconRight} + width={iconSize} /> )} @@ -95,6 +99,7 @@ export const Button: FC = ({ fill={iconLeftColour ? 
Constants.colours[iconLeftColour] : undefined} className='mr-2' name={iconLeft} + width={iconSize} /> )} {children} @@ -105,6 +110,7 @@ export const Button: FC = ({ } className='ml-2' name={iconRight} + width={iconSize} /> )} diff --git a/frontend/web/components/modals/CreateFlag.js b/frontend/web/components/modals/CreateFlag.js index 38c3b958dc22..6d968ea6b7b6 100644 --- a/frontend/web/components/modals/CreateFlag.js +++ b/frontend/web/components/modals/CreateFlag.js @@ -5,6 +5,8 @@ import data from 'common/data/base/_data' import ProjectStore from 'common/stores/project-store' import ConfigProvider from 'common/providers/ConfigProvider' import FeatureListStore from 'common/stores/feature-list-store' +import IdentityProvider from 'common/providers/IdentityProvider' + import { Bar, BarChart, diff --git a/frontend/web/components/modals/CreateTrait.js b/frontend/web/components/modals/CreateTrait.js index c7f4234d8971..19a542081d02 100644 --- a/frontend/web/components/modals/CreateTrait.js +++ b/frontend/web/components/modals/CreateTrait.js @@ -4,6 +4,7 @@ import Constants from 'common/constants' import Format from 'common/utils/format' import ErrorMessage from 'components/ErrorMessage' import ModalHR from './ModalHR' +import IdentityProvider from 'common/providers/IdentityProvider' const CreateTrait = class extends Component { static displayName = 'CreateTrait' @@ -171,4 +172,4 @@ const CreateTrait = class extends Component { CreateTrait.propTypes = {} -module.exports = CreateTrait +export default CreateTrait diff --git a/frontend/web/components/pages/SegmentsPage.tsx b/frontend/web/components/pages/SegmentsPage.tsx index 3e05f20897d6..89ece6ddbcf4 100644 --- a/frontend/web/components/pages/SegmentsPage.tsx +++ b/frontend/web/components/pages/SegmentsPage.tsx @@ -27,7 +27,7 @@ import classNames from 'classnames' import InfoMessage from 'components/InfoMessage' import { withRouter } from 'react-router-dom' -const CodeHelp = require('../../components/CodeHelp') +import 
CodeHelp from 'components/CodeHelp' type SegmentsPageType = { router: RouterChildContext['router'] match: { diff --git a/frontend/web/components/pages/UserPage.js b/frontend/web/components/pages/UserPage.js deleted file mode 100644 index c70853f7cf9b..000000000000 --- a/frontend/web/components/pages/UserPage.js +++ /dev/null @@ -1,1318 +0,0 @@ -import React, { Component } from 'react' -import ConfirmToggleFeature from 'components/modals/ConfirmToggleFeature' -import CreateFlagModal from 'components/modals/CreateFlag' -import CreateTraitModal from 'components/modals/CreateTrait' -import TryIt from 'components/TryIt' -import CreateSegmentModal from 'components/modals/CreateSegment' -import FeatureListStore from 'common/stores/feature-list-store' -import { getTags } from 'common/services/useTag' -import { getStore } from 'common/store' -import TagValues from 'components/tags/TagValues' -import _data from 'common/data/base/_data' -import JSONReference from 'components/JSONReference' -import Constants from 'common/constants' -import IdentitySegmentsProvider from 'common/providers/IdentitySegmentsProvider' -import ConfigProvider from 'common/providers/ConfigProvider' -import Permission from 'common/providers/Permission' -import Icon from 'components/Icon' -import FeatureValue from 'components/FeatureValue' -import PageTitle from 'components/PageTitle' -import TableTagFilter from 'components/tables/TableTagFilter' -import TableSearchFilter from 'components/tables/TableSearchFilter' -import TableFilterOptions from 'components/tables/TableFilterOptions' -import TableSortFilter from 'components/tables/TableSortFilter' -import { getViewMode, setViewMode } from 'common/useViewMode' -import classNames from 'classnames' -import IdentifierString from 'components/IdentifierString' -import Button from 'components/base/forms/Button' -import { removeUserOverride } from 'components/RemoveUserOverride' -import TableOwnerFilter from 'components/tables/TableOwnerFilter' -import 
TableGroupsFilter from 'components/tables/TableGroupsFilter' -import TableValueFilter from 'components/tables/TableValueFilter' -import Format from 'common/utils/format' -import InfoMessage from 'components/InfoMessage' -const width = [200, 48, 78] -const valuesEqual = (actualValue, flagValue) => { - const nullFalseyA = - actualValue == null || - actualValue === '' || - typeof actualValue === 'undefined' - const nullFalseyB = - flagValue == null || flagValue === '' || typeof flagValue === 'undefined' - if (nullFalseyA && nullFalseyB) { - return true - } - return actualValue === flagValue -} - -const UserPage = class extends Component { - static displayName = 'UserPage' - - constructor(props, context) { - super(props, context) - - const params = Utils.fromParam() - this.state = { - group_owners: - typeof params.group_owners === 'string' - ? params.group_owners.split(',').map((v) => parseInt(v)) - : [], - is_enabled: - params.is_enabled === 'true' - ? true - : params.is_enabled === 'false' - ? false - : null, - loadedOnce: false, - owners: - typeof params.owners === 'string' - ? params.owners.split(',').map((v) => parseInt(v)) - : [], - page: params.page ? parseInt(params.page) - 1 : 1, - preselect: Utils.fromParam().flag, - search: params.search || null, - showArchived: params.is_archived === 'true', - sort: { - label: Format.camelCase(params.sortBy || 'Name'), - sortBy: params.sortBy || 'name', - sortOrder: params.sortOrder || 'asc', - }, - tag_strategy: params.tag_strategy || 'INTERSECTION', - tags: - typeof params.tags === 'string' - ? params.tags.split(',').map((v) => parseInt(v)) - : [], - value_search: - typeof params.value_search === 'string' ? params.value_search : '', - } - } - - getFilter = () => ({ - group_owners: this.state.group_owners?.length - ? this.state.group_owners - : undefined, - is_archived: this.state.showArchived, - is_enabled: - this.state.is_enabled === null ? undefined : this.state.is_enabled, - owners: this.state.owners?.length ? 
this.state.owners : undefined, - tag_strategy: this.state.tag_strategy, - tags: - !this.state.tags || !this.state.tags.length - ? undefined - : this.state.tags.join(','), - value_search: this.state.value_search ? this.state.value_search : undefined, - }) - - componentDidMount() { - const { - match: { params }, - } = this.props - - AppActions.getIdentity( - this.props.match.params.environmentId, - this.props.match.params.id, - ) - AppActions.getIdentitySegments( - this.props.match.params.projectId, - this.props.match.params.id, - ) - AppActions.getFeatures( - this.props.match.params.projectId, - this.props.match.params.environmentId, - true, - this.state.search, - this.state.sort, - 0, - this.getFilter(), - ) - getTags(getStore(), { - projectId: `${params.projectId}`, - }) - this.getActualFlags() - API.trackPage(Constants.pages.USER) - } - - onSave = () => { - this.getActualFlags() - } - - editSegment = (segment) => { - API.trackEvent(Constants.events.VIEW_SEGMENT) - openModal( - `Segment - ${segment.name}`, - , - 'side-modal create-segment-modal', - ) - } - - getActualFlags = () => { - const { environmentId, id } = this.props.match.params - const url = `${ - Project.api - }environments/${environmentId}/${Utils.getIdentitiesEndpoint()}/${id}/${Utils.getFeatureStatesEndpoint()}/all/` - _data - .get(url) - .then((res) => { - this.setState({ actualFlags: _.keyBy(res, (v) => v.feature.name) }) - }) - .catch(() => {}) - } - - onTraitSaved = () => { - AppActions.getIdentitySegments( - this.props.match.params.projectId, - this.props.match.params.id, - ) - } - - confirmToggle = (projectFlag, environmentFlag, cb) => { - openModal( - 'Toggle Feature', - , - 'p-0', - ) - } - editFeature = ( - projectFlag, - environmentFlag, - identityFlag, - multivariate_feature_state_values, - ) => { - history.replaceState( - {}, - null, - `${document.location.pathname}?flag=${projectFlag.name}`, - ) - API.trackEvent(Constants.events.VIEW_USER_FEATURE) - openModal( - - - Edit User Feature:{' 
'} - {projectFlag.name} - - - , - , - 'side-modal create-feature-modal overflow-y-auto', - () => { - history.replaceState({}, null, `${document.location.pathname}`) - }, - ) - } - - createTrait = () => { - API.trackEvent(Constants.events.VIEW_USER_FEATURE) - openModal( - 'Create User Trait', - , - 'p-0', - ) - } - - editTrait = (trait) => { - API.trackEvent(Constants.events.VIEW_USER_FEATURE) - openModal( - 'Edit User Trait', - , - 'p-0', - ) - } - - removeTrait = (id, trait_key) => { - openConfirm({ - body: ( -
    - {'Are you sure you want to delete trait '} - {trait_key} - { - ' from this user? Traits can be re-added here or via one of our SDKs.' - } -
    - ), - destructive: true, - onYes: () => - AppActions.deleteIdentityTrait( - this.props.match.params.environmentId, - this.props.match.params.id, - id || trait_key, - ), - title: 'Delete Trait', - yesText: 'Confirm', - }) - } - getURLParams = () => ({ - ...this.getFilter(), - group_owners: (this.state.group_owners || [])?.join(',') || undefined, - owners: (this.state.owners || [])?.join(',') || undefined, - page: this.state.page || 1, - search: this.state.search || '', - sortBy: this.state.sort.sortBy, - sortOrder: this.state.sort.sortOrder, - tags: (this.state.tags || [])?.join(',') || undefined, - }) - - filter = () => { - const currentParams = Utils.fromParam() - if (!currentParams.flag) { - // don't replace page if we are currently viewing a feature - this.props.router.history.replace( - `${document.location.pathname}?${Utils.toParam(this.getURLParams())}`, - ) - } - AppActions.searchFeatures( - this.props.match.params.projectId, - this.props.match.params.environmentId, - true, - this.state.search, - this.state.sort, - this.getFilter(), - ) - } - - render() { - const { actualFlags } = this.state - const { environmentId, projectId } = this.props.match.params - const preventAddTrait = !AccountStore.getOrganisation().persist_trait_data - return ( -
    - - {({ permission: manageUserPermission }) => ( - - {({ permission }) => ( -
    - - {( - { - environmentFlags, - identity, - identityFlags, - isLoading, - projectFlags, - traits, - }, - { removeFlag, toggleFlag }, - ) => - isLoading && - !this.state.tags.length && - !this.state.tags.length && - !this.state.showArchived && - typeof this.state.search !== 'string' && - (!identityFlags || !actualFlags || !projectFlags) ? ( -
    - -
    - ) : ( - <> - - } - > - View and manage feature states and traits for this - user. -
    -
    -
    -
    - - - - Features -
    - - Overriding features here will take - priority over any segment override. - Any features that are not overridden - for this user will fallback to any - segment overrides or the environment - defaults. - -
    -
    - } - renderFooter={() => ( - <> - - - - - )} - header={ - -
    - { - FeatureListStore.isLoading = true - this.setState( - { - search: - Utils.safeParseEventValue( - e, - ), - }, - this.filter, - ) - }} - value={this.state.search} - /> - - { - this.setState( - { - tag_strategy, - }, - this.filter, - ) - }} - isLoading={ - FeatureListStore.isLoading - } - onToggleArchived={(value) => { - if ( - value !== - this.state.showArchived - ) { - FeatureListStore.isLoading = true - this.setState( - { - showArchived: - !this.state - .showArchived, - }, - this.filter, - ) - } - }} - showArchived={ - this.state.showArchived - } - onClearAll={() => { - FeatureListStore.isLoading = true - this.setState( - { - showArchived: false, - tags: [], - }, - this.filter, - ) - }} - onChange={(tags) => { - FeatureListStore.isLoading = true - if ( - tags.includes('') && - tags.length > 1 - ) { - if ( - !this.state.tags.includes( - '', - ) - ) { - this.setState( - { tags: [''] }, - this.filter, - ) - } else { - this.setState( - { - tags: tags.filter( - (v) => !!v, - ), - }, - this.filter, - ) - } - } else { - this.setState( - { tags }, - this.filter, - ) - } - AsyncStorage.setItem( - `${projectId}tags`, - JSON.stringify(tags), - ) - }} - /> - { - this.setState( - { - is_enabled: enabled, - value_search: valueSearch, - }, - this.filter, - ) - }} - /> - { - FeatureListStore.isLoading = true - this.setState( - { - owners: owners, - }, - this.filter, - ) - }} - /> - { - FeatureListStore.isLoading = true - this.setState( - { - group_owners: group_owners, - }, - this.filter, - ) - }} - /> - - { - FeatureListStore.isLoading = true - this.setState( - { sort }, - this.filter, - ) - }} - /> - -
    -
    - } - isLoading={FeatureListStore.isLoading} - items={projectFlags} - renderRow={( - { description, id, name }, - i, - ) => { - const identityFlag = - identityFlags[id] || {} - const environmentFlag = - (environmentFlags && - environmentFlags[id]) || - {} - const hasUserOverride = - identityFlag.identity || - identityFlag.identity_uuid - const flagEnabled = hasUserOverride - ? identityFlag.enabled - : environmentFlag.enabled // show default value s - const flagValue = hasUserOverride - ? identityFlag.feature_state_value - : environmentFlag.feature_state_value - - const actualEnabled = - (actualFlags && - !!actualFlags && - actualFlags[name] && - actualFlags[name].enabled) || - false - const actualValue = - !!actualFlags && - actualFlags[name] && - actualFlags[name].feature_state_value - const flagEnabledDifferent = - hasUserOverride - ? false - : actualEnabled !== flagEnabled - const flagValueDifferent = hasUserOverride - ? false - : !valuesEqual(actualValue, flagValue) - const projectFlag = - projectFlags && - projectFlags.find( - (p) => - p.id === - (environmentFlag && - environmentFlag.feature), - ) - const isMultiVariateOverride = - flagValueDifferent && - projectFlag && - projectFlag.multivariate_options && - projectFlag.multivariate_options.find( - (v) => { - const value = - Utils.featureStateToValue(v) - return value === actualValue - }, - ) - const flagDifferent = - flagEnabledDifferent || - flagValueDifferent - const onClick = () => { - if (permission) { - this.editFeature( - _.find(projectFlags, { id }), - environmentFlags && - environmentFlags[id], - (identityFlags && - identityFlags[id]) || - actualFlags[name], - identityFlags && - identityFlags[id] && - identityFlags[id] - .multivariate_feature_state_values, - ) - } - } - const isCompact = - getViewMode() === 'compact' - if ( - name === this.state.preselect && - actualFlags - ) { - this.state.preselect = null - onClick() - } - return ( -
    - - - - - - - {description ? ( - {name} - } - > - {description} - - ) : ( - name - )} - - - - - - - {hasUserOverride ? ( -
    - Overriding defaults -
    - ) : flagEnabledDifferent ? ( -
    - - - {isMultiVariateOverride ? ( - - This flag is being - overridden by a - variation defined on - your feature, the - control value is{' '} - - {flagEnabled - ? 'on' - : 'off'} - {' '} - for this user - - ) : ( - - This flag is being - overridden by - segments and would - normally be{' '} - - {flagEnabled - ? 'on' - : 'off'} - {' '} - for this user - - )} - - -
    - ) : flagValueDifferent ? ( - isMultiVariateOverride ? ( -
    - - This feature is being - overriden by a % - variation in the - environment, the control - value of this feature is{' '} - - -
    - ) : ( -
    - - This feature is being - overriden by segments - and would normally be{' '} - {' '} - for this user - -
    - ) - ) : ( - getViewMode() === - 'default' && ( -
    - Using environment defaults -
    - ) - )} -
    -
    -
    -
    - -
    -
    { - e.stopPropagation() - }} - > - {Utils.renderWithPermission( - permission, - Constants.environmentPermissions( - Utils.getManageFeaturePermissionDescription( - false, - true, - ), - ), - - this.confirmToggle( - _.find(projectFlags, { - id, - }), - actualFlags[name], - () => { - toggleFlag({ - environmentFlag: - actualFlags[name], - environmentId: - this.props.match - .params - .environmentId, - identity: - this.props.match - .params.id, - identityFlag, - projectFlag: { id }, - }) - }, - ) - } - />, - )} -
    -
    { - e.stopPropagation() - }} - > - {hasUserOverride && ( - <> - {Utils.renderWithPermission( - permission, - Constants.environmentPermissions( - Utils.getManageFeaturePermissionDescription( - false, - true, - ), - ), - , - )} - - )} -
    -
    - ) - }} - renderSearchWithNoResults - paging={FeatureListStore.paging} - search={this.state.search} - nextPage={() => - AppActions.getFeatures( - this.props.match.params.projectId, - this.props.match.params.environmentId, - true, - this.state.search, - this.state.sort, - FeatureListStore.paging.next, - this.getFilter(), - ) - } - prevPage={() => - AppActions.getFeatures( - this.props.match.params.projectId, - this.props.match.params.environmentId, - true, - this.state.search, - this.state.sort, - FeatureListStore.paging.previous, - this.getFilter(), - ) - } - goToPage={(page) => - AppActions.getFeatures( - this.props.match.params.projectId, - this.props.match.params.environmentId, - true, - this.state.search, - this.state.sort, - page, - this.getFilter(), - ) - } - /> - - {!preventAddTrait && ( - - - {Utils.renderWithPermission( - manageUserPermission, - Constants.environmentPermissions( - Utils.getManageUserPermissionDescription(), - ), - , - )} -
    - } - header={ - - - Trait - - - Value - -
    - Remove -
    -
    - } - renderRow={( - { id, trait_key, trait_value }, - i, - ) => ( - - this.editTrait({ - id, - trait_key, - trait_value, - }) - } - > - -
    - {trait_key} -
    -
    - - - -
    e.stopPropagation()} - > - {Utils.renderWithPermission( - manageUserPermission, - Constants.environmentPermissions( - Utils.getManageUserPermissionDescription(), - ), - , - )} -
    -
    - )} - renderNoResults={ - - {Utils.renderWithPermission( - manageUserPermission, - Constants.environmentPermissions( - Utils.getManageUserPermissionDescription(), - ), - , - )} -
    - } - > -
    - - This user has no traits. - -
    - - } - filterRow={({ trait_key }, search) => - trait_key - .toLowerCase() - .indexOf(search) > -1 - } - /> - - )} - - {({ segments }) => - !segments ? ( -
    - -
    - ) : ( - - - - Name - - - Description - - - } - items={segments || []} - renderRow={( - { created_date, description, name }, - i, - ) => ( - - this.editSegment(segments[i]) - } - className='list-item clickable' - space - key={i} - > - -
    - this.editSegment( - segments[i], - ) - } - > - - {name} - -
    -
    - Created{' '} - {moment(created_date).format( - 'DD/MMM/YYYY', - )} -
    -
    - - {description ? ( -
    - {description} -
    -
    - ) : ( - '' - )} -
    -
    - )} - renderNoResults={ - -
    - - This user is not a member of - any segments. - -
    -
    - } - filterRow={({ name }, search) => - name.toLowerCase().indexOf(search) > - -1 - } - /> -
    - ) - } -
    - -
    -
    - - - - - - -
    -
    - - ) - } - -
    - )} - - )} - -
    - ) - } -} - -UserPage.propTypes = {} - -module.exports = ConfigProvider(UserPage) diff --git a/frontend/web/components/pages/UserPage.tsx b/frontend/web/components/pages/UserPage.tsx new file mode 100644 index 000000000000..f7bbbc300434 --- /dev/null +++ b/frontend/web/components/pages/UserPage.tsx @@ -0,0 +1,1203 @@ +import React, { FC, useCallback, useEffect, useState } from 'react' +import { RouterChildContext } from 'react-router' +import keyBy from 'lodash/keyBy' + +import { getStore } from 'common/store' +import { getTags } from 'common/services/useTag' +import { getViewMode, setViewMode } from 'common/useViewMode' +import { removeUserOverride } from 'components/RemoveUserOverride' +import { + FeatureState, + IdentityFeatureState, + ProjectFlag, +} from 'common/types/responses' +import API from 'project/api' +import AccountStore from 'common/stores/account-store' +import AppActions from 'common/dispatcher/app-actions' +import Button from 'components/base/forms/Button' +import CodeHelp from 'components/CodeHelp' +import ConfigProvider from 'common/providers/ConfigProvider' +import ConfirmToggleFeature from 'components/modals/ConfirmToggleFeature' +import Constants from 'common/constants' +import CreateFlagModal from 'components/modals/CreateFlag' +import CreateSegmentModal from 'components/modals/CreateSegment' +import CreateTraitModal from 'components/modals/CreateTrait' +import EditIdentity from 'components/EditIdentity' +import FeatureListStore from 'common/stores/feature-list-store' +import FeatureValue from 'components/FeatureValue' +import Format from 'common/utils/format' +import Icon from 'components/Icon' +import IdentifierString from 'components/IdentifierString' +import IdentityProvider from 'common/providers/IdentityProvider' +import IdentitySegmentsProvider from 'common/providers/IdentitySegmentsProvider' +import InfoMessage from 'components/InfoMessage' +import JSONReference from 'components/JSONReference' +import PageTitle from 
'components/PageTitle' +import Panel from 'components/base/grid/Panel' +import PanelSearch from 'components/PanelSearch' +import Permission from 'common/providers/Permission' +import Project from 'common/project' +import Switch from 'components/Switch' +import TableFilterOptions from 'components/tables/TableFilterOptions' +import TableGroupsFilter from 'components/tables/TableGroupsFilter' +import TableOwnerFilter from 'components/tables/TableOwnerFilter' +import TableSearchFilter from 'components/tables/TableSearchFilter' +import TableSortFilter from 'components/tables/TableSortFilter' +import TableTagFilter from 'components/tables/TableTagFilter' +import TableValueFilter from 'components/tables/TableValueFilter' +import TagValues from 'components/tags/TagValues' +import TryIt from 'components/TryIt' +import Utils from 'common/utils/utils' +import _data from 'common/data/base/_data' +import classNames from 'classnames' +import moment from 'moment' + +const width = [200, 48, 78] + +const valuesEqual = (actualValue: any, flagValue: any) => { + const nullFalseyA = + actualValue == null || + actualValue === '' || + typeof actualValue === 'undefined' + const nullFalseyB = + flagValue == null || flagValue === '' || typeof flagValue === 'undefined' + return nullFalseyA && nullFalseyB ? true : actualValue === flagValue +} +type UserPageType = { + router: RouterChildContext['router'] + match: { + params: { + environmentId: string + projectId: string + id: string + identity: string + } + } +} +const UserPage: FC = (props) => { + const params = Utils.fromParam() + const { router } = props + const { environmentId, id, identity, projectId } = props.match.params + + // Separate state hooks + const [groupOwners, setGroupOwners] = useState( + typeof params.group_owners === 'string' + ? params.group_owners.split(',').map((v: string) => parseInt(v)) + : [], + ) + const [isEnabled, setIsEnabled] = useState( + params.is_enabled === 'true' + ? 
true + : params.is_enabled === 'false' + ? false + : null, + ) + const [owners, setOwners] = useState( + typeof params.owners === 'string' + ? params.owners.split(',').map((v: string) => parseInt(v)) + : [], + ) + const [preselect, setPreselect] = useState(Utils.fromParam().flag) + const [search, setSearch] = useState(params.search || null) + const [showArchived, setShowArchived] = useState( + params.is_archived === 'true', + ) + const [sort, setSort] = useState({ + label: Format.camelCase(params.sortBy || 'Name'), + sortBy: params.sortBy || 'name', + sortOrder: params.sortOrder || 'asc', + }) + const [tagStrategy, setTagStrategy] = useState( + params.tag_strategy || 'INTERSECTION', + ) + const [tags, setTags] = useState( + typeof params.tags === 'string' + ? params.tags.split(',').map((v: string) => parseInt(v)) + : [], + ) + const [valueSearch, setValueSearch] = useState(params.value_search || '') + const [actualFlags, setActualFlags] = + useState>() + + const getFilter = useCallback( + () => ({ + group_owners: groupOwners.length ? groupOwners : undefined, + is_archived: showArchived, + is_enabled: isEnabled === null ? undefined : isEnabled, + owners: owners.length ? owners : undefined, + tag_strategy: tagStrategy, + tags: tags.length ? tags.join(',') : undefined, + value_search: valueSearch ? 
valueSearch : undefined, + }), + [ + groupOwners, + showArchived, + isEnabled, + owners, + tagStrategy, + tags, + valueSearch, + ], + ) + useEffect(() => { + AppActions.searchFeatures( + projectId, + environmentId, + true, + search, + sort, + getFilter(), + ) + }, [search, sort, getFilter, environmentId, projectId]) + + useEffect(() => { + AppActions.getIdentity(environmentId, id) + AppActions.getIdentitySegments(projectId, id) + getTags(getStore(), { projectId: `${projectId}` }) + getActualFlags() + API.trackPage(Constants.pages.USER) + // eslint-disable-next-line + }, []) + + const getActualFlags = () => { + const url = `${ + Project.api + }environments/${environmentId}/${Utils.getIdentitiesEndpoint()}/${id}/${Utils.getFeatureStatesEndpoint()}/all/` + _data.get(url).then((res: IdentityFeatureState[]) => { + setActualFlags(keyBy(res, (v: IdentityFeatureState) => v.feature.name)) + }) + } + + const onSave = () => { + getActualFlags() + } + + const editSegment = (segment: any) => { + API.trackEvent(Constants.events.VIEW_SEGMENT) + openModal( + `Segment - ${segment.name}`, + , + 'side-modal create-segment-modal', + ) + } + + const confirmToggle = (projectFlag: any, environmentFlag: any, cb: any) => { + openModal( + 'Toggle Feature', + , + 'p-0', + ) + } + const editFeature = ( + projectFlag: ProjectFlag, + environmentFlag: FeatureState, + identityFlag: IdentityFeatureState, + multivariate_feature_state_values: IdentityFeatureState['multivariate_feature_state_values'], + ) => { + history.replaceState( + {}, + '', + `${document.location.pathname}?flag=${projectFlag.name}`, + ) + API.trackEvent(Constants.events.VIEW_USER_FEATURE) + openModal( + + + Edit User Feature:{' '} + {projectFlag.name} + + + , + , + 'side-modal create-feature-modal overflow-y-auto', + () => { + history.replaceState({}, '', `${document.location.pathname}`) + }, + ) + } + + const createTrait = () => { + API.trackEvent(Constants.events.VIEW_USER_FEATURE) + openModal( + 'Create User Trait', + , + 
'p-0', + ) + } + + const filter = () => { + const currentParams = Utils.fromParam() + if (!currentParams.flag) { + props.router.history.replace( + `${document.location.pathname}?${Utils.toParam(getFilter())}`, + ) + } + AppActions.searchFeatures( + projectId, + environmentId, + true, + search, + sort, + getFilter(), + ) + } + const onTraitSaved = () => { + AppActions.getIdentitySegments(projectId, id) + } + + const editTrait = (trait: { + id: string + trait_key: string + trait_value: string + }) => { + openModal( + 'Edit User Trait', + , + 'p-0', + ) + } + + const removeTrait = (id: string, trait_key: string) => { + openConfirm({ + body: ( +
    + {'Are you sure you want to delete trait '} + {trait_key} + { + ' from this user? Traits can be re-added here or via one of our SDKs.' + } +
    + ), + destructive: true, + onYes: () => + AppActions.deleteIdentityTrait(environmentId, id, id || trait_key), + title: 'Delete Trait', + yesText: 'Confirm', + }) + } + + const preventAddTrait = !AccountStore.getOrganisation().persist_trait_data + const isEdge = Utils.getIsEdge() + const showAliases = isEdge && Utils.getFlagsmithHasFeature('identity_aliases') + + return ( +
    + + {({ permission: manageUserPermission }) => ( + + {({ permission }) => ( +
    + + {( + { + environmentFlags, + identity, + identityFlags, + isLoading, + projectFlags, + traits, + }: any, + { toggleFlag }: any, + ) => + isLoading && + !tags.length && + !showArchived && + typeof search !== 'string' && + (!identityFlags || !actualFlags || !projectFlags) ? ( +
    + +
    + ) : ( + <> + + } + > + {showAliases && ( + <> +
    + + Alias:{' '} + + } + > + Aliases allow you to add searchable names to + an identity + + +
    + + )} + View and manage feature states and traits for this + user. +
    +
    +
    +
    + + + + Features +
    + + Overriding features here will take + priority over any segment override. + Any features that are not overridden + for this user will fallback to any + segment overrides or the environment + defaults. + +
    +
    + } + renderFooter={() => ( + <> + + + + + )} + header={ + +
    + { + FeatureListStore.isLoading = true + setSearch( + Utils.safeParseEventValue(e), + ) + }} + value={search} + /> + + { + setTagStrategy(strategy) + }} + isLoading={ + FeatureListStore.isLoading + } + onToggleArchived={(value) => { + if (value !== showArchived) { + FeatureListStore.isLoading = + true + setShowArchived(!showArchived) + } + }} + showArchived={showArchived} + onChange={(newTags) => { + FeatureListStore.isLoading = true + setTags( + newTags.includes('') && + newTags.length > 1 + ? [''] + : newTags, + ) + }} + /> + { + setIsEnabled(enabled) + setValueSearch(valueSearch) + }} + /> + { + FeatureListStore.isLoading = true + setOwners(newOwners) + }} + /> + { + FeatureListStore.isLoading = true + setGroupOwners(newGroupOwners) + }} + /> + + { + FeatureListStore.isLoading = true + setSort(newSort) + }} + /> + +
    +
    + } + isLoading={FeatureListStore.isLoading} + items={projectFlags} + renderRow={( + { description, id: featureId, name }: any, + i: number, + ) => { + const identityFlag = + identityFlags[featureId] || {} + const environmentFlag = + (environmentFlags && + environmentFlags[featureId]) || + {} + const hasUserOverride = + identityFlag.identity || + identityFlag.identity_uuid + const flagEnabled = hasUserOverride + ? identityFlag.enabled + : environmentFlag.enabled + const flagValue = hasUserOverride + ? identityFlag.feature_state_value + : environmentFlag.feature_state_value + const actualEnabled = + actualFlags && actualFlags[name]?.enabled + const actualValue = + actualFlags && + actualFlags[name]?.feature_state_value + const flagEnabledDifferent = hasUserOverride + ? false + : actualEnabled !== flagEnabled + const flagValueDifferent = hasUserOverride + ? false + : !valuesEqual(actualValue, flagValue) + const projectFlag = projectFlags?.find( + (p: any) => + p.id === environmentFlag.feature, + ) + const isMultiVariateOverride = + flagValueDifferent && + projectFlag?.multivariate_options?.find( + (v: any) => + Utils.featureStateToValue(v) === + actualValue, + ) + const flagDifferent = + flagEnabledDifferent || flagValueDifferent + + const onClick = () => { + if (permission) { + editFeature( + projectFlag, + environmentFlags[featureId], + identityFlags[featureId] || + actualFlags![name], + identityFlags[featureId] + ?.multivariate_feature_state_values, + ) + } + } + + const isCompact = + getViewMode() === 'compact' + if (name === preselect && actualFlags) { + setPreselect(null) + onClick() + } + + return ( +
    + + + + + + + {description ? ( + {name} + } + > + {description} + + ) : ( + name + )} + + + + + + {hasUserOverride ? ( +
    + Overriding defaults +
    + ) : flagEnabledDifferent ? ( +
    + + + {isMultiVariateOverride ? ( + + This flag is being + overridden by a + variation defined on + your feature, the + control value is{' '} + + {flagEnabled + ? 'on' + : 'off'} + {' '} + for this user + + ) : ( + + This flag is being + overridden by segments + and would normally be{' '} + + {flagEnabled + ? 'on' + : 'off'} + {' '} + for this user + + )} + + +
    + ) : flagValueDifferent ? ( + isMultiVariateOverride ? ( +
    + + This feature is being + overridden by a % + variation in the + environment, the control + value of this feature is{' '} + + +
    + ) : ( +
    + + This feature is being + overridden by segments and + would normally be{' '} + {' '} + for this user + +
    + ) + ) : ( + getViewMode() === 'default' && ( +
    + Using environment defaults +
    + ) + )} +
    +
    +
    +
    + +
    +
    e.stopPropagation()} + > + {Utils.renderWithPermission( + permission, + Constants.environmentPermissions( + Utils.getManageFeaturePermissionDescription( + false, + true, + ), + ), + + confirmToggle( + projectFlag, + actualFlags![name], + () => + toggleFlag({ + environmentFlag: + actualFlags![name], + environmentId, + identity: id, + identityFlag, + projectFlag: { + id: featureId, + }, + }), + ) + } + />, + )} +
    +
    e.stopPropagation()} + > + {hasUserOverride && ( + <> + {Utils.renderWithPermission( + permission, + Constants.environmentPermissions( + Utils.getManageFeaturePermissionDescription( + false, + true, + ), + ), + , + )} + + )} +
    +
    + ) + }} + renderSearchWithNoResults + paging={FeatureListStore.paging} + search={search} + nextPage={() => + AppActions.getFeatures( + projectId, + environmentId, + true, + search, + sort, + FeatureListStore.paging.next, + getFilter(), + ) + } + prevPage={() => + AppActions.getFeatures( + projectId, + environmentId, + true, + search, + sort, + FeatureListStore.paging.previous, + getFilter(), + ) + } + goToPage={(pageNumber: number) => + AppActions.getFeatures( + projectId, + environmentId, + true, + search, + sort, + pageNumber, + getFilter(), + ) + } + /> + + {!preventAddTrait && ( + + + {Utils.renderWithPermission( + manageUserPermission, + Constants.environmentPermissions( + Utils.getManageUserPermissionDescription(), + ), + , + )} +
    + } + header={ + + + Trait + + + Value + +
    + Remove +
    +
    + } + renderRow={( + { id, trait_key, trait_value }: any, + i: number, + ) => ( + + editTrait({ + id, + trait_key, + trait_value, + }) + } + > + +
    + {trait_key} +
    +
    + + + +
    e.stopPropagation()} + > + {Utils.renderWithPermission( + manageUserPermission, + Constants.environmentPermissions( + Utils.getManageUserPermissionDescription(), + ), + , + )} +
    +
    + )} + renderNoResults={ + + {Utils.renderWithPermission( + manageUserPermission, + Constants.environmentPermissions( + Utils.getManageUserPermissionDescription(), + ), + , + )} +
    + } + > +
    + + This user has no traits. + +
    + + } + filterRow={( + { trait_key }: any, + searchString: string, + ) => + trait_key + .toLowerCase() + .indexOf(searchString.toLowerCase()) > + -1 + } + /> + + )} + + {({ segments }: any) => + !segments ? ( +
    + +
    + ) : ( + + + + Name + + + Description + + + } + items={segments || []} + renderRow={( + { + created_date, + description, + name, + }: any, + i: number, + ) => ( + + editSegment(segments[i]) + } + > + +
    + editSegment(segments[i]) + } + > + + {name} + +
    +
    + Created{' '} + {moment(created_date).format( + 'DD/MMM/YYYY', + )} +
    +
    + + {description && ( +
    {description}
    + )} +
    +
    + )} + renderNoResults={ + +
    + + This user is not a member of any + segments. + +
    +
    + } + filterRow={( + { name }: any, + searchString: string, + ) => + name + .toLowerCase() + .indexOf( + searchString.toLowerCase(), + ) > -1 + } + /> +
    + ) + } +
    + +
    +
    + + + + + + +
    +
    + + ) + } + +
    + )} + + )} + +
    + ) +} + +export default ConfigProvider(UserPage) diff --git a/frontend/web/components/pages/UsersPage.tsx b/frontend/web/components/pages/UsersPage.tsx index 8bf1cda3c76b..f74e51480181 100644 --- a/frontend/web/components/pages/UsersPage.tsx +++ b/frontend/web/components/pages/UsersPage.tsx @@ -19,10 +19,8 @@ import JSONReference from 'components/JSONReference' // we need this to make JSX import Utils from 'common/utils/utils' import Icon from 'components/Icon' import PageTitle from 'components/PageTitle' -import Format from 'common/utils/format' import IdentifierString from 'components/IdentifierString' - -const CodeHelp = require('../CodeHelp') +import CodeHelp from 'components/CodeHelp' type UsersPageType = { router: RouterChildContext['router'] @@ -33,6 +31,10 @@ type UsersPageType = { } } } +const searchTypes = [ + { label: 'ID', value: 'id' }, + { label: 'Alias', value: 'alias' }, +] const UsersPage: FC = (props) => { const [page, setPage] = useState<{ number: number @@ -52,15 +54,19 @@ const UsersPage: FC = (props) => { ) const [deleteIdentity] = useDeleteIdentityMutation({}) const isEdge = Utils.getIsEdge() + const [searchType, setSearchType] = useState<'id' | 'alias'>('id') + + const showAliases = isEdge && Utils.getFlagsmithHasFeature('identity_aliases') const { data: identities, isLoading } = useGetIdentitiesQuery({ + dashboard_alias: searchType === 'alias' ? search?.toLowerCase() : undefined, environmentId: props.match.params.environmentId, isEdge, page: page.number, pageType: page.pageType, page_size: 10, pages: page.pages, - q: search, + q: searchType === 'alias' ? undefined : search, }) const { environmentId } = props.match.params @@ -149,6 +155,19 @@ const UsersPage: FC = (props) => { +